diff --git a/LICENSE b/LICENSE
index da09a0890f..dfcd71eb3f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2018 pype club
+Copyright (c) 2018 orbi tools s.r.o
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/pype/avalon_apps/avalon_app.py b/pype/avalon_apps/avalon_app.py
index 547ecd2299..35ab4c1eb7 100644
--- a/pype/avalon_apps/avalon_app.py
+++ b/pype/avalon_apps/avalon_app.py
@@ -14,6 +14,11 @@ class AvalonApps:
         self.parent = parent
         self.app_launcher = None
 
+    def process_modules(self, modules):
+        if "RestApiServer" in modules:
+            from .rest_api import AvalonRestApi
+            self.rest_api_obj = AvalonRestApi()
+
     # Definition of Tray menu
     def tray_menu(self, parent_menu=None):
         # Actions
diff --git a/pype/avalon_apps/rest_api.py b/pype/avalon_apps/rest_api.py
new file mode 100644
index 0000000000..ae027383a1
--- /dev/null
+++ b/pype/avalon_apps/rest_api.py
@@ -0,0 +1,86 @@
+import os
+import re
+import json
+import bson
+import bson.json_util
+from pype.services.rest_api import RestApi, abort, CallbackResult
+from pype.ftrack.lib.custom_db_connector import DbConnector
+
+
+class AvalonRestApi(RestApi):
+    dbcon = DbConnector(
+        os.environ["AVALON_MONGO"],
+        os.environ["AVALON_DB"]
+    )
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.dbcon.install()
+
+    @RestApi.route("/projects/<project_name>", url_prefix="/avalon", methods="GET")
+    def get_project(self, request):
+        project_name = request.url_data["project_name"]
+        if not project_name:
+            output = {}
+            for project_name in self.dbcon.tables():
+                project = self.dbcon[project_name].find_one({"type": "project"})
+                output[project_name] = project
+
+            return CallbackResult(data=self.result_to_json(output))
+
+        project = self.dbcon[project_name].find_one({"type": "project"})
+
+        if project:
+            return CallbackResult(data=self.result_to_json(project))
+
+        abort(404, "Project \"{}\" was not found in database".format(
+            project_name
+        ))
+
+    @RestApi.route("/projects/<project_name>/assets/<asset>", url_prefix="/avalon", methods="GET")
+    def get_assets(self, request):
+        _project_name = request.url_data["project_name"]
+        _asset = request.url_data["asset"]
+
+        if not self.dbcon.exist_table(_project_name):
+            abort(404, "Project \"{}\" was not found in database".format(
+                _project_name
+            ))
+
+        if not _asset:
+            assets = self.dbcon[_project_name].find({"type": "asset"})
+            output = self.result_to_json(assets)
+            return CallbackResult(data=output)
+
+        # identificator can be specified with url query (default is `name`)
+        identificator = request.query.get("identificator", "name")
+
+        asset = self.dbcon[_project_name].find_one({
+            "type": "asset",
+            identificator: _asset
+        })
+        if asset:
+            id = asset["_id"]
+            asset["_id"] = str(id)
+            return asset
+
+        abort(404, "Asset \"{}\" with {} was not found in project {}".format(
+            _asset, identificator, _project_name
+        ))
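For context on the conversion the `result_to_json` helper below performs: `bson.json_util.dumps` wraps every ObjectId in a `{"$oid": ...}` object, which plain REST consumers rarely expect. A quick illustration of the before/after (document contents are made up):

```python
# Illustration only: what bson.json_util emits vs. what the helper returns.
import bson.json_util
from bson import ObjectId

doc = {"_id": ObjectId("5d505646cff47e081b77a160"), "name": "hero"}
raw = bson.json_util.dumps(doc)
# raw == '{"_id": {"$oid": "5d505646cff47e081b77a160"}, "name": "hero"}'
# After the regex pass in result_to_json the wrapper collapses to:
#        '{"_id": "5d505646cff47e081b77a160", "name": "hero"}'
```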
+ """ + bson_json = bson.json_util.dumps(result) + # Replace "{$oid: "{entity id}"}" with "{entity id}" + regex1 = '(?P{\"\$oid\": \"[^\"]+\"})' + regex2 = '{\"\$oid\": (?P\"[^\"]+\")}' + for value in re.findall(regex1, bson_json): + for substr in re.findall(regex2, value): + bson_json = bson_json.replace(value, substr) + + return json.loads(bson_json) diff --git a/pype/clockify/ftrack_actions/action_clockify_sync.py b/pype/clockify/ftrack_actions/action_clockify_sync.py index e679894d0e..59444bc3ac 100644 --- a/pype/clockify/ftrack_actions/action_clockify_sync.py +++ b/pype/clockify/ftrack_actions/action_clockify_sync.py @@ -3,7 +3,7 @@ import sys import argparse import logging import json -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction, MissingPermision from pype.clockify import ClockifyAPI diff --git a/pype/ftrack/__init__.py b/pype/ftrack/__init__.py index 45ca8384b5..aa8f04bffb 100644 --- a/pype/ftrack/__init__.py +++ b/pype/ftrack/__init__.py @@ -1,2 +1,2 @@ from .lib import * -from .ftrack_server import FtrackServer +from .ftrack_server import FtrackServer, check_ftrack_url diff --git a/pype/ftrack/actions/action_attributes_remapper.py b/pype/ftrack/actions/action_attributes_remapper.py index a0393ece40..2059edb8c8 100644 --- a/pype/ftrack/actions/action_attributes_remapper.py +++ b/pype/ftrack/actions/action_attributes_remapper.py @@ -1,6 +1,6 @@ import os -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction from pype.ftrack.lib.io_nonsingleton import DbConnector diff --git a/pype/ftrack/actions/action_client_review_sort.py b/pype/ftrack/actions/action_client_review_sort.py index 91926c2874..bd8d3653d9 100644 --- a/pype/ftrack/actions/action_client_review_sort.py +++ b/pype/ftrack/actions/action_client_review_sort.py @@ -2,7 +2,7 @@ import sys import argparse import logging -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction diff --git a/pype/ftrack/actions/action_component_open.py b/pype/ftrack/actions/action_component_open.py index 98d773dba6..d721731ab0 100644 --- a/pype/ftrack/actions/action_component_open.py +++ b/pype/ftrack/actions/action_component_open.py @@ -3,7 +3,7 @@ import sys import argparse import logging import subprocess -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction diff --git a/pype/ftrack/actions/action_create_cust_attrs.py b/pype/ftrack/actions/action_create_cust_attrs.py index ac6dcb0fd7..5279a95a20 100644 --- a/pype/ftrack/actions/action_create_cust_attrs.py +++ b/pype/ftrack/actions/action_create_cust_attrs.py @@ -4,8 +4,9 @@ import argparse import json import arrow import logging -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction, get_ca_mongoid +import ftrack_api +from pype.ftrack import BaseAction +from pype.ftrack.lib.avalon_sync import CustAttrIdKey from pypeapp import config from ftrack_api.exception import NoResultFoundError @@ -171,7 +172,6 @@ class CustomAttributes(BaseAction): def avalon_mongo_id_attributes(self, session): # Attribute Name and Label - cust_attr_name = get_ca_mongoid() cust_attr_label = 'Avalon/Mongo Id' # Types that don't need object_type_id @@ -207,7 +207,7 @@ class CustomAttributes(BaseAction): group = self.get_group('avalon') data = {} - data['key'] = cust_attr_name + data['key'] = CustAttrIdKey data['label'] = cust_attr_label data['type'] = custom_attribute_type data['default'] = '' diff --git a/pype/ftrack/actions/action_create_folders.py 
diff --git a/pype/ftrack/actions/action_create_folders.py b/pype/ftrack/actions/action_create_folders.py
index 44e9741bab..68cf837469 100644
--- a/pype/ftrack/actions/action_create_folders.py
+++ b/pype/ftrack/actions/action_create_folders.py
@@ -4,7 +4,7 @@ import logging
 import argparse
 import re
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from avalon import lib as avalonlib
 from pype.ftrack.lib.io_nonsingleton import DbConnector
diff --git a/pype/ftrack/actions/action_create_project_structure.py b/pype/ftrack/actions/action_create_project_structure.py
index c39c717b11..4589802f3a 100644
--- a/pype/ftrack/actions/action_create_project_structure.py
+++ b/pype/ftrack/actions/action_create_project_structure.py
@@ -4,7 +4,7 @@ import re
 import argparse
 import logging
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
 from pypeapp import config
 
@@ -142,6 +142,13 @@ class CreateProjectFolders(BaseAction):
         else:
             data['project_id'] = parent['project']['id']
 
+        existing_entity = self.session.query((
+            "TypedContext where name is \"{}\" and "
+            "parent_id is \"{}\" and project_id is \"{}\""
+        ).format(name, data['parent_id'], data['project_id'])).first()
+        if existing_entity:
+            return existing_entity
+
         new_ent = self.session.create(ent_type, data)
         self.session.commit()
         return new_ent
diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py
index 0469b3a1e6..a14db4687a 100644
--- a/pype/ftrack/actions/action_cust_attr_doctor.py
+++ b/pype/ftrack/actions/action_cust_attr_doctor.py
@@ -4,7 +4,7 @@ import json
 import argparse
 import logging
 
-from pype.vendor import ftrack_api
+import ftrack_api
 from pype.ftrack import BaseAction
diff --git a/pype/ftrack/actions/action_delete_asset.py b/pype/ftrack/actions/action_delete_asset.py
index 106c81758a..7eb9126fca 100644
--- a/pype/ftrack/actions/action_delete_asset.py
+++ b/pype/ftrack/actions/action_delete_asset.py
@@ -1,357 +1,606 @@
 import os
-import sys
-import logging
+import collections
+import uuid
+from datetime import datetime
+from queue import Queue
+
 from bson.objectid import ObjectId
-import argparse
-from pype.vendor import ftrack_api
 from pype.ftrack import BaseAction
 from pype.ftrack.lib.io_nonsingleton import DbConnector
 
 
-class DeleteAsset(BaseAction):
+class DeleteAssetSubset(BaseAction):
     '''Edit meta data action.'''
 
     #: Action identifier.
-    identifier = 'delete.asset'
+    identifier = "delete.asset.subset"
     #: Action label.
-    label = 'Delete Asset/Subsets'
+    label = "Delete Asset/Subsets"
     #: Action description.
-    description = 'Removes from Avalon with all childs and asset from Ftrack'
-    icon = '{}/ftrack/action_icons/DeleteAsset.svg'.format(
-        os.environ.get('PYPE_STATICS_SERVER', '')
+    description = "Removes assets/subsets from Avalon with all children, and from Ftrack"
+    icon = "{}/ftrack/action_icons/DeleteAsset.svg".format(
+        os.environ.get("PYPE_STATICS_SERVER", "")
     )
     #: roles that are allowed to register this action
-    role_list = ['Pypeclub', 'Administrator']
-    #: Db
-    db = DbConnector()
+    role_list = ["Pypeclub", "Administrator", "Project Manager"]
+    #: Db connection
+    dbcon = DbConnector()
 
-    value = None
+    splitter = {"type": "label", "value": "---"}
+    action_data_by_id = {}
+    asset_prefix = "asset:"
+    subset_prefix = "subset:"
 
     def discover(self, session, entities, event):
-        ''' Validation '''
-        if len(entities) != 1:
-            return False
+        """ Validation """
+        task_ids = []
+        for ent_info in event["data"]["selection"]:
+            entType = ent_info.get("entityType", "")
+            if entType == "task":
+                task_ids.append(ent_info["entityId"])
 
-        valid = ["task"]
-        entityType = event["data"]["selection"][0].get("entityType", "")
-        if entityType.lower() not in valid:
-            return False
-
-        return True
+        for entity in entities:
+            ftrack_id = entity["id"]
+            if ftrack_id not in task_ids:
+                continue
+            if entity.entity_type.lower() != "task":
+                return True
+        return False
 
     def _launch(self, event):
-        self.reset_session()
         try:
-            self.db.install()
             args = self._translate_event(
                 self.session, event
             )
+            if "values" not in event["data"]:
+                self.dbcon.install()
+                return self._interface(self.session, *args)
 
-            interface = self._interface(
-                self.session, *args
-            )
-
-            confirmation = self.confirm_delete(
-                True, *args
-            )
-
-            if interface:
-                return interface
-
+            confirmation = self.confirm_delete(*args)
             if confirmation:
                 return confirmation
 
+            self.dbcon.install()
             response = self.launch(
                 self.session, *args
             )
         finally:
-            self.db.uninstall()
+            self.dbcon.uninstall()
 
         return self._handle_result(
             self.session, response, *args
        )
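Because every ftrack form submit arrives as a fresh event, the rewritten action threads a hidden `action_id` field through its dialogs and keeps the real state server-side in `action_data_by_id` (see `interface` below). A minimal sketch of that handshake, with illustrative names rather than pype's API:

```python
import uuid

pending = {}  # action_id -> cached state, lives on the action instance

def build_form(state):
    action_id = str(uuid.uuid1())
    pending[action_id] = state
    return {
        "title": "Example",
        "items": [
            # Hidden items are echoed back in event["data"]["values"]
            {"type": "hidden", "name": "action_id", "value": action_id},
            {"type": "boolean", "name": "confirm", "value": False},
        ],
    }

def handle_submit(values):
    state = pending.pop(values["action_id"], None)
    if state is None:
        return {"success": False, "message": "Session expired, try again."}
    return {"success": True, "message": "Confirmed: {}".format(state)}
```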
 
     def interface(self, session, entities, event):
-        if not event['data'].get('values', {}):
-            self.attempt = 1
-            items = []
-            entity = entities[0]
-            title = 'Choose items to delete from "{}"'.format(entity['name'])
-            project = entity['project']
+        self.show_message(event, "Preparing data...", True)
+        items = []
+        title = "Choose items to delete"
 
-            self.db.Session['AVALON_PROJECT'] = project["full_name"]
+        # Filter selection and get ftrack ids
+        selection = event["data"].get("selection") or []
+        ftrack_ids = []
+        project_in_selection = False
+        for entity in selection:
+            entity_type = (entity.get("entityType") or "").lower()
+            if entity_type != "task":
+                if entity_type == "show":
+                    project_in_selection = True
+                continue
 
-            av_entity = self.db.find_one({
-                'type': 'asset',
-                'name': entity['name']
+            ftrack_id = entity.get("entityId")
+            if not ftrack_id:
+                continue
+
+            ftrack_ids.append(ftrack_id)
+
+        if project_in_selection:
+            msg = "It is not possible to use this action on a project entity."
+            self.show_message(event, msg, True)
+
+        # Filter event even more (skip task entities)
+        # - task entities are not relevant for avalon
+        for entity in entities:
+            ftrack_id = entity["id"]
+            if ftrack_id not in ftrack_ids:
+                continue
+
+            if entity.entity_type.lower() == "task":
+                ftrack_ids.remove(ftrack_id)
+
+        if not ftrack_ids:
+            # It is a bug if this happens!
+            return {
+                "success": False,
+                "message": "Invalid selection for this action (Bug)"
+            }
+
+        if entities[0].entity_type.lower() == "project":
+            project = entities[0]
+        else:
+            project = entities[0]["project"]
+
+        project_name = project["full_name"]
+        self.dbcon.Session["AVALON_PROJECT"] = project_name
+
+        selected_av_entities = self.dbcon.find({
+            "type": "asset",
+            "data.ftrackId": {"$in": ftrack_ids}
+        })
+        selected_av_entities = [ent for ent in selected_av_entities]
+        if not selected_av_entities:
+            return {
+                "success": False,
+                "message": "Didn't find entities in Avalon"
+            }
+
+        # Remove cached actions older than 2 minutes
+        old_action_ids = []
+        for id, data in self.action_data_by_id.items():
+            created_at = data.get("created_at")
+            if not created_at:
+                old_action_ids.append(id)
+                continue
+            cur_time = datetime.now()
+            existing_in_sec = (cur_time - created_at).total_seconds()
+            if existing_in_sec > 60 * 2:
+                old_action_ids.append(id)
+
+        for id in old_action_ids:
+            self.action_data_by_id.pop(id, None)
+
+        # Store data for action id
+        action_id = str(uuid.uuid1())
+        self.action_data_by_id[action_id] = {
+            "attempt": 1,
+            "created_at": datetime.now(),
+            "project_name": project_name,
+            "subset_ids_by_name": {},
+            "subset_ids_by_parent": {}
+        }
+
+        id_item = {
+            "type": "hidden",
+            "name": "action_id",
+            "value": action_id
+        }
+
+        items.append(id_item)
+        asset_ids = [ent["_id"] for ent in selected_av_entities]
+        subsets_for_selection = self.dbcon.find({
+            "type": "subset",
+            "parent": {"$in": asset_ids}
+        })
+
+        asset_ending = ""
+        if len(selected_av_entities) > 1:
+            asset_ending = "s"
+
+        asset_title = {
+            "type": "label",
+            "value": "# Delete asset{}:".format(asset_ending)
+        }
+        asset_note = {
+            "type": "label",
+            "value": (
+                "NOTE: Action will delete checked entities"
+                " in Ftrack and Avalon with all children entities and"
+                " published content.
" + ) + } + + items.append(asset_title) + items.append(asset_note) + + asset_items = collections.defaultdict(list) + for asset in selected_av_entities: + ent_path_items = [project_name] + ent_path_items.extend(asset.get("data", {}).get("parents") or []) + ent_path_to_parent = "/".join(ent_path_items) + "/" + asset_items[ent_path_to_parent].append(asset) + + for asset_parent_path, assets in sorted(asset_items.items()): + items.append({ + "type": "label", + "value": "## - {}".format(asset_parent_path) }) - - if av_entity is None: - return { - 'success': False, - 'message': 'Didn\'t found assets in avalon' - } - - asset_label = { - 'type': 'label', - 'value': '## Delete whole asset: ##' - } - asset_item = { - 'label': av_entity['name'], - 'name': 'whole_asset', - 'type': 'boolean', - 'value': False - } - splitter = { - 'type': 'label', - 'value': '{}'.format(200*"-") - } - subset_label = { - 'type': 'label', - 'value': '## Subsets: ##' - } - if av_entity is not None: - items.append(asset_label) - items.append(asset_item) - items.append(splitter) - - all_subsets = self.db.find({ - 'type': 'subset', - 'parent': av_entity['_id'] + for asset in assets: + items.append({ + "label": asset["name"], + "name": "{}{}".format( + self.asset_prefix, str(asset["_id"]) + ), + "type": 'boolean', + "value": False }) - subset_items = [] - for subset in all_subsets: - item = { - 'label': subset['name'], - 'name': str(subset['_id']), - 'type': 'boolean', - 'value': False - } - subset_items.append(item) - if len(subset_items) > 0: - items.append(subset_label) - items.extend(subset_items) - else: - return { - 'success': False, - 'message': 'Didn\'t found assets in avalon' - } + subset_ids_by_name = collections.defaultdict(list) + subset_ids_by_parent = collections.defaultdict(list) + for subset in subsets_for_selection: + subset_id = subset["_id"] + name = subset["name"] + parent_id = subset["parent"] + subset_ids_by_name[name].append(subset_id) + subset_ids_by_parent[parent_id].append(subset_id) + if not subset_ids_by_name: return { - 'items': items, - 'title': title + "items": items, + "title": title } - def confirm_delete(self, first_attempt, entities, event): - if first_attempt is True: - if 'values' not in event['data']: - return + subset_ending = "" + if len(subset_ids_by_name.keys()) > 1: + subset_ending = "s" - values = event['data']['values'] + subset_title = { + "type": "label", + "value": "# Subset{} to delete:".format(subset_ending) + } + subset_note = { + "type": "label", + "value": ( + "
WARNING: Subset{} will be removed" + " for all selected entities.
" + ).format(subset_ending) + } - if len(values) <= 0: - return - if 'whole_asset' not in values: - return - else: - values = self.values + items.append(self.splitter) + items.append(subset_title) + items.append(subset_note) - title = 'Confirmation of deleting {}' - if values['whole_asset'] is True: - title = title.format( - 'whole asset {}'.format( - entities[0]['name'] - ) - ) - else: - subsets = [] - for key, value in values.items(): - if value is True: - subsets.append(key) - len_subsets = len(subsets) - if len_subsets == 0: + for name in subset_ids_by_name: + items.append({ + "label": "{}".format(name), + "name": "{}{}".format(self.subset_prefix, name), + "type": "boolean", + "value": False + }) + + self.action_data_by_id[action_id]["subset_ids_by_parent"] = ( + subset_ids_by_parent + ) + self.action_data_by_id[action_id]["subset_ids_by_name"] = ( + subset_ids_by_name + ) + + return { + "items": items, + "title": title + } + + def confirm_delete(self, entities, event): + values = event["data"]["values"] + action_id = values.get("action_id") + spec_data = self.action_data_by_id.get(action_id) + if not spec_data: + # it is a bug if this happens! + return { + "success": False, + "message": "Something bad has happened. Please try again." + } + + # Process Delete confirmation + delete_key = values.get("delete_key") + if delete_key: + delete_key = delete_key.lower().strip() + # Go to launch part if user entered `delete` + if delete_key == "delete": + return + # Skip whole process if user didn't enter any text + elif delete_key == "": + self.action_data_by_id.pop(action_id, None) return { - 'success': True, - 'message': 'Nothing was selected to delete' + "success": True, + "message": "Deleting cancelled (delete entry was empty)" } - elif len_subsets == 1: - title = title.format( - '{} subset'.format(len_subsets) - ) - else: - title = title.format( - '{} subsets'.format(len_subsets) - ) + # Get data to show again + to_delete = spec_data["to_delete"] + + else: + to_delete = collections.defaultdict(list) + for key, value in values.items(): + if not value: + continue + if key.startswith(self.asset_prefix): + _key = key.replace(self.asset_prefix, "") + to_delete["assets"].append(_key) + + elif key.startswith(self.subset_prefix): + _key = key.replace(self.subset_prefix, "") + to_delete["subsets"].append(_key) + + self.action_data_by_id[action_id]["to_delete"] = to_delete + + asset_to_delete = len(to_delete.get("assets") or []) > 0 + subset_to_delete = len(to_delete.get("subsets") or []) > 0 + + if not asset_to_delete and not subset_to_delete: + self.action_data_by_id.pop(action_id, None) + return { + "success": True, + "message": "Nothing was selected to delete" + } + + attempt = spec_data["attempt"] + if attempt > 3: + self.action_data_by_id.pop(action_id, None) + return { + "success": False, + "message": "You didn't enter \"DELETE\" properly 3 times!" 
+ } + + self.action_data_by_id[action_id]["attempt"] += 1 + + title = "Confirmation of deleting" + + if asset_to_delete: + asset_len = len(to_delete["assets"]) + asset_ending = "" + if asset_len > 1: + asset_ending = "s" + title += " {} Asset{}".format(asset_len, asset_ending) + if subset_to_delete: + title += " and" + + if subset_to_delete: + sub_len = len(to_delete["subsets"]) + type_ending = "" + sub_ending = "" + if sub_len == 1: + subset_ids_by_name = spec_data["subset_ids_by_name"] + if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1: + sub_ending = "s" + + elif sub_len > 1: + type_ending = "s" + sub_ending = "s" + + title += " {} type{} of subset{}".format( + sub_len, type_ending, sub_ending + ) - self.values = values items = [] + id_item = {"type": "hidden", "name": "action_id", "value": action_id} delete_label = { 'type': 'label', 'value': '# Please enter "DELETE" to confirm #' } - delete_item = { - 'name': 'delete_key', - 'type': 'text', - 'value': '', - 'empty_text': 'Type Delete here...' + "name": "delete_key", + "type": "text", + "value": "", + "empty_text": "Type Delete here..." } + + items.append(id_item) items.append(delete_label) items.append(delete_item) return { - 'items': items, - 'title': title + "items": items, + "title": title } def launch(self, session, entities, event): - if 'values' not in event['data']: - return - - values = event['data']['values'] - if len(values) <= 0: - return - if 'delete_key' not in values: - return - - if values['delete_key'].lower() != 'delete': - if values['delete_key'].lower() == '': - return { - 'success': False, - 'message': 'Deleting cancelled' - } - if self.attempt < 3: - self.attempt += 1 - return_dict = self.confirm_delete(False, entities, event) - return_dict['title'] = '{} ({} attempt)'.format( - return_dict['title'], self.attempt - ) - return return_dict + self.show_message(event, "Processing...", True) + values = event["data"]["values"] + action_id = values.get("action_id") + spec_data = self.action_data_by_id.get(action_id) + if not spec_data: + # it is a bug if this happens! return { - 'success': False, - 'message': 'You didn\'t enter "DELETE" properly 3 times!' + "success": False, + "message": "Something bad has happened. Please try again." 
} - entity = entities[0] - project = entity['project'] + report_messages = collections.defaultdict(list) - self.db.Session['AVALON_PROJECT'] = project["full_name"] + project_name = spec_data["project_name"] + to_delete = spec_data["to_delete"] + self.dbcon.Session["AVALON_PROJECT"] = project_name - all_ids = [] - if self.values.get('whole_asset', False) is True: - av_entity = self.db.find_one({ - 'type': 'asset', - 'name': entity['name'] + assets_to_delete = to_delete.get("assets") or [] + subsets_to_delete = to_delete.get("subsets") or [] + + # Convert asset ids to ObjectId obj + assets_to_delete = [ObjectId(id) for id in assets_to_delete if id] + + subset_ids_by_parent = spec_data["subset_ids_by_parent"] + subset_ids_by_name = spec_data["subset_ids_by_name"] + + subset_ids_to_archive = [] + asset_ids_to_archive = [] + ftrack_ids_to_delete = [] + if len(assets_to_delete) > 0: + # Prepare data when deleting whole avalon asset + avalon_assets = self.dbcon.find({"type": "asset"}) + avalon_assets_by_parent = collections.defaultdict(list) + for asset in avalon_assets: + parent_id = asset["data"]["visualParent"] + avalon_assets_by_parent[parent_id].append(asset) + if asset["_id"] in assets_to_delete: + ftrack_id = asset["data"]["ftrackId"] + ftrack_ids_to_delete.append(ftrack_id) + + children_queue = Queue() + for mongo_id in assets_to_delete: + children_queue.put(mongo_id) + + while not children_queue.empty(): + mongo_id = children_queue.get() + if mongo_id in asset_ids_to_archive: + continue + + asset_ids_to_archive.append(mongo_id) + for subset_id in subset_ids_by_parent.get(mongo_id, []): + if subset_id not in subset_ids_to_archive: + subset_ids_to_archive.append(subset_id) + + children = avalon_assets_by_parent.get(mongo_id) + if not children: + continue + + for child in children: + child_id = child["_id"] + if child_id not in asset_ids_to_archive: + children_queue.put(child_id) + + # Prepare names of assets in ftrack and ids of subsets in mongo + asset_names_to_delete = [] + if len(subsets_to_delete) > 0: + for name in subsets_to_delete: + asset_names_to_delete.append(name) + for subset_id in subset_ids_by_name[name]: + if subset_id in subset_ids_to_archive: + continue + subset_ids_to_archive.append(subset_id) + + # Get ftrack ids of entities where will be delete only asset + not_deleted_entities_id = [] + ftrack_id_name_map = {} + if asset_names_to_delete: + for entity in entities: + ftrack_id = entity["id"] + ftrack_id_name_map[ftrack_id] = entity["name"] + if ftrack_id in ftrack_ids_to_delete: + continue + not_deleted_entities_id.append(ftrack_id) + + mongo_proc_txt = "MongoProcessing: " + ftrack_proc_txt = "Ftrack processing: " + if asset_ids_to_archive: + self.log.debug("{}Archivation of assets <{}>".format( + mongo_proc_txt, + ", ".join([str(id) for id in asset_ids_to_archive]) + )) + self.dbcon.update_many( + { + "_id": {"$in": asset_ids_to_archive}, + "type": "asset" + }, + {"$set": {"type": "archived_asset"}} + ) + + if subset_ids_to_archive: + self.log.debug("{}Archivation of subsets <{}>".format( + mongo_proc_txt, + ", ".join([str(id) for id in subset_ids_to_archive]) + )) + self.dbcon.update_many( + { + "_id": {"$in": subset_ids_to_archive}, + "type": "subset" + }, + {"$set": {"type": "archived_subset"}} + ) + + if ftrack_ids_to_delete: + self.log.debug("{}Deleting Ftrack Entities <{}>".format( + ftrack_proc_txt, ", ".join(ftrack_ids_to_delete) + )) + + joined_ids_to_delete = ", ".join( + ["\"{}\"".format(id) for id in ftrack_ids_to_delete] + ) + ftrack_ents_to_delete = 
self.session.query(
+                "select id, link from TypedContext where id in ({})".format(
+                    joined_ids_to_delete
+                )
+            ).all()
+            for entity in ftrack_ents_to_delete:
+                self.session.delete(entity)
+                try:
+                    self.session.commit()
+                except Exception:
+                    ent_path = "/".join(
+                        [ent["name"] for ent in entity["link"]]
+                    )
+                    msg = "Failed to delete entity"
+                    report_messages[msg].append(ent_path)
+                    self.session.rollback()
+                    self.log.warning(
+                        "{} <{}>".format(msg, ent_path),
+                        exc_info=True
+                    )
+
+        if not_deleted_entities_id:
+            joined_not_deleted = ", ".join([
+                "\"{}\"".format(ftrack_id)
+                for ftrack_id in not_deleted_entities_id
+            ])
+            joined_asset_names = ", ".join([
+                "\"{}\"".format(name)
+                for name in asset_names_to_delete
+            ])
+            # Find assets of selected entities with names of checked subsets
+            assets = self.session.query((
+                "select id from Asset where"
+                " context_id in ({}) and name in ({})"
+            ).format(joined_not_deleted, joined_asset_names)).all()
+
+            self.log.debug("{}Deleting Ftrack Assets <{}>".format(
+                ftrack_proc_txt,
+                ", ".join([asset["id"] for asset in assets])
+            ))
+            for asset in assets:
+                self.session.delete(asset)
+                try:
+                    self.session.commit()
+                except Exception:
+                    self.session.rollback()
+                    msg = "Failed to delete asset"
+                    report_messages[msg].append(asset["id"])
+                    self.log.warning(
+                        "{} <{}>".format(msg, asset["id"]),
+                        exc_info=True
+                    )
+
+        return self.report_handle(report_messages, project_name, event)
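The deletion loops above commit once per entity, so a single server-side failure rolls back only that entity and is collected for the report instead of aborting the whole batch. The same pattern in isolation (the query string is illustrative):

```python
import collections
import ftrack_api

session = ftrack_api.Session()  # credentials come from FTRACK_* env vars
report_messages = collections.defaultdict(list)

for entity in session.query("TypedContext where name like \"tmp%\""):
    session.delete(entity)
    try:
        # Commit one deletion at a time so one failure doesn't kill the batch
        session.commit()
    except Exception:
        session.rollback()
        report_messages["Failed to delete entity"].append(entity["id"])
```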
+
+    def report_handle(self, report_messages, project_name, event):
+        if not report_messages:
+            return {
+                "success": True,
+                "message": "Deletion was successful!"
+            }
+
+        title = "Delete report ({}):".format(project_name)
+        items = []
+        items.append({
+            "type": "label",
+            "value": "# Deleting was not completely successful"
+        })
+        items.append({
+            "type": "label",
+            "value": "Check logs for more information
" + }) + for msg, _items in report_messages.items(): + if not _items or not msg: + continue + + items.append({ + "type": "label", + "value": "# {}".format(msg) }) - if av_entity is not None: - all_ids.append(av_entity['_id']) - all_ids.extend(self.find_child(av_entity)) + if isinstance(_items, str): + _items = [_items] + items.append({ + "type": "label", + "value": '
{}'.format("<br>
".join(_items)) + }) + items.append(self.splitter) - session.delete(entity) - session.commit() - else: - subset_names = [] - for key, value in self.values.items(): - if key == 'delete_key' or value is False: - continue - - entity_id = ObjectId(key) - av_entity = self.db.find_one({'_id': entity_id}) - subset_names.append(av_entity['name']) - if av_entity is None: - continue - all_ids.append(entity_id) - all_ids.extend(self.find_child(av_entity)) - - for ft_asset in entity['assets']: - if ft_asset['name'] in subset_names: - session.delete(ft_asset) - session.commit() - - if len(all_ids) == 0: - return { - 'success': True, - 'message': 'No entities to delete in avalon' - } - - or_subquery = [] - for id in all_ids: - or_subquery.append({'_id': id}) - delete_query = {'$or': or_subquery} - self.db.delete_many(delete_query) + self.show_interface(items, title, event) return { - 'success': True, - 'message': 'All assets were deleted!' + "success": False, + "message": "Deleting finished. Read report messages." } - def find_child(self, entity): - output = [] - id = entity['_id'] - visuals = [x for x in self.db.find({'data.visualParent': id})] - assert len(visuals) == 0, 'This asset has another asset as child' - childs = self.db.find({'parent': id}) - for child in childs: - output.append(child['_id']) - output.extend(self.find_child(child)) - return output - - def find_assets(self, asset_names): - assets = [] - for name in asset_names: - entity = self.db.find_one({ - 'type': 'asset', - 'name': name - }) - if entity is not None and entity not in assets: - assets.append(entity) - return assets - def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' - DeleteAsset(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) + DeleteAssetSubset(session, plugins_presets).register() diff --git a/pype/ftrack/actions/action_delete_asset_byname.py b/pype/ftrack/actions/action_delete_asset_byname.py deleted file mode 100644 index 4a3807f8f0..0000000000 --- a/pype/ftrack/actions/action_delete_asset_byname.py +++ /dev/null @@ -1,178 +0,0 @@ -import os -import sys -import logging -import argparse -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction -from pype.ftrack.lib.io_nonsingleton import DbConnector - - -class AssetsRemover(BaseAction): - '''Edit meta data action.''' - - #: Action identifier. - identifier = 'remove.assets' - #: Action label. - label = "Pype Admin" - variant = '- Delete Assets by Name' - #: Action description. 
- description = 'Removes assets from Ftrack and Avalon db with all childs' - #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] - icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') - ) - #: Db - db = DbConnector() - - def discover(self, session, entities, event): - ''' Validation ''' - if len(entities) != 1: - return False - - valid = ["show", "task"] - entityType = event["data"]["selection"][0].get("entityType", "") - if entityType.lower() not in valid: - return False - - return True - - def interface(self, session, entities, event): - if not event['data'].get('values', {}): - title = 'Enter Asset names to delete' - - items = [] - for i in range(15): - - item = { - 'label': 'Asset {}'.format(i+1), - 'name': 'asset_{}'.format(i+1), - 'type': 'text', - 'value': '' - } - items.append(item) - - return { - 'items': items, - 'title': title - } - - def launch(self, session, entities, event): - entity = entities[0] - if entity.entity_type.lower() != 'Project': - project = entity['project'] - else: - project = entity - - if 'values' not in event['data']: - return - - values = event['data']['values'] - if len(values) <= 0: - return { - 'success': True, - 'message': 'No Assets to delete!' - } - - asset_names = [] - - for k, v in values.items(): - if v.replace(' ', '') != '': - asset_names.append(v) - - self.db.install() - self.db.Session['AVALON_PROJECT'] = project["full_name"] - - assets = self.find_assets(asset_names) - - all_ids = [] - for asset in assets: - all_ids.append(asset['_id']) - all_ids.extend(self.find_child(asset)) - - if len(all_ids) == 0: - self.db.uninstall() - return { - 'success': True, - 'message': 'None of assets' - } - - or_subquery = [] - for id in all_ids: - or_subquery.append({'_id': id}) - delete_query = {'$or': or_subquery} - self.db.delete_many(delete_query) - - self.db.uninstall() - return { - 'success': True, - 'message': 'All assets were deleted!' - } - - def find_child(self, entity): - output = [] - id = entity['_id'] - visuals = [x for x in self.db.find({'data.visualParent': id})] - assert len(visuals) == 0, 'This asset has another asset as child' - childs = self.db.find({'parent': id}) - for child in childs: - output.append(child['_id']) - output.extend(self.find_child(child)) - return output - - def find_assets(self, asset_names): - assets = [] - for name in asset_names: - entity = self.db.find_one({ - 'type': 'asset', - 'name': name - }) - if entity is not None and entity not in assets: - assets.append(entity) - return assets - - -def register(session, plugins_presets={}): - '''Register plugin. Called when used as an plugin.''' - - AssetsRemover(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. 
- loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_djvview.py b/pype/ftrack/actions/action_djvview.py index 9da12dd67c..ad00e33360 100644 --- a/pype/ftrack/actions/action_djvview.py +++ b/pype/ftrack/actions/action_djvview.py @@ -4,7 +4,7 @@ import json import logging import subprocess from operator import itemgetter -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction from pypeapp import Logger, config @@ -36,12 +36,13 @@ class DJVViewAction(BaseAction): 'file_ext', ["img", "mov", "exr"] ) - def register(self): - assert (self.djv_path is not None), ( - 'DJV View is not installed' - ' or paths in presets are not set correctly' - ) - super().register() + def preregister(self): + if self.djv_path is None: + return ( + 'DJV View is not installed' + ' or paths in presets are not set correctly' + ) + return True def discover(self, session, entities, event): """Return available actions based on *event*. """ diff --git a/pype/ftrack/actions/action_job_killer.py b/pype/ftrack/actions/action_job_killer.py index 56a280a663..f01cebd0a8 100644 --- a/pype/ftrack/actions/action_job_killer.py +++ b/pype/ftrack/actions/action_job_killer.py @@ -4,7 +4,7 @@ import argparse import logging import json -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction @@ -108,6 +108,7 @@ class JobKiller(BaseAction): 'Changing Job ({}) status: {} -> failed' ).format(job['id'], origin_status)) except Exception: + session.rollback() self.log.warning(( 'Changing Job ({}) has failed' ).format(job['id'])) diff --git a/pype/ftrack/actions/action_multiple_notes.py b/pype/ftrack/actions/action_multiple_notes.py index bd51cb2984..20637c001d 100644 --- a/pype/ftrack/actions/action_multiple_notes.py +++ b/pype/ftrack/actions/action_multiple_notes.py @@ -2,7 +2,7 @@ import os import sys import argparse import logging -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction diff --git a/pype/ftrack/actions/action_prepare_project.py b/pype/ftrack/actions/action_prepare_project.py index d645748f6f..4cc6cfd8df 100644 --- a/pype/ftrack/actions/action_prepare_project.py +++ b/pype/ftrack/actions/action_prepare_project.py @@ -2,12 +2,9 @@ import os import json from ruamel import yaml -from pype.vendor import ftrack_api from pype.ftrack import BaseAction from pypeapp import config -from pype.ftrack.lib import get_avalon_attr - -from pype.vendor.ftrack_api import session as fa_session +from pype.ftrack.lib.avalon_sync import get_avalon_attr class PrepareProject(BaseAction): @@ -55,6 +52,8 @@ class PrepareProject(BaseAction): attributes_to_set = {} for attr in hier_cust_attrs: key = attr["key"] + if key.startswith("avalon_"): + continue attributes_to_set[key] = { "label": attr["label"], "object": attr, @@ -65,6 +64,8 
@@ class PrepareProject(BaseAction): if attr["entity_type"].lower() != "show": continue key = attr["key"] + if key.startswith("avalon_"): + continue attributes_to_set[key] = { "label": attr["label"], "object": attr, diff --git a/pype/ftrack/actions/action_rv.py b/pype/ftrack/actions/action_rv.py index e32997e5a9..0e2cd59981 100644 --- a/pype/ftrack/actions/action_rv.py +++ b/pype/ftrack/actions/action_rv.py @@ -7,7 +7,7 @@ import json from pypeapp import Logger, config from pype.ftrack import BaseAction -from pype.vendor import ftrack_api +import ftrack_api from avalon import io, api log = Logger().get_logger(__name__) @@ -61,12 +61,12 @@ class RVAction(BaseAction): def set_rv_path(self): self.rv_path = self.config_data.get("rv_path") - def register(self): - assert (self.rv_path is not None), ( - 'RV is not installed' - ' or paths in presets are not set correctly' - ) - super().register() + def preregister(self): + if self.rv_path is None: + return ( + 'RV is not installed or paths in presets are not set correctly' + ) + return True def get_components_from_entity(self, session, entity, components): """Get components from various entity types. diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py new file mode 100644 index 0000000000..cf0a4b0445 --- /dev/null +++ b/pype/ftrack/actions/action_seed.py @@ -0,0 +1,347 @@ +import os +from operator import itemgetter +from pype.ftrack import BaseAction + + +class SeedDebugProject(BaseAction): + '''Edit meta data action.''' + + #: Action identifier. + identifier = "seed.debug.project" + #: Action label. + label = "SeedDebugProject" + #: Action description. + description = "Description" + #: priority + priority = 100 + #: roles that are allowed to register this action + role_list = ["Pypeclub"] + icon = "{}/ftrack/action_icons/SeedProject.svg".format( + os.environ.get("PYPE_STATICS_SERVER", "") + ) + + # Asset names which will be created in `Assets` entity + assets = [ + "Addax", "Alpaca", "Ant", "Antelope", "Aye", "Badger", "Bear", "Bee", + "Beetle", "Bluebird", "Bongo", "Bontebok", "Butterflie", "Caiman", + "Capuchin", "Capybara", "Cat", "Caterpillar", "Coyote", "Crocodile", + "Cuckoo", "Deer", "Dragonfly", "Duck", "Eagle", "Egret", "Elephant", + "Falcon", "Fossa", "Fox", "Gazelle", "Gecko", "Gerbil", + "GiantArmadillo", "Gibbon", "Giraffe", "Goose", "Gorilla", + "Grasshoper", "Hare", "Hawk", "Hedgehog", "Heron", "Hog", + "Hummingbird", "Hyena", "Chameleon", "Cheetah", "Iguana", "Jackal", + "Jaguar", "Kingfisher", "Kinglet", "Kite", "Komodo", "Lemur", + "Leopard", "Lion", "Lizard", "Macaw", "Malachite", "Mandrill", + "Mantis", "Marmoset", "Meadowlark", "Meerkat", "Mockingbird", + "Mongoose", "Monkey", "Nyal", "Ocelot", "Okapi", "Oribi", "Oriole", + "Otter", "Owl", "Panda", "Parrot", "Pelican", "Pig", "Porcupine", + "Reedbuck", "Rhinocero", "Sandpiper", "Servil", "Skink", "Sloth", + "Snake", "Spider", "Squirrel", "Sunbird", "Swallow", "Swift", "Tiger", + "Sylph", "Tanager", "Vulture", "Warthog", "Waterbuck", "Woodpecker", + "Zebra" + ] + + # Tasks which will be created for Assets + asset_tasks = [ + "Modeling", "Lookdev", "Rigging" + ] + # Tasks which will be created for Shots + shot_tasks = [ + "Animation", "Lighting", "Compositing", "FX" + ] + + # Define how much sequences will be created + default_seq_count = 5 + # Define how much shots will be created for each sequence + default_shots_count = 10 + + existing_projects = None + new_project_item = "< New Project >" + current_project_item = "< Current Project >" + + 
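With the SeedDebugProject defaults above, the rough volume of a default run follows from the class attributes (a sketch of the arithmetic; the asset-list length is approximate):

```python
# Entities produced by one default run, derived from the class attributes
n_assets = 101                 # len(SeedDebugProject.assets), approx.
n_asset_tasks = n_assets * 3   # Modeling, Lookdev, Rigging
n_seqs = 5                     # default_seq_count
n_shots = n_seqs * 10          # default_shots_count
n_shot_tasks = n_shots * 4     # Animation, Lighting, Compositing, FX
total = 2 + n_assets + n_asset_tasks + n_seqs + n_shots + n_shot_tasks
# two Folder entities (Assets, Shots) plus the rest: roughly 660 entities
```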
def discover(self, session, entities, event): + ''' Validation ''' + return True + + def interface(self, session, entities, event): + if event["data"].get("values", {}): + return + + title = "Select Project where you want to create seed data" + + items = [] + item_splitter = {"type": "label", "value": "---"} + + description_label = { + "type": "label", + "value": ( + "WARNING: Action does NOT check if entities already exist !!!" + ) + } + items.append(description_label) + + all_projects = session.query("select full_name from Project").all() + self.existing_projects = [proj["full_name"] for proj in all_projects] + projects_items = [ + {"label": proj, "value": proj} for proj in self.existing_projects + ] + + data_items = [] + + data_items.append({ + "label": self.new_project_item, + "value": self.new_project_item + }) + + data_items.append({ + "label": self.current_project_item, + "value": self.current_project_item + }) + + data_items.extend(sorted( + projects_items, + key=itemgetter("label"), + reverse=False + )) + projects_item = { + "label": "Choose Project", + "type": "enumerator", + "name": "project_name", + "data": data_items, + "value": self.current_project_item + } + items.append(projects_item) + items.append(item_splitter) + + items.append({ + "label": "Number of assets", + "type": "number", + "name": "asset_count", + "value": len(self.assets) + }) + items.append({ + "label": "Number of sequences", + "type": "number", + "name": "seq_count", + "value": self.default_seq_count + }) + items.append({ + "label": "Number of shots", + "type": "number", + "name": "shots_count", + "value": self.default_shots_count + }) + items.append(item_splitter) + + note_label = { + "type": "label", + "value": ( + "
NOTE: Enter project name and choose schema if you " + "chose `\"< New Project >\"` (code is optional)
" + ) + } + items.append(note_label) + items.append({ + "label": "Project name", + "name": "new_project_name", + "type": "text", + "value": "" + }) + + project_schemas = [ + sch["name"] for sch in self.session.query("ProjectSchema").all() + ] + schemas_item = { + "label": "Choose Schema", + "type": "enumerator", + "name": "new_schema_name", + "data": [ + {"label": sch, "value": sch} for sch in project_schemas + ], + "value": project_schemas[0] + } + items.append(schemas_item) + + items.append({ + "label": "*Project code", + "name": "new_project_code", + "type": "text", + "value": "", + "empty_text": "Optional..." + }) + + return { + "items": items, + "title": title + } + + def launch(self, session, in_entities, event): + if "values" not in event["data"]: + return + + # THIS IS THE PROJECT PART + values = event["data"]["values"] + selected_project = values["project_name"] + if selected_project == self.new_project_item: + project_name = values["new_project_name"] + if project_name in self.existing_projects: + msg = "Project \"{}\" already exist".format(project_name) + self.log.error(msg) + return {"success": False, "message": msg} + + project_code = values["new_project_code"] + project_schema_name = values["new_schema_name"] + if not project_code: + project_code = project_name + project_code = project_code.lower().replace(" ", "_").strip() + _project = session.query( + "Project where name is \"{}\"".format(project_code) + ).first() + if _project: + msg = "Project with code \"{}\" already exist".format( + project_code + ) + self.log.error(msg) + return {"success": False, "message": msg} + + project_schema = session.query( + "ProjectSchema where name is \"{}\"".format( + project_schema_name + ) + ).one() + # Create the project with the chosen schema. + self.log.debug(( + "*** Creating Project: name <{}>, code <{}>, schema <{}>" + ).format(project_name, project_code, project_schema_name)) + project = session.create("Project", { + "name": project_code, + "full_name": project_name, + "project_schema": project_schema + }) + session.commit() + + elif selected_project == self.current_project_item: + entity = in_entities[0] + if entity.entity_type.lower() == "project": + project = entity + else: + if "project" in entity: + project = entity["project"] + else: + project = entity["parent"]["project"] + project_schema = project["project_schema"] + self.log.debug(( + "*** Using Project: name <{}>, code <{}>, schema <{}>" + ).format( + project["full_name"], project["name"], project_schema["name"] + )) + else: + project = session.query("Project where full_name is \"{}\"".format( + selected_project + )).one() + project_schema = project["project_schema"] + self.log.debug(( + "*** Using Project: name <{}>, code <{}>, schema <{}>" + ).format( + project["full_name"], project["name"], project_schema["name"] + )) + + # THIS IS THE MAGIC PART + task_types = {} + for _type in project_schema["_task_type_schema"]["types"]: + if _type["name"] not in task_types: + task_types[_type["name"]] = _type + self.task_types = task_types + + asset_count = values.get("asset_count") or len(self.assets) + seq_count = values.get("seq_count") or self.default_seq_count + shots_count = values.get("shots_count") or self.default_shots_count + + self.create_assets(project, asset_count) + self.create_shots(project, seq_count, shots_count) + + return True + + def create_assets(self, project, asset_count): + self.log.debug("*** Creating assets:") + + main_entity = self.session.create("Folder", { + "name": "Assets", + "parent": project + }) + 
self.log.debug("- Assets") + available_assets = len(self.assets) + repetitive_times = ( + int(asset_count / available_assets) + + (asset_count % available_assets > 0) + ) + created_assets = 0 + for _asset_name in self.assets: + if created_assets >= asset_count: + break + for asset_num in range(1, repetitive_times + 1): + if created_assets >= asset_count: + break + asset_name = "%s_%02d" % (_asset_name, asset_num) + asset = self.session.create("AssetBuild", { + "name": asset_name, + "parent": main_entity + }) + created_assets += 1 + self.log.debug("- Assets/{}".format(asset_name)) + + for task_name in self.asset_tasks: + self.session.create("Task", { + "name": task_name, + "parent": asset, + "type": self.task_types[task_name] + }) + self.log.debug("- Assets/{}/{}".format( + asset_name, task_name + )) + + self.log.debug("*** Commiting Assets") + self.session.commit() + + def create_shots(self, project, seq_count, shots_count): + self.log.debug("*** Creating shots:") + main_entity = self.session.create("Folder", { + "name": "Shots", + "parent": project + }) + self.log.debug("- Shots") + + for seq_num in range(1, seq_count+1): + seq_name = "sq%03d" % seq_num + seq = self.session.create("Sequence", { + "name": seq_name, + "parent": main_entity + }) + self.log.debug("- Shots/{}".format(seq_name)) + + for shot_num in range(1, shots_count+1): + shot_name = "%ssh%04d" % (seq_name, (shot_num*10)) + shot = self.session.create("Shot", { + "name": shot_name, + "parent": seq + }) + self.log.debug("- Shots/{}/{}".format(seq_name, shot_name)) + + for task_name in self.shot_tasks: + self.session.create("Task", { + "name": task_name, + "parent": shot, + "type": self.task_types[task_name] + }) + self.log.debug("- Shots/{}/{}/{}".format( + seq_name, shot_name, task_name + )) + + self.log.debug("*** Commiting Shots") + self.session.commit() + + +def register(session, plugins_presets={}): + '''Register plugin. Called when used as an plugin.''' + + SeedDebugProject(session, plugins_presets).register() diff --git a/pype/ftrack/actions/action_start_timer.py b/pype/ftrack/actions/action_start_timer.py index 292789e9f3..bd77276b6d 100644 --- a/pype/ftrack/actions/action_start_timer.py +++ b/pype/ftrack/actions/action_start_timer.py @@ -1,4 +1,4 @@ -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction diff --git a/pype/ftrack/actions/action_sync_hier_attrs_local.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py deleted file mode 100644 index 289abd0122..0000000000 --- a/pype/ftrack/actions/action_sync_hier_attrs_local.py +++ /dev/null @@ -1,351 +0,0 @@ -import os -import sys -import json -import argparse -import logging -import collections - -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction, lib -from pype.ftrack.lib.io_nonsingleton import DbConnector -from bson.objectid import ObjectId - - -class SyncHierarchicalAttrs(BaseAction): - - db_con = DbConnector() - ca_mongoid = lib.get_ca_mongoid() - - #: Action identifier. - identifier = 'sync.hierarchical.attrs.local' - #: Action label. - label = "Pype Admin" - variant = '- Sync Hier Attrs (Local)' - #: Action description. 
- description = 'Synchronize hierarchical attributes' - #: Icon - icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') - ) - - #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator', 'Project Manager'] - - def discover(self, session, entities, event): - ''' Validation ''' - for entity in entities: - if ( - entity.get('context_type', '').lower() in ('show', 'task') and - entity.entity_type.lower() != 'task' - ): - return True - return False - - def launch(self, session, entities, event): - self.interface_messages = {} - user = session.query( - 'User where id is "{}"'.format(event['source']['user']['id']) - ).one() - - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'Sync Hierachical attributes' - }) - }) - session.commit() - self.log.debug('Job with id "{}" created'.format(job['id'])) - - process_session = ftrack_api.Session( - server_url=session.server_url, - api_key=session.api_key, - api_user=session.api_user, - auto_connect_event_hub=True - ) - - try: - # Collect hierarchical attrs - self.log.debug('Collecting Hierarchical custom attributes started') - custom_attributes = {} - all_avalon_attr = process_session.query( - 'CustomAttributeGroup where name is "avalon"' - ).one() - - error_key = ( - 'Hierarchical attributes with set "default" value (not allowed)' - ) - - for cust_attr in all_avalon_attr['custom_attribute_configurations']: - if 'avalon_' in cust_attr['key']: - continue - - if not cust_attr['is_hierarchical']: - continue - - if cust_attr['default']: - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - self.interface_messages[error_key].append( - cust_attr['label'] - ) - - self.log.warning(( - 'Custom attribute "{}" has set default value.' - ' This attribute can\'t be synchronized' - ).format(cust_attr['label'])) - continue - - custom_attributes[cust_attr['key']] = cust_attr - - self.log.debug( - 'Collecting Hierarchical custom attributes has finished' - ) - - if not custom_attributes: - msg = 'No hierarchical attributes to sync.' 
- self.log.debug(msg) - return { - 'success': True, - 'message': msg - } - - entity = entities[0] - if entity.entity_type.lower() == 'project': - project_name = entity['full_name'] - else: - project_name = entity['project']['full_name'] - - self.db_con.install() - self.db_con.Session['AVALON_PROJECT'] = project_name - - _entities = self._get_entities(event, process_session) - - for entity in _entities: - self.log.debug(30*'-') - self.log.debug( - 'Processing entity "{}"'.format(entity.get('name', entity)) - ) - - ent_name = entity.get('name', entity) - if entity.entity_type.lower() == 'project': - ent_name = entity['full_name'] - - for key in custom_attributes: - self.log.debug(30*'*') - self.log.debug( - 'Processing Custom attribute key "{}"'.format(key) - ) - # check if entity has that attribute - if key not in entity['custom_attributes']: - error_key = 'Missing key on entities' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - self.interface_messages[error_key].append( - '- key: "{}" - entity: "{}"'.format(key, ent_name) - ) - - self.log.error(( - '- key "{}" not found on "{}"' - ).format(key, ent_name)) - continue - - value = self.get_hierarchical_value(key, entity) - if value is None: - error_key = ( - 'Missing value for key on entity' - ' and its parents (synchronization was skipped)' - ) - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - self.interface_messages[error_key].append( - '- key: "{}" - entity: "{}"'.format(key, ent_name) - ) - - self.log.warning(( - '- key "{}" not set on "{}" or its parents' - ).format(key, ent_name)) - continue - - self.update_hierarchical_attribute(entity, key, value) - - job['status'] = 'done' - session.commit() - - except Exception: - self.log.error( - 'Action "{}" failed'.format(self.label), - exc_info=True - ) - - finally: - self.db_con.uninstall() - - if job['status'] in ('queued', 'running'): - job['status'] = 'failed' - session.commit() - if self.interface_messages: - title = "Errors during SyncHierarchicalAttrs" - self.show_interface_from_dict( - messages=self.interface_messages, title=title, event=event - ) - - return True - - def get_hierarchical_value(self, key, entity): - value = entity['custom_attributes'][key] - if ( - value is not None or - entity.entity_type.lower() == 'project' - ): - return value - - return self.get_hierarchical_value(key, entity['parent']) - - def update_hierarchical_attribute(self, entity, key, value): - if ( - entity['context_type'].lower() not in ('show', 'task') or - entity.entity_type.lower() == 'task' - ): - return - - ent_name = entity.get('name', entity) - if entity.entity_type.lower() == 'project': - ent_name = entity['full_name'] - - hierarchy = '/'.join( - [a['name'] for a in entity.get('ancestors', [])] - ) - if hierarchy: - hierarchy = '/'.join( - [entity['project']['full_name'], hierarchy, entity['name']] - ) - elif entity.entity_type.lower() == 'project': - hierarchy = entity['full_name'] - else: - hierarchy = '/'.join( - [entity['project']['full_name'], entity['name']] - ) - - self.log.debug('- updating entity "{}"'.format(hierarchy)) - - # collect entity's custom attributes - custom_attributes = entity.get('custom_attributes') - if not custom_attributes: - return - - mongoid = custom_attributes.get(self.ca_mongoid) - if not mongoid: - error_key = 'Missing MongoID on entities (try SyncToAvalon first)' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - if ent_name not in 
self.interface_messages[error_key]: - self.interface_messages[error_key].append(ent_name) - - self.log.warning( - '-- entity "{}" is not synchronized to avalon. Skipping'.format( - ent_name - ) - ) - return - - try: - mongoid = ObjectId(mongoid) - except Exception: - error_key = 'Invalid MongoID on entities (try SyncToAvalon)' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - if ent_name not in self.interface_messages[error_key]: - self.interface_messages[error_key].append(ent_name) - - self.log.warning( - '-- entity "{}" has stored invalid MongoID. Skipping'.format( - ent_name - ) - ) - return - # Find entity in Mongo DB - mongo_entity = self.db_con.find_one({'_id': mongoid}) - if not mongo_entity: - error_key = 'Entities not found in Avalon DB (try SyncToAvalon)' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - if ent_name not in self.interface_messages[error_key]: - self.interface_messages[error_key].append(ent_name) - - self.log.warning( - '-- entity "{}" was not found in DB by id "{}". Skipping'.format( - ent_name, str(mongoid) - ) - ) - return - - # Change value if entity has set it's own - entity_value = custom_attributes[key] - if entity_value is not None: - value = entity_value - - data = mongo_entity.get('data') or {} - - data[key] = value - self.db_con.update_many( - {'_id': mongoid}, - {'$set': {'data': data}} - ) - - self.log.debug( - '-- stored value "{}"'.format(value) - ) - - for child in entity.get('children', []): - self.update_hierarchical_attribute(child, key, value) - - -def register(session, plugins_presets={}): - '''Register plugin. Called when used as an plugin.''' - - SyncHierarchicalAttrs(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_sync_to_avalon.py b/pype/ftrack/actions/action_sync_to_avalon.py new file mode 100644 index 0000000000..01d0b866bf --- /dev/null +++ b/pype/ftrack/actions/action_sync_to_avalon.py @@ -0,0 +1,193 @@ +import os +import time +import traceback + +from pype.ftrack import BaseAction +from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory + + +class SyncToAvalonLocal(BaseAction): + """ + Synchronizing data action - from Ftrack to Avalon DB + + Stores all information about entity. 
+ - Name(string) - Most important information = identifier of entity + - Parent(ObjectId) - Avalon Project Id, if entity is not project itself + - Data(dictionary): + - VisualParent(ObjectId) - Avalon Id of parent asset + - Parents(array of string) - All parent names except project + - Tasks(array of string) - Tasks on asset + - FtrackId(string) + - entityType(string) - entity's type on Ftrack + * All Custom attributes in group 'Avalon' + - custom attributes that start with 'avalon_' are skipped + + * This information is stored for all entities in the whole project. + + Avalon ID of asset is stored to Ftrack + - Custom attribute 'avalon_mongo_id'. + - the action DOES NOT create this Custom attribute if it doesn't exist + - run 'Create Custom Attributes' action + - or do it manually (Not recommended) + """ + + #: Action identifier. + identifier = "sync.to.avalon.local" + #: Action label. + label = "Pype Admin" + #: Action variant + variant = "- Sync To Avalon (Local)" + #: Action description. + description = "Send data from Ftrack to Avalon" + #: priority + priority = 200 + #: roles that are allowed to register this action + role_list = ["Pypeclub"] + icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( + os.environ.get('PYPE_STATICS_SERVER', '') + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.entities_factory = SyncEntitiesFactory(self.log, self.session) + + def discover(self, session, entities, event): + ''' Validation ''' + for ent in event["data"]["selection"]: + # Ignore entities that are not tasks or projects + if ent["entityType"].lower() in ["show", "task"]: + return True + return False + + def launch(self, session, in_entities, event): + time_start = time.time() + + self.show_message(event, "Synchronization - Preparing data", True) + # Get ftrack project + if in_entities[0].entity_type.lower() == "project": + ft_project_name = in_entities[0]["full_name"] + else: + ft_project_name = in_entities[0]["project"]["full_name"] + + try: + self.entities_factory.launch_setup(ft_project_name) + time_1 = time.time() + + self.entities_factory.set_cutom_attributes() + time_2 = time.time() + + # This must happen before all filtering!!!
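For orientation, the asset document the docstring above describes looks roughly like this. All values are invented for illustration, the data keys are written the way avalon-core stores them (the docstring capitalizes them loosely), and the schema string is an assumption based on avalon-core, not something visible in this diff:

# Illustrative shape of a synced Avalon asset document (invented values).
example_asset = {
    "_id": "5d1e8a3bf0e8d2a4c8b45f91",       # an ObjectId in the real database
    "type": "asset",
    "schema": "avalon-core:asset-3.0",        # assumed schema name
    "name": "sh010",                          # identifier of the entity
    "parent": "5d1e8a3bf0e8d2a4c8b45f00",     # Avalon project id
    "data": {
        "visualParent": "5d1e8a3bf0e8d2a4c8b45f55",  # Avalon id of parent asset
        "parents": ["ep01", "sq01"],          # all parent names except project
        "tasks": ["animation", "lighting"],   # tasks on asset
        "ftrackId": "01234567-89ab-cdef-0123-456789abcdef",
        "entityType": "Shot",                 # entity's type on Ftrack
        # ...plus the custom attributes from the 'Avalon' group
    },
}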
+ self.entities_factory.prepare_avalon_entities(ft_project_name) + time_3 = time.time() + + self.entities_factory.filter_by_ignore_sync() + time_4 = time.time() + + self.entities_factory.duplicity_regex_check() + time_5 = time.time() + + self.entities_factory.prepare_ftrack_ent_data() + time_6 = time.time() + + self.entities_factory.synchronize() + time_7 = time.time() + + self.log.debug( + "*** Synchronization finished ***" + ) + self.log.debug( + "preparation <{}>".format(time_1 - time_start) + ) + self.log.debug( + "set_cutom_attributes <{}>".format(time_2 - time_1) + ) + self.log.debug( + "prepare_avalon_entities <{}>".format(time_3 - time_2) + ) + self.log.debug( + "filter_by_ignore_sync <{}>".format(time_4 - time_3) + ) + self.log.debug( + "duplicity_regex_check <{}>".format(time_5 - time_4) + ) + self.log.debug( + "prepare_ftrack_ent_data <{}>".format(time_6 - time_5) + ) + self.log.debug( + "synchronize <{}>".format(time_7 - time_6) + ) + self.log.debug( + "* Total time: {}".format(time_7 - time_start) + ) + + report = self.entities_factory.report() + if report and report.get("items"): + default_title = "Synchronization report ({}):".format( + ft_project_name + ) + self.show_interface( + items=report["items"], + title=report.get("title", default_title), + event=event + ) + return { + "success": True, + "message": "Synchronization Finished" + } + + except Exception: + self.log.error( + "Synchronization failed due to code error", exc_info=True + ) + msg = "An error occurred during synchronization" + title = "Synchronization report ({}):".format(ft_project_name) + items = [] + items.append({ + "type": "label", + "value": "# {}".format(msg) + }) + items.append({ + "type": "label", + "value": "## Traceback of the error" + }) + items.append({ + "type": "label", + "value": "
<p>{}</p>".format( + str(traceback.format_exc()).replace( + "\n", "<br>
").replace( + " ", " " + ) + ) + }) + + report = {"items": []} + try: + report = self.entities_factory.report() + except Exception: + pass + + _items = report.get("items", []) + if _items: + items.append(self.entities_factory.report_splitter) + items.extend(_items) + + self.show_interface(items, title, event) + + return {"success": True, "message": msg} + + finally: + try: + self.entities_factory.dbcon.uninstall() + except Exception: + pass + + try: + self.entities_factory.session.close() + except Exception: + pass + + +def register(session, plugins_presets={}): + '''Register plugin. Called when used as an plugin.''' + + SyncToAvalonLocal(session, plugins_presets).register() diff --git a/pype/ftrack/actions/action_sync_to_avalon_local.py b/pype/ftrack/actions/action_sync_to_avalon_local.py deleted file mode 100644 index 61050f9883..0000000000 --- a/pype/ftrack/actions/action_sync_to_avalon_local.py +++ /dev/null @@ -1,266 +0,0 @@ -import os -import sys -import time -import argparse -import logging -import json -import collections - -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction -from pype.ftrack.lib import avalon_sync as ftracklib -from pype.vendor.ftrack_api import session as fa_session - - -class SyncToAvalon(BaseAction): - ''' - Synchronizing data action - from Ftrack to Avalon DB - - Stores all information about entity. - - Name(string) - Most important information = identifier of entity - - Parent(ObjectId) - Avalon Project Id, if entity is not project itself - - Silo(string) - Last parent except project - - Data(dictionary): - - VisualParent(ObjectId) - Avalon Id of parent asset - - Parents(array of string) - All parent names except project - - Tasks(array of string) - Tasks on asset - - FtrackId(string) - - entityType(string) - entity's type on Ftrack - * All Custom attributes in group 'Avalon' which name don't start with 'avalon_' - - * These information are stored also for all parents and children entities. - - Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'. - - action IS NOT creating this Custom attribute if doesn't exist - - run 'Create Custom Attributes' action or do it manually (Not recommended) - - If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: - - name, parents and silo are checked -> shows error if are not exact the same - - after sync it is not allowed to change names or move entities - - If ID in 'avalon_mongo_id' is empty string or is not found in DB: - - tries to find entity by name - - found: - - raise error if ftrackId/visual parent/parents are not same - - not found: - - Creates asset/project - - ''' - - #: Action identifier. - identifier = 'sync.to.avalon.local' - #: Action label. - label = "Pype Admin" - variant = '- Sync To Avalon (Local)' - #: Action description. - description = 'Send data from Ftrack to Avalon' - #: Action icon. 
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') - ) - #: roles that are allowed to register this action - role_list = ['Pypeclub'] - #: Action priority - priority = 200 - - project_query = ( - "select full_name, name, custom_attributes" - ", project_schema._task_type_schema.types.name" - " from Project where full_name is \"{}\"" - ) - - entities_query = ( - "select id, name, parent_id, link, custom_attributes" - " from TypedContext where project.full_name is \"{}\"" - ) - - # Entity type names(lowered) that won't be synchronized with their children - ignore_entity_types = ["task", "milestone"] - - def __init__(self, session, plugins_presets): - super(SyncToAvalon, self).__init__(session) - # reload utils on initialize (in case of server restart) - - def discover(self, session, entities, event): - ''' Validation ''' - for entity in entities: - if entity.entity_type.lower() not in ['task', 'assetversion']: - return True - - return False - - def launch(self, session, entities, event): - time_start = time.time() - message = "" - - # JOB SETTINGS - userId = event['source']['user']['id'] - user = session.query('User where id is ' + userId).one() - - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'Sync Ftrack to Avalon.' - }) - }) - session.commit() - try: - self.log.debug("Preparing entities for synchronization") - - if entities[0].entity_type.lower() == "project": - ft_project_name = entities[0]["full_name"] - else: - ft_project_name = entities[0]["project"]["full_name"] - - project_entities = session.query( - self.entities_query.format(ft_project_name) - ).all() - - ft_project = session.query( - self.project_query.format(ft_project_name) - ).one() - - entities_by_id = {} - entities_by_parent = collections.defaultdict(list) - - entities_by_id[ft_project["id"]] = ft_project - for ent in project_entities: - entities_by_id[ent["id"]] = ent - entities_by_parent[ent["parent_id"]].append(ent) - - importable = [] - for ent_info in event["data"]["selection"]: - ent = entities_by_id[ent_info["entityId"]] - for link_ent_info in ent["link"]: - link_ent = entities_by_id[link_ent_info["id"]] - if ( - ent.entity_type.lower() in self.ignore_entity_types or - link_ent in importable - ): - continue - - importable.append(link_ent) - - def add_children(parent_id): - ents = entities_by_parent[parent_id] - for ent in ents: - if ent.entity_type.lower() in self.ignore_entity_types: - continue - - if ent not in importable: - importable.append(ent) - - add_children(ent["id"]) - - # add children of selection to importable - for ent_info in event["data"]["selection"]: - add_children(ent_info["entityId"]) - - # Check names: REGEX in schema/duplicates - raise error if found - all_names = [] - duplicates = [] - - for entity in importable: - ftracklib.avalon_check_name(entity) - if entity.entity_type.lower() == "project": - continue - - if entity['name'] in all_names: - duplicates.append("'{}'".format(entity['name'])) - else: - all_names.append(entity['name']) - - if len(duplicates) > 0: - # TODO Show information to user and return False - raise ValueError( - "Entity name duplication: {}".format(", ".join(duplicates)) - ) - - # ----- PROJECT ------ - avalon_project = ftracklib.get_avalon_project(ft_project) - custom_attributes = ftracklib.get_avalon_attr(session) - - # Import all entities to Avalon DB - for entity in importable: - result = ftracklib.import_to_avalon( - session=session, - entity=entity, - 
ft_project=ft_project, - av_project=avalon_project, - custom_attributes=custom_attributes - ) - # TODO better error handling - # maybe split into critical, warnings and messages? - if 'errors' in result and len(result['errors']) > 0: - job['status'] = 'failed' - session.commit() - - ftracklib.show_errors(self, event, result['errors']) - - return { - 'success': False, - 'message': "Sync to avalon FAILED" - } - - if avalon_project is None: - if 'project' in result: - avalon_project = result['project'] - - job['status'] = 'done' - - except ValueError as ve: - # TODO remove this part!!!! - job['status'] = 'failed' - message = str(ve) - self.log.error( - 'Error during syncToAvalon: {}'.format(message), - exc_info=True - ) - - except Exception as e: - job['status'] = 'failed' - exc_type, exc_obj, exc_tb = sys.exc_info() - fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - log_message = "{}/{}/Line: {}".format( - exc_type, fname, exc_tb.tb_lineno - ) - self.log.error( - 'Error during syncToAvalon: {}'.format(log_message), - exc_info=True - ) - # TODO add traceback to message and show to user - message = ( - 'Unexpected Error' - ' - Please check Log for more information' - ) - finally: - if job['status'] in ['queued', 'running']: - job['status'] = 'failed' - session.commit() - - time_end = time.time() - self.log.debug("Synchronization took \"{}\"".format( - str(time_end - time_start) - )) - - if job["status"] != "failed": - self.log.debug("Triggering Sync hierarchical attributes") - self.trigger_action("sync.hierarchical.attrs.local", event) - - if len(message) > 0: - message = "Unable to sync: {}".format(message) - return { - 'success': False, - 'message': message - } - - return { - 'success': True, - 'message': "Synchronization was successfull" - } - - -def register(session, plugins_presets={}): - '''Register plugin. 
Called when used as an plugin.''' - SyncToAvalon(session, plugins_presets).register() diff --git a/pype/ftrack/actions/action_test.py b/pype/ftrack/actions/action_test.py index 58f7210e3b..502373e626 100644 --- a/pype/ftrack/actions/action_test.py +++ b/pype/ftrack/actions/action_test.py @@ -6,7 +6,7 @@ import collections import json import re -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction from avalon import io, inventory, schema diff --git a/pype/ftrack/actions/action_thumbnail_to_childern.py b/pype/ftrack/actions/action_thumbnail_to_childern.py index 7d189cf652..7d8c02d6c7 100644 --- a/pype/ftrack/actions/action_thumbnail_to_childern.py +++ b/pype/ftrack/actions/action_thumbnail_to_childern.py @@ -4,7 +4,7 @@ import argparse import logging import json -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction @@ -43,7 +43,7 @@ class ThumbToChildren(BaseAction): 'description': 'Push thumbnails to Childrens' }) }) - + session.commit() try: for entity in entities: thumbid = entity['thumbnail_id'] @@ -53,10 +53,11 @@ class ThumbToChildren(BaseAction): # inform the user that the job is done job['status'] = 'done' - except Exception: + except Exception as exc: + session.rollback() # fail the job if something goes wrong job['status'] = 'failed' - raise + raise exc finally: session.commit() diff --git a/pype/ftrack/actions/action_thumbnail_to_parent.py b/pype/ftrack/actions/action_thumbnail_to_parent.py index efafca4a96..afc214a37e 100644 --- a/pype/ftrack/actions/action_thumbnail_to_parent.py +++ b/pype/ftrack/actions/action_thumbnail_to_parent.py @@ -3,7 +3,7 @@ import sys import argparse import logging import json -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction @@ -40,9 +40,9 @@ class ThumbToParent(BaseAction): 'status': 'running', 'data': json.dumps({ 'description': 'Push thumbnails to parents' - }) }) - + }) + session.commit() try: for entity in entities: parent = None @@ -74,10 +74,11 @@ class ThumbToParent(BaseAction): # inform the user that the job is done job['status'] = status or 'done' - except Exception as e: + except Exception as exc: + session.rollback() # fail the job if something goes wrong job['status'] = 'failed' - raise e + raise exc finally: session.commit() diff --git a/pype/ftrack/actions/action_update_from_v2-2-0.py b/pype/ftrack/actions/action_update_from_v2-2-0.py index cae8ae682e..dd0f1e6ea2 100644 --- a/pype/ftrack/actions/action_update_from_v2-2-0.py +++ b/pype/ftrack/actions/action_update_from_v2-2-0.py @@ -1,14 +1,6 @@ import os -import sys -import argparse -import logging -import collections -import json -import re -from pype.vendor import ftrack_api from pype.ftrack import BaseAction -from avalon import io, inventory, schema from pype.ftrack.lib.io_nonsingleton import DbConnector @@ -134,7 +126,6 @@ class PypeUpdateFromV2_2_0(BaseAction): "title": title } - def launch(self, session, entities, event): if 'values' not in event['data']: return @@ -182,7 +173,7 @@ class PypeUpdateFromV2_2_0(BaseAction): {"type": "asset"}, {"$unset": {"silo": ""}} ) - + self.log.debug("- setting schema of assets to v.3") self.db_con.update_many( {"type": "asset"}, @@ -191,10 +182,8 @@ class PypeUpdateFromV2_2_0(BaseAction): return True + def register(session, plugins_presets={}): """Register plugin. 
Called when used as an plugin.""" - if not isinstance(session, ftrack_api.session.Session): - return - PypeUpdateFromV2_2_0(session, plugins_presets).register() diff --git a/pype/ftrack/actions/action_where_run_ask.py b/pype/ftrack/actions/action_where_run_ask.py index 795c2664cc..a28f32f407 100644 --- a/pype/ftrack/actions/action_where_run_ask.py +++ b/pype/ftrack/actions/action_where_run_ask.py @@ -1,7 +1,5 @@ import os -from pype.vendor import ftrack_api from pype.ftrack import BaseAction -from pype.vendor.ftrack_api import session as fa_session class ActionAskWhereIRun(BaseAction): diff --git a/pype/ftrack/actions/action_where_run_show.py b/pype/ftrack/actions/action_where_run_show.py index 48618f0251..3f37ef8378 100644 --- a/pype/ftrack/actions/action_where_run_show.py +++ b/pype/ftrack/actions/action_where_run_show.py @@ -1,7 +1,7 @@ import platform import socket import getpass -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseAction diff --git a/pype/ftrack/events/action_sync_hier_attrs.py b/pype/ftrack/events/action_sync_hier_attrs.py deleted file mode 100644 index 23ac319261..0000000000 --- a/pype/ftrack/events/action_sync_hier_attrs.py +++ /dev/null @@ -1,383 +0,0 @@ -import os -import sys -import json -import argparse -import logging -import collections - -from pypeapp import config -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction, lib -from pype.ftrack.lib.io_nonsingleton import DbConnector -from bson.objectid import ObjectId - - -class SyncHierarchicalAttrs(BaseAction): - - db_con = DbConnector() - ca_mongoid = lib.get_ca_mongoid() - - #: Action identifier. - identifier = 'sync.hierarchical.attrs' - #: Action label. - label = "Pype Admin" - variant = '- Sync Hier Attrs (Server)' - #: Action description. 
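The two thumbnail actions above converge on one job-handling pattern: commit right after the Job is created so it is visible in the ftrack web UI while running, and roll the session back on failure so the 'failed' status can still be committed cleanly. Distilled into a sketch (the helper name and the work callable are illustrative, not part of the diff):

import json

def run_as_job(session, user, description, work):
    job = session.create("Job", {
        "user": user,
        "status": "running",
        "data": json.dumps({"description": description})
    })
    session.commit()  # make the running job visible immediately
    try:
        work()
        job["status"] = "done"
    except Exception as exc:
        session.rollback()  # drop partial changes before recording the failure
        job["status"] = "failed"
        raise exc
    finally:
        session.commit()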
- description = 'Synchronize hierarchical attributes' - #: Icon - icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( - os.environ.get( - 'PYPE_STATICS_SERVER', - 'http://localhost:{}'.format( - config.get_presets().get('services', {}).get( - 'statics_server', {} - ).get('default_port', 8021) - ) - ) - ) - - def register(self): - self.session.event_hub.subscribe( - 'topic=ftrack.action.discover', - self._discover - ) - - self.session.event_hub.subscribe( - 'topic=ftrack.action.launch and data.actionIdentifier={}'.format( - self.identifier - ), - self._launch - ) - - def discover(self, session, entities, event): - ''' Validation ''' - role_check = False - discover = False - role_list = ['Pypeclub', 'Administrator', 'Project Manager'] - user = session.query( - 'User where id is "{}"'.format(event['source']['user']['id']) - ).one() - - for role in user['user_security_roles']: - if role['security_role']['name'] in role_list: - role_check = True - break - - if role_check is True: - for entity in entities: - context_type = entity.get('context_type', '').lower() - if ( - context_type in ('show', 'task') and - entity.entity_type.lower() != 'task' - ): - discover = True - break - - return discover - - def launch(self, session, entities, event): - self.interface_messages = {} - - user = session.query( - 'User where id is "{}"'.format(event['source']['user']['id']) - ).one() - - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'Sync Hierachical attributes' - }) - }) - session.commit() - self.log.debug('Job with id "{}" created'.format(job['id'])) - - process_session = ftrack_api.Session( - server_url=session.server_url, - api_key=session.api_key, - api_user=session.api_user, - auto_connect_event_hub=True - ) - try: - # Collect hierarchical attrs - self.log.debug('Collecting Hierarchical custom attributes started') - custom_attributes = {} - all_avalon_attr = process_session.query( - 'CustomAttributeGroup where name is "avalon"' - ).one() - - error_key = ( - 'Hierarchical attributes with set "default" value (not allowed)' - ) - - for cust_attr in all_avalon_attr['custom_attribute_configurations']: - if 'avalon_' in cust_attr['key']: - continue - - if not cust_attr['is_hierarchical']: - continue - - if cust_attr['default']: - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - self.interface_messages[error_key].append( - cust_attr['label'] - ) - - self.log.warning(( - 'Custom attribute "{}" has set default value.' - ' This attribute can\'t be synchronized' - ).format(cust_attr['label'])) - continue - - custom_attributes[cust_attr['key']] = cust_attr - - self.log.debug( - 'Collecting Hierarchical custom attributes has finished' - ) - - if not custom_attributes: - msg = 'No hierarchical attributes to sync.' 
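Both removed sync-hier-attrs variants resolve each attribute with the recursive get_hierarchical_value seen further below: climb from the entity toward the project and stop at the first non-None value. An equivalent iterative sketch, assuming the same ftrack_api entity access:

def resolve_hierarchical(key, entity):
    # Return the first non-None value of hierarchical attribute `key`
    # found on the entity or any of its ancestors, up to the project.
    current = entity
    while True:
        value = current["custom_attributes"][key]
        if value is not None or current.entity_type.lower() == "project":
            return value
        current = current["parent"]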
- self.log.debug(msg) - return { - 'success': True, - 'message': msg - } - - entity = entities[0] - if entity.entity_type.lower() == 'project': - project_name = entity['full_name'] - else: - project_name = entity['project']['full_name'] - - self.db_con.install() - self.db_con.Session['AVALON_PROJECT'] = project_name - - _entities = self._get_entities(event, process_session) - - for entity in _entities: - self.log.debug(30*'-') - self.log.debug( - 'Processing entity "{}"'.format(entity.get('name', entity)) - ) - - ent_name = entity.get('name', entity) - if entity.entity_type.lower() == 'project': - ent_name = entity['full_name'] - - for key in custom_attributes: - self.log.debug(30*'*') - self.log.debug( - 'Processing Custom attribute key "{}"'.format(key) - ) - # check if entity has that attribute - if key not in entity['custom_attributes']: - error_key = 'Missing key on entities' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - self.interface_messages[error_key].append( - '- key: "{}" - entity: "{}"'.format(key, ent_name) - ) - - self.log.error(( - '- key "{}" not found on "{}"' - ).format(key, entity.get('name', entity))) - continue - - value = self.get_hierarchical_value(key, entity) - if value is None: - error_key = ( - 'Missing value for key on entity' - ' and its parents (synchronization was skipped)' - ) - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - self.interface_messages[error_key].append( - '- key: "{}" - entity: "{}"'.format(key, ent_name) - ) - - self.log.warning(( - '- key "{}" not set on "{}" or its parents' - ).format(key, ent_name)) - continue - - self.update_hierarchical_attribute(entity, key, value) - - job['status'] = 'done' - session.commit() - - except Exception: - self.log.error( - 'Action "{}" failed'.format(self.label), - exc_info=True - ) - - finally: - self.db_con.uninstall() - - if job['status'] in ('queued', 'running'): - job['status'] = 'failed' - session.commit() - - if self.interface_messages: - self.show_interface_from_dict( - messages=self.interface_messages, - title="something went wrong", - event=event - ) - - return True - - def get_hierarchical_value(self, key, entity): - value = entity['custom_attributes'][key] - if ( - value is not None or - entity.entity_type.lower() == 'project' - ): - return value - - return self.get_hierarchical_value(key, entity['parent']) - - def update_hierarchical_attribute(self, entity, key, value): - if ( - entity['context_type'].lower() not in ('show', 'task') or - entity.entity_type.lower() == 'task' - ): - return - - ent_name = entity.get('name', entity) - if entity.entity_type.lower() == 'project': - ent_name = entity['full_name'] - - hierarchy = '/'.join( - [a['name'] for a in entity.get('ancestors', [])] - ) - if hierarchy: - hierarchy = '/'.join( - [entity['project']['full_name'], hierarchy, entity['name']] - ) - elif entity.entity_type.lower() == 'project': - hierarchy = entity['full_name'] - else: - hierarchy = '/'.join( - [entity['project']['full_name'], entity['name']] - ) - - self.log.debug('- updating entity "{}"'.format(hierarchy)) - - # collect entity's custom attributes - custom_attributes = entity.get('custom_attributes') - if not custom_attributes: - return - - mongoid = custom_attributes.get(self.ca_mongoid) - if not mongoid: - error_key = 'Missing MongoID on entities (try SyncToAvalon first)' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - if ent_name not in 
self.interface_messages[error_key]: - self.interface_messages[error_key].append(ent_name) - - self.log.warning( - '-- entity "{}" is not synchronized to avalon. Skipping'.format( - ent_name - ) - ) - return - - try: - mongoid = ObjectId(mongoid) - except Exception: - error_key = 'Invalid MongoID on entities (try SyncToAvalon)' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - if ent_name not in self.interface_messages[error_key]: - self.interface_messages[error_key].append(ent_name) - - self.log.warning( - '-- entity "{}" has stored invalid MongoID. Skipping'.format( - ent_name - ) - ) - return - # Find entity in Mongo DB - mongo_entity = self.db_con.find_one({'_id': mongoid}) - if not mongo_entity: - error_key = 'Entities not found in Avalon DB (try SyncToAvalon)' - if error_key not in self.interface_messages: - self.interface_messages[error_key] = [] - - if ent_name not in self.interface_messages[error_key]: - self.interface_messages[error_key].append(ent_name) - - self.log.warning( - '-- entity "{}" was not found in DB by id "{}". Skipping'.format( - ent_name, str(mongoid) - ) - ) - return - - # Change value if entity has set it's own - entity_value = custom_attributes[key] - if entity_value is not None: - value = entity_value - - data = mongo_entity.get('data') or {} - - data[key] = value - self.db_con.update_many( - {'_id': mongoid}, - {'$set': {'data': data}} - ) - - for child in entity.get('children', []): - self.update_hierarchical_attribute(child, key, value) - - -def register(session, plugins_presets): - '''Register plugin. Called when used as an plugin.''' - - SyncHierarchicalAttrs(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py index 7b5f94f216..9f9deeab95 100644 --- a/pype/ftrack/events/action_sync_to_avalon.py +++ b/pype/ftrack/events/action_sync_to_avalon.py @@ -1,338 +1,227 @@ import os -import sys -import argparse -import logging -import json -import collections import time +import traceback +from pype.ftrack import BaseAction +from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory from pypeapp import config -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction, lib -from pype.vendor.ftrack_api import session as fa_session -class SyncToAvalon(BaseAction): - ''' +class SyncToAvalonServer(BaseAction): + """ Synchronizing data action - from Ftrack to Avalon DB Stores all information about entity. 
- Name(string) - Most important information = identifier of entity - Parent(ObjectId) - Avalon Project Id, if entity is not project itself - - Silo(string) - Last parent except project - Data(dictionary): - VisualParent(ObjectId) - Avalon Id of parent asset - Parents(array of string) - All parent names except project - Tasks(array of string) - Tasks on asset - FtrackId(string) - entityType(string) - entity's type on Ftrack - * All Custom attributes in group 'Avalon' which name don't start with 'avalon_' + * All Custom attributes in group 'Avalon' + - custom attributes that start with 'avalon_' are skipped - * These information are stored also for all parents and children entities. + * This information is stored for all entities in the whole project. - Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'. + Avalon ID of asset is stored to Ftrack + - Custom attribute 'avalon_mongo_id'. - the action DOES NOT create this Custom attribute if it doesn't exist - - run 'Create Custom Attributes' action or do it manually (Not recommended) - - If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: - - name, parents and silo are checked -> shows error if are not exact the same - - after sync it is not allowed to change names or move entities - - If ID in 'avalon_mongo_id' is empty string or is not found in DB: - - tries to find entity by name - - found: - - raise error if ftrackId/visual parent/parents are not same - - not found: - - Creates asset/project - - ''' - + - run 'Create Custom Attributes' action + - or do it manually (Not recommended) + """ #: Action identifier. - identifier = 'sync.to.avalon' + identifier = "sync.to.avalon.server" #: Action label. label = "Pype Admin" variant = "- Sync To Avalon (Server)" #: Action description. - description = 'Send data from Ftrack to Avalon' + description = "Send data from Ftrack to Avalon" #: Action icon.
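The reworked discover() in the hunk below splits validation into a cheap selection check and a role query. Restated as a standalone predicate with the same behaviour (the function name is mine):

def can_run(session, event):
    # The selection must contain a project ("show") or a task-like entity.
    selection = event["data"].get("selection") or []
    if not any(
        ent["entityType"].lower() in ("show", "task") for ent in selection
    ):
        return False

    # The triggering user must hold one of the allowed security roles.
    user_id = event.get("source", {}).get("user", {}).get("id")
    if not user_id:
        return False

    user = session.query('User where id is "{}"'.format(user_id)).first()
    if not user:
        return False

    allowed = {"Pypeclub", "Administrator", "Project Manager"}
    return any(
        role["security_role"]["name"] in allowed
        for role in user["user_security_roles"]
    )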
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( + icon = "{}/ftrack/action_icons/PypeAdmin.svg".format( os.environ.get( - 'PYPE_STATICS_SERVER', - 'http://localhost:{}'.format( - config.get_presets().get('services', {}).get( - 'statics_server', {} - ).get('default_port', 8021) + "PYPE_STATICS_SERVER", + "http://localhost:{}".format( + config.get_presets().get("services", {}).get( + "rest_api", {} + ).get("default_port", 8021) ) ) ) - project_query = ( - "select full_name, name, custom_attributes" - ", project_schema._task_type_schema.types.name" - " from Project where full_name is \"{}\"" - ) - - entities_query = ( - "select id, name, parent_id, link, custom_attributes" - " from TypedContext where project.full_name is \"{}\"" - ) - - # Entity type names(lowered) that won't be synchronized with their children - ignore_entity_types = ["task", "milestone"] + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.entities_factory = SyncEntitiesFactory(self.log, self.session) def register(self): self.session.event_hub.subscribe( - 'topic=ftrack.action.discover', - self._discover + "topic=ftrack.action.discover", + self._discover, + priority=self.priority ) - self.session.event_hub.subscribe( - 'topic=ftrack.action.launch and data.actionIdentifier={0}'.format( - self.identifier - ), - self._launch - ) + launch_subscription = ( + "topic=ftrack.action.launch and data.actionIdentifier={0}" + ).format(self.identifier) + self.session.event_hub.subscribe(launch_subscription, self._launch) def discover(self, session, entities, event): - ''' Validation ''' - roleCheck = False - discover = False - roleList = ['Pypeclub', 'Administrator', 'Project Manager'] - userId = event['source']['user']['id'] - user = session.query('User where id is ' + userId).one() - - for role in user['user_security_roles']: - if role['security_role']['name'] in roleList: - roleCheck = True + """ Validation """ + # Check if selection is valid + valid_selection = False + for ent in event["data"]["selection"]: + # Ignore entities that are not tasks or projects + if ent["entityType"].lower() in ["show", "task"]: + valid_selection = True break - if roleCheck is True: - for entity in entities: - if entity.entity_type.lower() not in ['task', 'assetversion']: - discover = True - break - return discover + if not valid_selection: + return False - def launch(self, session, entities, event): + # Get user and check his roles + user_id = event.get("source", {}).get("user", {}).get("id") + if not user_id: + return False + + user = session.query("User where id is \"{}\"".format(user_id)).first() + if not user: + return False + + role_list = ["Pypeclub", "Administrator", "Project Manager"] + for role in user["user_security_roles"]: + if role["security_role"]["name"] in role_list: + return True + return False + + def launch(self, session, in_entities, event): time_start = time.time() - message = "" - # JOB SETTINGS - userId = event['source']['user']['id'] - user = session.query('User where id is ' + userId).one() + self.show_message(event, "Synchronization - Preparing data", True) + # Get ftrack project + if in_entities[0].entity_type.lower() == "project": + ft_project_name = in_entities[0]["full_name"] + else: + ft_project_name = in_entities[0]["project"]["full_name"] - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'Sync Ftrack to Avalon.' 
- }) - }) - session.commit() try: - self.log.debug("Preparing entities for synchronization") + self.entities_factory.launch_setup(ft_project_name) + time_1 = time.time() - if entities[0].entity_type.lower() == "project": - ft_project_name = entities[0]["full_name"] - else: - ft_project_name = entities[0]["project"]["full_name"] + self.entities_factory.set_cutom_attributes() + time_2 = time.time() - project_entities = session.query( - self.entities_query.format(ft_project_name) - ).all() + # This must happen before all filtering!!! + self.entities_factory.prepare_avalon_entities(ft_project_name) + time_3 = time.time() - ft_project = session.query( - self.project_query.format(ft_project_name) - ).one() + self.entities_factory.filter_by_ignore_sync() + time_4 = time.time() - entities_by_id = {} - entities_by_parent = collections.defaultdict(list) + self.entities_factory.duplicity_regex_check() + time_5 = time.time() - entities_by_id[ft_project["id"]] = ft_project - for ent in project_entities: - entities_by_id[ent["id"]] = ent - entities_by_parent[ent["parent_id"]].append(ent) + self.entities_factory.prepare_ftrack_ent_data() + time_6 = time.time() - importable = [] - for ent_info in event["data"]["selection"]: - ent = entities_by_id[ent_info["entityId"]] - for link_ent_info in ent["link"]: - link_ent = entities_by_id[link_ent_info["id"]] - if ( - ent.entity_type.lower() in self.ignore_entity_types or - link_ent in importable - ): - continue + self.entities_factory.synchronize() + time_7 = time.time() - importable.append(link_ent) + self.log.debug( + "*** Synchronization finished ***" + ) + self.log.debug( + "preparation <{}>".format(time_1 - time_start) + ) + self.log.debug( + "set_cutom_attributes <{}>".format(time_2 - time_1) + ) + self.log.debug( + "prepare_avalon_entities <{}>".format(time_3 - time_2) + ) + self.log.debug( + "filter_by_ignore_sync <{}>".format(time_4 - time_3) + ) + self.log.debug( + "duplicity_regex_check <{}>".format(time_5 - time_4) + ) + self.log.debug( + "prepare_ftrack_ent_data <{}>".format(time_6 - time_5) + ) + self.log.debug( + "synchronize <{}>".format(time_7 - time_6) + ) + self.log.debug( + "* Total time: {}".format(time_7 - time_start) + ) - def add_children(parent_id): - ents = entities_by_parent[parent_id] - for ent in ents: - if ent.entity_type.lower() in self.ignore_entity_types: - continue - - if ent not in importable: - importable.append(ent) - - add_children(ent["id"]) - - # add children of selection to importable - for ent_info in event["data"]["selection"]: - add_children(ent_info["entityId"]) - - # Check names: REGEX in schema/duplicates - raise error if found - all_names = [] - duplicates = [] - - for entity in importable: - lib.avalon_check_name(entity) - if entity.entity_type.lower() == "project": - continue - - if entity['name'] in all_names: - duplicates.append("'{}'".format(entity['name'])) - else: - all_names.append(entity['name']) - - if len(duplicates) > 0: - # TODO Show information to user and return False - raise ValueError( - "Entity name duplication: {}".format(", ".join(duplicates)) + report = self.entities_factory.report() + if report and report.get("items"): + default_title = "Synchronization report ({}):".format( + ft_project_name ) - - # ----- PROJECT ------ - avalon_project = lib.get_avalon_project(ft_project) - custom_attributes = lib.get_avalon_attr(session) - - # Import all entities to Avalon DB - for entity in importable: - result = lib.import_to_avalon( - session=session, - entity=entity, - ft_project=ft_project, - 
av_project=avalon_project, - custom_attributes=custom_attributes + self.show_interface( + items=report["items"], + title=report.get("title", default_title), + event=event ) - # TODO better error handling - # maybe split into critical, warnings and messages? - if 'errors' in result and len(result['errors']) > 0: - job['status'] = 'failed' - session.commit() - - lib.show_errors(self, event, result['errors']) - - return { - 'success': False, - 'message': "Sync to avalon FAILED" - } - - if avalon_project is None: - if 'project' in result: - avalon_project = result['project'] - - job['status'] = 'done' - session.commit() - - except ValueError as ve: - # TODO remove this part!!!! - job['status'] = 'failed' - session.commit() - message = str(ve) - self.log.error( - 'Error during syncToAvalon: {}'.format(message), - exc_info=True - ) - - except Exception as e: - job['status'] = 'failed' - session.commit() - exc_type, exc_obj, exc_tb = sys.exc_info() - fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - log_message = "{}/{}/Line: {}".format( - exc_type, fname, exc_tb.tb_lineno - ) - self.log.error( - 'Error during syncToAvalon: {}'.format(log_message), - exc_info=True - ) - # TODO add traceback to message and show to user - message = ( - 'Unexpected Error' - ' - Please check Log for more information' - ) - - finally: - if job['status'] in ['queued', 'running']: - job['status'] = 'failed' - - session.commit() - - time_end = time.time() - self.log.debug("Synchronization took \"{}\"".format( - str(time_end - time_start) - )) - - if job["status"] != "failed": - self.log.debug("Triggering Sync hierarchical attributes") - self.trigger_action("sync.hierarchical.attrs", event) - - if len(message) > 0: - message = "Unable to sync: {}".format(message) return { - 'success': False, - 'message': message + "success": True, + "message": "Synchronization Finished" } - return { - 'success': True, - 'message': "Synchronization was successfull" - } + except Exception: + self.log.error( + "Synchronization failed due to code error", exc_info=True + ) + msg = "An error has happened during synchronization" + title = "Synchronization report ({}):".format(ft_project_name) + items = [] + items.append({ + "type": "label", + "value": "# {}".format(msg) + }) + items.append({ + "type": "label", + "value": "## Traceback of the error" + }) + items.append({ + "type": "label", + "value": "
<p>{}</p>".format( + str(traceback.format_exc()).replace( + "\n", "<br>
").replace( + " ", " " + ) + ) + }) + + report = {"items": []} + try: + report = self.entities_factory.report() + except Exception: + pass + + _items = report.get("items", []) + if _items: + items.append(self.entities_factory.report_splitter) + items.extend(_items) + + self.show_interface(items, title, event) + + return {"success": True, "message": msg} + + finally: + try: + self.entities_factory.dbcon.uninstall() + except Exception: + pass + + try: + self.entities_factory.session.close() + except Exception: + pass -def register(session, plugins_presets): +def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. - SyncToAvalon(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) + SyncToAvalonServer(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_del_avalon_id_from_new.py b/pype/ftrack/events/event_del_avalon_id_from_new.py index e5c4b1be45..d820e40467 100644 --- a/pype/ftrack/events/event_del_avalon_id_from_new.py +++ b/pype/ftrack/events/event_del_avalon_id_from_new.py @@ -1,6 +1,6 @@ -from pype.vendor import ftrack_api -from pype.ftrack import BaseEvent, get_ca_mongoid -from pype.ftrack.events.event_sync_to_avalon import Sync_to_Avalon +from pype.ftrack.lib import BaseEvent +from pype.ftrack.lib.avalon_sync import CustAttrIdKey +from pype.ftrack.events.event_sync_to_avalon import SyncToAvalonEvent class DelAvalonIdFromNew(BaseEvent): @@ -11,7 +11,8 @@ class DelAvalonIdFromNew(BaseEvent): Priority of this event must be less than SyncToAvalon event ''' - priority = Sync_to_Avalon.priority - 1 + priority = SyncToAvalonEvent.priority - 1 + ignore_me = True def launch(self, session, event): created = [] @@ -28,7 +29,7 @@ class DelAvalonIdFromNew(BaseEvent): elif ( entity.get('action', None) == 'update' and - get_ca_mongoid() in entity['keys'] and + CustAttrIdKey in entity['keys'] and entity_id in created ): ftrack_entity = session.get( @@ -37,13 +38,11 @@ class DelAvalonIdFromNew(BaseEvent): ) cust_attr = ftrack_entity['custom_attributes'][ - get_ca_mongoid() + CustAttrIdKey ] if cust_attr != '': - ftrack_entity['custom_attributes'][ - get_ca_mongoid() - ] = '' + ftrack_entity['custom_attributes'][CustAttrIdKey] = '' session.commit() except Exception: @@ -53,5 +52,4 @@ class DelAvalonIdFromNew(BaseEvent): def register(session, plugins_presets): '''Register plugin. 
Called when used as an plugin.''' - DelAvalonIdFromNew(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_next_task_update.py b/pype/ftrack/events/event_next_task_update.py index 51ccb2f057..c709fed5e5 100644 --- a/pype/ftrack/events/event_next_task_update.py +++ b/pype/ftrack/events/event_next_task_update.py @@ -1,4 +1,4 @@ -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseEvent import operator @@ -80,10 +80,10 @@ class NextTaskUpdate(BaseEvent): '>>> [ {} ] updated to [ Ready ]' ).format(path)) except Exception as e: + session.rollback() self.log.warning(( '!!! [ {} ] status couldnt be set: [ {} ]' - ).format(path, e)) - session.rollback() + ).format(path, str(e)), exc_info=True) def register(session, plugins_presets): diff --git a/pype/ftrack/events/event_radio_buttons.py b/pype/ftrack/events/event_radio_buttons.py index 917c7a49e6..5b633d8976 100644 --- a/pype/ftrack/events/event_radio_buttons.py +++ b/pype/ftrack/events/event_radio_buttons.py @@ -1,8 +1,8 @@ -from pype.vendor import ftrack_api -from pype.ftrack import BaseEvent +import ftrack_api +from pype.ftrack.lib import BaseEvent -class Radio_buttons(BaseEvent): +class RadioButtons(BaseEvent): ignore_me = True @@ -37,4 +37,4 @@ class Radio_buttons(BaseEvent): def register(session, plugins_presets): '''Register plugin. Called when used as an plugin.''' - Radio_buttons(session, plugins_presets).register() + RadioButtons(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_sync_hier_attr.py b/pype/ftrack/events/event_sync_hier_attr.py deleted file mode 100644 index 8031ec9e55..0000000000 --- a/pype/ftrack/events/event_sync_hier_attr.py +++ /dev/null @@ -1,213 +0,0 @@ -import os -import sys - -from pype.ftrack.lib.io_nonsingleton import DbConnector - -from pype.vendor import ftrack_api -from pype.ftrack import BaseEvent, lib -from bson.objectid import ObjectId - - -class SyncHierarchicalAttrs(BaseEvent): - # After sync to avalon event! 
- priority = 101 - db_con = DbConnector() - ca_mongoid = lib.get_ca_mongoid() - - def launch(self, session, event): - # Filter entities and changed values if it makes sence to run script - processable = [] - processable_ent = {} - for ent in event['data']['entities']: - # Ignore entities that are not tasks or projects - if ent['entityType'].lower() not in ['task', 'show']: - continue - - action = ent.get("action") - # skip if remove (Entity does not exist in Ftrack) - if action == "remove": - continue - - # When entity was add we don't care about keys - if action != "add": - keys = ent.get('keys') - if not keys: - continue - - entity = session.get(self._get_entity_type(ent), ent['entityId']) - processable.append(ent) - - processable_ent[ent['entityId']] = { - "entity": entity, - "action": action, - "link": entity["link"] - } - - if not processable: - return True - - # Find project of entities - ft_project = None - for entity_dict in processable_ent.values(): - try: - base_proj = entity_dict['link'][0] - except Exception: - continue - ft_project = session.get(base_proj['type'], base_proj['id']) - break - - # check if project is set to auto-sync - if ( - ft_project is None or - 'avalon_auto_sync' not in ft_project['custom_attributes'] or - ft_project['custom_attributes']['avalon_auto_sync'] is False - ): - return True - - # Get hierarchical custom attributes from "avalon" group - custom_attributes = {} - query = 'CustomAttributeGroup where name is "avalon"' - all_avalon_attr = session.query(query).one() - for cust_attr in all_avalon_attr['custom_attribute_configurations']: - if 'avalon_' in cust_attr['key']: - continue - if not cust_attr['is_hierarchical']: - continue - custom_attributes[cust_attr['key']] = cust_attr - - if not custom_attributes: - return True - - self.db_con.install() - self.db_con.Session['AVALON_PROJECT'] = ft_project['full_name'] - - for ent in processable: - entity_dict = processable_ent[ent['entityId']] - - entity = entity_dict["entity"] - ent_path = "/".join([ent["name"] for ent in entity_dict['link']]) - action = entity_dict["action"] - - keys_to_process = {} - if action == "add": - # Store all custom attributes when entity was added - for key in custom_attributes: - keys_to_process[key] = entity['custom_attributes'][key] - else: - # Update only updated keys - for key in ent['keys']: - if key in custom_attributes: - keys_to_process[key] = entity['custom_attributes'][key] - - processed_keys = self.get_hierarchical_values( - keys_to_process, entity - ) - # Do the processing of values - self.update_hierarchical_attribute(entity, processed_keys, ent_path) - - self.db_con.uninstall() - - return True - - def get_hierarchical_values(self, keys_dict, entity): - # check already set values - _set_keys = [] - for key, value in keys_dict.items(): - if value is not None: - _set_keys.append(key) - - # pop set values from keys_dict - set_keys = {} - for key in _set_keys: - set_keys[key] = keys_dict.pop(key) - - # find if entity has set values and pop them out - keys_to_pop = [] - for key in keys_dict.keys(): - _val = entity["custom_attributes"][key] - if _val: - keys_to_pop.append(key) - set_keys[key] = _val - - for key in keys_to_pop: - keys_dict.pop(key) - - # if there are not keys to find value return found - if not keys_dict: - return set_keys - - # end recursion if entity is project - if entity.entity_type.lower() == "project": - for key, value in keys_dict.items(): - set_keys[key] = value - - else: - result = self.get_hierarchical_values(keys_dict, entity["parent"]) - for 
key, value in result.items(): - set_keys[key] = value - - return set_keys - - def update_hierarchical_attribute(self, entity, keys_dict, ent_path): - # TODO store all keys at once for entity - custom_attributes = entity.get('custom_attributes') - if not custom_attributes: - return - - mongoid = custom_attributes.get(self.ca_mongoid) - if not mongoid: - return - - try: - mongoid = ObjectId(mongoid) - except Exception: - return - - mongo_entity = self.db_con.find_one({'_id': mongoid}) - if not mongo_entity: - return - - changed_keys = {} - data = mongo_entity.get('data') or {} - for key, value in keys_dict.items(): - cur_value = data.get(key) - if cur_value: - if cur_value == value: - continue - changed_keys[key] = value - data[key] = value - - if not changed_keys: - return - - self.log.debug( - "{} - updated hierarchical attributes: {}".format( - ent_path, str(changed_keys) - ) - ) - - self.db_con.update_many( - {'_id': mongoid}, - {'$set': {'data': data}} - ) - - for child in entity.get('children', []): - _keys_dict = {} - for key, value in keys_dict.items(): - if key not in child.get('custom_attributes', {}): - continue - child_value = child['custom_attributes'][key] - if child_value is not None: - continue - _keys_dict[key] = value - - if not _keys_dict: - continue - child_path = "/".join([ent["name"] for ent in child['link']]) - self.update_hierarchical_attribute(child, _keys_dict, child_path) - - -def register(session, plugins_presets): - '''Register plugin. Called when used as an plugin.''' - - SyncHierarchicalAttrs(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 3fe65bca11..606866aba2 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1,127 +1,2282 @@ -from pype.vendor import ftrack_api -from pype.ftrack import BaseEvent, lib +import os +import collections +import copy +import queue +import time +import atexit +import traceback + +from bson.objectid import ObjectId +from pymongo import UpdateOne + +from avalon import schema + +from pype.ftrack.lib import avalon_sync +from pype.ftrack.lib.avalon_sync import ( + CustAttrIdKey, CustAttrAutoSync, EntitySchemas +) +import ftrack_api +from pype.ftrack import BaseEvent + +from pype.ftrack.lib.io_nonsingleton import DbConnector -class Sync_to_Avalon(BaseEvent): +class SyncToAvalonEvent(BaseEvent): - priority = 100 + dbcon = DbConnector() - ignore_entityType = [ - 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer', - 'socialfeed', 'socialnotification', 'timelog' + ignore_entTypes = [ + "socialfeed", "socialnotification", "note", + "assetversion", "job", "user", "reviewsessionobject", "timer", + "timelog", "auth_userrole" ] + ignore_ent_types = ["Milestone"] + ignore_keys = ["statusid"] + + project_query = ( + "select full_name, name, custom_attributes" + ", project_schema._task_type_schema.types.name" + " from Project where id is \"{}\"" + ) + + entities_query_by_id = ( + "select id, name, parent_id, link, custom_attributes from TypedContext" + " where project_id is \"{}\" and id in ({})" + ) + entities_name_query_by_name = ( + "select id, name from TypedContext" + " where project_id is \"{}\" and name in ({})" + ) + created_entities = [] + + def __init__(self, session, plugins_presets={}): + '''Expects a ftrack_api.Session instance''' + self.set_process_session(session) + super().__init__(session, plugins_presets) + + @property + def cur_project(self): + if self._cur_project is 
None: + found_id = None + for ent_info in self._cur_event["data"]["entities"]: + if found_id is not None: + break + parents = ent_info.get("parents") or [] + for parent in parents: + if parent.get("entityType") == "show": + found_id = parent.get("entityId") + break + if found_id: + self._cur_project = self.process_session.query( + self.project_query.format(found_id) + ).one() + return self._cur_project + + @property + def avalon_cust_attrs(self): + if self._avalon_cust_attrs is None: + self._avalon_cust_attrs = avalon_sync.get_avalon_attr( + self.process_session + ) + return self._avalon_cust_attrs + + @property + def avalon_entities(self): + if self._avalon_ents is None: + self.dbcon.install() + self.dbcon.Session["AVALON_PROJECT"] = ( + self.cur_project["full_name"] + ) + avalon_project = self.dbcon.find_one({"type": "project"}) + avalon_entities = list(self.dbcon.find({"type": "asset"})) + self._avalon_ents = (avalon_project, avalon_entities) + return self._avalon_ents + + @property + def avalon_ents_by_name(self): + if self._avalon_ents_by_name is None: + self._avalon_ents_by_name = {} + proj, ents = self.avalon_entities + for ent in ents: + self._avalon_ents_by_name[ent["name"]] = ent + return self._avalon_ents_by_name + + @property + def avalon_ents_by_id(self): + if self._avalon_ents_by_id is None: + self._avalon_ents_by_id = {} + proj, ents = self.avalon_entities + self._avalon_ents_by_id[proj["_id"]] = proj + for ent in ents: + self._avalon_ents_by_id[ent["_id"]] = ent + return self._avalon_ents_by_id + + @property + def avalon_ents_by_parent_id(self): + if self._avalon_ents_by_parent_id is None: + self._avalon_ents_by_parent_id = collections.defaultdict(list) + proj, ents = self.avalon_entities + for ent in ents: + vis_par = ent["data"]["visualParent"] + if vis_par is None: + vis_par = proj["_id"] + self._avalon_ents_by_parent_id[vis_par].append(ent) + return self._avalon_ents_by_parent_id + + @property + def avalon_ents_by_ftrack_id(self): + if self._avalon_ents_by_ftrack_id is None: + self._avalon_ents_by_ftrack_id = {} + proj, ents = self.avalon_entities + ftrack_id = proj["data"]["ftrackId"] + self._avalon_ents_by_ftrack_id[ftrack_id] = proj + for ent in ents: + ftrack_id = ent["data"]["ftrackId"] + self._avalon_ents_by_ftrack_id[ftrack_id] = ent + return self._avalon_ents_by_ftrack_id + + @property + def avalon_subsets_by_parents(self): + if self._avalon_subsets_by_parents is None: + self._avalon_subsets_by_parents = collections.defaultdict(list) + self.dbcon.install() + self.dbcon.Session["AVALON_PROJECT"] = ( + self.cur_project["full_name"] + ) + for subset in self.dbcon.find({"type": "subset"}): + self._avalon_subsets_by_parents[subset["parent"]].append( + subset + ) + return self._avalon_subsets_by_parents + + @property + def avalon_archived_by_id(self): + if self._avalon_archived_by_id is None: + self._avalon_archived_by_id = {} + self.dbcon.install() + self.dbcon.Session["AVALON_PROJECT"] = ( + self.cur_project["full_name"] + ) + for asset in self.dbcon.find({"type": "archived_asset"}): + self._avalon_archived_by_id[asset["_id"]] = asset + return self._avalon_archived_by_id + + @property + def avalon_archived_by_name(self): + if self._avalon_archived_by_name is None: + self._avalon_archived_by_name = {} + for asset in self.avalon_archived_by_id.values(): + self._avalon_archived_by_name[asset["name"]] = asset + return self._avalon_archived_by_name + + @property + def changeability_by_mongo_id(self): + """Return info about changeability of entity and it's parents.""" + 
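The block of properties above follows a single memoization pattern: build a lookup table on first access, reuse it for the rest of the event, and let reset_variables() (defined further below) null it out so the next event reads fresh database state. The pattern in isolation, reduced to one lookup over assets (illustrative class, not part of the diff):

class CachedLookups:
    def __init__(self, dbcon):
        self.dbcon = dbcon
        self._ents_by_ftrack_id = None

    @property
    def ents_by_ftrack_id(self):
        if self._ents_by_ftrack_id is None:
            # Built once per event from the current database state.
            self._ents_by_ftrack_id = {
                ent["data"]["ftrackId"]: ent
                for ent in self.dbcon.find({"type": "asset"})
            }
        return self._ents_by_ftrack_id

    def reset_variables(self):
        # Called at the start of each event callback.
        self._ents_by_ftrack_id = None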
if self._changeability_by_mongo_id is None: + self._changeability_by_mongo_id = collections.defaultdict( + lambda: True + ) + avalon_project, avalon_entities = self.avalon_entities + self._changeability_by_mongo_id[avalon_project["_id"]] = False + self._bubble_changeability( + list(self.avalon_subsets_by_parents.keys()) + ) + + return self._changeability_by_mongo_id + + @property + def avalon_custom_attributes(self): + """Return info about changeability of entity and it's parents.""" + if self._avalon_custom_attributes is None: + self._avalon_custom_attributes = avalon_sync.get_avalon_attr( + self.process_session + ) + return self._avalon_custom_attributes + + def remove_cached_by_key(self, key, values): + if self._avalon_ents is None: + return + + if not isinstance(values, (list, tuple)): + values = [values] + + def get_found_data(entity): + if not entity: + return None + return { + "ftrack_id": entity["data"]["ftrackId"], + "parent_id": entity["data"]["visualParent"], + "_id": entity["_id"], + "name": entity["name"], + "entity": entity + } + + if key == "id": + key = "_id" + elif key == "ftrack_id": + key = "data.ftrackId" + + found_data = {} + project, entities = self._avalon_ents + key_items = key.split(".") + for value in values: + ent = None + if key == "_id": + if self._avalon_ents_by_id is not None: + ent = self._avalon_ents_by_id.get(value) + + elif key == "name": + if self._avalon_ents_by_name is not None: + ent = self._avalon_ents_by_name.get(value) + + elif key == "data.ftrackId": + if self._avalon_ents_by_ftrack_id is not None: + ent = self._avalon_ents_by_ftrack_id.get(value) + + if ent is None: + for _ent in entities: + _temp = _ent + for item in key_items: + _temp = _temp[item] + + if _temp == value: + ent = _ent + break + + found_data[value] = get_found_data(ent) + + for value in values: + data = found_data[value] + if not data: + # TODO logging + self.log.warning( + "Didn't found entity by key/value \"{}\" / \"{}\"".format( + key, value + ) + ) + continue + + ftrack_id = data["ftrack_id"] + parent_id = data["parent_id"] + mongo_id = data["_id"] + name = data["name"] + entity = data["entity"] + + project, ents = self._avalon_ents + ents.remove(entity) + self._avalon_ents = project, ents + + if self._avalon_ents_by_ftrack_id is not None: + self._avalon_ents_by_ftrack_id.pop(ftrack_id, None) + + if self._avalon_ents_by_parent_id is not None: + self._avalon_ents_by_parent_id[parent_id].remove(entity) + + if self._avalon_ents_by_id is not None: + self._avalon_ents_by_id.pop(mongo_id, None) + + if self._avalon_ents_by_name is not None: + self._avalon_ents_by_name.pop(name, None) + + if self._avalon_archived_by_id is not None: + self._avalon_archived_by_id[mongo_id] = entity + + if mongo_id in self.task_changes_by_avalon_id: + self.task_changes_by_avalon_id.pop(mongo_id) + + def _bubble_changeability(self, unchangeable_ids): + unchangeable_queue = queue.Queue() + for entity_id in unchangeable_ids: + unchangeable_queue.put((entity_id, False)) + + processed_parents_ids = [] + while not unchangeable_queue.empty(): + entity_id, child_is_archived = unchangeable_queue.get() + # skip if already processed + if entity_id in processed_parents_ids: + continue + + entity = self.avalon_ents_by_id.get(entity_id) + # if entity is not archived but unchageable child was then skip + # - archived entities should not affect not archived? 
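_bubble_changeability, whose body continues below, is at heart a queue-driven parent walk: start from every asset that owns subsets and mark the whole ancestor chain as unchangeable. A minimal model of just that traversal (the archived-entity handling of the real method is omitted; parent_id_of is an assumed id-to-parent-id mapping):

import queue

def mark_unchangeable(start_ids, parent_id_of):
    unchangeable = set()
    pending = queue.Queue()
    for entity_id in start_ids:
        pending.put(entity_id)

    while not pending.empty():
        entity_id = pending.get()
        if entity_id in unchangeable:
            continue  # this branch was already walked
        unchangeable.add(entity_id)
        parent_id = parent_id_of.get(entity_id)
        if parent_id is not None:  # None marks the project root
            pending.put(parent_id)
    return unchangeable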
+ + def _bubble_changeability(self, unchangeable_ids): + unchangeable_queue = queue.Queue() + for entity_id in unchangeable_ids: + unchangeable_queue.put((entity_id, False)) + + processed_parents_ids = [] + while not unchangeable_queue.empty(): + entity_id, child_is_archived = unchangeable_queue.get() + # skip if already processed + if entity_id in processed_parents_ids: + continue + + entity = self.avalon_ents_by_id.get(entity_id) + # if entity is not archived but an unchangeable child was then skip + # - archived entities should not affect non-archived ones? + if entity and child_is_archived: + continue + + # set changeability of current entity to False + self._changeability_by_mongo_id[entity_id] = False + processed_parents_ids.append(entity_id) + # if there is no entity then it is probably archived + if not entity: + entity = self.avalon_archived_by_id.get(entity_id) + child_is_archived = True + + if not entity: + # if entity is not found then it is a subset without parent + if entity_id in unchangeable_ids: + _subset_ids = [ + str(sub["_id"]) for sub in + self.avalon_subsets_by_parents[entity_id] + ] + joined_subset_ids = "| ".join(_subset_ids) + self.log.warning(( + "Parent <{}> for subsets <{}> does not exist" + ).format(str(entity_id), joined_subset_ids)) + else: + self.log.warning(( + "Avalon contains entities without valid parents that" + " lead to Project (should not cause errors)" + " - MongoId <{}>" + ).format(str(entity_id))) + continue + + # skip if parent is project + parent_id = entity["data"]["visualParent"] + if parent_id is None: + continue + unchangeable_queue.put((parent_id, child_is_archived))
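# _bubble_changeability above starts from assets that own published subsets
# and walks visualParent links upward, marking every visited id as
# unchangeable. A runnable sketch of the same traversal with dict stand-ins
# (illustrative names, not part of the patch):

import queue

def bubble_unchangeable(start_ids, parent_by_id):
    unchangeable = set()
    todo = queue.Queue()
    for entity_id in start_ids:
        todo.put(entity_id)
    while not todo.empty():
        entity_id = todo.get()
        if entity_id in unchangeable:
            continue  # already processed
        unchangeable.add(entity_id)
        parent_id = parent_by_id.get(entity_id)
        if parent_id is not None:
            todo.put(parent_id)  # bubble one level up
    return unchangeable

assert bubble_unchangeable(
    ["sh010"], {"sh010": "sq01", "sq01": "ep01", "ep01": None}
) == {"sh010", "sq01", "ep01"}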
+ + def reset_variables(self): + """Reset variables so each event callback has a clean environment.""" + self._cur_project = None + + self._avalon_cust_attrs = None + + self._avalon_ents = None + self._avalon_ents_by_id = None + self._avalon_ents_by_parent_id = None + self._avalon_ents_by_ftrack_id = None + self._avalon_ents_by_name = None + self._avalon_subsets_by_parents = None + self._changeability_by_mongo_id = None + self._avalon_archived_by_id = None + self._avalon_archived_by_name = None + + self.task_changes_by_avalon_id = {} + + self._avalon_custom_attributes = None + self._ent_types_by_name = None + + self.ftrack_ents_by_id = {} + self.obj_id_ent_type_map = {} + self.ftrack_recreated_mapping = {} + + self.ftrack_added = {} + self.ftrack_moved = {} + self.ftrack_renamed = {} + self.ftrack_updated = {} + self.ftrack_removed = {} + + self.moved_in_avalon = [] + self.renamed_in_avalon = [] + self.hier_cust_attrs_changes = collections.defaultdict(list) + + self.duplicated = [] + self.regex_failed = [] + + self.regex_schemas = {} + self.updates = collections.defaultdict(dict) + + self.report_items = { + "info": collections.defaultdict(list), + "warning": collections.defaultdict(list), + "error": collections.defaultdict(list) + } + + def set_process_session(self, session): + try: + self.process_session.close() + except Exception: + pass + self.process_session = ftrack_api.Session( + server_url=session.server_url, + api_key=session.api_key, + api_user=session.api_user, + auto_connect_event_hub=True + ) + atexit.register(lambda: self.process_session.close()) + + def filter_updated(self, updates): + filtered_updates = {} + for ftrack_id, ent_info in updates.items(): + changed_keys = [k for k in (ent_info.get("keys") or [])] + changes = { + k: v for k, v in (ent_info.get("changes") or {}).items() + } + + entity_type = ent_info["entity_type"] + if entity_type == "Task": + if "name" in changed_keys: + ent_info["keys"] = ["name"] + ent_info["changes"] = {"name": changes.pop("name")} + filtered_updates[ftrack_id] = ent_info + continue + + for _key in self.ignore_keys: + if _key in changed_keys: + changed_keys.remove(_key) + changes.pop(_key, None) + + if not changed_keys: + continue + + # Remove custom attributes starting with `avalon_` from changes + # - these custom attributes are not synchronized + avalon_keys = [] + for key in changes: + if key.startswith("avalon_"): + avalon_keys.append(key) + + for _key in avalon_keys: + changed_keys.remove(_key) + changes.pop(_key, None) + + if not changed_keys: + continue + + ent_info["keys"] = changed_keys + ent_info["changes"] = changes + filtered_updates[ftrack_id] = ent_info + + return filtered_updates + + def get_ent_path(self, ftrack_id): + entity = self.ftrack_ents_by_id.get(ftrack_id) + if not entity: + entity = self.process_session.query( + self.entities_query_by_id.format( + self.cur_project["id"], ftrack_id + ) + ).first() + if entity: + self.ftrack_ents_by_id[ftrack_id] = entity + else: + return "unknown hierarchy" + return "/".join([ent["name"] for ent in entity["link"]]) def launch(self, session, event): - ca_mongoid = lib.get_ca_mongoid() - # If mongo_id textfield has changed: RETURN! - # - infinite loop - for ent in event['data']['entities']: - if ent.get('keys') is not None: - if ca_mongoid in ent['keys']: - return + # Try to commit and if any error happens then recreate session + try: + self.process_session.commit() + except Exception: + self.set_process_session(session) - entities = self._get_entities(session, event, self.ignore_entityType) - ft_project = None - # get project - for entity in entities: - try: - base_proj = entity['link'][0] - except Exception: + # Reset object values for each launch + self.reset_variables() + self._cur_event = event + + entities_by_action = { + "remove": {}, + "update": {}, + "move": {}, + "add": {} + } + + entities_info = event["data"]["entities"] + found_actions = set() + for ent_info in entities_info: + entityType = ent_info["entityType"] + if entityType in self.ignore_entTypes: continue - ft_project = session.get(base_proj['type'], base_proj['id']) - break - # check if project is set to auto-sync + entity_type = ent_info.get("entity_type") + if not entity_type or entity_type in self.ignore_ent_types: + continue + + action = ent_info["action"] + ftrack_id = ent_info["entityId"] + if action == "move": + ent_keys = ent_info["keys"] + # Separate update info from move action + if len(ent_keys) > 1: + _ent_info = ent_info.copy() + for ent_key in ent_keys: + if ent_key == "parent_id": + _ent_info["changes"].pop(ent_key, None) + _ent_info["keys"].remove(ent_key) + else: + ent_info["changes"].pop(ent_key, None) + ent_info["keys"].remove(ent_key) + + entities_by_action["update"][ftrack_id] = _ent_info + + found_actions.add(action) + entities_by_action[action][ftrack_id] = ent_info + + found_actions = list(found_actions) + if not found_actions: + return True + + # Check if auto sync was turned on/off + updated = entities_by_action["update"] + for ftrack_id, ent_info in updated.items(): + # filter project + if ent_info["entityType"] != "show": + continue + + changes = ent_info["changes"] + if CustAttrAutoSync not in changes: + continue + + auto_sync = changes[CustAttrAutoSync]["new"] + if auto_sync == "1": + # Trigger sync to avalon action if auto sync was turned on + ft_project = self.cur_project + self.log.debug(( + "Auto sync was turned on for project <{}>." + " Triggering syncToAvalon action."
+ ).format(ft_project["full_name"])) + selection = [{ + "entityId": ft_project["id"], + "entityType": "show" + }] + self.trigger_action( + action_name="sync.to.avalon.server", + event=event, + selection=selection + ) + # Exit for both cases + return True + + # Filter updated data by changed keys + updated = self.filter_updated(updated) + + # skip most events where nothing has changed for avalon + if ( - ft_project is None or - 'avalon_auto_sync' not in ft_project['custom_attributes'] or - ft_project['custom_attributes']['avalon_auto_sync'] is False + len(found_actions) == 1 and + found_actions[0] == "update" and + not updated + ): + return True + + ft_project = self.cur_project + # Check if auto-sync custom attribute exists + if CustAttrAutoSync not in ft_project["custom_attributes"]: + # TODO should we send a message to someone? + self.log.error(( + "Custom attribute \"{}\" is not created or user \"{}\" used" + " for Event server doesn't have permissions to access it!" + ).format(CustAttrAutoSync, self.session.api_user)) + return True + + # Skip if auto-sync is not set + auto_sync = ft_project["custom_attributes"][CustAttrAutoSync] + if auto_sync is not True: + return True + + debug_msg = "" + debug_msg += "Updated: {}".format(len(updated)) + debug_action_map = { + "add": "Created", + "remove": "Removed", + "move": "Moved" + } + for action, infos in entities_by_action.items(): + if action == "update": + continue + _action = debug_action_map[action] + debug_msg += "| {}: {}".format(_action, len(infos)) + + self.log.debug("Project changes <{}>: {}".format( + ft_project["full_name"], debug_msg + )) + # Get ftrack entities - find all ftrack ids first + ftrack_ids = [] + for ftrack_id in updated: + ftrack_ids.append(ftrack_id) + + for action, action_ent_infos in entities_by_action.items(): + # skip removed (they no longer exist in ftrack); updated ids are already added + if action == "remove": + continue + + for ftrack_id in action_ent_infos: + if ftrack_id not in ftrack_ids: + ftrack_ids.append(ftrack_id) + + if ftrack_ids: + joined_ids = ", ".join(["\"{}\"".format(id) for id in ftrack_ids]) + ftrack_entities = self.process_session.query( + self.entities_query_by_id.format(ft_project["id"], joined_ids) + ).all() + for entity in ftrack_entities: + self.ftrack_ents_by_id[entity["id"]] = entity + + # Filter updates where name is changing + for ftrack_id, ent_info in updated.items(): + ent_keys = ent_info["keys"] + # Separate update info from rename + if "name" not in ent_keys: + continue + + _ent_info = copy.deepcopy(ent_info) + for ent_key in ent_keys: + if ent_key == "name": + ent_info["changes"].pop(ent_key, None) + ent_info["keys"].remove(ent_key) + else: + _ent_info["changes"].pop(ent_key, None) + _ent_info["keys"].remove(ent_key) + + self.ftrack_renamed[ftrack_id] = _ent_info + + self.ftrack_removed = entities_by_action["remove"] + self.ftrack_moved = entities_by_action["move"] + self.ftrack_added = entities_by_action["add"] + self.ftrack_updated = updated + + self.log.debug("Synchronization begins") + try: + time_1 = time.time() + # 1.) Process removed - may affect all other actions + self.process_removed() + time_2 = time.time() + # 2.) Process renamed - may affect added + self.process_renamed() + time_3 = time.time() + # 3.) Process added - moved entity may be moved to new entity + self.process_added() + time_4 = time.time() + # 4.) Process moved + self.process_moved() + time_5 = time.time() + # 5.) Process updated + self.process_updated() + time_6 = time.time() + # 6.)
Process changes in hierarchy or hier custom attributes + self.process_hier_cleanup() + if self.updates: + self.update_entities() + time_7 = time.time() + + time_removed = time_2 - time_1 + time_renamed = time_3 - time_2 + time_added = time_4 - time_3 + time_moved = time_5 - time_4 + time_updated = time_6 - time_5 + time_cleanup = time_7 - time_6 + time_total = time_7 - time_1 + self.log.debug("Process time: {} <{}, {}, {}, {}, {}, {}>".format( + time_total, time_removed, time_renamed, time_added, time_moved, + time_updated, time_cleanup + )) + + except Exception: + msg = "An error has happened during synchronization" + self.report_items["error"][msg].append(( + str(traceback.format_exc()).replace("\n", "<br>
") + ).replace(" ", " ")) + + self.report() + return True + + def process_removed(self): + if not self.ftrack_removed: + return + ent_infos = self.ftrack_removed + removable_ids = [] + recreate_ents = [] + removed_names = [] + for ftrack_id, removed in ent_infos.items(): + entity_type = removed["entity_type"] + parent_id = removed["parentId"] + removed_name = removed["changes"]["name"]["old"] + if entity_type == "Task": + avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id) + if not avalon_ent: + self.log.debug(( + "Parent entity of task was not found in avalon <{}>" + ).format(self.get_ent_path(parent_id))) + continue + + mongo_id = avalon_ent["_id"] + if mongo_id not in self.task_changes_by_avalon_id: + self.task_changes_by_avalon_id[mongo_id] = ( + avalon_ent["data"]["tasks"] + ) + + if removed_name in self.task_changes_by_avalon_id[mongo_id]: + self.task_changes_by_avalon_id[mongo_id].remove( + removed_name + ) + + continue + + avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if not avalon_ent: + continue + mongo_id = avalon_ent["_id"] + if self.changeability_by_mongo_id[mongo_id]: + removable_ids.append(mongo_id) + removed_names.append(removed_name) + else: + recreate_ents.append(avalon_ent) + + if removable_ids: + # TODO logging + self.log.debug("Assets marked as archived <{}>".format( + ", ".join(removed_names) + )) + self.dbcon.update_many( + {"_id": {"$in": removable_ids}, "type": "asset"}, + {"$set": {"type": "archived_asset"}} + ) + self.remove_cached_by_key("id", removable_ids) + + if recreate_ents: + # sort removed entities by parents len + # - length of parents determine hierarchy level + recreate_ents = sorted( + recreate_ents, + key=(lambda item: len( + (item.get("data", {}).get("parents") or []) + )) + ) + # TODO logging + # TODO report + recreate_msg = ( + "Deleted entity was recreated||Entity was recreated because" + " it or its children contain published data" + ) + proj, ents = self.avalon_entities + for avalon_entity in recreate_ents: + old_ftrack_id = avalon_entity["data"]["ftrackId"] + vis_par = avalon_entity["data"]["visualParent"] + if vis_par is None: + vis_par = proj["_id"] + parent_ent = self.avalon_ents_by_id[vis_par] + parent_ftrack_id = parent_ent["data"]["ftrackId"] + parent_ftrack_ent = self.ftrack_ents_by_id.get( + parent_ftrack_id + ) + if not parent_ftrack_ent: + if parent_ent["type"].lower() == "project": + parent_ftrack_ent = self.cur_project + else: + parent_ftrack_ent = self.process_session.query( + self.entities_query_by_id.format( + self.cur_project["id"], parent_ftrack_id + ) + ).one() + entity_type = avalon_entity["data"]["entityType"] + new_entity = self.process_session.create(entity_type, { + "name": avalon_entity["name"], + "parent": parent_ftrack_ent + }) + try: + self.process_session.commit() + except Exception: + # TODO logging + # TODO report + self.process_session.rolback() + ent_path_items = [self.cur_project["full_name"]] + ent_path_items.extend([ + par for par in avalon_entity["data"]["parents"] + ]) + ent_path_items.append(avalon_entity["name"]) + ent_path = "/".join(ent_path_items) + + error_msg = "Couldn't recreate entity in Ftrack" + report_msg = ( + "{}||Trying to recreate because it or its children" + " contain published data" + ).format(error_msg) + self.report_items["warning"][report_msg].append(ent_path) + self.log.warning( + "{}. Process session commit failed! 
<{}>".format( + error_msg, ent_path + ), + exc_info=True + ) + continue + + new_entity_id = new_entity["id"] + avalon_entity["data"]["ftrackId"] = new_entity_id + + for key, val in avalon_entity["data"].items(): + if not val: + continue + if key not in new_entity["custom_attributes"]: + continue + + new_entity["custom_attributes"][key] = val + + new_entity["custom_attributes"][CustAttrIdKey] = ( + str(avalon_entity["_id"]) + ) + ent_path = self.get_ent_path(new_entity_id) + + try: + self.process_session.commit() + except Exception: + # TODO logging + # TODO report + self.process_session.rolback() + error_msg = ( + "Couldn't update custom attributes after recreation" + " of entity in Ftrack" + ) + report_msg = ( + "{}||Entity was recreated because it or its children" + " contain published data" + ).format(error_msg) + self.report_items["warning"][report_msg].append(ent_path) + self.log.warning( + "{}. Process session commit failed! <{}>".format( + error_msg, ent_path + ), + exc_info=True + ) + continue + + self.report_items["info"][recreate_msg].append(ent_path) + + self.ftrack_recreated_mapping[old_ftrack_id] = new_entity_id + self.process_session.commit() + + found_idx = None + for idx, _entity in enumerate(self._avalon_ents): + if _entity["_id"] == avalon_entity["_id"]: + found_idx = idx + break + + if found_idx is None: + continue + + # Prepare updates dict for mongo update + if "data" not in self.updates[avalon_entity["_id"]]: + self.updates[avalon_entity["_id"]]["data"] = {} + + self.updates[avalon_entity["_id"]]["data"]["ftrackId"] = ( + new_entity_id + ) + # Update cached entities + self._avalon_ents[found_idx] = avalon_entity + + if self._avalon_ents_by_id is not None: + mongo_id = avalon_entity["_id"] + self._avalon_ents_by_id[mongo_id] = avalon_entity + + if self._avalon_ents_by_parent_id is not None: + vis_par = avalon_entity["data"]["visualParent"] + children = self._avalon_ents_by_parent_id[vis_par] + found_idx = None + for idx, _entity in enumerate(children): + if _entity["_id"] == avalon_entity["_id"]: + found_idx = idx + break + children[found_idx] = avalon_entity + self._avalon_ents_by_parent_id[vis_par] = children + + if self._avalon_ents_by_ftrack_id is not None: + self._avalon_ents_by_ftrack_id.pop(old_ftrack_id) + self._avalon_ents_by_ftrack_id[new_entity_id] = ( + avalon_entity + ) + + if self._avalon_ents_by_name is not None: + name = avalon_entity["name"] + self._avalon_ents_by_name[name] = avalon_entity + + # Check if entities with same name can be synchronized + if not removed_names: + return + + self.check_names_synchronizable(removed_names) + + def check_names_synchronizable(self, names): + """Check if entities with specific names are importable. + + This check should happend after removing entity or renaming entity. + When entity was removed or renamed then it's name is possible to sync. + """ + joined_passed_names = ", ".join( + ["\"{}\"".format(name) for name in names] + ) + same_name_entities = self.process_session.query( + self.entities_name_query_by_name.format( + self.cur_project["id"], joined_passed_names + ) + ).all() + if not same_name_entities: + return + + entities_by_name = collections.defaultdict(list) + for entity in same_name_entities: + entities_by_name[entity["name"]].append(entity) + + synchronizable_ents = [] + self.log.debug(( + "Deleting of entities should allow to synchronize another entities" + " with same name." 
+ )) + for name, ents in entities_by_name.items(): + if len(ents) != 1: + self.log.debug(( + "Name \"{}\" still has more than one entity <{}>" + ).format( + name, "| ".join( + [self.get_ent_path(ent["id"]) for ent in ents] + ) + )) + continue + + entity = ents[0] + ent_path = self.get_ent_path(entity["id"]) + # TODO logging + self.log.debug( + "Checking if entity can be synchronized <{}>".format(ent_path) + ) + # skip if already synchronized + ftrack_id = entity["id"] + if ftrack_id in self.avalon_ents_by_ftrack_id: + # TODO logging + self.log.debug( + "- Entity is already synchronized (skipping) <{}>".format( + ent_path + ) + ) + continue + + parent_id = entity["parent_id"] + if parent_id not in self.avalon_ents_by_ftrack_id: + # TODO logging + self.log.debug(( + "- Entity's parent entity doesn't seem to" + " be synchronized (skipping) <{}>" + ).format(ent_path)) + continue + + synchronizable_ents.append(entity) + + if not synchronizable_ents: + return + + synchronizable_ents = sorted( + synchronizable_ents, + key=(lambda entity: len(entity["link"])) + ) + + children_queue = queue.Queue() + for entity in synchronizable_ents: + parent_avalon_ent = self.avalon_ents_by_ftrack_id[ + entity["parent_id"] + ] + self.create_entity_in_avalon(entity, parent_avalon_ent) + + for child in entity["children"]: + if child.entity_type.lower() == "task": + continue + children_queue.put(child) + + while not children_queue.empty(): + entity = children_queue.get() + ftrack_id = entity["id"] + name = entity["name"] + ent_by_ftrack_id = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if ent_by_ftrack_id: + raise Exception(( + "This is a bug, parent was just synchronized to avalon" + " but entity is already in database {}" + ).format(dict(entity))) + + # Entity has a duplicated name with another entity + # - may be renamed: in that case renaming method will handle that + duplicate_ent = self.avalon_ents_by_name.get(name) + if duplicate_ent: + continue + + passed_regex = avalon_sync.check_regex( + name, "asset", schema_patterns=self.regex_schemas + ) + if not passed_regex: + continue + + parent_id = entity["parent_id"] + parent_avalon_ent = self.avalon_ents_by_ftrack_id[parent_id] + + self.create_entity_in_avalon(entity, parent_avalon_ent) + + for child in entity["children"]: + if child.entity_type.lower() == "task": + continue + children_queue.put(child) + + def create_entity_in_avalon(self, ftrack_ent, parent_avalon): + proj, ents = self.avalon_entities + + # Parents, Hierarchy + ent_path_items = [ent["name"] for ent in ftrack_ent["link"]] + parents = ent_path_items[1:len(ent_path_items)-1:] + hierarchy = "" + if len(parents) > 0: + hierarchy = os.path.sep.join(parents) + + # TODO logging + self.log.debug( + "Trying to synchronize entity <{}>".format( + "/".join(ent_path_items) + ) + ) + + # Tasks + tasks = [] + for child in ftrack_ent["children"]: + if child.entity_type.lower() != "task": + continue + tasks.append(child["name"]) + + # Visual Parent + vis_par = None + if parent_avalon["type"].lower() != "project": + vis_par = parent_avalon["_id"] + + mongo_id = ObjectId() + name = ftrack_ent["name"] + final_entity = { + "_id": mongo_id, + "name": name, + "type": "asset", + "schema": EntitySchemas["asset"], + "parent": proj["_id"], + "data": { + "ftrackId": ftrack_ent["id"], + "entityType": ftrack_ent.entity_type, + "parents": parents, + "hierarchy": hierarchy, + "tasks": tasks, + "visualParent": vis_par + } + } + cust_attrs = self.get_cust_attr_values(ftrack_ent) + for key, val in cust_attrs.items(): + if
key.startswith("avalon_"): + continue + final_entity["data"][key] = val + + _mongo_id_str = cust_attrs.get(CustAttrIdKey) + if _mongo_id_str: + try: + _mongo_id = ObjectId(_mongo_id_str) + if _mongo_id not in self.avalon_ents_by_id: + mongo_id = _mongo_id + final_entity["_id"] = mongo_id + + except Exception: + pass + + ent_path_items = [self.cur_project["full_name"]] + ent_path_items.extend([par for par in parents]) + ent_path_items.append(name) + ent_path = "/".join(ent_path_items) + + try: + schema.validate(final_entity) + except Exception: + # TODO logging + # TODO report + error_msg = ( + "Schema validation failed for new entity (This is a bug)" + ) + error_traceback = ( + str(traceback.format_exc()).replace("\n", "<br>
") + ).replace(" ", " ") + + item_msg = ent_path + "
" + error_traceback + self.report_items["error"][error_msg].append(item_msg) + self.log.error( + "{}: \"{}\"".format(error_msg, str(final_entity)), + exc_info=True + ) + return None + + replaced = False + archived = self.avalon_archived_by_name.get(name) + if archived: + archived_id = archived["_id"] + if ( + archived["data"]["parents"] == parents or + self.changeability_by_mongo_id[archived_id] + ): + # TODO logging + self.log.debug( + "Entity was unarchived instead of creation <{}>".format( + ent_path + ) + ) + mongo_id = archived_id + final_entity["_id"] = mongo_id + self.dbcon.replace_one({"_id": mongo_id}, final_entity) + replaced = True + + if not replaced: + self.dbcon.insert_one(final_entity) + # TODO logging + self.log.debug("Entity was synchronized <{}>".format(ent_path)) + + mongo_id_str = str(mongo_id) + if mongo_id_str != ftrack_ent["custom_attributes"][CustAttrIdKey]: + ftrack_ent["custom_attributes"][CustAttrIdKey] = mongo_id_str + try: + self.process_session.commit() + except Exception: + self.process_session.rolback() + # TODO logging + # TODO report + error_msg = "Failed to store MongoID to entity's custom attribute" + report_msg = ( + "{}||SyncToAvalon action may solve this issue" + ).format(error_msg) + + self.report_items["warning"][report_msg].append(ent_path) + self.log.error( + "{}: \"{}\"".format(error_msg, ent_path), + exc_info=True + ) + + # modify cached data + # Skip if self._avalon_ents is not set(maybe never happen) + if self._avalon_ents is None: + return final_entity + + if self._avalon_ents is not None: + proj, ents = self._avalon_ents + ents.append(final_entity) + self._avalon_ents = (proj, ents) + + if self._avalon_ents_by_id is not None: + self._avalon_ents_by_id[mongo_id] = final_entity + + if self._avalon_ents_by_parent_id is not None: + self._avalon_ents_by_parent_id[vis_par].append(final_entity) + + if self._avalon_ents_by_ftrack_id is not None: + self._avalon_ents_by_ftrack_id[ftrack_ent["id"]] = final_entity + + if self._avalon_ents_by_name is not None: + self._avalon_ents_by_name[ftrack_ent["name"]] = final_entity + + return final_entity + + def get_cust_attr_values(self, entity, keys=None): + output = {} + custom_attrs, hier_attrs = self.avalon_custom_attributes + not_processed_keys = True + if keys: + not_processed_keys = [k for k in keys] + # Notmal custom attributes + processed_keys = [] + for attr in custom_attrs: + if not not_processed_keys: + break + key = attr["key"] + if key in processed_keys: + continue + + if key not in entity["custom_attributes"]: + continue + + if keys: + if key not in keys: + continue + else: + not_processed_keys.remove(key) + + output[key] = entity["custom_attributes"][key] + processed_keys.append(key) + + if not not_processed_keys: + return output + + # Hierarchical cust attrs + hier_keys = [] + defaults = {} + for attr in hier_attrs: + key = attr["key"] + if keys and key not in keys: + continue + hier_keys.append(key) + defaults[key] = attr["default"] + + hier_values = avalon_sync.get_hierarchical_attributes( + self.process_session, entity, hier_keys, defaults + ) + for key, val in hier_values.items(): + output[key] = val + + return output + + def process_renamed(self): + if not self.ftrack_renamed: + return + + ent_infos = self.ftrack_renamed + renamed_tasks = {} + not_found = {} + changeable_queue = queue.Queue() + for ftrack_id, ent_info in ent_infos.items(): + entity_type = ent_info["entity_type"] + new_name = ent_info["changes"]["name"]["new"] + old_name = ent_info["changes"]["name"]["old"] + if entity_type 
== "Task": + parent_id = ent_info["parentId"] + renamed_tasks[parent_id] = { + "new": new_name, + "old": old_name, + "ent_info": ent_info + } + continue + + ent_path = self.get_ent_path(ftrack_id) + avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if not avalon_ent: + # TODO logging + self.log.debug(( + "Can't change the name (Entity is not is avalon) <{}>" + ).format(ent_path)) + not_found[ftrack_id] = ent_info + continue + + if new_name == avalon_ent["name"]: + # TODO logging + self.log.debug(( + "Avalon entity already has the same name <{}>" + ).format(ent_path)) + continue + + mongo_id = avalon_ent["_id"] + if self.changeability_by_mongo_id[mongo_id]: + changeable_queue.put((ftrack_id, avalon_ent, new_name)) + else: + ftrack_ent = self.ftrack_ents_by_id[ftrack_id] + ftrack_ent["name"] = avalon_ent["name"] + try: + self.process_session.commit() + # TODO logging + # TODO report + error_msg = "Entity renamed back" + report_msg = ( + "{}||It is not possible to change" + " the name of an entity or it's parents, " + " if it already contained published data." + ).format(error_msg) + self.report_items["info"][report_msg].append(ent_path) + self.log.warning("{} <{}>".format(error_msg, ent_path)) + + except Exception: + self.process_session.rollback() + # TODO report + # TODO logging + error_msg = ( + "Couldn't rename the entity back to its original name" + ) + report_msg = ( + "{}||Renamed because it is not possible to" + " change the name of an entity or it's parents, " + " if it already contained published data." + ).format(error_msg) + error_traceback = ( + str(traceback.format_exc()).replace("\n", "
") + ).replace(" ", " ") + + item_msg = ent_path + "
" + error_traceback + self.report_items["warning"][report_msg].append(item_msg) + self.log.warning( + "{}: \"{}\"".format(error_msg, ent_path), + exc_info=True + ) + + old_names = [] + # Process renaming in Avalon DB + while not changeable_queue.empty(): + ftrack_id, avalon_ent, new_name = changeable_queue.get() + mongo_id = avalon_ent["_id"] + old_name = avalon_ent["name"] + + _entity_type = "asset" + if entity_type == "Project": + _entity_type = "project" + + passed_regex = avalon_sync.check_regex( + new_name, _entity_type, schema_patterns=self.regex_schemas + ) + if not passed_regex: + self.regex_failed.append(ftrack_id) + continue + + # if avalon does not have same name then can be changed + same_name_avalon_ent = self.avalon_ents_by_name.get(new_name) + if not same_name_avalon_ent: + old_val = self._avalon_ents_by_name.pop(old_name) + old_val["name"] = new_name + self._avalon_ents_by_name[new_name] = old_val + self.updates[mongo_id] = {"name": new_name} + self.renamed_in_avalon.append(mongo_id) + + old_names.append(old_name) + if new_name in old_names: + old_names.remove(new_name) + + # TODO logging + ent_path = self.get_ent_path(ftrack_id) + self.log.debug( + "Name of entity will be changed to \"{}\" <{}>".format( + new_name, ent_path + ) + ) + continue + + # Check if same name is in changable_queue + # - it's name may be changed in next iteration + same_name_ftrack_id = same_name_avalon_ent["data"]["ftrackId"] + same_is_unprocessed = False + for item in list(changeable_queue.queue): + if same_name_ftrack_id == item[0]: + same_is_unprocessed = True + break + + if same_is_unprocessed: + changeable_queue.put((ftrack_id, avalon_ent, new_name)) + continue + + self.duplicated.append(ftrack_id) + + if old_names: + self.check_names_synchronizable(old_names) + + for parent_id, task_change in renamed_tasks.items(): + avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id) + ent_info = task_change["ent_info"] + if not avalon_ent: + not_found[ent_info["entityId"]] = ent_info + continue + + new_name = task_change["new"] + old_name = task_change["old"] + passed_regex = avalon_sync.check_regex( + new_name, "task", schema_patterns=self.regex_schemas + ) + if not passed_regex: + ftrack_id = ent_info["enityId"] + self.regex_failed.append(ftrack_id) + continue + + mongo_id = avalon_ent["_id"] + if mongo_id not in self.task_changes_by_avalon_id: + self.task_changes_by_avalon_id[mongo_id] = ( + avalon_ent["data"]["tasks"] + ) + + if old_name in self.task_changes_by_avalon_id[mongo_id]: + self.task_changes_by_avalon_id[mongo_id].remove(old_name) + else: + parent_ftrack_ent = self.ftrack_ents_by_id.get(parent_id) + if not parent_ftrack_ent: + parent_ftrack_ent = self.process_session.query( + self.entities_query_by_id.format( + self.cur_project["id"], parent_id + ) + ).first() + + if parent_ftrack_ent: + self.ftrack_ents_by_id[parent_id] = parent_ftrack_ent + child_names = [] + for child in parent_ftrack_ent["children"]: + if child.entity_type.lower() != "task": + continue + child_names.append(child["name"]) + + tasks = [task for task in ( + self.task_changes_by_avalon_id[mongo_id] + )] + for task in tasks: + if task not in child_names: + self.task_changes_by_avalon_id[mongo_id].remove( + task + ) + + if new_name not in self.task_changes_by_avalon_id[mongo_id]: + self.task_changes_by_avalon_id[mongo_id].append(new_name) + + # not_found are not processed since all not found are + # not found because they are not synchronizable + + def process_added(self): + ent_infos = self.ftrack_added + if not 
ent_infos: + return + + cust_attrs, hier_attrs = self.avalon_cust_attrs + entity_type_conf_ids = {} + # Skip entities that already exist in avalon db and task entities + # - happens when entity was created by another sync event/action + pop_out_ents = [] + new_tasks_by_parent = collections.defaultdict(list) + _new_ent_infos = {} + for ftrack_id, ent_info in ent_infos.items(): + if self.avalon_ents_by_ftrack_id.get(ftrack_id): + pop_out_ents.append(ftrack_id) + self.log.warning( + "Added entity is already synchronized <{}>".format( + self.get_ent_path(ftrack_id) + ) + ) + continue + + entity_type = ent_info["entity_type"] + if entity_type == "Task": + parent_id = ent_info["parentId"] + new_tasks_by_parent[parent_id].append(ent_info) + pop_out_ents.append(ftrack_id) + continue + + configuration_id = entity_type_conf_ids.get(entity_type) + if not configuration_id: + for attr in cust_attrs: + key = attr["key"] + if key != CustAttrIdKey: + continue + + if attr["entity_type"] != ent_info["entityType"]: + continue + + if ent_info["entityType"] != "show": + if attr["object_type_id"] != ent_info["objectTypeId"]: + continue + + configuration_id = attr["id"] + entity_type_conf_ids[entity_type] = configuration_id + break + + _entity_key = collections.OrderedDict({ + "configuration_id": configuration_id, + "entity_id": ftrack_id + }) + + self.process_session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + _entity_key, + "value", + ftrack_api.symbol.NOT_SET, + "" + ) + ) + + try: + # Commit changes of mongo_id to empty string + self.process_session.commit() + self.log.debug("Committing unsetting") + except Exception: + self.process_session.rollback() + # TODO logging + msg = ( + "Could not set value of Custom attribute, where mongo id" + " is stored, to empty string. Ftrack ids: \"{}\"" + ).format(", ".join(ent_infos.keys())) + self.log.warning(msg, exc_info=True) + + for ftrack_id in pop_out_ents: + ent_infos.pop(ftrack_id) + + # sort by parents length (same as by hierarchy level) + _ent_infos = sorted( + ent_infos.values(), + key=(lambda ent_info: len(ent_info.get("parents", []))) + ) + to_sync_by_id = collections.OrderedDict() + for ent_info in _ent_infos: + ft_id = ent_info["entityId"] + to_sync_by_id[ft_id] = self.ftrack_ents_by_id[ft_id] + + # cache regex success (for tasks) + for ftrack_id, entity in to_sync_by_id.items(): + if entity.entity_type.lower() == "project": + raise Exception(( + "Project can't be created with event handler!"
+ "This is a bug" + )) + parent_id = entity["parent_id"] + parent_avalon = self.avalon_ents_by_ftrack_id.get(parent_id) + if not parent_avalon: + # TODO logging + self.log.debug(( + "Skipping synchronization of entity" + " because parent was not found in Avalon DB <{}>" + ).format(self.get_ent_path(ftrack_id))) + continue + + is_synchonizable = True + name = entity["name"] + passed_regex = avalon_sync.check_regex( + name, "asset", schema_patterns=self.regex_schemas + ) + if not passed_regex: + self.regex_failed.append(ftrack_id) + is_synchonizable = False + + if name in self.avalon_ents_by_name: + self.duplicated.append(ftrack_id) + is_synchonizable = False + + if not is_synchonizable: + continue + + self.create_entity_in_avalon(entity, parent_avalon) + + for parent_id, ent_infos in new_tasks_by_parent.items(): + avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id) + if not avalon_ent: + # TODO logging + self.log.debug(( + "Skipping synchronization of task" + " because parent was not found in Avalon DB <{}>" + ).format(self.get_ent_path(parent_id))) + continue + + mongo_id = avalon_ent["_id"] + if mongo_id not in self.task_changes_by_avalon_id: + self.task_changes_by_avalon_id[mongo_id] = ( + avalon_ent["data"]["tasks"] + ) + + for ent_info in ent_infos: + new_name = ent_info["changes"]["name"]["new"] + passed_regex = avalon_sync.check_regex( + new_name, "task", schema_patterns=self.regex_schemas + ) + if not passed_regex: + self.regex_failed.append(ent_infos["entityId"]) + continue + + if new_name not in self.task_changes_by_avalon_id[mongo_id]: + self.task_changes_by_avalon_id[mongo_id].append(new_name) + + def process_moved(self): + if not self.ftrack_moved: + return + + ftrack_moved = {k: v for k, v in sorted( + self.ftrack_moved.items(), + key=(lambda line: len( + (line[1].get("data", {}).get("parents") or []) + )) + )} + + for ftrack_id, ent_info in ftrack_moved.items(): + avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if not avalon_ent: + continue + + new_parent_id = ent_info["changes"]["parent_id"]["new"] + old_parent_id = ent_info["changes"]["parent_id"]["old"] + + mongo_id = avalon_ent["_id"] + if self.changeability_by_mongo_id[mongo_id]: + par_av_ent = self.avalon_ents_by_ftrack_id.get(new_parent_id) + if not par_av_ent: + # TODO logging + # TODO report + ent_path_items = [self.cur_project["full_name"]] + ent_path_items.extend(avalon_ent["data"]["parents"]) + ent_path_items.append(avalon_ent["name"]) + ent_path = "/".join(ent_path_items) + + error_msg = ( + "New parent of entity is not synchronized to avalon" + ) + report_msg = ( + "{}||Parent in Avalon can't be changed. That" + " may cause issues. Please fix parent or move entity" + " under valid entity." + ).format(error_msg) + + self.report_items["warning"][report_msg].append(ent_path) + self.log.warning("{} <{}>".format(error_msg, ent_path)) + continue + + # THIS MUST HAPPEND AFTER CREATING NEW ENTITIES !!!! 
+ # - because an entity may be moved under a newly created entity + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + + vis_par_id = None + if par_av_ent["type"].lower() != "project": + vis_par_id = par_av_ent["_id"] + self.updates[mongo_id]["data"]["visualParent"] = vis_par_id + self.moved_in_avalon.append(mongo_id) + + # TODO logging + ent_path_items = [self.cur_project["full_name"]] + ent_path_items.extend(par_av_ent["data"]["parents"]) + ent_path_items.append(par_av_ent["name"]) + ent_path_items.append(avalon_ent["name"]) + ent_path = "/".join(ent_path_items) + self.log.debug(( + "Parent of entity ({}) was changed in avalon <{}>" + ).format(str(mongo_id), ent_path) + ) + + else: + avalon_ent = self.avalon_ents_by_id[mongo_id] + avalon_parent_id = avalon_ent["data"]["visualParent"] + if avalon_parent_id is None: + avalon_parent_id = avalon_ent["parent"] + + avalon_parent = self.avalon_ents_by_id[avalon_parent_id] + parent_id = avalon_parent["data"]["ftrackId"] + + # For cases when parent was deleted at the same time + if parent_id in self.ftrack_recreated_mapping: + parent_id = ( + self.ftrack_recreated_mapping[parent_id] + ) + + ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) + if not ftrack_ent: + ftrack_ent = self.process_session.query( + self.entities_query_by_id.format( + self.cur_project["id"], ftrack_id + ) + ).one() + self.ftrack_ents_by_id[ftrack_id] = ftrack_ent + + if parent_id == ftrack_ent["parent_id"]: + continue + + ftrack_ent["parent_id"] = parent_id + try: + self.process_session.commit() + # TODO logging + # TODO report + msg = "Entity was moved back" + report_msg = ( + "{}||Entity can't be moved when" + " it or its children contain published data" + ).format(msg) + ent_path = self.get_ent_path(ftrack_id) + self.report_items["info"][report_msg].append(ent_path) + self.log.warning("{} <{}>".format(msg, ent_path)) + + except Exception: + self.process_session.rollback() + # TODO logging + # TODO report + error_msg = ( + "Couldn't move the entity back to its original parent" + ) + report_msg = ( + "{}||Moved back because it is not possible to" + " move an entity or its parents, " + " if it already contains published data." + ).format(error_msg) + error_traceback = ( + str(traceback.format_exc()).replace("\n", "<br>
") + ).replace(" ", " ") + + item_msg = ent_path + "
" + error_traceback + self.report_items["warning"][report_msg].append(item_msg) + self.log.warning( + "{}: \"{}\"".format(error_msg, ent_path), + exc_info=True + ) + + def process_updated(self): + # Only custom attributes changes should get here + if not self.ftrack_updated: + return + + ent_infos = self.ftrack_updated + ftrack_mongo_mapping = {} + not_found_ids = [] + for ftrack_id, ent_info in ent_infos.items(): + avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if not avalon_ent: + not_found_ids.append(ftrack_id) + continue + + ftrack_mongo_mapping[ftrack_id] = avalon_ent["_id"] + + for ftrack_id in not_found_ids: + ent_infos.pop(ftrack_id) + + if not ent_infos: + return + + cust_attrs, hier_attrs = self.avalon_cust_attrs + cust_attrs_by_obj_id = collections.defaultdict(dict) + for cust_attr in cust_attrs: + key = cust_attr["key"] + if key.startswith("avalon_"): + continue + + ca_ent_type = cust_attr["entity_type"] + + if ca_ent_type == "show": + cust_attrs_by_obj_id[ca_ent_type][key] = cust_attr + else: + obj_id = cust_attr["object_type_id"] + cust_attrs_by_obj_id[obj_id][key] = cust_attr + + hier_attrs_keys = [attr["key"] for attr in hier_attrs] + + for ftrack_id, ent_info in ent_infos.items(): + mongo_id = ftrack_mongo_mapping[ftrack_id] + entType = ent_info["entityType"] + ent_path = self.get_ent_path(ftrack_id) + if entType == "show": + ent_cust_attrs = cust_attrs_by_obj_id.get("show") + else: + obj_type_id = ent_info["objectTypeId"] + ent_cust_attrs = cust_attrs_by_obj_id.get(obj_type_id) + + for key, values in ent_info["changes"].items(): + if key in hier_attrs_keys: + self.hier_cust_attrs_changes[key].append(ftrack_id) + continue + + if key not in ent_cust_attrs: + continue + + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + value = values["new"] + self.updates[mongo_id]["data"][key] = value + self.log.debug( + "Setting data value of \"{}\" to \"{}\" <{}>".format( + key, value, ent_path + ) + ) + + if entType != "show" or key != "applications": + continue + + # Store apps to project't config + apps_str = ent_info["changes"]["applications"]["new"] + cust_attr_apps = [app for app in apps_str.split(", ") if app] + + proj_apps, warnings = ( + avalon_sync.get_project_apps(cust_attr_apps) + ) + if "config" not in self.updates[mongo_id]: + self.updates[mongo_id]["config"] = {} + self.updates[mongo_id]["config"]["apps"] = proj_apps + + for msg, items in warnings.items(): + if not msg or not items: + continue + self.report_items["warning"][msg] = items + + def process_hier_cleanup(self): + if ( + not self.moved_in_avalon and + not self.renamed_in_avalon and + not self.hier_cust_attrs_changes and + not self.task_changes_by_avalon_id ): return - # check if project have Custom Attribute 'avalon_mongo_id' - if ca_mongoid not in ft_project['custom_attributes']: - message = ( - "Custom attribute '{}' for 'Project' is not created" - " or don't have set permissions for API" - ).format(ca_mongoid) - self.log.warning(message) - self.show_message(event, message, False) - return + parent_changes = [] + hier_cust_attrs_ids = [] + hier_cust_attrs_keys = [] + all_keys = False + for mongo_id in self.moved_in_avalon: + parent_changes.append(mongo_id) + hier_cust_attrs_ids.append(mongo_id) + all_keys = True - # get avalon project if possible - import_entities = [] + for mongo_id in self.renamed_in_avalon: + if mongo_id not in parent_changes: + parent_changes.append(mongo_id) - custom_attributes = lib.get_avalon_attr(session) - - avalon_project = 
lib.get_avalon_project(ft_project) - if avalon_project is None: - import_entities.append(ft_project) - - for entity in entities: - if entity.entity_type.lower() in ['task']: - entity = entity['parent'] - - if 'custom_attributes' not in entity: + for key, ftrack_ids in self.hier_cust_attrs_changes.items(): + if key.startswith("avalon_"): continue - if ca_mongoid not in entity['custom_attributes']: + for ftrack_id in ftrack_ids: + avalon_ent = self.avalon_ents_by_ftrack_id[ftrack_id] + mongo_id = avalon_ent["_id"] + if mongo_id in hier_cust_attrs_ids: + continue + hier_cust_attrs_ids.append(mongo_id) + if not all_keys and key not in hier_cust_attrs_keys: + hier_cust_attrs_keys.append(key) - message = ( - "Custom attribute '{}' for '{}' is not created" - " or don't have set permissions for API" - ).format(ca_mongoid, entity.entity_type) + # Tasks preparation **** + for mongo_id, tasks in self.task_changes_by_avalon_id.items(): + avalon_ent = self.avalon_ents_by_id[mongo_id] + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} - self.log.warning(message) - self.show_message(event, message, False) - return + self.updates[mongo_id]["data"]["tasks"] = tasks - if entity not in import_entities: - import_entities.append(entity) + # Parents preparation *** + mongo_to_ftrack_parents = {} + missing_ftrack_ents = {} + for mongo_id in parent_changes: + avalon_ent = self.avalon_ents_by_id[mongo_id] + ftrack_id = avalon_ent["data"]["ftrackId"] + if ftrack_id not in self.ftrack_ents_by_id: + missing_ftrack_ents[ftrack_id] = mongo_id + continue + ftrack_ent = self.ftrack_ents_by_id[ftrack_id] + mongo_to_ftrack_parents[mongo_id] = len(ftrack_ent["link"]) - if len(import_entities) < 1: + if missing_ftrack_ents: + joine_ids = ", ".join( + ["\"{}\"".format(id) for id in missing_ftrack_ents.keys()] + ) + entities = self.process_session.query( + self.entities_query_by_id.format( + self.cur_project["id"], joine_ids + ) + ).all() + for entity in entities: + ftrack_id = entity["id"] + self.ftrack_ents_by_id[ftrack_id] = entity + mongo_id = missing_ftrack_ents[ftrack_id] + mongo_to_ftrack_parents[mongo_id] = len(entity["link"]) + + stored_parents_by_mongo = {} + # sort by hierarchy level + mongo_to_ftrack_parents = [k for k, v in sorted( + mongo_to_ftrack_parents.items(), + key=(lambda item: item[1]) + )] + self.log.debug( + "Updating parents and hieararchy because of name/parenting changes" + ) + for mongo_id in mongo_to_ftrack_parents: + avalon_ent = self.avalon_ents_by_id[mongo_id] + vis_par = avalon_ent["data"]["visualParent"] + if vis_par in stored_parents_by_mongo: + parents = [par for par in stored_parents_by_mongo[vis_par]] + if vis_par is not None: + parent_ent = self.avalon_ents_by_id[vis_par] + parents.append(parent_ent["name"]) + stored_parents_by_mongo[mongo_id] = parents + continue + + ftrack_id = avalon_ent["data"]["ftrackId"] + ftrack_ent = self.ftrack_ents_by_id[ftrack_id] + ent_path_items = [ent["name"] for ent in ftrack_ent["link"]] + parents = ent_path_items[1:len(ent_path_items)-1:] + stored_parents_by_mongo[mongo_id] = parents + + for mongo_id, parents in stored_parents_by_mongo.items(): + avalon_ent = self.avalon_ents_by_id[mongo_id] + cur_par = avalon_ent["data"]["parents"] + if cur_par == parents: + continue + + hierarchy = "" + if len(parents) > 0: + hierarchy = os.path.sep.join(parents) + + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + self.updates[mongo_id]["data"]["parents"] = parents + 
self.updates[mongo_id]["data"]["hierarchy"] = hierarchy + + # Skip custom attributes if didn't change + if not hier_cust_attrs_ids: + # TODO logging + self.log.debug( + "Hierarchical attributes were not changed. Skipping" + ) + self.update_entities() return - try: - for entity in import_entities: - result = lib.import_to_avalon( - session=session, - entity=entity, - ft_project=ft_project, - av_project=avalon_project, - custom_attributes=custom_attributes + cust_attrs, hier_attrs = self.avalon_cust_attrs + + # Hierarchical custom attributes preparation *** + if all_keys: + hier_cust_attrs_keys = [ + attr["key"] for attr in hier_attrs if ( + not attr["key"].startswith("avalon_") ) - if 'errors' in result and len(result['errors']) > 0: - session.commit() - lib.show_errors(self, event, result['errors']) - - return - - if avalon_project is None: - if 'project' in result: - avalon_project = result['project'] - - except Exception as e: - session.reset() # reset session to clear it - - message = str(e) - title = 'Hey You! Unknown Error has been raised! (*look below*)' - ftrack_message = ( - 'SyncToAvalon event ended with unexpected error' - ' please check log file or contact Administrator' - ' for more information.' - ) - items = [ - {'type': 'label', 'value': '# Fatal Error'}, - {'type': 'label', 'value': '
<p>{}</p>
'.format(ftrack_message)} ] - self.show_interface(items, title, event=event) - self.log.error( - 'Fatal error during sync: {}'.format(message), exc_info=True + + mongo_ftrack_mapping = {} + cust_attrs_ftrack_ids = [] + # ftrack_parenting = collections.defaultdict(list) + entities_dict = collections.defaultdict(dict) + + children_queue = queue.Queue() + parent_queue = queue.Queue() + + for mongo_id in hier_cust_attrs_ids: + avalon_ent = self.avalon_ents_by_id[mongo_id] + parent_queue.put(avalon_ent) + ftrack_id = avalon_ent["data"]["ftrackId"] + if ftrack_id not in entities_dict: + entities_dict[ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + + mongo_ftrack_mapping[mongo_id] = ftrack_id + cust_attrs_ftrack_ids.append(ftrack_id) + children_ents = self.avalon_ents_by_parent_id.get(mongo_id) or [] + for children_ent in children_ents: + _ftrack_id = children_ent["data"]["ftrackId"] + if _ftrack_id in entities_dict: + continue + + entities_dict[_ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + # if _ftrack_id not in ftrack_parenting[ftrack_id]: + # ftrack_parenting[ftrack_id].append(_ftrack_id) + entities_dict[_ftrack_id]["parent_id"] = ftrack_id + if _ftrack_id not in entities_dict[ftrack_id]["children"]: + entities_dict[ftrack_id]["children"].append(_ftrack_id) + children_queue.put(children_ent) + + while not children_queue.empty(): + avalon_ent = children_queue.get() + mongo_id = avalon_ent["_id"] + ftrack_id = avalon_ent["data"]["ftrackId"] + if ftrack_id in cust_attrs_ftrack_ids: + continue + + mongo_ftrack_mapping[mongo_id] = ftrack_id + cust_attrs_ftrack_ids.append(ftrack_id) + + children_ents = self.avalon_ents_by_parent_id.get(mongo_id) or [] + for children_ent in children_ents: + _ftrack_id = children_ent["data"]["ftrackId"] + if _ftrack_id in entities_dict: + continue + + entities_dict[_ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + entities_dict[_ftrack_id]["parent_id"] = ftrack_id + if _ftrack_id not in entities_dict[ftrack_id]["children"]: + entities_dict[ftrack_id]["children"].append(_ftrack_id) + children_queue.put(children_ent) + + while not parent_queue.empty(): + avalon_ent = parent_queue.get() + if avalon_ent["type"].lower() == "project": + continue + + ftrack_id = avalon_ent["data"]["ftrackId"] + + vis_par = avalon_ent["data"]["visualParent"] + if vis_par is None: + vis_par = avalon_ent["parent"] + + parent_ent = self.avalon_ents_by_id[vis_par] + parent_ftrack_id = parent_ent["data"]["ftrackId"] + if parent_ftrack_id not in entities_dict: + entities_dict[parent_ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + + if ftrack_id not in entities_dict[parent_ftrack_id]["children"]: + entities_dict[parent_ftrack_id]["children"].append(ftrack_id) + + entities_dict[ftrack_id]["parent_id"] = parent_ftrack_id + + if parent_ftrack_id in cust_attrs_ftrack_ids: + continue + mongo_ftrack_mapping[vis_par] = parent_ftrack_id + cust_attrs_ftrack_ids.append(parent_ftrack_id) + # if ftrack_id not in ftrack_parenting[parent_ftrack_id]: + # ftrack_parenting[parent_ftrack_id].append(ftrack_id) + + parent_queue.put(parent_ent) + + # Prepare values to query + entity_ids_joined = ", ".join([ + "\"{}\"".format(id) for id in cust_attrs_ftrack_ids + ]) + attributes_joined = ", ".join([ + "\"{}\"".format(name) for name in hier_cust_attrs_keys + ]) + + queries = [{ + "action": "query", + "expression": ( + "select value, entity_id from CustomAttributeValue " + "where entity_id in ({}) 
and configuration.key in ({})" + ).format(entity_ids_joined, attributes_joined) + }] + + if hasattr(self.process_session, "call"): + [values] = self.process_session.call(queries) + else: + [values] = self.process_session._call(queries) + + ftrack_project_id = self.cur_project["id"] + + for attr in hier_attrs: + key = attr["key"] + if key not in hier_cust_attrs_keys: + continue + entities_dict[ftrack_project_id]["hier_attrs"][key] = ( + attr["default"] ) - return + # PREPARE DATA BEFORE THIS + avalon_hier = [] + for value in values["data"]: + if value["value"] is None: + continue + entity_id = value["entity_id"] + key = value["configuration"]["key"] + entities_dict[entity_id]["hier_attrs"][key] = value["value"] + + # Get dictionary with not None hierarchical values to pull to childs + project_values = {} + for key, value in ( + entities_dict[ftrack_project_id]["hier_attrs"].items() + ): + if value is not None: + project_values[key] = value + + for key in avalon_hier: + value = entities_dict[ftrack_project_id]["avalon_attrs"][key] + if value is not None: + project_values[key] = value + + hier_down_queue = queue.Queue() + hier_down_queue.put((project_values, ftrack_project_id)) + + while not hier_down_queue.empty(): + hier_values, parent_id = hier_down_queue.get() + for child_id in entities_dict[parent_id]["children"]: + _hier_values = hier_values.copy() + for name in hier_cust_attrs_keys: + value = entities_dict[child_id]["hier_attrs"].get(name) + if value is not None: + _hier_values[name] = value + + entities_dict[child_id]["hier_attrs"].update(_hier_values) + hier_down_queue.put((_hier_values, child_id)) + + ftrack_mongo_mapping = {} + for mongo_id, ftrack_id in mongo_ftrack_mapping.items(): + ftrack_mongo_mapping[ftrack_id] = mongo_id + + for ftrack_id, data in entities_dict.items(): + mongo_id = ftrack_mongo_mapping[ftrack_id] + avalon_ent = self.avalon_ents_by_id[mongo_id] + ent_path = self.get_ent_path(ftrack_id) + # TODO logging + self.log.debug( + "Updating hierarchical attributes <{}>".format(ent_path) + ) + for key, value in data["hier_attrs"].items(): + if ( + key in avalon_ent["data"] and + avalon_ent["data"][key] == value + ): + continue + + self.log.debug("- {}: {}".format(key, value)) + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + + self.updates[mongo_id]["data"][key] = value + + self.update_entities() + + def update_entities(self): + mongo_changes_bulk = [] + for mongo_id, changes in self.updates.items(): + filter = {"_id": mongo_id} + change_data = avalon_sync.from_dict_to_set(changes) + mongo_changes_bulk.append(UpdateOne(filter, change_data)) + + if not mongo_changes_bulk: + return + + self.dbcon.bulk_write(mongo_changes_bulk) + self.updates = collections.defaultdict(dict) + + @property + def duplicated_report(self): + if not self.duplicated: + return [] + + ft_project = self.cur_project + duplicated_names = [] + for ftrack_id in self.duplicated: + ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) + if not ftrack_ent: + ftrack_ent = self.process_session.query( + self.entities_query_by_id.format( + ft_project["id"], ftrack_id + ) + ).one() + self.ftrack_ents_by_id[ftrack_id] = ftrack_ent + name = ftrack_ent["name"] + if name not in duplicated_names: + duplicated_names.append(name) + + joined_names = ", ".join( + ["\"{}\"".format(name) for name in duplicated_names] + ) + ft_ents = self.process_session.query( + self.entities_name_query_by_name.format( + ft_project["id"], joined_names + ) + ).all() + + ft_ents_by_name = 
collections.defaultdict(list) + for ft_ent in ft_ents: + name = ft_ent["name"] + ft_ents_by_name[name].append(ft_ent) + + if not ft_ents_by_name: + return [] + + subtitle = "Duplicated entity names:" + items = [] + items.append({ + "type": "label", + "value": "# {}".format(subtitle) + }) + items.append({ + "type": "label", + "value": ( + "
<p>NOTE: It is not allowed to use the same name" + " for multiple entities in the same project</p>
" + ) + }) + + for name, ents in ft_ents_by_name.items(): + items.append({ + "type": "label", + "value": "## {}".format(name) + }) + paths = [] + for ent in ents: + ftrack_id = ent["id"] + ent_path = "/".join([_ent["name"] for _ent in ent["link"]]) + avalon_ent = self.avalon_ents_by_id.get(ftrack_id) + + if avalon_ent: + additional = " (synchronized)" + if avalon_ent["name"] != name: + additional = " (synchronized as {})".format( + avalon_ent["name"] + ) + ent_path += additional + paths.append(ent_path) + + items.append({ + "type": "label", + "value": '
<p>{}</p>
'.format("
".join(paths)) + }) + + return items + + @property + def regex_report(self): + if not self.regex_failed: + return [] + + subtitle = "Entity names contain prohibited symbols:" + items = [] + items.append({ + "type": "label", + "value": "# {}".format(subtitle) + }) + items.append({ + "type": "label", + "value": ( + "
<p>NOTE: You can use Letters( a-Z )," + " Numbers( 0-9 ) and Underscore( _ )</p>
" + ) + }) + + ft_project = self.cur_project + for ftrack_id in self.regex_failed: + ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) + if not ftrack_ent: + ftrack_ent = self.process_session.query( + self.entities_query_by_id.format( + ft_project["id"], ftrack_id + ) + ).one() + self.ftrack_ents_by_id[ftrack_id] = ftrack_ent + + name = ftrack_ent["name"] + ent_path_items = [_ent["name"] for _ent in ftrack_ent["link"][:-1]] + ent_path_items.append("{}".format(name)) + ent_path = "/".join(ent_path_items) + items.append({ + "type": "label", + "value": "
<p>{} - {}</p>
".format(name, ent_path) + }) + + return items + + def report(self): + msg_len = len(self.duplicated) + len(self.regex_failed) + for msgs in self.report_items.values(): + msg_len += len(msgs) + + if msg_len == 0: + return + + items = [] + project_name = self.cur_project["full_name"] + title = "Synchronization report ({}):".format(project_name) + + keys = ["error", "warning", "info"] + for key in keys: + subitems = [] + if key == "warning": + subitems.extend(self.duplicated_report) + subitems.extend(self.regex_report) + + for _msg, _items in self.report_items[key].items(): + if not _items: + continue + + msg_items = _msg.split("||") + msg = msg_items[0] + subitems.append({ + "type": "label", + "value": "# {}".format(msg) + }) + + if len(msg_items) > 1: + for note in msg_items[1:]: + subitems.append({ + "type": "label", + "value": "
<p>NOTE: {}</p>
".format(note) + }) + + if isinstance(_items, str): + _items = [_items] + subitems.append({ + "type": "label", + "value": '

{}

'.format("
".join(_items)) + }) + + if items and subitems: + items.append(self.report_splitter) + + items.extend(subitems) + + self.show_interface( + items=items, + title=title, + event=self._cur_event + ) + return True def register(session, plugins_presets): '''Register plugin. Called when used as an plugin.''' - Sync_to_Avalon(session, plugins_presets).register() + SyncToAvalonEvent(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_test.py b/pype/ftrack/events/event_test.py index a909aa5510..1dfd8ee8b1 100644 --- a/pype/ftrack/events/event_test.py +++ b/pype/ftrack/events/event_test.py @@ -1,11 +1,11 @@ import os import sys import re -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack import BaseEvent -class Test_Event(BaseEvent): +class TestEvent(BaseEvent): ignore_me = True @@ -23,4 +23,4 @@ class Test_Event(BaseEvent): def register(session, plugins_presets): '''Register plugin. Called when used as an plugin.''' - Test_Event(session, plugins_presets).register() + TestEvent(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_thumbnail_updates.py b/pype/ftrack/events/event_thumbnail_updates.py index a74aad6bd9..5421aa7543 100644 --- a/pype/ftrack/events/event_thumbnail_updates.py +++ b/pype/ftrack/events/event_thumbnail_updates.py @@ -1,4 +1,3 @@ -from pype.vendor import ftrack_api from pype.ftrack import BaseEvent @@ -26,33 +25,41 @@ class ThumbnailEvents(BaseEvent): # Update task thumbnail from published version # if (entity['entityType'] == 'assetversion' and # entity['action'] == 'encoded'): - if ( - entity['entityType'] == 'assetversion' - and 'thumbid' in (entity.get('keys') or []) + elif ( + entity['entityType'] == 'assetversion' and + entity['action'] != 'remove' and + 'thumbid' in (entity.get('keys') or []) ): version = session.get('AssetVersion', entity['entityId']) + if not version: + continue + thumbnail = version.get('thumbnail') - if thumbnail: - parent = version['asset']['parent'] - task = version['task'] - parent['thumbnail_id'] = version['thumbnail_id'] - if parent.entity_type.lower() == "project": - name = parent["full_name"] - else: - name = parent["name"] - msg = '>>> Updating thumbnail for shot [ {} ]'.format(name) + if not thumbnail: + continue - if task: - task['thumbnail_id'] = version['thumbnail_id'] - msg += " and task [ {} ]".format(task["name"]) + parent = version['asset']['parent'] + task = version['task'] + parent['thumbnail_id'] = version['thumbnail_id'] + if parent.entity_type.lower() == "project": + name = parent["full_name"] + else: + name = parent["name"] + msg = '>>> Updating thumbnail for shot [ {} ]'.format(name) - self.log.info(msg) + if task: + task['thumbnail_id'] = version['thumbnail_id'] + msg += " and task [ {} ]".format(task["name"]) - session.commit() + self.log.info(msg) + + try: + session.commit() + except Exception: + session.rollback() def register(session, plugins_presets): '''Register plugin. 
Called when used as an plugin.''' - ThumbnailEvents(session, plugins_presets).register() diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py index 61699d736c..87994d34b2 100644 --- a/pype/ftrack/events/event_user_assigment.py +++ b/pype/ftrack/events/event_user_assigment.py @@ -1,12 +1,15 @@ -from pype.vendor import ftrack_api -from pype.ftrack import BaseEvent, lib -from pype.ftrack.lib.io_nonsingleton import DbConnector -from bson.objectid import ObjectId -from pypeapp import config -from pypeapp import Anatomy -import subprocess import os import re +import subprocess + +from pype.ftrack import BaseEvent +from pype.ftrack.lib.avalon_sync import CustAttrIdKey +from pype.ftrack.lib.io_nonsingleton import DbConnector + +from bson.objectid import ObjectId + +from pypeapp import config +from pypeapp import Anatomy class UserAssigmentEvent(BaseEvent): @@ -36,7 +39,6 @@ class UserAssigmentEvent(BaseEvent): """ db_con = DbConnector() - ca_mongoid = lib.get_ca_mongoid() def error(self, *err): for e in err: @@ -105,7 +107,7 @@ class UserAssigmentEvent(BaseEvent): self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name'] avalon_entity = None - parent_id = parent['custom_attributes'].get(self.ca_mongoid) + parent_id = parent['custom_attributes'].get(CustAttrIdKey) if parent_id: parent_id = ObjectId(parent_id) avalon_entity = self.db_con.find_one({ diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index 66a55c0cf7..1f5f1514d7 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -1,72 +1,135 @@ -from pype.vendor import ftrack_api from pype.ftrack import BaseEvent +from pypeapp import config class VersionToTaskStatus(BaseEvent): + # Presets usage + default_status_mapping = {} + def launch(self, session, event): '''Propagates status from version to task when changed''' - session.commit() # start of event procedure ---------------------------------- for entity in event['data'].get('entities', []): - # Filter non-assetversions - if ( - entity['entityType'] == 'assetversion' and - 'statusid' in (entity.get('keys') or []) - ): + # Filter AssetVersions + if entity["entityType"] != "assetversion": + continue - version = session.get('AssetVersion', entity['entityId']) - try: - version_status = session.get( - 'Status', entity['changes']['statusid']['new'] - ) - except Exception: + # Skip if statusid not in keys (in changes) + keys = entity.get("keys") + if not keys or "statusid" not in keys: + continue + + # Get new version task name + version_status_id = ( + entity + .get("changes", {}) + .get("statusid", {}) + .get("new", {}) + ) + + # Just check that `new` is set to any value + if not version_status_id: + continue + + try: + version_status = session.get("Status", version_status_id) + except Exception: + self.log.warning( + "Troubles with query status id [ {} ]".format( + version_status_id + ), + exc_info=True + ) + + if not version_status: + continue + + version_status_orig = version_status["name"] + + # Load status mapping from presets + status_mapping = ( + config.get_presets() + .get("ftrack", {}) + .get("ftrack_config", {}) + .get("status_version_to_task") + ) or self.default_status_mapping + + # Skip if mapping is empty + if not status_mapping: + continue + + # Lower version status name and check if has mapping + version_status = version_status_orig.lower() + new_status_names = 
status_mapping.get(version_status) + if not new_status_names: + continue + + self.log.debug( + "Processing AssetVersion status change: [ {} ]".format( + version_status_orig + ) + ) + + # Backwards compatibility (convert string to list) + if isinstance(new_status_names, str): + new_status_names = [new_status_names] + + # Lower all names from presets + new_status_names = [name.lower() for name in new_status_names] + + # Get entities necessary for processing + version = session.get("AssetVersion", entity["entityId"]) + task = version.get("task") + if not task: + continue + + project_schema = task["project"]["project_schema"] + # Get all available statuses for Task + statuses = project_schema.get_statuses("Task", task["type_id"]) + # Map lowered status names to their status objects + stat_names_low = { + status["name"].lower(): status for status in statuses + } + + new_status = None + for status_name in new_status_names: + if status_name not in stat_names_low: continue - task_status = version_status - task = version['task'] - self.log.info('>>> version status: [ {} ]'.format( - version_status['name'])) - status_to_set = None - # Filter to versions with status change to "render complete" - if version_status['name'].lower() == 'reviewed': - status_to_set = 'Change requested' + # store object of found status + new_status = stat_names_low[status_name] + self.log.debug("Status to set: [ {} ]".format( + new_status["name"] + )) + break - if version_status['name'].lower() == 'approved': - status_to_set = 'Complete' + # Skip if none of the status names was found for this particular entity + if not new_status: + self.log.warning( + "None of the statuses from presets could be set: {}".format( + str(new_status_names) + ) + ) + continue - self.log.info( - '>>> status to set: [ {} ]'.format(status_to_set)) + # Get full path to task for logging + ent_path = "/".join([ent["name"] for ent in task["link"]]) - if status_to_set is not None: - query = 'Status where name is "{}"'.format(status_to_set) - try: - task_status = session.query(query).one() - except Exception: - self.log.info( - '!!! status was not found in Ftrack [ {} ]'.format( - status_to_set - )) - continue - - # Proceed if the task status was set - if task_status is not None: - # Get path to task - path = task['name'] - for p in task['ancestors']: - path = p['name'] + '/' + path - - # Setting task status - try: - task['status'] = task_status - session.commit() - except Exception as e: - self.log.warning('!!! 
[ {} ] status couldnt be set:\ - [ {} ]'.format(path, e)) - else: - self.log.info('>>> [ {} ] updated to [ {} ]'.format( - path, task_status['name'])) + # Setting task status + try: + task["status"] = new_status + session.commit() + self.log.debug("[ {} ] Status updated to [ {} ]".format( + ent_path, new_status['name'] + )) + except Exception: + session.rollback() + self.log.warning( + "[ {} ]Status couldn't be set".format(ent_path), + exc_info=True + ) def register(session, plugins_presets): diff --git a/pype/ftrack/ftrack_server/__init__.py b/pype/ftrack/ftrack_server/__init__.py index 0861a1bc08..fcae4e0690 100644 --- a/pype/ftrack/ftrack_server/__init__.py +++ b/pype/ftrack/ftrack_server/__init__.py @@ -1 +1,2 @@ from .ftrack_server import FtrackServer +from .lib import check_ftrack_url diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index ba23857394..b09b0bc84e 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -7,13 +7,12 @@ import socket import argparse import atexit import time -from urllib.parse import urlparse -import requests -from pype.vendor import ftrack_api +import ftrack_api from pype.ftrack.lib import credentials -from pype.ftrack.ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import ftrack_events_mongo_settings +from pype.ftrack.ftrack_server.lib import ( + ftrack_events_mongo_settings, check_ftrack_url +) import socket_thread @@ -25,36 +24,6 @@ class MongoPermissionsError(Exception): super().__init__(message) -def check_ftrack_url(url, log_errors=True): - """Checks if Ftrack server is responding""" - if not url: - print('ERROR: Ftrack URL is not set!') - return None - - url = url.strip('/ ') - - if 'http' not in url: - if url.endswith('ftrackapp.com'): - url = 'https://' + url - else: - url = 'https://{0}.ftrackapp.com'.format(url) - try: - result = requests.get(url, allow_redirects=False) - except requests.exceptions.RequestException: - if log_errors: - print('ERROR: Entered Ftrack URL is not accesible!') - return False - - if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): - if log_errors: - print('ERROR: Entered Ftrack URL is not accesible!') - return False - - print('DEBUG: Ftrack server {} is accessible.'.format(url)) - - return url - - def check_mongo_url(host, port, log_error=False): """Checks if mongo server is responding""" sock = None @@ -96,9 +65,8 @@ def validate_credentials(url, user, api): except Exception as e: print( 'ERROR: Can\'t log into Ftrack with used credentials:' - ' Ftrack server: "{}" // Username: {} // API key: {}'.format( - url, user, api - )) + ' Ftrack server: "{}" // Username: {} // API key: {}' + ).format(url, user, api) return False print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format( @@ -176,9 +144,9 @@ def legacy_server(ftrack_url): ).format(str(max_fail_count), str(wait_time_after_max_fail))) subproc_failed_count += 1 elif (( - datetime.datetime.now() - subproc_last_failed - ).seconds > wait_time_after_max_fail): - subproc_failed_count = 0 + datetime.datetime.now() - subproc_last_failed + ).seconds > wait_time_after_max_fail): + subproc_failed_count = 0 # If thread failed test Ftrack and Mongo connection elif subproc.poll() is not None: @@ -268,6 +236,7 @@ def main_loop(ftrack_url): # Run threads only if Ftrack is accessible if not ftrack_accessible or not mongo_accessible: if not mongo_accessible and not printed_mongo_error: + mongo_url = 
mongo_hostname + ":" + mongo_port print("Can't access Mongo {}".format(mongo_url)) if not ftrack_accessible and not printed_ftrack_error: @@ -305,9 +274,9 @@ def main_loop(ftrack_url): ).format(str(max_fail_count), str(wait_time_after_max_fail))) storer_failed_count += 1 elif (( - datetime.datetime.now() - storer_last_failed - ).seconds > wait_time_after_max_fail): - storer_failed_count = 0 + datetime.datetime.now() - storer_last_failed + ).seconds > wait_time_after_max_fail): + storer_failed_count = 0 # If thread failed test Ftrack and Mongo connection elif not storer_thread.isAlive(): @@ -341,13 +310,13 @@ def main_loop(ftrack_url): processor_failed_count += 1 elif (( - datetime.datetime.now() - processor_last_failed - ).seconds > wait_time_after_max_fail): - processor_failed_count = 0 + datetime.datetime.now() - processor_last_failed + ).seconds > wait_time_after_max_fail): + processor_failed_count = 0 # If thread failed test Ftrack and Mongo connection elif not processor_thread.isAlive(): - if storer_thread.mongo_error: + if processor_thread.mongo_error: raise Exception( "Exiting because have issue with acces to MongoDB" ) diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py index 12b046c510..eebc3f6ec4 100644 --- a/pype/ftrack/ftrack_server/ftrack_server.py +++ b/pype/ftrack/ftrack_server/ftrack_server.py @@ -2,7 +2,7 @@ import os import sys import types import importlib -from pype.vendor import ftrack_api +import ftrack_api import time import logging import inspect @@ -100,7 +100,10 @@ class FtrackServer: log.warning(msg, exc_info=e) if len(register_functions_dict) < 1: - raise Exception + raise Exception(( + "There are no events with register function." + " Registered paths: \"{}\"" + ).format("| ".join(paths))) # Load presets for setting plugins key = "user" diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 12159693fe..edd3cee09b 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -1,9 +1,32 @@ import os +import sys +import logging +import getpass +import atexit +import tempfile +import threading +import datetime +import time +import queue +import pymongo + +import requests +import ftrack_api +import ftrack_api.session +import ftrack_api.cache +import ftrack_api.operation +import ftrack_api._centralized_storage_scenario +import ftrack_api.event +from ftrack_api.logging import LazyLogMessage as L try: from urllib.parse import urlparse, parse_qs except ImportError: from urlparse import urlparse, parse_qs +from pypeapp import Logger + +from pype.ftrack.lib.custom_db_connector import DbConnector + def ftrack_events_mongo_settings(): host = None @@ -48,7 +71,9 @@ def ftrack_events_mongo_settings(): def get_ftrack_event_mongo_info(): - host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings() + host, port, database, username, password, collection, auth_db = ( + ftrack_events_mongo_settings() + ) user_pass = "" if username and password: user_pass = "{}:{}@".format(username, password) @@ -66,3 +91,333 @@ def get_ftrack_event_mongo_info(): url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth) return url, database, collection + + +def check_ftrack_url(url, log_errors=True): + """Checks if Ftrack server is responding""" + if not url: + print('ERROR: Ftrack URL is not set!') + return None + + url = url.strip('/ ') + + if 'http' not in url: + if url.endswith('ftrackapp.com'): + url = 'https://' + url + else: + url = 
'https://{0}.ftrackapp.com'.format(url) + try: + result = requests.get(url, allow_redirects=False) + except requests.exceptions.RequestException: + if log_errors: + print('ERROR: Entered Ftrack URL is not accessible!') + return False + + if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): + if log_errors: + print('ERROR: Entered Ftrack URL is not accessible!') + return False + + print('DEBUG: Ftrack server {} is accessible.'.format(url)) + + return url + + +class StorerEventHub(ftrack_api.event.hub.EventHub): + def __init__(self, *args, **kwargs): + self.sock = kwargs.pop("sock") + super(StorerEventHub, self).__init__(*args, **kwargs) + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` to extend heartbeat handling""" + code_name = self._code_name_mapping[code] + if code_name == "heartbeat": + # Reply with heartbeat. + self.sock.sendall(b"storer") + return self._send_packet(self._code_name_mapping['heartbeat']) + + elif code_name == "connect": + event = ftrack_api.event.base.Event( + topic="pype.storer.started", + data={}, + source={ + "id": self.id, + "user": {"username": self._api_user} + } + ) + self._event_queue.put(event) + + return super(StorerEventHub, self)._handle_packet( + code, packet_identifier, path, data + )
+ + +class ProcessEventHub(ftrack_api.event.hub.EventHub): + url, database, table_name = get_ftrack_event_mongo_info() + + is_table_created = False + pypelog = Logger().get_logger("Session Processor") + + def __init__(self, *args, **kwargs): + self.dbcon = DbConnector( + mongo_url=self.url, + database_name=self.database, + table_name=self.table_name + ) + self.sock = kwargs.pop("sock") + super(ProcessEventHub, self).__init__(*args, **kwargs) + + def prepare_dbcon(self): + try: + self.dbcon.install() + self.dbcon._database.list_collection_names() + except pymongo.errors.AutoReconnect: + self.pypelog.error( + "Mongo server \"{}\" is not responding, exiting.".format( + os.environ["AVALON_MONGO"] + ) + ) + sys.exit(0) + + except pymongo.errors.OperationFailure: + self.pypelog.error(( + "Error with Mongo access, probably permissions." + " Check if database with name \"{}\"" + " and collection \"{}\" exist." + ).format(self.database, self.table_name)) + self.sock.sendall(b"MongoError") + sys.exit(0)
+ + def wait(self, duration=None): + """Overridden wait + + Events are loaded from Mongo DB when the queue is empty. Each handled + event is marked as processed in Mongo DB. + """ + started = time.time() + self.prepare_dbcon() + while True: + try: + event = self._event_queue.get(timeout=0.1) + except queue.Empty: + if not self.load_events(): + time.sleep(0.5) + else: + try: + self._handle(event) + self.dbcon.update_one( + {"id": event["id"]}, + {"$set": {"pype_data.is_processed": True}} + ) + except pymongo.errors.AutoReconnect: + self.pypelog.error(( + "Mongo server \"{}\" is not responding, exiting." + ).format(os.environ["AVALON_MONGO"])) + sys.exit(0) + # Additional special processing of events. 
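As an editorial aside, a minimal usage sketch for the `check_ftrack_url` helper that this hunk relocates into `ftrack_server/lib.py` (not part of the patch; the behaviour is read off the function body above):

```python
# Editorial sketch, not part of the patch: expected behaviour of the
# check_ftrack_url helper relocated into ftrack_server/lib.py above.
from pype.ftrack.ftrack_server.lib import check_ftrack_url

# A bare workspace name is expanded to "https://<name>.ftrackapp.com".
url = check_ftrack_url("mystudio")

if url:
    print("Ftrack server is reachable at {}".format(url))
else:
    # None means no URL was given at all; False means the server did not
    # answer with status 200 and a FTRACK_VERSION header.
    print("Ftrack server is not accessible")
```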
+ if event['topic'] == 'ftrack.meta.disconnected': + break + + if duration is not None: + if (time.time() - started) > duration: + break + + def load_events(self): + """Load not processed events sorted by stored date""" + ago_date = datetime.datetime.now() - datetime.timedelta(days=3) + result = self.dbcon.delete_many({ + "pype_data.stored": {"$lte": ago_date}, + "pype_data.is_processed": True + }) + + not_processed_events = self.dbcon.find( + {"pype_data.is_processed": False} + ).sort( + [("pype_data.stored", pymongo.ASCENDING)] + ) + + found = False + for event_data in not_processed_events: + new_event_data = { + k: v for k, v in event_data.items() + if k not in ["_id", "pype_data"] + } + try: + event = ftrack_api.event.base.Event(**new_event_data) + except Exception: + self.logger.exception(L( + 'Failed to convert payload into event: {0}', + event_data + )) + continue + found = True + self._event_queue.put(event) + + return found + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which skip events and extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "event": + return + if code_name == "heartbeat": + self.sock.sendall(b"processor") + return self._send_packet(self._code_name_mapping["heartbeat"]) + + return super()._handle_packet(code, packet_identifier, path, data) +class SocketSession(ftrack_api.session.Session): + '''An isolated session for interaction with an ftrack server.''' + def __init__( + self, server_url=None, api_key=None, api_user=None, auto_populate=True, + plugin_paths=None, cache=None, cache_key_maker=None, + auto_connect_event_hub=None, schema_cache_path=None, + plugin_arguments=None, sock=None, Eventhub=None + ): + super(ftrack_api.session.Session, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self._closed = False + + if server_url is None: + server_url = os.environ.get('FTRACK_SERVER') + + if not server_url: + raise TypeError( + 'Required "server_url" not specified. Pass as argument or set ' + 'in environment variable FTRACK_SERVER.' + ) + + self._server_url = server_url + + if api_key is None: + api_key = os.environ.get( + 'FTRACK_API_KEY', + # Backwards compatibility + os.environ.get('FTRACK_APIKEY') + ) + + if not api_key: + raise TypeError( + 'Required "api_key" not specified. Pass as argument or set in ' + 'environment variable FTRACK_API_KEY.' + ) + + self._api_key = api_key + + if api_user is None: + api_user = os.environ.get('FTRACK_API_USER') + if not api_user: + try: + api_user = getpass.getuser() + except Exception: + pass + + if not api_user: + raise TypeError( + 'Required "api_user" not specified. Pass as argument, set in ' + 'environment variable FTRACK_API_USER or one of the standard ' + 'environment variables used by Python\'s getpass module.' + ) + + self._api_user = api_user + + # Currently pending operations. + self.recorded_operations = ftrack_api.operation.Operations() + self.record_operations = True + + self.cache_key_maker = cache_key_maker + if self.cache_key_maker is None: + self.cache_key_maker = ftrack_api.cache.StringKeyMaker() + + # Enforce always having a memory cache at top level so that the same + # in-memory instance is returned from session. 
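Aside: the storer and processor hubs above hand events to each other through a MongoDB collection. A sketch of that contract follows; the field names (`id`, `pype_data.stored`, `pype_data.is_processed`) come from the patch, while the connection URL, database, and collection names are illustrative placeholders:

```python
# Editorial sketch, not part of the patch: the MongoDB hand-off between
# StorerEventHub (writes) and ProcessEventHub (reads) shown above.
import datetime
import pymongo

client = pymongo.MongoClient("mongodb://localhost:27017")  # assumed URL
collection = client["pype"]["ftrack_events"]  # assumed db/collection names

# The storer upserts every received event keyed by its ftrack id and keeps
# bookkeeping under "pype_data" (see dbcon.replace_one in sub_event_storer).
collection.replace_one(
    {"id": "some-ftrack-event-id"},
    {
        "id": "some-ftrack-event-id",
        "topic": "ftrack.update",
        "data": {},
        "pype_data": {
            "stored": datetime.datetime.now(),
            "is_processed": False
        }
    },
    upsert=True
)

# The processor's load_events() picks up unprocessed events oldest-first...
unprocessed = collection.find(
    {"pype_data.is_processed": False}
).sort([("pype_data.stored", pymongo.ASCENDING)])

# ...and wait() marks each one as processed after handling it.
for event_data in unprocessed:
    collection.update_one(
        {"id": event_data["id"]},
        {"$set": {"pype_data.is_processed": True}}
    )
```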
+ self.cache = ftrack_api.cache.LayeredCache([ + ftrack_api.cache.MemoryCache() + ]) + + if cache is not None: + if callable(cache): + cache = cache(self) + + if cache is not None: + self.cache.caches.append(cache) + + self._managed_request = None + self._request = requests.Session() + self._request.auth = ftrack_api.session.SessionAuthentication( + self._api_key, self._api_user + ) + + self.auto_populate = auto_populate + + # Fetch server information and in doing so also check credentials. + self._server_information = self._fetch_server_information() + + # Now check compatibility of server based on retrieved information. + self.check_server_compatibility() + + # Construct event hub and load plugins. + if Eventhub is None: + Eventhub = ftrack_api.event.hub.EventHub + self._event_hub = Eventhub( + self._server_url, + self._api_user, + self._api_key, + sock=sock + ) + + self._auto_connect_event_hub_thread = None + if auto_connect_event_hub in (None, True): + # Connect to event hub in background thread so as not to block main + # session usage waiting for event hub connection. + self._auto_connect_event_hub_thread = threading.Thread( + target=self._event_hub.connect + ) + self._auto_connect_event_hub_thread.daemon = True + self._auto_connect_event_hub_thread.start() + + # To help with migration from auto_connect_event_hub default changing + # from True to False. + self._event_hub._deprecation_warning_auto_connect = ( + auto_connect_event_hub is None + ) + + # Register to auto-close session on exit. + atexit.register(self.close) + + self._plugin_paths = plugin_paths + if self._plugin_paths is None: + self._plugin_paths = os.environ.get( + 'FTRACK_EVENT_PLUGIN_PATH', '' + ).split(os.pathsep) + + self._discover_plugins(plugin_arguments=plugin_arguments) + + # TODO: Make schemas read-only and non-mutable (or at least without + # rebuilding types)? 
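Aside: a sketch of how `SocketSession` with its injectable `Eventhub` argument is meant to be constructed, mirroring the `sub_event_storer.py` and `sub_event_processor.py` entry points later in this patch (the health-check socket host and port are placeholders; they are normally supplied by the parent process):

```python
# Editorial sketch, not part of the patch: constructing SocketSession with
# an injected event hub class, as the subprocess entry points do.
import socket

from pype.ftrack.ftrack_server.lib import (
    SocketSession, StorerEventHub, ProcessEventHub
)

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 8000))  # placeholder; the parent passes the port

# The Eventhub argument decides which EventHub subclass the session builds,
# so one session class serves both the storer and the processor subprocess.
session = SocketSession(
    auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub
)
# The processor subprocess makes the identical call with
# Eventhub=ProcessEventHub.
```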
+ if schema_cache_path is not False: + if schema_cache_path is None: + schema_cache_path = os.environ.get( + 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + ) + + schema_cache_path = os.path.join( + schema_cache_path, 'ftrack_api_schema_cache.json' + ) + + self.schemas = self._load_schemas(schema_cache_path) + self.types = self._build_entity_type_classes(self.schemas) + + ftrack_api._centralized_storage_scenario.register(self) + + self._configure_locations() + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.ready', + data=dict( + session=self + ) + ), + synchronous=True + ) diff --git a/pype/ftrack/ftrack_server/session_processor.py b/pype/ftrack/ftrack_server/session_processor.py deleted file mode 100644 index 86a9775dce..0000000000 --- a/pype/ftrack/ftrack_server/session_processor.py +++ /dev/null @@ -1,292 +0,0 @@ -import logging -import os -import atexit -import datetime -import tempfile -import threading -import time -import requests -import queue -import pymongo - -import ftrack_api -import ftrack_api.session -import ftrack_api.cache -import ftrack_api.operation -import ftrack_api._centralized_storage_scenario -import ftrack_api.event -from ftrack_api.logging import LazyLogMessage as L - -from pype.ftrack.lib.custom_db_connector import DbConnector -from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info -from pypeapp import Logger - -log = Logger().get_logger("Session processor") - - -class ProcessEventHub(ftrack_api.event.hub.EventHub): - url, database, table_name = get_ftrack_event_mongo_info() - - is_table_created = False - - def __init__(self, *args, **kwargs): - self.dbcon = DbConnector( - mongo_url=self.url, - database_name=self.database, - table_name=self.table_name - ) - self.sock = kwargs.pop("sock") - super(ProcessEventHub, self).__init__(*args, **kwargs) - - def prepare_dbcon(self): - try: - self.dbcon.install() - self.dbcon._database.collection_names() - except pymongo.errors.AutoReconnect: - log.error("Mongo server \"{}\" is not responding, exiting.".format( - os.environ["AVALON_MONGO"] - )) - sys.exit(0) - - except pymongo.errors.OperationFailure: - log.error(( - "Error with Mongo access, probably permissions." - "Check if exist database with name \"{}\"" - " and collection \"{}\" inside." - ).format(self.database, self.table_name)) - self.sock.sendall(b"MongoError") - sys.exit(0) - - def wait(self, duration=None): - """Overriden wait - - Event are loaded from Mongo DB when queue is empty. Handled event is - set as processed in Mongo DB. - """ - started = time.time() - self.prepare_dbcon() - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - if not self.load_events(): - time.sleep(0.5) - else: - try: - self._handle(event) - self.dbcon.update_one( - {"id": event["id"]}, - {"$set": {"pype_data.is_processed": True}} - ) - except pymongo.errors.AutoReconnect: - log.error(( - "Mongo server \"{}\" is not responding, exiting." - ).format(os.environ["AVALON_MONGO"])) - sys.exit(0) - # Additional special processing of events. 
- if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def load_events(self): - """Load not processed events sorted by stored date""" - ago_date = datetime.datetime.now() - datetime.timedelta(days=3) - result = self.dbcon.delete_many({ - "pype_data.stored": {"$lte": ago_date}, - "pype_data.is_processed": True - }) - - not_processed_events = self.dbcon.find( - {"pype_data.is_processed": False} - ).sort( - [("pype_data.stored", pymongo.ASCENDING)] - ) - - found = False - for event_data in not_processed_events: - new_event_data = { - k: v for k, v in event_data.items() - if k not in ["_id", "pype_data"] - } - try: - event = ftrack_api.event.base.Event(**new_event_data) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_data - )) - continue - found = True - self._event_queue.put(event) - - return found - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which skip events and extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "event": - return - if code_name == "heartbeat": - self.sock.sendall(b"processor") - return self._send_packet(self._code_name_mapping["heartbeat"]) - - return super()._handle_packet(code, packet_identifier, path, data) - - -class ProcessSession(ftrack_api.session.Session): - '''An isolated session for interaction with an ftrack server.''' - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None, sock=None - ): - super(ftrack_api.session.Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. 
- self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = ftrack_api.session.SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ProcessEventHub( - self._server_url, - self._api_user, - self._api_key, - sock=sock - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) diff --git a/pype/ftrack/ftrack_server/session_storer.py b/pype/ftrack/ftrack_server/session_storer.py deleted file mode 100644 index b3201c9e4d..0000000000 --- a/pype/ftrack/ftrack_server/session_storer.py +++ /dev/null @@ -1,257 +0,0 @@ -import logging -import os -import atexit -import tempfile -import threading -import requests - -import ftrack_api -import ftrack_api.session -import ftrack_api.cache -import ftrack_api.operation -import ftrack_api._centralized_storage_scenario -import ftrack_api.event -from ftrack_api.logging import LazyLogMessage as L - - -class StorerEventHub(ftrack_api.event.hub.EventHub): - def __init__(self, *args, **kwargs): - self.sock = kwargs.pop("sock") - super(StorerEventHub, self).__init__(*args, **kwargs) - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which extend heartbeat""" - if self._code_name_mapping[code] == "heartbeat": - # Reply with heartbeat. 
- self.sock.sendall(b"storer") - return self._send_packet(self._code_name_mapping['heartbeat']) - - return super(StorerEventHub, self)._handle_packet( - code, packet_identifier, path, data - ) - - -class StorerSession(ftrack_api.session.Session): - '''An isolated session for interaction with an ftrack server.''' - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None, sock=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. 
If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(ftrack_api.session.Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = ftrack_api.session.SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = StorerEventHub( - self._server_url, - self._api_user, - self._api_key, - sock=sock - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. 
- atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index d0a2868743..3309f75cd7 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -1,7 +1,5 @@ import os -import sys import time -import signal import socket import threading import subprocess @@ -10,7 +8,9 @@ from pypeapp import Logger class SocketThread(threading.Thread): """Thread that checks suprocess of storer of processor of events""" + MAX_TIMEOUT = 35 + def __init__(self, name, port, filepath): super(SocketThread, self).__init__() self.log = Logger().get_logger("SocketThread", "Event Thread") diff --git a/pype/ftrack/ftrack_server/sub_event_processor.py b/pype/ftrack/ftrack_server/sub_event_processor.py index 01fa64fdd0..9c971ca916 100644 --- a/pype/ftrack/ftrack_server/sub_event_processor.py +++ b/pype/ftrack/ftrack_server/sub_event_processor.py @@ -1,12 +1,9 @@ -import os import sys -import datetime import signal import socket -import pymongo from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.session_processor import ProcessSession +from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub from pypeapp import Logger log = Logger().get_logger("Event processor") @@ -24,14 +21,15 @@ def main(args): sock.sendall(b"CreatedProcess") try: - session = ProcessSession(auto_connect_event_hub=True, sock=sock) - server = FtrackServer('event') + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub + ) + server = FtrackServer("event") log.debug("Launched Ftrack Event processor") server.run_server(session) - except Exception as exc: - import traceback - traceback.print_tb(exc.__traceback__) + except Exception: + log.error("Event server crashed. 
See traceback below", exc_info=True) finally: log.debug("First closing socket") diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py index aaaf63accd..dfe8e21654 100644 --- a/pype/ftrack/ftrack_server/sub_event_storer.py +++ b/pype/ftrack/ftrack_server/sub_event_storer.py @@ -5,16 +5,24 @@ import signal import socket import pymongo +import ftrack_api from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info +from pype.ftrack.ftrack_server.lib import ( + get_ftrack_event_mongo_info, + SocketSession, + StorerEventHub +) from pype.ftrack.lib.custom_db_connector import DbConnector -from session_storer import StorerSession from pypeapp import Logger log = Logger().get_logger("Event storer") -url, database, table_name = get_ftrack_event_mongo_info() +class SessionFactory: + session = None + + +url, database, table_name = get_ftrack_event_mongo_info() dbcon = DbConnector( mongo_url=url, database_name=database, @@ -24,10 +32,11 @@ dbcon = DbConnector( # ignore_topics = ["ftrack.meta.connected"] ignore_topics = [] + def install_db(): try: dbcon.install() - dbcon._database.collection_names() + dbcon._database.list_collection_names() except pymongo.errors.AutoReconnect: log.error("Mongo server \"{}\" is not responding, exiting.".format( os.environ["AVALON_MONGO"] @@ -49,7 +58,7 @@ def launch(event): try: # dbcon.insert_one(event_data) - dbcon.update({"id": event_id}, event_data, upsert=True) + dbcon.replace_one({"id": event_id}, event_data, upsert=True) log.debug("Event: {} stored".format(event_id)) except pymongo.errors.AutoReconnect: @@ -65,10 +74,75 @@ def launch(event): ) +def trigger_sync(event): + session = SessionFactory.session + source_id = event.get("source", {}).get("id") + if not source_id or source_id != session.event_hub.id: + return + + if session is None: + log.warning("Session is not set. 
Can't trigger Sync to avalon action.") + return True + + projects = session.query("Project").all() + if not projects: + return True + + query = { + "pype_data.is_processed": False, + "topic": "ftrack.action.launch", + "data.actionIdentifier": "sync.to.avalon.server" + } + set_dict = { + "$set": {"pype_data.is_processed": True} + } + dbcon.update_many(query, set_dict) + + selections = [] + for project in projects: + if project["status"] != "active": + continue + + auto_sync = project["custom_attributes"].get("avalon_auto_sync") + if not auto_sync: + continue + + selections.append({ + "entityId": project["id"], + "entityType": "show" + }) + + if not selections: + return + + user = session.query( + "User where username is \"{}\"".format(session.api_user) + ).one() + user_data = { + "username": user["username"], + "id": user["id"] + } + + for selection in selections: + event_data = { + "actionIdentifier": "sync.to.avalon.server", + "selection": [selection] + } + session.event_hub.publish( + ftrack_api.event.base.Event( + topic="ftrack.action.launch", + data=event_data, + source=dict(user=user_data) + ), + on_error="ignore" + ) + + def register(session): '''Registers the event, subscribing the discover and launch topics.''' install_db() session.event_hub.subscribe("topic=*", launch) + session.event_hub.subscribe("topic=pype.storer.started", trigger_sync) def main(args): @@ -84,7 +158,10 @@ def main(args): sock.sendall(b"CreatedStore") try: - session = StorerSession(auto_connect_event_hub=True, sock=sock) + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub + ) + SessionFactory.session = session register(session) server = FtrackServer("event") log.debug("Launched Ftrack Event storer") diff --git a/pype/ftrack/ftrack_server/sub_legacy_server.py b/pype/ftrack/ftrack_server/sub_legacy_server.py index 3d364e0ba3..8b7bab5e2e 100644 --- a/pype/ftrack/ftrack_server/sub_legacy_server.py +++ b/pype/ftrack/ftrack_server/sub_legacy_server.py @@ -1,4 +1,3 @@ -import os import sys import time import datetime @@ -6,8 +5,7 @@ import signal import threading from ftrack_server import FtrackServer -from pype.vendor import ftrack_api -from pype.vendor.ftrack_api.event.hub import EventHub +import ftrack_api from pypeapp import Logger log = Logger().get_logger("Event Server Legacy") @@ -37,7 +35,10 @@ class TimerChecker(threading.Thread): if not self.session.event_hub.connected: if not connected: - if (datetime.datetime.now() - start).seconds > self.max_time_out: + if ( + (datetime.datetime.now() - start).seconds > + self.max_time_out + ): log.error(( "Exiting event server. Session was not connected" " to ftrack server in {} seconds." @@ -61,7 +62,7 @@ class TimerChecker(threading.Thread): def main(args): check_thread = None try: - server = FtrackServer('event') + server = FtrackServer("event") session = ftrack_api.Session(auto_connect_event_hub=True) check_thread = TimerChecker(server, session) diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py index 6198230e57..9af9ded943 100644 --- a/pype/ftrack/lib/__init__.py +++ b/pype/ftrack/lib/__init__.py @@ -1,4 +1,4 @@ -from .avalon_sync import * +from . 
import avalon_sync from .credentials import * from .ftrack_app_handler import * from .ftrack_event_handler import * diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 0baf99d2cf..064ea1adb8 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -1,369 +1,82 @@ import os import re -import json -from pype.lib import get_avalon_database -from bson.objectid import ObjectId +import queue +import collections + +from pype.ftrack.lib.io_nonsingleton import DbConnector + import avalon import avalon.api -from avalon import schema -from avalon.vendor import toml, jsonschema -from pypeapp import Logger, Anatomy, config +from avalon.vendor import toml +from pypeapp import Logger, Anatomy + +from bson.objectid import ObjectId +from bson.errors import InvalidId +from pymongo import UpdateOne +import ftrack_api -ValidationError = jsonschema.ValidationError log = Logger().get_logger(__name__) -def get_ca_mongoid(): - # returns name of Custom attribute that stores mongo_id - return 'avalon_mongo_id' +# Current schemas for avalon types +EntitySchemas = { + "project": "avalon-core:project-2.0", + "asset": "avalon-core:asset-3.0", + "config": "avalon-core:config-1.0" +} + +# name of Custom attribute that stores mongo_id from avalon db +CustAttrIdKey = "avalon_mongo_id" +CustAttrAutoSync = "avalon_auto_sync" -def import_to_avalon( - session, entity, ft_project, av_project, custom_attributes -): - database = get_avalon_database() - project_name = ft_project['full_name'] - output = {} - errors = [] +def check_regex(name, entity_type, in_schema=None, schema_patterns=None): + schema_name = "asset-3.0" + if in_schema: + schema_name = in_schema + elif entity_type == "project": + schema_name = "project-2.0" + elif entity_type == "task": + schema_name = "task" - entity_type = entity.entity_type - ent_path = "/".join([ent["name"] for ent in entity['link']]) - - log.debug("{} [{}] - Processing".format(ent_path, entity_type)) - - ca_mongoid = get_ca_mongoid() - # Validate if entity has custom attribute avalon_mongo_id - if ca_mongoid not in entity['custom_attributes']: - msg = ( - 'Custom attribute "{}" for "{}" is not created' - ' or don\'t have set permissions for API' - ).format(ca_mongoid, entity['name']) - log.error(msg) - errors.append({'Custom attribute error': msg}) - output['errors'] = errors - return output - - # Validate if entity name match REGEX in schema - avalon_check_name(entity) - - entity_type = entity.entity_type - # Project //////////////////////////////////////////////////////////////// - if entity_type in ['Project']: - type = 'project' - - proj_config = get_project_config(entity) - schema.validate(proj_config) - - av_project_code = None - if av_project is not None and 'code' in av_project['data']: - av_project_code = av_project['data']['code'] - ft_project_code = ft_project['name'] - - if av_project is None: - log.debug("{} - Creating project".format(project_name)) - item = { - 'schema': "avalon-core:project-2.0", - 'type': type, - 'name': project_name, - 'data': dict(), - 'config': proj_config, - 'parent': None, - } - schema.validate(item) - - database[project_name].insert_one(item) - - av_project = database[project_name].find_one( - {'type': type} - ) - - elif ( - av_project['name'] != project_name or - ( - av_project_code is not None and - av_project_code != ft_project_code - ) - ): - msg = ( - 'You can\'t change {0} "{1}" to "{2}"' - ', avalon wouldn\'t work properly!' - '\n{0} was changed back!' 
- ) - if av_project['name'] != project_name: - entity['full_name'] = av_project['name'] - errors.append( - {'Changed name error': msg.format( - 'Project name', av_project['name'], project_name - )} - ) - - if ( - av_project_code is not None and - av_project_code != ft_project_code - ): - log.warning(( - "{0} - Project code" - " is different in Avalon (\"{1}\")" - " that in Ftrack (\"{2}\")!" - " Trying to change it back in Ftrack to \"{1}\"." - ).format( - ent_path, str(av_project_code), str(ft_project_code) - )) - - entity['name'] = av_project_code - errors.append( - {'Changed name error': msg.format( - 'Project code', av_project_code, ft_project_code - )} - ) - - try: - session.commit() - log.info(( - "{} - Project code was changed back to \"{}\"" - ).format(ent_path, str(av_project_code))) - except Exception: - log.error( - ( - "{} - Couldn't change project code back to \"{}\"." - ).format(ent_path, str(av_project_code)), - exc_info=True - ) - - output['errors'] = errors - return output + name_pattern = None + if schema_patterns is not None: + name_pattern = schema_patterns.get(schema_name) + if not name_pattern: + default_pattern = "^[a-zA-Z0-9_.]*$" + schema_obj = avalon.schema._cache.get(schema_name + ".json") + if not schema_obj: + name_pattern = default_pattern else: - # not override existing templates! - templates = av_project['config'].get('template', None) - if templates is not None: - for key, value in proj_config['template'].items(): - if ( - key in templates and - templates[key] is not None and - templates[key] != value - ): - proj_config['template'][key] = templates[key] - - projectId = av_project['_id'] - - data = get_data( - entity, session, custom_attributes - ) - - cur_data = av_project.get('data') or {} - - enter_data = {} - for k, v in cur_data.items(): - enter_data[k] = v - for k, v in data.items(): - enter_data[k] = v - - log.debug("{} - Updating data".format(ent_path)) - database[project_name].update_many( - {'_id': ObjectId(projectId)}, - {'$set': { - 'name': project_name, - 'config': proj_config, - 'data': data - }} - ) - - entity['custom_attributes'][ca_mongoid] = str(projectId) - session.commit() - - output['project'] = av_project - - return output - - # Asset - ///////////////////////////////////////////////////////////// - if av_project is None: - result = import_to_avalon( - session, ft_project, ft_project, av_project, custom_attributes - ) - - if 'errors' in result: - output['errors'] = result['errors'] - return output - - elif 'project' not in result: - msg = 'During project import went something wrong' - errors.append({'Unexpected error': msg}) - output['errors'] = errors - return output - - av_project = result['project'] - output['project'] = result['project'] - - projectId = av_project['_id'] - data = get_data( - entity, session, custom_attributes - ) - - name = entity['name'] - - avalon_asset = None - # existence of this custom attr is already checked - if ca_mongoid not in entity['custom_attributes']: - msg = ( - "Entity type \"{}\" don't have created custom attribute \"{}\"" - " or user \"{}\" don't have permissions to read or change it." 
- ).format(entity_type, ca_mongoid, session.api_user) - - log.error(msg) - errors.append({'Missing Custom attribute': msg}) - output['errors'] = errors - return output - - mongo_id = entity['custom_attributes'][ca_mongoid] - mongo_id = mongo_id.replace(' ', '').replace('\n', '') - try: - ObjectId(mongo_id) - except Exception: - mongo_id = '' - - if mongo_id != '': - avalon_asset = database[project_name].find_one( - {'_id': ObjectId(mongo_id)} - ) - - if avalon_asset is None: - avalon_asset = database[project_name].find_one( - {'type': 'asset', 'name': name} - ) - if avalon_asset is None: - item = { - 'schema': "avalon-core:asset-3.0", - 'name': name, - 'parent': ObjectId(projectId), - 'type': 'asset', - 'data': data - } - schema.validate(item) - mongo_id = database[project_name].insert_one(item).inserted_id - log.debug("{} - Created in project \"{}\"".format( - ent_path, project_name - )) - # Raise error if it seems to be different ent. with same name - elif avalon_asset['data']['parents'] != data['parents']: - msg = ( - "{} - In Avalon DB already exists entity with name \"{}\"" - "\n- \"{}\"" - ).format(ent_path, name, "/".join(db_asset_path_items)) - log.error(msg) - errors.append({'Entity name duplication': msg}) - output['errors'] = errors - return output - - # Store new ID (in case that asset was removed from DB) - else: - mongo_id = avalon_asset['_id'] - else: - if avalon_asset['name'] != entity['name']: - if changeability_check_childs(entity) is False: - msg = ( - '{} - You can\'t change name "{}" to "{}"' - ', avalon wouldn\'t work properly!' - '\n\nName was changed back!' - '\n\nCreate new entity if you want to change name.' - ).format(ent_path, avalon_asset['name'], entity['name']) - - log.warning(msg) - entity['name'] = avalon_asset['name'] - session.commit() - errors.append({'Changed name error': msg}) - - if avalon_asset['data']['parents'] != data['parents']: - old_path = '/'.join(avalon_asset['data']['parents']) - new_path = '/'.join(data['parents']) - - msg = ( - 'You can\'t move with entities.' - '\nEntity "{}" was moved from "{}" to "{}"' - '\n\nAvalon won\'t work properly, {}!' 
+ name_pattern = schema_obj.get( + "properties", {}).get( + "name", {}).get( + "pattern", default_pattern ) + if schema_patterns is not None: + schema_patterns[schema_name] = name_pattern - moved_back = False - if 'visualParent' in avalon_asset['data']: - asset_parent_id = avalon_asset['data']['visualParent'] or avalon_asset['parent'] - - asset_parent = database[project_name].find_one( - {'_id': ObjectId(asset_parent_id)} - ) - ft_parent_id = asset_parent['data']['ftrackId'] - try: - entity['parent_id'] = ft_parent_id - session.commit() - msg = msg.format( - avalon_asset['name'], old_path, new_path, - 'entity was moved back' - ) - log.warning(msg) - moved_back = True - - except Exception: - moved_back = False - - if moved_back is False: - msg = msg.format( - avalon_asset['name'], old_path, new_path, - 'please move it back' - ) - log.error(msg) - - errors.append({'Hierarchy change error': msg}) - - if len(errors) > 0: - output['errors'] = errors - return output - - avalon_asset = database[project_name].find_one( - {'_id': ObjectId(mongo_id)} - ) - - cur_data = avalon_asset.get('data') or {} - - enter_data = {} - for k, v in cur_data.items(): - enter_data[k] = v - for k, v in data.items(): - enter_data[k] = v - - database[project_name].update_many( - {'_id': ObjectId(mongo_id)}, - {'$set': { - 'name': name, - 'data': enter_data, - 'parent': ObjectId(projectId) - }}) - log.debug("{} - Updated data (in project \"{}\")".format( - ent_path, project_name - )) - entity['custom_attributes'][ca_mongoid] = str(mongo_id) - session.commit() - - return output + if re.match(name_pattern, name): + return True + return False -def get_avalon_attr(session, split_hierarchical=False): +def get_avalon_attr(session, split_hierarchical=True): custom_attributes = [] hier_custom_attributes = [] cust_attrs_query = ( - "select id, entity_type, object_type_id, is_hierarchical" + "select id, entity_type, object_type_id, is_hierarchical, default" " from CustomAttributeConfiguration" " where group.name = \"avalon\"" ) all_avalon_attr = session.query(cust_attrs_query).all() for cust_attr in all_avalon_attr: - if 'avalon_' in cust_attr['key']: + if split_hierarchical and cust_attr["is_hierarchical"]: + hier_custom_attributes.append(cust_attr) continue - if split_hierarchical: - if cust_attr["is_hierarchical"]: - hier_custom_attributes.append(cust_attr) - continue - custom_attributes.append(cust_attr) if split_hierarchical: @@ -373,256 +86,2185 @@ def get_avalon_attr(session, split_hierarchical=False): return custom_attributes -def changeability_check_childs(entity): - if (entity.entity_type.lower() != 'task' and 'children' not in entity): - return True - childs = entity['children'] - for child in childs: - if child.entity_type.lower() == 'task': - available_statuses = config.get_presets().get( - "ftrack", {}).get( - "ftrack_config", {}).get( - "sync_to_avalon", {}).get( - "statuses_name_change", [] - ) - ent_status = child['status']['name'].lower() - if ent_status not in available_statuses: - return False - # If not task go deeper - elif changeability_check_childs(child) is False: - return False - # If everything is allright - return True +def from_dict_to_set(data): + result = {"$set": {}} + dict_queue = queue.Queue() + dict_queue.put((None, data)) + + while not dict_queue.empty(): + _key, _data = dict_queue.get() + for key, value in _data.items(): + new_key = key + if _key is not None: + new_key = "{}.{}".format(_key, key) + + if not isinstance(value, dict): + result["$set"][new_key] = value + continue + 
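The name check added above resolves a per-schema regex lazily: it reuses a cached pattern when one exists, otherwise reads `properties.name.pattern` from the cached avalon schema, and falls back to a default pattern when the schema is unknown. A self-contained sketch of that lookup (the helper name and sample schema data are illustrative, not part of this patch):

```python
import re

DEFAULT_NAME_PATTERN = "^[a-zA-Z0-9_.]*$"

def name_pattern_for(schema_name, schema_cache, pattern_cache):
    # Reuse a previously resolved pattern when available.
    pattern = pattern_cache.get(schema_name)
    if pattern:
        return pattern
    schema_obj = schema_cache.get(schema_name + ".json")
    if schema_obj:
        pattern = (
            schema_obj.get("properties", {})
            .get("name", {})
            .get("pattern", DEFAULT_NAME_PATTERN)
        )
    else:
        pattern = DEFAULT_NAME_PATTERN
    pattern_cache[schema_name] = pattern
    return pattern

patterns = {}
schemas = {
    "asset-3.0.json": {"properties": {"name": {"pattern": DEFAULT_NAME_PATTERN}}}
}
print(bool(re.match(name_pattern_for("asset-3.0", schemas, patterns), "sh010")))   # True
print(bool(re.match(name_pattern_for("asset-3.0", schemas, patterns), "sh 010")))  # False
```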
dict_queue.put((new_key, value)) + return result -def get_data(entity, session, custom_attributes): - database = get_avalon_database() - - entity_type = entity.entity_type - - if entity_type.lower() == 'project': - ft_project = entity - elif entity_type.lower() != 'project': - ft_project = entity['project'] - av_project = get_avalon_project(ft_project) - - project_name = ft_project['full_name'] - - data = {} - data['ftrackId'] = entity['id'] - data['entityType'] = entity_type - - ent_types_query = "select id, name from ObjectType" - ent_types = session.query(ent_types_query).all() - ent_types_by_name = { - ent_type["name"]: ent_type["id"] for ent_type in ent_types - } - - for cust_attr in custom_attributes: - # skip hierarchical attributes - if cust_attr.get('is_hierarchical', False): - continue - - key = cust_attr['key'] - if cust_attr['entity_type'].lower() in ['asset']: - data[key] = entity['custom_attributes'][key] - - elif ( - cust_attr['entity_type'].lower() in ['show'] and - entity_type.lower() == 'project' - ): - data[key] = entity['custom_attributes'][key] - - elif ( - cust_attr['entity_type'].lower() in ['task'] and - entity_type.lower() != 'project' - ): - # Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build') - entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) - # Get object id of entity type - ent_obj_type_id = ent_types_by_name.get(entity_type_full) - - # Backup soluction when id is not found by prequeried objects - if not ent_obj_type_id: - query = 'ObjectType where name is "{}"'.format( - entity_type_full - ) - ent_obj_type_id = session.query(query).one()['id'] - - if cust_attr['object_type_id'] == ent_obj_type_id: - if key in entity['custom_attributes']: - data[key] = entity['custom_attributes'][key] - - if entity_type in ['Project']: - data['code'] = entity['name'] - return data - - # Get info for 'Data' in Avalon DB - tasks = [] - for child in entity['children']: - if child.entity_type in ['Task']: - tasks.append(child['name']) - - # Get list of parents without project - parents = [] - folderStruct = [] - for i in range(1, len(entity['link'])-1): - parEnt = session.get( - entity['link'][i]['type'], - entity['link'][i]['id'] - ) - parName = parEnt['name'] - folderStruct.append(parName) - parents.append(parEnt) - - parentId = None - - for parent in parents: - parentId = database[project_name].find_one( - {'type': 'asset', 'name': parName} - )['_id'] - if parent['parent'].entity_type != 'project' and parentId is None: - import_to_avalon( - session, parent, ft_project, av_project, custom_attributes - ) - parentId = database[project_name].find_one( - {'type': 'asset', 'name': parName} - )['_id'] - - hierarchy = "" - if len(folderStruct) > 0: - hierarchy = os.path.sep.join(folderStruct) - - data['visualParent'] = parentId - data['parents'] = folderStruct - data['tasks'] = tasks - data['hierarchy'] = hierarchy - - return data - - -def get_avalon_project(ft_project): - database = get_avalon_database() - project_name = ft_project['full_name'] - ca_mongoid = get_ca_mongoid() - if ca_mongoid not in ft_project['custom_attributes']: - return None - - # try to find by Id - project_id = ft_project['custom_attributes'][ca_mongoid] - try: - avalon_project = database[project_name].find_one({ - '_id': ObjectId(project_id) - }) - except Exception: - avalon_project = None - - if avalon_project is None: - avalon_project = database[project_name].find_one({ - 'type': 'project' - }) - - return avalon_project - - -def get_avalon_project_template(): +def 
get_avalon_project_template(project_name): """Get avalon template - Returns: dictionary with templates """ - templates = Anatomy().templates + templates = Anatomy(project_name).templates return { - 'workfile': templates["avalon"]["workfile"], - 'work': templates["avalon"]["work"], - 'publish': templates["avalon"]["publish"] + "workfile": templates["avalon"]["workfile"], + "work": templates["avalon"]["work"], + "publish": templates["avalon"]["publish"] } -def get_project_config(entity): - proj_config = {} - proj_config['schema'] = 'avalon-core:config-1.0' - proj_config['tasks'] = get_tasks(entity) - proj_config['apps'] = get_project_apps(entity) - proj_config['template'] = get_avalon_project_template() - - return proj_config - - -def get_tasks(project): - task_types = project['project_schema']['_task_type_schema']['types'] - return [{'name': task_type['name']} for task_type in task_types] - - -def get_project_apps(entity): - """ Get apps from project - Requirements: - 'Entity' MUST be object of ftrack entity with entity_type 'Project' - Checking if app from ftrack is available in Templates/bin/{app_name}.toml - - Returns: - Array with dictionaries with app Name and Label - """ +def get_project_apps(in_app_list): apps = [] - for app in entity['custom_attributes']['applications']: + # TODO report + missing_toml_msg = "Missing config file for application" + error_msg = ( + "Unexpected error happend during preparation of application" + ) + warnings = collections.defaultdict(list) + for app in in_app_list: try: toml_path = avalon.lib.which_app(app) if not toml_path: - log.warning(( - 'Missing config file for application "{}"' - ).format(app)) + log.warning(missing_toml_msg + ' "{}"'.format(app)) + warnings[missing_toml_msg].append(app) continue apps.append({ - 'name': app, - 'label': toml.load(toml_path)['label'] + "name": app, + "label": toml.load(toml_path)["label"] + }) + except Exception: + warnings[error_msg].append(app) + log.warning(( + "Error has happened during preparing application \"{}\"" + ).format(app), exc_info=True) + return apps, warnings + + +def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}): + entity_ids = [] + if entity.entity_type.lower() == "project": + entity_ids.append(entity["id"]) + else: + typed_context = session.query(( + "select ancestors.id, project from TypedContext where id is \"{}\"" + ).format(entity["id"])).one() + entity_ids.append(typed_context["id"]) + entity_ids.extend( + [ent["id"] for ent in reversed(typed_context["ancestors"])] + ) + entity_ids.append(typed_context["project"]["id"]) + + missing_defaults = [] + for attr_name in attr_names: + if attr_name not in attr_defaults: + missing_defaults.append(attr_name) + + join_ent_ids = ", ".join( + ["\"{}\"".format(entity_id) for entity_id in entity_ids] + ) + join_attribute_names = ", ".join( + ["\"{}\"".format(key) for key in attr_names] + ) + queries = [] + queries.append({ + "action": "query", + "expression": ( + "select value, entity_id from CustomAttributeValue " + "where entity_id in ({}) and configuration.key in ({})" + ).format(join_ent_ids, join_attribute_names) + }) + + if not missing_defaults: + if hasattr(session, "call"): + [values] = session.call(queries) + else: + [values] = session._call(queries) + else: + join_missing_names = ", ".join( + ["\"{}\"".format(key) for key in missing_defaults] + ) + queries.append({ + "action": "query", + "expression": ( + "select default from CustomAttributeConfiguration " + "where key in ({})" + ).format(join_missing_names) + }) + + 
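`get_hierarchical_attributes` collects the entity id, its ancestors and the project id in closest-first order, queries values for all of them in one batch, and only queries configuration defaults for attributes that were not supplied by the caller. The resolution rule it implements — closest value wins, configuration default as the fallback — can be sketched like this (all names and data below are made up):

```python
def resolve_hierarchical(entity_ids, values_by_entity, defaults):
    # entity_ids are ordered closest-first: entity, ancestors, project.
    resolved = dict(defaults)
    for key in defaults:
        for entity_id in entity_ids:
            value = values_by_entity.get(entity_id, {}).get(key)
            if value is not None:
                resolved[key] = value
                break
    return resolved

entity_ids = ["shot-id", "sequence-id", "project-id"]
values_by_entity = {
    "sequence-id": {"fps": 24},
    "project-id": {"fps": 25, "resolutionWidth": 1920},
}
defaults = {"fps": None, "resolutionWidth": None, "handles": 0}
print(resolve_hierarchical(entity_ids, values_by_entity, defaults))
# {'fps': 24, 'resolutionWidth': 1920, 'handles': 0}
```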
[values, default_values] = session.call(queries) + for default_value in default_values: + key = default_value["data"][0]["key"] + attr_defaults[key] = default_value["data"][0]["default"] + + hier_values = {} + for key, val in attr_defaults.items(): + hier_values[key] = val + + if not values["data"]: + return hier_values + + _hier_values = collections.defaultdict(list) + for value in values["data"]: + key = value["configuration"]["key"] + _hier_values[key].append(value) + + for key, values in _hier_values.items(): + value = sorted( + values, key=lambda value: entity_ids.index(value["entity_id"]) + )[0] + hier_values[key] = value["value"] + + return hier_values + + +class SyncEntitiesFactory: + dbcon = DbConnector() + + project_query = ( + "select full_name, name, custom_attributes" + ", project_schema._task_type_schema.types.name" + " from Project where full_name is \"{}\"" + ) + entities_query = ( + "select id, name, parent_id, link" + " from TypedContext where project_id is \"{}\"" + ) + ignore_custom_attr_key = "avalon_ignore_sync" + + report_splitter = {"type": "label", "value": "---"} + + def __init__(self, log_obj, session): + self.log = log_obj + self._server_url = session.server_url + self._api_key = session.api_key + self._api_user = session.api_user + + def launch_setup(self, project_full_name): + try: + self.session.close() + except Exception: + pass + + self.session = ftrack_api.Session( + server_url=self._server_url, + api_key=self._api_key, + api_user=self._api_user, + auto_connect_event_hub=True + ) + + self.duplicates = {} + self.failed_regex = {} + self.tasks_failed_regex = collections.defaultdict(list) + self.report_items = { + "info": collections.defaultdict(list), + "warning": collections.defaultdict(list), + "error": collections.defaultdict(list) + } + + self.create_list = [] + self.updates = collections.defaultdict(dict) + + self.avalon_project = None + self.avalon_entities = None + + self._avalon_ents_by_id = None + self._avalon_ents_by_ftrack_id = None + self._avalon_ents_by_name = None + self._avalon_ents_by_parent_id = None + + self._avalon_archived_ents = None + self._avalon_archived_by_id = None + self._avalon_archived_by_parent_id = None + self._avalon_archived_by_name = None + + self._subsets_by_parent_id = None + self._changeability_by_mongo_id = None + + self.all_filtered_entities = {} + self.filtered_ids = [] + self.not_selected_ids = [] + + self._ent_paths_by_ftrack_id = {} + + self.ftrack_avalon_mapper = None + self.avalon_ftrack_mapper = None + self.create_ftrack_ids = None + self.update_ftrack_ids = None + self.deleted_entities = None + + # Get Ftrack project + ft_project = self.session.query( + self.project_query.format(project_full_name) + ).one() + ft_project_id = ft_project["id"] + + # Skip if project is ignored + if ft_project["custom_attributes"].get( + self.ignore_custom_attr_key + ) is True: + msg = ( + "Project \"{}\" has set `Ignore Sync` custom attribute to True" + ).format(project_full_name) + self.log.warning(msg) + return {"success": False, "message": msg} + + # Check if `avalon_mongo_id` custom attribute exist or is accessible + if CustAttrIdKey not in ft_project["custom_attributes"]: + items = [] + items.append({ + "type": "label", + "value": "# Can't access Custom attribute <{}>".format( + CustAttrIdKey + ) + }) + items.append({ + "type": "label", + "value": ( + "
<p>- Check if user \"{}\" has permissions to access the Custom attribute</p>
" + ).format(self._api_key) + }) + items.append({ + "type": "label", + "value": "
<p>- Check if the Custom attribute exists</p>
" + }) + return { + "items": items, + "title": "Synchronization failed", + "success": False, + "message": "Synchronization failed" + } + + # Find all entities in project + all_project_entities = self.session.query( + self.entities_query.format(ft_project_id) + ).all() + + # Store entities by `id` and `parent_id` + entities_dict = collections.defaultdict(lambda: { + "children": list(), + "parent_id": None, + "entity": None, + "entity_type": None, + "name": None, + "custom_attributes": {}, + "hier_attrs": {}, + "avalon_attrs": {}, + "tasks": [] + }) + + for entity in all_project_entities: + parent_id = entity["parent_id"] + entity_type = entity.entity_type + entity_type_low = entity_type.lower() + if entity_type_low == "task": + entities_dict[parent_id]["tasks"].append(entity["name"]) + continue + + entity_id = entity["id"] + entities_dict[entity_id].update({ + "entity": entity, + "parent_id": parent_id, + "entity_type": entity_type_low, + "entity_type_orig": entity_type, + "name": entity["name"] + }) + entities_dict[parent_id]["children"].append(entity_id) + + entities_dict[ft_project_id]["entity"] = ft_project + entities_dict[ft_project_id]["entity_type"] = ( + ft_project.entity_type.lower() + ) + entities_dict[ft_project_id]["entity_type_orig"] = ( + ft_project.entity_type + ) + entities_dict[ft_project_id]["name"] = ft_project["full_name"] + + self.ft_project_id = ft_project_id + self.entities_dict = entities_dict + + @property + def avalon_ents_by_id(self): + if self._avalon_ents_by_id is None: + self._avalon_ents_by_id = {} + for entity in self.avalon_entities: + self._avalon_ents_by_id[str(entity["_id"])] = entity + + return self._avalon_ents_by_id + + @property + def avalon_ents_by_ftrack_id(self): + if self._avalon_ents_by_ftrack_id is None: + self._avalon_ents_by_ftrack_id = {} + for entity in self.avalon_entities: + key = entity.get("data", {}).get("ftrackId") + if not key: + continue + self._avalon_ents_by_ftrack_id[key] = str(entity["_id"]) + + return self._avalon_ents_by_ftrack_id + + @property + def avalon_ents_by_name(self): + if self._avalon_ents_by_name is None: + self._avalon_ents_by_name = {} + for entity in self.avalon_entities: + self._avalon_ents_by_name[entity["name"]] = str(entity["_id"]) + + return self._avalon_ents_by_name + + @property + def avalon_ents_by_parent_id(self): + if self._avalon_ents_by_parent_id is None: + self._avalon_ents_by_parent_id = collections.defaultdict(list) + for entity in self.avalon_entities: + parent_id = entity["data"]["visualParent"] + if parent_id is not None: + parent_id = str(parent_id) + self._avalon_ents_by_parent_id[parent_id].append(entity) + + return self._avalon_ents_by_parent_id + + @property + def avalon_archived_ents(self): + if self._avalon_archived_ents is None: + self._avalon_archived_ents = [ + ent for ent in self.dbcon.find({"type": "archived_asset"}) + ] + return self._avalon_archived_ents + + @property + def avalon_archived_by_name(self): + if self._avalon_archived_by_name is None: + self._avalon_archived_by_name = collections.defaultdict(list) + for ent in self.avalon_archived_ents: + self._avalon_archived_by_name[ent["name"]].append(ent) + return self._avalon_archived_by_name + + @property + def avalon_archived_by_id(self): + if self._avalon_archived_by_id is None: + self._avalon_archived_by_id = { + str(ent["_id"]): ent for ent in self.avalon_archived_ents + } + return self._avalon_archived_by_id + + @property + def avalon_archived_by_parent_id(self): + if self._avalon_archived_by_parent_id is None: + 
self._avalon_archived_by_parent_id = collections.defaultdict(list) + for entity in self.avalon_archived_ents: + parent_id = entity["data"]["visualParent"] + if parent_id is not None: + parent_id = str(parent_id) + self._avalon_archived_by_parent_id[parent_id].append(entity) + + return self._avalon_archived_by_parent_id + + @property + def subsets_by_parent_id(self): + if self._subsets_by_parent_id is None: + self._subsets_by_parent_id = collections.defaultdict(list) + for subset in self.dbcon.find({"type": "subset"}): + self._subsets_by_parent_id[str(subset["parent"])].append( + subset + ) + + return self._subsets_by_parent_id + + @property + def changeability_by_mongo_id(self): + if self._changeability_by_mongo_id is None: + self._changeability_by_mongo_id = collections.defaultdict( + lambda: True + ) + self._changeability_by_mongo_id[self.avalon_project_id] = False + self._bubble_changeability(list(self.subsets_by_parent_id.keys())) + return self._changeability_by_mongo_id + + @property + def all_ftrack_names(self): + return [ + ent_dict["name"] for ent_dict in self.entities_dict.values() if ( + ent_dict.get("name") + ) + ] + + def duplicity_regex_check(self): + self.log.debug("* Checking duplicities and invalid symbols") + # Duplicity and regex check + entity_ids_by_name = {} + duplicates = [] + failed_regex = [] + task_names = {} + _schema_patterns = {} + for ftrack_id, entity_dict in self.entities_dict.items(): + regex_check = True + name = entity_dict["name"] + entity_type = entity_dict["entity_type"] + # Tasks must be checked too + for task_name in entity_dict["tasks"]: + passed = task_names.get(task_name) + if passed is None: + passed = check_regex( + task_name, "task", schema_patterns=_schema_patterns + ) + task_names[task_name] = passed + + if not passed: + self.tasks_failed_regex[task_name].append(ftrack_id) + + if name in entity_ids_by_name: + duplicates.append(name) + else: + entity_ids_by_name[name] = [] + regex_check = check_regex( + name, entity_type, schema_patterns=_schema_patterns + ) + + entity_ids_by_name[name].append(ftrack_id) + if not regex_check: + failed_regex.append(name) + + for name in failed_regex: + self.failed_regex[name] = entity_ids_by_name[name] + + for name in duplicates: + self.duplicates[name] = entity_ids_by_name[name] + + self.filter_by_duplicate_regex() + + def filter_by_duplicate_regex(self): + filter_queue = queue.Queue() + failed_regex_msg = "{} - Entity has invalid symbols in the name" + duplicate_msg = "There are multiple entities with the name: \"{}\":" + + for ids in self.failed_regex.values(): + for id in ids: + ent_path = self.get_ent_path(id) + self.log.warning(failed_regex_msg.format(ent_path)) + filter_queue.put(id) + + for name, ids in self.duplicates.items(): + self.log.warning(duplicate_msg.format(name)) + for id in ids: + ent_path = self.get_ent_path(id) + self.log.warning(ent_path) + filter_queue.put(id) + + filtered_ids = [] + while not filter_queue.empty(): + ftrack_id = filter_queue.get() + if ftrack_id in filtered_ids: + continue + + entity_dict = self.entities_dict.pop(ftrack_id, {}) + if not entity_dict: + continue + + self.all_filtered_entities[ftrack_id] = entity_dict + parent_id = entity_dict.get("parent_id") + if parent_id and parent_id in self.entities_dict: + if ftrack_id in self.entities_dict[parent_id]["children"]: + self.entities_dict[parent_id]["children"].remove(ftrack_id) + + filtered_ids.append(ftrack_id) + for child_id in entity_dict.get("children", []): + filter_queue.put(child_id) + + for name, ids in 
self.tasks_failed_regex.items(): + for id in ids: + if id not in self.entities_dict: + continue + self.entities_dict[id]["tasks"].remove(name) + ent_path = self.get_ent_path(id) + self.log.warning(failed_regex_msg.format( + "/".join([ent_path, name]) + )) + + def filter_by_ignore_sync(self): + # skip filtering if `ignore_sync` attribute do not exist + if self.entities_dict[self.ft_project_id]["avalon_attrs"].get( + self.ignore_custom_attr_key, "_notset_" + ) == "_notset_": + return + + self.filter_queue = queue.Queue() + self.filter_queue.put((self.ft_project_id, False)) + while not self.filter_queue.empty(): + parent_id, remove = self.filter_queue.get() + if remove: + parent_dict = self.entities_dict.pop(parent_id, {}) + self.all_filtered_entities[parent_id] = parent_dict + self.filtered_ids.append(parent_id) + else: + parent_dict = self.entities_dict.get(parent_id, {}) + + for child_id in parent_dict.get("children", []): + # keep original `remove` value for all childs + _remove = (remove is True) + if not _remove: + if self.entities_dict[child_id]["avalon_attrs"].get( + self.ignore_custom_attr_key + ): + self.entities_dict[parent_id]["children"].remove( + child_id + ) + _remove = True + self.filter_queue.put((child_id, _remove)) + + def filter_by_selection(self, event): + # BUGGY!!!! cause that entities are in deleted list + # TODO may be working when filtering happen after preparations + # - But this part probably does not have any functional reason + # - Time of synchronization probably won't be changed much + selected_ids = [] + for entity in event["data"]["selection"]: + # Skip if project is in selection + if entity["entityType"] == "show": + return + selected_ids.append(entity["entityId"]) + + sync_ids = [self.ft_project_id] + parents_queue = queue.Queue() + children_queue = queue.Queue() + for id in selected_ids: + # skip if already filtered with ignore sync custom attribute + if id in self.filtered_ids: + continue + + parents_queue.put(id) + children_queue.put(id) + + while not parents_queue.empty(): + id = parents_queue.get() + while True: + # Stops when parent is in sync_ids + if id in self.filtered_ids or id in sync_ids or id is None: + break + sync_ids.append(id) + id = self.entities_dict[id]["parent_id"] + + while not children_queue.empty(): + parent_id = children_queue.get() + for child_id in self.entities_dict[parent_id]["children"]: + if child_id in sync_ids or child_id in self.filtered_ids: + continue + sync_ids.append(child_id) + children_queue.put(child_id) + + # separate not selected and to process entities + for key, value in self.entities_dict.items(): + if key not in sync_ids: + self.not_selected_ids.append(key) + + for id in self.not_selected_ids: + # pop from entities + value = self.entities_dict.pop(id) + # remove entity from parent's children + parent_id = value["parent_id"] + if parent_id not in sync_ids: + continue + + self.entities_dict[parent_id]["children"].remove(id) + + def set_cutom_attributes(self): + self.log.debug("* Preparing custom attributes") + # Get custom attributes and values + custom_attrs, hier_attrs = get_avalon_attr(self.session) + ent_types = self.session.query("select id, name from ObjectType").all() + ent_types_by_name = { + ent_type["name"]: ent_type["id"] for ent_type in ent_types + } + + attrs = set() + # store default values per entity type + attrs_per_entity_type = collections.defaultdict(dict) + avalon_attrs = collections.defaultdict(dict) + # store also custom attribute configuration id for future use (create) + 
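The buckets prepared in this method separate plain pipeline attributes from `avalon_`-prefixed ones, and key both either by `"show"` (project-level attributes) or by the attribute's object type id. A runnable sketch of that grouping, using invented attribute rows shaped like `CustomAttributeConfiguration` results:

```python
import collections

custom_attrs = [  # invented rows, not real ftrack data
    {"id": "c1", "key": "fps", "entity_type": "show",
     "object_type_id": None, "default": 25},
    {"id": "c2", "key": "frameStart", "entity_type": "task",
     "object_type_id": "shot-object-id", "default": 1001},
    {"id": "c3", "key": "avalon_mongo_id", "entity_type": "task",
     "object_type_id": "shot-object-id", "default": ""},
]

defaults_by_type = collections.defaultdict(dict)
avalon_by_type = collections.defaultdict(dict)
for cust_attr in custom_attrs:
    if cust_attr["entity_type"] == "show":
        group_key = "show"
    else:
        group_key = cust_attr["object_type_id"]
    target = defaults_by_type
    if cust_attr["key"].startswith("avalon_"):
        target = avalon_by_type
    target[group_key][cust_attr["key"]] = cust_attr["default"]

print(dict(defaults_by_type))
# {'show': {'fps': 25}, 'shot-object-id': {'frameStart': 1001}}
print(dict(avalon_by_type))
# {'shot-object-id': {'avalon_mongo_id': ''}}
```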
attrs_per_entity_type_ca_id = collections.defaultdict(dict) + avalon_attrs_ca_id = collections.defaultdict(dict) + + for cust_attr in custom_attrs: + key = cust_attr["key"] + attrs.add(cust_attr["id"]) + ca_ent_type = cust_attr["entity_type"] + if key.startswith("avalon_"): + if ca_ent_type == "show": + avalon_attrs[ca_ent_type][key] = cust_attr["default"] + avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"] + else: + obj_id = cust_attr["object_type_id"] + avalon_attrs[obj_id][key] = cust_attr["default"] + avalon_attrs_ca_id[obj_id][key] = cust_attr["id"] + continue + + if ca_ent_type == "show": + attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"] + attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"] + else: + obj_id = cust_attr["object_type_id"] + attrs_per_entity_type[obj_id][key] = cust_attr["default"] + attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"] + + obj_id_ent_type_map = {} + sync_ids = [] + for entity_id, entity_dict in self.entities_dict.items(): + sync_ids.append(entity_id) + entity_type = entity_dict["entity_type"] + entity_type_orig = entity_dict["entity_type_orig"] + + if entity_type == "project": + attr_key = "show" + else: + map_key = obj_id_ent_type_map.get(entity_type_orig) + if not map_key: + # Put space between capitals + # (e.g. 'AssetBuild' -> 'Asset Build') + map_key = re.sub( + r"(\w)([A-Z])", r"\1 \2", entity_type_orig + ) + obj_id_ent_type_map[entity_type_orig] = map_key + + # Get object id of entity type + attr_key = ent_types_by_name.get(map_key) + + # Backup soluction when id is not found by prequeried objects + if not attr_key: + query = "ObjectType where name is \"{}\"".format(map_key) + attr_key = self.session.query(query).one()["id"] + ent_types_by_name[map_key] = attr_key + + prepared_attrs = attrs_per_entity_type.get(attr_key) + prepared_avalon_attr = avalon_attrs.get(attr_key) + prepared_attrs_ca_id = attrs_per_entity_type_ca_id.get(attr_key) + prepared_avalon_attr_ca_id = avalon_attrs_ca_id.get(attr_key) + if prepared_attrs: + self.entities_dict[entity_id]["custom_attributes"] = ( + prepared_attrs.copy() + ) + if prepared_attrs_ca_id: + self.entities_dict[entity_id]["custom_attributes_id"] = ( + prepared_attrs_ca_id.copy() + ) + if prepared_avalon_attr: + self.entities_dict[entity_id]["avalon_attrs"] = ( + prepared_avalon_attr.copy() + ) + if prepared_avalon_attr_ca_id: + self.entities_dict[entity_id]["avalon_attrs_id"] = ( + prepared_avalon_attr_ca_id.copy() + ) + + # TODO query custom attributes by entity_id + entity_ids_joined = ", ".join([ + "\"{}\"".format(id) for id in sync_ids + ]) + attributes_joined = ", ".join([ + "\"{}\"".format(name) for name in attrs + ]) + + cust_attr_query = ( + "select value, entity_id from ContextCustomAttributeValue " + "where entity_id in ({}) and configuration_id in ({})" + ) + call_expr = [{ + "action": "query", + "expression": cust_attr_query.format( + entity_ids_joined, attributes_joined + ) + }] + if hasattr(self.session, "call"): + [values] = self.session.call(call_expr) + else: + [values] = self.session._call(call_expr) + + for value in values["data"]: + entity_id = value["entity_id"] + key = value["configuration"]["key"] + store_key = "custom_attributes" + if key.startswith("avalon_"): + store_key = "avalon_attrs" + self.entities_dict[entity_id][store_key][key] = value["value"] + + # process hierarchical attributes + self.set_hierarchical_attribute(hier_attrs, sync_ids) + + def set_hierarchical_attribute(self, hier_attrs, sync_ids): + # collect all hierarchical attribute 
keys + # and prepare default values to project + attribute_names = [] + attribute_ids = [] + for attr in hier_attrs: + key = attr["key"] + attribute_ids.append(attr["id"]) + attribute_names.append(key) + + store_key = "hier_attrs" + if key.startswith("avalon_"): + store_key = "avalon_attrs" + + self.entities_dict[self.ft_project_id][store_key][key] = ( + attr["default"] + ) + + # Prepare dict with all hier keys and None values + prepare_dict = {} + prepare_dict_avalon = {} + for attr in attribute_names: + if attr.startswith("avalon_"): + prepare_dict_avalon[attr] = None + else: + prepare_dict[attr] = None + + for id, entity_dict in self.entities_dict.items(): + # Skip project because has stored defaults at the moment + if entity_dict["entity_type"] == "project": + continue + entity_dict["hier_attrs"] = prepare_dict.copy() + for key, val in prepare_dict_avalon.items(): + entity_dict["avalon_attrs"][key] = val + + # Prepare values to query + entity_ids_joined = ", ".join([ + "\"{}\"".format(id) for id in sync_ids + ]) + attributes_joined = ", ".join([ + "\"{}\"".format(name) for name in attribute_ids + ]) + call_expr = [{ + "action": "query", + "expression": ( + "select value, entity_id from CustomAttributeValue " + "where entity_id in ({}) and configuration_id in ({})" + ).format(entity_ids_joined, attributes_joined) + }] + if hasattr(self.session, "call"): + [values] = self.session.call(call_expr) + else: + [values] = self.session._call(call_expr) + + avalon_hier = [] + for value in values["data"]: + if value["value"] is None: + continue + entity_id = value["entity_id"] + key = value["configuration"]["key"] + store_key = "hier_attrs" + if key.startswith("avalon_"): + store_key = "avalon_attrs" + avalon_hier.append(key) + self.entities_dict[entity_id][store_key][key] = value["value"] + + # Get dictionary with not None hierarchical values to pull to childs + top_id = self.ft_project_id + project_values = {} + for key, value in self.entities_dict[top_id]["hier_attrs"].items(): + if value is not None: + project_values[key] = value + + for key in avalon_hier: + value = self.entities_dict[top_id]["avalon_attrs"][key] + if value is not None: + project_values[key] = value + + hier_down_queue = queue.Queue() + hier_down_queue.put((project_values, top_id)) + + while not hier_down_queue.empty(): + hier_values, parent_id = hier_down_queue.get() + for child_id in self.entities_dict[parent_id]["children"]: + _hier_values = hier_values.copy() + for name in attribute_names: + store_key = "hier_attrs" + if name.startswith("avalon_"): + store_key = "avalon_attrs" + value = self.entities_dict[child_id][store_key][name] + if value is not None: + _hier_values[name] = value + + self.entities_dict[child_id]["hier_attrs"].update(_hier_values) + hier_down_queue.put((_hier_values, child_id)) + + def remove_from_archived(self, mongo_id): + entity = self.avalon_archived_by_id.pop(mongo_id, None) + if not entity: + return + + if self._avalon_archived_ents is not None: + if entity in self._avalon_archived_ents: + self._avalon_archived_ents.remove(entity) + + if self._avalon_archived_by_name is not None: + name = entity["name"] + if name in self._avalon_archived_by_name: + name_ents = self._avalon_archived_by_name[name] + if entity in name_ents: + if len(name_ents) == 1: + self._avalon_archived_by_name.pop(name) + else: + self._avalon_archived_by_name[name].remove(entity) + + # TODO use custom None instead of __NOTSET__ + if self._avalon_archived_by_parent_id is not None: + parent_id = entity.get("data", {}).get( + 
"visualParent", "__NOTSET__" + ) + if parent_id is not None: + parent_id = str(parent_id) + + if parent_id in self._avalon_archived_by_parent_id: + parent_list = self._avalon_archived_by_parent_id[parent_id] + if entity not in parent_list: + self._avalon_archived_by_parent_id[parent_id].remove( + entity + ) + + def prepare_ftrack_ent_data(self): + not_set_ids = [] + for id, entity_dict in self.entities_dict.items(): + entity = entity_dict["entity"] + if entity is None: + not_set_ids.append(id) + continue + + self.entities_dict[id]["final_entity"] = {} + self.entities_dict[id]["final_entity"]["name"] = ( + entity_dict["name"] + ) + data = {} + data["ftrackId"] = entity["id"] + data["entityType"] = entity_dict["entity_type_orig"] + + for key, val in entity_dict.get("custom_attributes", []).items(): + data[key] = val + + for key, val in entity_dict.get("hier_attrs", []).items(): + data[key] = val + + if id == self.ft_project_id: + data["code"] = entity["name"] + self.entities_dict[id]["final_entity"]["data"] = data + self.entities_dict[id]["final_entity"]["type"] = "project" + + proj_schema = entity["project_schema"] + task_types = proj_schema["_task_type_schema"]["types"] + proj_apps, warnings = get_project_apps( + (data.get("applications") or []) + ) + for msg, items in warnings.items(): + if not msg or not items: + continue + self.report_items["warning"][msg] = items + + self.entities_dict[id]["final_entity"]["config"] = { + "tasks": [{"name": tt["name"]} for tt in task_types], + "apps": proj_apps + } + continue + + ent_path_items = [ent["name"] for ent in entity["link"]] + parents = ent_path_items[1:len(ent_path_items)-1:] + hierarchy = "" + if len(parents) > 0: + hierarchy = os.path.sep.join(parents) + + data["parents"] = parents + data["hierarchy"] = hierarchy + data["tasks"] = self.entities_dict[id].pop("tasks", []) + self.entities_dict[id]["final_entity"]["data"] = data + self.entities_dict[id]["final_entity"]["type"] = "asset" + + if not_set_ids: + self.log.debug(( + "- Debug information: Filtering bug, there are empty dicts" + "in entities dict (functionality should not be affected) <{}>" + ).format("| ".join(not_set_ids))) + for id in not_set_ids: + self.entities_dict.pop(id) + + def get_ent_path(self, ftrack_id): + ent_path = self._ent_paths_by_ftrack_id.get(ftrack_id) + if not ent_path: + entity = self.entities_dict[ftrack_id]["entity"] + ent_path = "/".join( + [ent["name"] for ent in entity["link"]] + ) + self._ent_paths_by_ftrack_id[ftrack_id] = ent_path + + return ent_path + + def prepare_avalon_entities(self, ft_project_name): + self.log.debug(( + "* Preparing avalon entities " + "(separate to Create, Update and Deleted groups)" + )) + # Avalon entities + self.dbcon.install() + self.dbcon.Session["AVALON_PROJECT"] = ft_project_name + avalon_project = self.dbcon.find_one({"type": "project"}) + avalon_entities = self.dbcon.find({"type": "asset"}) + self.avalon_project = avalon_project + self.avalon_entities = avalon_entities + + ftrack_avalon_mapper = {} + avalon_ftrack_mapper = {} + create_ftrack_ids = [] + update_ftrack_ids = [] + + same_mongo_id = [] + all_mongo_ids = {} + for ftrack_id, entity_dict in self.entities_dict.items(): + mongo_id = entity_dict["avalon_attrs"].get(CustAttrIdKey) + if not mongo_id: + continue + if mongo_id in all_mongo_ids: + same_mongo_id.append(mongo_id) + else: + all_mongo_ids[mongo_id] = [] + all_mongo_ids[mongo_id].append(ftrack_id) + + if avalon_project: + mongo_id = str(avalon_project["_id"]) + ftrack_avalon_mapper[self.ft_project_id] = 
mongo_id + avalon_ftrack_mapper[mongo_id] = self.ft_project_id + update_ftrack_ids.append(self.ft_project_id) + else: + create_ftrack_ids.append(self.ft_project_id) + + # make it go hierarchically + prepare_queue = queue.Queue() + + for child_id in self.entities_dict[self.ft_project_id]["children"]: + prepare_queue.put(child_id) + + while not prepare_queue.empty(): + ftrack_id = prepare_queue.get() + for child_id in self.entities_dict[ftrack_id]["children"]: + prepare_queue.put(child_id) + + entity_dict = self.entities_dict[ftrack_id] + ent_path = self.get_ent_path(ftrack_id) + + mongo_id = entity_dict["avalon_attrs"].get(CustAttrIdKey) + av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id) + if av_ent_by_mongo_id: + av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get( + "ftrackId" + ) + is_right = False + else_match_better = False + if av_ent_ftrack_id and av_ent_ftrack_id == ftrack_id: + is_right = True + + elif mongo_id not in same_mongo_id: + is_right = True + + else: + ftrack_ids_with_same_mongo = all_mongo_ids[mongo_id] + for _ftrack_id in ftrack_ids_with_same_mongo: + if _ftrack_id == av_ent_ftrack_id: + continue + + _entity_dict = self.entities_dict[_ftrack_id] + _mongo_id = _entity_dict["avalon_attrs"][CustAttrIdKey] + _av_ent_by_mongo_id = self.avalon_ents_by_id.get( + _mongo_id + ) + _av_ent_ftrack_id = _av_ent_by_mongo_id.get( + "data", {} + ).get("ftrackId") + if _av_ent_ftrack_id == ftrack_id: + else_match_better = True + break + + if not is_right and not else_match_better: + entity = entity_dict["entity"] + ent_path_items = [ent["name"] for ent in entity["link"]] + parents = ent_path_items[1:len(ent_path_items)-1:] + av_parents = av_ent_by_mongo_id["data"]["parents"] + if av_parents == parents: + is_right = True + else: + name = entity_dict["name"] + av_name = av_ent_by_mongo_id["name"] + if name == av_name: + is_right = True + + if is_right: + self.log.debug( + "Existing (by MongoID) <{}>".format(ent_path) + ) + ftrack_avalon_mapper[ftrack_id] = mongo_id + avalon_ftrack_mapper[mongo_id] = ftrack_id + update_ftrack_ids.append(ftrack_id) + continue + + mongo_id = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if not mongo_id: + mongo_id = self.avalon_ents_by_name.get(entity_dict["name"]) + if mongo_id: + self.log.debug( + "Existing (by matching name) <{}>".format(ent_path) + ) + else: + self.log.debug( + "Existing (by FtrackID in mongo) <{}>".format(ent_path) + ) + + if mongo_id: + ftrack_avalon_mapper[ftrack_id] = mongo_id + avalon_ftrack_mapper[mongo_id] = ftrack_id + update_ftrack_ids.append(ftrack_id) + continue + + self.log.debug("New <{}>".format(ent_path)) + create_ftrack_ids.append(ftrack_id) + + deleted_entities = [] + for mongo_id in self.avalon_ents_by_id: + if mongo_id in avalon_ftrack_mapper: + continue + deleted_entities.append(mongo_id) + + av_ent = self.avalon_ents_by_id[mongo_id] + av_ent_path_items = [p for p in av_ent["data"]["parents"]] + av_ent_path_items.append(av_ent["name"]) + self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items))) + + self.ftrack_avalon_mapper = ftrack_avalon_mapper + self.avalon_ftrack_mapper = avalon_ftrack_mapper + self.create_ftrack_ids = create_ftrack_ids + self.update_ftrack_ids = update_ftrack_ids + self.deleted_entities = deleted_entities + + self.log.debug(( + "Ftrack -> Avalon comparison: New <{}> " + "| Existing <{}> | Deleted <{}>" + ).format( + len(create_ftrack_ids), + len(update_ftrack_ids), + len(deleted_entities) + )) + + def filter_with_children(self, ftrack_id): + if ftrack_id not in 
self.entities_dict: + return + ent_dict = self.entities_dict[ftrack_id] + parent_id = ent_dict["parent_id"] + self.entities_dict[parent_id]["children"].remove(ftrack_id) + + children_queue = queue.Queue() + children_queue.put(ftrack_id) + while not children_queue.empty(): + _ftrack_id = children_queue.get() + entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []}) + for child_id in entity_dict["children"]: + children_queue.put(child_id) + + def prepare_changes(self): + self.log.debug("* Preparing changes for avalon/ftrack") + hierarchy_changing_ids = [] + ignore_keys = collections.defaultdict(list) + + update_queue = queue.Queue() + for ftrack_id in self.update_ftrack_ids: + update_queue.put(ftrack_id) + + while not update_queue.empty(): + ftrack_id = update_queue.get() + if ftrack_id == self.ft_project_id: + changes = self.prepare_project_changes() + if changes: + self.updates[self.avalon_project_id] = changes + continue + + ftrack_ent_dict = self.entities_dict[ftrack_id] + + # *** check parents + parent_check = False + + ftrack_parent_id = ftrack_ent_dict["parent_id"] + avalon_id = self.ftrack_avalon_mapper[ftrack_id] + avalon_entity = self.avalon_ents_by_id[avalon_id] + avalon_parent_id = avalon_entity["data"]["visualParent"] + if avalon_parent_id is not None: + avalon_parent_id = str(avalon_parent_id) + + ftrack_parent_mongo_id = self.ftrack_avalon_mapper[ + ftrack_parent_id + ] + + # if parent is project + if (ftrack_parent_mongo_id == avalon_parent_id) or ( + ftrack_parent_id == self.ft_project_id and + avalon_parent_id is None + ): + parent_check = True + + # check name + ftrack_name = ftrack_ent_dict["name"] + avalon_name = avalon_entity["name"] + name_check = ftrack_name == avalon_name + + # IDEAL STATE: both parent and name check passed + if parent_check and name_check: + continue + + # If entity is changeable then change values of parent or name + if self.changeability_by_mongo_id[avalon_id]: + # TODO logging + if not parent_check: + if ftrack_parent_mongo_id == str(self.avalon_project_id): + new_parent_name = self.entities_dict[ + self.ft_project_id]["name"] + new_parent_id = None + else: + new_parent_name = self.avalon_ents_by_id[ + ftrack_parent_mongo_id]["name"] + new_parent_id = ObjectId(ftrack_parent_mongo_id) + + if avalon_parent_id == str(self.avalon_project_id): + old_parent_name = self.entities_dict[ + self.ft_project_id]["name"] + else: + old_parent_name = self.avalon_ents_by_id[ + ftrack_parent_mongo_id]["name"] + + self.updates[avalon_id]["data"] = { + "visualParent": new_parent_id + } + ignore_keys[ftrack_id].append("data.visualParent") + self.log.debug(( + "Avalon entity \"{}\" changed parent \"{}\" -> \"{}\"" + ).format(avalon_name, old_parent_name, new_parent_name)) + + if not name_check: + self.updates[avalon_id]["name"] = ftrack_name + ignore_keys[ftrack_id].append("name") + self.log.debug( + "Avalon entity \"{}\" was renamed to \"{}\"".format( + avalon_name, ftrack_name + ) + ) + continue + + # parents and hierarchy must be recalculated + hierarchy_changing_ids.append(ftrack_id) + + # Parent is project if avalon_parent_id is set to None + if avalon_parent_id is None: + avalon_parent_id = str(self.avalon_project_id) + + if not name_check: + ent_path = self.get_ent_path(ftrack_id) + # TODO report + # TODO logging + self.entities_dict[ftrack_id]["name"] = avalon_name + self.entities_dict[ftrack_id]["entity"]["name"] = ( + avalon_name + ) + self.entities_dict[ftrack_id]["final_entity"]["name"] = ( + avalon_name + ) + self.log.warning("Name was changed 
back to {} <{}>".format( + avalon_name, ent_path + )) + self._ent_paths_by_ftrack_id.pop(ftrack_id, None) + msg = ( + " It is not possible to change" + " the name of an entity or it's parents, " + " if it already contained published data." + ) + self.report_items["warning"][msg].append(ent_path) + + # skip parent oricessing if hierarchy didn't change + if parent_check: + continue + + # Logic when parenting(hierarchy) has changed and should not + old_ftrack_parent_id = self.avalon_ftrack_mapper.get( + avalon_parent_id + ) + + # If last ftrack parent id from mongo entity exist then just + # remap paren_id on entity + if old_ftrack_parent_id: + # TODO report + # TODO logging + ent_path = self.get_ent_path(ftrack_id) + msg = ( + " It is not possible" + " to change the hierarchy of an entity or it's parents," + " if it already contained published data." + ) + self.report_items["warning"][msg].append(ent_path) + self.log.warning(( + " Entity contains published data so it was moved" + " back to it's original hierarchy <{}>" + ).format(ent_path)) + self.entities_dict[ftrack_id]["entity"]["parent_id"] = ( + old_ftrack_parent_id + ) + self.entities_dict[ftrack_id]["parent_id"] = ( + old_ftrack_parent_id + ) + self.entities_dict[old_ftrack_parent_id][ + "children" + ].append(ftrack_id) + + continue + + old_parent_ent = self.avalon_ents_by_id.get(avalon_parent_id) + if not old_parent_ent: + old_parent_ent = self.avalon_archived_by_id.get( + avalon_parent_id + ) + + # TODO report + # TODO logging + if not old_parent_ent: + self.log.warning(( + "Parent entity was not found by id" + " - Trying to find by parent name" + )) + ent_path = self.get_ent_path(ftrack_id) + + parents = avalon_entity["data"]["parents"] + parent_name = parents[-1] + matching_entity_id = None + for id, entity_dict in self.entities_dict.items(): + if entity_dict["name"] == parent_name: + matching_entity_id = id + break + + if matching_entity_id is None: + # TODO logging + # TODO report (turn off auto-sync?) + self.log.error(( + "The entity contains published data but it was moved" + " to a different place in the hierarchy and it's" + " previous parent cannot be found." + " It's impossible to solve this programmatically <{}>" + ).format(ent_path)) + msg = ( + " Hierarchy of an entity" + " can't be changed due to published data and missing" + " previous parent" + ) + self.report_items["error"][msg].append(ent_path) + self.filter_with_children(ftrack_id) + continue + + matching_ent_dict = self.entities_dict.get(matching_entity_id) + match_ent_parents = matching_ent_dict.get( + "final_entity", {}).get( + "data", {}).get( + "parents", ["__NOTSET__"] + ) + # TODO logging + # TODO report + if ( + len(match_ent_parents) >= len(parents) or + match_ent_parents[:-1] != parents + ): + ent_path = self.get_ent_path(ftrack_id) + self.log.error(( + "The entity contains published data but it was moved" + " to a different place in the hierarchy and it's" + " previous parents were moved too." + " It's impossible to solve this programmatically <{}>" + ).format(ent_path)) + msg = ( + " Hierarchy of an entity" + " can't be changed due to published data and scrambled" + "hierarchy" + ) + continue + + old_parent_ent = matching_ent_dict["final_entity"] + + parent_id = self.ft_project_id + entities_to_create = [] + # TODO logging + self.log.warning( + "Ftrack entities must be recreated because they were deleted," + " but they contain published data." 
+ ) + + _avalon_ent = old_parent_ent + + self.updates[avalon_parent_id] = {"type": "asset"} + success = True + while True: + _vis_par = _avalon_ent["data"]["visualParent"] + _name = _avalon_ent["name"] + if _name in self.all_ftrack_names: + av_ent_path_items = _avalon_ent["data"]["parents"] + av_ent_path_items.append(_name) + av_ent_path = "/".join(av_ent_path_items) + # TODO report + # TODO logging + self.log.error(( + "Can't recreate the entity in Ftrack because an entity" + " with the same name already exists in a different" + " place in the hierarchy <{}>" + ).format(av_ent_path)) + msg = ( + " Hierarchy of an entity" + " can't be changed. I contains published data and it's" + " previous parent had a name, that is duplicated at a " + " different hierarchy level" + ) + self.report_items["error"][msg].append(av_ent_path) + self.filter_with_children(ftrack_id) + success = False + break + + entities_to_create.append(_avalon_ent) + if _vis_par is None: + break + + _vis_par = str(_vis_par) + _mapped = self.avalon_ftrack_mapper.get(_vis_par) + if _mapped: + parent_id = _mapped + break + + _avalon_ent = self.avalon_ents_by_id.get(_vis_par) + if not _avalon_ent: + _avalon_ent = self.avalon_archived_by_id.get(_vis_par) + + if success is False: + continue + + new_entity_id = None + for av_entity in reversed(entities_to_create): + new_entity_id = self.create_ftrack_ent_from_avalon_ent( + av_entity, parent_id + ) + update_queue.put(new_entity_id) + + if new_entity_id: + ftrack_ent_dict["entity"]["parent_id"] = new_entity_id + + if hierarchy_changing_ids: + self.reload_parents(hierarchy_changing_ids) + + for ftrack_id in self.update_ftrack_ids: + if ftrack_id == self.ft_project_id: + continue + + avalon_id = self.ftrack_avalon_mapper[ftrack_id] + avalon_entity = self.avalon_ents_by_id[avalon_id] + + avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"] + if ( + CustAttrIdKey not in avalon_attrs or + avalon_attrs[CustAttrIdKey] != avalon_id + ): + configuration_id = self.entities_dict[ftrack_id][ + "avalon_attrs_id"][CustAttrIdKey] + + _entity_key = collections.OrderedDict({ + "configuration_id": configuration_id, + "entity_id": ftrack_id + }) + + self.session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + _entity_key, + "value", + ftrack_api.symbol.NOT_SET, + avalon_id + ) + ) + # check rest of data + data_changes = self.compare_dict( + self.entities_dict[ftrack_id]["final_entity"], + avalon_entity, + ignore_keys[ftrack_id] + ) + if data_changes: + self.updates[avalon_id] = self.merge_dicts( + data_changes, + self.updates[avalon_id] + ) + + def synchronize(self): + self.log.debug("* Synchronization begins") + avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id) + if avalon_project_id: + self.avalon_project_id = ObjectId(avalon_project_id) + + # remove filtered ftrack ids from create/update list + for ftrack_id in self.all_filtered_entities: + if ftrack_id in self.create_ftrack_ids: + self.create_ftrack_ids.remove(ftrack_id) + elif ftrack_id in self.update_ftrack_ids: + self.update_ftrack_ids.remove(ftrack_id) + + self.log.debug("* Processing entities for archivation") + self.delete_entities() + + self.log.debug("* Processing new entities") + # Create not created entities + for ftrack_id in self.create_ftrack_ids: + # CHECK it is possible that entity was already created + # because is parent of another entity which was processed first + if ftrack_id in self.ftrack_avalon_mapper: + continue + 
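The custom-attribute writes above avoid a read-modify-write round trip by pushing `UpdateEntityOperation` records directly onto the session, with the old value left as `NOT_SET`. A condensed sketch of that pattern (the wrapper function name is illustrative; a later `session.commit()` sends the batched operations):

```python
import collections

import ftrack_api.operation
import ftrack_api.symbol

def store_custom_attribute_value(session, configuration_id, entity_id, value):
    # Key the value by its configuration and entity, mirroring the calls above.
    entity_key = collections.OrderedDict({
        "configuration_id": configuration_id,
        "entity_id": entity_id
    })
    session.recorded_operations.push(
        ftrack_api.operation.UpdateEntityOperation(
            "ContextCustomAttributeValue",
            entity_key,
            "value",
            ftrack_api.symbol.NOT_SET,  # old value intentionally unknown
            value
        )
    )
    # session.commit() later sends all recorded operations at once.
```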
self.create_avalon_entity(ftrack_id) + + if len(self.create_list) > 0: + self.dbcon.insert_many(self.create_list) + + self.session.commit() + + self.log.debug("* Processing entities for update") + self.prepare_changes() + self.update_entities() + self.session.commit() + + def create_avalon_entity(self, ftrack_id): + if ftrack_id == self.ft_project_id: + self.create_avalon_project() + return + + entity_dict = self.entities_dict[ftrack_id] + parent_ftrack_id = entity_dict["parent_id"] + avalon_parent = None + if parent_ftrack_id != self.ft_project_id: + avalon_parent = self.ftrack_avalon_mapper.get(parent_ftrack_id) + # if not avalon_parent: + # self.create_avalon_entity(parent_ftrack_id) + # avalon_parent = self.ftrack_avalon_mapper[parent_ftrack_id] + avalon_parent = ObjectId(avalon_parent) + + # avalon_archived_by_id avalon_archived_by_name + current_id = ( + entity_dict["avalon_attrs"].get(CustAttrIdKey) or "" + ).strip() + mongo_id = current_id + name = entity_dict["name"] + + # Check if exist archived asset in mongo - by ID + unarchive = False + unarchive_id = self.check_unarchivation(ftrack_id, mongo_id, name) + if unarchive_id is not None: + unarchive = True + mongo_id = unarchive_id + + item = entity_dict["final_entity"] + try: + new_id = ObjectId(mongo_id) + if mongo_id in self.avalon_ftrack_mapper: + new_id = ObjectId() + except InvalidId: + new_id = ObjectId() + + item["_id"] = new_id + item["parent"] = self.avalon_project_id + item["schema"] = EntitySchemas["asset"] + item["data"]["visualParent"] = avalon_parent + + new_id_str = str(new_id) + self.ftrack_avalon_mapper[ftrack_id] = new_id_str + self.avalon_ftrack_mapper[new_id_str] = ftrack_id + + self._avalon_ents_by_id[new_id_str] = item + self._avalon_ents_by_ftrack_id[ftrack_id] = new_id_str + self._avalon_ents_by_name[item["name"]] = new_id_str + + if current_id != new_id_str: + # store mongo id to ftrack entity + configuration_id = self.entities_dict[ftrack_id][ + "avalon_attrs_id" + ][CustAttrIdKey] + _entity_key = collections.OrderedDict({ + "configuration_id": configuration_id, + "entity_id": ftrack_id }) - except Exception as e: - log.warning('Error with application {0} - {1}'.format(app, e)) - return apps - - -def avalon_check_name(entity, in_schema=None): - default_pattern = "^[a-zA-Z0-9_.]*$" - - name = entity["name"] - schema_name = "asset-3.0" - - if in_schema: - schema_name = in_schema - elif entity.entity_type.lower() == "project": - name = entity["full_name"] - schema_name = "project-2.0" - - schema_obj = avalon.schema._cache.get(schema_name + ".json") - name_pattern = schema_obj.get("properties", {}).get("name", {}).get( - "pattern", default_pattern - ) - if not re.match(name_pattern, name): - msg = "\"{}\" includes unsupported symbols like \"dash\" or \"space\"" - raise ValueError(msg.format(name)) - - -def show_errors(obj, event, errors): - title = 'Hey You! You raised few Errors! (*look below*)' - items = [] - splitter = {'type': 'label', 'value': '---'} - for error in errors: - for key, message in error.items(): - error_title = { - 'type': 'label', - 'value': '# {}'.format(key) - } - error_message = { - 'type': 'label', - 'value': '
<p>{}</p>
'.format(message) - } - if len(items) > 0: - items.append(splitter) - items.append(error_title) - items.append(error_message) - obj.log.error( - '{}: {}'.format(key, message) + self.session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + _entity_key, + "value", + ftrack_api.symbol.NOT_SET, + new_id_str + ) ) - obj.show_interface(items, title, event=event) + + if unarchive is False: + self.create_list.append(item) + return + # If unarchive then replace entity data in database + self.dbcon.replace_one({"_id": new_id}, item) + self.remove_from_archived(mongo_id) + av_ent_path_items = item["data"]["parents"] + av_ent_path_items.append(item["name"]) + av_ent_path = "/".join(av_ent_path_items) + self.log.debug("Entity was unarchived <{}>".format(av_ent_path)) + + def check_unarchivation(self, ftrack_id, mongo_id, name): + archived_by_id = self.avalon_archived_by_id.get(mongo_id) + archived_by_name = self.avalon_archived_by_name.get(name) + + # if not found in archived then skip + if not archived_by_id and not archived_by_name: + return None + + entity_dict = self.entities_dict[ftrack_id] + + if archived_by_id: + # if is changeable then unarchive (nothing to check here) + if self.changeability_by_mongo_id[mongo_id]: + return mongo_id + + # TODO replace `__NOTSET__` with custom None constant + archived_parent_id = archived_by_id["data"].get( + "visualParent", "__NOTSET__" + ) + archived_parents = archived_by_id["data"].get("parents") + archived_name = archived_by_id["name"] + + if ( + archived_name != entity_dict["name"] or + archived_parents != entity_dict["final_entity"]["data"][ + "parents" + ] + ): + return None + + return mongo_id + + # First check if there is any that have same parents + for archived in archived_by_name: + mongo_id = str(archived["_id"]) + archived_parents = archived.get("data", {}).get("parents") + if ( + archived_parents == entity_dict["final_entity"]["data"][ + "parents" + ] + ): + return mongo_id + + # Secondly try to find more close to current ftrack entity + first_changeable = None + for archived in archived_by_name: + mongo_id = str(archived["_id"]) + if not self.changeability_by_mongo_id[mongo_id]: + continue + + if first_changeable is None: + first_changeable = mongo_id + + ftrack_parent_id = entity_dict["parent_id"] + map_ftrack_parent_id = self.ftrack_avalon_mapper.get( + ftrack_parent_id + ) + + # TODO replace `__NOTSET__` with custom None constant + archived_parent_id = archived.get("data", {}).get( + "visualParent", "__NOTSET__" + ) + if archived_parent_id is not None: + archived_parent_id = str(archived_parent_id) + + # skip if parent is archived - How this should be possible? 
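`check_unarchivation` prefers the archived asset with the same mongo id (as long as its stored name and parents still agree), then falls back to the first same-named archived asset whose stored parents match; the changeability rules above add further restrictions that this simplified sketch (with invented documents) leaves out:

```python
def pick_unarchive_candidate(
    mongo_id, name, parents, archived_by_id, archived_by_name
):
    # 1) Match by mongo id when the stored name and parents still agree.
    by_id = archived_by_id.get(mongo_id)
    if (
        by_id
        and by_id["name"] == name
        and by_id["data"].get("parents") == parents
    ):
        return mongo_id
    # 2) Otherwise the first same-named archived asset with equal parents.
    for archived in archived_by_name.get(name, []):
        if archived.get("data", {}).get("parents") == parents:
            return str(archived["_id"])
    return None

archived = {"_id": "a1b2c3", "name": "sh010", "data": {"parents": ["seq01"]}}
print(pick_unarchive_candidate(
    "a1b2c3", "sh010", ["seq01"], {"a1b2c3": archived}, {"sh010": [archived]}
))  # a1b2c3
```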
+ parent_entity = self.avalon_ents_by_id.get(archived_parent_id) + if ( + parent_entity and ( + map_ftrack_parent_id is not None and + map_ftrack_parent_id == str(parent_entity["_id"]) + ) + ): + return mongo_id + # Last return first changeable with same name (or None) + return first_changeable + + def create_avalon_project(self): + project_item = self.entities_dict[self.ft_project_id]["final_entity"] + mongo_id = ( + self.entities_dict[self.ft_project_id]["avalon_attrs"].get( + CustAttrIdKey + ) or "" + ).strip() + + try: + new_id = ObjectId(mongo_id) + except InvalidId: + new_id = ObjectId() + + project_name = self.entities_dict[self.ft_project_id]["name"] + project_item["_id"] = new_id + project_item["parent"] = None + project_item["schema"] = EntitySchemas["project"] + project_item["config"]["schema"] = EntitySchemas["config"] + project_item["config"]["template"] = ( + get_avalon_project_template(project_name) + ) + + self.ftrack_avalon_mapper[self.ft_project_id] = new_id + self.avalon_ftrack_mapper[new_id] = self.ft_project_id + + self.avalon_project_id = new_id + + self._avalon_ents_by_id[str(new_id)] = project_item + self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id) + self._avalon_ents_by_name[project_item["name"]] = str(new_id) + + self.create_list.append(project_item) + + # store mongo id to ftrack entity + entity = self.entities_dict[self.ft_project_id]["entity"] + entity["custom_attributes"][CustAttrIdKey] = str(new_id) + + def _bubble_changeability(self, unchangeable_ids): + unchangeable_queue = queue.Queue() + for entity_id in unchangeable_ids: + unchangeable_queue.put((entity_id, False)) + + processed_parents_ids = [] + subsets_to_remove = [] + while not unchangeable_queue.empty(): + entity_id, child_is_archived = unchangeable_queue.get() + # skip if already processed + if entity_id in processed_parents_ids: + continue + + entity = self.avalon_ents_by_id.get(entity_id) + # if entity is not archived but unchageable child was then skip + # - archived entities should not affect not archived? + if entity and child_is_archived: + continue + + # set changeability of current entity to False + self._changeability_by_mongo_id[entity_id] = False + processed_parents_ids.append(entity_id) + # if not entity then is probably archived + if not entity: + entity = self.avalon_archived_by_id.get(entity_id) + child_is_archived = True + + if not entity: + # if entity is not found then it is subset without parent + if entity_id in unchangeable_ids: + subsets_to_remove.append(entity_id) + else: + # TODO logging - What is happening here? 
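`_bubble_changeability` walks `visualParent` links upward so that every asset owning subsets, and each of its ancestors, gets locked against renames and moves. A condensed, runnable sketch of that walk (sample data invented, archived-entity handling omitted):

```python
import collections
import queue

def bubble_changeability(assets_by_id, unchangeable_ids):
    changeable = collections.defaultdict(lambda: True)
    todo = queue.Queue()
    for entity_id in unchangeable_ids:
        todo.put(entity_id)
    while not todo.empty():
        entity_id = todo.get()
        if changeable[entity_id] is False:
            continue  # already processed
        changeable[entity_id] = False
        entity = assets_by_id.get(entity_id)
        if entity is None:
            continue  # e.g. a subset whose asset is gone
        parent_id = entity["data"]["visualParent"]
        if parent_id is not None:
            todo.put(str(parent_id))
    return changeable

assets = {
    "a1": {"data": {"visualParent": None}},
    "a2": {"data": {"visualParent": "a1"}},
    "a3": {"data": {"visualParent": "a1"}},
}
flags = bubble_changeability(assets, ["a2"])
print(flags["a2"], flags["a1"], flags["a3"])  # False False True
```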
+ + def _bubble_changeability(self, unchangeable_ids): + unchangeable_queue = queue.Queue() + for entity_id in unchangeable_ids: + unchangeable_queue.put((entity_id, False)) + + processed_parents_ids = [] + subsets_to_remove = [] + while not unchangeable_queue.empty(): + entity_id, child_is_archived = unchangeable_queue.get() + # skip if already processed + if entity_id in processed_parents_ids: + continue + + entity = self.avalon_ents_by_id.get(entity_id) + # if the entity is not archived but an unchangeable child was then skip + # - archived entities should not affect non-archived ones? + if entity and child_is_archived: + continue + + # set changeability of current entity to False + self._changeability_by_mongo_id[entity_id] = False + processed_parents_ids.append(entity_id) + # if there is no entity then it is probably archived + if not entity: + entity = self.avalon_archived_by_id.get(entity_id) + child_is_archived = True + + if not entity: + # if the entity is not found then it is a subset without a parent + if entity_id in unchangeable_ids: + subsets_to_remove.append(entity_id) + else: + # TODO logging - what is happening here? + self.log.warning(( + "Avalon contains entities without valid parents that" + " lead to the project (this should not cause errors)" + " - MongoId <{}>" + ).format(str(entity_id))) + continue + + # skip if the parent is the project + parent_id = entity["data"]["visualParent"] + if parent_id is None: + continue + unchangeable_queue.put((str(parent_id), child_is_archived)) + + self._delete_subsets_without_asset(subsets_to_remove) + + def _delete_subsets_without_asset(self, not_existing_parents): + subset_ids = [] + version_ids = [] + repre_ids = [] + to_delete = [] + + for parent_id in not_existing_parents: + subsets = self.subsets_by_parent_id.get(parent_id) + if not subsets: + continue + for subset in subsets: + if subset.get("type") != "subset": + continue + subset_ids.append(subset["_id"]) + + db_subsets = self.dbcon.find({ + "_id": {"$in": subset_ids}, + "type": "subset" + }) + if not db_subsets: + return + + db_versions = self.dbcon.find({ + "parent": {"$in": subset_ids}, + "type": "version" + }) + if db_versions: + version_ids = [ver["_id"] for ver in db_versions] + + db_repres = self.dbcon.find({ + "parent": {"$in": version_ids}, + "type": "representation" + }) + if db_repres: + repre_ids = [repre["_id"] for repre in db_repres] + + to_delete.extend(subset_ids) + to_delete.extend(version_ids) + to_delete.extend(repre_ids) + + self.dbcon.delete_many({"_id": {"$in": to_delete}}) + + # Probably deprecated + def _check_changeability(self, parent_id=None): + for entity in self.avalon_ents_by_parent_id[parent_id]: + mongo_id = str(entity["_id"]) + is_changeable = self._changeability_by_mongo_id.get(mongo_id) + if is_changeable is not None: + continue + + self._check_changeability(mongo_id) + is_changeable = True + for child in self.avalon_ents_by_parent_id.get(mongo_id) or []: + if not self._changeability_by_mongo_id[str(child["_id"])]: + is_changeable = False + break + + if is_changeable is True: + is_changeable = (mongo_id not in self.subsets_by_parent_id) + self._changeability_by_mongo_id[mongo_id] = is_changeable + + def update_entities(self): + mongo_changes_bulk = [] + for mongo_id, changes in self.updates.items(): + filter = {"_id": ObjectId(mongo_id)} + change_data = from_dict_to_set(changes) + mongo_changes_bulk.append(UpdateOne(filter, change_data)) + + if not mongo_changes_bulk: + # TODO LOG + return + self.dbcon.bulk_write(mongo_changes_bulk)
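update_entities depends on from_dict_to_set, which is imported elsewhere in this module; assuming it flattens nested change dicts into a MongoDB $set document, its behavior would look roughly like:

def from_dict_to_set(changes, _prefix=""):
    # e.g. {"data": {"fps": 25}} -> {"$set": {"data.fps": 25}} (assumed behavior)
    flat = {}
    for key, value in changes.items():
        path = "{}.{}".format(_prefix, key) if _prefix else key
        if isinstance(value, dict):
            flat.update(from_dict_to_set(value, path)["$set"])
        else:
            flat[path] = value
    return {"$set": flat}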
+ + def reload_parents(self, hierarchy_changing_ids): + parents_queue = queue.Queue() + parents_queue.put((self.ft_project_id, [], False)) + while not parents_queue.empty(): + ftrack_id, parent_parents, changed = parents_queue.get() + _parents = parent_parents.copy() + if ftrack_id not in hierarchy_changing_ids and not changed: + if ftrack_id != self.ft_project_id: + _parents.append(self.entities_dict[ftrack_id]["name"]) + for child_id in self.entities_dict[ftrack_id]["children"]: + parents_queue.put((child_id, _parents, changed)) + continue + + changed = True + parents = [par for par in _parents] + hierarchy = "/".join(parents) + self.entities_dict[ftrack_id][ + "final_entity"]["data"]["parents"] = parents + self.entities_dict[ftrack_id][ + "final_entity"]["data"]["hierarchy"] = hierarchy + + _parents.append(self.entities_dict[ftrack_id]["name"]) + for child_id in self.entities_dict[ftrack_id]["children"]: + parents_queue.put((child_id, _parents, changed)) + + if ftrack_id in self.create_ftrack_ids: + mongo_id = self.ftrack_avalon_mapper[ftrack_id] + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + self.updates[mongo_id]["data"]["parents"] = parents + self.updates[mongo_id]["data"]["hierarchy"] = hierarchy + + def prepare_project_changes(self): + ftrack_ent_dict = self.entities_dict[self.ft_project_id] + ftrack_entity = ftrack_ent_dict["entity"] + avalon_code = self.avalon_project["data"]["code"] + # TODO Is it possible to sync if the full name was changed? + # if ftrack_ent_dict["name"] != self.avalon_project["name"]: + # ftrack_entity["full_name"] = avalon_name + # self.entities_dict[self.ft_project_id]["name"] = avalon_name + # self.entities_dict[self.ft_project_id]["final_entity"][ + # "name" + # ] = avalon_name + + # TODO logging + # TODO report + # TODO Can this happen? Is it possible to change the project code? + if ftrack_entity["name"] != avalon_code: + ftrack_entity["name"] = avalon_code + self.entities_dict[self.ft_project_id]["final_entity"]["data"][ + "code" + ] = avalon_code + self.session.commit() + sub_msg = ( + "Project code was changed back to \"{}\"".format(avalon_code) + ) + msg = ( + "It is not possible to change" + " project code after synchronization" + ) + self.report_items["warning"][msg] = sub_msg + self.log.warning(sub_msg) + + return self.compare_dict( + self.entities_dict[self.ft_project_id]["final_entity"], + self.avalon_project + ) + + def compare_dict(self, dict_new, dict_old, _ignore_keys=[]): + # _ignore_keys may be used for nested dict keys like "data.visualParent" + changes = {} + ignore_keys = [] + for key_val in _ignore_keys: + key_items = key_val.split(".") + if len(key_items) == 1: + ignore_keys.append(key_items[0]) + + for key, value in dict_new.items(): + if key in ignore_keys: + continue + + if key not in dict_old: + changes[key] = value + continue + + if isinstance(value, dict): + if not isinstance(dict_old[key], dict): + changes[key] = value + continue + + _new_ignore_keys = [] + for key_val in _ignore_keys: + key_items = key_val.split(".") + if len(key_items) <= 1: + continue + _new_ignore_keys.append(".".join(key_items[1:])) + + _changes = self.compare_dict( + value, dict_old[key], _new_ignore_keys + ) + if _changes: + changes[key] = _changes + continue + + if value != dict_old[key]: + changes[key] = value + + return changes + + def merge_dicts(self, dict_new, dict_old): + for key, value in dict_new.items(): + if key not in dict_old: + dict_old[key] = value + continue + + if isinstance(value, dict): + dict_old[key] = self.merge_dicts(value, dict_old[key]) + continue + + dict_old[key] = value + + return dict_old
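A quick worked example of how compare_dict and merge_dicts compose (documents made up for illustration): comparing new against old with _ignore_keys=["data.visualParent"] yields only the real change, and merging folds it back:

new = {"name": "sh010", "data": {"fps": 25, "visualParent": "a"}}
old = {"name": "sh010", "data": {"fps": 24, "visualParent": "b"}}
# compare_dict(new, old, ["data.visualParent"]) -> {"data": {"fps": 25}}
# merge_dicts({"data": {"fps": 25}}, old) -> old now carries fps 25 while
# the ignored "visualParent" value stays untouched.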
+ + def delete_entities(self): + if not self.deleted_entities: + return + # Try to order so a child is not processed before its parent + deleted_entities = [] + _deleted_entities = [id for id in self.deleted_entities] + + while True: + if not _deleted_entities: + break + _ready = [] + for mongo_id in _deleted_entities: + ent = self.avalon_ents_by_id[mongo_id] + vis_par = ent["data"]["visualParent"] + if ( + vis_par is not None and + str(vis_par) in self.deleted_entities + ): + continue + _ready.append(mongo_id) + + for id in _ready: + deleted_entities.append(id) + _deleted_entities.remove(id) + + delete_ids = [] + for mongo_id in deleted_entities: + # delete if they are deletable + if self.changeability_by_mongo_id[mongo_id]: + delete_ids.append(ObjectId(mongo_id)) + continue + + # check if any newly created entity matches the same entity + # - name and parents must match + deleted_entity = self.avalon_ents_by_id[mongo_id] + name = deleted_entity["name"] + parents = deleted_entity["data"]["parents"] + similar_ent_id = None + for ftrack_id in self.create_ftrack_ids: + _ent_final = self.entities_dict[ftrack_id]["final_entity"] + if _ent_final["name"] != name: + continue + if _ent_final["data"]["parents"] != parents: + continue + + # If the "same" entity is in create then we can "archive" the current + # one since it will be unarchived in the create method + similar_ent_id = ftrack_id + break + + # If a similar entity (same name and parents) is in the create + # entities list then just change it from create to update + if similar_ent_id is not None: + self.create_ftrack_ids.remove(similar_ent_id) + self.update_ftrack_ids.append(similar_ent_id) + self.avalon_ftrack_mapper[mongo_id] = similar_ent_id + self.ftrack_avalon_mapper[similar_ent_id] = mongo_id + continue + + found_by_name_id = None + for ftrack_id, ent_dict in self.entities_dict.items(): + if not ent_dict.get("name"): + continue + + if name == ent_dict["name"]: + found_by_name_id = ftrack_id + break + + if found_by_name_id is not None: + # * THESE conditions are too complex to implement in the first stage + # - probably not possible to solve if this happens + # if found_by_name_id in self.create_ftrack_ids: + # # reparent entity of the new one create? + # pass + # + # elif found_by_name_id in self.update_ftrack_ids: + # found_mongo_id = self.ftrack_avalon_mapper[found_by_name_id] + # + # ent_dict = self.entities_dict[found_by_name_id] + + # TODO report - CRITICAL an entity with the same name already exists in a + # different hierarchy - can't recreate the entity + continue + + _vis_parent = deleted_entity["data"]["visualParent"] + if _vis_parent is None: + _vis_parent = self.avalon_project_id + _vis_parent = str(_vis_parent) + ftrack_parent_id = self.avalon_ftrack_mapper[_vis_parent] + self.create_ftrack_ent_from_avalon_ent( + deleted_entity, ftrack_parent_id + ) + + filter = {"_id": {"$in": delete_ids}, "type": "asset"} + self.dbcon.update_many(filter, {"$set": {"type": "archived_asset"}}) + + def create_ftrack_ent_from_avalon_ent(self, av_entity, parent_id): + new_entity = None + parent_entity = self.entities_dict[parent_id]["entity"] + + _name = av_entity["name"] + _type = av_entity["data"].get("entityType", "folder") + + self.log.debug(( + "Re-creating deleted entity {} <{}>" + ).format(_name, _type)) + + new_entity = self.session.create(_type, { + "name": _name, + "parent": parent_entity + }) + + final_entity = {} + for k, v in av_entity.items(): + final_entity[k] = v + + if final_entity.get("type") != "asset": + final_entity["type"] = "asset" + + new_entity_id = new_entity["id"] + new_entity_data = { + "entity": new_entity, + "parent_id": parent_id, + "entity_type": _type.lower(), + "entity_type_orig": _type, + "name": _name, + "final_entity": final_entity + } + for k, v in new_entity_data.items(): + self.entities_dict[new_entity_id][k] = v + + p_children = self.entities_dict[parent_id]["children"] + if new_entity_id not in p_children: + self.entities_dict[parent_id]["children"].append(new_entity_id) + + cust_attr, hier_attrs = get_avalon_attr(self.session) + for _attr in cust_attr: + key = _attr["key"] + if key not in av_entity["data"]: + continue + + if key not in new_entity["custom_attributes"]: + continue + + value = av_entity["data"][key] + if not value: + continue + + new_entity["custom_attributes"][key] = value + + av_entity_id = str(av_entity["_id"]) + new_entity["custom_attributes"][CustAttrIdKey] = av_entity_id + + self.ftrack_avalon_mapper[new_entity_id] = av_entity_id + self.avalon_ftrack_mapper[av_entity_id] = new_entity_id + + self.session.commit() + + ent_path = self.get_ent_path(new_entity_id) + msg = ( + "Deleted entity was recreated because it or its children" + " contain published data" + ) + + self.report_items["info"][msg].append(ent_path) + + return new_entity_id
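The validation behind the report below allows only letters, numbers and underscores in entity names; the equivalent standalone check (the exact pattern is assumed from the message text):

import re

name_pattern = re.compile(r"^[a-zA-Z0-9_]+$")
assert name_pattern.match("sh010_cleanup")
assert not name_pattern.match("sh010 cleanup")  # space is a prohibited symbol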
+ + def regex_duplicate_interface(self): + items = [] + if self.failed_regex or self.tasks_failed_regex: + subtitle = "Entity names contain prohibited symbols:" + items.append({ + "type": "label", + "value": "# {}".format(subtitle) + }) + items.append({ + "type": "label", + "value": ( + "\n\nNOTE: You can use Letters( a-Z )," + " Numbers( 0-9 ) and Underscore( _ )\n\n" + ) + }) + log_msgs = [] + for name, ids in self.failed_regex.items(): + error_title = { + "type": "label", + "value": "## {}".format(name) + } + items.append(error_title) + paths = [] + for entity_id in ids: + ent_path = self.get_ent_path(entity_id) + paths.append(ent_path) + + error_message = { + "type": "label", + "value": '\n\n{}\n\n'.format("\n".join(paths)) + } + items.append(error_message) + log_msgs.append("<{}> ({})".format(name, ",".join(paths))) + + for name, ids in self.tasks_failed_regex.items(): + error_title = { + "type": "label", + "value": "## Task: {}".format(name) + } + items.append(error_title) + paths = [] + for entity_id in ids: + ent_path = self.get_ent_path(entity_id) + ent_path = "/".join([ent_path, name]) + paths.append(ent_path) + + error_message = { + "type": "label", + "value": '\n\n{}\n\n'.format("\n".join(paths)) + } + items.append(error_message) + log_msgs.append("<{}> ({})".format(name, ",".join(paths))) + + self.log.warning("{} {}".format(subtitle, ", ".join(log_msgs))) + + if self.duplicates: + subtitle = "Duplicated entity names:" + items.append({ + "type": "label", + "value": "# {}".format(subtitle) + }) + items.append({ + "type": "label", + "value": ( + "\n\nNOTE: It is not allowed to use the same name" + " for multiple entities in the same project\n\n" + ) + }) + log_msgs = [] + for name, ids in self.duplicates.items(): + error_title = { + "type": "label", + "value": "## {}".format(name) + } + items.append(error_title) + paths = [] + for entity_id in ids: + ent_path = self.get_ent_path(entity_id) + paths.append(ent_path) + + error_message = { + "type": "label", + "value": '\n\n{}\n\n'.format("\n".join(paths)) + } + items.append(error_message) + log_msgs.append("<{}> ({})".format(name, ", ".join(paths))) + + self.log.warning("{} {}".format(subtitle, ", ".join(log_msgs))) + + return items + + def report(self): + items = [] + project_name = self.entities_dict[self.ft_project_id]["name"] + title = "Synchronization report ({}):".format(project_name) + + keys = ["error", "warning", "info"] + for key in keys: + subitems = [] + if key == "warning": + for _item in self.regex_duplicate_interface(): + subitems.append(_item) + + for msg, _items in self.report_items[key].items(): + if not _items: + continue + + subitems.append({ + "type": "label", + "value": "# {}".format(msg) + }) + if isinstance(_items, str): + _items = [_items] + subitems.append({ + "type": "label", + "value": '\n\n{}\n\n'.format("\n".join(_items)) + }) + + if items and subitems: + items.append(self.report_splitter) + + items.extend(subitems) + + return { + "items": items, + "title": title, + "success": False, + "message": "Synchronization Finished" + }
diff --git a/pype/ftrack/lib/credentials.py b/pype/ftrack/lib/credentials.py index ad47893867..7e305942f2 100644 --- a/pype/ftrack/lib/credentials.py +++ b/pype/ftrack/lib/credentials.py @@ -1,6 +1,6 @@ import os import json -from pype.vendor import ftrack_api +import ftrack_api import appdirs
diff --git a/pype/ftrack/lib/custom_db_connector.py b/pype/ftrack/lib/custom_db_connector.py index 11fd197555..8e8eab8740 100644 --- a/pype/ftrack/lib/custom_db_connector.py +++ b/pype/ftrack/lib/custom_db_connector.py @@ -22,7 +22,12 @@ import pymongo from pymongo.client_session import ClientSession class NotActiveTable(Exception): - pass + def __init__(self, *args, **kwargs): + msg = "Active table is not set. (This is a bug.)" + if not (args or kwargs): + args = (msg,) + super().__init__(*args, **kwargs) + def auto_reconnect(func): """Handling auto reconnect in 3 retry times""" @@ -37,7 +42,16 @@ time.sleep(0.1) else: raise + return decorated + +def check_active_table(func): + """Check if DbConnector has an active table before a db method is called""" + @functools.wraps(func) + def decorated(obj, *args, **kwargs): + if not obj.active_table: + raise NotActiveTable() + return func(obj, *args, **kwargs) return decorated @@ -53,7 +67,6 @@ def check_active_table(func): class DbConnector: - log = logging.getLogger(__name__) timeout = 1000 @@ -68,10 +81,18 @@ self.active_table = table_name + def __getitem__(self, key): + # gives direct access to a collection without setting `active_table` + return self._database[key] + def __getattribute__(self, attr): + # not all methods of the PyMongo database are implemented; with this + # it is possible to use them too try: - return super().__getattribute__(attr) + return super(DbConnector, self).__getattribute__(attr) except AttributeError: + if self.active_table is None: + raise NotActiveTable() return self._database[self.active_table].__getattribute__(attr) def install(self): @@ -131,6 +152,12 @@ def exist_table(self, table_name): return table_name in self.tables() + def create_table(self, name, **options): + if self.exist_table(name): + return + + return self._database.create_collection(name, **options) + def tables(self): """List available tables Returns: @@ -166,18 +196,21 @@ @check_active_table @auto_reconnect def find(self, filter, projection=None, sort=None, **options): - options["projection"] = projection options["sort"] = sort - return self._database[self.active_table].find(filter, **options) + return self._database[self.active_table].find( + filter, projection, **options + ) @check_active_table @auto_reconnect def find_one(self, filter, projection=None, sort=None, **options): assert isinstance(filter, dict), "filter must be <dict>" - - options["projection"] = projection options["sort"] = sort - return self._database[self.active_table].find_one(filter, **options) + return self._database[self.active_table].find_one( + filter, + projection, + **options + ) @check_active_table @auto_reconnect @@ -202,8 +235,8 @@ - def distinct(self, *args, **kwargs): - return self._database[self.active_table].distinct(*args, **kwargs) + def distinct(self, **options): + return self._database[self.active_table].distinct(**options) @check_active_table @auto_reconnect @@ -216,10 +249,14 @@ @auto_reconnect def delete_one(self, filter, collation=None, **options): options["collation"] = collation - return self._database[self.active_table].delete_one(filter, **options) + return self._database[self.active_table].delete_one( + filter, **options + ) @check_active_table @auto_reconnect def delete_many(self, filter, collation=None, **options): options["collation"] = collation - return self._database[self.active_table].delete_many(filter, **options) + return self._database[self.active_table].delete_many( + filter, **options + )
diff --git a/pype/ftrack/lib/ftrack_app_handler.py b/pype/ftrack/lib/ftrack_app_handler.py index dbb38a3247..9dc735987d 100644 --- a/pype/ftrack/lib/ftrack_app_handler.py +++ b/pype/ftrack/lib/ftrack_app_handler.py @@ -345,25 +345,44 @@ statuses = presets['status_update'] actual_status = entity['status']['name'].lower() - next_status_name = None - for key, value in statuses.items(): - if actual_status in value or '_any_' in value: - if key != '_ignore_': - next_status_name = key + already_tested = [] + ent_path = "/".join( + [ent["name"] for ent in entity['link']] + ) + while True: + next_status_name = None + for key, value in statuses.items(): + if key in already_tested: + continue + if actual_status in value or '_any_' in value: + if key != '_ignore_': + next_status_name = key + already_tested.append(key) + break + already_tested.append(key) + + if next_status_name is None: break - if next_status_name is not None: try: query = 'Status where name is "{}"'.format( next_status_name ) status = session.query(query).one() + entity['status'] = status session.commit() + self.log.debug("Changing status to \"{}\" <{}>".format( + next_status_name, ent_path + )) + break + except Exception: + session.rollback() msg = ( - 'Status "{}" in presets wasn\'t found on Ftrack' - ).format(next_status_name) + 'Status "{}" in presets wasn\'t found' + ' on Ftrack entity type "{}"' + ).format(next_status_name, entity.entity_type) self.log.warning(msg) # Set origin avalon environments
diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py index 13e1cae9a9..8329505ffb 100644 --- a/pype/ftrack/lib/ftrack_base_handler.py +++ b/pype/ftrack/lib/ftrack_base_handler.py @@ -1,9 +1,8 @@ import functools import time from pypeapp import Logger -from pype.vendor import ftrack_api -from pype.vendor.ftrack_api import session as fa_session -from pype.ftrack.ftrack_server import session_processor +import ftrack_api +from pype.ftrack.ftrack_server.lib import SocketSession class MissingPermision(Exception): @@ -13,6 +12,13 @@ super().__init__(message) +class PreregisterException(Exception): + def __init__(self, message=None): + if not message: + message = "Pre-registration conditions were not met" + super().__init__(message) + + class BaseHandler(object): '''Custom Action base class @@ -35,7 +41,7 @@ self.log = Logger().get_logger(self.__class__.__name__) if not( isinstance(session, ftrack_api.session.Session) or - isinstance(session, session_processor.ProcessSession) + isinstance(session, SocketSession) ): raise Exception(( "Session object entered with args is instance of \"{}\"" @@ -89,15 +95,17 @@ '!{} "{}" - You\'re missing required {} permissions' ).format(self.type, label, str(MPE))) except
AssertionError as ae: - self.log.info(( + self.log.warning(( '!{} "{}" - {}' ).format(self.type, label, str(ae))) except NotImplementedError: self.log.error(( '{} "{}" - Register method is not implemented' - ).format( - self.type, label) - ) + ).format(self.type, label)) + except PreregisterException as exc: + self.log.warning(( + '{} "{}" - {}' + ).format(self.type, label, str(exc))) except Exception as e: self.log.error('{} "{}" - Registration failed ({})'.format( self.type, label, str(e)) @@ -119,6 +127,7 @@ try: return func(*args, **kwargs) except Exception as exc: + self.session.rollback() msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc)) self.log.error(msg, exc_info=True) return { @@ -163,10 +172,10 @@ if result is True: return - msg = "Pre-register conditions were not met" + msg = None if isinstance(result, str): msg = result - raise Exception(msg) + raise PreregisterException(msg) def preregister(self): ''' @@ -233,7 +242,7 @@ _entities is None or _entities[0].get( 'link', None - ) == fa_session.ftrack_api.symbol.NOT_SET + ) == ftrack_api.symbol.NOT_SET ): _entities = self._get_entities(event) @@ -437,7 +446,7 @@ 'applicationId=ftrack.client.web and user.id="{0}"' ).format(user_id) self.session.event_hub.publish( - fa_session.ftrack_api.event.base.Event( + ftrack_api.event.base.Event( topic='ftrack.action.trigger-user-interface', data=dict( type='message', @@ -485,8 +494,8 @@ if not user: raise TypeError(( - 'Ftrack user with {} "{}" was not found!'.format(key, value) - )) + 'Ftrack user with {} "{}" was not found!' + ).format(key, value)) user_id = user['id'] @@ -495,7 +504,7 @@ ).format(user_id) self.session.event_hub.publish( - fa_session.ftrack_api.event.base.Event( + ftrack_api.event.base.Event( topic='ftrack.action.trigger-user-interface', data=dict( type='widget', @@ -523,7 +532,7 @@ else: first = False - subtitle = {'type': 'label', 'value':'\n\n{}\n\n'.format(key)} + subtitle = {'type': 'label', 'value': '\n\n{}\n\n'.format(key)} items.append(subtitle) if isinstance(value, list): for item in value: @@ -583,7 +592,7 @@ # Create and trigger event session.event_hub.publish( - fa_session.ftrack_api.event.base.Event( + ftrack_api.event.base.Event( topic=topic, data=_event_data, source=dict(user=_user_data) @@ -593,3 +602,24 @@ self.log.debug( "Action \"{}\" Triggered successfully".format(action_name) ) + + def trigger_event( + self, topic, event_data={}, session=None, source=None, + event=None, on_error="ignore" + ): + if session is None: + session = self.session + + if not source and event: + source = event.get("source") + # Create and trigger event + event = ftrack_api.event.base.Event( + topic=topic, + data=event_data, + source=source + ) + session.event_hub.publish(event, on_error=on_error) + + self.log.debug(( + "Publishing event: {}" + ).format(str(event.__dict__)))
diff --git a/pype/ftrack/lib/ftrack_event_handler.py b/pype/ftrack/lib/ftrack_event_handler.py index db55eef16e..80a86527ab 100644 --- a/pype/ftrack/lib/ftrack_event_handler.py +++ b/pype/ftrack/lib/ftrack_event_handler.py @@ -26,6 +26,7 @@ try: func(*args, **kwargs) except Exception as exc: + self.session.rollback() self.log.error( 'Event "{}" Failed: {}'.format( self.__class__.__name__, str(exc)
diff --git a/pype/ftrack/lib/io_nonsingleton.py b/pype/ftrack/lib/io_nonsingleton.py index 3490ef03be..6380e4eb23 100644 --- a/pype/ftrack/lib/io_nonsingleton.py +++ b/pype/ftrack/lib/io_nonsingleton.py @@ -50,6 +50,19 @@ self._database = None self._is_installed = False + def __getitem__(self, key): + # gives direct access to a collection without setting `active_table` + return self._database[key] + + def __getattribute__(self, attr): + # not all methods of the PyMongo database are implemented; with this + # it is possible to use them too + try: + return super(DbConnector, self).__getattribute__(attr) + except AttributeError: + cur_proj = self.Session["AVALON_PROJECT"] + return self._database[cur_proj].__getattribute__(attr) + def install(self): """Establish a persistent connection to the database""" if self._is_installed:
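Both connector classes above rely on the same trick: attribute lookups that fail on the wrapper fall through to the underlying PyMongo collection. A compact standalone sketch (the class name is invented):

class CollectionProxy(object):
    def __init__(self, database, table):
        self._database = database
        self.active_table = table

    def __getattribute__(self, attr):
        # Prefer attributes defined on the proxy itself; anything else is
        # delegated to the active PyMongo collection.
        try:
            return super(CollectionProxy, self).__getattribute__(attr)
        except AttributeError:
            return getattr(self._database[self.active_table], attr)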
diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index ce2754c25d..8da97da56b 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -4,9 +4,9 @@ import threading import time from Qt import QtCore, QtGui, QtWidgets -from pype.vendor import ftrack_api +import ftrack_api from pypeapp import style -from pype.ftrack import FtrackServer, credentials +from pype.ftrack import FtrackServer, check_ftrack_url, credentials from . import login_dialog from pype import api as pype @@ -24,7 +24,8 @@ self.thread_timer = None self.bool_logged = False - self.bool_action_server = False + self.bool_action_server_running = False + self.bool_action_thread_running = False self.bool_timer_event = False def show_login_widget(self): @@ -74,28 +75,50 @@ # Actions part def start_action_server(self): + if ( + self.thread_action_server is not None and + self.bool_action_thread_running is False + ): + self.stop_action_server() + + self.bool_action_thread_running = True + self.set_menu_visibility() + if self.thread_action_server is None: self.thread_action_server = threading.Thread( target=self.set_action_server ) - self.thread_action_server.daemon = True self.thread_action_server.start() - log.info("Ftrack action server launched") - self.bool_action_server = True - self.set_menu_visibility() - def set_action_server(self): - try: - self.action_server.run_server() - except Exception as exc: - log.error( - "Ftrack Action server crashed! Please try to start again.", - exc_info=True + first_check = True + while self.bool_action_thread_running is True: + if not check_ftrack_url(os.environ['FTRACK_SERVER']): + if first_check: + log.warning( + "Could not connect to Ftrack server" + ) + first_check = False + time.sleep(1) + continue + log.info( + "Connected to Ftrack server. Running actions session" ) - # TODO show message to user - self.bool_action_server = False + try: + self.bool_action_server_running = True + self.set_menu_visibility() + self.action_server.run_server() + if self.bool_action_thread_running: + log.debug("Ftrack action server has stopped") + except Exception: + log.warning( + "Ftrack Action server crashed. Trying to connect again", + exc_info=True + ) + self.bool_action_server_running = False self.set_menu_visibility() + first_check = True + + self.bool_action_thread_running = False def reset_action_server(self): self.stop_action_server() @@ -103,16 +126,21 @@ def stop_action_server(self): try: + self.bool_action_thread_running = False self.action_server.stop_session() if self.thread_action_server is not None: self.thread_action_server.join() self.thread_action_server = None - log.info("Ftrack action server stopped") - self.bool_action_server = False + log.info("Ftrack action server was forced to stop") + + self.bool_action_server_running = False self.set_menu_visibility() - except Exception as e: - log.error("During Killing action server: {0}".format(e)) + except Exception: + log.warning( + "Error happened while killing the action server", + exc_info=True + ) # Definition of Tray menu def tray_menu(self, parent_menu): @@ -158,6 +186,9 @@ def tray_start(self): self.validate() + def tray_exit(self): + self.stop_action_server() + # Definition of visibility of each menu actions def set_menu_visibility(self): @@ -170,9 +201,9 @@ self.stop_timer_thread() return - self.aRunActionS.setVisible(not self.bool_action_server) - self.aResetActionS.setVisible(self.bool_action_server) - self.aStopActionS.setVisible(self.bool_action_server) + self.aRunActionS.setVisible(not self.bool_action_thread_running) + self.aResetActionS.setVisible(self.bool_action_thread_running) + self.aStopActionS.setVisible(self.bool_action_thread_running) if self.bool_timer_event is False: self.start_timer_thread()
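The action-server thread in the hunk above is essentially a supervisor loop; reduced to its core it does the following (a sketch, with the server object and callables as stand-ins for the module's attributes):

import time

def supervise(server, is_running, check_url):
    first_check = True
    while is_running():
        if not check_url():
            if first_check:
                print("Could not connect to Ftrack server")
                first_check = False
            time.sleep(1)
            continue
        try:
            server.run_server()  # blocks until the session stops or crashes
        except Exception:
            first_check = True  # go back to waiting for a reachable server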
diff --git a/pype/lib.py b/pype/lib.py index 6f6895085e..c8fade7f4a 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1,16 +1,12 @@ import os import re import logging -import importlib import itertools import contextlib import subprocess import inspect -from .vendor import pather -from .vendor.pather.error import ParseError - -import avalon.io as io +from avalon import io import avalon.api import avalon @@ -21,12 +17,15 @@ log = logging.getLogger(__name__) def _subprocess(args): """Convenience method for getting output errors for subprocess.""" + # make sure the environment contains only strings + env = {k: str(v) for k, v in os.environ.items()} + proc = subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, - env=os.environ + env=env ) output = proc.communicate()[0] @@ -562,7 +561,7 @@ find_dict = {"type": "representation", "parent": version_sel["_id"]} - filter_repr = {"$or": [{"name": repr} for repr in representations]} + filter_repr = {"name": {"$in": representations}} find_dict.update(filter_repr) repres_out = [i for i in io.find(find_dict)] @@ -572,3 +571,43 @@ "representaions": repres_out} return output_dict + + +class CustomNone: + """Created object can be used as a custom None (not equal to None). + + WARNING: Multiple created objects are not equal to each other either. + Example: + >>> a = CustomNone() + >>> a == None + False + >>> b = CustomNone() + >>> a == b + False + >>> a == a + True + """ + + def __init__(self): + """Create uuid as identifier for custom None.""" + import uuid + self.identifier = str(uuid.uuid4()) + + def __bool__(self): + """Return False (like default None).""" + return False + + def __eq__(self, other): + """Equality is compared by identifier value.""" + if type(other) == type(self): + if other.identifier == self.identifier: + return True + return False + + def __str__(self): + """Return value of identifier when converted to string.""" + return self.identifier + + def __repr__(self): + """Representation of custom None.""" + return "<CustomNone {}>".format(str(self.identifier))
diff --git a/pype/logging/gui/app.py b/pype/logging/gui/app.py index 7cee280158..9767077f80 100644 --- a/pype/logging/gui/app.py +++ b/pype/logging/gui/app.py @@ -33,5 +33,7 @@ def on_selection_changed(self): index = self.logs_widget.selected_log() + if not index or not index.isValid(): + return node = index.data(self.logs_widget.model.NodeRole) self.log_detail.set_detail(node)
diff --git a/pype/logging/gui/widgets.py b/pype/logging/gui/widgets.py index 66692c2c65..10aad3c282 100644 --- a/pype/logging/gui/widgets.py +++ b/pype/logging/gui/widgets.py @@ -1,11 +1,7 @@ -import datetime -import inspect +import getpass from Qt import QtCore, QtWidgets, QtGui -from PyQt5.QtCore import QVariant from .models import LogModel -from .lib import preserve_states - class SearchComboBox(QtWidgets.QComboBox): """Searchable ComboBox with empty placeholder value as first value""" @@ -53,6 +49,7 @@ return text + class CheckableComboBox2(QtWidgets.QComboBox): def __init__(self, parent=None): super(CheckableComboBox, self).__init__(parent) @@ -96,9 +93,11 @@ else: super(SelectableMenu, self).mouseReleaseEvent(event) + class CustomCombo(QtWidgets.QWidget): selection_changed = QtCore.Signal() + checked_changed = QtCore.Signal(bool) def __init__(self, title, parent=None): super(CustomCombo, self).__init__(parent) @@ -127,12 +126,27 @@ self.toolmenu.clear() self.addItems(items) + def select_items(self, items, ignore_input=False): + if not 
isinstance(items, list): + items = [items] + + for action in self.toolmenu.actions(): + check = True + if ( + action.text() in items and ignore_input or + action.text() not in items and not ignore_input + ): + check = False + + action.setChecked(check) + def addItems(self, items): for item in items: action = self.toolmenu.addAction(item) action.setCheckable(True) - action.setChecked(True) self.toolmenu.addAction(action) + action.setChecked(True) + action.triggered.connect(self.checked_changed) def items(self): for action in self.toolmenu.actions(): @@ -186,15 +200,42 @@ class CheckableComboBox(QtWidgets.QComboBox): for text, checked in items: text_item = QtGui.QStandardItem(text) checked_item = QtGui.QStandardItem() - checked_item.setData(QVariant(checked), QtCore.Qt.CheckStateRole) + checked_item.setData( + QtCore.QVariant(checked), QtCore.Qt.CheckStateRole + ) self.model.appendRow([text_item, checked_item]) +class FilterLogModel(QtCore.QSortFilterProxyModel): + sub_dict = ["$gt", "$lt", "$not"] + def __init__(self, key_values, parent=None): + super(FilterLogModel, self).__init__(parent) + self.allowed_key_values = key_values + + def filterAcceptsRow(self, row, parent): + """ + Reimplemented from base class. + """ + model = self.sourceModel() + for key, values in self.allowed_key_values.items(): + col_indx = model.COLUMNS.index(key) + value = model.index(row, col_indx, parent).data( + QtCore.Qt.DisplayRole + ) + if value not in values: + return False + return True + + class LogsWidget(QtWidgets.QWidget): """A widget that lists the published subsets for an asset""" active_changed = QtCore.Signal() + _level_order = [ + "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" + ] + def __init__(self, parent=None): super(LogsWidget, self).__init__(parent=parent) @@ -202,47 +243,45 @@ class LogsWidget(QtWidgets.QWidget): filter_layout = QtWidgets.QHBoxLayout() - # user_filter = SearchComboBox(self, "Users") user_filter = CustomCombo("Users", self) users = model.dbcon.distinct("user") user_filter.populate(users) - user_filter.selection_changed.connect(self.user_changed) + user_filter.checked_changed.connect(self.user_changed) + user_filter.select_items(getpass.getuser()) level_filter = CustomCombo("Levels", self) - # levels = [(level, True) for level in model.dbcon.distinct("level")] levels = model.dbcon.distinct("level") - level_filter.addItems(levels) + _levels = [] + for level in self._level_order: + if level in levels: + _levels.append(level) + level_filter.populate(_levels) + level_filter.checked_changed.connect(self.level_changed) - date_from_label = QtWidgets.QLabel("From:") - date_filter_from = QtWidgets.QDateTimeEdit() - - date_from_layout = QtWidgets.QVBoxLayout() - date_from_layout.addWidget(date_from_label) - date_from_layout.addWidget(date_filter_from) - - # now = datetime.datetime.now() - # QtCore.QDateTime(now.year, now.month, now.day, now.hour, now.minute, second = 0, msec = 0, timeSpec = 0) - date_to_label = QtWidgets.QLabel("To:") - date_filter_to = QtWidgets.QDateTimeEdit() - - date_to_layout = QtWidgets.QVBoxLayout() - date_to_layout.addWidget(date_to_label) - date_to_layout.addWidget(date_filter_to) + # date_from_label = QtWidgets.QLabel("From:") + # date_filter_from = QtWidgets.QDateTimeEdit() + # + # date_from_layout = QtWidgets.QVBoxLayout() + # date_from_layout.addWidget(date_from_label) + # date_from_layout.addWidget(date_filter_from) + # + # date_to_label = QtWidgets.QLabel("To:") + # date_filter_to = QtWidgets.QDateTimeEdit() + # + # date_to_layout = QtWidgets.QVBoxLayout() + 
# date_to_layout.addWidget(date_to_label) + # date_to_layout.addWidget(date_filter_to) filter_layout.addWidget(user_filter) filter_layout.addWidget(level_filter) + filter_layout.setAlignment(QtCore.Qt.AlignLeft) - filter_layout.addLayout(date_from_layout) - filter_layout.addLayout(date_to_layout) + # filter_layout.addLayout(date_from_layout) + # filter_layout.addLayout(date_to_layout) view = QtWidgets.QTreeView(self) view.setAllColumnsShowFocus(True) - # # Set view delegates - # time_delegate = PrettyTimeDelegate() - # column = model.COLUMNS.index("time") - # view.setItemDelegateForColumn(column, time_delegate) - layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addLayout(filter_layout) @@ -255,34 +294,54 @@ class LogsWidget(QtWidgets.QWidget): QtCore.Qt.AscendingOrder ) - view.setModel(model) + key_val = { + "user": users, + "level": levels + } + proxy_model = FilterLogModel(key_val, view) + proxy_model.setSourceModel(model) + view.setModel(proxy_model) view.customContextMenuRequested.connect(self.on_context_menu) view.selectionModel().selectionChanged.connect(self.active_changed) - # user_filter.connect() - # TODO remove if nothing will affect... - # header = self.view.header() + # WARNING this is cool but slows down widget a lot + # header = view.header() # # Enforce the columns to fit the data (purely cosmetic) # if Qt.__binding__ in ("PySide2", "PyQt5"): # header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents) # else: # header.setResizeMode(QtWidgets.QHeaderView.ResizeToContents) - # Set signals - # prepare model.refresh() # Store to memory self.model = model + self.proxy_model = proxy_model self.view = view self.user_filter = user_filter + self.level_filter = level_filter def user_changed(self): + valid_actions = [] for action in self.user_filter.items(): - print(action) + if action.isChecked(): + valid_actions.append(action.text()) + + self.proxy_model.allowed_key_values["user"] = valid_actions + self.proxy_model.invalidate() + + def level_changed(self): + valid_actions = [] + for action in self.level_filter.items(): + if action.isChecked(): + valid_actions.append(action.text()) + + self.proxy_model.allowed_key_values["level"] = valid_actions + self.proxy_model.invalidate() + def on_context_menu(self, point): # TODO will be any actions? it's ready diff --git a/pype/maya/lib.py b/pype/maya/lib.py index b9387b65fd..0890d3863e 100644 --- a/pype/maya/lib.py +++ b/pype/maya/lib.py @@ -3,6 +3,7 @@ import re import os import uuid +import math import bson import json @@ -1776,9 +1777,10 @@ def set_scene_fps(fps, update=True): # pull from mapping # this should convert float string to float and int to int # so 25.0 is converted to 25, but 23.98 will be still float. 
- decimals = int(str(fps-int(fps))[2:]) - if decimals == 0: - fps = int(fps) + dec, ipart = math.modf(fps) + if dec == 0.0: + fps = int(ipart) + unit = fps_mapping.get(str(fps), None) if unit is None: raise ValueError("Unsupported FPS value: `%s`" % fps) @@ -1861,6 +1863,7 @@ # Set project fps fps = asset_data.get("fps", project_data.get("fps", 25)) + api.Session["AVALON_FPS"] = fps set_scene_fps(fps) # Set project resolution
diff --git a/pype/muster/muster.py b/pype/muster/muster.py index 66c7a94994..1f92e57e83 100644 --- a/pype/muster/muster.py +++ b/pype/muster/muster.py @@ -43,8 +43,10 @@ self.aShowLogin.trigger() if "RestApiServer" in modules: + def api_show_login(): + self.aShowLogin.trigger() modules["RestApiServer"].register_callback( - "muster/show_login", api_callback, "post" + "/show_login", api_show_login, "muster", "post" ) # Definition of Tray menu
diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index b7dbf69510..141cf4c13d 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -112,7 +112,9 @@ # Disable all families except for the ones we explicitly want to see family_states = [ "write", - "review" + "review", + "nukenodes", + "gizmo" ] avalon.data["familiesStateDefault"] = False
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 157af9019d..f213b596ad 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -6,6 +6,7 @@ from collections import OrderedDict from avalon import api, io, lib import avalon.nuke +from avalon.nuke import lib as anlib import pype.api as pype import nuke @@ -105,6 +106,10 @@ for each in nuke.allNodes(): if each.Class() == 'Write': + # check if the node is avalon tracked + if "AvalonTab" not in each.knobs(): + continue + avalon_knob_data = avalon.nuke.get_avalon_knob_data( each, ['avalon:', 'ak:']) @@ -1190,3 +1195,209 @@ def position_up(self, multiply=1): self.ypos -= (self.ypos_size * multiply) + self.ypos_gap + + +class Exporter_review_lut: + """ + Generator object for a review lut from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + + """ + + def __init__(self, + klass, + instance, + name=None, + ext=None, + cube_size=None, + lut_size=None, + lut_style=None): + + # per-instance state (class-level mutables would be shared between instances) + self._temp_nodes = [] + self.data = { + "representations": [] + } + + self.log = klass.log + self.instance = instance + + self.name = name or "baked_lut" + self.ext = ext or "cube" + self.cube_size = cube_size or 32 + self.lut_size = lut_size or 1024 + self.lut_style = lut_style or "linear" + + self.stagingDir = self.instance.data["stagingDir"] + self.collection = self.instance.data.get("collection", None) + + # set frame start / end and file name to self + self.get_file_info() + + self.log.info("File info was set...") + + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/") + + def generate_lut(self): + # ---------- start nodes creation + + # CMSTestPattern + cms_node = nuke.createNode("CMSTestPattern") + cms_node["cube_size"].setValue(self.cube_size) + # connect + self._temp_nodes.append(cms_node) + self.previous_node = cms_node + self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) + + # Node View Process + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... 
`{}`".format(self._temp_nodes)) + + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + + # GenerateLUT + gen_lut_node = nuke.createNode("GenerateLUT") + gen_lut_node["file"].setValue(self.path) + gen_lut_node["file_type"].setValue(".{}".format(self.ext)) + gen_lut_node["lut1d"].setValue(self.lut_size) + gen_lut_node["style1d"].setValue(self.lut_style) + # connect + gen_lut_node.setInput(0, self.previous_node) + self._temp_nodes.append(gen_lut_node) + self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) + + # ---------- end nodes creation + + # Export lut file + nuke.execute( + gen_lut_node.name(), + int(self.first_frame), + int(self.first_frame)) + + self.log.info("Exported...") + + # ---------- generate representation data + self.get_representation_data() + + self.log.debug("Representation... `{}`".format(self.data)) + + # ---------- Clean up + for node in self._temp_nodes: + nuke.delete(node) + self.log.info("Deleted nodes...") + + return self.data + + def get_file_info(self): + if self.collection: + self.log.debug("Collection: `{}`".format(self.collection)) + # get path + self.fname = os.path.basename(self.collection.format( + "{head}{padding}{tail}")) + self.fhead = self.collection.format("{head}") + + # get first and last frame + self.first_frame = min(self.collection.indexes) + self.last_frame = max(self.collection.indexes) + else: + self.fname = os.path.basename(self.instance.data.get("path", None)) + self.fhead = os.path.splitext(self.fname)[0] + "." + self.first_frame = self.instance.data.get("frameStart", None) + self.last_frame = self.instance.data.get("frameEnd", None) + + if "#" in self.fhead: + self.fhead = self.fhead.replace("#", "")[:-1] + + def get_representation_data(self): + + repre = { + 'name': self.name, + 'ext': self.ext, + 'files': self.file, + "stagingDir": self.stagingDir, + "anatomy_template": "publish", + "tags": [self.name.replace("_", "-")] + } + + self.data["representations"].append(repre) + + def get_view_process_node(self): + """ + Get the active view process node, if any. + + Returns: + nuke.Node: copy of the Input Process node, or None + """ + anlib.reset_selection() + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # the pasted copy is the only selected node + return nuke.selectedNode() + + return None + 
+def get_dependent_nodes(nodes): + """Get all dependent nodes connected to the list of nodes. + + Looks for connections outside of the nodes in the incoming argument. + + Arguments: + nodes (list): list of nuke.Node objects + + Returns: + connections_in: dictionary of nodes and their dependencies + connections_out: dictionary of nodes and their dependent node + """ + + connections_in = dict() + connections_out = dict() + node_names = [n.name() for n in nodes] + for node in nodes: + inputs = node.dependencies() + outputs = node.dependent() + # collect all inputs outside + test_in = [(i, n) for i, n in enumerate(inputs) + if n.name() not in node_names] + if test_in: + connections_in.update({ + node: test_in + }) + # collect all outputs outside + test_out = [i for i in outputs if i.name() not in node_names] + if test_out: + # only one dependent node is allowed + connections_out.update({ + node: test_out[-1] + }) + + return connections_in, connections_out
diff --git a/pype/nuke/utils.py b/pype/nuke/utils.py new file mode 100644 index 0000000000..7583221696 --- /dev/null +++ b/pype/nuke/utils.py @@ -0,0 +1,64 @@ +import os +import nuke +from avalon.nuke import lib as anlib + + +def get_node_outputs(node): + ''' + Return a dictionary of the nodes and pipes that are connected to node + ''' + dep_dict = {} + dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS) + for d in dependencies: + dep_dict[d] = [] + for i in range(d.inputs()): + if d.input(i) == node: + dep_dict[d].append(i) + return dep_dict + + +def is_node_gizmo(node): + ''' + Return True if the node is a gizmo + ''' + return 'gizmo_file' in node.knobs() + + +def gizmo_is_nuke_default(gizmo): + '''Check if the gizmo is in the default install path''' + plug_dir = os.path.join(os.path.dirname( + nuke.env['ExecutablePath']), 'plugins') + return gizmo.filename().startswith(plug_dir) + + +def bake_gizmos_recursively(in_group=nuke.Root()): + """Convert gizmos to groups recursively + + Arguments: + in_group (nuke.Node)[optional]: group node or all nodes + """ + # preserve selection after all is done + with anlib.maintained_selection(): + # jump to the group + with in_group: + for node in nuke.allNodes(): + if is_node_gizmo(node) and not gizmo_is_nuke_default(node): + with node: + outputs = get_node_outputs(node) + group = node.makeGroup() + # Reconnect inputs and outputs if any + if outputs: + for n, pipes in outputs.items(): + for i in pipes: + n.setInput(i, group) + for i in range(node.inputs()): + group.setInput(i, node.input(i)) + # set node position and name + group.setXYpos(node.xpos(), node.ypos()) + name = node.name() + nuke.delete(node) + group.setName(name) + node = group + + if node.Class() == "Group": + bake_gizmos_recursively(node)
diff --git a/pype/plugins/aport/publish/collect_context.py b/pype/plugins/aport/publish/collect_context.py index 2aaa89fd05..35811d6378 100644 --- a/pype/plugins/aport/publish/collect_context.py +++ b/pype/plugins/aport/publish/collect_context.py @@ -1,9 +1,6 @@ import os import pyblish.api -from avalon import ( - io, - api as avalon -) +from avalon import api as avalon from pype import api as pype import json from pathlib import Path
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py index 87b14b612b..9fe4fddebf 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py @@ -1,5 +1,6 @@ import os import sys +import six import pyblish.api import clique @@ -125,6 +126,12 @@ metadata=asset_metadata ) ) + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + 
six.reraise(tp, value, tb) # Adding metadata existing_asset_metadata = asset_entity["metadata"] @@ -137,8 +144,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): "version": 0, "asset": asset_entity, } - if task: - assetversion_data['task'] = task assetversion_data.update(data.get("assetversion_data", {})) @@ -150,6 +155,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): # due to a ftrack_api bug where you can't add metadata on creation. assetversion_metadata = assetversion_data.pop("metadata", {}) + if task: + assetversion_data['task'] = task + # Create a new entity if none exits. if not assetversion_entity: assetversion_entity = session.create( @@ -162,6 +170,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): metadata=assetversion_metadata ) ) + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) # Adding metadata existing_assetversion_metadata = assetversion_entity["metadata"] @@ -170,7 +184,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): # Have to commit the version and asset, because location can't # determine the final location without. - session.commit() + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) # Component # Get existing entity. @@ -209,7 +228,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.delete(member) del(member) - session.commit() + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) # Reset members in memory if "members" in component_entity.keys(): @@ -320,4 +344,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) else: # Commit changes. - session.commit() + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py new file mode 100644 index 0000000000..9d0b7b3ab9 --- /dev/null +++ b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py @@ -0,0 +1,31 @@ +import sys +import pyblish.api +import six + + +class IntegrateFtrackComments(pyblish.api.InstancePlugin): + """Create comments in Ftrack.""" + + order = pyblish.api.IntegratorOrder + label = "Integrate Comments to Ftrack." 
+ families = ["shot"] + + def process(self, instance): + session = instance.context.data["ftrackSession"] + + entity = session.query( + "Shot where name is \"{}\"".format(instance.data["item"].name()) + ).one() + + notes = [] + for comment in instance.data["comments"]: + notes.append(session.create("Note", {"content": comment})) + + entity["notes"].extend(notes) + + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 976250da00..5e680a172a 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -28,7 +28,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'plate': 'img', 'audio': 'audio', 'workfile': 'scene', - 'animation': 'cache' + 'animation': 'cache', + 'image': 'img' } def process(self, instance): @@ -37,6 +38,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): if instance.data.get('version'): version_number = int(instance.data.get('version')) + else: + raise ValueError("Instance version not set") family = instance.data['family'].lower() @@ -113,6 +116,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): }, "assetversion_data": { "version": version_number, + "comment": instance.context.data.get("comment", "") }, "component_data": component_data, "component_path": comp['published_path'], diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py index 2f09f6d927..1deff56d83 100644 --- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py +++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py @@ -1,3 +1,6 @@ +import sys + +import six import pyblish.api from avalon import io @@ -66,9 +69,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # try to find if entity already exists else: - query = 'TypedContext where name is "{0}" and project.full_name is "{1}"'.format( - entity_name, self.ft_project["full_name"] - ) + query = ( + 'TypedContext where name is "{0}" and ' + 'project_id is "{1}"' + ).format(entity_name, self.ft_project["id"]) try: entity = self.session.query(query).one() except Exception: @@ -98,7 +102,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): for instance in instances: instance.data['ftrackEntity'] = entity - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) # TASKS tasks = entity_data.get('tasks', []) @@ -121,11 +130,21 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): task_type=task, parent=entity ) - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) # Incoming links. self.create_links(entity_data, entity) - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) if 'childs' in entity_data: self.import_to_ftrack( @@ -135,7 +154,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # Clear existing links. 
for link in entity.get("incoming_links", []): self.session.delete(link) - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) # Create new links. for input in entity_data.get("inputs", []): @@ -171,7 +195,12 @@ self.log.info(self.task_types) task['type'] = self.task_types[task_type] - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) return task @@ -180,6 +209,11 @@ 'name': name, 'parent': parent }) - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) return entity
diff --git a/pype/plugins/global/_publish_unused/collect_json.py b/pype/plugins/global/_publish_unused/collect_json.py index ba5fc29d12..dc5bfb9c81 100644 --- a/pype/plugins/global/_publish_unused/collect_json.py +++ b/pype/plugins/global/_publish_unused/collect_json.py @@ -3,7 +3,7 @@ import json import re import pyblish.api -from pype.vendor import clique +import clique class CollectJSON(pyblish.api.ContextPlugin):
diff --git a/pype/plugins/global/_publish_unused/extract_json.py b/pype/plugins/global/_publish_unused/extract_json.py index e218776638..8aff324574 100644 --- a/pype/plugins/global/_publish_unused/extract_json.py +++ b/pype/plugins/global/_publish_unused/extract_json.py @@ -4,7 +4,7 @@ import datetime import time import pyblish.api -from pype.vendor import clique +import clique class ExtractJSON(pyblish.api.ContextPlugin):
diff --git a/pype/plugins/global/_publish_unused/extract_quicktime.py b/pype/plugins/global/_publish_unused/extract_quicktime.py index 6a33d825d0..76a920b798 100644 --- a/pype/plugins/global/_publish_unused/extract_quicktime.py +++ b/pype/plugins/global/_publish_unused/extract_quicktime.py @@ -1,7 +1,7 @@ import os import pyblish.api import subprocess -from pype.vendor import clique +import clique class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
diff --git a/pype/plugins/global/publish/cleanup.py b/pype/plugins/global/publish/cleanup.py index 34123b31cf..6f761ca83c 100644 --- a/pype/plugins/global/publish/cleanup.py +++ b/pype/plugins/global/publish/cleanup.py @@ -40,6 +40,15 @@ active = True def process(self, instance): + # Get the errored instances + failed = [] + for result in instance.context.data["results"]: + if (result["error"] is not None and result["instance"] is not None + and result["instance"] not in failed): + failed.append(result["instance"]) + assert instance not in failed, ("Result of '{}' instance " + "was not successful".format(instance.data["name"])) + if [ef for ef in self.exclude_families if instance.data["family"] in ef]: return
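The commit/rollback/re-raise sequence repeated throughout the ftrack plugins above could be factored into one helper; a sketch (the helper name is invented):

import sys

import six

def safe_commit(session):
    # Commit the ftrack session; on failure roll back and re-raise the
    # original exception with its traceback preserved (Python 2/3 safe).
    try:
        session.commit()
    except Exception:
        tp, value, tb = sys.exc_info()
        session.rollback()
        six.reraise(tp, value, tb)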
paths: {}".format(paths)) @@ -144,6 +146,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.info("setting session using metadata") api.Session.update(session) os.environ.update(session) + instance = metadata.get("instance") + if instance: + instance_family = instance.get("family") + pixel_aspect = instance.get("pixelAspect", 1) + lut_path = instance.get("lutPath", None) + else: # Search in directory @@ -181,6 +189,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): families.append("ftrack") if "review" not in families: families.append("review") + if "write" in instance_family: + families.append("write") for collection in collections: instance = context.create_instance(str(collection)) @@ -197,6 +207,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): start = data.get("frameStart", indices[0]) end = data.get("frameEnd", indices[-1]) + self.log.debug("Collected pixel_aspect:\n" + "{}".format(pixel_aspect)) + self.log.debug("type pixel_aspect:\n" + "{}".format(type(pixel_aspect))) + # root = os.path.normpath(root) # self.log.info("Source: {}}".format(data.get("source", ""))) @@ -212,8 +227,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "frameStart": start, "frameEnd": end, "fps": fps, - "source": data.get('source', '') + "source": data.get('source', ''), + "pixelAspect": pixel_aspect, }) + if lut_path: + instance.data.update({"lutPath": lut_path}) instance.append(collection) instance.context.data['fps'] = fps diff --git a/pype/plugins/global/publish/collect_scene_version.py b/pype/plugins/global/publish/collect_scene_version.py index 0d76015909..2844a695e2 100644 --- a/pype/plugins/global/publish/collect_scene_version.py +++ b/pype/plugins/global/publish/collect_scene_version.py @@ -24,4 +24,4 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): rootVersion = pype.get_version_from_path(filename) context.data['version'] = rootVersion - self.log.info('Scene Version: %s' % context.data('version')) + self.log.info('Scene Version: %s' % context.data.get('version')) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index b80ca4ae1b..9b0c03fdee 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -85,3 +85,6 @@ class CollectTemplates(pyblish.api.InstancePlugin): instance.data["assumedDestination"] = os.path.dirname( (anatomy.format(template_data))["publish"]["path"] ) + self.log.info("Assumed Destination has been created...") + self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) + self.log.debug("__ template: `{}`".format(instance.data["template"])) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index b1569aaa45..95a7144081 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -29,11 +29,16 @@ class ExtractBurnin(pype.api.Extractor): if instance.context.data.get('version'): version = "v" + str(instance.context.data['version']) + frame_start = int(instance.data.get("frameStart") or 0) + frame_end = int(instance.data.get("frameEnd") or 1) + duration = frame_end - frame_start + 1 prep_data = { "username": instance.context.data['user'], "asset": os.environ['AVALON_ASSET'], "task": os.environ['AVALON_TASK'], - "start_frame": int(instance.data["frameStart"]), + "frame_start": frame_start, + "frame_end": frame_end, + "duration": duration, "version": version } 
self.log.debug("__ prep_data: {}".format(prep_data)) @@ -49,12 +54,17 @@ class ExtractBurnin(pype.api.Extractor): name = "_burnin" movieFileBurnin = filename.replace(".mov", "") + name + ".mov" - full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"]) - full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin) + full_movie_path = os.path.join( + os.path.normpath(stagingdir), repre["files"] + ) + full_burnin_path = os.path.join( + os.path.normpath(stagingdir), movieFileBurnin + ) self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) burnin_data = { "input": full_movie_path.replace("\\", "/"), + "codec": repre.get("codec", []), "output": full_burnin_path.replace("\\", "/"), "burnin_data": prep_data } diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 10c339e0c6..8a1a0b5e68 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -1,7 +1,7 @@ import os import pyblish.api -from pype.vendor import clique +import clique import pype.api diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 1a7dcced78..f621df0c66 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -1,9 +1,8 @@ import os - +import math import pyblish.api -from pype.vendor import clique +import clique import pype.api -from pypeapp import config class ExtractReview(pyblish.api.InstancePlugin): @@ -22,16 +21,19 @@ class ExtractReview(pyblish.api.InstancePlugin): families = ["review"] hosts = ["nuke", "maya", "shell"] + outputs = {} + ext_filter = [] + def process(self, instance): - # adding plugin attributes from presets - publish_presets = config.get_presets()["plugins"]["global"]["publish"] - plugin_attrs = publish_presets[self.__class__.__name__] - output_profiles = plugin_attrs.get("outputs", {}) + + output_profiles = self.outputs or {} inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - + resolution_height = instance.data.get("resolutionHeight", 1080) + resolution_width = instance.data.get("resolutionWidth", 1920) + pixel_aspect = instance.data.get("pixelAspect", 1) self.log.debug("Families In: `{}`".format(instance.data["families"])) # get representation and loop them @@ -40,7 +42,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # filter out mov and img sequences representations_new = representations[:] for repre in representations: - if repre['ext'] in plugin_attrs["ext_filter"]: + if repre['ext'] in self.ext_filter: tags = repre.get("tags", []) self.log.info("Try repre: {}".format(repre)) @@ -92,8 +94,9 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.info("p_tags: `{}`".format(p_tags)) # add families [instance.data["families"].append(t) - for t in p_tags - if t not in instance.data["families"]] + for t in p_tags + if t not in instance.data["families"]] + # add to [new_tags.append(t) for t in p_tags if t not in new_tags] @@ -147,21 +150,83 @@ class ExtractReview(pyblish.api.InstancePlugin): ) output_args = [] + codec_args = profile.get('codec', []) + output_args.extend(codec_args) # preset's output data output_args.extend(profile.get('output', [])) # letter_box - # TODO: add to documentation - lb = profile.get('letter_box', None) - if lb: + lb = profile.get('letter_box', 0) + if lb is not 0: + if "reformat" not in p_tags: + lb /= pixel_aspect output_args.append( - "-filter:v 
drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) + "-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) # In case audio is longer than video. output_args.append("-shortest") # output filename output_args.append(full_output_path) + + self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect)) + self.log.debug("__ resolution_width: `{}`".format(resolution_width)) + self.log.debug("__ resolution_height: `{}`".format(resolution_height)) + # scaling none square pixels and 1920 width + if "reformat" in p_tags: + width_scale = 1920 + width_half_pad = 0 + res_w = int(float(resolution_width) * pixel_aspect) + height_half_pad = int(( + (res_w - 1920) / ( + res_w * .01) * ( + 1080 * .01)) / 2 + ) + height_scale = 1080 - (height_half_pad * 2) + if height_scale > 1080: + height_half_pad = 0 + height_scale = 1080 + width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height))) ) / 2 + width_scale = int(1920 - (width_half_pad * 2)) + + self.log.debug("__ width_scale: `{}`".format(width_scale)) + self.log.debug("__ width_half_pad: `{}`".format(width_half_pad)) + self.log.debug("__ height_scale: `{}`".format(height_scale)) + self.log.debug("__ height_half_pad: `{}`".format(height_half_pad)) + + + scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format( + width_scale, height_scale, width_half_pad, height_half_pad + ) + + vf_back = self.add_video_filter_args( + output_args, scaling_arg) + # add it to output_args + output_args.insert(0, vf_back) + + # baking lut file application + lut_path = instance.data.get("lutPath") + if lut_path and ("bake-lut" in p_tags): + # removing Gama info as it is all baked in lut + gamma = next((g for g in input_args + if "-gamma" in g), None) + if gamma: + input_args.remove(gamma) + + # create lut argument + lut_arg = "lut3d=file='{}'".format( + lut_path.replace( + "\\", "/").replace(":/", "\\:/") + ) + lut_arg += ",colormatrix=bt601:bt709" + + vf_back = self.add_video_filter_args( + output_args, lut_arg) + # add it to output_args + output_args.insert(0, vf_back) + self.log.info("Added Lut to ffmpeg command") + self.log.debug("_ output_args: `{}`".format(output_args)) + mov_args = [ os.path.join( os.environ.get( @@ -183,7 +248,8 @@ class ExtractReview(pyblish.api.InstancePlugin): 'ext': ext, 'files': repr_file, "tags": new_tags, - "outputName": name + "outputName": name, + "codec": codec_args }) if repre_new.get('preview'): repre_new.pop("preview") @@ -207,3 +273,40 @@ class ExtractReview(pyblish.api.InstancePlugin): instance.data["representations"] = representations_new self.log.debug("Families Out: `{}`".format(instance.data["families"])) + + def add_video_filter_args(self, args, inserting_arg): + """ + Fixing video filter argumets to be one long string + + Args: + args (list): list of string arguments + inserting_arg (str): string argument we want to add + (without flag `-vf`) + + Returns: + str: long joined argument to be added back to list of arguments + + """ + # find all video format settings + vf_settings = [p for p in args + for v in ["-filter:v", "-vf"] + if v in p] + self.log.debug("_ vf_settings: `{}`".format(vf_settings)) + + # remove them from output args list + for p in vf_settings: + self.log.debug("_ remove p: `{}`".format(p)) + args.remove(p) + 
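The "reformat" branch above squeezes pixel-aspect-corrected sources into a 1920x1080 frame, padding whichever axis falls short. The same geometry written out as a standalone sketch (equivalent math, not the plugin code; rounding differs slightly from the hunk's percent-based form):

def hd_scaling_arg(width, height, pixel_aspect):
    # Display width once the pixel aspect ratio is applied.
    res_w = float(width) * pixel_aspect
    if res_w / height >= 1920.0 / 1080.0:
        # wider than 16:9 -> fill width, letterbox top/bottom
        width_scale, width_half_pad = 1920, 0
        height_scale = int(round(1920.0 * height / res_w))
        height_half_pad = (1080 - height_scale) // 2
    else:
        # narrower than 16:9 -> fill height, pillarbox left/right
        height_scale, height_half_pad = 1080, 0
        width_scale = int(round(1080.0 * res_w / height))
        width_half_pad = (1920 - width_scale) // 2
    return ("scale={0}x{1}:flags=lanczos,"
            "pad=1920:1080:{2}:{3}:black,setsar=1").format(
        width_scale, height_scale, width_half_pad, height_half_pad)


# 2048x1080 with 2.0 anamorphic pixels letterboxes to 1920x506
print(hd_scaling_arg(2048, 1080, 2.0))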
self.log.debug("_ args: `{}`".format(args)) + + # strip them from all flags + vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "") + for p in vf_settings] + + self.log.debug("_ vf_fixed: `{}`".format(vf_fixed)) + vf_fixed.insert(0, inserting_arg) + self.log.debug("_ vf_fixed: `{}`".format(vf_fixed)) + # create new video filter setting + vf_back = "-vf " + ",".join(vf_fixed) + + return vf_back diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index ab3da29cac..faade613f2 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -1,18 +1,23 @@ import os from os.path import getsize import logging -import speedcopy +import sys import clique import errno import pyblish.api from avalon import api, io from avalon.vendor import filelink +# this is needed until speedcopy for linux is fixed +if sys.platform == "win32": + from speedcopy import copyfile +else: + from shutil import copyfile log = logging.getLogger(__name__) class IntegrateAssetNew(pyblish.api.InstancePlugin): - """Resolve any dependency issius + """Resolve any dependency issues This plug-in resolves any paths which, if not updated might break the published file. @@ -57,7 +62,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "render", "imagesequence", "review", - "render", "rendersetup", "rig", "plate", @@ -66,7 +70,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "audio", "yetiRig", "yeticache", - "source" + "nukenodes", + "gizmo", + "source", + "matchmove", + "image" ] exclude_families = ["clip"] @@ -406,7 +414,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } if sequence_repre and repre.get("frameStart"): - representation['context']['frame'] = repre.get("frameStart") + representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart")) self.log.debug("__ representation: {}".format(representation)) destination_list.append(dst) @@ -476,7 +484,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # copy file with speedcopy and check if size of files are simetrical while True: - speedcopy.copyfile(src, dst) + copyfile(src, dst) if str(getsize(src)) in str(getsize(dst)): break @@ -494,7 +502,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): filelink.create(src, dst, filelink.HARDLINK) def get_subset(self, asset, instance): - subset = io.find_one({"type": "subset", "parent": asset["_id"], "name": instance.data["subset"]}) @@ -503,7 +510,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_name = instance.data["subset"] self.log.info("Subset '%s' not found, creating.." % subset_name) self.log.debug("families. %s" % instance.data.get('families')) - self.log.debug("families. %s" % type(instance.data.get('families'))) + self.log.debug( + "families. 
%s" % type(instance.data.get('families'))) _id = io.insert_one({ "schema": "pype:subset-3.0", @@ -517,6 +525,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = io.find_one({"_id": _id}) + # add group if available + if instance.data.get("subsetGroup"): + io.update_many({ + 'type': 'subset', + '_id': io.ObjectId(subset["_id"]) + }, {'$set': {'data.subsetGroup': + instance.data.get('subsetGroup')}} + ) + return subset def create_version(self, subset, version_number, locations, data=None): diff --git a/pype/plugins/global/publish/validate_containers.py b/pype/plugins/global/publish/validate_containers.py new file mode 100644 index 0000000000..44cb5def3c --- /dev/null +++ b/pype/plugins/global/publish/validate_containers.py @@ -0,0 +1,28 @@ +import pyblish.api + +import pype.lib +from avalon.tools import cbsceneinventory + + +class ShowInventory(pyblish.api.Action): + + label = "Show Inventory" + icon = "briefcase" + on = "failed" + + def process(self, context, plugin): + cbsceneinventory.show() + + +class ValidateContainers(pyblish.api.ContextPlugin): + """Containers are must be updated to latest version on publish.""" + + label = "Validate Containers" + order = pyblish.api.ValidatorOrder + hosts = ["maya", "houdini", "nuke"] + optional = True + actions = [ShowInventory] + + def process(self, context): + if pype.lib.any_outdated(): + raise ValueError("There are outdated containers in the scene.") diff --git a/pype/plugins/global/publish/validate_ffmpeg_installed.py b/pype/plugins/global/publish/validate_ffmpeg_installed.py index 6d5ffba1e1..df7c330e95 100644 --- a/pype/plugins/global/publish/validate_ffmpeg_installed.py +++ b/pype/plugins/global/publish/validate_ffmpeg_installed.py @@ -27,6 +27,8 @@ class ValidateFfmpegInstallef(pyblish.api.Validator): return True def process(self, instance): + self.log.info("ffmpeg path: `{}`".format( + os.environ.get("FFMPEG_PATH", ""))) if self.is_tool( os.path.join( os.environ.get("FFMPEG_PATH", ""), "ffmpeg")) is False: diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py index a2c7b24ac1..f24f6b1a2e 100644 --- a/pype/plugins/global/publish/validate_templates.py +++ b/pype/plugins/global/publish/validate_templates.py @@ -1,8 +1,9 @@ import pyblish.api import os + class ValidateTemplates(pyblish.api.ContextPlugin): - """Check if all templates were filed""" + """Check if all templates were filled""" label = "Validate Templates" order = pyblish.api.ValidatorOrder - 0.1 @@ -18,12 +19,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin): "root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], "project": {"name": "D001_projectsx", "code": "prjX"}, - "ext": "exr", - "version": 3, - "task": "animation", - "asset": "sh001", - "hierarchy": "ep101/sq01/sh010"} - + "ext": "exr", + "version": 3, + "task": "animation", + "asset": "sh001", + "app": "maya", + "hierarchy": "ep101/sq01/sh010"} anatomy_filled = anatomy.format(data) self.log.info(anatomy_filled) @@ -31,11 +32,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin): data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], "project": {"name": "D001_projectsy", "code": "prjY"}, - "ext": "abc", - "version": 1, - "task": "lookdev", - "asset": "bob", - "hierarchy": "ep101/sq01/bob"} + "ext": "abc", + "version": 1, + "task": "lookdev", + "asset": "bob", + "app": "maya", + "hierarchy": "ep101/sq01/bob"} anatomy_filled = context.data["anatomy"].format(data) self.log.info(anatomy_filled["work"]["folder"]) diff --git 
a/pype/plugins/launcher/actions/Aport.py b/pype/plugins/launcher/actions/Aport.py index 94f14cd0d3..0ecd07c49a 100644 --- a/pype/plugins/launcher/actions/Aport.py +++ b/pype/plugins/launcher/actions/Aport.py @@ -1,7 +1,4 @@ import os -import sys -from avalon import io -from pprint import pprint import acre from avalon import api, lib diff --git a/pype/plugins/launcher/actions/unused/PremierePro.py b/pype/plugins/launcher/actions/unused/PremierePro.py index 97d693ffbb..57aa4eb2cb 100644 --- a/pype/plugins/launcher/actions/unused/PremierePro.py +++ b/pype/plugins/launcher/actions/unused/PremierePro.py @@ -1,10 +1,9 @@ import os -import sys -from pprint import pprint import acre from avalon import api, lib, io import pype.api as pype +from pypeapp import Anatomy class PremierePro(api.Action): diff --git a/pype/plugins/maya/create/create_ass.py b/pype/plugins/maya/create/create_ass.py index 84b42e9b20..6d8eda1a40 100644 --- a/pype/plugins/maya/create/create_ass.py +++ b/pype/plugins/maya/create/create_ass.py @@ -1,6 +1,7 @@ from collections import OrderedDict import avalon.maya +from pype.maya import lib from maya import cmds @@ -14,10 +15,21 @@ class CreateAss(avalon.maya.Creator): icon = "cube" defaults = ['Main'] + def __init__(self, *args, **kwargs): + super(CreateAss, self).__init__(*args, **kwargs) + + # Add animation data + self.data.update(lib.collect_animation_data()) + + # Vertex colors with the geometry + self.data["exportSequence"] = False + def process(self): instance = super(CreateAss, self).process() - data = OrderedDict(**self.data) + # data = OrderedDict(**self.data) + + nodes = list() @@ -30,4 +42,6 @@ class CreateAss(avalon.maya.Creator): assProxy = cmds.sets(name="proxy_SET", empty=True) cmds.sets([assContent, assProxy], forceElement=instance) - self.data = data + # self.log.info(data) + # + # self.data = data diff --git a/pype/plugins/maya/create/create_look.py b/pype/plugins/maya/create/create_look.py index 299fbafe02..49fd376db0 100644 --- a/pype/plugins/maya/create/create_look.py +++ b/pype/plugins/maya/create/create_look.py @@ -18,3 +18,6 @@ class CreateLook(avalon.maya.Creator): # Whether to automatically convert the textures to .tx upon publish. self.data["maketx"] = True + + # Enable users to force a copy. 
+ self.data["forceCopy"] = False diff --git a/pype/plugins/maya/create/create_renderglobals.py b/pype/plugins/maya/create/create_renderglobals.py index ac6048a3e7..7c71bfbc36 100644 --- a/pype/plugins/maya/create/create_renderglobals.py +++ b/pype/plugins/maya/create/create_renderglobals.py @@ -38,7 +38,7 @@ class CreateRenderGlobals(avalon.maya.Creator): self.log.warning("Deadline REST API url not found.") else: argument = "{}/api/pools?NamesOnly=true".format(deadline_url) - response = requests.get(argument) + response = self._requests_get(argument) if not response.ok: self.log.warning("No pools retrieved") else: @@ -135,7 +135,7 @@ class CreateRenderGlobals(avalon.maya.Creator): 'authToken': self._token } api_entry = '/api/pools/list' - response = requests.get( + response = self._requests_get( self.MUSTER_REST_URL + api_entry, params=params) if response.status_code != 200: if response.status_code == 401: diff --git a/pype/plugins/maya/load/load_ass.py b/pype/plugins/maya/load/load_ass.py index 2960e4403e..83dd80bd4e 100644 --- a/pype/plugins/maya/load/load_ass.py +++ b/pype/plugins/maya/load/load_ass.py @@ -2,6 +2,7 @@ from avalon import api import pype.maya.plugin import os from pypeapp import config +import clique class AssProxyLoader(pype.maya.plugin.ReferenceLoader): @@ -21,6 +22,13 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): from avalon import maya import pymel.core as pm + version = context['version'] + version_data = version.get("data", {}) + + self.log.info("version_data: {}\n".format(version_data)) + + frameStart = version_data.get("frameStart", None) + try: family = context["representation"]["context"]["family"] except ValueError: @@ -30,7 +38,24 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): groupName = "{}:{}".format(namespace, name) path = self.fname - proxyPath = os.path.splitext(path)[0] + ".ma" + proxyPath_base = os.path.splitext(path)[0] + + if frameStart is not None: + proxyPath_base = os.path.splitext(proxyPath_base)[0] + + publish_folder = os.path.split(path)[0] + files_in_folder = os.listdir(publish_folder) + collections, remainder = clique.assemble(files_in_folder) + + if collections: + hashes = collections[0].padding * '#' + coll = collections[0].format('{head}[index]{tail}') + filename = coll.replace('[index]', hashes) + + path = os.path.join(publish_folder, filename) + + proxyPath = proxyPath_base + ".ma" + self.log.info nodes = cmds.file(proxyPath, namespace=namespace, @@ -147,6 +172,13 @@ class AssStandinLoader(api.Loader): import mtoa.ui.arnoldmenu import pymel.core as pm + version = context['version'] + version_data = version.get("data", {}) + + self.log.info("version_data: {}\n".format(version_data)) + + frameStart = version_data.get("frameStart", None) + asset = context['asset']['name'] namespace = namespace or lib.unique_namespace( asset + "_", @@ -182,6 +214,8 @@ class AssStandinLoader(api.Loader): # Set the standin filepath standinShape.dso.set(self.fname) + if frameStart is not None: + standinShape.useFrameExtension.set(1) nodes = [root, standin] self[:] = nodes @@ -199,14 +233,23 @@ class AssStandinLoader(api.Loader): path = api.get_representation_path(representation) - # Update the standin - members = pm.sets(container['objectName'], query=True) - standins = pm.ls(members, type="AiStandIn", long=True) + files_in_path = os.listdir(os.path.split(path)[0]) + sequence = 0 + collections, remainder = clique.assemble(files_in_path) + if collections: + sequence = 1 - assert len(caches) == 1, "This is a bug" + # Update the standin + 
standins = list() + members = pm.sets(container['objectName'], query=True) + for member in members: + shape = member.getShape() + if (shape and shape.type() == "aiStandIn"): + standins.append(shape) for standin in standins: - standin.cacheFileName.set(path) + standin.dso.set(path) + standin.useFrameExtension.set(sequence) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) diff --git a/pype/plugins/maya/load/load_matchmove.py b/pype/plugins/maya/load/load_matchmove.py new file mode 100644 index 0000000000..abc702cde8 --- /dev/null +++ b/pype/plugins/maya/load/load_matchmove.py @@ -0,0 +1,30 @@ +from avalon import api +from maya import mel + + +class MatchmoveLoader(api.Loader): + """ + This will run matchmove script to create track in scene. + + Supported script types are .py and .mel + """ + + families = ["matchmove"] + representations = ["py", "mel"] + defaults = ["Camera", "Object", "Mocap"] + + label = "Run matchmove script" + icon = "empire" + color = "orange" + + def load(self, context, name, namespace, data): + if self.fname.lower().endswith(".py"): + exec(open(self.fname).read()) + + elif self.fname.lower().endswith(".mel"): + mel.eval('source "{}"'.format(self.fname)) + + else: + self.log.error("Unsupported script type") + + return True diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index 55db019cf4..376fcc2c01 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -1,9 +1,7 @@ import pype.maya.plugin import os from pypeapp import config -reload(config) -import pype.maya.plugin -reload(pype.maya.plugin) + class ReferenceLoader(pype.maya.plugin.ReferenceLoader): """Load the model""" @@ -22,7 +20,6 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): from avalon import maya import pymel.core as pm - try: family = context["representation"]["context"]["family"] except ValueError: diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py index 618f2749a4..7a5fea776c 100644 --- a/pype/plugins/maya/publish/collect_look.py +++ b/pype/plugins/maya/publish/collect_look.py @@ -219,10 +219,6 @@ class CollectLook(pyblish.api.InstancePlugin): with lib.renderlayer(instance.data["renderlayer"]): self.collect(instance) - # make ftrack publishable - self.maketx = instance.data.get('maketx', True) - instance.data['maketx'] = self.maketx - self.log.info('maketx: {}'.format(self.maketx)) def collect(self, instance): @@ -297,9 +293,11 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.info("Collected file nodes:\n{}".format(files)) # Collect textures if any file nodes are found - instance.data["resources"] = [self.collect_resource(n) - for n in files] - self.log.info("Collected resources:\n{}".format(instance.data["resources"])) + instance.data["resources"] = [] + for n in files: + instance.data["resources"].append(self.collect_resource(n)) + + self.log.info("Collected resources: {}".format(instance.data["resources"])) # Log a warning when no relevant sets were retrieved for the look. 
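Both AssProxyLoader and AssStandinLoader above lean on clique for the same two jobs: deciding whether the published path belongs to a frame sequence (which drives useFrameExtension on the standin) and rebuilding a #-padded path for it. The same steps in isolation (file names are illustrative; clique's default padding detection is assumed):

import clique

files = ["render.0999.ass", "render.1000.ass", "render.1001.ass"]

collections, remainder = clique.assemble(files)
is_sequence = bool(collections)  # -> sequence flag for the standin

if is_sequence:
    coll = collections[0]
    # '{head}[index]{tail}' -> 'render.[index].ass'; '[index]' is a
    # plain placeholder, swapped for '#' * padding afterwards
    hashes = coll.padding * "#"
    padded = coll.format("{head}[index]{tail}").replace("[index]", hashes)
    print(padded)  # render.####.ass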
if not instance.data["lookData"]["relationships"]: @@ -423,7 +421,7 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.debug("processing: {}".format(node)) if cmds.nodeType(node) == 'file': - self.log.debug("file node") + self.log.debug(" - file node") attribute = "{}.fileTextureName".format(node) computed_attribute = "{}.computedFileTextureNamePattern".format(node) elif cmds.nodeType(node) == 'aiImage': @@ -431,7 +429,7 @@ class CollectLook(pyblish.api.InstancePlugin): attribute = "{}.filename".format(node) computed_attribute = attribute source = cmds.getAttr(attribute) - + self.log.info(" - file source: {}".format(source)) color_space_attr = "{}.colorSpace".format(node) color_space = cmds.getAttr(color_space_attr) # Compare with the computed file path, e.g. the one with the @@ -455,6 +453,13 @@ class CollectLook(pyblish.api.InstancePlugin): if len(files) == 0: self.log.error("No valid files found from node `%s`" % node) + self.log.info("collection of resource done:") + self.log.info(" - node: {}".format(node)) + self.log.info(" - attribute: {}".format(attribute)) + self.log.info(" - source: {}".format(source)) + self.log.info(" - file: {}".format(files)) + self.log.info(" - color space: {}".format(color_space)) + # Define the resource return {"node": node, "attribute": attribute, diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py index 1fed6c8dd7..71f3e0d84c 100644 --- a/pype/plugins/maya/publish/extract_ass.py +++ b/pype/plugins/maya/publish/extract_ass.py @@ -20,8 +20,11 @@ class ExtractAssStandin(pype.api.Extractor): def process(self, instance): + sequence = instance.data.get("exportSequence", False) + staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) + filenames = list() file_path = os.path.join(staging_dir, filename) # Write out .ass file @@ -29,13 +32,47 @@ class ExtractAssStandin(pype.api.Extractor): with avalon.maya.maintained_selection(): self.log.info("Writing: {}".format(instance.data["setMembers"])) cmds.select(instance.data["setMembers"], noExpand=True) - cmds.arnoldExportAss( filename=file_path, - selected=True, - asciiAss=True, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) + + if sequence: + self.log.info("Extracting ass sequence") + + # Collect the start and end including handles + start = instance.data.get("frameStart", 1) + end = instance.data.get("frameEnd", 1) + handles = instance.data.get("handles", 0) + step = instance.data.get("step", 0) + if handles: + start -= handles + end += handles + + exported_files = cmds.arnoldExportAss(filename=file_path, + selected=True, + asciiAss=True, + shadowLinks=True, + lightLinks=True, + boundingBox=True, + startFrame=start, + endFrame=end, + frameStep=step + ) + for file in exported_files: + filenames.append(os.path.split(file)[1]) + self.log.info("Exported: {}".format(filenames)) + else: + cmds.arnoldExportAss(filename=file_path, + selected=True, + asciiAss=True, + shadowLinks=True, + lightLinks=True, + boundingBox=True + ) + filenames = filename + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart" + ] + for key in optionals: + instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -43,9 +80,13 @@ class ExtractAssStandin(pype.api.Extractor): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filename, + 'files': filenames, "stagingDir": staging_dir } + + if sequence: + representation['frameStart'] = start + 
instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/pype/plugins/maya/publish/extract_assproxy.py b/pype/plugins/maya/publish/extract_assproxy.py index 34c3113e11..59684febe1 100644 --- a/pype/plugins/maya/publish/extract_assproxy.py +++ b/pype/plugins/maya/publish/extract_assproxy.py @@ -43,8 +43,13 @@ class ExtractAssProxy(pype.api.Extractor): # Get only the shape contents we need in such a way that we avoid # taking along intermediateObjects - members = instance.data['proxy'] - members = cmds.ls(members, + proxy = instance.data.get('proxy', None) + + if not proxy: + self.log.info("no proxy mesh") + return + + members = cmds.ls(proxy, dag=True, transforms=True, noIntermediate=True) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index d23dfbcf3e..5226f80f7a 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -38,11 +38,7 @@ def source_hash(filepath, *args): file_name = os.path.basename(filepath) time = str(os.path.getmtime(filepath)) size = str(os.path.getsize(filepath)) - return "|".join([ - file_name, - time, - size - ] + list(args)).replace(".", ",") + return "|".join([file_name, time, size] + list(args)).replace(".", ",") def find_paths_by_hash(texture_hash): @@ -64,36 +60,33 @@ def maketx(source, destination, *args): """ cmd = [ - "maketx", - "-v", # verbose - "-u", # update mode - # unpremultiply before conversion (recommended when alpha present) - "--unpremult", - "--checknan", - # use oiio-optimized settings for tile-size, planarconfig, metadata - "--oiio", - "--filter lanczos3" - ] + "maketx", + "-v", # verbose + "-u", # update mode + # unpremultiply before conversion (recommended when alpha present) + "--unpremult", + "--checknan", + # use oiio-optimized settings for tile-size, planarconfig, metadata + "--oiio", + "--filter lanczos3", + ] cmd.extend(args) - cmd.extend([ - "-o", destination, - source - ]) + cmd.extend(["-o", destination, source]) + + cmd = " ".join(cmd) CREATE_NO_WINDOW = 0x08000000 - kwargs = dict( - args=cmd, - stderr=subprocess.STDOUT - ) + kwargs = dict(args=cmd, stderr=subprocess.STDOUT) if sys.platform == "win32": - kwargs["creationflags"] = CREATE_NO_WIDOW + kwargs["creationflags"] = CREATE_NO_WINDOW try: out = subprocess.check_output(**kwargs) except subprocess.CalledProcessError as exc: print(exc) import traceback + traceback.print_exc() raise @@ -180,36 +173,51 @@ class ExtractLook(pype.api.Extractor): # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to # ensure after context it's still the original value. 
- color_space = resource.get('color_space') + color_space = resource.get("color_space") for f in resource["files"]: - files_metadata[os.path.normpath(f)] = {'color_space': color_space} + files_metadata[os.path.normpath(f)] = { + "color_space": color_space} # files.update(os.path.normpath(f)) # Process the resource files transfers = list() hardlinks = list() hashes = dict() + forceCopy = instance.data.get("forceCopy", False) self.log.info(files) for filepath in files_metadata: - cspace = files_metadata[filepath]['color_space'] + cspace = files_metadata[filepath]["color_space"] linearise = False - if cspace == 'sRGB': + if cspace == "sRGB": linearise = True + # set its file node to 'raw' as tx will be linearized + files_metadata[filepath]["color_space"] = "raw" source, mode, hash = self._process_texture( - filepath, do_maketx, staging=dir_path, linearise=linearise - ) - destination = self.resource_destination( - instance, source, do_maketx + filepath, + do_maketx, + staging=dir_path, + linearise=linearise, + force=forceCopy ) + destination = self.resource_destination(instance, + source, + do_maketx) + + # Force copy is specified. + if forceCopy: + mode = COPY + if mode == COPY: transfers.append((source, destination)) + self.log.info('copying') elif mode == HARDLINK: hardlinks.append((source, destination)) + self.log.info('hardlinking') # Store the hashes from hash to destination to include in the # database @@ -230,13 +238,14 @@ class ExtractLook(pype.api.Extractor): # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to # ensure after context it's still the original value. - color_space_attr = resource['node'] + ".colorSpace" + color_space_attr = resource["node"] + ".colorSpace" color_space = cmds.getAttr(color_space_attr) - + if files_metadata[source]["color_space"] == "raw": + # set colorpsace to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination - attr = resource['attribute'] + attr = resource["attribute"] remap[attr] = destinations[source] - remap[color_space_attr] = color_space self.log.info("Finished remapping destinations ...") @@ -263,13 +272,15 @@ class ExtractLook(pype.api.Extractor): channels=True, constraints=True, expressions=True, - constructionHistory=True + constructionHistory=True, ) # Write the JSON data self.log.info("Extract json..") - data = {"attributes": lookdata["attributes"], - "relationships": relationships} + data = { + "attributes": lookdata["attributes"], + "relationships": relationships + } with open(json_path, "w") as f: json.dump(data, f) @@ -288,7 +299,7 @@ class ExtractLook(pype.api.Extractor): instance.data["representations"].append( { "name": "ma", - "ext": 'ma', + "ext": "ma", "files": os.path.basename(maya_fname), "stagingDir": os.path.dirname(maya_fname), } @@ -296,7 +307,7 @@ class ExtractLook(pype.api.Extractor): instance.data["representations"].append( { "name": "json", - "ext": 'json', + "ext": "json", "files": os.path.basename(json_fname), "stagingDir": os.path.dirname(json_fname), } @@ -309,13 +320,18 @@ class ExtractLook(pype.api.Extractor): # Source hash for the textures instance.data["sourceHashes"] = hashes - self.log.info("Extracted instance '%s' to: %s" % ( - instance.name, maya_path) - ) + """ + self.log.info("Returning colorspaces to their original values ...") + for attr, value in remap.items(): + self.log.info(" - {}: {}".format(attr, value)) + cmds.setAttr(attr, value, type="string") + """ + self.log.info("Extracted instance 
'%s' to: %s" % (instance.name, + maya_path)) def resource_destination(self, instance, filepath, do_maketx): - anatomy = instance.context.data['anatomy'] + anatomy = instance.context.data["anatomy"] self.create_destination_template(instance, anatomy) @@ -327,12 +343,10 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - instance.data["assumedDestination"], - "resources", - basename + ext + instance.data["assumedDestination"], "resources", basename + ext ) - def _process_texture(self, filepath, do_maketx, staging, linearise): + def _process_texture(self, filepath, do_maketx, staging, linearise, force): """Process a single texture file on disk for publishing. This will: 1. Check whether it's already published, if so it will do hardlink @@ -354,24 +368,20 @@ class ExtractLook(pype.api.Extractor): # If source has been published before with the same settings, # then don't reprocess but hardlink from the original existing = find_paths_by_hash(texture_hash) - if existing: + if existing and not force: self.log.info("Found hash in database, preparing hardlink..") source = next((p for p in existing if os.path.exists(p)), None) if filepath: return source, HARDLINK, texture_hash else: self.log.warning( - "Paths not found on disk, " - "skipping hardlink: %s" % (existing,) + ("Paths not found on disk, " + "skipping hardlink: %s") % (existing,) ) if do_maketx and ext != ".tx": # Produce .tx file in staging if source file is not .tx - converted = os.path.join( - staging, - "resources", - fname + ".tx" - ) + converted = os.path.join(staging, "resources", fname + ".tx") if linearise: self.log.info("tx: converting sRGB -> linear") @@ -384,9 +394,15 @@ class ExtractLook(pype.api.Extractor): os.makedirs(os.path.dirname(converted)) self.log.info("Generating .tx file for %s .." 
% filepath) - maketx(filepath, converted, - # Include `source-hash` as string metadata - "-sattrib", "sourceHash", texture_hash, colorconvert) + maketx( + filepath, + converted, + # Include `source-hash` as string metadata + "-sattrib", + "sourceHash", + texture_hash, + colorconvert, + ) return converted, COPY, texture_hash @@ -412,58 +428,62 @@ class ExtractLook(pype.api.Extractor): project_name = api.Session["AVALON_PROJECT"] a_template = anatomy.templates - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) + project = io.find_one( + {"type": "project", "name": project_name}, + projection={"config": True, "data": True}, + ) - template = a_template['publish']['path'] + template = a_template["publish"]["path"] # anatomy = instance.context.data['anatomy'] - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one( + {"type": "asset", "name": asset_name, "parent": project["_id"]} + ) assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') + "in project '{}'").format(asset_name, project_name) + silo = asset.get("silo") - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) + subset = io.find_one( + {"type": "subset", "name": subset_name, "parent": asset["_id"]} + ) # assume there is no version yet, we start at `1` version = None version_number = 1 if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = io.find_one( + {"type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] + ) # if there is a subset there ought to be version if version is not None: version_number += version["name"] - if instance.data.get('version'): - version_number = int(instance.data.get('version')) + if instance.data.get("version"): + version_number = int(instance.data.get("version")) - padding = int(a_template['render']['padding']) + padding = int(a_template["render"]["padding"]) - hierarchy = asset['data']['parents'] + hierarchy = asset["data"]["parents"] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = "/".join(hierarchy) - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} + template_data = { + "root": api.Session["AVALON_PROJECTS"], + "project": {"name": project_name, "code": project["data"]["code"]}, + "silo": silo, + "family": instance.data["family"], + "asset": asset_name, + "subset": subset_name, + "frame": ("#" * padding), + "version": version_number, + "hierarchy": hierarchy, + "representation": "TEMP", + } instance.data["assumedTemplateData"] = template_data self.log.info(template_data) diff --git a/pype/plugins/maya/publish/extract_quicktime.py b/pype/plugins/maya/publish/extract_quicktime.py index 3d8c1dda9b..1031955260 100644 --- a/pype/plugins/maya/publish/extract_quicktime.py +++ b/pype/plugins/maya/publish/extract_quicktime.py @@ -1,19 +1,16 @@ import os -import subprocess import contextlib -import json import capture_gui import clique - +# import pype.maya.lib as lib import pype.api -import avalon.maya - +# from maya import cmds, mel import pymel.core as pm -from pype.vendor import ffmpeg -# 
from pype.scripts import otio_burnin -reload(ffmpeg) +# import ffmpeg +# # from pype.scripts import otio_burnin +# reload(ffmpeg) # TODO: move codec settings to presets diff --git a/pype/plugins/maya/publish/extract_thumbnail.py b/pype/plugins/maya/publish/extract_thumbnail.py index e47915c4cf..dc8044cf19 100644 --- a/pype/plugins/maya/publish/extract_thumbnail.py +++ b/pype/plugins/maya/publish/extract_thumbnail.py @@ -11,8 +11,8 @@ import pype.api from maya import cmds import pymel.core as pm -from pype.vendor import ffmpeg -reload(ffmpeg) +# import ffmpeg +# reload(ffmpeg) import avalon.maya diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py index 5bb50bd85e..55c04e9c41 100644 --- a/pype/plugins/maya/publish/submit_maya_deadline.py +++ b/pype/plugins/maya/publish/submit_maya_deadline.py @@ -271,20 +271,21 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): for key in environment: clean_path = "" self.log.debug("key: {}".format(key)) - to_process = environment[key] + self.log.debug("value: {}".format(environment[key])) + to_process = str(environment[key]) if key == "PYPE_STUDIO_CORE_MOUNT": - clean_path = environment[key] - elif "://" in environment[key]: - clean_path = environment[key] - elif os.pathsep not in to_process: + clean_path = to_process + elif "://" in to_process: + clean_path = to_process + elif os.pathsep not in str(to_process): try: - path = environment[key] + path = to_process path.decode('UTF-8', 'strict') clean_path = os.path.normpath(path) except UnicodeDecodeError: print('path contains non UTF characters') else: - for path in environment[key].split(os.pathsep): + for path in to_process.split(os.pathsep): try: path.decode('UTF-8', 'strict') clean_path += os.path.normpath(path) + os.pathsep diff --git a/pype/plugins/maya/publish/validate_assembly_name.py b/pype/plugins/maya/publish/validate_assembly_name.py new file mode 100644 index 0000000000..2a3a92e950 --- /dev/null +++ b/pype/plugins/maya/publish/validate_assembly_name.py @@ -0,0 +1,50 @@ +import pyblish.api +import maya.cmds as cmds +import pype.maya.action + + +class ValidateAssemblyName(pyblish.api.InstancePlugin): + """ Ensure Assembly name ends with `GRP` + + Check if assembly name ends with `_GRP` string. 
+ """ + + label = "Validate Assembly Name" + order = pyblish.api.ValidatorOrder + families = ["assembly"] + actions = [pype.maya.action.SelectInvalidAction] + active = False + + @classmethod + def get_invalid(cls, instance): + cls.log.info("Checking name of {}".format(instance.name)) + + content_instance = instance.data.get("setMembers", None) + if not content_instance: + cls.log.error("Instance has no nodes!") + return True + + # All children will be included in the extracted export so we also + # validate *all* descendents of the set members and we skip any + # intermediate shapes + descendants = cmds.listRelatives(content_instance, + allDescendents=True, + fullPath=True) or [] + descendants = cmds.ls(descendants, noIntermediate=True, long=True) + content_instance = list(set(content_instance + descendants)) + assemblies = cmds.ls(content_instance, assemblies=True, long=True) + + invalid = [] + for cr in assemblies: + if not cr.endswith('_GRP'): + cls.log.error("{} doesn't end with _GRP".format(cr)) + invalid.append(cr) + + return invalid + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError("Found {} invalid named assembly " + "items".format(len(invalid))) diff --git a/pype/plugins/maya/publish/validate_model_name.py b/pype/plugins/maya/publish/validate_model_name.py new file mode 100644 index 0000000000..89c629c5a4 --- /dev/null +++ b/pype/plugins/maya/publish/validate_model_name.py @@ -0,0 +1,98 @@ +from maya import cmds +import pyblish.api +import pype.api +import pype.maya.action +import re + + +class ValidateModelName(pyblish.api.InstancePlugin): + """Validate name of model + + starts with (somename)_###_(materialID)_GEO + materialID must be present in list + padding number doesn't have limit + + """ + optional = True + order = pype.api.ValidateContentsOrder + hosts = ["maya"] + families = ["model"] + label = "Model Name" + actions = [pype.maya.action.SelectInvalidAction] + # path to shader names definitions + # TODO: move it to preset file + material_file = None + active = False + regex = '(.*)_(\\d)*_(.*)_(GEO)' + + @classmethod + def get_invalid(cls, instance): + + # find out if supplied transform is group or not + def is_group(groupName): + try: + children = cmds.listRelatives(groupName, children=True) + for child in children: + if not cmds.ls(child, transforms=True): + return False + return True + except: + return False + + invalid = [] + content_instance = instance.data.get("setMembers", None) + if not content_instance: + cls.log.error("Instance has no nodes!") + return True + pass + descendants = cmds.listRelatives(content_instance, + allDescendents=True, + fullPath=True) or [] + + descendants = cmds.ls(descendants, noIntermediate=True, long=True) + trns = cmds.ls(descendants, long=False, type=('transform')) + + # filter out groups + filter = [node for node in trns if not is_group(node)] + + # load shader list file as utf-8 + if cls.material_file: + shader_file = open(cls.material_file, "r") + shaders = shader_file.readlines() + shader_file.close() + + # strip line endings from list + shaders = map(lambda s: s.rstrip(), shaders) + + # compile regex for testing names + r = re.compile(cls.regex) + + for obj in filter: + m = r.match(obj) + if m is None: + cls.log.error("invalid name on: {}".format(obj)) + invalid.append(obj) + else: + # if we have shader files and shader named group is in + # regex, test this group against names in shader file + if 'shader' in r.groupindex and shaders: + try: + if not m.group('shader') in 
shaders: + cls.log.error( + "invalid materialID on: {0} ({1})".format( + obj, m.group('shader'))) + invalid.append(obj) + except IndexError: + # shader named group doesn't match + cls.log.error( + "shader group doesn't match: {}".format(obj)) + invalid.append(obj) + + return invalid + + def process(self, instance): + + invalid = self.get_invalid(instance) + + if invalid: + raise RuntimeError("Model naming is invalid. See log.") diff --git a/pype/plugins/maya/publish/validate_node_ids_in_database.py b/pype/plugins/maya/publish/validate_node_ids_in_database.py index 7347ce2ab2..fdcf0b20b0 100644 --- a/pype/plugins/maya/publish/validate_node_ids_in_database.py +++ b/pype/plugins/maya/publish/validate_node_ids_in_database.py @@ -1,6 +1,6 @@ import pyblish.api -import avalon.io as io +from avalon import io import pype.api import pype.maya.action diff --git a/pype/plugins/maya/publish/validate_node_ids_related.py b/pype/plugins/maya/publish/validate_node_ids_related.py index 4a154d0b71..4872f438d4 100644 --- a/pype/plugins/maya/publish/validate_node_ids_related.py +++ b/pype/plugins/maya/publish/validate_node_ids_related.py @@ -1,7 +1,7 @@ import pyblish.api import pype.api -import avalon.io as io +from avalon import io import pype.maya.action from pype.maya import lib diff --git a/pype/plugins/maya/publish/validate_shader_name.py b/pype/plugins/maya/publish/validate_shader_name.py new file mode 100644 index 0000000000..c6f72a2940 --- /dev/null +++ b/pype/plugins/maya/publish/validate_shader_name.py @@ -0,0 +1,78 @@ +from maya import cmds + +import pyblish.api +import pype.api +import pype.maya.action +import re + + +class ValidateShaderName(pyblish.api.InstancePlugin): + """Validate shader name assigned. + + It should be _<*>_SHD + + """ + optional = True + active = False + order = pype.api.ValidateContentsOrder + families = ["look"] + hosts = ['maya'] + label = 'Validate Shaders Name' + actions = [pype.maya.action.SelectInvalidAction] + regex = r'(?P.*)_(.*)_SHD' + + # The default connections to check + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError("Found shapes with invalid shader names " + "assigned: " + "\n{}".format(invalid)) + + @classmethod + def get_invalid(cls, instance): + + invalid = [] + + # Get all shapes from the instance + content_instance = instance.data.get("setMembers", None) + if not content_instance: + cls.log.error("Instance has no nodes!") + return True + pass + descendants = cmds.listRelatives(content_instance, + allDescendents=True, + fullPath=True) or [] + + descendants = cmds.ls(descendants, noIntermediate=True, long=True) + shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True) + asset_name = instance.data.get("asset", None) + + # Check the number of connected shadingEngines per shape + r = re.compile(cls.regex) + for shape in shapes: + shading_engines = cmds.listConnections(shape, + destination=True, + type="shadingEngine") or [] + shaders = cmds.ls( + cmds.listConnections(shading_engines), materials=1 + ) + + for shader in shaders: + m = r.match(cls.regex, shader) + if m is None: + invalid.append(shape) + cls.log.error( + "object {0} has invalid shader name {1}".format(shape, + shader) + ) + else: + if 'asset' in r.groupindex: + if m.group('asset') != asset_name: + invalid.append(shape) + cls.log.error(("object {0} has invalid " + "shader name {1}").format(shape, + shader)) + + return invalid diff --git a/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py 
b/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py index dd66b4fb3a..441658297d 100644 --- a/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py +++ b/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py @@ -1,7 +1,7 @@ import nuke import os import pyblish.api -import avalon.io as io +from avalon import io # TODO: add repair function diff --git a/pype/plugins/nuke/create/create_backdrop b/pype/plugins/nuke/create/create_backdrop deleted file mode 100644 index 2cdc222618..0000000000 --- a/pype/plugins/nuke/create/create_backdrop +++ /dev/null @@ -1,2 +0,0 @@ -# creates backdrop which is published as separate nuke script -# it is versioned by major version diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py new file mode 100644 index 0000000000..767e92b592 --- /dev/null +++ b/pype/plugins/nuke/create/create_backdrop.py @@ -0,0 +1,50 @@ +from avalon.nuke.pipeline import Creator +from avalon.nuke import lib as anlib +import nuke + +class CreateBackdrop(Creator): + """Add Publishable Backdrop""" + + name = "nukenodes" + label = "Create Backdrop" + family = "nukenodes" + icon = "file-archive-o" + defaults = ["Main"] + + def __init__(self, *args, **kwargs): + super(CreateBackdrop, self).__init__(*args, **kwargs) + self.nodes = nuke.selectedNodes() + self.node_color = "0xdfea5dff" + return + + def process(self): + from nukescripts import autoBackdrop + nodes = list() + if (self.options or {}).get("useSelection"): + nodes = self.nodes + + if len(nodes) >= 1: + anlib.select_nodes(nodes) + bckd_node = autoBackdrop() + bckd_node["name"].setValue("{}_BDN".format(self.name)) + bckd_node["tile_color"].setValue(int(self.node_color, 16)) + bckd_node["note_font_size"].setValue(24) + bckd_node["label"].setValue("[{}]".format(self.name)) + # add avalon knobs + instance = anlib.imprint(bckd_node, self.data) + + return instance + else: + nuke.message("Please select nodes you " + "wish to add to a container") + return + else: + bckd_node = autoBackdrop() + bckd_node["name"].setValue("{}_BDN".format(self.name)) + bckd_node["tile_color"].setValue(int(self.node_color, 16)) + bckd_node["note_font_size"].setValue(24) + bckd_node["label"].setValue("[{}]".format(self.name)) + # add avalon knobs + instance = anlib.imprint(bckd_node, self.data) + + return instance diff --git a/pype/plugins/nuke/create/create_gizmo.py b/pype/plugins/nuke/create/create_gizmo.py new file mode 100644 index 0000000000..41229862e3 --- /dev/null +++ b/pype/plugins/nuke/create/create_gizmo.py @@ -0,0 +1,79 @@ +from avalon.nuke.pipeline import Creator +from avalon.nuke import lib as anlib +import nuke +import nukescripts + +class CreateGizmo(Creator): + """Add Publishable "gizmo" group + + The name is symbolically gizmo as presumably + it is something familiar to nuke users as group of nodes + distributed downstream in workflow + """ + + name = "gizmo" + label = "Gizmo" + family = "gizmo" + icon = "file-archive-o" + defaults = ["ViewerInput", "Lut", "Effect"] + + def __init__(self, *args, **kwargs): + super(CreateGizmo, self).__init__(*args, **kwargs) + self.nodes = nuke.selectedNodes() + self.node_color = "0x7533c1ff" + return + + def process(self): + if (self.options or {}).get("useSelection"): + nodes = self.nodes + self.log.info(len(nodes)) + if len(nodes) == 1: + anlib.select_nodes(nodes) + node = nodes[-1] + # check if Group node + if node.Class() in "Group": + node["name"].setValue("{}_GZM".format(self.name)) + node["tile_color"].setValue(int(self.node_color, 16)) 
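CreateBackdrop above and CreateGizmo around this point store node colours as 0xRRGGBBAA strings and pass int(value, 16) to the tile_color knob. The conversion itself is plain Python:

def tile_color(rgba_hex):
    # Nuke tile_color knobs take one packed 0xRRGGBBAA integer.
    return int(rgba_hex, 16)


assert tile_color("0xdfea5dff") == 0xDFEA5DFF  # backdrop yellow
assert tile_color("0x7533c1ff") == 0x7533C1FF  # gizmo purple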
+ return anlib.imprint(node, self.data) + else: + nuke.message("Please select a group node " + "you wish to publish as the gizmo") + + if len(nodes) >= 2: + anlib.select_nodes(nodes) + nuke.makeGroup() + gizmo_node = nuke.selectedNode() + gizmo_node["name"].setValue("{}_GZM".format(self.name)) + gizmo_node["tile_color"].setValue(int(self.node_color, 16)) + + # add sticky node wit guide + with gizmo_node: + sticky = nuke.createNode("StickyNote") + sticky["label"].setValue( + "Add following:\n- set Input" + " nodes\n- set one Output1\n" + "- create User knobs on the group") + + # add avalon knobs + return anlib.imprint(gizmo_node, self.data) + + else: + nuke.message("Please select nodes you " + "wish to add to the gizmo") + return + else: + with anlib.maintained_selection(): + gizmo_node = nuke.createNode("Group") + gizmo_node["name"].setValue("{}_GZM".format(self.name)) + gizmo_node["tile_color"].setValue(int(self.node_color, 16)) + + # add sticky node wit guide + with gizmo_node: + sticky = nuke.createNode("StickyNote") + sticky["label"].setValue( + "Add following:\n- add Input" + " nodes\n- add one Output1\n" + "- create User knobs on the group") + + # add avalon knobs + return anlib.imprint(gizmo_node, self.data) diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index cc66274626..042826d4d9 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -24,18 +24,17 @@ class CreateWriteRender(plugin.PypeCreator): def __init__(self, *args, **kwargs): super(CreateWriteRender, self).__init__(*args, **kwargs) - self.name = self.data["subset"] - data = OrderedDict() - data["family"] = self.nClass - data["families"] = self.family + data["family"] = self.family + data["families"] = self.nClass for k, v in self.data.items(): if k not in data.keys(): data.update({k: v}) self.data = data + self.nodes = nuke.selectedNodes() self.log.info("self.data: '{}'".format(self.data)) def process(self): @@ -48,9 +47,9 @@ class CreateWriteRender(plugin.PypeCreator): # use selection if (self.options or {}).get("useSelection"): - nodes = nuke.selectedNodes() + nodes = self.nodes - assert len(nodes) == 1, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`") + assert len(nodes) < 2, self.log.error("Select only one node. 
The node you want to connect to, or tick off `Use selection`") selected_node = nodes[0] inputs = [selected_node] diff --git a/pype/plugins/nuke/load/load_gizmo_ip.py b/pype/plugins/nuke/load/load_gizmo_ip.py new file mode 100644 index 0000000000..0d78c14214 --- /dev/null +++ b/pype/plugins/nuke/load/load_gizmo_ip.py @@ -0,0 +1,239 @@ +from avalon import api, style, io +import nuke +from pype.nuke import lib as pnlib +from avalon.nuke import lib as anlib +from avalon.nuke import containerise, update_container + + +class LoadGizmoInputProcess(api.Loader): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["gizmo"] + families = ["gizmo"] + + label = "Load Gizmo - Input Process" + order = 0 + icon = "eye" + color = style.colors.alert + node_color = "0x7533c1ff" + + def load(self, context, name, namespace, data): + """ + Loading function to get Gizmo as Input Process on viewer + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + namespace = namespace or context['asset']['name'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + # prepare data for imprinting + # add additional metadata from the version to imprint to Avalon knob + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # getting file path + file = self.fname.replace("\\", "/") + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + with anlib.maintained_selection(): + # add group from nk + nuke.nodePaste(file) + + GN = nuke.selectedNode() + + GN["name"].setValue(object_name) + + # try to place it under Viewer1 + if not self.connect_active_viewer(GN): + nuke.delete(GN) + return + + return containerise( + node=GN, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def update(self, container, representation): + """Update the Loader's path + + Nuke automatically tries to reset some variables when changing + the loader's path to a new file. 
These automatic changes affect its + inputs. + + """ + + # get main variables + # Get version from io + version = io.find_one({ + "type": "version", + "_id": representation["parent"] + }) + # get corresponding node + GN = nuke.toNode(container['objectName']) + + file = api.get_representation_path(representation).replace("\\", "/") + context = representation["context"] + name = container['name'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + namespace = container['namespace'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"representation": str(representation["_id"]), + "frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # adding nodes to node graph + # just in case we are in a group, let's jump out of it + nuke.endGroup() + + with anlib.maintained_selection(): + xpos = GN.xpos() + ypos = GN.ypos() + avalon_data = anlib.get_avalon_knob_data(GN) + nuke.delete(GN) + # add group from nk + nuke.nodePaste(file) + + GN = nuke.selectedNode() + anlib.set_avalon_knob_data(GN, avalon_data) + GN.setXYpos(xpos, ypos) + GN["name"].setValue(object_name) + + # get all versions in list + versions = io.find({ + "type": "version", + "parent": version["parent"] + }).distinct('name') + + max_version = max(versions) + + # change color of node + if version.get("name") not in [max_version]: + GN["tile_color"].setValue(int("0xd88467ff", 16)) + else: + GN["tile_color"].setValue(int(self.node_color, 16)) + + self.log.info("updated to version: {}".format(version.get("name"))) + + return update_container(GN, data_imprint) + + def connect_active_viewer(self, group_node): + """ + Finds the active viewer, + places the node under it, and adds + the group's name to the viewer's Input Process + + Arguments: + group_node (nuke node): nuke group node object + + """ + group_node_name = group_node["name"].value() + + viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] + if len(viewer) > 0: + viewer = viewer[0] + else: + self.log.error("Please create Viewer node before you " + "run this action again") + return None + + # get coordinates of Viewer1 + xpos = viewer["xpos"].value() + ypos = viewer["ypos"].value() + + ypos += 150 + + viewer["ypos"].setValue(ypos) + + # set coordinates to group node + group_node["xpos"].setValue(xpos) + group_node["ypos"].setValue(ypos + 50) + + # add group node name to Viewer Input Process + viewer["input_process_node"].setValue(group_node_name) + + # put backdrop under + pnlib.create_backdrop(label="Input Process", layer=2, + nodes=[viewer, group_node], color="0x7c7faaff") + + return True + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings. + It goes through the whole dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.iteritems()} + elif isinstance(input, list): + return [self.byteify(element) for 
element in input] + elif isinstance(input, unicode): + return input.encode('utf-8') + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + from avalon.nuke import viewer_update_and_undo_stop + node = nuke.toNode(container['objectName']) + with viewer_update_and_undo_stop(): + nuke.delete(node) diff --git a/pype/plugins/nuke/load/load_matchmove.py b/pype/plugins/nuke/load/load_matchmove.py new file mode 100644 index 0000000000..6a674368fb --- /dev/null +++ b/pype/plugins/nuke/load/load_matchmove.py @@ -0,0 +1,24 @@ +from avalon import api + + +class MatchmoveLoader(api.Loader): + """ + This will run a matchmove script to create a track in the script. + """ + + families = ["matchmove"] + representations = ["py"] + defaults = ["Camera", "Object"] + + label = "Run matchmove script" + icon = "empire" + color = "orange" + + def load(self, context, name, namespace, data): + if self.fname.lower().endswith(".py"): + exec(open(self.fname).read()) + + else: + self.log.error("Unsupported script type") + + return True diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py index e6daaaff8a..e598839405 100644 --- a/pype/plugins/nuke/load/load_mov.py +++ b/pype/plugins/nuke/load/load_mov.py @@ -1,9 +1,6 @@ -import os import contextlib -from avalon import api -import avalon.io as io - +from avalon import api, io import nuke @@ -102,7 +99,7 @@ class LoadMov(api.Loader): handle_start = version_data.get("handleStart", None) handle_end = version_data.get("handleEnd", None) repr_cont = context["representation"]["context"] - + # fix handle start and end if none are available if not handle_start and not handle_end: handle_start = handles diff --git a/pype/plugins/nuke/load/load_script_precomp.py b/pype/plugins/nuke/load/load_script_precomp.py index e84e23a890..310157f099 100644 --- a/pype/plugins/nuke/load/load_script_precomp.py +++ b/pype/plugins/nuke/load/load_script_precomp.py @@ -7,7 +7,7 @@ class LinkAsGroup(api.Loader): """Copy the published file to be pasted at the desired location""" representations = ["nk"] - families = ["workfile"] + families = ["workfile", "nukenodes"] label = "Load Precomp" order = 0 @@ -63,8 +63,6 @@ class LinkAsGroup(api.Loader): colorspace = context["version"]["data"].get("colorspace", None) self.log.info("colorspace: {}\n".format(colorspace)) - # ['version', 'file', 'reading', 'output', 'useOutput'] - P["name"].setValue("{}_{}".format(name, namespace)) P["useOutput"].setValue(True) @@ -74,14 +72,15 @@ class LinkAsGroup(api.Loader): if n.Class() == "Group" if get_avalon_knob_data(n)] - # create panel for selecting output - panel_choices = " ".join(writes) - panel_label = "Select write node for output" - p = nuke.Panel("Select Write Node") - p.addEnumerationPulldown( - panel_label, panel_choices) - p.show() - P["output"].setValue(p.value(panel_label)) + if writes: + # create panel for selecting output + panel_choices = " ".join(writes) + panel_label = "Select write node for output" + p = nuke.Panel("Select Write Node") + p.addEnumerationPulldown( + panel_label, panel_choices) + p.show() + P["output"].setValue(p.value(panel_label)) P["tile_color"].setValue(0xff0ff0ff) diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index e1c75584d7..8f01d4511b 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -1,9 +1,6 @@ -import os import contextlib -from avalon import api -import 
avalon.io as io - +from avalon import api, io import nuke diff --git a/pype/plugins/nuke/publish/collect_backdrop.py b/pype/plugins/nuke/publish/collect_backdrop.py new file mode 100644 index 0000000000..d98a20aee0 --- /dev/null +++ b/pype/plugins/nuke/publish/collect_backdrop.py @@ -0,0 +1,83 @@ +import pyblish.api +import pype.api as pype +from pype.nuke import lib as pnlib +import nuke + +@pyblish.api.log +class CollectBackdrops(pyblish.api.InstancePlugin): + """Collect Backdrop node instance and its content + """ + + order = pyblish.api.CollectorOrder + 0.22 + label = "Collect Backdrop" + hosts = ["nuke"] + families = ["nukenodes"] + + def process(self, instance): + + bckn = instance[0] + + # define size of the backdrop + left = bckn.xpos() + top = bckn.ypos() + right = left + bckn['bdwidth'].value() + bottom = top + bckn['bdheight'].value() + + # iterate all nodes + for node in nuke.allNodes(): + + # exclude viewer + if node.Class() == "Viewer": + continue + + # find all related nodes + if (node.xpos() > left) \ + and (node.xpos() + node.screenWidth() < right) \ + and (node.ypos() > top) \ + and (node.ypos() + node.screenHeight() < bottom): + + # add contained nodes to instance's node list + instance.append(node) + + # get all connections from outside of backdrop + nodes = instance[1:] + connections_in, connections_out = pnlib.get_dependent_nodes(nodes) + instance.data["connections_in"] = connections_in + instance.data["connections_out"] = connections_out + + # make label nicer + instance.data["label"] = "{0} ({1} nodes)".format( + bckn.name(), len(instance)-1) + + instance.data["families"].append(instance.data["family"]) + + # Get frame range + handle_start = instance.context.data["handleStart"] + handle_end = instance.context.data["handleEnd"] + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) + + # get version + version = pype.get_version_from_path(nuke.root().name()) + instance.data['version'] = version + + # Add version data to instance + version_data = { + "handles": handle_start, + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "version": int(version), + "families": [instance.data["family"]] + instance.data["families"], + "subset": instance.data["subset"], + "fps": instance.context.data["fps"] + } + + instance.data.update({ + "versionData": version_data, + "frameStart": first_frame, + "frameEnd": last_frame + }) + self.log.info("Backdrop content collected: `{}`".format(instance[:])) + self.log.info("Backdrop instance collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/collect_gizmo.py b/pype/plugins/nuke/publish/collect_gizmo.py new file mode 100644 index 0000000000..11e8c17a3f --- /dev/null +++ b/pype/plugins/nuke/publish/collect_gizmo.py @@ -0,0 +1,56 @@ +import pyblish.api +import pype.api as pype +import nuke + + +@pyblish.api.log +class CollectGizmo(pyblish.api.InstancePlugin): + """Collect Gizmo (group) node instance and its content + """ + + order = pyblish.api.CollectorOrder + 0.22 + label = "Collect Gizmo (Group)" + hosts = ["nuke"] + families = ["gizmo"] + + def process(self, instance): + + grpn = instance[0] + + # add family to families + instance.data["families"].insert(0, instance.data["family"]) + # make label nicer + instance.data["label"] = "{0} ({1} nodes)".format( + grpn.name(), len(instance) - 1) + + # Get frame range + handle_start = instance.context.data["handleStart"] + 
handle_end = instance.context.data["handleEnd"] + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) + + # get version + version = pype.get_version_from_path(nuke.root().name()) + instance.data['version'] = version + + # Add version data to instance + version_data = { + "handles": handle_start, + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "colorspace": nuke.root().knob('workingSpaceLUT').value(), + "version": int(version), + "families": [instance.data["family"]] + instance.data["families"], + "subset": instance.data["subset"], + "fps": instance.context.data["fps"] + } + + instance.data.update({ + "versionData": version_data, + "frameStart": first_frame, + "frameEnd": last_frame + }) + self.log.info("Gizmo content collected: `{}`".format(instance[:])) + self.log.info("Gizmo instance collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 5303dc6456..cffe415058 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -21,7 +21,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): self.log.debug("asset_data: {}".format(asset_data["data"])) instances = [] - # creating instances per write node + + root = nuke.root() self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes())) for node in nuke.allNodes(): @@ -31,11 +32,11 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): continue except Exception as E: self.log.warning(E) - continue + # get data from avalon knob self.log.debug("node[name]: {}".format(node['name'].value())) - avalon_knob_data = get_avalon_knob_data(node) + avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"]) self.log.debug("avalon_knob_data: {}".format(avalon_knob_data)) @@ -45,6 +46,14 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): if avalon_knob_data["id"] != "pyblish.avalon.instance": continue + # establish families + family = avalon_knob_data["family"] + families = list() + + # skip disabled nodes, but exclude backdrops from this check + if ("nukenodes" not in family) and (node["disable"].value()): + continue + subset = avalon_knob_data.get( "subset", None) or node["name"].value() @@ -54,22 +63,56 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): # Add all nodes in group instances. 
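To make the frame-range bookkeeping in these collectors concrete, here is a minimal pure-Python sketch of the arithmetic (all values are assumed examples; the names mirror the collector code above):

    handle_start, handle_end = 10, 10          # handles from context data
    first_frame, last_frame = 991, 1120        # nuke.root() range incl. handles
    frame_start = first_frame + handle_start   # 1001 -> versionData["frameStart"]
    frame_end = last_frame - handle_end        # 1110 -> versionData["frameEnd"]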
if node.Class() == "Group": + # only alter families for render family + if ("render" in family): + # check if node is not disabled + families.append(avalon_knob_data["families"]) + if node["render"].value(): + self.log.info("flagged for render") + add_family = "render.local" + # dealing with local/farm rendering + if node["render_farm"].value(): + self.log.info("adding render farm family") + add_family = "render.farm" + instance.data["transfer"] = False + families.append(add_family) + else: + # add family into families + families.insert(0, family) + node.begin() for i in nuke.allNodes(): instance.append(i) node.end() - family = avalon_knob_data["families"] + family = avalon_knob_data["family"] + families = avalon_knob_data.get("families") + if families: + families = [families] + else: + families = [family] + + # Get format + format = root['format'].value() + resolution_width = format.width() + resolution_height = format.height() + pixel_aspect = format.pixelAspect() if node.Class() not in "Read": - if node["render"].value(): + if "render" not in node.knobs().keys(): + families.insert(0, family) + elif node["render"].value(): self.log.info("flagged for render") - family = "render.local" + add_family = "render.local" # dealing with local/farm rendering if node["render_farm"].value(): self.log.info("adding render farm family") - family = "render.farm" - instance.data['transfer'] = False + add_family = "render.farm" + instance.data["transfer"] = False + families.append(add_family) + else: + # add family into families + families.insert(0, family) instance.data.update({ "subset": subset, @@ -77,12 +120,15 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "label": node.name(), "name": node.name(), "subset": subset, - "family": avalon_knob_data["family"], - "families": [avalon_knob_data["family"], family], + "family": family, + "families": families, "avalonKnob": avalon_knob_data, "publish": node.knob('publish').value(), "step": 1, - "fps": nuke.root()['fps'].value() + "fps": nuke.root()['fps'].value(), + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "pixelAspect": pixel_aspect, }) @@ -90,5 +136,4 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): instances.append(instance) context.data["instances"] = instances - self.log.debug("context: {}".format(context)) diff --git a/pype/plugins/nuke/publish/collect_legacy_write.py b/pype/plugins/nuke/publish/collect_legacy_write.py index 05dbe4216c..cfb0798434 100644 --- a/pype/plugins/nuke/publish/collect_legacy_write.py +++ b/pype/plugins/nuke/publish/collect_legacy_write.py @@ -1,25 +1,31 @@ -import nuke - import pyblish.api -class CollectWriteLegacy(pyblish.api.ContextPlugin): +class CollectWriteLegacy(pyblish.api.InstancePlugin): """Collect legacy write nodes.""" - order = pyblish.api.CollectorOrder - label = "Collect Write Legacy" + order = pyblish.api.CollectorOrder + 0.0101 + label = "Collect Write node Legacy" hosts = ["nuke", "nukeassist"] - def process(self, context): + def process(self, instance): + self.log.info(instance[:]) + node = instance[0] - for node in nuke.allNodes(): - if node.Class() != "Write": - continue + if node.Class() not in ["Group", "Write"]: + return - if "avalon" not in node.knobs().keys(): - continue + family_knobs = ["ak:family", "avalon:family"] + test = [k for k in node.knobs().keys() if k in family_knobs] + self.log.info(test) - instance = context.create_instance( - node.name(), family="write.legacy" + if len(test) == 1: + if "render" in node[test[0]].value(): + 
self.log.info("render") + return + + if "render" in node.knobs(): + instance.data.update( + {"family": "write.legacy", + "families": []} ) - instance.append(node) diff --git a/pype/plugins/nuke/publish/collect_workfile.py b/pype/plugins/nuke/publish/collect_workfile.py index fcec97769a..aaee554fbf 100644 --- a/pype/plugins/nuke/publish/collect_workfile.py +++ b/pype/plugins/nuke/publish/collect_workfile.py @@ -78,7 +78,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "publish": root.knob('publish').value(), "family": family, "families": [family], - "representations": list() + "representations": list(), + "subsetGroup": "workfiles" }) # adding basic script data diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 28470c94de..dd3049834d 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.1 label = "Collect Writes" hosts = ["nuke", "nukeassist"] - families = ["render", "render.local", "render.farm"] + families = ["write"] def process(self, instance): @@ -76,7 +76,8 @@ } try: - collected_frames = os.listdir(output_dir) + collected_frames = [f for f in os.listdir(output_dir) + if ext in f] if collected_frames: representation['frameStart'] = "%0{}d".format( len(str(last_frame))) % first_frame @@ -99,7 +100,7 @@ "subset": instance.data["subset"], "fps": instance.context.data["fps"] } - + instance.data["family"] = "write" group_node = [x for x in instance if x.Class() == "Group"][0] deadlineChunkSize = 1 if "deadlineChunkSize" in group_node.knobs(): @@ -109,6 +110,7 @@ if "deadlinePriority" in group_node.knobs(): deadlinePriority = group_node["deadlinePriority"].value() + families = [f for f in instance.data["families"] if "write" not in f] instance.data.update({ "versionData": version_data, "path": path, @@ -119,9 +121,13 @@ "frameStart": first_frame, "frameEnd": last_frame, "outputType": output_type, + "family": "write", + "families": families, "colorspace": node["colorspace"].value(), "deadlineChunkSize": deadlineChunkSize, - "deadlinePriority": deadlinePriority + "deadlinePriority": deadlinePriority, + "subsetGroup": "renders" }) + self.log.debug("instance.data: {}".format(instance.data)) diff --git a/pype/plugins/nuke/publish/extract_backdrop.py b/pype/plugins/nuke/publish/extract_backdrop.py new file mode 100644 index 0000000000..7b01b5deac --- /dev/null +++ b/pype/plugins/nuke/publish/extract_backdrop.py @@ -0,0 +1,103 @@ +import pyblish.api +from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib +import nuke +import os +import pype +reload(pnlib) + +class ExtractBackdropNode(pype.api.Extractor): + """Extracting content of backdrop nodes + + Will create a nuke script containing only the backdrop's nodes. + It will also resolve Input and Output nodes. 
+ + """ + + order = pyblish.api.ExtractorOrder + label = "Extract Backdrop" + hosts = ["nuke"] + families = ["nukenodes"] + + def process(self, instance): + tmp_nodes = list() + nodes = instance[1:] + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = "{0}.nk".format(instance.name) + path = os.path.join(stagingdir, filename) + + # maintain selection + with anlib.maintained_selection(): + # all connections outside of backdrop + connections_in = instance.data["connections_in"] + connections_out = instance.data["connections_out"] + self.log.debug("_ connections_in: `{}`".format(connections_in)) + self.log.debug("_ connections_out: `{}`".format(connections_out)) + + # create Input nodes named after the node and input they replace (*_INP) + for n, inputs in connections_in.items(): + for i, input in inputs: + inpn = nuke.createNode("Input") + inpn["name"].setValue("{}_{}_INP".format(n.name(), i)) + n.setInput(i, inpn) + inpn.setXYpos(input.xpos(), input.ypos()) + nodes.append(inpn) + tmp_nodes.append(inpn) + + anlib.reset_selection() + + # connect output node + for n, output in connections_out.items(): + opn = nuke.createNode("Output") + self.log.info(n.name()) + self.log.info(output.name()) + output.setInput( + next((i for i, d in enumerate(output.dependencies()) + if d.name() in n.name()), 0), opn) + opn.setInput(0, n) + opn.autoplace() + nodes.append(opn) + tmp_nodes.append(opn) + anlib.reset_selection() + + # select nodes to copy + anlib.reset_selection() + anlib.select_nodes(nodes) + # create tmp nk file + # save file to the path + nuke.nodeCopy(path) + + # Clean up + for tn in tmp_nodes: + nuke.delete(tn) + + # restore original connections + # reconnect input node + for n, inputs in connections_in.items(): + for i, input in inputs: + n.setInput(i, input) + + # reconnect output node + for n, output in connections_out.items(): + output.setInput( + next((i for i, d in enumerate(output.dependencies()) + if d.name() in n.name()), 0), n) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + # create representation + representation = { + 'name': 'nk', + 'ext': 'nk', + 'files': filename, + "stagingDir": stagingdir + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '{}' to: {}".format( + instance.name, path)) + + self.log.info("Data {}".format( + instance.data)) diff --git a/pype/plugins/nuke/publish/extract_gizmo.py b/pype/plugins/nuke/publish/extract_gizmo.py new file mode 100644 index 0000000000..36ef1d464c --- /dev/null +++ b/pype/plugins/nuke/publish/extract_gizmo.py @@ -0,0 +1,95 @@ +import pyblish.api +from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib +from pype.nuke import utils as pnutils +import nuke +import os +import pype + + +class ExtractGizmo(pype.api.Extractor): + """Extracting Gizmo (Group) node + + Will create a nuke script containing only the Gizmo node. 
+ """ + + order = pyblish.api.ExtractorOrder + label = "Extract Gizmo (Group)" + hosts = ["nuke"] + families = ["gizmo"] + + def process(self, instance): + tmp_nodes = list() + orig_grpn = instance[0] + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = "{0}.nk".format(instance.name) + path = os.path.join(stagingdir, filename) + + # maintain selection + with anlib.maintained_selection(): + orig_grpn_name = orig_grpn.name() + tmp_grpn_name = orig_grpn_name + "_tmp" + # select original group node + anlib.select_nodes([orig_grpn]) + + # copy to clipboard + nuke.nodeCopy("%clipboard%") + + # reset selection to none + anlib.reset_selection() + + # paste clipboard + nuke.nodePaste("%clipboard%") + + # assign pasted node + copy_grpn = nuke.selectedNode() + copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos()) + + # convert gizmos to groups + pnutils.bake_gizmos_recursively(copy_grpn) + + # remove avalonknobs + knobs = copy_grpn.knobs() + avalon_knobs = [k for k in knobs.keys() + for ak in ["avalon:", "ak:"] + if ak in k] + avalon_knobs.append("publish") + for ak in avalon_knobs: + copy_grpn.removeKnob(knobs[ak]) + + # add to temporary nodes + tmp_nodes.append(copy_grpn) + + # swap names + orig_grpn.setName(tmp_grpn_name) + copy_grpn.setName(orig_grpn_name) + + # create tmp nk file + # save file to the path + nuke.nodeCopy(path) + + # Clean up + for tn in tmp_nodes: + nuke.delete(tn) + + # rename back to original + orig_grpn.setName(orig_grpn_name) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + # create representation + representation = { + 'name': 'gizmo', + 'ext': 'nk', + 'files': filename, + "stagingDir": stagingdir + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '{}' to: {}".format( + instance.name, path)) + + self.log.info("Data {}".format( + instance.data)) diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py deleted file mode 100644 index 791b9d7969..0000000000 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ /dev/null @@ -1,187 +0,0 @@ -import os -import nuke -import pyblish.api -import pype - -class ExtractReviewData(pype.api.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review Data" - - families = ["review"] - hosts = ["nuke"] - - def process(self, instance): - - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("creating staging dir:") - self.staging_dir(instance) - - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) - - if "still" not in instance.data["families"]: - self.render_review_representation(instance, - representation="mov") - self.render_review_representation(instance, - representation="jpeg") - else: - self.render_review_representation(instance, representation="jpeg") - - # Restore selection - [i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] - - def render_review_representation(self, - instance, - representation="mov"): - - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" 
- - temporary_nodes = [] - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) - - collection = instance.data.get("collection", None) - - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") - - # get first and last frame - first_frame = min(collection.indexes) - last_frame = max(collection.indexes) - else: - fname = os.path.basename(instance.data.get("path", None)) - fhead = os.path.splitext(fname)[0] + "." - first_frame = instance.data.get("frameStart", None) - last_frame = instance.data.get("frameEnd", None) - - rnode = nuke.createNode("Read") - - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) - - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode - - # get input process and connect it to baking - ipn = self.get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - reformat_node = nuke.createNode("Reformat") - - ref_node = self.nodes.get("Reformat", None) - if ref_node: - for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) - if isinstance(v, unicode): - v = str(v) - reformat_node[k].setValue(v) - - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - # create write node - write_node = nuke.createNode("Write") - - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 - - repre = { - 'name': name, - 'ext': representation, - 'files': file, - "stagingDir": stagingDir, - "frameStart": first_frame, - "frameEnd": last_frame, - "anatomy_template": "render", - "tags": tags - } - instance.data["representations"].append(repre) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - self.log.debug("representations: {}".format(instance.data["representations"])) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def get_view_process_node(self): - - # Select only the target node - if nuke.selectedNodes(): - [n.setSelected(False) for n in nuke.selectedNodes()] - - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if 
"VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) - - if ipn_orig: - nuke.nodeCopy('%clipboard%') - - [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all - - nuke.nodePaste('%clipboard%') - - ipn = nuke.selectedNode() - - return ipn diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py new file mode 100644 index 0000000000..dfc10952cd --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -0,0 +1,58 @@ +import os +import pyblish.api +from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib +import pype +reload(pnlib) + + +class ExtractReviewLutData(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.005 + label = "Extract Review Data Lut" + + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + families = instance.data["families"] + self.log.info("Creating staging dir...") + if "representations" in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = ["review"] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) + + with anlib.maintained_selection(): + exporter = pnlib.Exporter_review_lut( + self, instance + ) + data = exporter.generate_lut() + + # assign to representations + instance.data["lutPath"] = os.path.join( + exporter.stagingDir, exporter.file).replace("\\", "/") + instance.data["representations"] += data["representations"] + + if "render.farm" in families: + instance.data["families"].remove("review") + instance.data["families"].remove("ftrack") + + self.log.debug( + "_ lutPath: {}".format(instance.data["lutPath"])) + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py new file mode 100644 index 0000000000..450bb39928 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -0,0 +1,174 @@ +import os +import nuke +from avalon.nuke import lib as anlib +import pyblish.api +import pype + + +class ExtractThumbnail(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Extract Thumbnail" + + families = ["review", "render.farm"] + hosts = ["nuke"] + + def process(self, instance): + + with anlib.maintained_selection(): + self.log.debug("instance: {}".format(instance)) + self.log.debug("instance.data[families]: {}".format( + instance.data["families"])) + + self.render_thumbnail(instance) + + def render_thumbnail(self, instance): + node = instance[0] # group node + self.log.info("Creating staging dir...") + if "representations" in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = ["review"] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] 
+ staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) + + temporary_nodes = [] + collection = instance.data.get("collection", None) + + if collection: + # get path + fname = os.path.basename(collection.format( + "{head}{padding}{tail}")) + fhead = collection.format("{head}") + + # get first and last frame + first_frame = min(collection.indexes) + last_frame = max(collection.indexes) + else: + fname = os.path.basename(instance.data.get("path", None)) + fhead = os.path.splitext(fname)[0] + "." + first_frame = instance.data.get("frameStart", None) + last_frame = instance.data.get("frameEnd", None) + + if "#" in fhead: + fhead = fhead.replace("#", "")[:-1] + + path_render = os.path.join(staging_dir, fname).replace("\\", "/") + # check if the file exists, otherwise connect to the write node + if os.path.isfile(path_render): + rnode = nuke.createNode("Read") + + rnode["file"].setValue(path_render) + + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + else: + previous_node = node + + # get input process and connect it to baking + ipn = self.get_view_process_node() + if ipn is not None: + ipn.setInput(0, previous_node) + previous_node = ipn + temporary_nodes.append(ipn) + + reformat_node = nuke.createNode("Reformat") + + ref_node = self.nodes.get("Reformat", None) + if ref_node: + for k, v in ref_node: + self.log.debug("k, v: {0}:{1}".format(k, v)) + if isinstance(v, unicode): + v = str(v) + reformat_node[k].setValue(v) + + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + + # create write node + write_node = nuke.createNode("Write") + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(staging_dir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] + + # retime to a single middle frame for the thumbnail + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 + + repre = { + 'name': name, + 'ext': "jpeg", + 'files': file, + "stagingDir": staging_dir, + "frameStart": first_frame, + "frameEnd": last_frame, + "anatomy_template": "render", + "tags": tags + } + instance.data["representations"].append(repre) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + self.log.debug( + "representations: {}".format(instance.data["representations"])) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + def get_view_process_node(self): + + # Select only the target node + if nuke.selectedNodes(): + [n.setSelected(False) for n in nuke.selectedNodes()] + + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + nuke.nodeCopy('%clipboard%') + + [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all + + nuke.nodePaste('%clipboard%') 
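    # note: the nodeCopy()/nodePaste() round-trip through '%clipboard%' above
    # is the usual Nuke idiom for duplicating a node detached from its inputs;
    # the pasted copy becomes the current selection, which the next statement
    # reads back via nuke.selectedNode().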
+ + ipn = nuke.selectedNode() + + return ipn diff --git a/pype/plugins/nuke/publish/increment_script_version.py b/pype/plugins/nuke/publish/increment_script_version.py index 2e33e65528..6e3ce08276 100644 --- a/pype/plugins/nuke/publish/increment_script_version.py +++ b/pype/plugins/nuke/publish/increment_script_version.py @@ -14,7 +14,7 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin): def process(self, context): assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") + "Publishing was not successful, so the version is not increased.") instances = context[:] diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py index 4044026b5e..d9207d2bfc 100644 --- a/pype/plugins/nuke/publish/submit_nuke_deadline.py +++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py @@ -1,9 +1,7 @@ import os import json import getpass - -import nuke - + from avalon import api from avalon.vendor import requests import re @@ -27,40 +25,36 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): def process(self, instance): - node = None - for x in instance: - if x.Class() == "Write": - node = x - - if node is None: - return + node = instance[0] + # for x in instance: + # if x.Class() == "Write": + # node = x + # + # if node is None: + # return DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL", "http://localhost:8082") assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL" context = instance.context - workspace = os.path.dirname(context.data["currentFile"]) - filepath = None - # get path - path = nuke.filename(node) - output_dir = instance.data['outputDir'] + # get output path + render_path = instance.data['path'] + render_dir = os.path.normpath(os.path.dirname(render_path)) - filepath = context.data["currentFile"] + script_path = context.data["currentFile"] - self.log.debug(filepath) - - filename = os.path.basename(filepath) + script_name = os.path.basename(script_path) comment = context.data.get("comment", "") - dirname = os.path.join(workspace, "renders") + deadline_user = context.data.get("deadlineUser", getpass.getuser()) - jobname = "%s - %s" % (filename, instance.name) + jobname = "%s - %s" % (script_name, instance.name) ver = re.search(r"\d+\.\d+", context.data.get("hostVersion")) try: # Ensure render folder exists - os.makedirs(dirname) + os.makedirs(render_dir) except OSError: pass @@ -71,7 +65,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): payload = { "JobInfo": { # Top-level group name - "BatchName": filename, + "BatchName": script_name, # Job name, as seen in Monitor "Name": jobname, @@ -95,20 +89,20 @@ }, "PluginInfo": { # Input - "SceneFile": filepath, + "SceneFile": script_path, # Output directory and filename - "OutputFilePath": dirname.replace("\\", "/"), + "OutputFilePath": render_dir.replace("\\", "/"), # "OutputFilePrefix": render_variables["filename_prefix"], # Mandatory for Deadline "Version": ver.group(), # Resolve relative references - "ProjectPath": workspace, - + "ProjectPath": script_path, + "AWSAssetFile0": render_path, # Only the specific write node is rendered. 
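    # note: in a Deadline submission payload, "JobInfo" carries scheduler-side
    # settings (batch/job names, chunking, priority) while "PluginInfo" is
    # handed to Deadline's Nuke plugin; the "WriteNode" key below tells that
    # plugin which write node to execute. The assembled payload is presumably
    # POSTed to the Deadline Web Service afterwards, e.g.
    # requests.post("{}/api/jobs".format(DEADLINE_REST_URL), json=payload).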
- "WriteNode": instance[0].name() + "WriteNode": node.name() }, # Mandatory for Deadline, may be empty diff --git a/pype/plugins/nuke/publish/validate_backdrop.py b/pype/plugins/nuke/publish/validate_backdrop.py new file mode 100644 index 0000000000..cf2d56087d --- /dev/null +++ b/pype/plugins/nuke/publish/validate_backdrop.py @@ -0,0 +1,69 @@ +import pyblish +from avalon.nuke import lib as anlib +import nuke + + +class SelectCenterInNodeGraph(pyblish.api.Action): + """ + Centering failed instance node in node graph + """ + + label = "Center node in node graph" + icon = "wrench" + on = "failed" + + def process(self, context, plugin): + + # Get the errored instances + failed = [] + for result in context.data["results"]: + if (result["error"] is not None and result["instance"] is not None + and result["instance"] not in failed): + failed.append(result["instance"]) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(failed, plugin) + + all_xC = list() + all_yC = list() + + # maintain selection + with anlib.maintained_selection(): + # collect all failed nodes xpos and ypos + for instance in instances: + bdn = instance[0] + xC = bdn.xpos() + bdn.screenWidth()/2 + yC = bdn.ypos() + bdn.screenHeight()/2 + + all_xC.append(xC) + all_yC.append(yC) + + self.log.info("all_xC: `{}`".format(all_xC)) + self.log.info("all_yC: `{}`".format(all_yC)) + + # zoom to nodes in node graph + nuke.zoom(2, [min(all_xC), min(all_yC)]) + + +@pyblish.api.log +class ValidateBackdrop(pyblish.api.InstancePlugin): + """Validate the amount of nodes on the backdrop node, in case the user + forgot to add nodes above the publishing backdrop node""" + + order = pyblish.api.ValidatorOrder + optional = True + families = ["nukenodes"] + label = "Validate Backdrop" + hosts = ["nuke"] + actions = [SelectCenterInNodeGraph] + + def process(self, instance): + connections_out = instance.data["connections_out"] + + msg_multiple_outputs = "Only one outgoing connection from \"{}\" is allowed".format( + instance.data["name"]) + assert len(connections_out.keys()) <= 1, msg_multiple_outputs + + msg_no_content = "No content on backdrop node: \"{}\"".format( + instance.data["name"]) + assert len(instance) > 1, msg_no_content diff --git a/pype/plugins/nuke/publish/validate_gizmo.py b/pype/plugins/nuke/publish/validate_gizmo.py new file mode 100644 index 0000000000..9c94ea88ef --- /dev/null +++ b/pype/plugins/nuke/publish/validate_gizmo.py @@ -0,0 +1,58 @@ +import pyblish +from avalon.nuke import lib as anlib +import nuke + + +class OpenFailedGroupNode(pyblish.api.Action): + """ + Opening failed instance group node in node graph + """ + + label = "Open Gizmo in Node Graph" + icon = "wrench" + on = "failed" + + def process(self, context, plugin): + + # Get the errored instances + failed = [] + for result in context.data["results"]: + if (result["error"] is not None and result["instance"] is not None + and result["instance"] not in failed): + failed.append(result["instance"]) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(failed, plugin) + + # maintain selection + with anlib.maintained_selection(): + # open each failed group node in the node graph + for instance in instances: + grpn = instance[0] + nuke.showDag(grpn) + + +@pyblish.api.log +class ValidateGizmo(pyblish.api.InstancePlugin): + """Validate amount of output nodes in gizmo (group) node""" + + order = pyblish.api.ValidatorOrder + optional = True + families = ["gizmo"] + label = "Validate Gizmo 
(Group)" + hosts = ["nuke"] + actions = [OpenFailedGroupNode] + + def process(self, instance): + grpn = instance[0] + + with grpn: + connections_out = nuke.allNodes('Output') + msg_multiple_outputs = "Only one outgoing connection from " + "\"{}\" is allowed".format(instance.data["name"]) + assert len(connections_out) <= 1, msg_multiple_outputs + + connections_in = nuke.allNodes('Input') + msg_missing_inputs = "At least one Input node has to be used in: " + "\"{}\"".format(instance.data["name"]) + assert len(connections_in) >= 1, msg_missing_inputs diff --git a/pype/plugins/nuke/publish/validate_write_bounding_box.py b/pype/plugins/nuke/publish/validate_write_bounding_box.py index cedeea6d9f..417d4ab004 100644 --- a/pype/plugins/nuke/publish/validate_write_bounding_box.py +++ b/pype/plugins/nuke/publish/validate_write_bounding_box.py @@ -57,7 +57,7 @@ class ValidateNukeWriteBoundingBox(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder optional = True - families = ["write"] + families = ["render"] label = "Write Bounding Box" hosts = ["nuke"] actions = [RepairNukeBoundingBoxAction] diff --git a/pype/plugins/nuke/publish/validate_write_deadline_tab.py b/pype/plugins/nuke/publish/validate_write_deadline_tab.py index 4e63e14a7f..0510bdaebf 100644 --- a/pype/plugins/nuke/publish/validate_write_deadline_tab.py +++ b/pype/plugins/nuke/publish/validate_write_deadline_tab.py @@ -39,7 +39,7 @@ class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin): label = "Deadline Tab" hosts = ["nuke"] optional = True - families = ["write"] + families = ["render"] actions = [RepairNukeWriteDeadlineTab] def process(self, instance): diff --git a/pype/plugins/nuke/publish/validate_write_knobs.py b/pype/plugins/nuke/publish/validate_write_knobs.py new file mode 100644 index 0000000000..072ffd4b17 --- /dev/null +++ b/pype/plugins/nuke/publish/validate_write_knobs.py @@ -0,0 +1,101 @@ +import nuke + +import pyblish.api +import pype.api + + +class ValidateNukeWriteKnobs(pyblish.api.ContextPlugin): + """Ensure knobs are consistent. + + Knobs to validate and their values come from the + "nuke/knobs.json" preset, which needs this structure: + { + "family": { + "knob_name": knob_value + } + } + """ + + order = pyblish.api.ValidatorOrder + label = "Knobs" + hosts = ["nuke"] + actions = [pype.api.RepairContextAction] + optional = True + + def process(self, context): + # Check for preset existence. + if not context.data["presets"]["nuke"].get("knobs"): + return + + invalid = self.get_invalid(context, compute=True) + if invalid: + raise RuntimeError( + "Found knobs with invalid values: {}".format(invalid) + ) + + @classmethod + def get_invalid(cls, context, compute=False): + invalid = context.data.get("invalid_knobs", []) + if compute: + invalid = cls.get_invalid_knobs(context) + + return invalid + + @classmethod + def get_invalid_knobs(cls, context): + presets = context.data["presets"]["nuke"]["knobs"] + invalid_knobs = [] + for instance in context: + # Filter publishable instances. + if not instance.data["publish"]: + continue + + # Filter families. + families = [instance.data["family"]] + families += instance.data.get("families", []) + families = list(set(families) & set(presets.keys())) + if not families: + continue + + # Get all knobs to validate. + knobs = {} + for family in families: + for preset in presets[family]: + knobs.update({preset: presets[family][preset]}) + + # Get invalid knobs. 
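    # note: given a hypothetical preset {"render": {"colorspace": "linear"}},
    # a node whose "colorspace" knob reads "sRGB" would be collected below as
    # {"knob": <knob object>, "expected": "linear", "current": "sRGB"}.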
+ nodes = [] + + for node in nuke.allNodes(): + nodes.append(node) + if node.Class() == "Group": + node.begin() + for i in nuke.allNodes(): + nodes.append(i) + node.end() + + for node in nodes: + for knob in node.knobs(): + if knob in knobs.keys(): + expected = knobs[knob] + if node[knob].value() != expected: + invalid_knobs.append( + { + "knob": node[knob], + "expected": expected, + "current": node[knob].value() + } + ) + + context.data["invalid_knobs"] = invalid_knobs + return invalid_knobs + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + for data in invalid: + if isinstance(data["expected"], unicode): + data["knob"].setValue(str(data["expected"])) + continue + + data["knob"].setValue(data["expected"]) diff --git a/pype/plugins/nuke/publish/validate_write_legacy.py b/pype/plugins/nuke/publish/validate_write_legacy.py index b452e60ba4..58beb56eba 100644 --- a/pype/plugins/nuke/publish/validate_write_legacy.py +++ b/pype/plugins/nuke/publish/validate_write_legacy.py @@ -4,8 +4,9 @@ import os import nuke from avalon import api +import re import pyblish.api - +from avalon.nuke import get_avalon_knob_data class RepairWriteLegacyAction(pyblish.api.Action): @@ -26,24 +27,47 @@ class RepairWriteLegacyAction(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) for instance in instances: - data = toml.loads(instance[0]["avalon"].value()) + if "Write" in instance[0].Class(): + data = toml.loads(instance[0]["avalon"].value()) + else: + data = get_avalon_knob_data(instance[0]) + + self.log.info(data) + data["xpos"] = instance[0].xpos() data["ypos"] = instance[0].ypos() data["input"] = instance[0].input(0) data["publish"] = instance[0]["publish"].value() data["render"] = instance[0]["render"].value() data["render_farm"] = instance[0]["render_farm"].value() + data["review"] = instance[0]["review"].value() - nuke.delete(instance[0]) + # nuke.delete(instance[0]) + + task = os.environ["AVALON_TASK"] + sanitized_task = re.sub('[^0-9a-zA-Z]+', '', task) + subset_name = "render{}Main".format( + sanitized_task.capitalize()) + + Create_name = "CreateWriteRender" + + creator_plugin = None + for Creator in api.discover(api.Creator): + if Creator.__name__ != Create_name: + continue + + creator_plugin = Creator + + # return api.create() + creator_plugin(data["subset"], data["asset"]).process() - family = "render{}".format(os.environ["AVALON_TASK"].capitalize()) - api.create(data["subset"], data["asset"], family) node = nuke.toNode(data["subset"]) node.setXYpos(data["xpos"], data["ypos"]) node.setInput(0, data["input"]) node["publish"].setValue(data["publish"]) node["render"].setValue(data["render"]) node["render_farm"].setValue(data["render_farm"]) + node["review"].setValue(data["review"]) class ValidateWriteLegacy(pyblish.api.InstancePlugin): diff --git a/pype/plugins/nuke/publish/validate_write_nodes.py b/pype/plugins/nuke/publish/validate_write_nodes.py index 564c912a7a..836cee6c8f 100644 --- a/pype/plugins/nuke/publish/validate_write_nodes.py +++ b/pype/plugins/nuke/publish/validate_write_nodes.py @@ -26,7 +26,7 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder optional = True - families = ["write"] + families = ["render"] label = "Write Node" actions = [RepairNukeWriteNodeAction] hosts = ["nuke"] diff --git a/pype/plugins/nukestudio/publish/collect_shots.py b/pype/plugins/nukestudio/publish/collect_shots.py index c1fcf05b89..455e25bf82 100644 --- a/pype/plugins/nukestudio/publish/collect_shots.py +++ 
b/pype/plugins/nukestudio/publish/collect_shots.py @@ -39,8 +39,13 @@ class CollectShots(api.InstancePlugin): data["name"] = data["subset"] + "_" + data["asset"] - data["label"] = data["asset"] + " - " + data["subset"] + " - tasks: {} - assetbuilds: {}".format( - data["tasks"], [x["name"] for x in data.get("assetbuilds", [])] + data["label"] = ( + "{} - {} - tasks:{} - assetbuilds:{}".format( + data["asset"], + data["subset"], + data["tasks"], + [x["name"] for x in data.get("assetbuilds", [])] + ) ) # Create instance. diff --git a/pype/plugins/nukestudio/publish/collect_tag_comments.py b/pype/plugins/nukestudio/publish/collect_tag_comments.py new file mode 100644 index 0000000000..1ec98e3d3b --- /dev/null +++ b/pype/plugins/nukestudio/publish/collect_tag_comments.py @@ -0,0 +1,35 @@ +from pyblish import api + + +class CollectClipTagComments(api.InstancePlugin): + """Collect comments from tags on selected track items and their sources.""" + + order = api.CollectorOrder + 0.013 + label = "Collect Comments" + hosts = ["nukestudio"] + families = ["clip"] + + def process(self, instance): + # Collect comments. + instance.data["comments"] = [] + + # Exclude non-tagged instances. + for tag in instance.data["tags"]: + if tag["name"].lower() == "comment": + instance.data["comments"].append( + tag.metadata().dict()["tag.note"] + ) + + # Find tags on the source clip. + tags = instance.data["item"].source().tags() + for tag in tags: + if tag.name().lower() == "comment": + instance.data["comments"].append( + tag.metadata().dict()["tag.note"] + ) + + # Update label with comments counter. + instance.data["label"] = "{} - comments:{}".format( + instance.data["label"], + len(instance.data["comments"]) + ) diff --git a/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py b/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py index d6d03e9722..c32df636e1 100644 --- a/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py +++ b/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py @@ -1,4 +1,6 @@ +import sys import pyblish.api +import six class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): @@ -91,7 +93,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): for instance in instances: instance.data['ftrackShotId'] = entity['id'] - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) # TASKS tasks = entity_data.get('tasks', []) @@ -114,7 +121,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): task_type=ftrack_types[task], parent=entity ) - self.session.commit() if 'childs' in entity_data: self.import_to_ftrack( @@ -141,7 +147,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.log.info(self.task_types) task['type'] = self.task_types[task_type] - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) return task @@ -150,6 +161,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): 'name': name, 'parent': parent }) - self.session.commit() + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + six.reraise(tp, value, tb) return entity diff --git a/pype/plugins/premiere/publish/validate_auto_sync_off.py b/pype/plugins/premiere/publish/validate_auto_sync_off.py index 3b46a682ab..b6429cfa05 100644 --- a/pype/plugins/premiere/publish/validate_auto_sync_off.py +++ 
b/pype/plugins/premiere/publish/validate_auto_sync_off.py @@ -1,6 +1,8 @@ +import sys import pyblish.api import pype.api import avalon.api +import six class ValidateAutoSyncOff(pyblish.api.ContextPlugin): @@ -48,4 +50,9 @@ class ValidateAutoSyncOff(pyblish.api.ContextPlugin): session = context.data["ftrackSession"] invalid = cls.get_invalid(context) invalid['custom_attributes']['avalon_auto_sync'] = False - session.commit() + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) diff --git a/pype/plugins/standalonepublisher/publish/collect_context.py b/pype/plugins/standalonepublisher/publish/collect_context.py index 43e2350be4..327b99f432 100644 --- a/pype/plugins/standalonepublisher/publish/collect_context.py +++ b/pype/plugins/standalonepublisher/publish/collect_context.py @@ -45,66 +45,71 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): with open(input_json_path, "r") as f: in_data = json.load(f) - asset_name = in_data['asset'] - family_preset_key = in_data.get('family_preset_key', '') - family = in_data['family'] - subset = in_data['subset'] + asset_name = in_data["asset"] + family_preset_key = in_data.get("family_preset_key", "") + family = in_data["family"] + subset = in_data["subset"] # Load presets presets = context.data.get("presets") if not presets: from pypeapp import config + presets = config.get_presets() # Get from presets anatomy key that will be used for getting template # - default integrate new is used if not set - anatomy_key = presets.get( - "standalone_publish", {}).get( - "families", {}).get( - family_preset_key, {}).get( - "anatomy_template" + anatomy_key = ( + presets.get("standalone_publish", {}) + .get("families", {}) + .get(family_preset_key, {}) + .get("anatomy_template") ) - project = io.find_one({'type': 'project'}) - asset = io.find_one({ - 'type': 'asset', - 'name': asset_name - }) - context.data['project'] = project - context.data['asset'] = asset + project = io.find_one({"type": "project"}) + asset = io.find_one({"type": "asset", "name": asset_name}) + context.data["project"] = project + context.data["asset"] = asset instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "asset": asset_name, - "label": subset, - "name": subset, - "family": family, - "frameStart": in_data.get("representations", [None])[0].get("frameStart", None), - "frameEnd": in_data.get("representations", [None])[0].get("frameEnd", None), - "families": [family, 'ftrack'], - }) + instance.data.update( + { + "subset": subset, + "asset": asset_name, + "label": subset, + "name": subset, + "family": family, + "version": in_data.get("version", 1), + "frameStart": in_data.get("representations", [None])[0].get( + "frameStart", None + ), + "frameEnd": in_data.get("representations", [None])[0].get( + "frameEnd", None + ), + "families": [family, "ftrack"], + } + ) self.log.info("collected instance: {}".format(instance.data)) self.log.info("parsing data: {}".format(in_data)) - instance.data['destination_list'] = list() - instance.data['representations'] = list() - instance.data['source'] = 'standalone publisher' + instance.data["destination_list"] = list() + instance.data["representations"] = list() + instance.data["source"] = "standalone publisher" - for component in in_data['representations']: + for component in in_data["representations"]: - component['destination'] = component['files'] - component['stagingDir'] = component['stagingDir'] + component["destination"] = 
component["files"] + component["stagingDir"] = component["stagingDir"] # Do not set anatomy_template if not specified if anatomy_key: - component['anatomy_template'] = anatomy_key - if isinstance(component['files'], list): - collections, remainder = clique.assemble(component['files']) + component["anatomy_template"] = anatomy_key + if isinstance(component["files"], list): + collections, remainder = clique.assemble(component["files"]) self.log.debug("collecting sequence: {}".format(collections)) instance.data["frameStart"] = int(component["frameStart"]) instance.data["frameEnd"] = int(component["frameEnd"]) - instance.data['fps'] = int(component['fps']) + instance.data["fps"] = int(component["fps"]) if component["preview"]: instance.data["families"].append("review") diff --git a/pype/plugins/standalonepublisher/publish/collect_matchmove.py b/pype/plugins/standalonepublisher/publish/collect_matchmove.py new file mode 100644 index 0000000000..b46efc1cf3 --- /dev/null +++ b/pype/plugins/standalonepublisher/publish/collect_matchmove.py @@ -0,0 +1,29 @@ +""" +Requires: + Nothing + +Provides: + Instance +""" + +import pyblish.api +import logging + + +log = logging.getLogger("collector") + + +class CollectMatchmovePublish(pyblish.api.InstancePlugin): + """ + Collector with only one reason for its existence - remove 'ftrack' + family implicitly added by Standalone Publisher + """ + + label = "Collect Matchmove - SA Publish" + order = pyblish.api.CollectorOrder + family = ["matchmove"] + hosts = ["standalonepublisher"] + + def process(self, instance): + if "ftrack" in instance.data["families"]: + instance.data["families"].remove("ftrack") diff --git a/pype/plugins/standalonepublisher/publish/extract_review.py b/pype/plugins/standalonepublisher/publish/extract_review.py index fbc14785a4..f06d9bcde0 100644 --- a/pype/plugins/standalonepublisher/publish/extract_review.py +++ b/pype/plugins/standalonepublisher/publish/extract_review.py @@ -2,7 +2,7 @@ import os import tempfile import pyblish.api -from pype.vendor import clique +import clique import pype.api diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ad2e59fc96..01dc76aacf 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -91,7 +91,9 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): text = today.strftime(date_format) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_frame_numbers(self, align, options=None, start_frame=None): + def add_frame_numbers( + self, align, options=None, start_frame=None, text=None + ): """ Convenience method to create the frame number expression. 
diff --git a/pype/plugins/standalonepublisher/publish/extract_review.py b/pype/plugins/standalonepublisher/publish/extract_review.py index fbc14785a4..f06d9bcde0 100644 --- a/pype/plugins/standalonepublisher/publish/extract_review.py +++ b/pype/plugins/standalonepublisher/publish/extract_review.py @@ -2,7 +2,7 @@ import os import tempfile import pyblish.api -from pype.vendor import clique +import clique import pype.api diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ad2e59fc96..01dc76aacf 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -91,7 +91,9 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): text = today.strftime(date_format) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_frame_numbers(self, align, options=None, start_frame=None): + def add_frame_numbers( + self, align, options=None, start_frame=None, text=None + ): """ Convenience method to create the frame number expression. @@ -103,7 +105,12 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if start_frame: options['frame_offset'] = start_frame - options['expression'] = r'%%{eif\:n+%d\:d}' % options['frame_offset'] + expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] + if text and isinstance(text, str): + text = r"{}".format(text) + expr = text.replace("{current_frame}", expr) + + options['expression'] = expr text = str(int(self.end_frame + options['frame_offset'])) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) @@ -121,7 +128,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): timecode = ffmpeg_burnins._frames_to_timecode( options['frame_offset'], - self.frame_rate + self.frame_rate ) options = options.copy() if not options.get('fps'): @@ -213,13 +220,15 @@ def example(input_path, output_path): burnin.render(output_path, overwrite=True) -def burnins_from_data(input_path, output_path, data, overwrite=True): +def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True): ''' This method adds burnins to video/image file based on presets setting. Extension of output MUST be same as input. (mov -> mov, avi -> avi,...) :param input_path: full path to input file where burnins should be add :type input_path: str + :param codec_data: all codec related arguments in list + :type codec_data: list :param output_path: full path to output file where output will be rendered :type output_path: str :param data: data required for burnin settings (more info below) @@ -284,8 +293,8 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): burnin = ModifiedBurnins(input_path, options_init=options_init) - start_frame = data.get("start_frame") - start_frame_tc = data.get('start_frame_tc', start_frame) + frame_start = data.get("frame_start") + frame_start_tc = data.get('frame_start_tc', frame_start) for align_text, preset in presets.get('burnins', {}).items(): align = None if align_text == 'TOP_LEFT': @@ -311,7 +320,7 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): if ( bi_func in ['frame_numbers', 'timecode'] and - start_frame is None + frame_start is None ): log.error( 'start_frame is not set in entered data!' @@ -320,9 +329,26 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): return if bi_func == 'frame_numbers': - burnin.add_frame_numbers(align, start_frame=start_frame) + current_frame_identifier = "{current_frame}" + text = preset.get('text') or current_frame_identifier + + if current_frame_identifier not in text: + log.warning(( + 'Text for Frame numbers doesn\'t have ' + '`{current_frame}` key in text!' + )) + + text_items = [] + split_items = text.split(current_frame_identifier) + for item in split_items: + text_items.append(item.format(**data)) + + text = "{current_frame}".join(text_items) + + burnin.add_frame_numbers(align, start_frame=frame_start, text=text) + elif bi_func == 'timecode': - burnin.add_timecode(align, start_frame=start_frame_tc) + burnin.add_timecode(align, start_frame=frame_start_tc) elif bi_func == 'text': if not preset.get('text'): log.error('Text is not set for text function burnin!') @@ -339,11 +365,20 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): ) return - burnin.render(output_path, overwrite=overwrite) + codec_args = '' + if codec_data: + codec_args = " ".join(codec_data) + + burnin.render(output_path, args=codec_args, overwrite=overwrite) if __name__ == '__main__': import sys import json data = json.loads(sys.argv[-1]) - burnins_from_data(data['input'], data['output'], data['burnin_data']) + burnins_from_data( + data['input'], + data['codec'], + data['output'], + data['burnin_data'] + )
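To make the `{current_frame}` handling above concrete: the preset text is split on the placeholder, the remaining pieces are formatted with burnin data, and the ffmpeg frame expression is spliced back in. A simplified standalone sketch (the shot value and frame offset are made up):

# Illustrative only: mimics the placeholder handling above.
data = {"shot": "sh010"}
frame_expr = r"%{eif\:n+1001\:d}"  # drawtext expression for frame numbers

text = "{shot} frame: {current_frame}"
parts = [part.format(**data) for part in text.split("{current_frame}")]
print(frame_expr.join(parts))  # sh010 frame: %{eif\:n+1001\:d}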
diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py index 25ed4135c3..7ad7318831 100644 --- a/pype/scripts/publish_filesequence.py +++ b/pype/scripts/publish_filesequence.py @@ -4,6 +4,7 @@ import os import logging import subprocess import platform +from shutil import which handler = logging.basicConfig() log = logging.getLogger("Publish Image Sequences") @@ -35,22 +36,32 @@ def __main__(): auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..") auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root - if kwargs.pype: - pype_root = kwargs.pype - else: - # if pype argument not specified, lets assume it is set in PATH - pype_root = "" - - print("Set pype root to: {}".format(pype_root)) - print("Paths: {}".format(kwargs.paths or [os.getcwd()])) - - paths = kwargs.paths or [os.getcwd()] pype_command = "pype.ps1" if platform.system().lower() == "linux": pype_command = "pype" elif platform.system().lower() == "windows": pype_command = "pype.bat" + if kwargs.pype: + pype_root = kwargs.pype + else: + # test if pype.bat / pype is in the PATH + # if it is, which() will return its path and we use that. + # if not, we use auto_pype_root path. Caveat of that one is + # that it can be a UNC path, which will not work on Windows. + + pype_path = which(pype_command) + + if pype_path: + pype_root = os.path.dirname(pype_path) + else: + pype_root = auto_pype_root + + print("Set pype root to: {}".format(pype_root)) + print("Paths: {}".format(kwargs.paths or [os.getcwd()])) + + paths = kwargs.paths or [os.getcwd()] + args = [ os.path.join(pype_root, pype_command), "publish", @@ -60,9 +71,11 @@ print("Pype command: {}".format(" ".join(args))) # Forcing forwarding the environment because environment inheritance does # not always work.
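For reference, the executable lookup introduced above can be sketched in isolation roughly like this (the fallback root is a stand-in; the real script derives it from the file location):

import os
import platform
from shutil import which

def resolve_pype_root(explicit_root=None, fallback_root="/opt/pype"):
    # Prefer an explicit argument, then a PATH lookup via shutil.which,
    # then the derived fallback root (which may be a UNC path).
    command = "pype.ps1"
    if platform.system().lower() == "linux":
        command = "pype"
    elif platform.system().lower() == "windows":
        command = "pype.bat"

    if explicit_root:
        return explicit_root, command

    found = which(command)
    if found:
        return os.path.dirname(found), command
    return fallback_root, command

print(resolve_pype_root())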
- exit_code = subprocess.call(args, env=os.environ) + # Cast all values in environment to str to be safe + env = {k: str(v) for k, v in os.environ.items()} + exit_code = subprocess.call(args, env=env) if exit_code != 0: - raise ValueError("Publishing failed.") + raise RuntimeError("Publishing failed.") if __name__ == '__main__': diff --git a/pype/services/idle_manager/idle_manager.py b/pype/services/idle_manager/idle_manager.py index fd07eeec14..0897245049 100644 --- a/pype/services/idle_manager/idle_manager.py +++ b/pype/services/idle_manager/idle_manager.py @@ -1,7 +1,7 @@ import time import collections -from Qt import QtCore, QtGui, QtWidgets -from pype.vendor.pynput import mouse, keyboard +from Qt import QtCore +from pynput import mouse, keyboard from pypeapp import Logger @@ -29,6 +29,13 @@ class IdleManager(QtCore.QThread): def tray_start(self): self.start() + def tray_exit(self): + self.stop() + try: + self.time_signals = {} + except Exception: + pass + def add_time_signal(self, emit_time, signal): """ If any module want to use IdleManager, need to use add_time_signal :param emit_time: time when signal will be emitted diff --git a/pype/services/rest_api/__init__.py b/pype/services/rest_api/__init__.py index c11ecfd761..fbeec00c88 100644 --- a/pype/services/rest_api/__init__.py +++ b/pype/services/rest_api/__init__.py @@ -1,4 +1,6 @@ from .rest_api import RestApiServer +from .base_class import RestApi, abort, route, register_statics +from .lib import RestMethods, CallbackResult def tray_init(tray_widget, main_widget): diff --git a/pype/services/rest_api/base_class.py b/pype/services/rest_api/base_class.py new file mode 100644 index 0000000000..6f06e01fcf --- /dev/null +++ b/pype/services/rest_api/base_class.py @@ -0,0 +1,124 @@ +from http import HTTPStatus + +from .lib import ( + RestApiFactory, Splitter, + ObjAlreadyExist, AbortException, +) + + +def route(path, url_prefix="", methods=[], strict_match=False): + """Decorator that registers a callback and all its attributes. + Callback is registered to Singleton RestApiFactory. + + :param path: Specify url path on which the callback should be triggered. + :type path: str + :param url_prefix: Specify prefix of path, defaults to "/". + :type url_prefix: str, list, optional + :param methods: Specify request method (GET, POST, PUT, etc.) when + callback will be triggered, defaults to ["GET"] + :type methods: list, str, optional + :param strict_match: Decides if callback can handle both single and + multiple entities (~/projects/<project_name> && ~/projects/), + defaults to False. + :type strict_match: bool + + `path` may include dynamic keys that will be stored to object which can + be obtained in callback. + Example: + - registered path: "/projects/<project_name>" + - url request path: "/projects/S001_test_project" + In this case the callback is triggered and + {"project_name": "S001_test_project"} is stored in the accessible data. + + `url_prefix` is optional but it is better to specify it for easier + filtering of requests. + Example: + - url_prefix: `"/avalon"` or `["avalon"]` + - path: `"/projects"` + In this case request path must be "/avalon/projects" to trigger registered + callback. + """ + + def decorator(callback): + RestApiFactory.register_route( + path, callback, url_prefix, methods, strict_match + ) + callback.restapi = True + return callback + return decorator
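The `route` decorator above only records the callback and its routing attributes in a singleton factory and returns the callback unchanged; a compact standalone sketch of that registration pattern (the `_registry` list is illustrative, not the factory's real structure):

_registry = []

def route(path, url_prefix="", methods=None, strict_match=False):
    # Record the callback and its routing attributes, then return it
    # unchanged so the decorated function stays directly callable.
    def decorator(callback):
        _registry.append({
            "path": path,
            "url_prefix": url_prefix,
            "methods": methods or ["GET"],
            "strict_match": strict_match,
            "callback": callback,
        })
        callback.restapi = True
        return callback
    return decorator

@route("/projects", url_prefix="/demo")
def list_projects():
    return {"projects": []}

print(_registry[0]["path"], list_projects())  # /projects {'projects': []}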
+ + +def register_statics(url_prefix, dir_path): + """Register directory with static files. + Statics are registered to Singleton RestApiFactory. + + :param url_prefix: Specify prefix of path, defaults to "/". + (Example: "/resources") + :type url_prefix: str + :param dir_path: Path to file folder where statics are located. + :type dir_path: str + """ + + RestApiFactory.register_statics((url_prefix, dir_path)) + + +def abort(status_code=HTTPStatus.NOT_FOUND, message=None): + """Should be used to stop registered callback. + `abort` raises AbortException that is handled by the request Handler, + which returns the entered status and may send an optional message in body. + + :param status_code: Status that will be sent in reply to the request, + defaults to 404 + :type status_code: int + :param message: Message to send in body, default messages are based on + status_code in Handler, defaults to None + :type message: str, optional + ... + :raises AbortException: This exception is handled in Handler to know + about launched `abort` + """ + + items = [] + items.append(str(status_code)) + if not message: + message = "" + + items.append(message) + + raise AbortException(Splitter.join(items)) + + +class RestApi: + """Base class for RestApi classes. + + Using this class is required when it is necessary to have a class for + handling requests and to use decorators for registering callbacks. + + It is possible to use decorators in another class only when an object of + the class where the decorators are used is registered to RestApiFactory. + """ + + def route(path, url_prefix="", methods=[], strict_match=False): + return route(path, url_prefix, methods, strict_match) + + @classmethod + def register_route( + cls, callback, path, url_prefix="", methods=[], strict_match=False + ): + return route(path, url_prefix, methods, strict_match)(callback) + + @classmethod + def register_statics(cls, url_prefix, dir_path): + return register_statics(url_prefix, dir_path) + + @classmethod + def abort(cls, status_code=HTTPStatus.NOT_FOUND, message=None): + abort(status_code, message) + + def __new__(cls, *args, **kwargs): + for obj in RestApiFactory.registered_objs: + if type(obj) == cls: + raise ObjAlreadyExist(cls) + instance = super(RestApi, cls).__new__(cls) + RestApiFactory.register_obj(instance) + return instance diff --git a/pype/services/rest_api/lib/__init__.py b/pype/services/rest_api/lib/__init__.py new file mode 100644 index 0000000000..1c521de712 --- /dev/null +++ b/pype/services/rest_api/lib/__init__.py @@ -0,0 +1,7 @@ +from .exceptions import ObjAlreadyExist, AbortException +from .lib import RestMethods, CallbackResult, RequestInfo, Splitter +from .factory import _RestApiFactory + +RestApiFactory = _RestApiFactory() + +from .handler import Handler
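To make the `abort` contract concrete, a minimal round-trip of the Splitter-packed payload, mirroring `abort` above and the parsing done in the request Handler:

Splitter = "__splitter__"

class AbortException(Exception):
    pass

def abort(status_code=404, message=None):
    # Pack status and optional message into one string; the Handler
    # splits it again and builds the error response from the parts.
    raise AbortException(Splitter.join([str(status_code), message or ""]))

try:
    abort(404, "Project was not found")
except AbortException as exc:
    status_code, message = str(exc).split(Splitter)
    print(int(status_code), message)  # 404 Project was not found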
diff --git a/pype/services/rest_api/lib/exceptions.py b/pype/services/rest_api/lib/exceptions.py new file mode 100644 index 0000000000..b17e1ce012 --- /dev/null +++ b/pype/services/rest_api/lib/exceptions.py @@ -0,0 +1,12 @@ +class ObjAlreadyExist(Exception): + """Raised when multiple objects of the same RestApi class are created.""" + def __init__(self, cls=None, message=None): + if not (cls or message): + message = "RestApi object was created twice." + elif not message: + message = "{} object was created twice.".format(cls.__name__) + super().__init__(message) + + +class AbortException(Exception): + pass diff --git a/pype/services/rest_api/lib/factory.py b/pype/services/rest_api/lib/factory.py new file mode 100644 index 0000000000..2b94d498ff --- /dev/null +++ b/pype/services/rest_api/lib/factory.py @@ -0,0 +1,359 @@ +import os +import re +import inspect +import collections +from .lib import RestMethods + +from pypeapp import Logger + +log = Logger().get_logger("RestApiFactory") + + +def prepare_fullpath(path, prefix): + """Concatenate registered path and prefix into the right form. + + :param path: Registered url path for registered callback. + :type path: str, list + :param prefix: Registered and prepared url prefix. + :type prefix: str, None + :return: concatenated prefix and path in right form + :rtype: str + """ + + if isinstance(path, (list, tuple)): + path_items = path + else: + path_items = [part for part in path.split("/") if part] + + fullpath = "/" + if path and prefix: + items = [part for part in prefix.split("/") if part] + items.extend(path_items) + fullpath = "/".join(items) + if isinstance(path, str) and path.endswith("/"): + fullpath += "/" + + elif path: + fullpath = "/".join(path_items) + if isinstance(path, str) and path.endswith("/"): + fullpath += "/" + + elif prefix: + fullpath = prefix + + if not fullpath.startswith("/"): + fullpath = "/{}".format(fullpath) + + return fullpath + + +def prepare_regex_from_path(full_path, strict_match): + """Prepare regex based on set path. + + When the registered path does not contain dynamic keys, no regex is set. + Dynamic keys are specified with "<" and ">" ("<{dynamic key}>"). + + :param full_path: Full url path (prefix + path) for registered callback. + :type full_path: str, list, None + :return: regex and keys of all groups in regex + :rtype: tuple(SRE_Pattern, list), tuple(None, None) + """ + get_indexes_regex = "<[^< >]+>" + all_found_keys = re.findall(get_indexes_regex, full_path) + if not all_found_keys: + return None, None + + regex_path = full_path + keys = [] + for key in all_found_keys: + replacement = "(?P{}\w+)".format(key) + keys.append(key.replace("<", "").replace(">", "")) + if not strict_match: + if full_path.endswith(key): + replacement = "?{}?".format(replacement) + regex_path = regex_path.replace(key, replacement) + + regex_path = "^{}$".format(regex_path) + + return re.compile(regex_path), keys
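A standalone illustration of what `prepare_regex_from_path` builds for a dynamic path with `strict_match` disabled (simplified to a single hard-coded key; the replacement makes both the trailing slash and the named group optional):

import re

full_path = "/avalon/projects/<project_name>"
regex = re.compile(
    "^{}$".format(
        full_path.replace("<project_name>", "?(?P<project_name>\\w+)?")
    )
)

for url in ("/avalon/projects", "/avalon/projects/S001_test_project"):
    match = re.match(regex, url)
    print(url, "->", match.groupdict())
# /avalon/projects -> {'project_name': None}
# /avalon/projects/S001_test_project -> {'project_name': 'S001_test_project'}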
+ + +def prepare_prefix(url_prefix): + """Check if the url_prefix is set and is in correct form. + + Output is None when prefix is empty or "/". + + :param url_prefix: Registered prefix of registered callback. + :type url_prefix: str, list, None + :return: Url prefix of registered callback + :rtype: str, None + """ + if url_prefix is None or (isinstance(url_prefix, str) and url_prefix.strip() == "/"): + return None + elif isinstance(url_prefix, (list, tuple)): + url_prefix = "/".join(url_prefix) + else: + items = [part for part in url_prefix.split("/") if part] + url_prefix = "/".join(items) + + if not url_prefix: + return None + + while url_prefix.endswith("/"): + url_prefix = url_prefix[:-1] + + if not url_prefix.startswith("/"): + url_prefix = "/{}".format(url_prefix) + + return url_prefix + + +def prepare_methods(methods, callback=None): + """Check and convert entered methods. + + String `methods` is converted to list. All values are converted to + `RestMethods` enum object. Invalid methods are ignored and printed out. + + :param methods: REST API methods on which the callback is triggered. + :type methods: str, list + :param callback: Registered callback, helps to identify where the + invalid method is. + :type callback: function, method, optional + :return: Valid methods + :rtype: list + """ + invalid_methods = collections.defaultdict(list) + + if not methods: + _methods = [RestMethods.GET] + elif isinstance(methods, str) or isinstance(methods, RestMethods): + _method = RestMethods.get(methods) + _methods = [] + if _method is None: + invalid_methods[methods].append(callback) + else: + _methods.append(_method) + + else: + _methods = [] + for method in methods: + _method = RestMethods.get(method) + if _method is None: + invalid_methods[method].append(callback) + continue + + _methods.append(_method) + + for method, callbacks in invalid_methods.items(): + callback_info = "" + + callbacks = [cbk for cbk in callbacks if cbk] + if len(callbacks) > 0: + multiple_ind = "" + if len(callbacks) > 1: + multiple_ind = "s" + + callback_items = [] + for callback in callbacks: + callback_items.append("\"{}<{}>\"".format( + callback.__qualname__, callback.__globals__["__file__"] + )) + + callback_info = " with callback{} {}".format( + multiple_ind, "| ".join(callback_items) + ) + + log.warning( + ("Invalid RestApi method \"{}\"{}").format(method, callback_info) + ) + + return _methods + + +def prepare_callback_info(callback): + """Prepare data for callback handling when it should be triggered.""" + callback_info = inspect.getfullargspec(callback) + + callback_args = callback_info.args + callback_args_len = 0 + if callback_args: + callback_args_len = len(callback_args) + if type(callback).__name__ == "method": + callback_args_len -= 1 + + defaults = callback_info.defaults + defaults_len = 0 + if defaults: + defaults_len = len(defaults) + + annotations = callback_info.annotations + + return { + "args": callback_args, + "args_len": callback_args_len, + "defaults": defaults, + "defaults_len": defaults_len, + "hasargs": callback_info.varargs is not None, + "haskwargs": callback_info.varkw is not None, + "annotations": annotations + }
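A tiny illustration of the `getfullargspec` introspection prepared above, which the handler later uses to decide whether a callback receives the request info object (function names here are illustrative):

import inspect

def describe(callback):
    # Same introspection as prepare_callback_info: count positional
    # arguments and detect *args / **kwargs so the handler knows
    # whether to pass the RequestInfo object.
    spec = inspect.getfullargspec(callback)
    return {
        "args_len": len(spec.args),
        "hasargs": spec.varargs is not None,
        "haskwargs": spec.varkw is not None,
    }

def no_arg():
    return True

def one_arg(request_info):
    return {"path": request_info}

print(describe(no_arg))   # {'args_len': 0, 'hasargs': False, 'haskwargs': False}
print(describe(one_arg))  # {'args_len': 1, 'hasargs': False, 'haskwargs': False}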
+ + +class _RestApiFactory: + """Factory is used to store and prepare callbacks for requests. + + Only one object should be created and used for all registered callbacks + when it is expected to run only one http server. + """ + registered_objs = [] + unprocessed_routes = [] + unprocessed_statics = [] + + prepared_routes = { + method: collections.defaultdict(list) for method in RestMethods + } + prepared_statics = {} + + has_routes = False + + def has_handlers(self): + return (self.has_routes or self.prepared_statics) + + def _process_route(self, route): + return self.unprocessed_routes.pop( + self.unprocessed_routes.index(route) + ) + + def _process_statics(self, item): + return self.unprocessed_statics.pop( + self.unprocessed_statics.index(item) + ) + + def register_route( + self, path, callback, url_prefix, methods, strict_match + ): + log.debug("Registering callback for item \"{}\"".format( + callback.__qualname__ + )) + route = { + "path": path, + "callback": callback, + "url_prefix": url_prefix, + "methods": methods, + "strict_match": strict_match + } + self.unprocessed_routes.append(route) + + def register_obj(self, obj): + """Register object for decorated methods in class definition.""" + self.registered_objs.append(obj) + + def register_statics(self, item): + log.debug("Registering statics path \"{}\"".format(item)) + self.unprocessed_statics.append(item) + + def _prepare_route(self, route): + """Prepare data of registered callbacks for routes. + + Registration info is prepared for easy filtering during handling + of requests. + + :param route: Contains all info necessary for filtering and + handling the callback for a registered route. + :type route: dict + """ + callback = route["callback"] + methods = prepare_methods(route["methods"], callback) + url_prefix = prepare_prefix(route["url_prefix"]) + fullpath = prepare_fullpath(route["path"], url_prefix) + regex, regex_keys = prepare_regex_from_path( + fullpath, route["strict_match"] + ) + callback_info = prepare_callback_info(callback) + + for method in methods: + self.has_routes = True + self.prepared_routes[method][url_prefix].append({ + "regex": regex, + "regex_keys": regex_keys, + "fullpath": fullpath, + "callback": callback, + "callback_info": callback_info + }) + + def prepare_registered(self): + """Iterate through all registered callbacks and statics to prepare them. + + Callbacks registered with decorators in classes are processed first, + through the registered objects. Remaining callbacks are filtered; it is + checked if methods have `__self__` or are defined in `<locals>` (it is + expected they do not require access to an object). + """ + for url_prefix, dir_path in self.unprocessed_statics: + self._process_statics((url_prefix, dir_path)) + dir_path = os.path.normpath(dir_path) + if not os.path.exists(dir_path): + log.warning( + "Directory path \"{}\" was not found".format(dir_path) + ) + continue + url_prefix = prepare_prefix(url_prefix) + self.prepared_statics[url_prefix] = dir_path + + for obj in self.registered_objs: + method_names = [ + attr for attr in dir(obj) + if inspect.ismethod(getattr(obj, attr)) + ] + for method_name in method_names: + method = obj.__getattribute__(method_name) + if not hasattr(method, "restapi"): + continue + + if not method.restapi: + continue + + for route in self.unprocessed_routes: + callback = route["callback"] + if not ( + callback.__qualname__ == method.__qualname__ and + callback.__module__ == method.__module__ and + callback.__globals__["__file__"] == ( + method.__globals__["__file__"] + ) + ): + continue + + route["callback"] = method + self._process_route(route) + self._prepare_route(route) + break + + for route in self.unprocessed_routes: + callback = route["callback"] + is_class_method = len(callback.__qualname__.split(".")) != 1 + if is_class_method: + missing_self = True + if hasattr(callback, "__self__"): + if callback.__self__ is not None: + missing_self = False + + if "<locals>" in callback.__qualname__: + pass + + elif missing_self: + log.warning(( + "Object of callback \"{}\" from \"{}\" is not" + " accessible for api. Register object or" + " register callback with already created object" + "(not with decorator in class).".format( + callback.__qualname__, + callback.__globals__["__file__"] + ) + )) + continue + + self._prepare_route(route) + continue + + self._prepare_route(route)
diff --git a/pype/services/rest_api/lib/handler.py b/pype/services/rest_api/lib/handler.py new file mode 100644 index 0000000000..732061c005 --- /dev/null +++ b/pype/services/rest_api/lib/handler.py @@ -0,0 +1,343 @@ +import os +import re +import json +import datetime +import traceback +import http.server +from http import HTTPStatus +from urllib.parse import urlparse + +from .lib import RestMethods, CallbackResult, RequestInfo +from .exceptions import AbortException +from . import RestApiFactory, Splitter + +from pypeapp import Logger + +log = Logger().get_logger("RestApiHandler") + + +class Handler(http.server.SimpleHTTPRequestHandler): + # TODO fill with necessary statuses + default_messages = { + HTTPStatus.BAD_REQUEST: "Bad request", + HTTPStatus.NOT_FOUND: "Not found" + } + + statuses = { + "POST": { + "OK": 200, + "CREATED": 201 + }, + "PUT": { + "OK": 200, + "NO_CONTENT": 204 + } + } + + def do_GET(self): + return self._handle_request(RestMethods.GET) + + def do_POST(self): + return self._handle_request(RestMethods.POST) + + def do_PUT(self): + return self._handle_request(RestMethods.PUT) + + def do_DELETE(self): + return self._handle_request(RestMethods.DELETE) + + def do_PATCH(self): + return self._handle_request(RestMethods.PATCH) + + def _handle_request(self, rest_method): + """Handle request by registered callbacks and statics. + + Statics are only for GET method requests. `_handle_statics` is called + when path is matching registered statics prefix. Callbacks are filtered + by method and their prefixes. When path is matching `_handle_callback` + is called. + + If no registered callback or statics match the requested path, a 400 + status is returned, and 500 when an unexpected error happens. + """ + parsed_url = urlparse(self.path) + path = parsed_url.path + + if rest_method is RestMethods.GET: + for prefix, dirpath in RestApiFactory.prepared_statics.items(): + if not path.startswith(prefix): + continue + _path = path[len(prefix):] + return self._handle_statics(dirpath, _path) + + matching_item = None + found_prefix = None + url_prefixes = RestApiFactory.prepared_routes[rest_method] + for url_prefix, items in url_prefixes.items(): + if matching_item is not None: + break + + if url_prefix is not None: + if not path.startswith(url_prefix): + continue + + found_prefix = url_prefix + + for item in items: + regex = item["regex"] + item_full_path = item["fullpath"] + if regex is None: + if path == item_full_path: + item["url_data"] = None + matching_item = item + break + + else: + found = re.match(regex, path) + if found: + item["url_data"] = found.groupdict() + matching_item = item + break + + if not matching_item: + if found_prefix is not None: + _path = path.replace(found_prefix, "") + if _path: + request_str = " \"{}\"".format(_path) + else: + request_str = "" + + message = "Invalid path request{} for prefix \"{}\"".format( + request_str, found_prefix + ) + else: + message = "Invalid path request \"{}\"".format(self.path) + log.debug(message) + self.send_error(HTTPStatus.BAD_REQUEST, message) + + return + + try: + log.debug("Triggering callback for path \"{}\"".format(path)) + + result = self._handle_callback( + matching_item, parsed_url, rest_method + ) + + return self._handle_callback_result(result, rest_method) + + except AbortException as exc: + status_code, message = str(exc).split(Splitter) + status_code = int(status_code) + if not message: + message = self.default_messages.get( + status_code, "UnexpectedError" + ) + + self.send_response(status_code) + self.send_header("Content-type", "text/html") + self.send_header("Content-Length", len(message)) + self.end_headers() + + self.wfile.write(message.encode()) + return message + + except Exception as exc: + log_message = "Unexpected Exception was raised (this is a bug!)" + log.error(log_message, exc_info=True) + items = [log_message] + items += traceback.extract_tb(exc.__traceback__).format() + message = "\n".join(items) + + self.send_response(HTTPStatus.INTERNAL_SERVER_ERROR) +
self.send_header("Content-type", "text/html") + self.send_header("Content-Length", len(message)) + self.end_headers() + + self.wfile.write(message.encode()) + return message + + def _handle_callback_result(self, result, rest_method): + """Send response to request based on result of callback. + :param result: Result returned by callback. + :type result: None, bool, dict, list, CallbackResult + :param rest_method: Rest api method (GET, POST, etc.). + :type rest_method: RestMethods + + Response is based on result type: + - None, True - It is expected everything was OK, status 200. + - False - It is expected callback was not successful, status 400 + - dict, list - Result is send under "data" key of body, status 200 + - CallbackResult - object specify status and data + """ + content_type = "application/json" + status = HTTPStatus.OK + success = True + message = None + data = None + + body = None + # TODO better handling of results + if isinstance(result, CallbackResult): + status = result.status_code + body_dict = {} + for key, value in result.items(): + if value is not None: + body_dict[key] = value + body = json.dumps(body_dict) + + elif result in [None, True]: + status = HTTPStatus.OK + success = True + message = "{} request for \"{}\" passed".format( + rest_method, self.path + ) + + elif result is False: + status = HTTPStatus.BAD_REQUEST + success = False + + elif isinstance(result, (dict, list)): + status = HTTPStatus.OK + data = result + + if status == HTTPStatus.NO_CONTENT: + self.send_response(status) + self.end_headers() + return + + if not body: + body_dict = {"success": success} + if message: + body_dict["message"] = message + + if not data: + data = {} + + body_dict["data"] = data + body = json.dumps(body_dict) + + self.send_response(status) + self.send_header("Content-type", content_type) + self.send_header("Content-Length", len(body)) + self.end_headers() + + self.wfile.write(body.encode()) + return body + + def _handle_callback(self, item, parsed_url, rest_method): + """Prepare data from request and trigger callback. + + Data are loaded from body of request if there are any. + + :param item: Item stored during callback registration with all info. + :type item: dict + :param parsed_url: Url parsed with urllib (separated path, query, etc). + :type parsed_url: ParseResult + :param rest_method: Rest api method (GET, POST, etc.). 
+ :type rest_method: RestMethods + """ + regex_keys = item["regex_keys"] + + url_data = None + if regex_keys: + url_data = {key: None for key in regex_keys} + if item["url_data"]: + for key, value in item["url_data"].items(): + url_data[key] = value + + in_data = None + cont_len = self.headers.get("Content-Length") + if cont_len: + content_length = int(cont_len) + in_data_str = self.rfile.read(content_length) + if in_data_str: + in_data = json.loads(in_data_str) + + request_info = RequestInfo( + url_data=url_data, + request_data=in_data, + query=parsed_url.query, + fragment=parsed_url.fragment, + params=parsed_url.params, + method=rest_method, + handler=self + ) + + callback = item["callback"] + callback_info = item["callback_info"] + + _args_len = callback_info["args_len"] + _has_args = callback_info["hasargs"] + _has_kwargs = callback_info["haskwargs"] + + args = [] + kwargs = {} + if _args_len == 0: + if _has_args: + args.append(request_info) + elif _has_kwargs: + kwargs["request_info"] = request_info + else: + args.append(request_info) + + return callback(*args, **kwargs) + + def _handle_statics(self, dirpath, path): + """Return static file in response when file exist in destination.""" + path = os.path.normpath(dirpath + path) + + ctype = self.guess_type(path) + try: + file_obj = open(path, "rb") + except OSError: + self.send_error(HTTPStatus.NOT_FOUND, "File not found") + return None + + try: + file_stat = os.fstat(file_obj.fileno()) + # Use browser cache if possible + if ("If-Modified-Since" in self.headers + and "If-None-Match" not in self.headers): + # compare If-Modified-Since and time of last file modification + try: + ims = http.server.email.utils.parsedate_to_datetime( + self.headers["If-Modified-Since"]) + except (TypeError, IndexError, OverflowError, ValueError): + # ignore ill-formed values + pass + else: + if ims.tzinfo is None: + # obsolete format with no timezone, cf. 
+ # https://tools.ietf.org/html/rfc7231#section-7.1.1.1 + ims = ims.replace(tzinfo=datetime.timezone.utc) + if ims.tzinfo is datetime.timezone.utc: + # compare to UTC datetime of last modification + last_modif = datetime.datetime.fromtimestamp( + file_stat.st_mtime, datetime.timezone.utc) + # remove microseconds, like in If-Modified-Since + last_modif = last_modif.replace(microsecond=0) + + if last_modif <= ims: + self.send_response(HTTPStatus.NOT_MODIFIED) + self.end_headers() + file_obj.close() + return None + + self.send_response(HTTPStatus.OK) + self.send_header("Content-type", ctype) + self.send_header("Content-Length", str(file_stat[6])) + self.send_header( + "Last-Modified", + self.date_time_string(file_stat.st_mtime) + ) + self.end_headers() + self.wfile.write(file_obj.read()) + return file_obj + except Exception: + log.error( + "Failed to read data from file \"{}\"".format(path), + exc_info=True + ) + finally: + file_obj.close()
diff --git a/pype/services/rest_api/lib/lib.py b/pype/services/rest_api/lib/lib.py new file mode 100644 index 0000000000..f5986cce8c --- /dev/null +++ b/pype/services/rest_api/lib/lib.py @@ -0,0 +1,197 @@ +import enum +from http import HTTPStatus +from urllib.parse import urlencode, parse_qs + +Splitter = "__splitter__" + + +class RestMethods(enum.Enum): + GET = "GET" + POST = "POST" + PUT = "PUT" + PATCH = "PATCH" + DELETE = "DELETE" + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self.value == other.value + + elif isinstance(other, str): + return self.value.lower() == other.lower() + return self.value == other + + def __hash__(self): + return enum.Enum.__hash__(self) + + @classmethod + def get(cls, name, default=None): + for meth in cls: + if meth == name: + return meth + return default + + +class HandlerDict(dict): + def __init__(self, data=None, *args, **kwargs): + if not data: + data = {} + super().__init__(data, *args, **kwargs) + + def __repr__(self): + return "<{}> {}".format(self.__class__.__name__, str(dict(self))) + + +class Params(HandlerDict): + pass + + +class UrlData(HandlerDict): + pass + + +class RequestData(HandlerDict): + pass + + +class Query(HandlerDict): + """Class converting url query to dict and string.""" + + def __init__(self, query): + if isinstance(query, dict): + pass + else: + query = parse_qs(query) + super().__init__(query) + + def get_string(self): + return urlencode(dict(self), doseq=True) + + +class Fragment(HandlerDict): + """Class converting url fragment to dict and string.""" + + def __init__(self, fragment): + if isinstance(fragment, dict): + _fragment = fragment + else: + _fragment = {} + for frag in fragment.split("&"): + if not frag: + continue + items = frag.split("=") + + value = None + key = items[0] + if len(items) == 2: + value = items[1] + elif len(items) > 2: + value = "=".join(items[1:]) + + _fragment[key] = value + + super().__init__(_fragment) + + def get_string(self): + items = [] + for parts in dict(self).items(): + items.append( + "=".join([p for p in parts if p]) + ) + return "&".join(items) + + +class RequestInfo: + """Object that can be passed to callback as argument. + + Contains necessary data for handling the request. + The object is created for single use and can be used similarly to a dict. + + :param url_data: Data collected from path when path with dynamic keys + is matching. + :type url_data: dict, None + :param request_data: Data of body from request. + :type request_data: dict, None + :param query: Query from url path of request. + :type query: str, None + :param fragment: Fragment from url path of request. + :type fragment: str, None + :param params: Params from url path of request. + :type params: str, None + :param method: Method of request (GET, POST, etc.) + :type method: RestMethods + :param handler: Handler handling request from http server. + :type handler: Handler + """ + def __init__( + self, url_data, request_data, query, fragment, params, method, handler + ): + self.url_data = UrlData(url_data) + self.request_data = RequestData(request_data) + self.query = Query(query) + self.fragment = Fragment(fragment) + self.params = Params(params) + self.method = method + self.handler = handler + + def __getitem__(self, key): + return self.__getattribute__(key) + + def to_dict(self): + return { + "url_data": self.url_data, + "request_data": self.request_data, + "query": self.query, + "fragment": self.fragment, + "params": self.params, + "method": self.method, + "handler": self.handler + } + + def items(self): + return self.to_dict().items() + + +class CallbackResult: + """Can be used as return value of callback. + + It is possible to specify status code, success boolean, message and data + to specify head and body of the request response. `abort` should rather + be used when the result is an error. + + :param status_code: Status code of result. + :type status_code: int + :param success: Success is key in body, may be used for handling response. + :type success: bool + :param message: Similar to success, message is key in body and may + be used for handling response. + :type message: str, None + :param data: Data is also key for body in response. + :type data: dict, None + """ + _data = {} + + def __init__( + self, status_code=HTTPStatus.OK, success=True, message=None, data=None, + **kwargs + ): + self.status_code = status_code + self._data = { + "success": success, + "message": message, + "data": data + } + for k, v in kwargs.items(): + self._data[k] = v + + def __getitem__(self, key): + return self._data[key] + + def __iter__(self): + for key in self._data: + yield key + + def get(self, key, default=None): + return self._data.get(key, default) + + def items(self): + return self._data.items()
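For illustration, a rough standalone mirror of how a `CallbackResult`-style payload maps onto a response (simplified from `_handle_callback_result` in the handler; the data values are made up):

import json
from http import HTTPStatus

# Hypothetical sketch: status code goes to the reply head, remaining
# non-None keys are serialized into the JSON body.
result = {
    "status_code": HTTPStatus.OK,
    "success": True,
    "message": None,
    "data": {"username": "demo"},
}
status = result.pop("status_code")
body = json.dumps({k: v for k, v in result.items() if v is not None})
print(int(status), body)  # 200 {"success": true, "data": {"username": "demo"}}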
diff --git a/pype/services/rest_api/rest_api.py b/pype/services/rest_api/rest_api.py index 22823a5586..21c4f7f51f 100644 --- a/pype/services/rest_api/rest_api.py +++ b/pype/services/rest_api/rest_api.py @@ -1,260 +1,124 @@ import os -import json -import enum -import collections -import threading -from inspect import signature import socket -import http.server -from http import HTTPStatus import socketserver - from Qt import QtCore +from .lib import RestApiFactory, Handler +from .base_class import route, register_statics from pypeapp import config, Logger log = Logger().get_logger("RestApiServer") -class RestMethods(enum.Enum): - GET = "GET" - POST = "POST" - PUT = "PUT" - PATCH = "PATCH" - DELETE = "DELETE" +class RestApiServer: + """Rest Api allows to access statics or callbacks with http requests. - def __repr__(self): - return str(self.value) + To register statics use `register_statics`. - def __eq__(self, other): - if isinstance(other, str): - return self.value == other - return self == other + To register a callback use the `register_callback` method or the `route` decorator. + The `route` decorator should be used with functions outside of classes; it is possible + to use it within a class when the class inherits `RestApi` (defined in `base_class.py`) + or the created object, with the decorator used, is registered with `register_obj`. - def __hash__(self): - return enum.Enum.__hash__(self) + .. code-block:: python + @route("/username", url_prefix="/api", methods=["get"], strict_match=False) + def get_username(): + return {"username": getpass.getuser()} - def __str__(self): - return str(self.value) + In that case response to `localhost:{port}/api/username` will be status + `200` with body including `{"data": {"username": getpass.getuser()}}` + Callback may expect one argument which will be filled with request + info. Data object has attributes: `request_data`, `query`, + `fragment`, `params`, `method`, `handler`, `url_data`. + request_data - Data from request body if there are any. + query - query from url path (?identificator=name) + fragment - fragment from url path (#reset_credentials) + params - params from url path + method - request method (GET, POST, PUT, etc.) + handler - Handler object of HttpServer Handler + url_data - dynamic url keys from registered path with their values -class Handler(http.server.SimpleHTTPRequestHandler): + Dynamic url keys may be set with path argument. + .. code-block:: python + from rest_api import route - def do_GET(self): - self.process_request(RestMethods.GET) - - def do_POST(self): - """Common code for POST. - - This trigger callbacks on specific paths. - - If request contain data and callback func has arg data are sent to - callback too. - - Send back return values of callbacks. - """ - self.process_request(RestMethods.POST) - - def process_request(self, rest_method): - """Because processing is technically the same for now so it is used - the same way - """ - in_data = None - cont_len = self.headers.get("Content-Length") - if cont_len: - content_length = int(cont_len) - in_data_str = self.rfile.read(content_length) - if in_data_str: - in_data = json.loads(in_data_str) - - registered_callbacks = self.server.registered_callbacks[rest_method] - - path_items = [part.lower() for part in self.path.split("/") if part] - request_path = "/".join(path_items) - callback = registered_callbacks.get(request_path) - result = None - if callback: - log.debug( - "Triggering callbacks for path \"{}\"".format(request_path) - ) - try: - params = signature(callback).parameters - if len(params) > 0 and in_data: - result = callback(in_data) - else: - result = callback() - - self.send_response(HTTPStatus.OK) - - if result in [None, True] or isinstance(result, (str, int)): - message = str(result) - if result in [None, True]: - message = "{} request for \"{}\" passed".format( - rest_method, self.path - ) - - self.handle_result(rest_method, message=message) - - return - - if isinstance(result, (dict, list)): - message = json.dumps(result).encode() - self.handle_result(rest_method, final_output=message) - - return - - except Exception: - message = "{} request for \"{}\" failed".format( - rest_method, self.path - ) - log.error(message, exc_info=True) - - self.send_response(HTTPStatus.BAD_REQUEST) - self.handle_result(rest_method, message=message, success=False) - - return - - self.handle_result(rest_method) - - else: - message = ( - "{} request for \"{}\" don't have registered callback" - ).format(rest_method, self.path) - log.debug(message) - - self.send_response(HTTPStatus.NOT_FOUND) - self.handle_result(rest_method, message=message, success=False) - - def handle_result( - self, rest_method, final_output=None, message=None, success=True, - content_type="application/json" - ): - self.send_header("Content-type", content_type) - if final_output: - output = final_output - else: - if not message: - output =
json.dumps({ - "success": False, - "message": ( - "{} request for \"{}\" has unexpected result" - ).format(rest_method, self.path) - }).encode() - - else: - output = json.dumps({ - "success": success, "message": message - }).encode() - - - if isinstance(output, str): - self.send_header("Content-Length", len(output)) - - self.end_headers() - - if output: - self.wfile.write(output) - - -class AdditionalArgsTCPServer(socketserver.TCPServer): - def __init__(self, registered_callbacks, *args, **kwargs): - self.registered_callbacks = registered_callbacks - super(AdditionalArgsTCPServer, self).__init__(*args, **kwargs) - - -class RestApiServer(QtCore.QThread): - """ Listener for REST requests. - - It is possible to register callbacks for url paths. - Be careful about crossreferencing to different QThreads it is not allowed. - """ - - def __init__(self): - super(RestApiServer, self).__init__() - self.registered_callbacks = { - RestMethods.GET: collections.defaultdict(list), - RestMethods.POST: collections.defaultdict(list), - RestMethods.PUT: collections.defaultdict(list), - RestMethods.PATCH: collections.defaultdict(list), - RestMethods.DELETE: collections.defaultdict(list) } + all_projects = { + "Proj1": {"proj_data": []}, + "Proj2": {"proj_data": []}, + } + @route("/projects/<project_name>", url_prefix="/api", methods=["get"], strict_match=False) + def get_projects(request_info): + project_name = request_info.url_data["project_name"] + if not project_name: + return all_projects + return all_projects.get(project_name) + + This example should end with status 404 if project is not found. In that + case it is best to use the `abort` method. + + .. code-block:: python + from rest_api import abort + + @route("/projects/<project_name>", url_prefix="/api", methods=["get"], strict_match=False) + def get_projects(request_info): + project_name = request_info.url_data["project_name"] + if not project_name: + return all_projects + + project = all_projects.get(project_name) + if not project: + abort(404, "Project \"{}\" was not found".format(project_name)) + return project + + `strict_match` allows handling not only a specific entity but all entity types. + E.g. "/projects/<project_name>" with `strict_match` set to False will also handle + the "/projects" or "/projects/<project_name>" path. It is necessary to set `strict_match` to + True when only a single entity should be handled. + + Callback may return many types. For more information read docstring of + `_handle_callback_result` defined in handler. + """ + def __init__(self): self.qaction = None self.failed_icon = None self._is_running = False + try: - self.presets = config.get_presets().get( - "services", {}).get( - "rest_api", {} - ) + self.presets = config.get_presets()["services"]["rest_api"] except Exception: self.presets = {"default_port": 8011, "exclude_ports": []} + log.debug(( + "No presets are set for RestApiModule."
+ " Using defaults \"{}\"" + ).format(str(self.presets))) - self.port = self.find_port() + port = self.find_port() + self.rest_api_thread = RestApiThread(self, port) + + statics_dir = os.path.sep.join([os.environ["PYPE_MODULE_ROOT"], "res"]) + self.register_statics("/res", statics_dir) + os.environ["PYPE_STATICS_SERVER"] = "{}/res".format( + os.environ["PYPE_REST_API_URL"] + ) def set_qaction(self, qaction, failed_icon): self.qaction = qaction self.failed_icon = failed_icon - def register_callback(self, path, callback, rest_method=RestMethods.POST): - if isinstance(path, (list, set)): - path = "/".join([part.lower() for part in path]) - elif isinstance(path, str): - path = "/".join( - [part.lower() for part in str(path).split("/") if part] - ) - - if isinstance(rest_method, str): - rest_method = str(rest_method).upper() - - if path in self.registered_callbacks[rest_method]: - log.error( - "Path \"{}\" has already registered callback.".format(path) - ) - return False - - log.debug( - "Registering callback for path \"{}\"".format(path) + def register_callback( + self, path, callback, url_prefix="", methods=[], strict_match=False + ): + RestApiFactory.register_route( + path, callback, url_prefix, methods, strict_match ) - self.registered_callbacks[rest_method][path] = callback - return True + def register_statics(self, url_prefix, dir_path): + register_statics(url_prefix, dir_path) - def tray_start(self): - self.start() - - @property - def is_running(self): - return self._is_running - - def stop(self): - self._is_running = False - - def run(self): - self._is_running = True - if not self.registered_callbacks: - log.info("Any registered callbacks for Rest Api server.") - return - - try: - log.debug( - "Running Rest Api server on URL:" - " \"http://localhost:{}\"".format(self.port) - ) - with AdditionalArgsTCPServer( - self.registered_callbacks, - ("", self.port), - Handler - ) as httpd: - while self._is_running: - httpd.handle_request() - except Exception: - log.warning( - "Rest Api Server service has failed", exc_info=True - ) - self._is_running = False - if self.qaction and self.failed_icon: - self.qaction.setIcon(self.failed_icon) + def register_obj(self, obj): + RestApiFactory.register_obj(obj) def find_port(self): start_port = self.presets["default_port"] @@ -276,3 +140,53 @@ class RestApiServer(QtCore.QThread): found_port ) return found_port + + def tray_start(self): + RestApiFactory.prepare_registered() + if not RestApiFactory.has_handlers(): + log.debug("There are not registered any handlers for RestApi") + return + self.rest_api_thread.start() + + @property + def is_running(self): + return self.rest_api_thread.is_running + + def stop(self): + self.rest_api_thread.is_running = False + + def thread_stopped(self): + self._is_running = False + + +class RestApiThread(QtCore.QThread): + """ Listener for REST requests. + + It is possible to register callbacks for url paths. + Be careful about crossreferencing to different Threads it is not allowed. 
+ """ + + def __init__(self, module, port): + super(RestApiThread, self).__init__() + self.is_running = False + self.module = module + self.port = port + + def run(self): + self.is_running = True + + try: + log.debug( + "Running Rest Api server on URL:" + " \"http://localhost:{}\"".format(self.port) + ) + with socketserver.TCPServer(("", self.port), Handler) as httpd: + while self.is_running: + httpd.handle_request() + except Exception: + log.warning( + "Rest Api Server service has failed", exc_info=True + ) + + self.is_running = False + self.module.thread_stopped() diff --git a/pype/services/statics_server/__init__.py b/pype/services/statics_server/__init__.py deleted file mode 100644 index 4b2721b18b..0000000000 --- a/pype/services/statics_server/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .statics_server import StaticsServer - - -def tray_init(tray_widget, main_widget): - return StaticsServer() diff --git a/pype/services/statics_server/statics_server.py b/pype/services/statics_server/statics_server.py deleted file mode 100644 index 8655cd9df9..0000000000 --- a/pype/services/statics_server/statics_server.py +++ /dev/null @@ -1,202 +0,0 @@ -import os -import sys -import datetime -import socket -import http.server -from http import HTTPStatus -import urllib -import posixpath -import socketserver - -from Qt import QtCore -from pypeapp import config, Logger - - -DIRECTORY = os.path.sep.join([os.environ['PYPE_MODULE_ROOT'], 'res']) - - -class Handler(http.server.SimpleHTTPRequestHandler): - def __init__(self, *args, **kwargs): - py_version = sys.version.split('.') - # If python version is 3.7 or higher - if int(py_version[0]) >= 3 and int(py_version[1]) >= 7: - super().__init__(*args, directory=DIRECTORY, **kwargs) - else: - self.directory = DIRECTORY - super().__init__(*args, **kwargs) - - def send_head(self): - """Common code for GET and HEAD commands. - - This sends the response code and MIME headers. - - Return value is either a file object (which has to be copied - to the outputfile by the caller unless the command was HEAD, - and must be closed by the caller under all circumstances), or - None, in which case the caller has nothing further to do. - - """ - path = self.translate_path(self.path) - f = None - if os.path.isdir(path): - parts = urllib.parse.urlsplit(self.path) - if not parts.path.endswith('/'): - # redirect browser - doing basically what apache does - self.send_response(HTTPStatus.MOVED_PERMANENTLY) - new_parts = (parts[0], parts[1], parts[2] + '/', - parts[3], parts[4]) - new_url = urllib.parse.urlunsplit(new_parts) - self.send_header("Location", new_url) - self.end_headers() - return None - for index in "index.html", "index.htm": - index = os.path.join(path, index) - if os.path.exists(index): - path = index - break - else: - return self.list_directory(path) - ctype = self.guess_type(path) - try: - f = open(path, 'rb') - except OSError: - self.send_error(HTTPStatus.NOT_FOUND, "File not found") - return None - - try: - fs = os.fstat(f.fileno()) - # Use browser cache if possible - if ("If-Modified-Since" in self.headers - and "If-None-Match" not in self.headers): - # compare If-Modified-Since and time of last file modification - try: - ims = http.server.email.utils.parsedate_to_datetime( - self.headers["If-Modified-Since"]) - except (TypeError, IndexError, OverflowError, ValueError): - # ignore ill-formed values - pass - else: - if ims.tzinfo is None: - # obsolete format with no timezone, cf. 
- # https://tools.ietf.org/html/rfc7231#section-7.1.1.1 - ims = ims.replace(tzinfo=datetime.timezone.utc) - if ims.tzinfo is datetime.timezone.utc: - # compare to UTC datetime of last modification - last_modif = datetime.datetime.fromtimestamp( - fs.st_mtime, datetime.timezone.utc) - # remove microseconds, like in If-Modified-Since - last_modif = last_modif.replace(microsecond=0) - - if last_modif <= ims: - self.send_response(HTTPStatus.NOT_MODIFIED) - self.end_headers() - f.close() - return None - - self.send_response(HTTPStatus.OK) - self.send_header("Content-type", ctype) - self.send_header("Content-Length", str(fs[6])) - self.send_header("Last-Modified", - self.date_time_string(fs.st_mtime)) - self.end_headers() - return f - except: - f.close() - raise - - def translate_path(self, path): - """Translate a /-separated PATH to the local filename syntax. - - Components that mean special things to the local file system - (e.g. drive or directory names) are ignored. (XXX They should - probably be diagnosed.) - - """ - # abandon query parameters - path = path.split('?',1)[0] - path = path.split('#',1)[0] - # Don't forget explicit trailing slash when normalizing. Issue17324 - trailing_slash = path.rstrip().endswith('/') - try: - path = urllib.parse.unquote(path, errors='surrogatepass') - except UnicodeDecodeError: - path = urllib.parse.unquote(path) - path = posixpath.normpath(path) - words = path.split('/') - words = filter(None, words) - path = self.directory - for word in words: - if os.path.dirname(word) or word in (os.curdir, os.pardir): - # Ignore components that are not a simple file/directory name - continue - path = os.path.join(path, word) - if trailing_slash: - path += '/' - return path - - -class StaticsServer(QtCore.QThread): - """ Measure user's idle time in seconds. - Idle time resets on keyboard/mouse input. - Is able to emit signals at specific time idle. 
- """ - def __init__(self): - super(StaticsServer, self).__init__() - self.qaction = None - self.failed_icon = None - self._is_running = False - self.log = Logger().get_logger(self.__class__.__name__) - try: - self.presets = config.get_presets().get( - 'services', {}).get('statics_server') - except Exception: - self.presets = {'default_port': 8010, 'exclude_ports': []} - - self.port = self.find_port() - - def set_qaction(self, qaction, failed_icon): - self.qaction = qaction - self.failed_icon = failed_icon - - def tray_start(self): - self.start() - - @property - def is_running(self): - return self._is_running - - def stop(self): - self._is_running = False - - def run(self): - self._is_running = True - try: - with socketserver.TCPServer(("", self.port), Handler) as httpd: - while self._is_running: - httpd.handle_request() - except Exception: - self.log.warning( - 'Statics Server service has failed', exc_info=True - ) - self._is_running = False - if self.qaction and self.failed_icon: - self.qaction.setIcon(self.failed_icon) - - def find_port(self): - start_port = self.presets['default_port'] - exclude_ports = self.presets['exclude_ports'] - found_port = None - # port check takes time so it's lowered to 100 ports - for port in range(start_port, start_port+100): - if port in exclude_ports: - continue - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - result = sock.connect_ex(('localhost', port)) - if result != 0: - found_port = port - if found_port is not None: - break - if found_port is None: - return None - os.environ['PYPE_STATICS_SERVER'] = 'http://localhost:{}'.format(found_port) - return found_port diff --git a/pype/setdress_api.py b/pype/setdress_api.py index c6de0a4f74..55a6b4a2fb 100644 --- a/pype/setdress_api.py +++ b/pype/setdress_api.py @@ -7,8 +7,7 @@ import copy from maya import cmds -from avalon import api -import avalon.io as io +from avalon import api, io from avalon.maya.lib import unique_namespace from pype.maya.lib import matrix_equals diff --git a/pype/standalonepublish/resources/menu.png b/pype/standalonepublish/resources/menu.png new file mode 100644 index 0000000000..da83b45244 Binary files /dev/null and b/pype/standalonepublish/resources/menu.png differ diff --git a/pype/standalonepublish/resources/menu.svg b/pype/standalonepublish/resources/menu.svg deleted file mode 100644 index ac1e728011..0000000000 --- a/pype/standalonepublish/resources/menu.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - diff --git a/pype/standalonepublish/resources/menu_disabled.png b/pype/standalonepublish/resources/menu_disabled.png new file mode 100644 index 0000000000..e4758f0b19 Binary files /dev/null and b/pype/standalonepublish/resources/menu_disabled.png differ diff --git a/pype/standalonepublish/resources/menu_hover.png b/pype/standalonepublish/resources/menu_hover.png new file mode 100644 index 0000000000..dfe8ed53b2 Binary files /dev/null and b/pype/standalonepublish/resources/menu_hover.png differ diff --git a/pype/standalonepublish/resources/menu_pressed.png b/pype/standalonepublish/resources/menu_pressed.png new file mode 100644 index 0000000000..a5f931b2c4 Binary files /dev/null and b/pype/standalonepublish/resources/menu_pressed.png differ diff --git a/pype/standalonepublish/resources/menu_pressed_hover.png b/pype/standalonepublish/resources/menu_pressed_hover.png new file mode 100644 index 0000000000..51503add0f Binary files /dev/null and b/pype/standalonepublish/resources/menu_pressed_hover.png differ diff --git 
diff --git a/pype/standalonepublish/resources/preview.svg b/pype/standalonepublish/resources/preview.svg
deleted file mode 100644
index 4a9810c1d5..0000000000
--- a/pype/standalonepublish/resources/preview.svg
+++ /dev/null
@@ -1,19 +0,0 @@
-[19 lines of SVG markup, lost in extraction; only the text node "PREVIEW" survived]
diff --git a/pype/standalonepublish/resources/thumbnail.svg b/pype/standalonepublish/resources/thumbnail.svg
deleted file mode 100644
index dbc228f8c8..0000000000
--- a/pype/standalonepublish/resources/thumbnail.svg
+++ /dev/null
@@ -1,19 +0,0 @@
-[19 lines of SVG markup, lost in extraction; only the text node "THUMBNAIL" survived]
diff --git a/pype/standalonepublish/resources/trash.png b/pype/standalonepublish/resources/trash.png
new file mode 100644
index 0000000000..8d12d5f8e0
Binary files /dev/null and b/pype/standalonepublish/resources/trash.png differ
diff --git a/pype/standalonepublish/resources/trash.svg b/pype/standalonepublish/resources/trash.svg
deleted file mode 100644
index 07905024c0..0000000000
--- a/pype/standalonepublish/resources/trash.svg
+++ /dev/null
@@ -1,23 +0,0 @@
-[23 lines of SVG markup, lost in extraction]
diff --git a/pype/standalonepublish/resources/trash_disabled.png b/pype/standalonepublish/resources/trash_disabled.png
new file mode 100644
index 0000000000..06f5ae5276
Binary files /dev/null and b/pype/standalonepublish/resources/trash_disabled.png differ
diff --git a/pype/standalonepublish/resources/trash_hover.png b/pype/standalonepublish/resources/trash_hover.png
new file mode 100644
index 0000000000..4725c0f8ab
Binary files /dev/null and b/pype/standalonepublish/resources/trash_hover.png differ
diff --git a/pype/standalonepublish/resources/trash_pressed.png b/pype/standalonepublish/resources/trash_pressed.png
new file mode 100644
index 0000000000..901b0e6d35
Binary files /dev/null and b/pype/standalonepublish/resources/trash_pressed.png differ
diff --git a/pype/standalonepublish/resources/trash_pressed_hover.png b/pype/standalonepublish/resources/trash_pressed_hover.png
new file mode 100644
index 0000000000..076ced260f
Binary files /dev/null and b/pype/standalonepublish/resources/trash_pressed_hover.png differ
diff --git a/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py b/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py
index 04ee88229f..9528e96ebf 100644
--- a/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py
+++ b/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py
@@ -1,4 +1,5 @@
 from . import QtCore
+import re
 
 
 class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):
diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py
index 9631fed258..daf8f5d773 100644
--- a/pype/standalonepublish/widgets/widget_component_item.py
+++ b/pype/standalonepublish/widgets/widget_component_item.py
@@ -1,21 +1,19 @@
 import os
 
 from . import QtCore, QtGui, QtWidgets
-from . import SvgButton
 from . import get_resource
-from avalon import style
+from pypeapp import style
 
 
 class ComponentItem(QtWidgets.QFrame):
-    C_NORMAL = '#777777'
-    C_HOVER = '#ffffff'
-    C_ACTIVE = '#4BB543'
-    C_ACTIVE_HOVER = '#4BF543'
     signal_remove = QtCore.Signal(object)
     signal_thumbnail = QtCore.Signal(object)
     signal_preview = QtCore.Signal(object)
     signal_repre_change = QtCore.Signal(object, object)
 
+    preview_text = "PREVIEW"
+    thumbnail_text = "THUMBNAIL"
+
     def __init__(self, parent, main_parent):
         super().__init__()
         self.has_valid_repre = True
@@ -55,10 +53,8 @@ class ComponentItem(QtWidgets.QFrame):
         self.icon.setText("")
         self.icon.setScaledContents(True)
 
-        self.btn_action_menu = SvgButton(
-            get_resource('menu.svg'), 22, 22,
-            [self.C_NORMAL, self.C_HOVER],
-            frame_image_info, False
+        self.btn_action_menu = PngButton(
+            name="menu", size=QtCore.QSize(22, 22)
         )
 
         self.action_menu = QtWidgets.QMenu()
@@ -85,7 +81,9 @@ class ComponentItem(QtWidgets.QFrame):
 
         self.file_info.setStyleSheet('padding-left:3px;')
 
-        expanding_sizePolicy.setHeightForWidth(self.name.sizePolicy().hasHeightForWidth())
+        expanding_sizePolicy.setHeightForWidth(
+            self.name.sizePolicy().hasHeightForWidth()
+        )
 
         frame_name_repre = QtWidgets.QFrame(frame)
@@ -101,7 +99,8 @@ class ComponentItem(QtWidgets.QFrame):
         layout.addWidget(self.ext, alignment=QtCore.Qt.AlignRight)
 
         frame_name_repre.setSizePolicy(
-            QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding
+            QtWidgets.QSizePolicy.MinimumExpanding,
+            QtWidgets.QSizePolicy.MinimumExpanding
         )
 
         # Repre + icons
@@ -124,17 +123,8 @@ class ComponentItem(QtWidgets.QFrame):
 
         frame_icons = QtWidgets.QFrame(frame_repre_icons)
 
-        self.preview = SvgButton(
-            get_resource('preview.svg'), 64, 18,
-            [self.C_NORMAL, self.C_HOVER, self.C_ACTIVE, self.C_ACTIVE_HOVER],
-            frame_icons
-        )
-
-        self.thumbnail = SvgButton(
-            get_resource('thumbnail.svg'), 84, 18,
-            [self.C_NORMAL, self.C_HOVER, self.C_ACTIVE, self.C_ACTIVE_HOVER],
-            frame_icons
-        )
+        self.preview = LightingButton(self.preview_text)
+        self.thumbnail = LightingButton(self.thumbnail_text)
 
         layout = QtWidgets.QHBoxLayout(frame_icons)
         layout.setSpacing(6)
@@ -162,12 +152,7 @@ class ComponentItem(QtWidgets.QFrame):
 
         layout_main.addWidget(frame_middle)
 
-        self.remove = SvgButton(
-            get_resource('trash.svg'), 22, 22,
-            [self.C_NORMAL, self.C_HOVER],
-            frame, False
-        )
-
+        self.remove = PngButton(name="trash", size=QtCore.QSize(22, 22))
         layout_main.addWidget(self.remove)
 
         layout = QtWidgets.QVBoxLayout(self)
@@ -272,16 +257,16 @@ class ComponentItem(QtWidgets.QFrame):
         self.signal_repre_change.emit(self, repre_name)
 
     def is_thumbnail(self):
-        return self.thumbnail.checked
+        return self.thumbnail.isChecked()
 
     def change_thumbnail(self, hover=True):
-        self.thumbnail.change_checked(hover)
+        self.thumbnail.setChecked(hover)
 
     def is_preview(self):
-        return self.preview.checked
+        return self.preview.isChecked()
 
     def change_preview(self, hover=True):
-        self.preview.change_checked(hover)
+        self.preview.setChecked(hover)
 
     def collect_data(self):
         in_files = self.in_data['files']
@@ -309,3 +294,233 @@ class ComponentItem(QtWidgets.QFrame):
             data['fps'] = self.in_data['fps']
 
         return data
+
+
+class LightingButton(QtWidgets.QPushButton):
+    lightingbtnstyle = """
+        QPushButton {
+            text-align: center;
+            color: #777777;
+            background-color: transparent;
+            border-width: 1px;
+            border-color: #777777;
+            border-style: solid;
+            padding-top: 2px;
+            padding-bottom: 2px;
+            padding-left: 3px;
+            padding-right: 3px;
+            border-radius: 3px;
+        }
+
+        QPushButton:hover {
+            border-color: #cccccc;
+            color: #cccccc;
+        }
+
+        QPushButton:pressed {
+            border-color: #ffffff;
+            color: #ffffff;
+        }
+
+        QPushButton:disabled {
+            border-color: #3A3939;
+            color: #3A3939;
+        }
+
+        QPushButton:checked {
+            border-color: #4BB543;
+            color: #4BB543;
+        }
+
+        QPushButton:checked:hover {
+            border-color: #4Bd543;
+            color: #4Bd543;
+        }
+
+        QPushButton:checked:pressed {
+            border-color: #4BF543;
+            color: #4BF543;
+        }
+    """
+
+    def __init__(self, text, *args, **kwargs):
+        super().__init__(text, *args, **kwargs)
+        self.setStyleSheet(self.lightingbtnstyle)
+
+        self.setCheckable(True)
+
+        preview_font_metrics = self.fontMetrics().boundingRect(text)
+        width = preview_font_metrics.width() + 16
+        height = preview_font_metrics.height() + 5
+        self.setMaximumWidth(width)
+        self.setMaximumHeight(height)
+
+
+class PngFactory:
+    png_names = {
+        "trash": {
+            "normal": QtGui.QIcon(get_resource("trash.png")),
+            "hover": QtGui.QIcon(get_resource("trash_hover.png")),
+            "pressed": QtGui.QIcon(get_resource("trash_pressed.png")),
+            "pressed_hover": QtGui.QIcon(
+                get_resource("trash_pressed_hover.png")
+            ),
+            "disabled": QtGui.QIcon(get_resource("trash_disabled.png"))
+        },
+
+        "menu": {
+            "normal": QtGui.QIcon(get_resource("menu.png")),
+            "hover": QtGui.QIcon(get_resource("menu_hover.png")),
+            "pressed": QtGui.QIcon(get_resource("menu_pressed.png")),
+            "pressed_hover": QtGui.QIcon(
+                get_resource("menu_pressed_hover.png")
+            ),
+            "disabled": QtGui.QIcon(get_resource("menu_disabled.png"))
+        }
+    }
+
+
+class PngButton(QtWidgets.QPushButton):
+    png_button_style = """
+        QPushButton {
+            border: none;
+            background-color: transparent;
+            padding-top: 0px;
+            padding-bottom: 0px;
+            padding-left: 0px;
+            padding-right: 0px;
+        }
+        QPushButton:hover {}
+        QPushButton:pressed {}
+        QPushButton:disabled {}
+        QPushButton:checked {}
+        QPushButton:checked:hover {}
+        QPushButton:checked:pressed {}
+    """
+
+    def __init__(
+        self, name=None, path=None, hover_path=None, pressed_path=None,
+        hover_pressed_path=None, disabled_path=None,
+        size=None, *args, **kwargs
+    ):
+        self._hovered = False
+        self._pressed = False
+        super(PngButton, self).__init__(*args, **kwargs)
+        self.setStyleSheet(self.png_button_style)
+
+        png_dict = {}
+        if name:
+            png_dict = PngFactory.png_names.get(name) or {}
+            if not png_dict:
+                print((
+                    "WARNING: There is no icon set with name \"{}\" "
+                    "in PngFactory!"
+                ).format(name))
+
+        ico_normal = png_dict.get("normal")
+        ico_hover = png_dict.get("hover")
+        ico_pressed = png_dict.get("pressed")
+        ico_hover_pressed = png_dict.get("pressed_hover")
+        ico_disabled = png_dict.get("disabled")
+
+        if path:
+            ico_normal = QtGui.QIcon(path)
+        if hover_path:
+            ico_hover = QtGui.QIcon(hover_path)
+
+        if pressed_path:
+            ico_pressed = QtGui.QIcon(pressed_path)
+
+        if hover_pressed_path:
+            ico_hover_pressed = QtGui.QIcon(hover_pressed_path)
+
+        if disabled_path:
+            ico_disabled = QtGui.QIcon(disabled_path)
+
+        self.setIcon(ico_normal)
+        if size:
+            self.setIconSize(size)
+            self.setMaximumSize(size)
+
+        self.ico_normal = ico_normal
+        self.ico_hover = ico_hover
+        self.ico_pressed = ico_pressed
+        self.ico_hover_pressed = ico_hover_pressed
+        self.ico_disabled = ico_disabled
+
+    def setDisabled(self, in_bool):
+        super(PngButton, self).setDisabled(in_bool)
+        icon = self.ico_normal
+        if in_bool and self.ico_disabled:
+            icon = self.ico_disabled
+        self.setIcon(icon)
+
+    def enterEvent(self, event):
+        self._hovered = True
+        if not self.isEnabled():
+            return
+        icon = self.ico_normal
+        if self.ico_hover:
+            icon = self.ico_hover
+
+        if self._pressed and self.ico_hover_pressed:
+            icon = self.ico_hover_pressed
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mouseMoveEvent(self, event):
+        super(PngButton, self).mouseMoveEvent(event)
+        if self._pressed:
+            mouse_pos = event.pos()
+            hovering = self.rect().contains(mouse_pos)
+            if hovering and not self._hovered:
+                self.enterEvent(event)
+            elif not hovering and self._hovered:
+                self.leaveEvent(event)
+
+    def leaveEvent(self, event):
+        self._hovered = False
+        if not self.isEnabled():
+            return
+        icon = self.ico_normal
+        if self._pressed and self.ico_pressed:
+            icon = self.ico_pressed
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mousePressEvent(self, event):
+        self._pressed = True
+        if not self.isEnabled():
+            return
+        icon = self.ico_hover
+        if self.ico_pressed:
+            icon = self.ico_pressed
+
+        if self.ico_hover_pressed:
+            mouse_pos = event.pos()
+            if self.rect().contains(mouse_pos):
+                icon = self.ico_hover_pressed
+
+        if icon is None:
+            icon = self.ico_normal
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mouseReleaseEvent(self, event):
+        if not self.isEnabled():
+            return
+        if self._pressed:
+            self._pressed = False
+            mouse_pos = event.pos()
+            if self.rect().contains(mouse_pos):
+                self.clicked.emit()
+
+        icon = self.ico_normal
+        if self._hovered and self.ico_hover:
+            icon = self.ico_hover
+
+        if self.icon() != icon:
+            self.setIcon(icon)
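The mouse handlers above boil down to one selection rule over the (enabled, pressed, hovered) state, falling back to the normal icon when a variant is missing. A plain-Python sketch of that rule (the helper name is illustrative, not part of the codebase):

def pick_icon(icons, enabled=True, pressed=False, hovered=False):
    """Select an icon variant the way PngButton's event handlers do.

    `icons` maps state names ("normal", "hover", "pressed",
    "pressed_hover", "disabled") to icon objects; missing variants
    fall back to "normal".
    """
    if not enabled:
        return icons.get("disabled") or icons["normal"]
    if pressed and hovered:
        return (icons.get("pressed_hover")
                or icons.get("pressed")
                or icons["normal"])
    if pressed:
        return icons.get("pressed") or icons["normal"]
    if hovered:
        return icons.get("hover") or icons["normal"]
    return icons["normal"]

# e.g. pick_icon(PngFactory.png_names["menu"], pressed=True, hovered=True)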
diff --git a/pype/standalonepublish/widgets/widget_drop_frame.py b/pype/standalonepublish/widgets/widget_drop_frame.py
index ba8ab44cf8..73b9f0e179 100644
--- a/pype/standalonepublish/widgets/widget_drop_frame.py
+++ b/pype/standalonepublish/widgets/widget_drop_frame.py
@@ -92,28 +92,32 @@ class DropDataFrame(QtWidgets.QFrame):
         self._refresh_view()
 
     def _set_thumbnail(self, in_item):
+        current_state = in_item.is_thumbnail()
+        in_item.change_thumbnail(not current_state)
+
         checked_item = None
         for item in self.components_list.widgets():
             if item.is_thumbnail():
                 checked_item = item
                 break
-        if checked_item is None or checked_item == in_item:
-            in_item.change_thumbnail()
-        else:
+        if checked_item is not None and checked_item != in_item:
             checked_item.change_thumbnail(False)
-            in_item.change_thumbnail()
+
+        in_item.change_thumbnail(current_state)
 
     def _set_preview(self, in_item):
+        current_state = in_item.is_preview()
+        in_item.change_preview(not current_state)
+
         checked_item = None
         for item in self.components_list.widgets():
             if item.is_preview():
                 checked_item = item
                 break
-        if checked_item is None or checked_item == in_item:
-            in_item.change_preview()
-        else:
+        if checked_item is not None and checked_item != in_item:
             checked_item.change_preview(False)
-            in_item.change_preview()
+
+        in_item.change_preview(current_state)
 
     def _remove_item(self, in_item):
         valid_repre = in_item.has_valid_repre is True
diff --git a/pype/standalonepublish/widgets/widget_family_desc.py b/pype/standalonepublish/widgets/widget_family_desc.py
index 9b9dad7b46..7c80dcfd57 100644
--- a/pype/standalonepublish/widgets/widget_family_desc.py
+++ b/pype/standalonepublish/widgets/widget_family_desc.py
@@ -6,7 +6,7 @@ import json
 from . import QtWidgets, QtCore, QtGui
 from . import HelpRole, FamilyRole, ExistsRole, PluginRole
 from . import qtawesome
-from pype.vendor import six
+import six
 
 from pype import lib as pypelib
diff --git a/pype/tools/assetcreator/widget.py b/pype/tools/assetcreator/widget.py
index 75e793479a..1e9e4ab624 100644
--- a/pype/tools/assetcreator/widget.py
+++ b/pype/tools/assetcreator/widget.py
@@ -3,9 +3,8 @@ import contextlib
 import collections
 
 from avalon.vendor import qtawesome
-from avalon.vendor.Qt import QtWidgets, QtCore, QtGui
-from avalon import io
-from avalon import style
+from Qt import QtWidgets, QtCore, QtGui
+from avalon import style, io
 
 from .model import (
     TreeModel,
diff --git a/pype/vendor/appdirs.py b/pype/vendor/appdirs.py
deleted file mode 100644
index ae67001af8..0000000000
--- a/pype/vendor/appdirs.py
+++ /dev/null
@@ -1,608 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2005-2010 ActiveState Software Inc.
-# Copyright (c) 2013 Eddy Petrișor
-
-"""Utilities for determining application-specific dirs.
-
-See <http://github.com/ActiveState/appdirs> for details and usage.
-"""
-# Dev Notes:
-# - MSDN on where to store app data files:
-#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
-# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
-# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
-
-__version_info__ = (1, 4, 3)
-__version__ = '.'.join(map(str, __version_info__))
-
-
-import sys
-import os
-
-PY3 = sys.version_info[0] == 3
-
-if PY3:
-    unicode = str
-
-if sys.platform.startswith('java'):
-    import platform
-    os_name = platform.java_ver()[3][0]
-    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
-        system = 'win32'
-    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
-        system = 'darwin'
-    else:  # "Linux", "SunOS", "FreeBSD", etc.
-        # Setting this to "linux2" is not ideal, but only Windows or Mac
-        # are actually checked for and the rest of the module expects
-        # *sys.platform* style strings.
-        system = 'linux2'
-else:
-    system = sys.platform
-
-
-def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
-    r"""Return full path to the user-specific data dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
- Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - Mac OS X: ~/Library/Application Support/ - Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\\Application Data\\ - Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ - Win 7 (not roaming): C:\Users\\AppData\Local\\ - Win 7 (roaming): C:\Users\\AppData\Roaming\\ - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/". - """ - if system == "win32": - if appauthor is None: - appauthor = appname - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.normpath(_get_win_folder(const)) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('~/Library/Application Support/') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): - r"""Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of data dirs should be - returned. By default, the first item from XDG_DATA_DIRS is - returned, or '/usr/local/share/', - if XDG_DATA_DIRS is not set - - Typical site data directories are: - Mac OS X: /Library/Application Support/ - Unix: /usr/local/share/ or /usr/share/ - Win XP: C:\Documents and Settings\All Users\Application Data\\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. - - For Unix, this is using the $XDG_DATA_DIRS[0] default. - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
- """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('/Library/Application Support') - if appname: - path = os.path.join(path, appname) - else: - # XDG default for $XDG_DATA_DIRS - # only first, if multipath is False - path = os.getenv('XDG_DATA_DIRS', - os.pathsep.join(['/usr/local/share', '/usr/share'])) - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - if appname and version: - path = os.path.join(path, version) - return path - - -def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user config directories are: - Mac OS X: same as user_data_dir - Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/". - """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): - r"""Return full path to the user-shared data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "multipath" is an optional parameter only applicable to *nix - which indicates that the entire list of config dirs should be - returned. 
By default, the first item from XDG_CONFIG_DIRS is - returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set - - Typical site config directories are: - Mac OS X: same as site_data_dir - Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in - $XDG_CONFIG_DIRS - Win *: same as site_data_dir - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) - - For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False - - WARNING: Do not use this on Windows. See the Vista-Fail note above for why. - """ - if system in ["win32", "darwin"]: - path = site_data_dir(appname, appauthor) - if appname and version: - path = os.path.join(path, version) - else: - # XDG default for $XDG_CONFIG_DIRS - # only first, if multipath is False - path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] - if appname: - if version: - appname = os.path.join(appname, version) - pathlist = [os.sep.join([x, appname]) for x in pathlist] - - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] - return path - - -def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Cache" to the base app data dir for Windows. See - discussion below. - - Typical user cache directories are: - Mac OS X: ~/Library/Caches/ - Unix: ~/.cache/ (XDG default) - Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache - Vista: C:\Users\\AppData\Local\\\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go in - the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming - app data dir (the default returned by `user_data_dir` above). Apps typically - put cache data somewhere *under* the given dir here. Some examples: - ...\Mozilla\Firefox\Profiles\\Cache - ...\Acme\SuperApp\Cache\1.0 - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - This can be disabled with the `opinion=False` option. - """ - if system == "win32": - if appauthor is None: - appauthor = appname - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - if opinion: - path = os.path.join(path, "Cache") - elif system == 'darwin': - path = os.path.expanduser('~/Library/Caches') - if appname: - path = os.path.join(path, appname) - else: - path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): - r"""Return full path to the user-specific state dir for this application. 
- - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user state directories are: - Mac OS X: same as user_data_dir - Unix: ~/.local/state/ # or in $XDG_STATE_HOME, if defined - Win *: same as user_data_dir - - For Unix, we follow this Debian proposal - to extend the XDG spec and support $XDG_STATE_HOME. - - That means, by default "~/.local/state/". - """ - if system in ["win32", "darwin"]: - path = user_data_dir(appname, appauthor, None, roaming) - else: - path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) - if appname: - path = os.path.join(path, appname) - if appname and version: - path = os.path.join(path, version) - return path - - -def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): - r"""Return full path to the user-specific log dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "appauthor" (only used on Windows) is the name of the - appauthor or distributing body for this application. Typically - it is the owning company name. This falls back to appname. You may - pass False to disable it. - "version" is an optional version path element to append to the - path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this - would typically be ".". - Only applied when appname is present. - "opinion" (boolean) can be False to disable the appending of - "Logs" to the base app data dir for Windows, and "log" to the - base cache dir for Unix. See discussion below. - - Typical user log directories are: - Mac OS X: ~/Library/Logs/ - Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined - Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs - Vista: C:\Users\\AppData\Local\\\Logs - - On Windows the only suggestion in the MSDN docs is that local settings - go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in - examples of what some windows apps use for a logs dir.) - - OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` - value for Windows and appends "log" to the user cache dir for Unix. - This can be disabled with the `opinion=False` option. 
- """ - if system == "darwin": - path = os.path.join( - os.path.expanduser('~/Library/Logs'), - appname) - elif system == "win32": - path = user_data_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "Logs") - else: - path = user_cache_dir(appname, appauthor, version) - version = False - if opinion: - path = os.path.join(path, "log") - if appname and version: - path = os.path.join(path, version) - return path - - -class AppDirs(object): - """Convenience wrapper for getting application dirs.""" - def __init__(self, appname=None, appauthor=None, version=None, - roaming=False, multipath=False): - self.appname = appname - self.appauthor = appauthor - self.version = version - self.roaming = roaming - self.multipath = multipath - - @property - def user_data_dir(self): - return user_data_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_data_dir(self): - return site_data_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_config_dir(self): - return user_config_dir(self.appname, self.appauthor, - version=self.version, roaming=self.roaming) - - @property - def site_config_dir(self): - return site_config_dir(self.appname, self.appauthor, - version=self.version, multipath=self.multipath) - - @property - def user_cache_dir(self): - return user_cache_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_state_dir(self): - return user_state_dir(self.appname, self.appauthor, - version=self.version) - - @property - def user_log_dir(self): - return user_log_dir(self.appname, self.appauthor, - version=self.version) - - -#---- internal support stuff - -def _get_win_folder_from_registry(csidl_name): - """This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. - """ - if PY3: - import winreg as _winreg - else: - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - dir, type = _winreg.QueryValueEx(key, shell_folder_name) - return dir - - -def _get_win_folder_with_pywin32(csidl_name): - from win32com.shell import shellcon, shell - dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) - # Try to make this a unicode path because SHGetFolderPath does - # not return unicode strings when there is unicode data in the - # path. - try: - dir = unicode(dir) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - try: - import win32api - dir = win32api.GetShortPathName(dir) - except ImportError: - pass - except UnicodeError: - pass - return dir - - -def _get_win_folder_with_ctypes(csidl_name): - import ctypes - - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . 
- has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - return buf.value - -def _get_win_folder_with_jna(csidl_name): - import array - from com.sun import jna - from com.sun.jna.platform import win32 - - buf_size = win32.WinDef.MAX_PATH * 2 - buf = array.zeros('c', buf_size) - shell = win32.Shell32.INSTANCE - shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf = array.zeros('c', buf_size) - kernel = win32.Kernel32.INSTANCE - if kernel.GetShortPathName(dir, buf, buf_size): - dir = jna.Native.toString(buf.tostring()).rstrip("\0") - - return dir - -if system == "win32": - try: - import win32com.shell - _get_win_folder = _get_win_folder_with_pywin32 - except ImportError: - try: - from ctypes import windll - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - try: - import com.sun.jna - _get_win_folder = _get_win_folder_with_jna - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -#---- self test code - -if __name__ == "__main__": - appname = "MyApp" - appauthor = "MyCompany" - - props = ("user_data_dir", - "user_config_dir", - "user_cache_dir", - "user_state_dir", - "user_log_dir", - "site_data_dir", - "site_config_dir") - - print("-- app dirs %s --" % __version__) - - print("-- app dirs (with optional 'version')") - dirs = AppDirs(appname, appauthor, version="1.0") - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'version')") - dirs = AppDirs(appname, appauthor) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (without optional 'appauthor')") - dirs = AppDirs(appname) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) - - print("\n-- app dirs (with disabled 'appauthor')") - dirs = AppDirs(appname, appauthor=False) - for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/pype/vendor/arrow-0.12.1.dist-info/INSTALLER b/pype/vendor/arrow-0.12.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e38a..0000000000 --- a/pype/vendor/arrow-0.12.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/pype/vendor/arrow-0.12.1.dist-info/LICENSE b/pype/vendor/arrow-0.12.1.dist-info/LICENSE deleted file mode 100644 index 727ded8388..0000000000 --- a/pype/vendor/arrow-0.12.1.dist-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2013 Chris Smith - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
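One note on the appdirs module removed above: it picks its `_get_win_folder` backend at import time with a capability probe, trying the richest implementation first (pywin32, then ctypes, then JNA, then the registry). The same pattern reduced to a skeleton (backend labels are illustrative stand-ins for the real functions):

def _pick_backend():
    # Keep the first backend whose import succeeds, in order of capability,
    # exactly as appdirs selects _get_win_folder. The JNA branch is omitted
    # here for brevity.
    try:
        import win32com.shell  # noqa: F401  (pywin32 available)
        return "pywin32"
    except ImportError:
        pass
    try:
        from ctypes import windll  # noqa: F401  (Windows ctypes available)
        return "ctypes"
    except ImportError:
        pass
    return "registry"  # last resort: read shell folders from the registry

BACKEND = _pick_backend()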
diff --git a/pype/vendor/arrow-0.12.1.dist-info/METADATA b/pype/vendor/arrow-0.12.1.dist-info/METADATA deleted file mode 100644 index 93d06f7f1e..0000000000 --- a/pype/vendor/arrow-0.12.1.dist-info/METADATA +++ /dev/null @@ -1,125 +0,0 @@ -Metadata-Version: 2.1 -Name: arrow -Version: 0.12.1 -Summary: Better dates and times for Python -Home-page: https://github.com/crsmithdev/arrow/ -Author: Chris Smith -Author-email: crsmithdev@gmail.com -License: Apache 2.0 -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Dist: python-dateutil -Requires-Dist: backports.functools-lru-cache (>=1.2.1); python_version=='2.7' - -Arrow - Better dates & times for Python -======================================= - -.. image:: https://travis-ci.org/crsmithdev/arrow.svg - :alt: build status - :target: https://travis-ci.org/crsmithdev/arrow - -.. image:: https://codecov.io/github/crsmithdev/arrow/coverage.svg?branch=master - :target: https://codecov.io/github/crsmithdev/arrow - :alt: Codecov - -.. image:: https://img.shields.io/pypi/v/arrow.svg - :target: https://pypi.python.org/pypi/arrow - :alt: downloads - -Documentation: `arrow.readthedocs.org `_ ---------------------------------------------------------------------------------- - -What? ------ - -Arrow is a Python library that offers a sensible, human-friendly approach to creating, manipulating, formatting and converting dates, times, and timestamps. It implements and updates the datetime type, plugging gaps in functionality, and provides an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code. - -Arrow is heavily inspired by `moment.js `_ and `requests `_ - -Why? ----- - -Python's standard library and some other low-level modules have near-complete date, time and time zone functionality but don't work very well from a usability perspective: - -- Too many modules: datetime, time, calendar, dateutil, pytz and more -- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc. -- Time zones and timestamp conversions are verbose and unpleasant -- Time zone naievety is the norm -- Gaps in functionality: ISO-8601 parsing, timespans, humanization - -Features --------- - -- Fully implemented, drop-in replacement for datetime -- Supports Python 2.6, 2.7, 3.3, 3.4 and 3.5 -- Time zone-aware & UTC by default -- Provides super-simple creation options for many common input scenarios -- Updated .replace method with support for relative offsets, including weeks -- Formats and parses strings automatically -- Partial support for ISO-8601 -- Timezone conversion -- Timestamp available as a property -- Generates time spans, ranges, floors and ceilings in timeframes from year to microsecond -- Humanizes and supports a growing list of contributed locales -- Extensible for your own Arrow-derived types - -Quick start ------------ - -First: - -.. code-block:: console - - $ pip install arrow - -And then: - -.. 
code-block:: pycon - - >>> import arrow - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc = utc.replace(hours=-1) - >>> utc - - - >>> local = utc.to('US/Pacific') - >>> local - - - >>> arrow.get('2013-05-11T21:23:58.970460+00:00') - - - >>> local.timestamp - 1368303838 - - >>> local.format() - '2013-05-11 13:23:58 -07:00' - - >>> local.format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-11 13:23:58 -07:00' - - >>> local.humanize() - 'an hour ago' - - >>> local.humanize(locale='ko_kr') - '1시간 전' - -Further documentation can be found at `arrow.readthedocs.org `_ - -Contributing ------------- - -Contributions are welcome, especially with localization. See `locales.py `_ for what's currently supported. - - diff --git a/pype/vendor/arrow-0.12.1.dist-info/RECORD b/pype/vendor/arrow-0.12.1.dist-info/RECORD deleted file mode 100644 index 4edf255b29..0000000000 --- a/pype/vendor/arrow-0.12.1.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -arrow-0.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -arrow-0.12.1.dist-info/LICENSE,sha256=pLdgG-UFacLJapgY_ICbAUlBDITJlxTWDJ1PsK6GH6I,579 -arrow-0.12.1.dist-info/METADATA,sha256=PVodUfwAHQ-YjlzNkcrgnJIWud0vTQpCa_Hd-3zBOG0,4284 -arrow-0.12.1.dist-info/RECORD,, -arrow-0.12.1.dist-info/WHEEL,sha256=1Cc6AMC7xwtmRYeNiHurrGRLJoEuor81IEEqyHblQuw,116 -arrow-0.12.1.dist-info/top_level.txt,sha256=aCBThK2RIB824ctI3l9i6z94l8UYpFF-BC4m3dDzFFo,6 -arrow/__init__.py,sha256=_q2QJezQr_fCP0jE4ldqzWJwS_yw9unClcjUOdmMOn0,164 -arrow/__pycache__/__init__.cpython-36.pyc,, -arrow/__pycache__/api.cpython-36.pyc,, -arrow/__pycache__/arrow.cpython-36.pyc,, -arrow/__pycache__/factory.cpython-36.pyc,, -arrow/__pycache__/formatter.cpython-36.pyc,, -arrow/__pycache__/locales.cpython-36.pyc,, -arrow/__pycache__/parser.cpython-36.pyc,, -arrow/__pycache__/util.cpython-36.pyc,, -arrow/api.py,sha256=C6fxwcwAdd_KKhh7gaQcK_vSP4CRNNnuCUGGSn_8DS4,1066 -arrow/arrow.py,sha256=CE7oSKXj9aEYnw7qw1hKA3X92ztiYnAin-QtfuSkROU,36025 -arrow/factory.py,sha256=C7Ef9NQ5-hkrTECMM_NI-UIpBuMF5m546IkfZXIzfP4,8569 -arrow/formatter.py,sha256=R1b-l3ejp8j8R71tF_fb19BFiyEY4VWrsGyX13g-o1Y,3434 -arrow/locales.py,sha256=3iq7JEv8swN7rHrbsyHm2Jp0YMV6Ed88fa6JIiOOgVk,72239 -arrow/parser.py,sha256=m_jSHIVZ0MH_pbhwjny6M2_4QpquY9V0gfDK0c6qYMM,11141 -arrow/util.py,sha256=jO7c3wY9eN_58hy0KDN9caGi7F5Aip1M3Vc3PQlUixc,1034 diff --git a/pype/vendor/arrow-0.12.1.dist-info/WHEEL b/pype/vendor/arrow-0.12.1.dist-info/WHEEL deleted file mode 100644 index c76ec85d6b..0000000000 --- a/pype/vendor/arrow-0.12.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.32.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/pype/vendor/arrow-0.12.1.dist-info/top_level.txt b/pype/vendor/arrow-0.12.1.dist-info/top_level.txt deleted file mode 100644 index e2dc7471c2..0000000000 --- a/pype/vendor/arrow-0.12.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -arrow diff --git a/pype/vendor/arrow/__init__.py b/pype/vendor/arrow/__init__.py deleted file mode 100644 index 86ccb69f00..0000000000 --- a/pype/vendor/arrow/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- - -from .arrow import Arrow -from .factory import ArrowFactory -from .api import get, now, utcnow - -__version__ = '0.12.1' -VERSION = __version__ diff --git a/pype/vendor/arrow/api.py b/pype/vendor/arrow/api.py deleted file mode 100644 index 83731458ee..0000000000 --- a/pype/vendor/arrow/api.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -''' -Provides the default 
implementation of :class:`ArrowFactory ` -methods for use as a module API. - -''' - -from __future__ import absolute_import - -from arrow.factory import ArrowFactory - - -# internal default factory. -_factory = ArrowFactory() - - -def get(*args, **kwargs): - ''' Calls the default :class:`ArrowFactory ` ``get`` method. - - ''' - - return _factory.get(*args, **kwargs) - -def utcnow(): - ''' Calls the default :class:`ArrowFactory ` ``utcnow`` method. - - ''' - - return _factory.utcnow() - - -def now(tz=None): - ''' Calls the default :class:`ArrowFactory ` ``now`` method. - - ''' - - return _factory.now(tz) - - -def factory(type): - ''' Returns an :class:`.ArrowFactory` for the specified :class:`Arrow ` - or derived type. - - :param type: the type, :class:`Arrow ` or derived. - - ''' - - return ArrowFactory(type) - - -__all__ = ['get', 'utcnow', 'now', 'factory'] - diff --git a/pype/vendor/arrow/arrow.py b/pype/vendor/arrow/arrow.py deleted file mode 100644 index 7cd5dcb1f9..0000000000 --- a/pype/vendor/arrow/arrow.py +++ /dev/null @@ -1,1050 +0,0 @@ -# -*- coding: utf-8 -*- -''' -Provides the :class:`Arrow ` class, an enhanced ``datetime`` -replacement. - -''' - -from __future__ import absolute_import - -from datetime import datetime, timedelta, tzinfo -from dateutil import tz as dateutil_tz -from dateutil.relativedelta import relativedelta -from math import trunc -import calendar -import sys -import warnings - - -from arrow import util, locales, parser, formatter - - -class Arrow(object): - '''An :class:`Arrow ` object. - - Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing - additional functionality. - - :param year: the calendar year. - :param month: the calendar month. - :param day: the calendar day. - :param hour: (optional) the hour. Defaults to 0. - :param minute: (optional) the minute, Defaults to 0. - :param second: (optional) the second, Defaults to 0. - :param microsecond: (optional) the microsecond. Defaults 0. - :param tzinfo: (optional) A timezone expression. Defaults to UTC. - - .. _tz-expr: - - Recognized timezone expressions: - - - A ``tzinfo`` object. - - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. - - A ``str`` in ISO-8601 style, as in '+07:00'. - - A ``str``, one of the following: 'local', 'utc', 'UTC'. - - Usage:: - - >>> import arrow - >>> arrow.Arrow(2013, 5, 5, 12, 30, 45) - - - ''' - - resolution = datetime.resolution - - _ATTRS = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'] - _ATTRS_PLURAL = ['{0}s'.format(a) for a in _ATTRS] - _MONTHS_PER_QUARTER = 3 - - def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0, - tzinfo=None): - - if util.isstr(tzinfo): - tzinfo = parser.TzinfoParser.parse(tzinfo) - tzinfo = tzinfo or dateutil_tz.tzutc() - - self._datetime = datetime(year, month, day, hour, minute, second, - microsecond, tzinfo) - - - # factories: single object, both original and from datetime. - - @classmethod - def now(cls, tzinfo=None): - '''Constructs an :class:`Arrow ` object, representing "now" in the given - timezone. - - :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. 
- - ''' - - utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) - dt = utc.astimezone(dateutil_tz.tzlocal() if tzinfo is None else tzinfo) - - return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, dt.tzinfo) - - @classmethod - def utcnow(cls): - ''' Constructs an :class:`Arrow ` object, representing "now" in UTC - time. - - ''' - - dt = datetime.utcnow() - - return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, dateutil_tz.tzutc()) - - @classmethod - def fromtimestamp(cls, timestamp, tzinfo=None): - ''' Constructs an :class:`Arrow ` object from a timestamp, converted to - the given timezone. - - :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. - :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. - - Timestamps should always be UTC. If you have a non-UTC timestamp:: - - >>> arrow.Arrow.utcfromtimestamp(1367900664).replace(tzinfo='US/Pacific') - - - ''' - - tzinfo = tzinfo or dateutil_tz.tzlocal() - timestamp = cls._get_timestamp_from_input(timestamp) - dt = datetime.fromtimestamp(timestamp, tzinfo) - - return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, dt.tzinfo) - - @classmethod - def utcfromtimestamp(cls, timestamp): - '''Constructs an :class:`Arrow ` object from a timestamp, in UTC time. - - :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. - - ''' - - timestamp = cls._get_timestamp_from_input(timestamp) - dt = datetime.utcfromtimestamp(timestamp) - - return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, dateutil_tz.tzutc()) - - @classmethod - def fromdatetime(cls, dt, tzinfo=None): - ''' Constructs an :class:`Arrow ` object from a ``datetime`` and - optional replacement timezone. - - :param dt: the ``datetime`` - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to ``dt``'s - timezone, or UTC if naive. - - If you only want to replace the timezone of naive datetimes:: - - >>> dt - datetime.datetime(2013, 5, 5, 0, 0, tzinfo=tzutc()) - >>> arrow.Arrow.fromdatetime(dt, dt.tzinfo or 'US/Pacific') - - - ''' - - tzinfo = tzinfo or dt.tzinfo or dateutil_tz.tzutc() - - return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, tzinfo) - - @classmethod - def fromdate(cls, date, tzinfo=None): - ''' Constructs an :class:`Arrow ` object from a ``date`` and optional - replacement timezone. Time values are set to 0. - - :param date: the ``date`` - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to UTC. - ''' - - tzinfo = tzinfo or dateutil_tz.tzutc() - - return cls(date.year, date.month, date.day, tzinfo=tzinfo) - - @classmethod - def strptime(cls, date_str, fmt, tzinfo=None): - ''' Constructs an :class:`Arrow ` object from a date string and format, - in the style of ``datetime.strptime``. Optionally replaces the parsed timezone. - - :param date_str: the date string. - :param fmt: the format string. - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to the parsed - timezone if ``fmt`` contains a timezone directive, otherwise UTC. 
- - ''' - - dt = datetime.strptime(date_str, fmt) - tzinfo = tzinfo or dt.tzinfo - - return cls(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, tzinfo) - - - # factories: ranges and spans - - @classmethod - def range(cls, frame, start, end=None, tz=None, limit=None): - ''' Returns a list of :class:`Arrow ` objects, representing - an iteration of time between two inputs. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param tz: (optional) A :ref:`timezone expression `. Defaults to - ``start``'s timezone, or UTC if ``start`` is naive. - :param limit: (optional) A maximum number of tuples to return. - - **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to - return the entire range. Call with ``limit`` alone to return a maximum # of results from - the start. Call with both to cap a range at a maximum # of results. - - **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before - iterating. As such, either call with naive objects and ``tz``, or aware objects from the - same timezone and no ``tz``. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - Usage:: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print(repr(r)) - ... - - - - - - - **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned list:: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 13, 30) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print(repr(r)) - ... - - - - ''' - - _, frame_relative, relative_steps = cls._get_frames(frame) - - tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) - - start = cls._get_datetime(start).replace(tzinfo=tzinfo) - end, limit = cls._get_iteration_params(end, limit) - end = cls._get_datetime(end).replace(tzinfo=tzinfo) - - current = cls.fromdatetime(start) - results = [] - - while current <= end and len(results) < limit: - results.append(current) - - values = [getattr(current, f) for f in cls._ATTRS] - current = cls(*values, tzinfo=tzinfo) + relativedelta(**{frame_relative: relative_steps}) - - return results - - - @classmethod - def span_range(cls, frame, start, end, tz=None, limit=None): - ''' Returns a list of tuples, each :class:`Arrow ` objects, - representing a series of timespans between two inputs. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param tz: (optional) A :ref:`timezone expression `. Defaults to - ``start``'s timezone, or UTC if ``start`` is naive. - :param limit: (optional) A maximum number of tuples to return. - - **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to - return the entire range. Call with ``limit`` alone to return a maximum # of results from - the start. Call with both to cap a range at a maximum # of results. - - **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before - iterating. 
As such, either call with naive objects and ``tz``, or aware objects from the - same timezone and no ``tz``. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - **NOTE**: Unlike Python's ``range``, ``end`` will *always* be included in the returned list - of timespans. - - Usage: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.span_range('hour', start, end): - ... print(r) - ... - (, ) - (, ) - (, ) - (, ) - (, ) - (, ) - - ''' - - tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) - start = cls.fromdatetime(start, tzinfo).span(frame)[0] - _range = cls.range(frame, start, end, tz, limit) - return [r.span(frame) for r in _range] - - @classmethod - def interval(cls, frame, start, end, interval=1, tz=None): - ''' Returns an array of tuples, each :class:`Arrow ` objects, - representing a series of intervals between two inputs. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param interval: (optional) Time interval for the given time frame. - :param tz: (optional) A timezone expression. Defaults to UTC. - - Supported frame values: year, quarter, month, week, day, hour, minute, second - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - Recognized timezone expressions: - - - A ``tzinfo`` object. - - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. - - A ``str`` in ISO-8601 style, as in '+07:00'. - - A ``str``, one of the following: 'local', 'utc', 'UTC'. - - Usage: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.interval('hour', start, end, 2): - ... print r - ... - (, ) - (, ) - (, ) - ''' - if interval < 1: - raise ValueError("interval has to be a positive integer") - - spanRange = cls.span_range(frame,start,end,tz) - - bound = (len(spanRange) // interval) * interval - return [ (spanRange[i][0],spanRange[i+ interval - 1][1]) for i in range(0,bound, interval) ] - - # representations - - def __repr__(self): - return '<{0} [{1}]>'.format(self.__class__.__name__, self.__str__()) - - def __str__(self): - return self._datetime.isoformat() - - def __format__(self, formatstr): - - if len(formatstr) > 0: - return self.format(formatstr) - - return str(self) - - def __hash__(self): - return self._datetime.__hash__() - - - # attributes & properties - - def __getattr__(self, name): - - if name == 'week': - return self.isocalendar()[1] - - if name == 'quarter': - return int((self.month-1)/self._MONTHS_PER_QUARTER) + 1 - - if not name.startswith('_'): - value = getattr(self._datetime, name, None) - - if value is not None: - return value - - return object.__getattribute__(self, name) - - @property - def tzinfo(self): - ''' Gets the ``tzinfo`` of the :class:`Arrow ` object. ''' - - return self._datetime.tzinfo - - @tzinfo.setter - def tzinfo(self, tzinfo): - ''' Sets the ``tzinfo`` of the :class:`Arrow ` object. ''' - - self._datetime = self._datetime.replace(tzinfo=tzinfo) - - @property - def datetime(self): - ''' Returns a datetime representation of the :class:`Arrow ` object. 
''' - - return self._datetime - - @property - def naive(self): - ''' Returns a naive datetime representation of the :class:`Arrow ` - object. ''' - - return self._datetime.replace(tzinfo=None) - - @property - def timestamp(self): - ''' Returns a timestamp representation of the :class:`Arrow ` object, in - UTC time. ''' - - return calendar.timegm(self._datetime.utctimetuple()) - - @property - def float_timestamp(self): - ''' Returns a floating-point representation of the :class:`Arrow ` - object, in UTC time. ''' - - return self.timestamp + float(self.microsecond) / 1000000 - - - # mutation and duplication. - - def clone(self): - ''' Returns a new :class:`Arrow ` object, cloned from the current one. - - Usage: - - >>> arw = arrow.utcnow() - >>> cloned = arw.clone() - - ''' - - return self.fromdatetime(self._datetime) - - def replace(self, **kwargs): - ''' Returns a new :class:`Arrow ` object with attributes updated - according to inputs. - - Use property names to set their value absolutely:: - - >>> import arrow - >>> arw = arrow.utcnow() - >>> arw - - >>> arw.replace(year=2014, month=6) - - - You can also replace the timezone without conversion, using a - :ref:`timezone expression `:: - - >>> arw.replace(tzinfo=tz.tzlocal()) - - - ''' - - absolute_kwargs = {} - relative_kwargs = {} # TODO: DEPRECATED; remove in next release - - for key, value in kwargs.items(): - - if key in self._ATTRS: - absolute_kwargs[key] = value - elif key in self._ATTRS_PLURAL or key in ['weeks', 'quarters']: - # TODO: DEPRECATED - warnings.warn("replace() with plural property to shift value" - "is deprecated, use shift() instead", - DeprecationWarning) - relative_kwargs[key] = value - elif key in ['week', 'quarter']: - raise AttributeError('setting absolute {0} is not supported'.format(key)) - elif key !='tzinfo': - raise AttributeError('unknown attribute: "{0}"'.format(key)) - - # core datetime does not support quarters, translate to months. - relative_kwargs.setdefault('months', 0) - relative_kwargs['months'] += relative_kwargs.pop('quarters', 0) * self._MONTHS_PER_QUARTER - - current = self._datetime.replace(**absolute_kwargs) - current += relativedelta(**relative_kwargs) # TODO: DEPRECATED - - tzinfo = kwargs.get('tzinfo') - - if tzinfo is not None: - tzinfo = self._get_tzinfo(tzinfo) - current = current.replace(tzinfo=tzinfo) - - return self.fromdatetime(current) - - def shift(self, **kwargs): - ''' Returns a new :class:`Arrow ` object with attributes updated - according to inputs. - - Use pluralized property names to shift their current value relatively: - - >>> import arrow - >>> arw = arrow.utcnow() - >>> arw - - >>> arw.shift(years=1, months=-1) - - - Day-of-the-week relative shifting can use either Python's weekday numbers - (Monday = 0, Tuesday = 1 .. Sunday = 6) or using dateutil.relativedelta's - day instances (MO, TU .. SU). When using weekday numbers, the returned - date will always be greater than or equal to the starting date. - - Using the above code (which is a Saturday) and asking it to shift to Saturday: - - >>> arw.shift(weekday=5) - - - While asking for a Monday: - - >>> arw.shift(weekday=0) - - - ''' - - relative_kwargs = {} - - for key, value in kwargs.items(): - - if key in self._ATTRS_PLURAL or key in ['weeks', 'quarters', 'weekday']: - relative_kwargs[key] = value - else: - raise AttributeError() - - # core datetime does not support quarters, translate to months. 
- relative_kwargs.setdefault('months', 0) - relative_kwargs['months'] += relative_kwargs.pop('quarters', 0) * self._MONTHS_PER_QUARTER - - current = self._datetime + relativedelta(**relative_kwargs) - - return self.fromdatetime(current) - - def to(self, tz): - ''' Returns a new :class:`Arrow ` object, converted - to the target timezone. - - :param tz: A :ref:`timezone expression `. - - Usage:: - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc.to('US/Pacific') - - - >>> utc.to(tz.tzlocal()) - - - >>> utc.to('-07:00') - - - >>> utc.to('local') - - - >>> utc.to('local').to('utc') - - - ''' - - if not isinstance(tz, tzinfo): - tz = parser.TzinfoParser.parse(tz) - - dt = self._datetime.astimezone(tz) - - return self.__class__(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, - dt.microsecond, dt.tzinfo) - - def span(self, frame, count=1): - ''' Returns two new :class:`Arrow ` objects, representing the timespan - of the :class:`Arrow ` object in a given timeframe. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param count: (optional) the number of frames to span. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Usage:: - - >>> arrow.utcnow() - - - >>> arrow.utcnow().span('hour') - (, ) - - >>> arrow.utcnow().span('day') - (, ) - - >>> arrow.utcnow().span('day', count=2) - (, ) - - ''' - - frame_absolute, frame_relative, relative_steps = self._get_frames(frame) - - if frame_absolute == 'week': - attr = 'day' - elif frame_absolute == 'quarter': - attr = 'month' - else: - attr = frame_absolute - - index = self._ATTRS.index(attr) - frames = self._ATTRS[:index + 1] - - values = [getattr(self, f) for f in frames] - - for i in range(3 - len(values)): - values.append(1) - - floor = self.__class__(*values, tzinfo=self.tzinfo) - - if frame_absolute == 'week': - floor = floor + relativedelta(days=-(self.isoweekday() - 1)) - elif frame_absolute == 'quarter': - floor = floor + relativedelta(months=-((self.month - 1) % 3)) - - ceil = floor + relativedelta( - **{frame_relative: count * relative_steps}) + relativedelta(microseconds=-1) - - return floor, ceil - - def floor(self, frame): - ''' Returns a new :class:`Arrow ` object, representing the "floor" - of the timespan of the :class:`Arrow ` object in a given timeframe. - Equivalent to the first element in the 2-tuple returned by - :func:`span `. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - - Usage:: - - >>> arrow.utcnow().floor('hour') - - ''' - - return self.span(frame)[0] - - def ceil(self, frame): - ''' Returns a new :class:`Arrow ` object, representing the "ceiling" - of the timespan of the :class:`Arrow ` object in a given timeframe. - Equivalent to the second element in the 2-tuple returned by - :func:`span `. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - - Usage:: - - >>> arrow.utcnow().ceil('hour') - - ''' - - return self.span(frame)[1] - - - # string output and formatting. - - def format(self, fmt='YYYY-MM-DD HH:mm:ssZZ', locale='en_us'): - ''' Returns a string representation of the :class:`Arrow ` object, - formatted according to a format string. - - :param fmt: the format string. 
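[Usage sketch] The `span()` arithmetic deleted above floors the datetime to the frame boundary and derives the ceiling as the next boundary minus one microsecond; `floor()` and `ceil()` just pick one end of that tuple. A stdlib-only sketch for the 'hour' frame (`hour_span` is a hypothetical name; the `format()` docstring continues below):

from datetime import datetime, timedelta

def hour_span(dt, count=1):
    # Floor: zero out everything below the frame.
    floor = dt.replace(minute=0, second=0, microsecond=0)
    # Ceil: next frame boundary minus one microsecond.
    ceil = floor + timedelta(hours=count) - timedelta(microseconds=1)
    return floor, ceil

print(hour_span(datetime(2013, 5, 9, 3, 32, 36)))
# (datetime(2013, 5, 9, 3, 0), datetime(2013, 5, 9, 3, 59, 59, 999999))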
- - Usage:: - - >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-09 03:56:47 -00:00' - - >>> arrow.utcnow().format('X') - '1368071882' - - >>> arrow.utcnow().format('MMMM DD, YYYY') - 'May 09, 2013' - - >>> arrow.utcnow().format() - '2013-05-09 03:56:47 -00:00' - - ''' - - return formatter.DateTimeFormatter(locale).format(self._datetime, fmt) - - - def humanize(self, other=None, locale='en_us', only_distance=False, granularity='auto'): - ''' Returns a localized, humanized representation of a relative difference in time. - - :param other: (optional) an :class:`Arrow ` or ``datetime`` object. - Defaults to now in the current :class:`Arrow ` object's timezone. - :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'. - :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part. - :param granularity: (optional) defines the precision of the output. Set it to strings 'second', 'minute', 'hour', 'day', 'month' or 'year'. - Usage:: - - >>> earlier = arrow.utcnow().shift(hours=-2) - >>> earlier.humanize() - '2 hours ago' - - >>> later = later = earlier.shift(hours=4) - >>> later.humanize(earlier) - 'in 4 hours' - - ''' - - locale = locales.get_locale(locale) - - if other is None: - utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) - dt = utc.astimezone(self._datetime.tzinfo) - - elif isinstance(other, Arrow): - dt = other._datetime - - elif isinstance(other, datetime): - if other.tzinfo is None: - dt = other.replace(tzinfo=self._datetime.tzinfo) - else: - dt = other.astimezone(self._datetime.tzinfo) - - else: - raise TypeError() - - delta = int(util.total_seconds(self._datetime - dt)) - sign = -1 if delta < 0 else 1 - diff = abs(delta) - delta = diff - - if granularity=='auto': - if diff < 10: - return locale.describe('now', only_distance=only_distance) - - if diff < 45: - seconds = sign * delta - return locale.describe('seconds', seconds, only_distance=only_distance) - - elif diff < 90: - return locale.describe('minute', sign, only_distance=only_distance) - elif diff < 2700: - minutes = sign * int(max(delta / 60, 2)) - return locale.describe('minutes', minutes, only_distance=only_distance) - - elif diff < 5400: - return locale.describe('hour', sign, only_distance=only_distance) - elif diff < 79200: - hours = sign * int(max(delta / 3600, 2)) - return locale.describe('hours', hours, only_distance=only_distance) - - elif diff < 129600: - return locale.describe('day', sign, only_distance=only_distance) - elif diff < 2160000: - days = sign * int(max(delta / 86400, 2)) - return locale.describe('days', days, only_distance=only_distance) - - elif diff < 3888000: - return locale.describe('month', sign, only_distance=only_distance) - elif diff < 29808000: - self_months = self._datetime.year * 12 + self._datetime.month - other_months = dt.year * 12 + dt.month - - months = sign * int(max(abs(other_months - self_months), 2)) - - return locale.describe('months', months, only_distance=only_distance) - - elif diff < 47260800: - return locale.describe('year', sign, only_distance=only_distance) - else: - years = sign * int(max(delta / 31536000, 2)) - return locale.describe('years', years, only_distance=only_distance) - - else: - if granularity == 'second': - delta = sign * delta - if(abs(delta) < 2): - return locale.describe('now', only_distance=only_distance) - elif granularity == 'minute': - delta = sign * delta / float(60) - elif granularity == 'hour': - delta = sign * delta / float(60*60) - elif granularity == 'day': - 
delta = sign * delta / float(60*60*24) - elif granularity == 'month': - delta = sign * delta / float(60*60*24*30.5) - elif granularity == 'year': - delta = sign * delta / float(60*60*24*365.25) - else: - raise AttributeError('Error. Could not understand your level of granularity. Please select between \ - "second", "minute", "hour", "day", "week", "month" or "year"') - - if(trunc(abs(delta)) != 1): - granularity += 's' - return locale.describe(granularity, delta, only_distance=False) - # math - - def __add__(self, other): - - if isinstance(other, (timedelta, relativedelta)): - return self.fromdatetime(self._datetime + other, self._datetime.tzinfo) - - raise TypeError() - - def __radd__(self, other): - return self.__add__(other) - - def __sub__(self, other): - - if isinstance(other, (timedelta, relativedelta)): - return self.fromdatetime(self._datetime - other, self._datetime.tzinfo) - - elif isinstance(other, datetime): - return self._datetime - other - - elif isinstance(other, Arrow): - return self._datetime - other._datetime - - raise TypeError() - - def __rsub__(self, other): - - if isinstance(other, datetime): - return other - self._datetime - - raise TypeError() - - - # comparisons - - def _cmperror(self, other): - raise TypeError('can\'t compare \'{0}\' to \'{1}\''.format( - type(self), type(other))) - - def __eq__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return False - - return self._datetime == self._get_datetime(other) - - def __ne__(self, other): - return not self.__eq__(other) - - def __gt__(self, other): - - if not isinstance(other, (Arrow, datetime)): - self._cmperror(other) - - return self._datetime > self._get_datetime(other) - - def __ge__(self, other): - - if not isinstance(other, (Arrow, datetime)): - self._cmperror(other) - - return self._datetime >= self._get_datetime(other) - - def __lt__(self, other): - - if not isinstance(other, (Arrow, datetime)): - self._cmperror(other) - - return self._datetime < self._get_datetime(other) - - def __le__(self, other): - - if not isinstance(other, (Arrow, datetime)): - self._cmperror(other) - - return self._datetime <= self._get_datetime(other) - - - # datetime methods - - def date(self): - ''' Returns a ``date`` object with the same year, month and day. ''' - - return self._datetime.date() - - def time(self): - ''' Returns a ``time`` object with the same hour, minute, second, microsecond. ''' - - return self._datetime.time() - - def timetz(self): - ''' Returns a ``time`` object with the same hour, minute, second, microsecond and - tzinfo. ''' - - return self._datetime.timetz() - - def astimezone(self, tz): - ''' Returns a ``datetime`` object, converted to the specified timezone. - - :param tz: a ``tzinfo`` object. - - ''' - - return self._datetime.astimezone(tz) - - def utcoffset(self): - ''' Returns a ``timedelta`` object representing the whole number of minutes difference from - UTC time. ''' - - return self._datetime.utcoffset() - - def dst(self): - ''' Returns the daylight savings time adjustment. ''' - - return self._datetime.dst() - - def timetuple(self): - ''' Returns a ``time.struct_time``, in the current timezone. ''' - - return self._datetime.timetuple() - - def utctimetuple(self): - ''' Returns a ``time.struct_time``, in UTC time. ''' - - return self._datetime.utctimetuple() - - def toordinal(self): - ''' Returns the proleptic Gregorian ordinal of the date. ''' - - return self._datetime.toordinal() - - def weekday(self): - ''' Returns the day of the week as an integer (0-6). 
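[Usage sketch] In the fixed-granularity branch of `humanize()` above, the signed delta in seconds is divided by a constant per granularity, and the frame name is pluralized unless the truncated magnitude is exactly 1. A self-contained restatement (`scale` and `_FACTORS` are hypothetical names; the `weekday()` docstring continues below):

from math import trunc

_FACTORS = {'second': 1, 'minute': 60, 'hour': 3600,
            'day': 86400, 'month': 86400 * 30.5, 'year': 86400 * 365.25}

def scale(delta_seconds, granularity):
    delta = delta_seconds / _FACTORS[granularity]
    # Pluralize unless the truncated magnitude is exactly one.
    frame = granularity if trunc(abs(delta)) == 1 else granularity + 's'
    return delta, frame

print(scale(-7200, 'hour'))  # (-2.0, 'hours')
print(scale(90, 'minute'))   # (1.5, 'minute')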
''' - - return self._datetime.weekday() - - def isoweekday(self): - ''' Returns the ISO day of the week as an integer (1-7). ''' - - return self._datetime.isoweekday() - - def isocalendar(self): - ''' Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). ''' - - return self._datetime.isocalendar() - - def isoformat(self, sep='T'): - '''Returns an ISO 8601 formatted representation of the date and time. ''' - - return self._datetime.isoformat(sep) - - def ctime(self): - ''' Returns a ctime formatted representation of the date and time. ''' - - return self._datetime.ctime() - - def strftime(self, format): - ''' Formats in the style of ``datetime.strptime``. - - :param format: the format string. - - ''' - - return self._datetime.strftime(format) - - def for_json(self): - '''Serializes for the ``for_json`` protocol of simplejson.''' - - return self.isoformat() - - # internal tools. - - @staticmethod - def _get_tzinfo(tz_expr): - - if tz_expr is None: - return dateutil_tz.tzutc() - if isinstance(tz_expr, tzinfo): - return tz_expr - else: - try: - return parser.TzinfoParser.parse(tz_expr) - except parser.ParserError: - raise ValueError('\'{0}\' not recognized as a timezone'.format( - tz_expr)) - - @classmethod - def _get_datetime(cls, expr): - - if isinstance(expr, Arrow): - return expr.datetime - - if isinstance(expr, datetime): - return expr - - try: - expr = float(expr) - return cls.utcfromtimestamp(expr).datetime - except: - raise ValueError( - '\'{0}\' not recognized as a timestamp or datetime'.format(expr)) - - @classmethod - def _get_frames(cls, name): - - if name in cls._ATTRS: - return name, '{0}s'.format(name), 1 - - elif name in ['week', 'weeks']: - return 'week', 'weeks', 1 - elif name in ['quarter', 'quarters']: - return 'quarter', 'months', 3 - - supported = ', '.join(cls._ATTRS + ['week', 'weeks'] + ['quarter', 'quarters']) - raise AttributeError('range/span over frame {0} not supported. Supported frames: {1}'.format(name, supported)) - - @classmethod - def _get_iteration_params(cls, end, limit): - - if end is None: - - if limit is None: - raise Exception('one of \'end\' or \'limit\' is required') - - return cls.max, limit - - else: - if limit is None: - return end, sys.maxsize - return end, limit - - @staticmethod - def _get_timestamp_from_input(timestamp): - - try: - return float(timestamp) - except: - raise ValueError('cannot parse \'{0}\' as a timestamp'.format(timestamp)) - -Arrow.min = Arrow.fromdatetime(datetime.min) -Arrow.max = Arrow.fromdatetime(datetime.max) diff --git a/pype/vendor/arrow/factory.py b/pype/vendor/arrow/factory.py deleted file mode 100644 index 6c33bbb266..0000000000 --- a/pype/vendor/arrow/factory.py +++ /dev/null @@ -1,258 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Implements the :class:`ArrowFactory ` class, -providing factory methods for common :class:`Arrow ` -construction scenarios. - -""" - -from __future__ import absolute_import - -from arrow.arrow import Arrow -from arrow import parser -from arrow.util import is_timestamp, isstr - -from datetime import datetime, tzinfo, date -from dateutil import tz as dateutil_tz -from time import struct_time -import calendar - - -class ArrowFactory(object): - ''' A factory for generating :class:`Arrow ` objects. - - :param type: (optional) the :class:`Arrow `-based class to construct from. - Defaults to :class:`Arrow `. - - ''' - - def __init__(self, type=Arrow): - self.type = type - - def get(self, *args, **kwargs): - ''' Returns an :class:`Arrow ` object based on flexible inputs. 
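[Usage sketch] `_get_frames()` above normalizes a frame name into a (absolute frame, plural relative name, step) triple; the only non-obvious mapping is 'quarter' -> three 'months', because core datetime and relativedelta have no quarter arithmetic. A self-contained restatement (the `get()` docstring of the factory continues below):

_ATTRS = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']

def get_frames(name):
    if name in _ATTRS:
        return name, name + 's', 1
    if name in ('week', 'weeks'):
        return 'week', 'weeks', 1
    if name in ('quarter', 'quarters'):
        # Quarters are expressed as steps of three months.
        return 'quarter', 'months', 3
    raise AttributeError('range/span over frame {0} not supported'.format(name))

print(get_frames('quarter'))  # ('quarter', 'months', 3)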
- - :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to - 'en_us'. - :param tzinfo: (optional) a :ref:`timezone expression ` or tzinfo object. - Replaces the timezone unless using an input form that is explicitly UTC or specifies - the timezone in a positional argument. Defaults to UTC. - - Usage:: - - >>> import arrow - - **No inputs** to get current UTC time:: - - >>> arrow.get() - - - **None** to also get current UTC time:: - - >>> arrow.get(None) - - - **One** :class:`Arrow ` object, to get a copy. - - >>> arw = arrow.utcnow() - >>> arrow.get(arw) - - - **One** ``str``, ``float``, or ``int``, convertible to a floating-point timestamp, to get - that timestamp in UTC:: - - >>> arrow.get(1367992474.293378) - - - >>> arrow.get(1367992474) - - - >>> arrow.get('1367992474.293378') - - - >>> arrow.get('1367992474') - - - **One** ISO-8601-formatted ``str``, to parse it:: - - >>> arrow.get('2013-09-29T01:26:43.830580') - - - **One** ``tzinfo``, to get the current time **converted** to that timezone:: - - >>> arrow.get(tz.tzlocal()) - - - **One** naive ``datetime``, to get that datetime in UTC:: - - >>> arrow.get(datetime(2013, 5, 5)) - - - **One** aware ``datetime``, to get that datetime:: - - >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal())) - - - **One** naive ``date``, to get that date in UTC:: - - >>> arrow.get(date(2013, 5, 5)) - - - **Two** arguments, a naive or aware ``datetime``, and a replacement - :ref:`timezone expression `:: - - >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') - - - **Two** arguments, a naive ``date``, and a replacement - :ref:`timezone expression `:: - - >>> arrow.get(date(2013, 5, 5), 'US/Pacific') - - - **Two** arguments, both ``str``, to parse the first according to the format of the second:: - - >>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss') - - - **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try:: - - >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss']) - - - **Three or more** arguments, as for the constructor of a ``datetime``:: - - >>> arrow.get(2013, 5, 5, 12, 30, 45) - - - **One** time.struct time:: - - >>> arrow.get(gmtime(0)) - - - ''' - - arg_count = len(args) - locale = kwargs.get('locale', 'en_us') - tz = kwargs.get('tzinfo', None) - - # () -> now, @ utc. - if arg_count == 0: - if isinstance(tz, tzinfo): - return self.type.now(tz) - return self.type.utcnow() - - if arg_count == 1: - arg = args[0] - - # (None) -> now, @ utc. - if arg is None: - return self.type.utcnow() - - # try (int, float, str(int), str(float)) -> utc, from timestamp. - if is_timestamp(arg): - return self.type.utcfromtimestamp(arg) - - # (Arrow) -> from the object's datetime. - if isinstance(arg, Arrow): - return self.type.fromdatetime(arg.datetime) - - # (datetime) -> from datetime. - if isinstance(arg, datetime): - return self.type.fromdatetime(arg) - - # (date) -> from date. - if isinstance(arg, date): - return self.type.fromdate(arg) - - # (tzinfo) -> now, @ tzinfo. - elif isinstance(arg, tzinfo): - return self.type.now(arg) - - # (str) -> parse. 
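[Usage sketch] Assuming the `arrow` package is installed from PyPI, which is presumably what removing this vendored copy implies (the rest of this diff swaps `pype.vendor` imports for site-packages imports), the single-argument dispatch above boils down to (the string-parsing branch continues just below):

import arrow
from datetime import datetime

arrow.get()                       # no args  -> now, in UTC
arrow.get(1367992474)             # number   -> from a UTC timestamp
arrow.get('2013-09-29T01:26:43')  # ISO str  -> parsed
arrow.get(datetime(2013, 5, 5))   # naive dt -> assumed UTC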
- elif isstr(arg): - dt = parser.DateTimeParser(locale).parse_iso(arg) - return self.type.fromdatetime(dt) - - # (struct_time) -> from struct_time - elif isinstance(arg, struct_time): - return self.type.utcfromtimestamp(calendar.timegm(arg)) - - else: - raise TypeError('Can\'t parse single argument type of \'{0}\''.format(type(arg))) - - elif arg_count == 2: - - arg_1, arg_2 = args[0], args[1] - - if isinstance(arg_1, datetime): - - # (datetime, tzinfo/str) -> fromdatetime replace tzinfo. - if isinstance(arg_2, tzinfo) or isstr(arg_2): - return self.type.fromdatetime(arg_1, arg_2) - else: - raise TypeError('Can\'t parse two arguments of types \'datetime\', \'{0}\''.format( - type(arg_2))) - - elif isinstance(arg_1, date): - - # (date, tzinfo/str) -> fromdate replace tzinfo. - if isinstance(arg_2, tzinfo) or isstr(arg_2): - return self.type.fromdate(arg_1, tzinfo=arg_2) - else: - raise TypeError('Can\'t parse two arguments of types \'date\', \'{0}\''.format( - type(arg_2))) - - # (str, format) -> parse. - elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)): - dt = parser.DateTimeParser(locale).parse(args[0], args[1]) - return self.type.fromdatetime(dt, tzinfo=tz) - - else: - raise TypeError('Can\'t parse two arguments of types \'{0}\', \'{1}\''.format( - type(arg_1), type(arg_2))) - - # 3+ args -> datetime-like via constructor. - else: - return self.type(*args, **kwargs) - - def utcnow(self): - '''Returns an :class:`Arrow ` object, representing "now" in UTC time. - - Usage:: - - >>> import arrow - >>> arrow.utcnow() - - ''' - - return self.type.utcnow() - - def now(self, tz=None): - '''Returns an :class:`Arrow ` object, representing "now" in the given - timezone. - - :param tz: (optional) A :ref:`timezone expression `. Defaults to local time. 
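[Usage sketch] The two-argument branch above either replaces the timezone of a datetime/date, or parses a string against one format or a list of candidate formats. Again assuming an installed `arrow` release with this API (the `now()` docstring continues below):

import arrow
from datetime import datetime

arrow.get(datetime(2013, 5, 5), 'US/Pacific')            # replace tzinfo
arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss')  # parse by format
arrow.get('2013-05-05 12:30:45',
          ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])         # first format that fits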
- - Usage:: - - >>> import arrow - >>> arrow.now() - - - >>> arrow.now('US/Pacific') - - - >>> arrow.now('+02:00') - - - >>> arrow.now('local') - - ''' - - if tz is None: - tz = dateutil_tz.tzlocal() - elif not isinstance(tz, tzinfo): - tz = parser.TzinfoParser.parse(tz) - - return self.type.now(tz) diff --git a/pype/vendor/arrow/formatter.py b/pype/vendor/arrow/formatter.py deleted file mode 100644 index 50fd3a1791..0000000000 --- a/pype/vendor/arrow/formatter.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -import calendar -import re -from dateutil import tz as dateutil_tz -from arrow import util, locales - - -class DateTimeFormatter(object): - - _FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?|a|A|X)') - - def __init__(self, locale='en_us'): - - self.locale = locales.get_locale(locale) - - def format(cls, dt, fmt): - - return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt) - - def _format_token(self, dt, token): - - if token == 'YYYY': - return self.locale.year_full(dt.year) - if token == 'YY': - return self.locale.year_abbreviation(dt.year) - - if token == 'MMMM': - return self.locale.month_name(dt.month) - if token == 'MMM': - return self.locale.month_abbreviation(dt.month) - if token == 'MM': - return '{0:02d}'.format(dt.month) - if token == 'M': - return str(dt.month) - - if token == 'DDDD': - return '{0:03d}'.format(dt.timetuple().tm_yday) - if token == 'DDD': - return str(dt.timetuple().tm_yday) - if token == 'DD': - return '{0:02d}'.format(dt.day) - if token == 'D': - return str(dt.day) - - if token == 'Do': - return self.locale.ordinal_number(dt.day) - - if token == 'dddd': - return self.locale.day_name(dt.isoweekday()) - if token == 'ddd': - return self.locale.day_abbreviation(dt.isoweekday()) - if token == 'd': - return str(dt.isoweekday()) - - if token == 'HH': - return '{0:02d}'.format(dt.hour) - if token == 'H': - return str(dt.hour) - if token == 'hh': - return '{0:02d}'.format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) - if token == 'h': - return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) - - if token == 'mm': - return '{0:02d}'.format(dt.minute) - if token == 'm': - return str(dt.minute) - - if token == 'ss': - return '{0:02d}'.format(dt.second) - if token == 's': - return str(dt.second) - - if token == 'SSSSSS': - return str('{0:06d}'.format(int(dt.microsecond))) - if token == 'SSSSS': - return str('{0:05d}'.format(int(dt.microsecond / 10))) - if token == 'SSSS': - return str('{0:04d}'.format(int(dt.microsecond / 100))) - if token == 'SSS': - return str('{0:03d}'.format(int(dt.microsecond / 1000))) - if token == 'SS': - return str('{0:02d}'.format(int(dt.microsecond / 10000))) - if token == 'S': - return str(int(dt.microsecond / 100000)) - - if token == 'X': - return str(calendar.timegm(dt.utctimetuple())) - - if token in ['ZZ', 'Z']: - separator = ':' if token == 'ZZ' else '' - tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo - total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60) - - sign = '+' if total_minutes >= 0 else '-' - total_minutes = abs(total_minutes) - hour, minute = divmod(total_minutes, 60) - - return '{0}{1:02d}{2}{3:02d}'.format(sign, hour, separator, minute) - - if token in ('a', 'A'): - return self.locale.meridian(dt.hour, token) - diff --git a/pype/vendor/arrow/locales.py b/pype/vendor/arrow/locales.py deleted file mode 100644 index 724a3d1975..0000000000 --- a/pype/vendor/arrow/locales.py 
+++ /dev/null @@ -1,2159 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from __future__ import unicode_literals - -import inspect -import sys -from math import trunc - - -def get_locale(name): - '''Returns an appropriate :class:`Locale ` - corresponding to an inpute locale name. - - :param name: the name of the locale. - - ''' - - locale_cls = _locales.get(name.lower()) - - if locale_cls is None: - raise ValueError('Unsupported locale \'{0}\''.format(name)) - - return locale_cls() - - -# base locale type. - -class Locale(object): - ''' Represents locale-specific data and functionality. ''' - - names = [] - - timeframes = { - 'now': '', - 'seconds': '', - 'minute': '', - 'minutes': '', - 'hour': '', - 'hours': '', - 'day': '', - 'days': '', - 'month': '', - 'months': '', - 'year': '', - 'years': '', - } - - meridians = { - 'am': '', - 'pm': '', - 'AM': '', - 'PM': '', - } - - past = None - future = None - - month_names = [] - month_abbreviations = [] - - day_names = [] - day_abbreviations = [] - - ordinal_day_re = r'(\d+)' - - def __init__(self): - - self._month_name_to_ordinal = None - - def describe(self, timeframe, delta=0, only_distance=False): - ''' Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. - :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - ''' - - humanized = self._format_timeframe(timeframe, delta) - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - def day_name(self, day): - ''' Returns the day name for a specified day of the week. - - :param day: the ``int`` day of the week (1-7). - - ''' - - return self.day_names[day] - - def day_abbreviation(self, day): - ''' Returns the day abbreviation for a specified day of the week. - - :param day: the ``int`` day of the week (1-7). - - ''' - - return self.day_abbreviations[day] - - def month_name(self, month): - ''' Returns the month name for a specified month of the year. - - :param month: the ``int`` month of the year (1-12). - - ''' - - return self.month_names[month] - - def month_abbreviation(self, month): - ''' Returns the month abbreviation for a specified month of the year. - - :param month: the ``int`` month of the year (1-12). - - ''' - - return self.month_abbreviations[month] - - def month_number(self, name): - ''' Returns the month number for a month specified by name or abbreviation. - - :param name: the month name or abbreviation. - - ''' - - if self._month_name_to_ordinal is None: - self._month_name_to_ordinal = self._name_to_ordinal(self.month_names) - self._month_name_to_ordinal.update(self._name_to_ordinal(self.month_abbreviations)) - - return self._month_name_to_ordinal.get(name) - - def year_full(self, year): - ''' Returns the year for specific locale if available - - :param name: the ``int`` year (4-digit) - ''' - return '{0:04d}'.format(year) - - def year_abbreviation(self, year): - ''' Returns the year for specific locale if available - - :param name: the ``int`` year (4-digit) - ''' - return '{0:04d}'.format(year)[2:] - - def meridian(self, hour, token): - ''' Returns the meridian indicator for a specified hour and format token. - - :param hour: the ``int`` hour of the day. - :param token: the format token. 
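[Usage sketch] The locale registry above resolves a name to a `Locale` subclass via `get_locale()`, and `describe()` is the single entry point `humanize()` uses: it formats the timeframe and then wraps it in the locale's past/future template. A usage sketch, assuming an installed arrow release with the same API as this vendored copy (the `meridian()` docstring continues below):

from arrow import locales

fr = locales.get_locale('fr')
print(fr.describe('hours', 3))                       # 'dans 3 heures'
print(fr.describe('hours', -3, only_distance=True))  # '3 heures'
print(locales.get_locale('ja').describe('day', -1))  # '1日前'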
- ''' - - if token == 'a': - return self.meridians['am'] if hour < 12 else self.meridians['pm'] - if token == 'A': - return self.meridians['AM'] if hour < 12 else self.meridians['PM'] - - def ordinal_number(self, n): - ''' Returns the ordinal format of a given integer - - :param n: an integer - ''' - return self._ordinal_number(n) - - def _ordinal_number(self, n): - return '{0}'.format(n) - - def _name_to_ordinal(self, lst): - return dict(map(lambda i: (i[1].lower(), i[0] + 1), enumerate(lst[1:]))) - - def _format_timeframe(self, timeframe, delta): - return self.timeframes[timeframe].format(trunc(abs(delta))) - - def _format_relative(self, humanized, timeframe, delta): - - if timeframe == 'now': - return humanized - - direction = self.past if delta < 0 else self.future - - return direction.format(humanized) - - -# base locale type implementations. - -class EnglishLocale(Locale): - - names = ['en', 'en_us', 'en_gb', 'en_au', 'en_be', 'en_jp', 'en_za', 'en_ca'] - - past = '{0} ago' - future = 'in {0}' - - timeframes = { - 'now': 'just now', - 'seconds': 'seconds', - 'minute': 'a minute', - 'minutes': '{0} minutes', - 'hour': 'an hour', - 'hours': '{0} hours', - 'day': 'a day', - 'days': '{0} days', - 'month': 'a month', - 'months': '{0} months', - 'year': 'a year', - 'years': '{0} years', - } - - meridians = { - 'am': 'am', - 'pm': 'pm', - 'AM': 'AM', - 'PM': 'PM', - } - - month_names = ['', 'January', 'February', 'March', 'April', 'May', 'June', 'July', - 'August', 'September', 'October', 'November', 'December'] - month_abbreviations = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', - 'Sep', 'Oct', 'Nov', 'Dec'] - - day_names = ['', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] - day_abbreviations = ['', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - - ordinal_day_re = r'((?P[2-3]?1(?=st)|[2-3]?2(?=nd)|[2-3]?3(?=rd)|[1-3]?[04-9](?=th)|1[1-3](?=th))(st|nd|rd|th))' - - def _ordinal_number(self, n): - if n % 100 not in (11, 12, 13): - remainder = abs(n) % 10 - if remainder == 1: - return '{0}st'.format(n) - elif remainder == 2: - return '{0}nd'.format(n) - elif remainder == 3: - return '{0}rd'.format(n) - return '{0}th'.format(n) - - -class ItalianLocale(Locale): - names = ['it', 'it_it'] - past = '{0} fa' - future = 'tra {0}' - - timeframes = { - 'now': 'adesso', - 'seconds': 'qualche secondo', - 'minute': 'un minuto', - 'minutes': '{0} minuti', - 'hour': 'un\'ora', - 'hours': '{0} ore', - 'day': 'un giorno', - 'days': '{0} giorni', - 'month': 'un mese', - 'months': '{0} mesi', - 'year': 'un anno', - 'years': '{0} anni', - } - - month_names = ['', 'gennaio', 'febbraio', 'marzo', 'aprile', 'maggio', 'giugno', 'luglio', - 'agosto', 'settembre', 'ottobre', 'novembre', 'dicembre'] - month_abbreviations = ['', 'gen', 'feb', 'mar', 'apr', 'mag', 'giu', 'lug', 'ago', - 'set', 'ott', 'nov', 'dic'] - - day_names = ['', 'lunedì', 'martedì', 'mercoledì', 'giovedì', 'venerdì', 'sabato', 'domenica'] - day_abbreviations = ['', 'lun', 'mar', 'mer', 'gio', 'ven', 'sab', 'dom'] - - ordinal_day_re = r'((?P[1-3]?[0-9](?=[ºª]))[ºª])' - - def _ordinal_number(self, n): - return '{0}º'.format(n) - - -class SpanishLocale(Locale): - names = ['es', 'es_es'] - past = 'hace {0}' - future = 'en {0}' - - timeframes = { - 'now': 'ahora', - 'seconds': 'segundos', - 'minute': 'un minuto', - 'minutes': '{0} minutos', - 'hour': 'una hora', - 'hours': '{0} horas', - 'day': 'un día', - 'days': '{0} días', - 'month': 'un mes', - 'months': '{0} meses', - 'year': 'un año', - 'years': 
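[Usage sketch] `EnglishLocale._ordinal_number()` above encodes the familiar English rule: 11th/12th/13th are exceptions, otherwise the last digit decides the suffix. A self-contained restatement (`ordinal` is a hypothetical name; the SpanishLocale definition continues below):

def ordinal(n):
    if n % 100 not in (11, 12, 13):
        # Last digit picks st/nd/rd; everything else gets th.
        return {1: '{0}st', 2: '{0}nd', 3: '{0}rd'}.get(abs(n) % 10,
                                                        '{0}th').format(n)
    return '{0}th'.format(n)

print([ordinal(n) for n in (1, 2, 3, 4, 11, 12, 13, 21, 101)])
# ['1st', '2nd', '3rd', '4th', '11th', '12th', '13th', '21st', '101st']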
'{0} años', - } - - month_names = ['', 'enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', - 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre'] - month_abbreviations = ['', 'ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', - 'sep', 'oct', 'nov', 'dic'] - - day_names = ['', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado', 'domingo'] - day_abbreviations = ['', 'lun', 'mar', 'mie', 'jue', 'vie', 'sab', 'dom'] - - ordinal_day_re = r'((?P[1-3]?[0-9](?=[ºª]))[ºª])' - - def _ordinal_number(self, n): - return '{0}º'.format(n) - - -class FrenchLocale(Locale): - names = ['fr', 'fr_fr'] - past = 'il y a {0}' - future = 'dans {0}' - - timeframes = { - 'now': 'maintenant', - 'seconds': 'quelques secondes', - 'minute': 'une minute', - 'minutes': '{0} minutes', - 'hour': 'une heure', - 'hours': '{0} heures', - 'day': 'un jour', - 'days': '{0} jours', - 'month': 'un mois', - 'months': '{0} mois', - 'year': 'un an', - 'years': '{0} ans', - } - - month_names = ['', 'janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', - 'août', 'septembre', 'octobre', 'novembre', 'décembre'] - month_abbreviations = ['', 'janv', 'févr', 'mars', 'avr', 'mai', 'juin', 'juil', 'août', - 'sept', 'oct', 'nov', 'déc'] - - day_names = ['', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi', 'dimanche'] - day_abbreviations = ['', 'lun', 'mar', 'mer', 'jeu', 'ven', 'sam', 'dim'] - - ordinal_day_re = r'((?P\b1(?=er\b)|[1-3]?[02-9](?=e\b)|[1-3]1(?=e\b))(er|e)\b)' - - def _ordinal_number(self, n): - if abs(n) == 1: - return '{0}er'.format(n) - return '{0}e'.format(n) - - -class GreekLocale(Locale): - - names = ['el', 'el_gr'] - - past = '{0} πριν' - future = 'σε {0}' - - timeframes = { - 'now': 'τώρα', - 'seconds': 'δευτερόλεπτα', - 'minute': 'ένα λεπτό', - 'minutes': '{0} λεπτά', - 'hour': 'μια ώρα', - 'hours': '{0} ώρες', - 'day': 'μια μέρα', - 'days': '{0} μέρες', - 'month': 'ένα μήνα', - 'months': '{0} μήνες', - 'year': 'ένα χρόνο', - 'years': '{0} χρόνια', - } - - month_names = ['', 'Ιανουαρίου', 'Φεβρουαρίου', 'Μαρτίου', 'Απριλίου', 'Μαΐου', 'Ιουνίου', - 'Ιουλίου', 'Αυγούστου', 'Σεπτεμβρίου', 'Οκτωβρίου', 'Νοεμβρίου', 'Δεκεμβρίου'] - month_abbreviations = ['', 'Ιαν', 'Φεβ', 'Μαρ', 'Απρ', 'Μαϊ', 'Ιον', 'Ιολ', 'Αυγ', - 'Σεπ', 'Οκτ', 'Νοε', 'Δεκ'] - - day_names = ['', 'Δευτέρα', 'Τρίτη', 'Τετάρτη', 'Πέμπτη', 'Παρασκευή', 'Σάββατο', 'Κυριακή'] - day_abbreviations = ['', 'Δευ', 'Τρι', 'Τετ', 'Πεμ', 'Παρ', 'Σαβ', 'Κυρ'] - - -class JapaneseLocale(Locale): - - names = ['ja', 'ja_jp'] - - past = '{0}前' - future = '{0}後' - - timeframes = { - 'now': '現在', - 'seconds': '数秒', - 'minute': '1分', - 'minutes': '{0}分', - 'hour': '1時間', - 'hours': '{0}時間', - 'day': '1日', - 'days': '{0}日', - 'month': '1ヶ月', - 'months': '{0}ヶ月', - 'year': '1年', - 'years': '{0}年', - } - - month_names = ['', '1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', - '9月', '10月', '11月', '12月'] - month_abbreviations = ['', ' 1', ' 2', ' 3', ' 4', ' 5', ' 6', ' 7', ' 8', - ' 9', '10', '11', '12'] - - day_names = ['', '月曜日', '火曜日', '水曜日', '木曜日', '金曜日', '土曜日', '日曜日'] - day_abbreviations = ['', '月', '火', '水', '木', '金', '土', '日'] - - -class SwedishLocale(Locale): - - names = ['sv', 'sv_se'] - - past = 'för {0} sen' - future = 'om {0}' - - timeframes = { - 'now': 'just nu', - 'seconds': 'några sekunder', - 'minute': 'en minut', - 'minutes': '{0} minuter', - 'hour': 'en timme', - 'hours': '{0} timmar', - 'day': 'en dag', - 'days': '{0} dagar', - 'month': 'en månad', - 'months': '{0} månader', - 'year': 'ett år', - 'years': 
'{0} år', - } - - month_names = ['', 'januari', 'februari', 'mars', 'april', 'maj', 'juni', 'juli', - 'augusti', 'september', 'oktober', 'november', 'december'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'maj', 'jun', 'jul', - 'aug', 'sep', 'okt', 'nov', 'dec'] - - day_names = ['', 'måndag', 'tisdag', 'onsdag', 'torsdag', 'fredag', 'lördag', 'söndag'] - day_abbreviations = ['', 'mån', 'tis', 'ons', 'tor', 'fre', 'lör', 'sön'] - - -class FinnishLocale(Locale): - - names = ['fi', 'fi_fi'] - - # The finnish grammar is very complex, and its hard to convert - # 1-to-1 to something like English. - - past = '{0} sitten' - future = '{0} kuluttua' - - timeframes = { - 'now': ['juuri nyt', 'juuri nyt'], - 'seconds': ['muutama sekunti', 'muutaman sekunnin'], - 'minute': ['minuutti', 'minuutin'], - 'minutes': ['{0} minuuttia', '{0} minuutin'], - 'hour': ['tunti', 'tunnin'], - 'hours': ['{0} tuntia', '{0} tunnin'], - 'day': ['päivä', 'päivä'], - 'days': ['{0} päivää', '{0} päivän'], - 'month': ['kuukausi', 'kuukauden'], - 'months': ['{0} kuukautta', '{0} kuukauden'], - 'year': ['vuosi', 'vuoden'], - 'years': ['{0} vuotta', '{0} vuoden'], - } - - # Months and days are lowercase in Finnish - month_names = ['', 'tammikuu', 'helmikuu', 'maaliskuu', 'huhtikuu', - 'toukokuu', 'kesäkuu', 'heinäkuu', 'elokuu', - 'syyskuu', 'lokakuu', 'marraskuu', 'joulukuu'] - - month_abbreviations = ['', 'tammi', 'helmi', 'maalis', 'huhti', - 'touko', 'kesä', 'heinä', 'elo', - 'syys', 'loka', 'marras', 'joulu'] - - day_names = ['', 'maanantai', 'tiistai', 'keskiviikko', 'torstai', - 'perjantai', 'lauantai', 'sunnuntai'] - - day_abbreviations = ['', 'ma', 'ti', 'ke', 'to', 'pe', 'la', 'su'] - - def _format_timeframe(self, timeframe, delta): - return (self.timeframes[timeframe][0].format(abs(delta)), - self.timeframes[timeframe][1].format(abs(delta))) - - def _format_relative(self, humanized, timeframe, delta): - if timeframe == 'now': - return humanized[0] - - direction = self.past if delta < 0 else self.future - which = 0 if delta < 0 else 1 - - return direction.format(humanized[which]) - - def _ordinal_number(self, n): - return '{0}.'.format(n) - - -class ChineseCNLocale(Locale): - - names = ['zh', 'zh_cn'] - - past = '{0}前' - future = '{0}后' - - timeframes = { - 'now': '刚才', - 'seconds': '几秒', - 'minute': '1分钟', - 'minutes': '{0}分钟', - 'hour': '1小时', - 'hours': '{0}小时', - 'day': '1天', - 'days': '{0}天', - 'month': '1个月', - 'months': '{0}个月', - 'year': '1年', - 'years': '{0}年', - } - - month_names = ['', '一月', '二月', '三月', '四月', '五月', '六月', '七月', - '八月', '九月', '十月', '十一月', '十二月'] - month_abbreviations = ['', ' 1', ' 2', ' 3', ' 4', ' 5', ' 6', ' 7', ' 8', - ' 9', '10', '11', '12'] - - day_names = ['', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六', '星期日'] - day_abbreviations = ['', '一', '二', '三', '四', '五', '六', '日'] - - -class ChineseTWLocale(Locale): - - names = ['zh_tw'] - - past = '{0}前' - future = '{0}後' - - timeframes = { - 'now': '剛才', - 'seconds': '幾秒', - 'minute': '1分鐘', - 'minutes': '{0}分鐘', - 'hour': '1小時', - 'hours': '{0}小時', - 'day': '1天', - 'days': '{0}天', - 'month': '1個月', - 'months': '{0}個月', - 'year': '1年', - 'years': '{0}年', - } - - month_names = ['', '1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', - '9月', '10月', '11月', '12月'] - month_abbreviations = ['', ' 1', ' 2', ' 3', ' 4', ' 5', ' 6', ' 7', ' 8', - ' 9', '10', '11', '12'] - - day_names = ['', '周一', '周二', '周三', '周四', '周五', '周六', '周日'] - day_abbreviations = ['', '一', '二', '三', '四', '五', '六', '日'] - - -class KoreanLocale(Locale): - - names = ['ko', 
'ko_kr'] - - past = '{0} 전' - future = '{0} 후' - - timeframes = { - 'now': '지금', - 'seconds': '몇 초', - 'minute': '1분', - 'minutes': '{0}분', - 'hour': '1시간', - 'hours': '{0}시간', - 'day': '1일', - 'days': '{0}일', - 'month': '1개월', - 'months': '{0}개월', - 'year': '1년', - 'years': '{0}년', - } - - month_names = ['', '1월', '2월', '3월', '4월', '5월', '6월', '7월', '8월', - '9월', '10월', '11월', '12월'] - month_abbreviations = ['', ' 1', ' 2', ' 3', ' 4', ' 5', ' 6', ' 7', ' 8', - ' 9', '10', '11', '12'] - - day_names = ['', '월요일', '화요일', '수요일', '목요일', '금요일', '토요일', '일요일'] - day_abbreviations = ['', '월', '화', '수', '목', '금', '토', '일'] - - -# derived locale types & implementations. -class DutchLocale(Locale): - - names = ['nl', 'nl_nl'] - - past = '{0} geleden' - future = 'over {0}' - - timeframes = { - 'now': 'nu', - 'seconds': 'seconden', - 'minute': 'een minuut', - 'minutes': '{0} minuten', - 'hour': 'een uur', - 'hours': '{0} uur', - 'day': 'een dag', - 'days': '{0} dagen', - 'month': 'een maand', - 'months': '{0} maanden', - 'year': 'een jaar', - 'years': '{0} jaar', - } - - # In Dutch names of months and days are not starting with a capital letter - # like in the English language. - month_names = ['', 'januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', - 'augustus', 'september', 'oktober', 'november', 'december'] - month_abbreviations = ['', 'jan', 'feb', 'mrt', 'apr', 'mei', 'jun', 'jul', 'aug', - 'sep', 'okt', 'nov', 'dec'] - - day_names = ['', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag', 'zondag'] - day_abbreviations = ['', 'ma', 'di', 'wo', 'do', 'vr', 'za', 'zo'] - - -class SlavicBaseLocale(Locale): - - def _format_timeframe(self, timeframe, delta): - - form = self.timeframes[timeframe] - delta = abs(delta) - - if isinstance(form, list): - - if delta % 10 == 1 and delta % 100 != 11: - form = form[0] - elif 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[1] - else: - form = form[2] - - return form.format(delta) - -class BelarusianLocale(SlavicBaseLocale): - - names = ['be', 'be_by'] - - past = '{0} таму' - future = 'праз {0}' - - timeframes = { - 'now': 'зараз', - 'seconds': 'некалькі секунд', - 'minute': 'хвіліну', - 'minutes': ['{0} хвіліну', '{0} хвіліны', '{0} хвілін'], - 'hour': 'гадзіну', - 'hours': ['{0} гадзіну', '{0} гадзіны', '{0} гадзін'], - 'day': 'дзень', - 'days': ['{0} дзень', '{0} дні', '{0} дзён'], - 'month': 'месяц', - 'months': ['{0} месяц', '{0} месяцы', '{0} месяцаў'], - 'year': 'год', - 'years': ['{0} год', '{0} гады', '{0} гадоў'], - } - - month_names = ['', 'студзеня', 'лютага', 'сакавіка', 'красавіка', 'траўня', 'чэрвеня', - 'ліпеня', 'жніўня', 'верасня', 'кастрычніка', 'лістапада', 'снежня'] - month_abbreviations = ['', 'студ', 'лют', 'сак', 'крас', 'трав', 'чэрв', 'ліп', 'жнів', - 'вер', 'каст', 'ліст', 'снеж'] - - day_names = ['', 'панядзелак', 'аўторак', 'серада', 'чацвер', 'пятніца', 'субота', 'нядзеля'] - day_abbreviations = ['', 'пн', 'ат', 'ср', 'чц', 'пт', 'сб', 'нд'] - - -class PolishLocale(SlavicBaseLocale): - - names = ['pl', 'pl_pl'] - - past = '{0} temu' - future = 'za {0}' - - timeframes = { - 'now': 'teraz', - 'seconds': 'kilka sekund', - 'minute': 'minutę', - 'minutes': ['{0} minut', '{0} minuty', '{0} minut'], - 'hour': 'godzina', - 'hours': ['{0} godzin', '{0} godziny', '{0} godzin'], - 'day': 'dzień', - 'days': ['{0} dzień', '{0} dni', '{0} dni'], - 'month': 'miesiąc', - 'months': ['{0} miesiąc', '{0} miesiące', '{0} miesięcy'], - 'year': 'rok', - 'years': ['{0} rok', '{0} lata', '{0} 
lat'], - } - - month_names = ['', 'styczeń', 'luty', 'marzec', 'kwiecień', 'maj', - 'czerwiec', 'lipiec', 'sierpień', 'wrzesień', 'październik', - 'listopad', 'grudzień'] - month_abbreviations = ['', 'sty', 'lut', 'mar', 'kwi', 'maj', 'cze', 'lip', - 'sie', 'wrz', 'paź', 'lis', 'gru'] - - day_names = ['', 'poniedziałek', 'wtorek', 'środa', 'czwartek', 'piątek', - 'sobota', 'niedziela'] - day_abbreviations = ['', 'Pn', 'Wt', 'Śr', 'Czw', 'Pt', 'So', 'Nd'] - - -class RussianLocale(SlavicBaseLocale): - - names = ['ru', 'ru_ru'] - - past = '{0} назад' - future = 'через {0}' - - timeframes = { - 'now': 'сейчас', - 'seconds': 'несколько секунд', - 'minute': 'минуту', - 'minutes': ['{0} минуту', '{0} минуты', '{0} минут'], - 'hour': 'час', - 'hours': ['{0} час', '{0} часа', '{0} часов'], - 'day': 'день', - 'days': ['{0} день', '{0} дня', '{0} дней'], - 'month': 'месяц', - 'months': ['{0} месяц', '{0} месяца', '{0} месяцев'], - 'year': 'год', - 'years': ['{0} год', '{0} года', '{0} лет'], - } - - month_names = ['', 'января', 'февраля', 'марта', 'апреля', 'мая', 'июня', - 'июля', 'августа', 'сентября', 'октября', 'ноября', 'декабря'] - month_abbreviations = ['', 'янв', 'фев', 'мар', 'апр', 'май', 'июн', 'июл', - 'авг', 'сен', 'окт', 'ноя', 'дек'] - - day_names = ['', 'понедельник', 'вторник', 'среда', 'четверг', 'пятница', - 'суббота', 'воскресенье'] - day_abbreviations = ['', 'пн', 'вт', 'ср', 'чт', 'пт', 'сб', 'вс'] - - -class BulgarianLocale(SlavicBaseLocale): - - names = ['bg', 'bg_BG'] - - past = '{0} назад' - future = 'напред {0}' - - timeframes = { - 'now': 'сега', - 'seconds': 'няколко секунди', - 'minute': 'минута', - 'minutes': ['{0} минута', '{0} минути', '{0} минути'], - 'hour': 'час', - 'hours': ['{0} час', '{0} часа', '{0} часа'], - 'day': 'ден', - 'days': ['{0} ден', '{0} дни', '{0} дни'], - 'month': 'месец', - 'months': ['{0} месец', '{0} месеца', '{0} месеца'], - 'year': 'година', - 'years': ['{0} година', '{0} години', '{0} години'], - } - - month_names = ['', 'януари', 'февруари', 'март', 'април', 'май', 'юни', - 'юли', 'август', 'септември', 'октомври', 'ноември', 'декември'] - month_abbreviations = ['', 'ян', 'февр', 'март', 'апр', 'май', 'юни', 'юли', - 'авг', 'септ', 'окт', 'ноем', 'дек'] - - day_names = ['', 'понеделник', 'вторник', 'сряда', 'четвъртък', 'петък', - 'събота', 'неделя'] - day_abbreviations = ['', 'пон', 'вт', 'ср', 'четв', 'пет', 'съб', 'нед'] - - -class UkrainianLocale(SlavicBaseLocale): - - names = ['ua', 'uk_ua'] - - past = '{0} тому' - future = 'за {0}' - - timeframes = { - 'now': 'зараз', - 'seconds': 'кілька секунд', - 'minute': 'хвилину', - 'minutes': ['{0} хвилину', '{0} хвилини', '{0} хвилин'], - 'hour': 'годину', - 'hours': ['{0} годину', '{0} години', '{0} годин'], - 'day': 'день', - 'days': ['{0} день', '{0} дні', '{0} днів'], - 'month': 'місяць', - 'months': ['{0} місяць', '{0} місяці', '{0} місяців'], - 'year': 'рік', - 'years': ['{0} рік', '{0} роки', '{0} років'], - } - - month_names = ['', 'січня', 'лютого', 'березня', 'квітня', 'травня', 'червня', - 'липня', 'серпня', 'вересня', 'жовтня', 'листопада', 'грудня'] - month_abbreviations = ['', 'січ', 'лют', 'бер', 'квіт', 'трав', 'черв', 'лип', 'серп', - 'вер', 'жовт', 'лист', 'груд'] - - day_names = ['', 'понеділок', 'вівторок', 'середа', 'четвер', 'п’ятниця', 'субота', 'неділя'] - day_abbreviations = ['', 'пн', 'вт', 'ср', 'чт', 'пт', 'сб', 'нд'] - - -class _DeutschLocaleCommonMixin(object): - - past = 'vor {0}' - future = 'in {0}' - - timeframes = { - 'now': 'gerade eben', - 'seconds': 
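[Usage sketch] `SlavicBaseLocale._format_timeframe()` above implements the three-form plural shared by the Belarusian, Polish, Russian, Bulgarian, and Ukrainian locales: form 0 for counts ending in 1 (but not 11), form 1 for counts ending in 2-4 (but not 12-14), form 2 otherwise. A self-contained restatement (`slavic_form` is a hypothetical name; the German timeframe table continues below):

def slavic_form(forms, delta):
    delta = abs(delta)
    if delta % 10 == 1 and delta % 100 != 11:
        return forms[0].format(delta)
    if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20):
        return forms[1].format(delta)
    return forms[2].format(delta)

ru_years = ['{0} год', '{0} года', '{0} лет']
print([slavic_form(ru_years, n) for n in (1, 3, 5, 11, 21)])
# ['1 год', '3 года', '5 лет', '11 лет', '21 год']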
'Sekunden', - 'minute': 'einer Minute', - 'minutes': '{0} Minuten', - 'hour': 'einer Stunde', - 'hours': '{0} Stunden', - 'day': 'einem Tag', - 'days': '{0} Tagen', - 'month': 'einem Monat', - 'months': '{0} Monaten', - 'year': 'einem Jahr', - 'years': '{0} Jahren', - } - - month_names = [ - '', 'Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', - 'August', 'September', 'Oktober', 'November', 'Dezember' - ] - - month_abbreviations = [ - '', 'Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', - 'Okt', 'Nov', 'Dez' - ] - - day_names = [ - '', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', - 'Samstag', 'Sonntag' - ] - - day_abbreviations = [ - '', 'Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So' - ] - - def _ordinal_number(self, n): - return '{0}.'.format(n) - - -class GermanLocale(_DeutschLocaleCommonMixin, Locale): - - names = ['de', 'de_de'] - - -class AustrianLocale(_DeutschLocaleCommonMixin, Locale): - - names = ['de_at'] - - month_names = [ - '', 'Jänner', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', - 'August', 'September', 'Oktober', 'November', 'Dezember' - ] - - -class NorwegianLocale(Locale): - - names = ['nb', 'nb_no'] - - past = 'for {0} siden' - future = 'om {0}' - - timeframes = { - 'now': 'nå nettopp', - 'seconds': 'noen sekunder', - 'minute': 'ett minutt', - 'minutes': '{0} minutter', - 'hour': 'en time', - 'hours': '{0} timer', - 'day': 'en dag', - 'days': '{0} dager', - 'month': 'en måned', - 'months': '{0} måneder', - 'year': 'ett år', - 'years': '{0} år', - } - - month_names = ['', 'januar', 'februar', 'mars', 'april', 'mai', 'juni', - 'juli', 'august', 'september', 'oktober', 'november', - 'desember'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'mai', 'jun', 'jul', - 'aug', 'sep', 'okt', 'nov', 'des'] - - day_names = ['', 'mandag', 'tirsdag', 'onsdag', 'torsdag', 'fredag', - 'lørdag', 'søndag'] - day_abbreviations = ['', 'ma', 'ti', 'on', 'to', 'fr', 'lø', 'sø'] - - -class NewNorwegianLocale(Locale): - - names = ['nn', 'nn_no'] - - past = 'for {0} sidan' - future = 'om {0}' - - timeframes = { - 'now': 'no nettopp', - 'seconds': 'nokre sekund', - 'minute': 'ett minutt', - 'minutes': '{0} minutt', - 'hour': 'ein time', - 'hours': '{0} timar', - 'day': 'ein dag', - 'days': '{0} dagar', - 'month': 'en månad', - 'months': '{0} månader', - 'year': 'eit år', - 'years': '{0} år', - } - - month_names = ['', 'januar', 'februar', 'mars', 'april', 'mai', 'juni', - 'juli', 'august', 'september', 'oktober', 'november', - 'desember'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'mai', 'jun', 'jul', - 'aug', 'sep', 'okt', 'nov', 'des'] - - day_names = ['', 'måndag', 'tysdag', 'onsdag', 'torsdag', 'fredag', - 'laurdag', 'sundag'] - day_abbreviations = ['', 'må', 'ty', 'on', 'to', 'fr', 'la', 'su'] - - -class PortugueseLocale(Locale): - names = ['pt', 'pt_pt'] - - past = 'há {0}' - future = 'em {0}' - - timeframes = { - 'now': 'agora', - 'seconds': 'segundos', - 'minute': 'um minuto', - 'minutes': '{0} minutos', - 'hour': 'uma hora', - 'hours': '{0} horas', - 'day': 'um dia', - 'days': '{0} dias', - 'month': 'um mês', - 'months': '{0} meses', - 'year': 'um ano', - 'years': '{0} anos', - } - - month_names = ['', 'janeiro', 'fevereiro', 'março', 'abril', 'maio', 'junho', 'julho', - 'agosto', 'setembro', 'outubro', 'novembro', 'dezembro'] - month_abbreviations = ['', 'jan', 'fev', 'mar', 'abr', 'maio', 'jun', 'jul', 'ago', - 'set', 'out', 'nov', 'dez'] - - day_names = ['', 'segunda-feira', 'terça-feira', 'quarta-feira', 'quinta-feira', 
'sexta-feira', - 'sábado', 'domingo'] - day_abbreviations = ['', 'seg', 'ter', 'qua', 'qui', 'sex', 'sab', 'dom'] - - -class BrazilianPortugueseLocale(PortugueseLocale): - names = ['pt_br'] - - past = 'faz {0}' - - -class TagalogLocale(Locale): - - names = ['tl'] - - past = 'nakaraang {0}' - future = '{0} mula ngayon' - - timeframes = { - 'now': 'ngayon lang', - 'seconds': 'segundo', - 'minute': 'isang minuto', - 'minutes': '{0} minuto', - 'hour': 'isang oras', - 'hours': '{0} oras', - 'day': 'isang araw', - 'days': '{0} araw', - 'month': 'isang buwan', - 'months': '{0} buwan', - 'year': 'isang taon', - 'years': '{0} taon', - } - - month_names = ['', 'Enero', 'Pebrero', 'Marso', 'Abril', 'Mayo', 'Hunyo', 'Hulyo', - 'Agosto', 'Setyembre', 'Oktubre', 'Nobyembre', 'Disyembre'] - month_abbreviations = ['', 'Ene', 'Peb', 'Mar', 'Abr', 'May', 'Hun', 'Hul', 'Ago', - 'Set', 'Okt', 'Nob', 'Dis'] - - day_names = ['', 'Lunes', 'Martes', 'Miyerkules', 'Huwebes', 'Biyernes', 'Sabado', 'Linggo'] - day_abbreviations = ['', 'Lun', 'Mar', 'Miy', 'Huw', 'Biy', 'Sab', 'Lin'] - - -class VietnameseLocale(Locale): - - names = ['vi', 'vi_vn'] - - past = '{0} trước' - future = '{0} nữa' - - timeframes = { - 'now': 'hiện tại', - 'seconds': 'giây', - 'minute': 'một phút', - 'minutes': '{0} phút', - 'hour': 'một giờ', - 'hours': '{0} giờ', - 'day': 'một ngày', - 'days': '{0} ngày', - 'month': 'một tháng', - 'months': '{0} tháng', - 'year': 'một năm', - 'years': '{0} năm', - } - - month_names = ['', 'Tháng Một', 'Tháng Hai', 'Tháng Ba', 'Tháng Tư', 'Tháng Năm', 'Tháng Sáu', 'Tháng Bảy', - 'Tháng Tám', 'Tháng Chín', 'Tháng Mười', 'Tháng Mười Một', 'Tháng Mười Hai'] - month_abbreviations = ['', 'Tháng 1', 'Tháng 2', 'Tháng 3', 'Tháng 4', 'Tháng 5', 'Tháng 6', 'Tháng 7', 'Tháng 8', - 'Tháng 9', 'Tháng 10', 'Tháng 11', 'Tháng 12'] - - day_names = ['', 'Thứ Hai', 'Thứ Ba', 'Thứ Tư', 'Thứ Năm', 'Thứ Sáu', 'Thứ Bảy', 'Chủ Nhật'] - day_abbreviations = ['', 'Thứ 2', 'Thứ 3', 'Thứ 4', 'Thứ 5', 'Thứ 6', 'Thứ 7', 'CN'] - - -class TurkishLocale(Locale): - - names = ['tr', 'tr_tr'] - - past = '{0} önce' - future = '{0} sonra' - - timeframes = { - 'now': 'şimdi', - 'seconds': 'saniye', - 'minute': 'bir dakika', - 'minutes': '{0} dakika', - 'hour': 'bir saat', - 'hours': '{0} saat', - 'day': 'bir gün', - 'days': '{0} gün', - 'month': 'bir ay', - 'months': '{0} ay', - 'year': 'yıl', - 'years': '{0} yıl', - } - - month_names = ['', 'Ocak', 'Şubat', 'Mart', 'Nisan', 'Mayıs', 'Haziran', 'Temmuz', - 'Ağustos', 'Eylül', 'Ekim', 'Kasım', 'Aralık'] - month_abbreviations = ['', 'Oca', 'Şub', 'Mar', 'Nis', 'May', 'Haz', 'Tem', 'Ağu', - 'Eyl', 'Eki', 'Kas', 'Ara'] - - day_names = ['', 'Pazartesi', 'Salı', 'Çarşamba', 'Perşembe', 'Cuma', 'Cumartesi', 'Pazar'] - day_abbreviations = ['', 'Pzt', 'Sal', 'Çar', 'Per', 'Cum', 'Cmt', 'Paz'] - - -class AzerbaijaniLocale(Locale): - - names = ['az', 'az_az'] - - past = '{0} əvvəl' - future = '{0} sonra' - - timeframes = { - 'now': 'indi', - 'seconds': 'saniyə', - 'minute': 'bir dəqiqə', - 'minutes': '{0} dəqiqə', - 'hour': 'bir saat', - 'hours': '{0} saat', - 'day': 'bir gün', - 'days': '{0} gün', - 'month': 'bir ay', - 'months': '{0} ay', - 'year': 'il', - 'years': '{0} il', - } - - month_names = ['', 'Yanvar', 'Fevral', 'Mart', 'Aprel', 'May', 'İyun', 'İyul', - 'Avqust', 'Sentyabr', 'Oktyabr', 'Noyabr', 'Dekabr'] - month_abbreviations = ['', 'Yan', 'Fev', 'Mar', 'Apr', 'May', 'İyn', 'İyl', 'Avq', - 'Sen', 'Okt', 'Noy', 'Dek'] - - day_names = ['', 'Bazar ertəsi', 'Çərşənbə axşamı', 'Çərşənbə', 'Cümə 
axşamı', 'Cümə', 'Şənbə', 'Bazar'] - day_abbreviations = ['', 'Ber', 'Çax', 'Çər', 'Cax', 'Cüm', 'Şnb', 'Bzr'] - -class ArabicLocale(Locale): - names = ['ar', 'ar_ae', 'ar_bh', 'ar_dj', 'ar_eg', 'ar_eh', 'ar_er', - 'ar_km', 'ar_kw', 'ar_ly', 'ar_om', 'ar_qa', 'ar_sa', 'ar_sd', 'ar_so', - 'ar_ss', 'ar_td', 'ar_ye'] - - past = 'منذ {0}' - future = 'خلال {0}' - - timeframes = { - 'now': 'الآن', - 'seconds': { - 'double' : 'ثانيتين', - 'ten' : '{0} ثوان', - 'higher' : '{0} ثانية' - }, - 'minute': 'دقيقة', - 'minutes': { - 'double' : 'دقيقتين', - 'ten' : '{0} دقائق', - 'higher' : '{0} دقيقة' - }, - 'hour': 'ساعة', - 'hours': { - 'double' : 'ساعتين', - 'ten' : '{0} ساعات', - 'higher' : '{0} ساعة' - }, - 'day': 'يوم', - 'days': { - 'double' : 'يومين', - 'ten' : '{0} أيام', - 'higher' : '{0} يوم' - }, - 'month': 'شهر', - 'months': { - 'double' : 'شهرين', - 'ten' : '{0} أشهر', - 'higher' : '{0} شهر' - }, - 'year': 'سنة', - 'years': { - 'double' : 'سنتين', - 'ten' : '{0} سنوات', - 'higher' : '{0} سنة' - }, - } - - month_names = ['', 'يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', - 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'] - month_abbreviations = ['', 'يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', - 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'] - - day_names = ['', 'الإثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت', 'الأحد'] - day_abbreviations = ['', 'إثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة', 'سبت', 'أحد'] - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - delta = abs(delta) - if isinstance(form, dict): - if delta == 2: - form = form['double'] - elif delta > 2 and delta <= 10: - form = form['ten'] - else: - form = form['higher'] - - return form.format(delta) - -class LevantArabicLocale(ArabicLocale): - names = ['ar_iq', 'ar_jo', 'ar_lb', 'ar_ps', 'ar_sy'] - month_names = ['', 'كانون الثاني', 'شباط', 'آذار', 'نيسان', 'أيار', 'حزيران', 'تموز', 'آب', - 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول'] - month_abbreviations = ['', 'كانون الثاني', 'شباط', 'آذار', 'نيسان', 'أيار', 'حزيران', 'تموز', 'آب', - 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول'] - -class AlgeriaTunisiaArabicLocale(ArabicLocale): - names = ['ar_tn', 'ar_dz'] - month_names = ['', 'جانفي', 'فيفري', 'مارس', 'أفريل', 'ماي', 'جوان', - 'جويلية', 'أوت', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'] - month_abbreviations = ['', 'جانفي', 'فيفري', 'مارس', 'أفريل', 'ماي', 'جوان', - 'جويلية', 'أوت', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر'] - -class MauritaniaArabicLocale(ArabicLocale): - names = ['ar_mr'] - month_names = ['', 'يناير', 'فبراير', 'مارس', 'إبريل', 'مايو', 'يونيو', - 'يوليو', 'أغشت', 'شتمبر', 'أكتوبر', 'نوفمبر', 'دجمبر'] - month_abbreviations = ['', 'يناير', 'فبراير', 'مارس', 'إبريل', 'مايو', 'يونيو', - 'يوليو', 'أغشت', 'شتمبر', 'أكتوبر', 'نوفمبر', 'دجمبر'] - -class MoroccoArabicLocale(ArabicLocale): - names = ['ar_ma'] - month_names = ['', 'يناير', 'فبراير', 'مارس', 'أبريل', 'ماي', 'يونيو', - 'يوليوز', 'غشت', 'شتنبر', 'أكتوبر', 'نونبر', 'دجنبر'] - month_abbreviations = ['', 'يناير', 'فبراير', 'مارس', 'أبريل', 'ماي', 'يونيو', - 'يوليوز', 'غشت', 'شتنبر', 'أكتوبر', 'نونبر', 'دجنبر'] - -class IcelandicLocale(Locale): - - def _format_timeframe(self, timeframe, delta): - - timeframe = self.timeframes[timeframe] - if delta < 0: - timeframe = timeframe[0] - elif delta > 0: - timeframe = timeframe[1] - - return timeframe.format(abs(delta)) - - names = ['is', 'is_is'] - - past = 'fyrir {0} síðan' - future = 'eftir {0}' - - timeframes = { - 
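[Usage sketch] `ArabicLocale._format_timeframe()` above handles a third pluralization scheme: a dedicated dual form for exactly 2, one plural for 3-10, and another for larger counts. A self-contained restatement (`arabic_form` is a hypothetical name; the Icelandic timeframe table continues below):

def arabic_form(forms, delta):
    delta = abs(delta)
    if delta == 2:
        return forms['double'].format(delta)
    if 2 < delta <= 10:
        return forms['ten'].format(delta)
    return forms['higher'].format(delta)

days = {'double': 'يومين', 'ten': '{0} أيام', 'higher': '{0} يوم'}
print([arabic_form(days, n) for n in (2, 5, 11)])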
'now': 'rétt í þessu', - 'seconds': ('nokkrum sekúndum', 'nokkrar sekúndur'), - 'minute': ('einni mínútu', 'eina mínútu'), - 'minutes': ('{0} mínútum', '{0} mínútur'), - 'hour': ('einum tíma', 'einn tíma'), - 'hours': ('{0} tímum', '{0} tíma'), - 'day': ('einum degi', 'einn dag'), - 'days': ('{0} dögum', '{0} daga'), - 'month': ('einum mánuði', 'einn mánuð'), - 'months': ('{0} mánuðum', '{0} mánuði'), - 'year': ('einu ári', 'eitt ár'), - 'years': ('{0} árum', '{0} ár'), - } - - meridians = { - 'am': 'f.h.', - 'pm': 'e.h.', - 'AM': 'f.h.', - 'PM': 'e.h.', - } - - month_names = ['', 'janúar', 'febrúar', 'mars', 'apríl', 'maí', 'júní', - 'júlí', 'ágúst', 'september', 'október', 'nóvember', 'desember'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'maí', 'jún', - 'júl', 'ágú', 'sep', 'okt', 'nóv', 'des'] - - day_names = ['', 'mánudagur', 'þriðjudagur', 'miðvikudagur', 'fimmtudagur', - 'föstudagur', 'laugardagur', 'sunnudagur'] - day_abbreviations = ['', 'mán', 'þri', 'mið', 'fim', 'fös', 'lau', 'sun'] - - -class DanishLocale(Locale): - - names = ['da', 'da_dk'] - - past = 'for {0} siden' - future = 'efter {0}' - - timeframes = { - 'now': 'lige nu', - 'seconds': 'et par sekunder', - 'minute': 'et minut', - 'minutes': '{0} minutter', - 'hour': 'en time', - 'hours': '{0} timer', - 'day': 'en dag', - 'days': '{0} dage', - 'month': 'en måned', - 'months': '{0} måneder', - 'year': 'et år', - 'years': '{0} år', - } - - month_names = ['', 'januar', 'februar', 'marts', 'april', 'maj', 'juni', - 'juli', 'august', 'september', 'oktober', 'november', 'december'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'maj', 'jun', - 'jul', 'aug', 'sep', 'okt', 'nov', 'dec'] - - day_names = ['', 'mandag', 'tirsdag', 'onsdag', 'torsdag', 'fredag', - 'lørdag', 'søndag'] - day_abbreviations = ['', 'man', 'tir', 'ons', 'tor', 'fre', 'lør', 'søn'] - - -class MalayalamLocale(Locale): - - names = ['ml'] - - past = '{0} മുമ്പ്' - future = '{0} ശേഷം' - - timeframes = { - 'now': 'ഇപ്പോൾ', - 'seconds': 'സെക്കന്റ്‌', - 'minute': 'ഒരു മിനിറ്റ്', - 'minutes': '{0} മിനിറ്റ്', - 'hour': 'ഒരു മണിക്കൂർ', - 'hours': '{0} മണിക്കൂർ', - 'day': 'ഒരു ദിവസം ', - 'days': '{0} ദിവസം ', - 'month': 'ഒരു മാസം ', - 'months': '{0} മാസം ', - 'year': 'ഒരു വർഷം ', - 'years': '{0} വർഷം ', - } - - meridians = { - 'am': 'രാവിലെ', - 'pm': 'ഉച്ചക്ക് ശേഷം', - 'AM': 'രാവിലെ', - 'PM': 'ഉച്ചക്ക് ശേഷം', - } - - month_names = ['', 'ജനുവരി', 'ഫെബ്രുവരി', 'മാർച്ച്‌', 'ഏപ്രിൽ ', 'മെയ്‌ ', 'ജൂണ്‍', 'ജൂലൈ', - 'ഓഗസ്റ്റ്‌', 'സെപ്റ്റംബർ', 'ഒക്ടോബർ', 'നവംബർ', 'ഡിസംബർ'] - month_abbreviations = ['', 'ജനു', 'ഫെബ് ', 'മാർ', 'ഏപ്രിൽ', 'മേയ്', 'ജൂണ്‍', 'ജൂലൈ', 'ഓഗസ്റ', - 'സെപ്റ്റ', 'ഒക്ടോ', 'നവം', 'ഡിസം'] - - day_names = ['', 'തിങ്കള്‍', 'ചൊവ്വ', 'ബുധന്‍', 'വ്യാഴം', 'വെള്ളി', 'ശനി', 'ഞായര്‍'] - day_abbreviations = ['', 'തിങ്കള്‍', 'ചൊവ്വ', 'ബുധന്‍', 'വ്യാഴം', 'വെള്ളി', 'ശനി', 'ഞായര്‍'] - - -class HindiLocale(Locale): - - names = ['hi'] - - past = '{0} पहले' - future = '{0} बाद' - - timeframes = { - 'now': 'अभी', - 'seconds': 'सेकंड्', - 'minute': 'एक मिनट ', - 'minutes': '{0} मिनट ', - 'hour': 'एक घंटा', - 'hours': '{0} घंटे', - 'day': 'एक दिन', - 'days': '{0} दिन', - 'month': 'एक माह ', - 'months': '{0} महीने ', - 'year': 'एक वर्ष ', - 'years': '{0} साल ', - } - - meridians = { - 'am': 'सुबह', - 'pm': 'शाम', - 'AM': 'सुबह', - 'PM': 'शाम', - } - - month_names = ['', 'जनवरी', 'फरवरी', 'मार्च', 'अप्रैल ', 'मई', 'जून', 'जुलाई', - 'अगस्त', 'सितंबर', 'अक्टूबर', 'नवंबर', 'दिसंबर'] - month_abbreviations = ['', 'जन', 'फ़र', 'मार्च', 'अप्रै', 'मई', 'जून', 
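[Usage sketch] `IcelandicLocale._format_timeframe()` above is yet another variant: each timeframe carries a (past form, future form) pair and the sign of the delta, not its magnitude, selects which one is formatted. A self-contained restatement (`icelandic_form` is a hypothetical name; the Hindi locale tables continue below):

def icelandic_form(pair, delta):
    # Negative delta -> past form, positive -> future form.
    form = pair[0] if delta < 0 else pair[1]
    return form.format(abs(delta))

minutes = ('{0} mínútum', '{0} mínútur')
print(icelandic_form(minutes, -5))  # '5 mínútum', as in 'fyrir 5 mínútum síðan'
print(icelandic_form(minutes, 5))   # '5 mínútur', as in 'eftir 5 mínútur'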
'जुलाई', 'आग', - 'सित', 'अकत', 'नवे', 'दिस'] - - day_names = ['', 'सोमवार', 'मंगलवार', 'बुधवार', 'गुरुवार', 'शुक्रवार', 'शनिवार', 'रविवार'] - day_abbreviations = ['', 'सोम', 'मंगल', 'बुध', 'गुरुवार', 'शुक्र', 'शनि', 'रवि'] - -class CzechLocale(Locale): - names = ['cs', 'cs_cz'] - - timeframes = { - 'now': 'Teď', - 'seconds': { - 'past': '{0} sekundami', - 'future': ['{0} sekundy', '{0} sekund'] - }, - 'minute': {'past': 'minutou', 'future': 'minutu', 'zero': '{0} minut'}, - 'minutes': { - 'past': '{0} minutami', - 'future': ['{0} minuty', '{0} minut'] - }, - 'hour': {'past': 'hodinou', 'future': 'hodinu', 'zero': '{0} hodin'}, - 'hours': { - 'past': '{0} hodinami', - 'future': ['{0} hodiny', '{0} hodin'] - }, - 'day': {'past': 'dnem', 'future': 'den', 'zero': '{0} dnů'}, - 'days': { - 'past': '{0} dny', - 'future': ['{0} dny', '{0} dnů'] - }, - 'month': {'past': 'měsícem', 'future': 'měsíc', 'zero': '{0} měsíců'}, - 'months': { - 'past': '{0} měsíci', - 'future': ['{0} měsíce', '{0} měsíců'] - }, - 'year': {'past': 'rokem', 'future': 'rok', 'zero': '{0} let'}, - 'years': { - 'past': '{0} lety', - 'future': ['{0} roky', '{0} let'] - } - } - - past = 'Před {0}' - future = 'Za {0}' - - month_names = ['', 'leden', 'únor', 'březen', 'duben', 'květen', 'červen', - 'červenec', 'srpen', 'září', 'říjen', 'listopad', 'prosinec'] - month_abbreviations = ['', 'led', 'úno', 'bře', 'dub', 'kvě', 'čvn', 'čvc', - 'srp', 'zář', 'říj', 'lis', 'pro'] - - day_names = ['', 'pondělí', 'úterý', 'středa', 'čtvrtek', 'pátek', - 'sobota', 'neděle'] - day_abbreviations = ['', 'po', 'út', 'st', 'čt', 'pá', 'so', 'ne'] - - - def _format_timeframe(self, timeframe, delta): - '''Czech aware time frame format function, takes into account - the differences between past and future forms.''' - form = self.timeframes[timeframe] - if isinstance(form, dict): - if delta == 0: - form = form['zero'] # And *never* use 0 in the singular! 
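-            # Editor's note (illustrative, not part of the upstream arrow code):
-            # the two-element lists below implement the Czech paucal rule. After
-            # delta = abs(delta), counts 2-4 (except 12-14) select form[0],
-            # e.g. 3 -> '3 minuty', while 13 or 25 select form[1] -> '13 minut'.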
- elif delta > 0: - form = form['future'] - else: - form = form['past'] - delta = abs(delta) - - if isinstance(form, list): - if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[0] - else: - form = form[1] - - return form.format(delta) - - -class SlovakLocale(Locale): - names = ['sk', 'sk_sk'] - - timeframes = { - 'now': 'Teraz', - 'seconds': { - 'past': 'pár sekundami', - 'future': ['{0} sekundy', '{0} sekúnd'] - }, - 'minute': {'past': 'minútou', 'future': 'minútu', 'zero': '{0} minút'}, - 'minutes': { - 'past': '{0} minútami', - 'future': ['{0} minúty', '{0} minút'] - }, - 'hour': {'past': 'hodinou', 'future': 'hodinu', 'zero': '{0} hodín'}, - 'hours': { - 'past': '{0} hodinami', - 'future': ['{0} hodiny', '{0} hodín'] - }, - 'day': {'past': 'dňom', 'future': 'deň', 'zero': '{0} dní'}, - 'days': { - 'past': '{0} dňami', - 'future': ['{0} dni', '{0} dní'] - }, - 'month': {'past': 'mesiacom', 'future': 'mesiac', 'zero': '{0} mesiacov'}, - 'months': { - 'past': '{0} mesiacmi', - 'future': ['{0} mesiace', '{0} mesiacov'] - }, - 'year': {'past': 'rokom', 'future': 'rok', 'zero': '{0} rokov'}, - 'years': { - 'past': '{0} rokmi', - 'future': ['{0} roky', '{0} rokov'] - } - } - - past = 'Pred {0}' - future = 'O {0}' - - month_names = ['', 'január', 'február', 'marec', 'apríl', 'máj', 'jún', - 'júl', 'august', 'september', 'október', 'november', 'december'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'máj', 'jún', 'júl', - 'aug', 'sep', 'okt', 'nov', 'dec'] - - day_names = ['', 'pondelok', 'utorok', 'streda', 'štvrtok', 'piatok', - 'sobota', 'nedeľa'] - day_abbreviations = ['', 'po', 'ut', 'st', 'št', 'pi', 'so', 'ne'] - - - def _format_timeframe(self, timeframe, delta): - '''Slovak aware time frame format function, takes into account - the differences between past and future forms.''' - form = self.timeframes[timeframe] - if isinstance(form, dict): - if delta == 0: - form = form['zero'] # And *never* use 0 in the singular! 
- elif delta > 0: - form = form['future'] - else: - form = form['past'] - delta = abs(delta) - - if isinstance(form, list): - if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[0] - else: - form = form[1] - - return form.format(delta) - - -class FarsiLocale(Locale): - - names = ['fa', 'fa_ir'] - - past = '{0} قبل' - future = 'در {0}' - - timeframes = { - 'now': 'اکنون', - 'seconds': 'ثانیه', - 'minute': 'یک دقیقه', - 'minutes': '{0} دقیقه', - 'hour': 'یک ساعت', - 'hours': '{0} ساعت', - 'day': 'یک روز', - 'days': '{0} روز', - 'month': 'یک ماه', - 'months': '{0} ماه', - 'year': 'یک سال', - 'years': '{0} سال', - } - - meridians = { - 'am': 'قبل از ظهر', - 'pm': 'بعد از ظهر', - 'AM': 'قبل از ظهر', - 'PM': 'بعد از ظهر', - } - - month_names = ['', 'January', 'February', 'March', 'April', 'May', 'June', 'July', - 'August', 'September', 'October', 'November', 'December'] - month_abbreviations = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', - 'Sep', 'Oct', 'Nov', 'Dec'] - - day_names = ['', 'دو شنبه', 'سه شنبه', 'چهارشنبه', 'پنجشنبه', 'جمعه', 'شنبه', 'یکشنبه'] - day_abbreviations = ['', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - - -class MacedonianLocale(Locale): - names = ['mk', 'mk_mk'] - - past = 'пред {0}' - future = 'за {0}' - - timeframes = { - 'now': 'сега', - 'seconds': 'секунди', - 'minute': 'една минута', - 'minutes': '{0} минути', - 'hour': 'еден саат', - 'hours': '{0} саати', - 'day': 'еден ден', - 'days': '{0} дена', - 'month': 'еден месец', - 'months': '{0} месеци', - 'year': 'една година', - 'years': '{0} години', - } - - meridians = { - 'am': 'дп', - 'pm': 'пп', - 'AM': 'претпладне', - 'PM': 'попладне', - } - - month_names = ['', 'Јануари', 'Февруари', 'Март', 'Април', 'Мај', 'Јуни', 'Јули', 'Август', 'Септември', 'Октомври', - 'Ноември', 'Декември'] - month_abbreviations = ['', 'Јан.', ' Фев.', ' Мар.', ' Апр.', ' Мај', ' Јун.', ' Јул.', ' Авг.', ' Септ.', ' Окт.', - ' Ноем.', ' Декем.'] - - day_names = ['', 'Понеделник', ' Вторник', ' Среда', ' Четврток', ' Петок', ' Сабота', ' Недела'] - day_abbreviations = ['', 'Пон.', ' Вт.', ' Сре.', ' Чет.', ' Пет.', ' Саб.', ' Нед.'] - - -class HebrewLocale(Locale): - - names = ['he', 'he_IL'] - - past = 'לפני {0}' - future = 'בעוד {0}' - - timeframes = { - 'now': 'הרגע', - 'seconds': 'שניות', - 'minute': 'דקה', - 'minutes': '{0} דקות', - 'hour': 'שעה', - 'hours': '{0} שעות', - '2-hours': 'שעתיים', - 'day': 'יום', - 'days': '{0} ימים', - '2-days': 'יומיים', - 'month': 'חודש', - 'months': '{0} חודשים', - '2-months': 'חודשיים', - 'year': 'שנה', - 'years': '{0} שנים', - '2-years': 'שנתיים', - } - - meridians = { - 'am': 'לפנ"צ', - 'pm': 'אחר"צ', - 'AM': 'לפני הצהריים', - 'PM': 'אחרי הצהריים', - } - - month_names = ['', 'ינואר', 'פברואר', 'מרץ', 'אפריל', 'מאי', 'יוני', 'יולי', - 'אוגוסט', 'ספטמבר', 'אוקטובר', 'נובמבר', 'דצמבר'] - month_abbreviations = ['', 'ינו׳', 'פבר׳', 'מרץ', 'אפר׳', 'מאי', 'יוני', 'יולי', 'אוג׳', - 'ספט׳', 'אוק׳', 'נוב׳', 'דצמ׳'] - - day_names = ['', 'שני', 'שלישי', 'רביעי', 'חמישי', 'שישי', 'שבת', 'ראשון'] - day_abbreviations = ['', 'ב׳', 'ג׳', 'ד׳', 'ה׳', 'ו׳', 'ש׳', 'א׳'] - - def _format_timeframe(self, timeframe, delta): - '''Hebrew couple of aware''' - couple = '2-{0}'.format(timeframe) - if abs(delta) == 2 and couple in self.timeframes: - return self.timeframes[couple].format(abs(delta)) - else: - return self.timeframes[timeframe].format(abs(delta)) - -class MarathiLocale(Locale): - - names = ['mr'] - - past = '{0} आधी' - future = '{0} नंतर' - - timeframes = { - 
'now': 'सद्य', - 'seconds': 'सेकंद', - 'minute': 'एक मिनिट ', - 'minutes': '{0} मिनिट ', - 'hour': 'एक तास', - 'hours': '{0} तास', - 'day': 'एक दिवस', - 'days': '{0} दिवस', - 'month': 'एक महिना ', - 'months': '{0} महिने ', - 'year': 'एक वर्ष ', - 'years': '{0} वर्ष ', - } - - meridians = { - 'am': 'सकाळ', - 'pm': 'संध्याकाळ', - 'AM': 'सकाळ', - 'PM': 'संध्याकाळ', - } - - month_names = ['', 'जानेवारी', 'फेब्रुवारी', 'मार्च', 'एप्रिल', 'मे', 'जून', 'जुलै', - 'अॉगस्ट', 'सप्टेंबर', 'अॉक्टोबर', 'नोव्हेंबर', 'डिसेंबर'] - month_abbreviations = ['', 'जान', 'फेब्रु', 'मार्च', 'एप्रि', 'मे', 'जून', 'जुलै', 'अॉग', - 'सप्टें', 'अॉक्टो', 'नोव्हें', 'डिसें'] - - day_names = ['', 'सोमवार', 'मंगळवार', 'बुधवार', 'गुरुवार', 'शुक्रवार', 'शनिवार', 'रविवार'] - day_abbreviations = ['', 'सोम', 'मंगळ', 'बुध', 'गुरु', 'शुक्र', 'शनि', 'रवि'] - -def _map_locales(): - - locales = {} - - for cls_name, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): - if issubclass(cls, Locale): - for name in cls.names: - locales[name.lower()] = cls - - return locales - -class CatalanLocale(Locale): - names = ['ca', 'ca_es', 'ca_ad', 'ca_fr', 'ca_it'] - past = 'Fa {0}' - future = 'En {0}' - - timeframes = { - 'now': 'Ara mateix', - 'seconds': 'segons', - 'minute': '1 minut', - 'minutes': '{0} minuts', - 'hour': 'una hora', - 'hours': '{0} hores', - 'day': 'un dia', - 'days': '{0} dies', - 'month': 'un mes', - 'months': '{0} mesos', - 'year': 'un any', - 'years': '{0} anys', - } - - month_names = ['', 'Gener', 'Febrer', 'Març', 'Abril', 'Maig', 'Juny', 'Juliol', 'Agost', 'Setembre', 'Octubre', 'Novembre', 'Desembre'] - month_abbreviations = ['', 'Gener', 'Febrer', 'Març', 'Abril', 'Maig', 'Juny', 'Juliol', 'Agost', 'Setembre', 'Octubre', 'Novembre', 'Desembre'] - day_names = ['', 'Dilluns', 'Dimarts', 'Dimecres', 'Dijous', 'Divendres', 'Dissabte', 'Diumenge'] - day_abbreviations = ['', 'Dilluns', 'Dimarts', 'Dimecres', 'Dijous', 'Divendres', 'Dissabte', 'Diumenge'] - -class BasqueLocale(Locale): - names = ['eu', 'eu_eu'] - past = 'duela {0}' - future = '{0}' # I don't know what's the right phrase in Basque for the future. 
- - timeframes = { - 'now': 'Orain', - 'seconds': 'segundu', - 'minute': 'minutu bat', - 'minutes': '{0} minutu', - 'hour': 'ordu bat', - 'hours': '{0} ordu', - 'day': 'egun bat', - 'days': '{0} egun', - 'month': 'hilabete bat', - 'months': '{0} hilabet', - 'year': 'urte bat', - 'years': '{0} urte', - } - - month_names = ['', 'urtarrilak', 'otsailak', 'martxoak', 'apirilak', 'maiatzak', 'ekainak', 'uztailak', 'abuztuak', 'irailak', 'urriak', 'azaroak', 'abenduak'] - month_abbreviations = ['', 'urt', 'ots', 'mar', 'api', 'mai', 'eka', 'uzt', 'abu', 'ira', 'urr', 'aza', 'abe'] - day_names = ['', 'astelehena', 'asteartea', 'asteazkena', 'osteguna', 'ostirala', 'larunbata', 'igandea'] - day_abbreviations = ['', 'al', 'ar', 'az', 'og', 'ol', 'lr', 'ig'] - - - class HungarianLocale(Locale): - - names = ['hu', 'hu_hu'] - - past = '{0} ezelőtt' - future = '{0} múlva' - - timeframes = { - 'now': 'éppen most', - 'seconds': { - 'past': 'másodpercekkel', - 'future': 'pár másodperc' - }, - 'minute': {'past': 'egy perccel', 'future': 'egy perc'}, - 'minutes': {'past': '{0} perccel', 'future': '{0} perc'}, - 'hour': {'past': 'egy órával', 'future': 'egy óra'}, - 'hours': {'past': '{0} órával', 'future': '{0} óra'}, - 'day': { - 'past': 'egy nappal', - 'future': 'egy nap' - }, - 'days': { - 'past': '{0} nappal', - 'future': '{0} nap' - }, - 'month': {'past': 'egy hónappal', 'future': 'egy hónap'}, - 'months': {'past': '{0} hónappal', 'future': '{0} hónap'}, - 'year': {'past': 'egy évvel', 'future': 'egy év'}, - 'years': {'past': '{0} évvel', 'future': '{0} év'}, - } - - month_names = ['', 'január', 'február', 'március', 'április', 'május', - 'június', 'július', 'augusztus', 'szeptember', - 'október', 'november', 'december'] - month_abbreviations = ['', 'jan', 'febr', 'márc', 'ápr', 'máj', 'jún', - 'júl', 'aug', 'szept', 'okt', 'nov', 'dec'] - - day_names = ['', 'hétfő', 'kedd', 'szerda', 'csütörtök', 'péntek', - 'szombat', 'vasárnap'] - day_abbreviations = ['', 'hét', 'kedd', 'szer', 'csüt', 'pént', - 'szom', 'vas'] - - meridians = { - 'am': 'de', - 'pm': 'du', - 'AM': 'DE', - 'PM': 'DU', - } - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - - if isinstance(form, dict): - if delta > 0: - form = form['future'] - else: - form = form['past'] - - return form.format(abs(delta)) - - - class EsperantoLocale(Locale): - names = ['eo', 'eo_xx'] - past = 'antaŭ {0}' - future = 'post {0}' - - timeframes = { - 'now': 'nun', - 'seconds': 'kelkaj sekundoj', - 'minute': 'unu minuto', - 'minutes': '{0} minutoj', - 'hour': 'un horo', - 'hours': '{0} horoj', - 'day': 'unu tago', - 'days': '{0} tagoj', - 'month': 'unu monato', - 'months': '{0} monatoj', - 'year': 'unu jaro', - 'years': '{0} jaroj', - } - - month_names = ['', 'januaro', 'februaro', 'marto', 'aprilo', 'majo', - 'junio', 'julio', 'aŭgusto', 'septembro', 'oktobro', - 'novembro', 'decembro'] - month_abbreviations = ['', 'jan', 'feb', 'mar', 'apr', 'maj', 'jun', - 'jul', 'aŭg', 'sep', 'okt', 'nov', 'dec'] - - day_names = ['', 'lundo', 'mardo', 'merkredo', 'ĵaŭdo', 'vendredo', - 'sabato', 'dimanĉo'] - day_abbreviations = ['', 'lun', 'mar', 'mer', 'ĵaŭ', 'ven', - 'sab', 'dim'] - - meridians = { - 'am': 'atm', - 'pm': 'ptm', - 'AM': 'ATM', - 'PM': 'PTM', - } - - ordinal_day_re = r'((?P<value>[1-3]?[0-9](?=a))a)' - - def _ordinal_number(self, n): - return '{0}a'.format(n) - - - class ThaiLocale(Locale): - - names = ['th', 'th_th'] - - past = '{0}{1}ที่ผ่านมา' - future = 'ในอีก{1}{0}' - - timeframes = { - 'now': 'ขณะนี้', - 'seconds':
'ไม่กี่วินาที', - 'minute': '1 นาที', - 'minutes': '{0} นาที', - 'hour': '1 ชั่วโมง', - 'hours': '{0} ชั่วโมง', - 'day': '1 วัน', - 'days': '{0} วัน', - 'month': '1 เดือน', - 'months': '{0} เดือน', - 'year': '1 ปี', - 'years': '{0} ปี', - } - - month_names = ['', 'มกราคม', 'กุมภาพันธ์', 'มีนาคม', 'เมษายน', - 'พฤษภาคม', 'มิถุนายน', 'กรกฎาคม', 'สิงหาคม', - 'กันยายน', 'ตุลาคม', 'พฤศจิกายน', 'ธันวาคม'] - month_abbreviations = ['', 'ม.ค.', 'ก.พ.', 'มี.ค.', 'เม.ย.', 'พ.ค.', - 'มิ.ย.', 'ก.ค.', 'ส.ค.', 'ก.ย.', 'ต.ค.', - 'พ.ย.', 'ธ.ค.'] - - day_names = ['', 'จันทร์', 'อังคาร', 'พุธ', 'พฤหัสบดี', 'ศุกร์', - 'เสาร์', 'อาทิตย์'] - day_abbreviations = ['', 'จ', 'อ', 'พ', 'พฤ', 'ศ', 'ส', 'อา'] - - meridians = { - 'am': 'am', - 'pm': 'pm', - 'AM': 'AM', - 'PM': 'PM', - } - - BE_OFFSET = 543 - - def year_full(self, year): - '''Thai always use Buddhist Era (BE) which is CE + 543''' - year += self.BE_OFFSET - return '{0:04d}'.format(year) - - def year_abbreviation(self, year): - '''Thai always use Buddhist Era (BE) which is CE + 543''' - year += self.BE_OFFSET - return '{0:04d}'.format(year)[2:] - - def _format_relative(self, humanized, timeframe, delta): - '''Thai normally doesn't have any space between words''' - if timeframe == 'now': - return humanized - space = '' if timeframe == 'seconds' else ' ' - direction = self.past if delta < 0 else self.future - - return direction.format(humanized, space) - - - -class BengaliLocale(Locale): - - names = ['bn', 'bn_bd', 'bn_in'] - - past = '{0} আগে' - future = '{0} পরে' - - timeframes = { - 'now': 'এখন', - 'seconds': 'সেকেন্ড', - 'minute': 'এক মিনিট', - 'minutes': '{0} মিনিট', - 'hour': 'এক ঘণ্টা', - 'hours': '{0} ঘণ্টা', - 'day': 'এক দিন', - 'days': '{0} দিন', - 'month': 'এক মাস', - 'months': '{0} মাস ', - 'year': 'এক বছর', - 'years': '{0} বছর', - } - - meridians = { - 'am': 'সকাল', - 'pm': 'বিকাল', - 'AM': 'সকাল', - 'PM': 'বিকাল', - } - - month_names = ['', 'জানুয়ারি', 'ফেব্রুয়ারি', 'মার্চ', 'এপ্রিল', 'মে', 'জুন', 'জুলাই', - 'আগস্ট', 'সেপ্টেম্বর', 'অক্টোবর', 'নভেম্বর', 'ডিসেম্বর'] - month_abbreviations = ['', 'জানু', 'ফেব', 'মার্চ', 'এপ্রি', 'মে', 'জুন', 'জুল', - 'অগা','সেপ্ট', 'অক্টো', 'নভে', 'ডিসে'] - - day_names = ['', 'সোমবার', 'মঙ্গলবার', 'বুধবার', 'বৃহস্পতিবার', 'শুক্রবার', 'শনিবার', 'রবিবার'] - day_abbreviations = ['', 'সোম', 'মঙ্গল', 'বুধ', 'বৃহঃ', 'শুক্র', 'শনি', 'রবি'] - - def _ordinal_number(self, n): - if n > 10 or n == 0: - return '{0}তম'.format(n) - if n in [1, 5, 7, 8, 9, 10]: - return '{0}ম'.format(n) - if n in [2, 3]: - return '{0}য়'.format(n) - if n == 4: - return '{0}র্থ'.format(n) - if n == 6: - return '{0}ষ্ঠ'.format(n) - - -class RomanshLocale(Locale): - - names = ['rm', 'rm_ch'] - - past = 'avant {0}' - future = 'en {0}' - - timeframes = { - 'now': 'en quest mument', - 'seconds': 'secundas', - 'minute': 'ina minuta', - 'minutes': '{0} minutas', - 'hour': 'in\'ura', - 'hours': '{0} ura', - 'day': 'in di', - 'days': '{0} dis', - 'month': 'in mais', - 'months': '{0} mais', - 'year': 'in onn', - 'years': '{0} onns', - } - - month_names = [ - '', 'schaner', 'favrer', 'mars', 'avrigl', 'matg', 'zercladur', - 'fanadur', 'avust', 'settember', 'october', 'november', 'december' - ] - - month_abbreviations = [ - '', 'schan', 'fav', 'mars', 'avr', 'matg', 'zer', 'fan', 'avu', - 'set', 'oct', 'nov', 'dec' - ] - - day_names = [ - '', 'glindesdi', 'mardi', 'mesemna', 'gievgia', 'venderdi', - 'sonda', 'dumengia' - ] - - day_abbreviations = [ - '', 'gli', 'ma', 'me', 'gie', 've', 'so', 'du' - ] - - -class SwissLocale(Locale): - - names = ['de', 
'de_ch'] - - past = 'vor {0}' - future = 'in {0}' - - timeframes = { - 'now': 'gerade eben', - 'seconds': 'Sekunden', - 'minute': 'einer Minute', - 'minutes': '{0} Minuten', - 'hour': 'einer Stunde', - 'hours': '{0} Stunden', - 'day': 'einem Tag', - 'days': '{0} Tagen', - 'month': 'einem Monat', - 'months': '{0} Monaten', - 'year': 'einem Jahr', - 'years': '{0} Jahren', - } - - month_names = [ - '', 'Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', - 'August', 'September', 'Oktober', 'November', 'Dezember' - ] - - month_abbreviations = [ - '', 'Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', - 'Okt', 'Nov', 'Dez' - ] - - day_names = [ - '', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', - 'Samstag', 'Sonntag' - ] - - day_abbreviations = [ - '', 'Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So' - ] - - -class RomanianLocale(Locale): - names = ['ro', 'ro_ro'] - - past = '{0} în urmă' - future = 'peste {0}' - - timeframes = { - 'now': 'acum', - 'seconds': 'câteva secunde', - 'minute': 'un minut', - 'minutes': '{0} minute', - 'hour': 'o oră', - 'hours': '{0} ore', - 'day': 'o zi', - 'days': '{0} zile', - 'month': 'o lună', - 'months': '{0} luni', - 'year': 'un an', - 'years': '{0} ani', - } - - month_names = ['', 'ianuarie', 'februarie', 'martie', 'aprilie', 'mai', 'iunie', 'iulie', - 'august', 'septembrie', 'octombrie', 'noiembrie', 'decembrie'] - month_abbreviations = ['', 'ian', 'febr', 'mart', 'apr', 'mai', 'iun', 'iul', 'aug', 'sept', 'oct', 'nov', 'dec'] - - day_names = ['', 'luni', 'marți', 'miercuri', 'joi', 'vineri', 'sâmbătă', 'duminică'] - day_abbreviations = ['', 'Lun', 'Mar', 'Mie', 'Joi', 'Vin', 'Sâm', 'Dum'] - - -class SlovenianLocale(Locale): - names = ['sl', 'sl_si'] - - past = 'pred {0}' - future = 'čez {0}' - - timeframes = { - 'now': 'zdaj', - 'seconds': 'sekund', - 'minute': 'minuta', - 'minutes': '{0} minutami', - 'hour': 'uro', - 'hours': '{0} ur', - 'day': 'dan', - 'days': '{0} dni', - 'month': 'mesec', - 'months': '{0} mesecev', - 'year': 'leto', - 'years': '{0} let', - } - - meridians = { - 'am': '', - 'pm': '', - 'AM': '', - 'PM': '', - } - - month_names = [ - '', 'Januar', 'Februar', 'Marec', 'April', 'Maj', 'Junij', 'Julij', - 'Avgust', 'September', 'Oktober', 'November', 'December' - ] - - month_abbreviations = [ - '', 'Jan', 'Feb', 'Mar', 'Apr', 'Maj', 'Jun', 'Jul', 'Avg', - 'Sep', 'Okt', 'Nov', 'Dec' - ] - - day_names = [ - '', 'Ponedeljek', 'Torek', 'Sreda', 'Četrtek', 'Petek', 'Sobota', 'Nedelja' - ] - - day_abbreviations = [ - '', 'Pon', 'Tor', 'Sre', 'Čet', 'Pet', 'Sob', 'Ned' - ] - -class IndonesianLocale(Locale): - - names = ['id', 'id_id'] - - past = '{0} yang lalu' - future = 'dalam {0}' - - timeframes = { - 'now': 'baru saja', - 'seconds': 'detik', - 'minute': '1 menit', - 'minutes': '{0} menit', - 'hour': '1 jam', - 'hours': '{0} jam', - 'day': '1 hari', - 'days': '{0} hari', - 'month': '1 bulan', - 'months': '{0} bulan', - 'year': '1 tahun', - 'years': '{0} tahun' - } - - meridians = { - 'am': '', - 'pm': '', - 'AM': '', - 'PM': '' - } - - month_names = ['', 'Januari', 'Februari', 'Maret', 'April', 'Mei', 'Juni', 'Juli', - 'Agustus', 'September', 'Oktober', 'November', 'Desember'] - - month_abbreviations = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'Mei', 'Jun', 'Jul', - 'Ags', 'Sept', 'Okt', 'Nov', 'Des'] - - day_names = ['', 'Senin', 'Selasa', 'Rabu', 'Kamis', 'Jumat', 'Sabtu', 'Minggu'] - - day_abbreviations = ['', 'Senin', 'Selasa', 'Rabu', 'Kamis', 'Jumat', 'Sabtu', 'Minggu'] - - -class NepaliLocale(Locale): - names = ['ne', 
'ne_np'] - - past = '{0} पहिले' - future = '{0} पछी' - - timeframes = { - 'now': 'अहिले', - 'seconds': 'सेकण्ड', - 'minute': 'मिनेट', - 'minutes': '{0} मिनेट', - 'hour': 'एक घण्टा', - 'hours': '{0} घण्टा', - 'day': 'एक दिन', - 'days': '{0} दिन', - 'month': 'एक महिना', - 'months': '{0} महिना', - 'year': 'एक बर्ष', - 'years': 'बर्ष' - } - - meridians = { - 'am': 'पूर्वाह्न', - 'pm': 'अपरान्ह', - 'AM': 'पूर्वाह्न', - 'PM': 'अपरान्ह' - } - - month_names = [ - '', 'जनवरी', 'फेब्रुअरी', 'मार्च', 'एप्रील', 'मे', 'जुन', - 'जुलाई', 'अगष्ट', 'सेप्टेम्बर', 'अक्टोबर', 'नोवेम्बर', 'डिसेम्बर' - ] - month_abbreviations = [ - '', 'जन', 'फेब', 'मार्च', 'एप्रील', 'मे', 'जुन', 'जुलाई', 'अग', - 'सेप', 'अक्ट', 'नोव', 'डिस' - ] - - day_names = [ - '', 'सोमवार', 'मंगलवार', 'बुधवार', 'बिहिवार', 'शुक्रवार', 'शनिवार', 'आइतवार' - ] - - day_abbreviations = [ - '', 'सोम', 'मंगल', 'बुध', 'बिहि', 'शुक्र', 'शनि', 'आइत' - ] - - -_locales = _map_locales() diff --git a/pype/vendor/arrow/parser.py b/pype/vendor/arrow/parser.py deleted file mode 100644 index e4f368ad5a..0000000000 --- a/pype/vendor/arrow/parser.py +++ /dev/null @@ -1,344 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from __future__ import unicode_literals - -from datetime import datetime -from dateutil import tz -import re - -try: - from functools import lru_cache -except ImportError: # pragma: no cover - from backports.functools_lru_cache import lru_cache # pragma: no cover - -from arrow import locales - - -class ParserError(RuntimeError): - pass - - -class DateTimeParser(object): - - _FORMAT_RE = re.compile('(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|X)') - _ESCAPE_RE = re.compile('\[[^\[\]]*\]') - - _ONE_OR_MORE_DIGIT_RE = re.compile('\d+') - _ONE_OR_TWO_DIGIT_RE = re.compile('\d{1,2}') - _FOUR_DIGIT_RE = re.compile('\d{4}') - _TWO_DIGIT_RE = re.compile('\d{2}') - _TZ_RE = re.compile('[+\-]?\d{2}:?(\d{2})?') - _TZ_NAME_RE = re.compile('\w[\w+\-/]+') - - - _BASE_INPUT_RE_MAP = { - 'YYYY': _FOUR_DIGIT_RE, - 'YY': _TWO_DIGIT_RE, - 'MM': _TWO_DIGIT_RE, - 'M': _ONE_OR_TWO_DIGIT_RE, - 'DD': _TWO_DIGIT_RE, - 'D': _ONE_OR_TWO_DIGIT_RE, - 'HH': _TWO_DIGIT_RE, - 'H': _ONE_OR_TWO_DIGIT_RE, - 'hh': _TWO_DIGIT_RE, - 'h': _ONE_OR_TWO_DIGIT_RE, - 'mm': _TWO_DIGIT_RE, - 'm': _ONE_OR_TWO_DIGIT_RE, - 'ss': _TWO_DIGIT_RE, - 's': _ONE_OR_TWO_DIGIT_RE, - 'X': re.compile('\d+'), - 'ZZZ': _TZ_NAME_RE, - 'ZZ': _TZ_RE, - 'Z': _TZ_RE, - 'S': _ONE_OR_MORE_DIGIT_RE, - } - - MARKERS = ['YYYY', 'MM', 'DD'] - SEPARATORS = ['-', '/', '.'] - - def __init__(self, locale='en_us', cache_size=0): - - self.locale = locales.get_locale(locale) - self._input_re_map = self._BASE_INPUT_RE_MAP.copy() - self._input_re_map.update({ - 'MMMM': self._choice_re(self.locale.month_names[1:], re.IGNORECASE), - 'MMM': self._choice_re(self.locale.month_abbreviations[1:], - re.IGNORECASE), - 'Do': re.compile(self.locale.ordinal_day_re), - 'dddd': self._choice_re(self.locale.day_names[1:], re.IGNORECASE), - 'ddd': self._choice_re(self.locale.day_abbreviations[1:], - re.IGNORECASE), - 'd': re.compile(r"[1-7]"), - 'a': self._choice_re( - (self.locale.meridians['am'], self.locale.meridians['pm']) - ), - # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to - # ensure backwards compatibility of this token - 'A': self._choice_re(self.locale.meridians.values()) - }) - if cache_size > 0: - self._generate_pattern_re =\ - lru_cache(maxsize=cache_size)(self._generate_pattern_re) - - def parse_iso(self, string): - - has_time = 'T' in string or ' ' in string.strip() - 
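-        # Illustrative examples (editor's sketch, not part of the upstream
-        # arrow code): '2013-05-05 12:30:45' has a time part and a space
-        # divider; '2013-05-05T12:30:45' has a time part but no space; a bare
-        # '2013-05-05' has neither, so only date formats are generated below.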
space_divider = ' ' in string.strip() - - if has_time: - if space_divider: - date_string, time_string = string.split(' ', 1) - else: - date_string, time_string = string.split('T', 1) - time_parts = re.split('[+-]', time_string, 1) - has_tz = len(time_parts) > 1 - has_seconds = time_parts[0].count(':') > 1 - has_subseconds = re.search('[.,]', time_parts[0]) - - if has_subseconds: - formats = ['YYYY-MM-DDTHH:mm:ss%sS' % has_subseconds.group()] - elif has_seconds: - formats = ['YYYY-MM-DDTHH:mm:ss'] - else: - formats = ['YYYY-MM-DDTHH:mm'] - else: - has_tz = False - # generate required formats: YYYY-MM-DD, YYYY-MM, YYYY - # using various separators: -, /, . - l = len(self.MARKERS) - formats = [separator.join(self.MARKERS[:l-i]) - for i in range(l) - for separator in self.SEPARATORS] - - if has_time and has_tz: - formats = [f + 'Z' for f in formats] - - if space_divider: - formats = [item.replace('T', ' ', 1) for item in formats] - - return self._parse_multiformat(string, formats) - - def _generate_pattern_re(self, fmt): - - # fmt is a string of tokens like 'YYYY-MM-DD' - # we construct a new string by replacing each - # token by its pattern: - # 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>
\d{2})' - tokens = [] - offset = 0 - - # Extract the bracketed expressions to be reinserted later. - escaped_fmt = re.sub(self._ESCAPE_RE, "#", fmt) - # Any number of S is the same as one. - escaped_fmt = re.sub('S+', 'S', escaped_fmt) - escaped_data = re.findall(self._ESCAPE_RE, fmt) - - fmt_pattern = escaped_fmt - - for m in self._FORMAT_RE.finditer(escaped_fmt): - token = m.group(0) - try: - input_re = self._input_re_map[token] - except KeyError: - raise ParserError('Unrecognized token \'{0}\''.format(token)) - input_pattern = '(?P<{0}>{1})'.format(token, input_re.pattern) - tokens.append(token) - # a pattern doesn't have the same length as the token - # it replaces! We keep the difference in the offset variable. - # This works because the string is scanned left-to-right and matches - # are returned in the order found by finditer. - fmt_pattern = fmt_pattern[:m.start() + offset] + input_pattern + fmt_pattern[m.end() + offset:] - offset += len(input_pattern) - (m.end() - m.start()) - - final_fmt_pattern = "" - a = fmt_pattern.split("#") - b = escaped_data - - # Due to the way Python splits, 'a' will always be longer - for i in range(len(a)): - final_fmt_pattern += a[i] - if i < len(b): - final_fmt_pattern += b[i][1:-1] - - return tokens, re.compile(final_fmt_pattern, flags=re.IGNORECASE) - - def parse(self, string, fmt): - - if isinstance(fmt, list): - return self._parse_multiformat(string, fmt) - - fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt) - - match = fmt_pattern_re.search(string) - if match is None: - raise ParserError('Failed to match \'{0}\' when parsing \'{1}\'' - .format(fmt_pattern_re.pattern, string)) - parts = {} - for token in fmt_tokens: - if token == 'Do': - value = match.group('value') - else: - value = match.group(token) - self._parse_token(token, value, parts) - return self._build_datetime(parts) - - def _parse_token(self, token, value, parts): - - if token == 'YYYY': - parts['year'] = int(value) - elif token == 'YY': - value = int(value) - parts['year'] = 1900 + value if value > 68 else 2000 + value - - elif token in ['MMMM', 'MMM']: - parts['month'] = self.locale.month_number(value.lower()) - - elif token in ['MM', 'M']: - parts['month'] = int(value) - - elif token in ['DD', 'D']: - parts['day'] = int(value) - - elif token in ['Do']: - parts['day'] = int(value) - - elif token.upper() in ['HH', 'H']: - parts['hour'] = int(value) - - elif token in ['mm', 'm']: - parts['minute'] = int(value) - - elif token in ['ss', 's']: - parts['second'] = int(value) - - elif token == 'S': - # We have the *most significant* digits of an arbitrary-precision integer. - # We want the six most significant digits as an integer, rounded. - # FIXME: add nanosecond support somehow? 
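-            # Worked example (editor's illustration, not part of the upstream
-            # arrow code): '1234565' -> the seventh digit is 5 and the sixth
-            # (6) is even, so microsecond = 123456; '1234575' -> the sixth
-            # digit (7) is odd, so round up to 123458; '12345' -> padded to
-            # '1234500', giving microsecond = 123450.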
- value = value.ljust(7, str('0')) - - # floating-point (IEEE-754) defaults to half-to-even rounding - seventh_digit = int(value[6]) - if seventh_digit == 5: - rounding = int(value[5]) % 2 - elif seventh_digit > 5: - rounding = 1 - else: - rounding = 0 - - parts['microsecond'] = int(value[:6]) + rounding - - elif token == 'X': - parts['timestamp'] = int(value) - - elif token in ['ZZZ', 'ZZ', 'Z']: - parts['tzinfo'] = TzinfoParser.parse(value) - - elif token in ['a', 'A']: - if value in ( - self.locale.meridians['am'], - self.locale.meridians['AM'] - ): - parts['am_pm'] = 'am' - elif value in ( - self.locale.meridians['pm'], - self.locale.meridians['PM'] - ): - parts['am_pm'] = 'pm' - - @staticmethod - def _build_datetime(parts): - - timestamp = parts.get('timestamp') - - if timestamp: - tz_utc = tz.tzutc() - return datetime.fromtimestamp(timestamp, tz=tz_utc) - - am_pm = parts.get('am_pm') - hour = parts.get('hour', 0) - - if am_pm == 'pm' and hour < 12: - hour += 12 - elif am_pm == 'am' and hour == 12: - hour = 0 - - return datetime(year=parts.get('year', 1), month=parts.get('month', 1), - day=parts.get('day', 1), hour=hour, minute=parts.get('minute', 0), - second=parts.get('second', 0), microsecond=parts.get('microsecond', 0), - tzinfo=parts.get('tzinfo')) - - def _parse_multiformat(self, string, formats): - - _datetime = None - - for fmt in formats: - try: - _datetime = self.parse(string, fmt) - break - except ParserError: - pass - - if _datetime is None: - raise ParserError('Could not match input to any of {0} on \'{1}\''.format(formats, string)) - - return _datetime - - @staticmethod - def _map_lookup(input_map, key): - - try: - return input_map[key] - except KeyError: - raise ParserError('Could not match "{0}" to {1}'.format(key, input_map)) - - @staticmethod - def _try_timestamp(string): - - try: - return float(string) - except: - return None - - @staticmethod - def _choice_re(choices, flags=0): - return re.compile('({0})'.format('|'.join(choices)), flags=flags) - - -class TzinfoParser(object): - - _TZINFO_RE = re.compile('([+\-])?(\d\d):?(\d\d)?') - - @classmethod - def parse(cls, string): - - tzinfo = None - - if string == 'local': - tzinfo = tz.tzlocal() - - elif string in ['utc', 'UTC']: - tzinfo = tz.tzutc() - - else: - - iso_match = cls._TZINFO_RE.match(string) - - if iso_match: - sign, hours, minutes = iso_match.groups() - if minutes is None: - minutes = 0 - seconds = int(hours) * 3600 + int(minutes) * 60 - - if sign == '-': - seconds *= -1 - - tzinfo = tz.tzoffset(None, seconds) - - else: - tzinfo = tz.gettz(string) - - if tzinfo is None: - raise ParserError('Could not parse timezone expression "{0}"'.format(string)) - - return tzinfo diff --git a/pype/vendor/arrow/util.py b/pype/vendor/arrow/util.py deleted file mode 100644 index 3eed4faa8b..0000000000 --- a/pype/vendor/arrow/util.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -import sys - -# python 2.6 / 2.7 definitions for total_seconds function. - -def _total_seconds_27(td): # pragma: no cover - return td.total_seconds() - -def _total_seconds_26(td): - return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6 - - -# get version info and assign correct total_seconds function. 
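-# Editor's check (illustrative, not part of the upstream arrow code): both
-# definitions above agree, e.g. timedelta(days=1, seconds=1,
-# microseconds=500000) yields (500000 + (1 + 86400) * 1e6) / 1e6 == 86401.5,
-# matching td.total_seconds(). Note the version test below compares strings,
-# which suffices to distinguish CPython 2.6 from 2.7+ but is not a general
-# version comparison.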
- -version = '{0}.{1}.{2}'.format(*sys.version_info[:3]) - -if version < '2.7': # pragma: no cover - total_seconds = _total_seconds_26 -else: # pragma: no cover - total_seconds = _total_seconds_27 - -def is_timestamp(value): - if type(value) == bool: - return False - try: - float(value) - return True - except: - return False - -# python 2.7 / 3.0+ definitions for isstr function. - -try: # pragma: no cover - basestring - - def isstr(s): - return isinstance(s, basestring) - -except NameError: #pragma: no cover - - def isstr(s): - return isinstance(s, str) - - -__all__ = ['total_seconds', 'is_timestamp', 'isstr'] diff --git a/pype/vendor/chardet/__init__.py b/pype/vendor/chardet/__init__.py deleted file mode 100644 index 0f9f820ef6..0000000000 --- a/pype/vendor/chardet/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -from .compat import PY2, PY3 -from .universaldetector import UniversalDetector -from .version import __version__, VERSION - - -def detect(byte_str): - """ - Detect the encoding of the given byte string. - - :param byte_str: The byte sequence to examine. - :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{0}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - detector = UniversalDetector() - detector.feed(byte_str) - return detector.close() diff --git a/pype/vendor/chardet/big5freq.py b/pype/vendor/chardet/big5freq.py deleted file mode 100644 index 38f32517aa..0000000000 --- a/pype/vendor/chardet/big5freq.py +++ /dev/null @@ -1,386 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Big5 frequency table -# by Taiwan's Mandarin Promotion Council -# -# -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -#Char to FreqOrder table -BIG5_TABLE_SIZE = 5376 - -BIG5_CHAR_TO_FREQ_ORDER = ( - 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 -3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 -1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 - 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 -3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 -4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 -5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 - 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 - 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 - 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 -2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 -1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 -3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 - 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 -3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 -2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 - 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 -3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 -1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 -5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 - 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 -5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 -1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 - 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 - 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 -3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 -3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 - 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 -2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 -2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 - 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 - 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 -3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 -1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 -1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 -1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 -2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 - 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 -4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 -1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 -5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 -2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 - 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 - 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 - 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 - 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 -5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 - 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 -1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 - 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 - 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 -5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 -1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 - 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 -3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 -4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 -3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 - 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 - 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 -1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 -4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 -3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 -3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 -2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 -5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 -3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 -5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 -1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 -2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 -1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 - 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 -1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 -4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 -3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 - 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 - 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 - 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 -2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 -5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 -1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 -2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 -1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 -1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 -5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 -5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 -5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 -3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 -4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 -4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 -2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 -5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 -3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 - 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 -5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 -5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 -1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 -2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 -3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 -4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 -5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 -3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 -4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 -1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 -1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 -4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 -1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 - 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 -1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 -1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 -3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 - 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 -5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 -2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 -1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 -1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 -5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 - 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 -4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 - 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 -2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 - 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 -1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 -1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 - 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 -4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 -4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 -1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 -3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 -5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 -5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 -1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 -2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 -1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 -3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 -2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 -3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 -2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 -4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 -4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 -3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 - 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 -3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 - 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 -3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 -4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 -3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 -1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 -5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 - 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 -5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 -1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 - 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 -4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 -4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 - 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 -2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 -2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 -3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 -1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 -4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 -2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 -1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 -1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 -2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 -3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 -1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 -5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 -1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 -4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 -1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 - 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 -1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 -4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 -4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 -2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 -1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 -4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 - 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 -5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 -2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 -3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 -4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 - 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 -5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 -5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 -1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 -4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 -4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 -2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 -3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 -3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
-2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 -1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 -4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 -3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 -3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 -2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 -4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 -5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 -3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 -2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 -3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 -1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 -2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 -3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 -4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 -2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 -2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 -5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 -1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 -2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 -1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 -3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 -4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 -2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 -3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 -3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 -2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 -4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 -2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 -3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 -4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 -5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 -3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 - 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 -1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 -4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 -1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 -4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 -5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 - 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
-5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 -5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 -2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 -3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 -2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 -2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 - 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 -1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 -4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 -3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 -3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 - 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 -2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 - 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 -2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 -4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 -1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 -4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 -1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 -3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 - 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 -3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 -5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 -5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 -3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 -3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 -1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 -2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 -5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 -1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 -1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 -3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 - 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 -1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 -4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 -5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 -2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 -3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 - 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 -1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 -2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 -2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 -5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 -5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 -5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 -2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 -2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 -1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 -4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 -3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 -3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 -4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 -4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 -2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 -2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 -5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 -4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 -5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 -4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 - 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 - 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 -1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 -3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 -4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 -1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 -5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 -2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 -2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 -3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 -5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 -1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 -3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 -5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 -1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 -5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 -2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 -3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 -2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 -3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 -3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 -3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 -4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 - 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 -2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 -4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 -3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 -5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 -1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 -5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 - 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 -1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 - 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 -4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 -1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 -4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 -1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 - 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 -3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 -4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 -5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 - 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 -3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 - 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 -2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 -) - diff --git a/pype/vendor/chardet/big5prober.py b/pype/vendor/chardet/big5prober.py deleted file mode 100644 index 98f9970122..0000000000 --- a/pype/vendor/chardet/big5prober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import Big5DistributionAnalysis -from .mbcssm import BIG5_SM_MODEL - - -class Big5Prober(MultiByteCharSetProber): - def __init__(self): - super(Big5Prober, self).__init__() - self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) - self.distribution_analyzer = Big5DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "Big5" - - @property - def language(self): - return "Chinese" diff --git a/pype/vendor/chardet/chardistribution.py b/pype/vendor/chardet/chardistribution.py deleted file mode 100644 index c0395f4a45..0000000000 --- a/pype/vendor/chardet/chardistribution.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, - EUCTW_TYPICAL_DISTRIBUTION_RATIO) -from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, - EUCKR_TYPICAL_DISTRIBUTION_RATIO) -from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, - GB2312_TYPICAL_DISTRIBUTION_RATIO) -from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, - BIG5_TYPICAL_DISTRIBUTION_RATIO) -from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, - JIS_TYPICAL_DISTRIBUTION_RATIO) - - -class CharDistributionAnalysis(object): - ENOUGH_DATA_THRESHOLD = 1024 - SURE_YES = 0.99 - SURE_NO = 0.01 - MINIMUM_DATA_THRESHOLD = 3 - - def __init__(self): - # Mapping table to get frequency order from char order (get from - # GetOrder()) - self._char_to_freq_order = None - self._table_size = None # Size of above table - # This is a constant value which varies from language to language, - # used in calculating confidence. See - # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html - # for further detail. 
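(Editor's note on the hunk above: the comment is the crux of the distribution method. The analyzer counts how many two-byte characters fall among the language's most frequent ones, and the language-specific ratio turns that count into a confidence. A minimal standalone sketch of the calculation as it appears in the get_confidence() method removed below; the thresholds 3, 0.01 and 0.99 are the class constants, and the 6.0 default mirrors the EUC-KR ratio and is only an example, not a public chardet API.)

# Sketch of the confidence formula from the CharDistributionAnalysis
# class being removed in this hunk; standalone illustration only.
def distribution_confidence(freq_chars, total_chars, typical_ratio=6.0):
    # Too little data in range: effectively "no" (SURE_NO).
    if total_chars <= 0 or freq_chars <= 3:
        return 0.01
    if total_chars != freq_chars:
        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
        if r < 0.99:
            return r
    return 0.99  # never claim 100% certainty (SURE_YES)

print(distribution_confidence(120, 200))  # 0.25
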
- self.typical_distribution_ratio = None - self._done = None - self._total_chars = None - self._freq_chars = None - self.reset() - - def reset(self): - """reset analyser, clear any state""" - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - self._total_chars = 0 # Total characters encountered - # The number of characters whose frequency order is less than 512 - self._freq_chars = 0 - - def feed(self, char, char_len): - """feed a character with known length""" - if char_len == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(char) - else: - order = -1 - if order >= 0: - self._total_chars += 1 - # order is valid - if order < self._table_size: - if 512 > self._char_to_freq_order[order]: - self._freq_chars += 1 - - def get_confidence(self): - """return confidence based on existing data""" - # if we didn't receive any character in our consideration range, - # return negative answer - if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: - return self.SURE_NO - - if self._total_chars != self._freq_chars: - r = (self._freq_chars / ((self._total_chars - self._freq_chars) - * self.typical_distribution_ratio)) - if r < self.SURE_YES: - return r - - # normalize confidence (we don't want to be 100% sure) - return self.SURE_YES - - def got_enough_data(self): - # It is not necessary to receive all data to draw conclusion. - # For charset detection, certain amount of data is enough - return self._total_chars > self.ENOUGH_DATA_THRESHOLD - - def get_order(self, byte_str): - # We do not handle characters based on the original encoding string, - # but convert this encoding string to a number, here called order. - # This allows multiple encodings of a language to share one frequency - # table. - return -1 - - -class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCTWDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER - self._table_size = EUCTW_TABLE_SIZE - self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-TW encoding, we are interested - # first byte range: 0xc4 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = byte_str[0] - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 - else: - return -1 - - -class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCKRDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER - self._table_size = EUCKR_TABLE_SIZE - self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-KR encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. 
State machine has done that - first_char = byte_str[0] - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 - else: - return -1 - - -class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(GB2312DistributionAnalysis, self).__init__() - self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER - self._table_size = GB2312_TABLE_SIZE - self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for GB2312 encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0xB0) and (second_char >= 0xA1): - return 94 * (first_char - 0xB0) + second_char - 0xA1 - else: - return -1 - - -class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(Big5DistributionAnalysis, self).__init__() - self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER - self._table_size = BIG5_TABLE_SIZE - self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for big5 encoding, we are interested - # first byte range: 0xa4 -- 0xfe - # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: - return -1 - - -class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(SJISDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for sjis encoding, we are interested - # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe - # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0x81) and (first_char <= 0x9F): - order = 188 * (first_char - 0x81) - elif (first_char >= 0xE0) and (first_char <= 0xEF): - order = 188 * (first_char - 0xE0 + 31) - else: - return -1 - order = order + second_char - 0x40 - if second_char > 0x7F: - order = -1 - return order - - -class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCJPDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-JP encoding, we are interested - # first byte range: 0xa0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - char = byte_str[0] - if char >= 0xA0: - return 94 * (char - 0xA1) + byte_str[1] - 0xa1 - else: - return -1 diff --git a/pype/vendor/chardet/charsetgroupprober.py b/pype/vendor/chardet/charsetgroupprober.py deleted file mode 100644 index 8b3738efd8..0000000000 --- a/pype/vendor/chardet/charsetgroupprober.py +++ /dev/null @@ -1,106 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. 
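(Editor's note: all the get_order() implementations deleted above share one arithmetic trick. A valid two-byte character is flattened into a single index into the frequency table, with the trail byte acting as the low digit of a mixed-radix number: 94 trail values per lead byte for EUC-KR, GB2312 and EUC-TW, 157 for Big5. A small sketch of the EUC-KR case, with illustrative byte pairs.)

# Sketch of the mapping from the removed EUCKRDistributionAnalysis.get_order():
# lead byte 0xB0-0xFE, trail byte 0xA1-0xFE, 94 trail values per lead byte.
def euckr_order(byte_pair):
    first, second = byte_pair[0], byte_pair[1]
    if first >= 0xB0:
        return 94 * (first - 0xB0) + second - 0xA1
    return -1  # lead byte outside the tracked range

print(euckr_order(b"\xb0\xa1"))  # 0: first tracked code point
print(euckr_order(b"\xb1\xa1"))  # 94: next lead byte starts a new row
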
-# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState -from .charsetprober import CharSetProber - - -class CharSetGroupProber(CharSetProber): - def __init__(self, lang_filter=None): - super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) - self._active_num = 0 - self.probers = [] - self._best_guess_prober = None - - def reset(self): - super(CharSetGroupProber, self).reset() - self._active_num = 0 - for prober in self.probers: - if prober: - prober.reset() - prober.active = True - self._active_num += 1 - self._best_guess_prober = None - - @property - def charset_name(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.charset_name - - @property - def language(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.language - - def feed(self, byte_str): - for prober in self.probers: - if not prober: - continue - if not prober.active: - continue - state = prober.feed(byte_str) - if not state: - continue - if state == ProbingState.FOUND_IT: - self._best_guess_prober = prober - return self.state - elif state == ProbingState.NOT_ME: - prober.active = False - self._active_num -= 1 - if self._active_num <= 0: - self._state = ProbingState.NOT_ME - return self.state - return self.state - - def get_confidence(self): - state = self.state - if state == ProbingState.FOUND_IT: - return 0.99 - elif state == ProbingState.NOT_ME: - return 0.01 - best_conf = 0.0 - self._best_guess_prober = None - for prober in self.probers: - if not prober: - continue - if not prober.active: - self.logger.debug('%s not active', prober.charset_name) - continue - conf = prober.get_confidence() - self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) - if best_conf < conf: - best_conf = conf - self._best_guess_prober = prober - if not self._best_guess_prober: - return 0.0 - return best_conf diff --git a/pype/vendor/chardet/charsetprober.py b/pype/vendor/chardet/charsetprober.py deleted file mode 100644 index eac4e59865..0000000000 --- a/pype/vendor/chardet/charsetprober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. 
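(Editor's note: the CharSetGroupProber removed above is plain composition. The same bytes are fed to every active child prober, children that answer NOT_ME are deactivated, and the group answers with whichever survivor reports the highest confidence. A hedged standalone sketch of that selection loop; the prober objects here are stand-ins with .active and .get_confidence(), not chardet's classes.)

# Sketch of the "best guess among probers" selection from the deleted
# CharSetGroupProber.get_confidence(); `probers` is any iterable of
# objects exposing .active and .get_confidence() (illustrative protocol).
def best_guess(probers):
    best_conf, best_prober = 0.0, None
    for prober in probers:
        if not prober or not prober.active:
            continue  # eliminated earlier by a NOT_ME answer
        conf = prober.get_confidence()
        if conf > best_conf:
            best_conf, best_prober = conf, prober
    return best_prober, best_conf
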
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging -import re - -from .enums import ProbingState - - -class CharSetProber(object): - - SHORTCUT_THRESHOLD = 0.95 - - def __init__(self, lang_filter=None): - self._state = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - - def reset(self): - self._state = ProbingState.DETECTING - - @property - def charset_name(self): - return None - - def feed(self, buf): - pass - - @property - def state(self): - return self._state - - def get_confidence(self): - return 0.0 - - @staticmethod - def filter_high_byte_only(buf): - buf = re.sub(b'([\x00-\x7F])+', b' ', buf) - return buf - - @staticmethod - def filter_international_words(buf): - """ - We define three types of bytes: - alphabet: english alphabets [a-zA-Z] - international: international characters [\x80-\xFF] - marker: everything else [^a-zA-Z\x80-\xFF] - - The input buffer can be thought to contain a series of words delimited - by markers. This function works to filter all words that contain at - least one international character. All contiguous sequences of markers - are replaced by a single space ascii character. - - This filter applies to all scripts which do not use English characters. - """ - filtered = bytearray() - - # This regex expression filters out only words that have at-least one - # international character. The word may include one marker character at - # the end. - words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', - buf) - - for word in words: - filtered.extend(word[:-1]) - - # If the last character in the word is a marker, replace it with a - # space as markers shouldn't affect our analysis (they are used - # similarly across all languages and may thus have similar - # frequencies). - last_char = word[-1:] - if not last_char.isalpha() and last_char < b'\x80': - last_char = b' ' - filtered.extend(last_char) - - return filtered - - @staticmethod - def filter_with_english_letters(buf): - """ - Returns a copy of ``buf`` that retains only the sequences of English - alphabet and high byte characters that are not between <> characters. - Also retains English alphabet and high byte characters immediately - before occurrences of >. - - This filter can be applied to all scripts which contain both English - characters and extended ASCII characters, but is currently only used by - ``Latin1Prober``. 
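(Editor's note: the word filter documented above is easy to try in isolation. The pattern below is copied verbatim from the removed filter_international_words(); the sample bytes are only illustrative.)

import re

# Words made of ASCII letters and high bytes (\x80-\xFF), keeping at most
# one trailing marker byte - the exact regex from the deleted method.
words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?',
                   b'plain ascii caf\xe9 and na\xefve words!')
print(words)  # [b'caf\xe9 ', b'na\xefve '] - only words with a high byte survive
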
- """ - filtered = bytearray() - in_tag = False - prev = 0 - - for curr in range(len(buf)): - # Slice here to get bytes instead of an int with Python 3 - buf_char = buf[curr:curr + 1] - # Check if we're coming out of or entering an HTML tag - if buf_char == b'>': - in_tag = False - elif buf_char == b'<': - in_tag = True - - # If current character is not extended-ASCII and not alphabetic... - if buf_char < b'\x80' and not buf_char.isalpha(): - # ...and we're not in a tag - if curr > prev and not in_tag: - # Keep everything after last non-extended-ASCII, - # non-alphabetic character - filtered.extend(buf[prev:curr]) - # Output a space to delimit stretch we kept - filtered.extend(b' ') - prev = curr + 1 - - # If we're not in a tag... - if not in_tag: - # Keep everything after last non-extended-ASCII, non-alphabetic - # character - filtered.extend(buf[prev:]) - - return filtered diff --git a/pype/vendor/chardet/cli/__init__.py b/pype/vendor/chardet/cli/__init__.py deleted file mode 100644 index 8b13789179..0000000000 --- a/pype/vendor/chardet/cli/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pype/vendor/chardet/cli/chardetect.py b/pype/vendor/chardet/cli/chardetect.py deleted file mode 100644 index f0a4cc5d79..0000000000 --- a/pype/vendor/chardet/cli/chardetect.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" - -from __future__ import absolute_import, print_function, unicode_literals - -import argparse -import sys - -from chardet import __version__ -from chardet.compat import PY2 -from chardet.universaldetector import UniversalDetector - - -def description_of(lines, name='stdin'): - """ - Return a string describing the probable encoding of a file or - list of strings. - - :param lines: The lines to get the encoding of. - :type lines: Iterable of bytes - :param name: Name of file or collection of lines - :type name: str - """ - u = UniversalDetector() - for line in lines: - line = bytearray(line) - u.feed(line) - # shortcut out of the loop to save reading further - particularly useful if we read a BOM. - if u.done: - break - u.close() - result = u.result - if PY2: - name = name.decode(sys.getfilesystemencoding(), 'ignore') - if result['encoding']: - return '{0}: {1} with confidence {2}'.format(name, result['encoding'], - result['confidence']) - else: - return '{0}: no result'.format(name) - - -def main(argv=None): - """ - Handles command line arguments and gets things started. - - :param argv: List of arguments, as if specified on the command-line. - If None, ``sys.argv[1:]`` is used instead. - :type argv: list of str - """ - # Get command line arguments - parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings") - parser.add_argument('input', - help='File whose encoding we would like to determine. \ - (default: stdin)', - type=argparse.FileType('rb'), nargs='*', - default=[sys.stdin if PY2 else sys.stdin.buffer]) - parser.add_argument('--version', action='version', - version='%(prog)s {0}'.format(__version__)) - args = parser.parse_args(argv) - - for f in args.input: - if f.isatty(): - print("You are running chardetect interactively. 
Press " + - "CTRL-D twice at the start of a blank line to signal the " + - "end of your input. If you want help, run chardetect " + - "--help\n", file=sys.stderr) - print(description_of(f, f.name)) - - -if __name__ == '__main__': - main() diff --git a/pype/vendor/chardet/codingstatemachine.py b/pype/vendor/chardet/codingstatemachine.py deleted file mode 100644 index 68fba44f14..0000000000 --- a/pype/vendor/chardet/codingstatemachine.py +++ /dev/null @@ -1,88 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging - -from .enums import MachineState - - -class CodingStateMachine(object): - """ - A state machine to verify a byte sequence for a particular encoding. For - each byte the detector receives, it will feed that byte to every active - state machine available, one byte at a time. The state machine changes its - state based on its previous state and the byte it receives. There are 3 - states in a state machine that are of interest to an auto-detector: - - START state: This is the state to start with, or a legal byte sequence - (i.e. a valid code point) for character has been identified. - - ME state: This indicates that the state machine identified a byte sequence - that is specific to the charset it is designed for and that - there is no other possible encoding which can contain this byte - sequence. This will to lead to an immediate positive answer for - the detector. - - ERROR state: This indicates the state machine identified an illegal byte - sequence for that encoding. This will lead to an immediate - negative answer for this encoding. Detector will exclude this - encoding from consideration from here on. 
- """ - def __init__(self, sm): - self._model = sm - self._curr_byte_pos = 0 - self._curr_char_len = 0 - self._curr_state = None - self.logger = logging.getLogger(__name__) - self.reset() - - def reset(self): - self._curr_state = MachineState.START - - def next_state(self, c): - # for each byte we get its class - # if it is first byte, we also get byte length - byte_class = self._model['class_table'][c] - if self._curr_state == MachineState.START: - self._curr_byte_pos = 0 - self._curr_char_len = self._model['char_len_table'][byte_class] - # from byte's class and state_table, we get its next state - curr_state = (self._curr_state * self._model['class_factor'] - + byte_class) - self._curr_state = self._model['state_table'][curr_state] - self._curr_byte_pos += 1 - return self._curr_state - - def get_current_charlen(self): - return self._curr_char_len - - def get_coding_state_machine(self): - return self._model['name'] - - @property - def language(self): - return self._model['language'] diff --git a/pype/vendor/chardet/compat.py b/pype/vendor/chardet/compat.py deleted file mode 100644 index ddd74687c0..0000000000 --- a/pype/vendor/chardet/compat.py +++ /dev/null @@ -1,34 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Dan Blanchard -# Ian Cordasco -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - PY2 = True - PY3 = False - base_str = (str, unicode) - text_type = unicode -else: - PY2 = False - PY3 = True - base_str = (bytes, str) - text_type = str diff --git a/pype/vendor/chardet/cp949prober.py b/pype/vendor/chardet/cp949prober.py deleted file mode 100644 index efd793abca..0000000000 --- a/pype/vendor/chardet/cp949prober.py +++ /dev/null @@ -1,49 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .chardistribution import EUCKRDistributionAnalysis -from .codingstatemachine import CodingStateMachine -from .mbcharsetprober import MultiByteCharSetProber -from .mbcssm import CP949_SM_MODEL - - -class CP949Prober(MultiByteCharSetProber): - def __init__(self): - super(CP949Prober, self).__init__() - self.coding_sm = CodingStateMachine(CP949_SM_MODEL) - # NOTE: CP949 is a superset of EUC-KR, so the distribution should be - # not different. - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "CP949" - - @property - def language(self): - return "Korean" diff --git a/pype/vendor/chardet/enums.py b/pype/vendor/chardet/enums.py deleted file mode 100644 index 0451207225..0000000000 --- a/pype/vendor/chardet/enums.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -All of the Enums that are used throughout the chardet package. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - - -class InputState(object): - """ - This enum represents the different states a universal detector can be in. - """ - PURE_ASCII = 0 - ESC_ASCII = 1 - HIGH_BYTE = 2 - - -class LanguageFilter(object): - """ - This enum represents the different language filters we can apply to a - ``UniversalDetector``. - """ - CHINESE_SIMPLIFIED = 0x01 - CHINESE_TRADITIONAL = 0x02 - JAPANESE = 0x04 - KOREAN = 0x08 - NON_CJK = 0x10 - ALL = 0x1F - CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL - CJK = CHINESE | JAPANESE | KOREAN - - -class ProbingState(object): - """ - This enum represents the different states a prober can be in. - """ - DETECTING = 0 - FOUND_IT = 1 - NOT_ME = 2 - - -class MachineState(object): - """ - This enum represents the different states a state machine can be in. - """ - START = 0 - ERROR = 1 - ITS_ME = 2 - - -class SequenceLikelihood(object): - """ - This enum represents the likelihood of a character following the previous one. - """ - NEGATIVE = 0 - UNLIKELY = 1 - LIKELY = 2 - POSITIVE = 3 - - @classmethod - def get_num_categories(cls): - """:returns: The number of likelihood categories in the enum.""" - return 4 - - -class CharacterCategory(object): - """ - This enum represents the different categories language models for - ``SingleByteCharsetProber`` put characters into. - - Anything less than CONTROL is considered a letter. - """ - UNDEFINED = 255 - LINE_BREAK = 254 - SYMBOL = 253 - DIGIT = 252 - CONTROL = 251 diff --git a/pype/vendor/chardet/escprober.py b/pype/vendor/chardet/escprober.py deleted file mode 100644 index c70493f2b1..0000000000 --- a/pype/vendor/chardet/escprober.py +++ /dev/null @@ -1,101 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .enums import LanguageFilter, ProbingState, MachineState -from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, - ISO2022KR_SM_MODEL) - - -class EscCharSetProber(CharSetProber): - """ - This CharSetProber uses a "code scheme" approach for detecting encodings, - whereby easily recognizable escape or shift sequences are relied on to - identify these encodings. - """ - - def __init__(self, lang_filter=None): - super(EscCharSetProber, self).__init__(lang_filter=lang_filter) - self.coding_sm = [] - if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: - self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) - self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) - if self.lang_filter & LanguageFilter.JAPANESE: - self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) - if self.lang_filter & LanguageFilter.KOREAN: - self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) - self.active_sm_count = None - self._detected_charset = None - self._detected_language = None - self._state = None - self.reset() - - def reset(self): - super(EscCharSetProber, self).reset() - for coding_sm in self.coding_sm: - if not coding_sm: - continue - coding_sm.active = True - coding_sm.reset() - self.active_sm_count = len(self.coding_sm) - self._detected_charset = None - self._detected_language = None - - @property - def charset_name(self): - return self._detected_charset - - @property - def language(self): - return self._detected_language - - def get_confidence(self): - if self._detected_charset: - return 0.99 - else: - return 0.00 - - def feed(self, byte_str): - for c in byte_str: - for coding_sm in self.coding_sm: - if not coding_sm or not coding_sm.active: - continue - coding_state = coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - coding_sm.active = False - self.active_sm_count -= 1 - if self.active_sm_count <= 0: - self._state = ProbingState.NOT_ME - return self.state - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - self._detected_charset = coding_sm.get_coding_state_machine() - self._detected_language = coding_sm.language - return self.state - - return self.state diff --git a/pype/vendor/chardet/escsm.py b/pype/vendor/chardet/escsm.py deleted file mode 100644 index 0069523a04..0000000000 --- a/pype/vendor/chardet/escsm.py +++ /dev/null @@ -1,246 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
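(Editor's note: the "code scheme" idea behind the EscCharSetProber removed above amounts to this: certain byte sequences can only occur in one encoding, so seeing one is immediate proof. The real prober runs the HZ/ISO-2022 state machines defined below; the following is a much cruder sketch of the same idea, and the signature literals are illustrative, not what chardet matches on internally.)

# Crude illustration of escape/shift-sequence detection; the deleted
# prober does this with per-encoding state machines instead.
SIGNATURES = {
    b'\x1b$B': 'ISO-2022-JP',   # ESC $ B designates JIS X 0208
    b'\x1b$)C': 'ISO-2022-KR',  # ESC $ ) C designates KS X 1001
    b'~{': 'HZ-GB-2312',        # HZ shift-in sequence
}

def sniff_escape_encoding(data):
    for sig, name in SIGNATURES.items():
        if sig in data:
            return name
    return None

print(sniff_escape_encoding(b'\x1b$B...'))  # ISO-2022-JP
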
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -HZ_CLS = ( -1,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,0,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,4,0,5,2,0, # 78 - 7f -1,1,1,1,1,1,1,1, # 80 - 87 -1,1,1,1,1,1,1,1, # 88 - 8f -1,1,1,1,1,1,1,1, # 90 - 97 -1,1,1,1,1,1,1,1, # 98 - 9f -1,1,1,1,1,1,1,1, # a0 - a7 -1,1,1,1,1,1,1,1, # a8 - af -1,1,1,1,1,1,1,1, # b0 - b7 -1,1,1,1,1,1,1,1, # b8 - bf -1,1,1,1,1,1,1,1, # c0 - c7 -1,1,1,1,1,1,1,1, # c8 - cf -1,1,1,1,1,1,1,1, # d0 - d7 -1,1,1,1,1,1,1,1, # d8 - df -1,1,1,1,1,1,1,1, # e0 - e7 -1,1,1,1,1,1,1,1, # e8 - ef -1,1,1,1,1,1,1,1, # f0 - f7 -1,1,1,1,1,1,1,1, # f8 - ff -) - -HZ_ST = ( -MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 - 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f - 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 - 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f -) - -HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -HZ_SM_MODEL = {'class_table': HZ_CLS, - 'class_factor': 6, - 'state_table': HZ_ST, - 'char_len_table': HZ_CHAR_LEN_TABLE, - 'name': "HZ-GB-2312", - 'language': 'Chinese'} - -ISO2022CN_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,3,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,4,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 
- cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022CN_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 - 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f -) - -ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, - 'class_factor': 9, - 'state_table': ISO2022CN_ST, - 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, - 'name': "ISO-2022-CN", - 'language': 'Chinese'} - -ISO2022JP_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,2,2, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,7,0,0,0, # 20 - 27 -3,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -6,0,4,0,8,0,0,0, # 40 - 47 -0,9,5,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022JP_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f 
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 -) - -ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, - 'class_factor': 10, - 'state_table': ISO2022JP_ST, - 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, - 'name': "ISO-2022-JP", - 'language': 'Japanese'} - -ISO2022KR_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,3,0,0,0, # 20 - 27 -0,4,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,5,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022KR_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 -) - -ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, - 'class_factor': 6, - 'state_table': ISO2022KR_ST, - 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, - 'name': "ISO-2022-KR", - 'language': 'Korean'} - - diff --git a/pype/vendor/chardet/eucjpprober.py b/pype/vendor/chardet/eucjpprober.py deleted file mode 100644 index 20ce8f7d15..0000000000 --- a/pype/vendor/chardet/eucjpprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState, MachineState -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCJPDistributionAnalysis -from .jpcntx import EUCJPContextAnalysis -from .mbcssm import EUCJP_SM_MODEL - - -class EUCJPProber(MultiByteCharSetProber): - def __init__(self): - super(EUCJPProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) - self.distribution_analyzer = EUCJPDistributionAnalysis() - self.context_analyzer = EUCJPContextAnalysis() - self.reset() - - def reset(self): - super(EUCJPProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return "EUC-JP" - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char, char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/pype/vendor/chardet/euckrfreq.py b/pype/vendor/chardet/euckrfreq.py deleted file mode 100644 index b68078cb96..0000000000 --- a/pype/vendor/chardet/euckrfreq.py +++ /dev/null @@ -1,195 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
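(Editor's note on the EUCJPProber just removed: the multi-byte Japanese probers combine two independent signals, character-frequency distribution and two-character context, simply by taking the larger confidence, so either signal alone can carry the verdict. A short sketch; the stub analyzers are stand-ins for illustration only.)

# Sketch of the deleted EUCJPProber.get_confidence(); _Stub is an
# illustrative stand-in for the context/distribution analyzers.
class _Stub:
    def __init__(self, conf):
        self._conf = conf

    def get_confidence(self):
        return self._conf

def combined_confidence(context_analyzer, distribution_analyzer):
    return max(context_analyzer.get_confidence(),
               distribution_analyzer.get_confidence())

print(combined_confidence(_Stub(0.3), _Stub(0.9)))  # 0.9
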
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology - -# 128 --> 0.79 -# 256 --> 0.92 -# 512 --> 0.986 -# 1024 --> 0.99944 -# 2048 --> 0.99999 -# -# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 -# Random Distribution Ration = 512 / (2350-512) = 0.279. -# -# Typical Distribution Ratio - -EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 - -EUCKR_TABLE_SIZE = 2352 - -# Char to FreqOrder table , -EUCKR_CHAR_TO_FREQ_ORDER = ( - 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, -1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, -1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, - 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, - 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, - 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, -1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, - 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, - 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, -1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, -1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, -1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, -1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, -1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, - 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, -1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, -1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, -1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, -1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, - 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, -1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, - 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, - 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, -1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, - 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, -1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, - 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, - 0, 886,1274, 122, 575, 260, 
908, 938,1890,1275, 410, 316,1891,1892, 100,1893, -1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, -1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, -1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, -1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, - 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, -1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, - 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, - 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, -1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, -1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, -1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, -1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, -1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, -1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, - 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, - 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, - 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, -1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, - 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, -1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, - 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, - 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, -2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, - 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, - 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, -2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, -2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, -2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, - 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, - 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, -2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, - 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, -1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, -2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, -1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, -2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, -2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, -1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, - 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, -2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, -2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, - 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, - 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 
-2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, -1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, -2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, -2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, -2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, -2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, -2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, -2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, -1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, -2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, -2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, -2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, -2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, -2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, -1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, -1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, -2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, -1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, -2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, -1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, - 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, -2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, - 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, -2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, - 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, -2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, -2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, - 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, -2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, -1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, - 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, -1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, -2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, -1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, -2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, - 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, -2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, -1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, -2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, -1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, -2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, -1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, - 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, -2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 
608,2426,2427,2428,2429, 221, -2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, - 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, - 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, -1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, -1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, - 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, -2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, -2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, - 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, - 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, - 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, -2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, - 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, - 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, -2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, -2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, - 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, -2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, -1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, - 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, -2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, -2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, -2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, - 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, - 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, - 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, -2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, -2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, -2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, -1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, -2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, - 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 -) - diff --git a/pype/vendor/chardet/euckrprober.py b/pype/vendor/chardet/euckrprober.py deleted file mode 100644 index 345a060d02..0000000000 --- a/pype/vendor/chardet/euckrprober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCKRDistributionAnalysis -from .mbcssm import EUCKR_SM_MODEL - - -class EUCKRProber(MultiByteCharSetProber): - def __init__(self): - super(EUCKRProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-KR" - - @property - def language(self): - return "Korean" diff --git a/pype/vendor/chardet/euctwfreq.py b/pype/vendor/chardet/euctwfreq.py deleted file mode 100644 index ed7a995a3a..0000000000 --- a/pype/vendor/chardet/euctwfreq.py +++ /dev/null @@ -1,387 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# EUCTW frequency table
-# Converted from big5 work
-# by Taiwan's Mandarin Promotion Council
-#
-
-# 128 --> 0.42261
-# 256 --> 0.57851
-# 512 --> 0.74851
-# 1024 --> 0.89384
-# 2048 --> 0.97583
-#
-# Ideal Distribution Ratio = 0.74851 / (1 - 0.74851) = 2.98
-# Random Distribution Ratio = 512 / (5401 - 512) = 0.105
-#
-# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR

-EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75

-# Char to FreqOrder table
-EUCTW_TABLE_SIZE = 5376

-EUCTW_CHAR_TO_FREQ_ORDER = (
- 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
-3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
-1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
- 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
-3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
-4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
-7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
- 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
- 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
- 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
-2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
-1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
-3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
- 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
-1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
-3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
-2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
- 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
-3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
-1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
-7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
- 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
-7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
-1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
- 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
- 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
-3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
-3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
- 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
-2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
-2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
- 314,2615,2775,4308,2330,2331,
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 - 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 -3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 -1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 -1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 -1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 -2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 - 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 -4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 -1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 -7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 -2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 - 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 - 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 - 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 - 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 -7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 - 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 -1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 - 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 - 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 -7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 -1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 - 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 -3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 -4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 -3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 - 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 - 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 -1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 -4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 -3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 -3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 -2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 -7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 -3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 -7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 -1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 -2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 -1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 - 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 -1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 -4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 -3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 - 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 - 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 - 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 -2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 -7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 -1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 -2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 -1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 -1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 -7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 -7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 -7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 -3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 -4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 -1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 -7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 -2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 -7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 -3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 -3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 -7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 -2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 -7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 - 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 -4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 -2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 -7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 -3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 -2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 -2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 - 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 -2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 -1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 -1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 -2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 -1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 -7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 -7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 -2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 -4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 -1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 -7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 - 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 -4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 - 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 -2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 - 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 -1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 -1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 - 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 -3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 -3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 -1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 -3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 -7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 -7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 -1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 -2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 -1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 -3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 -2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 -3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 -2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 -4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 -4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 -3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 - 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 -3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 - 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 -3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 -3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 -3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 -1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 -7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 - 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 -7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 -1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 - 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 -4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 -3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 - 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 -2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 -2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 -3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 -1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 -4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 -2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 -1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 -1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 -2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 -3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 -1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 -7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 -1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 -4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 -1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 - 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 -1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 -3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 -3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 -2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 -1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 -4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 - 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 -7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 -2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 -3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 -4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 - 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 -7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 -7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 -1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 -4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 -3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 -2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 -3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 -2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 -1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 -4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 -3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 -3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 -2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 -4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 -7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 -3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 -2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 -3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 -1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 -2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 -3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 -4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 -2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 -2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 -7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 -1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 -2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 -1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 -3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 -4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 -2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 -3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 -3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 -2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 -4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 -2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 -3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 -4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 -7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 -3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 - 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 -1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 -4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 -1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 -4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 -7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 - 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 -7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 -2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 -1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 -1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 -3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 - 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 - 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 - 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 -3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 -2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 - 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 -7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 -1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 -3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 -7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 -1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 -7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 -4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 -1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 -2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 -2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 -4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 - 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 - 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 -3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 -3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 -1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 -2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 -7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 -1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 -1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 -3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 - 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 -1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 -4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 -7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 -2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 -3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 - 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 -1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 -2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 -2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 -7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 -7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 -7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 -2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 -2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 -1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 -4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 -3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 -3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 -4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 -4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 -2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 -2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 -7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 -4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 -7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 -2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 -1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 -3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 -4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 -2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 - 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 -2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 -1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 -2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 -2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 -4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 -7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 -1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 -3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 -7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 -1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 -8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 -2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 -8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 -2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 -2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 -8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 -8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 -8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 - 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 -8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 -4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 -3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 -8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 -1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 -8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 - 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 -1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 - 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 -4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 -1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 -4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 -1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 - 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 -3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 -4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 -8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 - 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 -3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 - 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 -2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 -) - diff --git a/pype/vendor/chardet/euctwprober.py b/pype/vendor/chardet/euctwprober.py deleted file mode 100644 index 35669cc4dd..0000000000 --- a/pype/vendor/chardet/euctwprober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCTWDistributionAnalysis -from .mbcssm import EUCTW_SM_MODEL - -class EUCTWProber(MultiByteCharSetProber): - def __init__(self): - super(EUCTWProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) - self.distribution_analyzer = EUCTWDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-TW" - - @property - def language(self): - return "Taiwan" diff --git a/pype/vendor/chardet/gb2312freq.py b/pype/vendor/chardet/gb2312freq.py deleted file mode 100644 index 697837bd9a..0000000000 --- a/pype/vendor/chardet/gb2312freq.py +++ /dev/null @@ -1,283 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# GB2312 most frequently used character table
-#
-# Char to FreqOrder table, from hz6763

-# 512 --> 0.79 -- 0.79
-# 1024 --> 0.92 -- 0.13
-# 2048 --> 0.98 -- 0.06
-# 6768 --> 1.00 -- 0.02
-#
-# Ideal Distribution Ratio = 0.79135 / (1 - 0.79135) = 3.79
-# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
-#
-# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR

-GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9

-GB2312_TABLE_SIZE = 3760

-GB2312_CHAR_TO_FREQ_ORDER = (
-1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
-2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
-2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
- 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
-1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
-1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
- 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
-1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
-2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
-3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
- 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
-1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
- 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
-2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
- 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
-2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
-1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
-3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
- 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
-1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
- 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
-2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
-1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
-3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
-1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
-2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
-1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
- 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
-3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
-3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
- 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
-3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
- 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
-1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291,
110,4549,2066, 648, -3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, -2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, -1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, - 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, -1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, -4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, - 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, -3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, -3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, - 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, -1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, -2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, -1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, -1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, - 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, -3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, -3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, -4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, - 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, -3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, -1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, -1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, -4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, - 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, - 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, -3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, -1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, - 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, -1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, -2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, - 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, - 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, - 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, -3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, -4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, -3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, - 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, -2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, -2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, -2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, - 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, -2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, - 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, - 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, - 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, -3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, -2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, -2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, -1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, - 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, -2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, - 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, - 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, -1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, -1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, - 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, - 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, -1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, -2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, -3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, -2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, -2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, -2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, -3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, -1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, -1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, -2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, -1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, -3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, -1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, -1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, -3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, - 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, -2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, -1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, -4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, -1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, -1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, -3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, -1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, - 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, - 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, -1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, - 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, -1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, -1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, - 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, -3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, -4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, -3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, -2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, -2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, -1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, -3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, -2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, -1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, -1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, - 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, -2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, -2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, -3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, -4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, -3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, - 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, -3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, -2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, -1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, - 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, - 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, -3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, -4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, -2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, -1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, -1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, - 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, -1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, -3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, - 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, - 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, -1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, - 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, -1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, - 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, -2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, - 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, -2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, -2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, -1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, -1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, -2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, - 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, -1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, -1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, -2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, -2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, -3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, -1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, -4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, - 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, - 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, -3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, -1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, - 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, -3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, -1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, -4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, -1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, -2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, -1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, - 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, -1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, -3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, - 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, -2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, - 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, -1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, -1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, -1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, -3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, -2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, -3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, -3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, -3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, - 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, -2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, - 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, -2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, - 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, -1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, - 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, - 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, -1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, -3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, -3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, -1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, -1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, -3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, -2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, -2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, -1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, -3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, - 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, -4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, -1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, -2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, -3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, -3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, -1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, - 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, - 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, -2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, - 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, -1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, - 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, -1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, -1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, -1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, -1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, -1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, - 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, - 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 -) - diff --git a/pype/vendor/chardet/gb2312prober.py b/pype/vendor/chardet/gb2312prober.py deleted file mode 100644 index 8446d2dd95..0000000000 --- a/pype/vendor/chardet/gb2312prober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import GB2312DistributionAnalysis
-from .mbcssm import GB2312_SM_MODEL
-
-class GB2312Prober(MultiByteCharSetProber):
-    def __init__(self):
-        super(GB2312Prober, self).__init__()
-        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
-        self.distribution_analyzer = GB2312DistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "GB2312"
-
-    @property
-    def language(self):
-        return "Chinese"
diff --git a/pype/vendor/chardet/hebrewprober.py b/pype/vendor/chardet/hebrewprober.py
deleted file mode 100644
index b0e1bf4926..0000000000
--- a/pype/vendor/chardet/hebrewprober.py
+++ /dev/null
@@ -1,292 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Shy Shalom
-# Portions created by the Initial Developer are Copyright (C) 2005
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-# Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import ProbingState
-
-# This prober doesn't actually recognize a language or a charset.
-# It is a helper prober for the use of the Hebrew model probers
-
-### General ideas of the Hebrew charset recognition ###
-#
-# Four main charsets exist in Hebrew:
-# "ISO-8859-8" - Visual Hebrew
-# "windows-1255" - Logical Hebrew
-# "ISO-8859-8-I" - Logical Hebrew
-# "x-mac-hebrew" - ?? Logical Hebrew ??
-#
-# Both "ISO" charsets use a completely identical set of code points, whereas
-# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
-# these code points. windows-1255 defines additional characters in the range
-# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
-# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
-# x-mac-hebrew defines similar additional code points but with a different
-# mapping.
-#
-# As far as an average Hebrew text with no diacritics is concerned, all four
-# charsets are identical with respect to code points. Meaning that for the
-# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
-# (including final letters).
-#
-# The dominant difference between these charsets is their directionality.
-# "Visual" directionality means that the text is ordered as if the renderer is
-# not aware of a BIDI rendering algorithm. The renderer sees the text and
-# draws it from left to right. The text itself when ordered naturally is read
-# backwards. A buffer of Visual Hebrew generally looks like so:
-# "[last word of first line spelled backwards] [whole line ordered backwards
-# and spelled backwards] [first word of first line spelled backwards]
-# [end of line] [last word of second line] ... etc' "
-# adding punctuation marks, numbers and English text to visual text is
-# naturally also "visual" and from left to right.
-#
-# "Logical" directionality means the text is ordered "naturally" according to
-# the order it is read. It is the responsibility of the renderer to display
-# the text from right to left. A BIDI algorithm is used to place general
-# punctuation marks, numbers and English text in the text.
-#
-# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
-# what little evidence I could find, it seems that its general directionality
-# is Logical.
-#
-# To sum up all of the above, the Hebrew probing mechanism knows about two
-# charsets:
-# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
-#    backwards while line order is natural. For charset recognition purposes
-#    the line order is unimportant (In fact, for this implementation, even
-#    word order is unimportant).
-# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
-#
-# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
-# specifically identified.
-# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
-# that contains special punctuation marks or diacritics is displayed with
-# some unconverted characters showing as question marks. This problem might
-# be corrected using another model prober for x-mac-hebrew. Due to the fact
-# that x-mac-hebrew texts are so rare, writing another model prober isn't
-# worth the effort and performance hit.
-#
-#### The Prober ####
-#
-# The prober is divided between two SBCharSetProbers and a HebrewProber,
-# all of which are managed, created, fed data, inquired and deleted by the
-# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
-# fact some kind of Hebrew, Logical or Visual. The final decision about which
-# one it is is made by the HebrewProber by combining final-letter scores
-# with the scores of the two SBCharSetProbers to produce a final answer.
-#
-# The SBCSGroupProber is responsible for stripping the original text of HTML
-# tags, English characters, numbers, low-ASCII punctuation characters, spaces
-# and new lines. It reduces any sequence of such characters to a single space.
-# The buffer fed to each prober in the SBCS group prober is pure text in
-# high-ASCII.
-# The two SBCharSetProbers (model probers) share the same language model:
-# Win1255Model.
-# The first SBCharSetProber uses the model normally as any other
-# SBCharSetProber does, to recognize windows-1255, upon which this model was
-# built. The second SBCharSetProber is told to make the pair-of-letter
-# lookup in the language model backwards. This in practice exactly simulates
-# a visual Hebrew model using the windows-1255 logical Hebrew model.
-#
-# The HebrewProber is not using any language model. All it does is look for
-# final-letter evidence suggesting the text is either logical Hebrew or visual
-# Hebrew.
-# Disjointed from the model probers, the results of the HebrewProber
-# alone are meaningless. HebrewProber always returns 0.00 as confidence
-# since it never identifies a charset by itself. Instead, the pointer to the
-# HebrewProber is passed to the model probers as a helper "Name Prober".
-# When the Group prober receives a positive identification from any prober,
-# it asks for the name of the charset identified. If the prober queried is a
-# Hebrew model prober, the model prober forwards the call to the
-# HebrewProber to make the final decision. In the HebrewProber, the
-# decision is made according to the final-letter scores maintained and both
-# model probers' scores. The answer is returned in the form of the name of the
-# charset identified, either "windows-1255" or "ISO-8859-8".
-
-class HebrewProber(CharSetProber):
-    # windows-1255 / ISO-8859-8 code points of interest
-    FINAL_KAF = 0xea
-    NORMAL_KAF = 0xeb
-    FINAL_MEM = 0xed
-    NORMAL_MEM = 0xee
-    FINAL_NUN = 0xef
-    NORMAL_NUN = 0xf0
-    FINAL_PE = 0xf3
-    NORMAL_PE = 0xf4
-    FINAL_TSADI = 0xf5
-    NORMAL_TSADI = 0xf6
-
-    # Minimum Visual vs Logical final letter score difference.
-    # If the difference is below this, don't rely solely on the final letter
-    # score distance.
-    MIN_FINAL_CHAR_DISTANCE = 5
-
-    # Minimum Visual vs Logical model score difference.
-    # If the difference is below this, don't rely at all on the model score
-    # distance.
-    MIN_MODEL_DISTANCE = 0.01
-
-    VISUAL_HEBREW_NAME = "ISO-8859-8"
-    LOGICAL_HEBREW_NAME = "windows-1255"
-
-    def __init__(self):
-        super(HebrewProber, self).__init__()
-        self._final_char_logical_score = None
-        self._final_char_visual_score = None
-        self._prev = None
-        self._before_prev = None
-        self._logical_prober = None
-        self._visual_prober = None
-        self.reset()
-
-    def reset(self):
-        self._final_char_logical_score = 0
-        self._final_char_visual_score = 0
-        # The two last characters seen in the previous buffer,
-        # mPrev and mBeforePrev are initialized to space in order to simulate
-        # a word delimiter at the beginning of the data
-        self._prev = ' '
-        self._before_prev = ' '
-        # These probers are owned by the group prober.
-
-    def set_model_probers(self, logicalProber, visualProber):
-        self._logical_prober = logicalProber
-        self._visual_prober = visualProber
-
-    def is_final(self, c):
-        return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
-                     self.FINAL_PE, self.FINAL_TSADI]
-
-    def is_non_final(self, c):
-        # The normal Tsadi is not a good Non-Final letter due to words like
-        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
-        # apostrophe is converted to a space in FilterWithoutEnglishLetters
-        # causing the Non-Final tsadi to appear at an end of a word even
-        # though this is not the case in the original text.
-        # The letters Pe and Kaf rarely display a related behavior of not being
-        # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
-        # for example legally end with a Non-Final Pe or Kaf. However, the
-        # benefit of these letters as Non-Final letters outweighs the damage
-        # since these words are quite rare.
-        return c in [self.NORMAL_KAF, self.NORMAL_MEM,
-                     self.NORMAL_NUN, self.NORMAL_PE]
-
-    def feed(self, byte_str):
-        # Final letter analysis for logical-visual decision.
-        # Look for evidence that the received buffer is either logical Hebrew
-        # or visual Hebrew.
-        # The following cases are checked:
-        # 1) A word longer than 1 letter, ending with a final letter.
-        #    This is an indication that the text is laid out "naturally"
-        #    since the final letter really appears at the end. +1 for
-        #    logical score.
-        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
-        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
-        #    should not end with the Non-Final form of that letter. Exceptions
-        #    to this rule are mentioned above in isNonFinal(). This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score
-        # 3) A word longer than 1 letter, starting with a final letter. Final
-        #    letters should not appear at the beginning of a word. This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        #
-        # The visual score and logical score are accumulated throughout the
-        # text and are finally checked against each other in GetCharSetName().
-        # No checking for final letters in the middle of words is done since
-        # that case is not an indication for either Logical or Visual text.
-        #
-        # We automatically filter out all 7-bit characters (replace them with
-        # spaces) so the word boundary detection works properly. [MAP]
-
-        if self.state == ProbingState.NOT_ME:
-            # Both model probers say it's not them. No reason to continue.
-            return ProbingState.NOT_ME
-
-        byte_str = self.filter_high_byte_only(byte_str)
-
-        for cur in byte_str:
-            if cur == ' ':
-                # We stand on a space - a word just ended
-                if self._before_prev != ' ':
-                    # next-to-last char was not a space so self._prev is not a
-                    # 1 letter word
-                    if self.is_final(self._prev):
-                        # case (1) [-2:not space][-1:final letter][cur:space]
-                        self._final_char_logical_score += 1
-                    elif self.is_non_final(self._prev):
-                        # case (2) [-2:not space][-1:Non-Final letter][
-                        # cur:space]
-                        self._final_char_visual_score += 1
-            else:
-                # Not standing on a space
-                if ((self._before_prev == ' ') and
-                        (self.is_final(self._prev)) and (cur != ' ')):
-                    # case (3) [-2:space][-1:final letter][cur:not space]
-                    self._final_char_visual_score += 1
-            self._before_prev = self._prev
-            self._prev = cur
-
-        # Forever detecting, till the end or until both model probers return
-        # ProbingState.NOT_ME (handled above)
-        return ProbingState.DETECTING
-
-    @property
-    def charset_name(self):
-        # Make the decision: is it Logical or Visual?
-        # If the final letter score distance is dominant enough, rely on it.
-        finalsub = self._final_char_logical_score - self._final_char_visual_score
-        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
-            return self.LOGICAL_HEBREW_NAME
-        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
-            return self.VISUAL_HEBREW_NAME
-
-        # It's not dominant enough, try to rely on the model scores instead.
-        modelsub = (self._logical_prober.get_confidence() -
-                    self._visual_prober.get_confidence())
-        if modelsub > self.MIN_MODEL_DISTANCE:
-            return self.LOGICAL_HEBREW_NAME
-        if modelsub < -self.MIN_MODEL_DISTANCE:
-            return self.VISUAL_HEBREW_NAME
-
-        # Still no good, back to final letter distance, maybe it'll save the
-        # day.
-        if finalsub < 0.0:
-            return self.VISUAL_HEBREW_NAME
-
-        # (finalsub > 0 - Logical) or (don't know what to do) default to
-        # Logical.
-        return self.LOGICAL_HEBREW_NAME
-
-    @property
-    def language(self):
-        return 'Hebrew'
-
-    @property
-    def state(self):
-        # Remain active as long as any of the model probers are active.
-        if (self._logical_prober.state == ProbingState.NOT_ME) and \
-                (self._visual_prober.state == ProbingState.NOT_ME):
-            return ProbingState.NOT_ME
-        return ProbingState.DETECTING
diff --git a/pype/vendor/chardet/jisfreq.py b/pype/vendor/chardet/jisfreq.py
deleted file mode 100644
index 83fc082b54..0000000000
--- a/pype/vendor/chardet/jisfreq.py
+++ /dev/null
@@ -1,325 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-# Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M text materials, including literature and computer technology
-#
-# Japanese frequency table, applied to both S-JIS and EUC-JP
-# They are sorted in order.
-
-# 128 --> 0.77094
-# 256 --> 0.85710
-# 512 --> 0.92635
-# 1024 --> 0.97130
-# 2048 --> 0.99431
-#
-# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
-# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
-#
-# Typical Distribution Ratio, 25% of IDR
-
-JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
-
-# Char to FreqOrder table
-JIS_TABLE_SIZE = 4368
-
-JIS_CHAR_TO_FREQ_ORDER = (
- 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
-3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
-1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
-2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
-2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
-5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
-1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
-5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
-5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
-5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
-5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
-5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
-5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
-1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
-1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
-1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
-2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 -3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 -3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 - 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 - 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 -1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 - 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 -5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 - 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 - 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 - 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 - 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 - 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 -5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 -5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 -5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 -4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 -5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 -5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 -5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 -5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 -5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 -5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 -5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 -5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 -5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 -3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 -5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 -5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 -5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 -5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 -5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 -5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 -5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 -5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 -5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 -5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 -5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 -5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 -5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 
-5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 -5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 -5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 -5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 -5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 -5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 -5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 -5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 -5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 -5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 -5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 -5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 -5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 -5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 -5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 -5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 -5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 -5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 -5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 -5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 -5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 -5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 -5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 -6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 -6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 -6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 -6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 -6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 -6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 -6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 -6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 -4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 - 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 - 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 -1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 -1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 - 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 -3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 -3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 
- 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 -3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 -3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 - 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 -2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 - 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 -3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 -1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 - 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 -1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 - 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 -2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 -2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 -2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 -2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 -1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 -1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 -1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 -1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 -2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 -1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 -2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 -1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 -1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 -1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 -1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 -1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 -1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 - 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 - 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 -1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 -2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 -2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 -2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 -3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 -3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 - 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 -3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 -1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 - 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 
2192 -2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 -1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 - 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 -3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 -4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 -2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 -1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 -2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 -1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 - 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 - 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 -1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 -2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 -2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 -2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 -3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 -1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 -2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 - 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 - 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 - 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 -1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 -2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 - 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 -1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 -1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 - 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 -1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 -1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 -1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 - 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 -2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 - 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 -2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 -3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 -2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 -1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 -6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 -1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 -2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 
2832 -1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 - 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 - 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 -3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 -3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 -1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 -1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 -1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 -1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 - 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 - 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 -2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 - 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 -3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 -2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 - 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 -1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 -2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 - 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 -1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 - 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 -4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 -2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 -1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 - 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 -1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 -2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 - 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 -6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 -1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 -1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 -2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 -3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 - 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 -3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 -1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 - 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 -1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 - 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 -3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 
198,3040,2722, # 3472 - 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 -2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 - 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 -4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 -2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 -1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 -1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 -1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 - 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 -1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 -3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 -1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 -3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 - 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 - 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 - 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 -2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 -1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 - 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 -1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 - 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 -1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 - 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 - 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 - 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 -1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 -1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 -2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 -4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 - 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 -1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 - 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 -1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 -3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 -1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 -2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 -2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 -1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 -1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 -2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 
915, 189,1680,1551, # 4112 - 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 -2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 -1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 -1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 -1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 -1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 -3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 -2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 -2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 - 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 -3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 -3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 -1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 -2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 -1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 -2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 -) - - diff --git a/pype/vendor/chardet/jpcntx.py b/pype/vendor/chardet/jpcntx.py deleted file mode 100644 index 20044e4bc8..0000000000 --- a/pype/vendor/chardet/jpcntx.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -# This is hiragana 2-char sequence table, the number in each cell represents its frequency category -jp2CharContext = ( -(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), -(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), -(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), -(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), -(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), -(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), -(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), -(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), -(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), -(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), -(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), -(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), 
-(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), -(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), -(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), -(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), -(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), -(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), -(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), -(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), -(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), -(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), -(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), -(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), -(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), -(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), -(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), -(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), -(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), -(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), -(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), -(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), 
-(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), -(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), -(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), -(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), -(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), -(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), -(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), -(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), -(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), -(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), -(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), -(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), -(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), -(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), -(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), -(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), -(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), -(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), -(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), -(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), 
-(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), -(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), -(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), -(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), -(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), -(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), -(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), -(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), -(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), -(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), -(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), -(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), -(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), -(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), -(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), -(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), -(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), -(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), -(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), 
-(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
-(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
-(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
-(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
-)
-
-class JapaneseContextAnalysis(object):
-    NUM_OF_CATEGORY = 6
-    DONT_KNOW = -1
-    ENOUGH_REL_THRESHOLD = 100
-    MAX_REL_THRESHOLD = 1000
-    MINIMUM_DATA_THRESHOLD = 4
-
-    def __init__(self):
-        self._total_rel = None
-        self._rel_sample = None
-        self._need_to_skip_char_num = None
-        self._last_char_order = None
-        self._done = None
-        self.reset()
-
-    def reset(self):
-        self._total_rel = 0  # total sequences received
-        # category counters; each integer counts the sequences in its category
-        self._rel_sample = [0] * self.NUM_OF_CATEGORY
-        # if the last byte in the current buffer is not the last byte of a
-        # character, we need to know how many bytes to skip in the next buffer
-        self._need_to_skip_char_num = 0
-        self._last_char_order = -1  # The order of the previous char
-        # If this flag is set to True, detection is done and a conclusion has
-        # been made
-        self._done = False
-
-    def feed(self, byte_str, num_bytes):
-        if self._done:
-            return
-
-        # The buffer we received is byte oriented, and a character may span
-        # more than one buffer. If the last one or two bytes of the previous
-        # buffer were not a complete character, we record how many bytes are
-        # needed to complete it and skip them here. We could record those
-        # bytes and analyse the character once it is complete, but a single
-        # character makes little difference, so simply skipping it
-        # simplifies our logic and improves performance.
-        i = self._need_to_skip_char_num
-        while i < num_bytes:
-            order, char_len = self.get_order(byte_str[i:i + 2])
-            i += char_len
-            if i > num_bytes:
-                self._need_to_skip_char_num = i - num_bytes
-                self._last_char_order = -1
-            else:
-                if (order != -1) and (self._last_char_order != -1):
-                    self._total_rel += 1
-                    if self._total_rel > self.MAX_REL_THRESHOLD:
-                        self._done = True
-                        break
-                    self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1
-                self._last_char_order = order
-
-    def got_enough_data(self):
-        return self._total_rel > self.ENOUGH_REL_THRESHOLD
-
-    def get_confidence(self):
-        # This is just one way to calculate confidence. It works well for me.
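-        # Editor's illustration (hypothetical numbers): _rel_sample[0] counts
-        # the pairs in category 0, i.e. sequences the table says should not
-        # occur, so with _total_rel = 200 observed pairs of which 40 fell
-        # into category 0, the confidence is (200 - 40) / 200 = 0.8.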
-        if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
-            return (self._total_rel - self._rel_sample[0]) / self._total_rel
-        else:
-            return self.DONT_KNOW
-
-    def get_order(self, byte_str):
-        return -1, 1
-
-class SJISContextAnalysis(JapaneseContextAnalysis):
-    def __init__(self):
-        super(SJISContextAnalysis, self).__init__()
-        self._charset_name = "SHIFT_JIS"
-
-    @property
-    def charset_name(self):
-        return self._charset_name
-
-    def get_order(self, byte_str):
-        if not byte_str:
-            return -1, 1
-        # find out current char's byte length
-        first_char = byte_str[0]
-        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
-            char_len = 2
-            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
-                self._charset_name = "CP932"
-        else:
-            char_len = 1
-
-        # return its order if it is hiragana (202 == 0xCA; the Shift_JIS
-        # hiragana lead byte is 0x82, so this check looks like a quirk
-        # inherited from the original port)
-        if len(byte_str) > 1:
-            second_char = byte_str[1]
-            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
-                return second_char - 0x9F, char_len
-
-        return -1, char_len
-
-class EUCJPContextAnalysis(JapaneseContextAnalysis):
-    def get_order(self, byte_str):
-        if not byte_str:
-            return -1, 1
-        # find out current char's byte length
-        # (0x8E is the SS2 prefix of a two-byte sequence, 0x8F the SS3
-        # prefix of a three-byte sequence)
-        first_char = byte_str[0]
-        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
-            char_len = 2
-        elif first_char == 0x8F:
-            char_len = 3
-        else:
-            char_len = 1
-
-        # return its order if it is hiragana (0xA4 is the EUC-JP hiragana
-        # lead byte)
-        if len(byte_str) > 1:
-            second_char = byte_str[1]
-            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
-                return second_char - 0xA1, char_len
-
-        return -1, char_len
-
-
diff --git a/pype/vendor/chardet/langbulgarianmodel.py b/pype/vendor/chardet/langbulgarianmodel.py
deleted file mode 100644
index 2aa4fb2e22..0000000000
--- a/pype/vendor/chardet/langbulgarianmodel.py
+++ /dev/null
@@ -1,228 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -# this table is modified base on win1251BulgarianCharToOrderMap, so -# only number <64 is sure valid - -Latin5_BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 -210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 - 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 - 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 -) - -win1251BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 -221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 - 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 - 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 96.9392% -# first 1024 sequences:3.0618% -# rest sequences: 0.2992% -# negative sequences: 0.0020% -BulgarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, -3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, -0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, -0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, -0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, -0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, -0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, -2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, -3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, -1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, -3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, -1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, -2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, -2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, -3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, -1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, -2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, -2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, -1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, -2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, -2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, -2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, -1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, -2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, -1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, -3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, -1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, -3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, -1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, -2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, -1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, -2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, -1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, -2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, -1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, -2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, -1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, -0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, -1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, -1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, -1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, -0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
-1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
-1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
-1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-)
-
-Latin5BulgarianModel = {
-    'char_to_order_map': Latin5_BulgarianCharToOrderMap,
-    'precedence_matrix': BulgarianLangModel,
-    'typical_positive_ratio': 0.969392,
-    'keep_english_letter': False,
-    'charset_name': "ISO-8859-5",
-    'language': 'Bulgarian',
-}
-
-Win1251BulgarianModel = {
-    'char_to_order_map': win1251BulgarianCharToOrderMap,
-    'precedence_matrix': BulgarianLangModel,
-    'typical_positive_ratio': 0.969392,
-    'keep_english_letter': False,
-    'charset_name': "windows-1251",
-    'language': 'Bulgarian',
-}
diff --git a/pype/vendor/chardet/langcyrillicmodel.py b/pype/vendor/chardet/langcyrillicmodel.py
deleted file mode 100644
index e5f9a1fd19..0000000000
--- a/pype/vendor/chardet/langcyrillicmodel.py
+++ /dev/null
@@ -1,333 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# KOI8-R language model -# Character Mapping Table: -KOI8R_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 -223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 -238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 - 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 - 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 - 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 - 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 -) - -win1251_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -) - -latin5_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -macCyrillic_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, -) - -IBM855_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, -206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, - 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, -220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, -230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, - 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, - 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, -250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, -) - -IBM866_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 97.6601% -# first 1024 sequences: 2.3389% -# rest sequences: 0.1237% -# negative sequences: 0.0009% -RussianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, -1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, -1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, -2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, -1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, -3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, -1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, -2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, -1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, -1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, -1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, -1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, -3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, -1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, -2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, -1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, -2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, -1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, -1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, -1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, -3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, -3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, -1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, -1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, -0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, -1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, -1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, -0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, -1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, -2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, -1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, -1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, -2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, -1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, -1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, -1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, -0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, -0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, -0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, -2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, -0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -) - -Koi8rModel = { - 'char_to_order_map': KOI8R_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "KOI8-R", - 'language': 'Russian', -} - -Win1251CyrillicModel = { - 'char_to_order_map': win1251_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Russian', -} - -Latin5CyrillicModel = { - 'char_to_order_map': latin5_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Russian', -} - -MacCyrillicModel = { - 'char_to_order_map': macCyrillic_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "MacCyrillic", - 'language': 'Russian', -} - -Ibm866Model = { - 'char_to_order_map': IBM866_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM866", - 'language': 'Russian', -} - -Ibm855Model = { - 'char_to_order_map': IBM855_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM855", - 'language': 'Russian', -} diff --git a/pype/vendor/chardet/langgreekmodel.py b/pype/vendor/chardet/langgreekmodel.py deleted file mode 100644 index 533222166c..0000000000 --- a/pype/vendor/chardet/langgreekmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. 
-# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin7_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -win1253_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 
sequences: 98.2851% -# first 1024 sequences:1.7001% -# rest sequences: 0.0359% -# negative sequences: 0.0148% -GreekLangModel = ( -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, -2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, -2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, -2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, -0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, -3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, -2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, -0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, -0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, -0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, -0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, -0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, -0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, -0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, -0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, -0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, -0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, -0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, -0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, -0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, -0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
-0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, -0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, -0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, -0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, -0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin7GreekModel = { - 'char_to_order_map': Latin7_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-7", - 'language': 'Greek', -} - -Win1253GreekModel = { - 'char_to_order_map': win1253_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "windows-1253", - 'language': 'Greek', -} diff --git a/pype/vendor/chardet/langhebrewmodel.py b/pype/vendor/chardet/langhebrewmodel.py deleted file mode 100644 index 58f4c875ec..0000000000 --- a/pype/vendor/chardet/langhebrewmodel.py +++ /dev/null @@ -1,200 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Simon Montagu -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Shoshannah Forbes - original C code (?) -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Windows-1255 language model -# Character Mapping Table: -WIN1255_CHAR_TO_ORDER_MAP = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 - 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 -253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 - 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 -124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, -215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, - 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, -106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, - 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, -238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, - 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, - 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.4004% -# first 1024 sequences: 1.5981% -# rest sequences: 0.087% -# negative sequences: 0.0015% -HEBREW_LANG_MODEL = ( -0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, -3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, -1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, -1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, -1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, -1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, -0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, -0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, -0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, -0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, -0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, -0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, -0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, -0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, -0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, -0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, -0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, -0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, -0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, -1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, -1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, -2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, -0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, -0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, -) - -Win1255HebrewModel = { - 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, - 'precedence_matrix': HEBREW_LANG_MODEL, - 'typical_positive_ratio': 0.984004, - 'keep_english_letter': False, - 'charset_name': "windows-1255", - 'language': 'Hebrew', -} diff --git a/pype/vendor/chardet/langhungarianmodel.py 
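The Hebrew model above has the same shape as every single-byte model deleted in this diff: a 256-entry char_to_order_map that buckets raw bytes by frequency rank (255 for control characters, 254 for carriage return, 253 for punctuation, 252 for digits, low numbers for the most frequent letters) and a precedence_matrix of bigram likelihoods. As a rough illustration of how such a model is consumed, here is a simplified sketch. It is not the SingleByteCharSetProber that chardet actually pairs these tables with; SAMPLE_SIZE and the treatment of 3 as "very likely" are assumptions inferred from the 64 x 64 matrix layout (printed 32 values per line) and the 0-3 scale commented in latin1prober.py further down.

SAMPLE_SIZE = 64  # assumption: only the 64 most frequent characters index the matrix

def sequence_ratio(byte_str, model):
    """Fraction of adjacent character pairs the model rates 'very likely'.

    Illustrative sketch only, not the deleted chardet prober.
    """
    likely = total = 0
    last_order = 255
    for byte in byte_str:
        order = model['char_to_order_map'][byte]
        if order < SAMPLE_SIZE and last_order < SAMPLE_SIZE:
            # matrix values: 0 negative, 1 unlikely, 2 normal, 3 very likely
            rating = model['precedence_matrix'][last_order * SAMPLE_SIZE + order]
            total += 1
            if rating == 3:
                likely += 1
        last_order = order
    return likely / total if total else 0.0

Text that really is windows-1255 should score somewhere near the model's typical_positive_ratio of 0.984004; a markedly lower ratio points at a different charset.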
b/pype/vendor/chardet/langhungarianmodel.py deleted file mode 100644 index bb7c095e1e..0000000000 --- a/pype/vendor/chardet/langhungarianmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin2_HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, -175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, - 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, - 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, -245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -win1250HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, -177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, - 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 83,222, 
80,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, - 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, -245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 94.7368% -# first 1024 sequences:5.2623% -# rest sequences: 0.8894% -# negative sequences: 0.0009% -HungarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, -3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, -0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, -1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, -1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, -3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, -2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, -2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, -2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, -2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, -1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, -1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, -3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, -1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, -1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, -2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, -2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, -2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, -3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, -1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, -1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, -1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, -2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, -1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, -2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, -2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, 
-1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, -1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, -1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, -0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, -2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, -2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, -1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, -1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, -2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, -2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, -2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, -1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, -0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -) - -Latin2HungarianModel = { - 'char_to_order_map': Latin2_HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-2", - 'language': 'Hungarian', -} - -Win1250HungarianModel = { - 'char_to_order_map': win1250HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "windows-1250", - 'language': 'Hungarian', -} diff --git a/pype/vendor/chardet/langthaimodel.py b/pype/vendor/chardet/langthaimodel.py deleted file mode 100644 index 15f94c2df0..0000000000 --- a/pype/vendor/chardet/langthaimodel.py +++ /dev/null @@ -1,199 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
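A detail that is easy to miss across these deleted models: typical_positive_ratio is the "first 512 sequences" figure from each file's Model Table comment, written as a fraction (94.7368% becomes 0.947368 for Hungarian, 98.4004% becomes 0.984004 for Hebrew). It records how much of the language's observed bigram mass the 512 most frequent sequences cover, which is presumably what the prober's confidence calculation normalizes against. A hypothetical sanity check, with the numbers copied from this diff:

# Values transcribed from the Model Table comments and model dicts above.
COVERAGE = {
    'Hebrew': ('98.4004%', 0.984004),
    'Hungarian': ('94.7368%', 0.947368),
}

for language, (first_512, ratio) in COVERAGE.items():
    assert abs(float(first_512.rstrip('%')) / 100 - ratio) < 1e-9, language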
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# The following result for thai was collected from a limited sample (1M). - -# Character Mapping Table: -TIS620CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 -188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 -253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 - 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 -209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, -223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, -236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, - 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, - 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, - 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, - 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, - 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 92.6386% -# first 1024 sequences:7.3177% -# rest sequences: 1.0230% -# negative sequences: 0.0436% -ThaiLangModel = ( -0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, -0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, -3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, -0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, -3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, -3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, -3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, -3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, -2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, -3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, -1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, -3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, -1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, -0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, -0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, 
-2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, -0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, -3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, -2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, -3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, -2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, -3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, -3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, -3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, -3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, -1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, -0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, -0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, -3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, -3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, -1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, -3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, -3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, -0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, -0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, -1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, -1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, -3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, -0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, -3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, -0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, -0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, -0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, 
-0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, -0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, -0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, -0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, -0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, -3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, -2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, -0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, -3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, -1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, -1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -TIS620ThaiModel = { - 'char_to_order_map': TIS620CharToOrderMap, - 'precedence_matrix': ThaiLangModel, - 'typical_positive_ratio': 0.926386, - 'keep_english_letter': False, - 'charset_name': "TIS-620", - 'language': 'Thai', -} diff --git 
a/pype/vendor/chardet/langturkishmodel.py b/pype/vendor/chardet/langturkishmodel.py deleted file mode 100644 index a427a45739..0000000000 --- a/pype/vendor/chardet/langturkishmodel.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Özgür Baskın - Turkish Language Model -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin5_TurkishCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, - 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, -255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, - 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, -180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, -164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, -150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, - 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, -124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, - 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, - 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, - 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, -) - -TurkishLangModel = ( -3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, -3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, -3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, -3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, -3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, -3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, -2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, -3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, -1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, 
-2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, -3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, -3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, -2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, -2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, -3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, -0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, -3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, -3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, -0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, -1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, -3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, -1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, -3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, -0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, -3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, -1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, -1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, -2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, -2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, -3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, -1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, -0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, -3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, -0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, 
-3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, -1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, -2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, -0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, -3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, -0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, -0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, -3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, -0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, -0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, -3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, -0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, -3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, -0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, -0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, -3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, -0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, -3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, -0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, -0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, -0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, -0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, -0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, -0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, -1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, -0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, -0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, -3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, -0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, -2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, -2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, -0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, -0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin5TurkishModel = { - 'char_to_order_map': Latin5_TurkishCharToOrderMap, - 'precedence_matrix': TurkishLangModel, - 'typical_positive_ratio': 0.970290, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-9", - 'language': 'Turkish', -} diff --git a/pype/vendor/chardet/latin1prober.py b/pype/vendor/chardet/latin1prober.py deleted file mode 100644 index 7d1e8c20fb..0000000000 --- a/pype/vendor/chardet/latin1prober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -FREQ_CAT_NUM = 4 - -UDF = 0 # undefined -OTH = 1 # other -ASC = 2 # ascii capital letter -ASS = 3 # ascii small letter -ACV = 4 # accent capital vowel -ACO = 5 # accent capital other -ASV = 6 # accent small vowel -ASO = 7 # accent small other -CLASS_NUM = 8 # total classes - -Latin1_CharToClass = ( - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F - OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 - ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F - OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 - ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F - OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 - OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F - UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 - OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF - ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 - ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF - ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 - ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF - ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 - ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF - ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 - ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF -) - -# 0 : illegal -# 1 : very unlikely -# 2 : normal -# 3 : very likely -Latin1ClassModel = ( -# UDF OTH ASC ASS ACV ACO ASV ASO - 0, 0, 0, 0, 0, 0, 0, 0, # UDF - 0, 3, 3, 3, 3, 3, 3, 3, # OTH - 0, 3, 3, 3, 3, 3, 3, 3, # ASC - 0, 3, 3, 3, 1, 1, 3, 3, # ASS - 0, 3, 3, 3, 1, 2, 1, 2, # ACV - 0, 3, 3, 3, 3, 3, 3, 3, # ACO - 0, 3, 1, 3, 1, 1, 1, 3, # ASV - 0, 3, 1, 3, 1, 1, 3, 3, # ASO -) - - -class Latin1Prober(CharSetProber): - def __init__(self): - super(Latin1Prober, self).__init__() - self._last_char_class = None - self._freq_counter = None - self.reset() - - def reset(self): - self._last_char_class = OTH - self._freq_counter = [0] * FREQ_CAT_NUM - CharSetProber.reset(self) - - @property - def charset_name(self): - return "ISO-8859-1" - - @property - def language(self): - return "" - - def feed(self, byte_str): - byte_str = self.filter_with_english_letters(byte_str) - for c in byte_str: - char_class = Latin1_CharToClass[c] - freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) - + char_class] - if freq == 0: - self._state = ProbingState.NOT_ME - break - self._freq_counter[freq] += 1 - self._last_char_class = char_class - - return self.state - - def get_confidence(self): - if self.state == 
ProbingState.NOT_ME: - return 0.01 - - total = sum(self._freq_counter) - if total < 0.01: - confidence = 0.0 - else: - confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) - / total) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. - confidence = confidence * 0.73 - return confidence diff --git a/pype/vendor/chardet/mbcharsetprober.py b/pype/vendor/chardet/mbcharsetprober.py deleted file mode 100644 index 6256ecfd1e..0000000000 --- a/pype/vendor/chardet/mbcharsetprober.py +++ /dev/null @@ -1,91 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState - - -class MultiByteCharSetProber(CharSetProber): - """ - MultiByteCharSetProber - """ - - def __init__(self, lang_filter=None): - super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) - self.distribution_analyzer = None - self.coding_sm = None - self._last_char = [0, 0] - - def reset(self): - super(MultiByteCharSetProber, self).reset() - if self.coding_sm: - self.coding_sm.reset() - if self.distribution_analyzer: - self.distribution_analyzer.reset() - self._last_char = [0, 0] - - @property - def charset_name(self): - raise NotImplementedError - - @property - def language(self): - raise NotImplementedError - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.distribution_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - return 
self.distribution_analyzer.get_confidence() diff --git a/pype/vendor/chardet/mbcsgroupprober.py b/pype/vendor/chardet/mbcsgroupprober.py deleted file mode 100644 index 530abe75e0..0000000000 --- a/pype/vendor/chardet/mbcsgroupprober.py +++ /dev/null @@ -1,54 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .utf8prober import UTF8Prober -from .sjisprober import SJISProber -from .eucjpprober import EUCJPProber -from .gb2312prober import GB2312Prober -from .euckrprober import EUCKRProber -from .cp949prober import CP949Prober -from .big5prober import Big5Prober -from .euctwprober import EUCTWProber - - -class MBCSGroupProber(CharSetGroupProber): - def __init__(self, lang_filter=None): - super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) - self.probers = [ - UTF8Prober(), - SJISProber(), - EUCJPProber(), - GB2312Prober(), - EUCKRProber(), - CP949Prober(), - Big5Prober(), - EUCTWProber() - ] - self.reset() diff --git a/pype/vendor/chardet/mbcssm.py b/pype/vendor/chardet/mbcssm.py deleted file mode 100644 index 8360d0f284..0000000000 --- a/pype/vendor/chardet/mbcssm.py +++ /dev/null @@ -1,572 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
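The Latin1Prober deleted above is the only prober in this batch that scores character-class pairs instead of a per-language model, and its confidence formula is easy to lose inside the diff: each "very unlikely" pair (category 1) cancels twenty "very likely" pairs (category 3), and the result is damped by 0.73 so that more accurate probers win ties. A worked example follows, with the arithmetic copied from the deleted get_confidence (the 0.01 returned once the prober hits NOT_ME is dropped here) and the counter values invented:

def latin1_confidence(freq_counter):
    """Confidence formula from the deleted Latin1Prober.get_confidence."""
    total = sum(freq_counter)
    if total < 0.01:
        return 0.0
    # freq_counter is indexed by category: 0 illegal, 1 very unlikely,
    # 2 normal, 3 very likely; unlikely pairs carry a 20x penalty.
    confidence = (freq_counter[3] - freq_counter[1] * 20.0) / total
    return max(confidence, 0.0) * 0.73

print(latin1_confidence([0, 0, 85, 15]))  # clean Latin-1 text: ~0.11
print(latin1_confidence([0, 5, 80, 15]))  # five bad pairs erase the signal: 0.0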
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -# BIG5 - -BIG5_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 4,4,4,4,4,4,4,4, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 4,3,3,3,3,3,3,3, # a0 - a7 - 3,3,3,3,3,3,3,3, # a8 - af - 3,3,3,3,3,3,3,3, # b0 - b7 - 3,3,3,3,3,3,3,3, # b8 - bf - 3,3,3,3,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -BIG5_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 -) - -BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) - -BIG5_SM_MODEL = {'class_table': BIG5_CLS, - 'class_factor': 5, - 'state_table': BIG5_ST, - 'char_len_table': BIG5_CHAR_LEN_TABLE, - 'name': 'Big5'} - -# CP949 - -CP949_CLS = ( - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f - 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f - 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f - 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f - 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f - 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f - 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f - 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f - 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af - 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf - 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff -) - -CP949_ST = ( -#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME - 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 -) - -CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) - -CP949_SM_MODEL = {'class_table': CP949_CLS, - 'class_factor': 10, - 'state_table': CP949_ST, - 'char_len_table': CP949_CHAR_LEN_TABLE, - 'name': 'CP949'} - -# EUC-JP - -EUCJP_CLS = ( - 4,4,4,4,4,4,4,4, # 00 - 07 - 4,4,4,4,4,4,5,5, # 08 - 0f - 4,4,4,4,4,4,4,4, # 10 - 17 - 4,4,4,5,4,4,4,4, # 18 - 1f - 4,4,4,4,4,4,4,4, # 20 - 27 - 4,4,4,4,4,4,4,4, # 28 - 2f - 4,4,4,4,4,4,4,4, # 30 - 37 - 4,4,4,4,4,4,4,4, # 38 - 3f - 4,4,4,4,4,4,4,4, # 40 - 47 - 4,4,4,4,4,4,4,4, # 48 - 4f - 4,4,4,4,4,4,4,4, # 50 - 57 - 4,4,4,4,4,4,4,4, # 58 - 5f - 4,4,4,4,4,4,4,4, # 60 - 67 - 4,4,4,4,4,4,4,4, # 68 - 6f - 4,4,4,4,4,4,4,4, # 70 - 77 - 4,4,4,4,4,4,4,4, # 78 - 7f - 5,5,5,5,5,5,5,5, # 80 - 87 - 5,5,5,5,5,5,1,3, # 88 - 8f - 5,5,5,5,5,5,5,5, # 90 - 97 - 5,5,5,5,5,5,5,5, # 98 - 9f - 5,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,0,5 # f8 - ff -) - -EUCJP_ST = ( - 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f - 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 -) - -EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) - -EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, - 'class_factor': 6, - 'state_table': EUCJP_ST, - 'char_len_table': EUCJP_CHAR_LEN_TABLE, - 'name': 'EUC-JP'} - -# EUC-KR - -EUCKR_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,3,3,3, # 
a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,3,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 2,2,2,2,2,2,2,2, # e0 - e7 - 2,2,2,2,2,2,2,2, # e8 - ef - 2,2,2,2,2,2,2,2, # f0 - f7 - 2,2,2,2,2,2,2,0 # f8 - ff -) - -EUCKR_ST = ( - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f -) - -EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) - -EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, - 'class_factor': 4, - 'state_table': EUCKR_ST, - 'char_len_table': EUCKR_CHAR_LEN_TABLE, - 'name': 'EUC-KR'} - -# EUC-TW - -EUCTW_CLS = ( - 2,2,2,2,2,2,2,2, # 00 - 07 - 2,2,2,2,2,2,0,0, # 08 - 0f - 2,2,2,2,2,2,2,2, # 10 - 17 - 2,2,2,0,2,2,2,2, # 18 - 1f - 2,2,2,2,2,2,2,2, # 20 - 27 - 2,2,2,2,2,2,2,2, # 28 - 2f - 2,2,2,2,2,2,2,2, # 30 - 37 - 2,2,2,2,2,2,2,2, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,2, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,6,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,3,4,4,4,4,4,4, # a0 - a7 - 5,5,1,1,1,1,1,1, # a8 - af - 1,1,1,1,1,1,1,1, # b0 - b7 - 1,1,1,1,1,1,1,1, # b8 - bf - 1,1,3,1,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -EUCTW_ST = ( - MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 - MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 - MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) - -EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, - 'class_factor': 7, - 'state_table': EUCTW_ST, - 'char_len_table': EUCTW_CHAR_LEN_TABLE, - 'name': 'x-euc-tw'} - -# GB2312 - -GB2312_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 3,3,3,3,3,3,3,3, # 30 - 37 - 3,3,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,4, # 78 - 7f - 5,6,6,6,6,6,6,6, # 80 - 87 - 6,6,6,6,6,6,6,6, # 88 - 8f - 6,6,6,6,6,6,6,6, # 90 - 97 - 6,6,6,6,6,6,6,6, # 98 - 9f - 6,6,6,6,6,6,6,6, # a0 - a7 - 
6,6,6,6,6,6,6,6, # a8 - af - 6,6,6,6,6,6,6,6, # b0 - b7 - 6,6,6,6,6,6,6,6, # b8 - bf - 6,6,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 6,6,6,6,6,6,6,6, # e0 - e7 - 6,6,6,6,6,6,6,6, # e8 - ef - 6,6,6,6,6,6,6,6, # f0 - f7 - 6,6,6,6,6,6,6,0 # f8 - ff -) - -GB2312_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 - 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -# To be accurate, the length of class 6 can be either 2 or 4. -# But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validating -# each code range there as well. So it is safe to set it to be -# 2 here. -GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) - -GB2312_SM_MODEL = {'class_table': GB2312_CLS, - 'class_factor': 7, - 'state_table': GB2312_ST, - 'char_len_table': GB2312_CHAR_LEN_TABLE, - 'name': 'GB2312'} - -# Shift_JIS - -SJIS_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,2,2,3, # 80 - 87 - 3,3,3,3,3,3,3,3, # 88 - 8f - 3,3,3,3,3,3,3,3, # 90 - 97 - 3,3,3,3,3,3,3,3, # 98 - 9f - #0xa0 is illegal in sjis encoding, but some pages does - #contain such byte. We need to be more error forgiven. 
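Every *_SM_MODEL dictionary in this file shares one shape: a 256-entry class table mapping a byte to a character class, a flattened state table indexed by `state * class_factor + byte_class`, and a char-length table. The real consumer is chardet's CodingStateMachine; the driver below is an illustrative reimplementation of just the table lookup, with chardet's MachineState values inlined:

# Sketch of how the *_SM_MODEL tables are walked; not the library's code.
START, ERROR, ITS_ME = 0, 1, 2  # chardet's MachineState values

def run_state_machine(model, byte_str):
    state = START
    for byte in byte_str:
        byte_class = model["class_table"][byte]
        state = model["state_table"][state * model["class_factor"] + byte_class]
        if state in (ERROR, ITS_ME):
            break
    return state

For example, feeding b"\x82\xa0" (Shift_JIS "あ") through SJIS_SM_MODEL walks START -> state 3 -> START: one complete two-byte character consumed with no error.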
- 2,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,4,4,4, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,0,0,0) # f8 - ff - - -SJIS_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 -) - -SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) - -SJIS_SM_MODEL = {'class_table': SJIS_CLS, - 'class_factor': 6, - 'state_table': SJIS_ST, - 'char_len_table': SJIS_CHAR_LEN_TABLE, - 'name': 'Shift_JIS'} - -# UCS2-BE - -UCS2BE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2BE_ST = ( - 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 - 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f - 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 - 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f - 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) - -UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, - 'class_factor': 6, - 'state_table': UCS2BE_ST, - 'char_len_table': UCS2BE_CHAR_LEN_TABLE, - 'name': 'UTF-16BE'} - -# UCS2-LE - -UCS2LE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 
0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2LE_ST = ( - 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f - 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 - 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) - -UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, - 'class_factor': 6, - 'state_table': UCS2LE_ST, - 'char_len_table': UCS2LE_CHAR_LEN_TABLE, - 'name': 'UTF-16LE'} - -# UTF-8 - -UTF8_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 2,2,2,2,3,3,3,3, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 5,5,5,5,5,5,5,5, # a0 - a7 - 5,5,5,5,5,5,5,5, # a8 - af - 5,5,5,5,5,5,5,5, # b0 - b7 - 5,5,5,5,5,5,5,5, # b8 - bf - 0,0,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 7,8,8,8,8,8,8,8, # e0 - e7 - 8,8,8,8,8,9,8,8, # e8 - ef - 10,11,11,11,11,11,11,11, # f0 - f7 - 12,13,13,13,14,15,0,0 # f8 - ff -) - -UTF8_ST = ( - MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 - 9, 11, 8, 7, 6, 5, 4, 3,#08-0f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f - MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f - MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f - MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f - MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af - MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf -) - -UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) - -UTF8_SM_MODEL = {'class_table': UTF8_CLS, - 'class_factor': 16, - 'state_table': UTF8_ST, - 'char_len_table': UTF8_CHAR_LEN_TABLE, - 'name': 'UTF-8'} diff --git a/pype/vendor/chardet/sbcharsetprober.py b/pype/vendor/chardet/sbcharsetprober.py deleted file mode 100644 index 0adb51de5a..0000000000 --- a/pype/vendor/chardet/sbcharsetprober.py +++ /dev/null @@ -1,132 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import CharacterCategory, ProbingState, SequenceLikelihood - - -class SingleByteCharSetProber(CharSetProber): - SAMPLE_SIZE = 64 - SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 - POSITIVE_SHORTCUT_THRESHOLD = 0.95 - NEGATIVE_SHORTCUT_THRESHOLD = 0.05 - - def __init__(self, model, reversed=False, name_prober=None): - super(SingleByteCharSetProber, self).__init__() - self._model = model - # TRUE if we need to reverse every pair in the model lookup - self._reversed = reversed - # Optional auxiliary prober for name decision - self._name_prober = name_prober - self._last_order = None - self._seq_counters = None - self._total_seqs = None - self._total_char = None - self._freq_char = None - self.reset() - - def reset(self): - super(SingleByteCharSetProber, self).reset() - # char order of last character - self._last_order = 255 - self._seq_counters = [0] * SequenceLikelihood.get_num_categories() - self._total_seqs = 0 - self._total_char = 0 - # characters that fall in our sampling range - self._freq_char = 0 - - @property - def charset_name(self): - if self._name_prober: - return self._name_prober.charset_name - else: - return self._model['charset_name'] - - @property - def language(self): - if self._name_prober: - return self._name_prober.language - else: - return self._model.get('language') - - def feed(self, byte_str): - if not self._model['keep_english_letter']: - byte_str = self.filter_international_words(byte_str) - if not byte_str: - return self.state - char_to_order_map = self._model['char_to_order_map'] - for i, c in enumerate(byte_str): - # XXX: Order is in range 1-64, so one would think we want 0-63 here, - # but that leads to 27 more test failures than before. - order = char_to_order_map[c] - # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but - # CharacterCategory.SYMBOL is actually 253, so we use CONTROL - # to make it closer to the original intent. The only difference - # is whether or not we count digits and control characters for - # _total_char purposes. 
- if order < CharacterCategory.CONTROL: - self._total_char += 1 - if order < self.SAMPLE_SIZE: - self._freq_char += 1 - if self._last_order < self.SAMPLE_SIZE: - self._total_seqs += 1 - if not self._reversed: - i = (self._last_order * self.SAMPLE_SIZE) + order - model = self._model['precedence_matrix'][i] - else: # reverse the order of the letters in the lookup - i = (order * self.SAMPLE_SIZE) + self._last_order - model = self._model['precedence_matrix'][i] - self._seq_counters[model] += 1 - self._last_order = order - - charset_name = self._model['charset_name'] - if self.state == ProbingState.DETECTING: - if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: - confidence = self.get_confidence() - if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, we have a winner', - charset_name, confidence) - self._state = ProbingState.FOUND_IT - elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', charset_name, - confidence, - self.NEGATIVE_SHORTCUT_THRESHOLD) - self._state = ProbingState.NOT_ME - - return self.state - - def get_confidence(self): - r = 0.01 - if self._total_seqs > 0: - r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / - self._total_seqs / self._model['typical_positive_ratio']) - r = r * self._freq_char / self._total_char - if r >= 1.0: - r = 0.99 - return r diff --git a/pype/vendor/chardet/sbcsgroupprober.py b/pype/vendor/chardet/sbcsgroupprober.py deleted file mode 100644 index 98e95dc1a3..0000000000 --- a/pype/vendor/chardet/sbcsgroupprober.py +++ /dev/null @@ -1,73 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
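The get_confidence() formula above multiplies the positive-bigram ratio (normalised by the model's typical ratio) by the fraction of characters that fall inside the 64-character sample range, then caps the result. A worked example with made-up counts:

# Worked example of SingleByteCharSetProber.get_confidence(); all of the
# counts below are illustrative, not taken from any real model.
seq_counters_positive = 900    # bigrams rated POSITIVE
total_seqs = 1000              # bigrams sampled
typical_positive_ratio = 0.95  # from the language model
freq_char = 800                # chars inside the SAMPLE_SIZE (64) range
total_char = 1000              # chars counted overall

r = (1.0 * seq_counters_positive) / total_seqs / typical_positive_ratio
r = r * freq_char / total_char
r = min(r, 0.99)               # the prober caps confidence at 0.99
print(round(r, 4))             # -> 0.7579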
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .sbcharsetprober import SingleByteCharSetProber -from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, - Latin5CyrillicModel, MacCyrillicModel, - Ibm866Model, Ibm855Model) -from .langgreekmodel import Latin7GreekModel, Win1253GreekModel -from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel -from .langthaimodel import TIS620ThaiModel -from .langhebrewmodel import Win1255HebrewModel -from .hebrewprober import HebrewProber -from .langturkishmodel import Latin5TurkishModel - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - super(SBCSGroupProber, self).__init__() - self.probers = [ - SingleByteCharSetProber(Win1251CyrillicModel), - SingleByteCharSetProber(Koi8rModel), - SingleByteCharSetProber(Latin5CyrillicModel), - SingleByteCharSetProber(MacCyrillicModel), - SingleByteCharSetProber(Ibm866Model), - SingleByteCharSetProber(Ibm855Model), - SingleByteCharSetProber(Latin7GreekModel), - SingleByteCharSetProber(Win1253GreekModel), - SingleByteCharSetProber(Latin5BulgarianModel), - SingleByteCharSetProber(Win1251BulgarianModel), - # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) - # after we retrain model. - # SingleByteCharSetProber(Latin2HungarianModel), - # SingleByteCharSetProber(Win1250HungarianModel), - SingleByteCharSetProber(TIS620ThaiModel), - SingleByteCharSetProber(Latin5TurkishModel), - ] - hebrew_prober = HebrewProber() - logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrew_prober) - visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrew_prober) - hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) - self.probers.extend([hebrew_prober, logical_hebrew_prober, - visual_hebrew_prober]) - - self.reset() diff --git a/pype/vendor/chardet/sjisprober.py b/pype/vendor/chardet/sjisprober.py deleted file mode 100644 index 9e29623bdc..0000000000 --- a/pype/vendor/chardet/sjisprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
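Hebrew is special-cased in the group above because windows-1255 text exists in two byte orders, logical and visual; HebrewProber arbitrates between two SingleByteCharSetProbers built from the same model, one with reversed pair lookups. A hedged sketch of driving that arbitration through the standalone chardet package (the sample text is illustrative):

# Sketch assuming the standalone `chardet` package; feed windows-1255
# Hebrew to SBCSGroupProber and read off the arbitrated best guess.
from chardet.sbcsgroupprober import SBCSGroupProber

prober = SBCSGroupProber()
prober.feed(("שלום עולם, זהו משפט לדוגמה בעברית. " * 10).encode("windows-1255"))
print(prober.charset_name, prober.get_confidence())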
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import SJISDistributionAnalysis -from .jpcntx import SJISContextAnalysis -from .mbcssm import SJIS_SM_MODEL -from .enums import ProbingState, MachineState - - -class SJISProber(MultiByteCharSetProber): - def __init__(self): - super(SJISProber, self).__init__() - self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) - self.distribution_analyzer = SJISDistributionAnalysis() - self.context_analyzer = SJISContextAnalysis() - self.reset() - - def reset(self): - super(SJISProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return self.context_analyzer.charset_name - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char[2 - char_len:], - char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 - - char_len], char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/pype/vendor/chardet/universaldetector.py b/pype/vendor/chardet/universaldetector.py deleted file mode 100644 index 7b4e92d615..0000000000 --- a/pype/vendor/chardet/universaldetector.py +++ /dev/null @@ -1,286 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### -""" -Module containing the UniversalDetector detector class, which is the primary -class a user of ``chardet`` should use. - -:author: Mark Pilgrim (initial port to Python) -:author: Shy Shalom (original C code) -:author: Dan Blanchard (major refactoring for 3.0) -:author: Ian Cordasco -""" - - -import codecs -import logging -import re - -from .charsetgroupprober import CharSetGroupProber -from .enums import InputState, LanguageFilter, ProbingState -from .escprober import EscCharSetProber -from .latin1prober import Latin1Prober -from .mbcsgroupprober import MBCSGroupProber -from .sbcsgroupprober import SBCSGroupProber - - -class UniversalDetector(object): - """ - The ``UniversalDetector`` class underlies the ``chardet.detect`` function - and coordinates all of the different charset probers. - - To get a ``dict`` containing an encoding and its confidence, you can simply - run: - - .. code:: - - u = UniversalDetector() - u.feed(some_bytes) - u.close() - detected = u.result - - """ - - MINIMUM_THRESHOLD = 0.20 - HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') - ESC_DETECTOR = re.compile(b'(\033|~{)') - WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') - ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', - 'iso-8859-2': 'Windows-1250', - 'iso-8859-5': 'Windows-1251', - 'iso-8859-6': 'Windows-1256', - 'iso-8859-7': 'Windows-1253', - 'iso-8859-8': 'Windows-1255', - 'iso-8859-9': 'Windows-1254', - 'iso-8859-13': 'Windows-1257'} - - def __init__(self, lang_filter=LanguageFilter.ALL): - self._esc_charset_prober = None - self._charset_probers = [] - self.result = None - self.done = None - self._got_data = None - self._input_state = None - self._last_char = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - self._has_win_bytes = None - self.reset() - - def reset(self): - """ - Reset the UniversalDetector and all of its probers back to their - initial states. This is called by ``__init__``, so you only need to - call this directly in between analyses of different documents. - """ - self.result = {'encoding': None, 'confidence': 0.0, 'language': None} - self.done = False - self._got_data = False - self._has_win_bytes = False - self._input_state = InputState.PURE_ASCII - self._last_char = b'' - if self._esc_charset_prober: - self._esc_charset_prober.reset() - for prober in self._charset_probers: - prober.reset() - - def feed(self, byte_str): - """ - Takes a chunk of a document and feeds it through all of the relevant - charset probers. - - After calling ``feed``, you can check the value of the ``done`` - attribute to see if you need to continue feeding the - ``UniversalDetector`` more data, or if it has made a prediction - (in the ``result`` attribute). - - .. note:: - You should always call ``close`` when you're done feeding in your - document if ``done`` is not already ``True``. 
- """ - if self.done: - return - - if not len(byte_str): - return - - if not isinstance(byte_str, bytearray): - byte_str = bytearray(byte_str) - - # First check for known BOMs, since these are guaranteed to be correct - if not self._got_data: - # If the data starts with BOM, we know it is UTF - if byte_str.startswith(codecs.BOM_UTF8): - # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8-SIG", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_UTF32_LE, - codecs.BOM_UTF32_BE)): - # FF FE 00 00 UTF-32, little-endian BOM - # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\xFE\xFF\x00\x00'): - # FE FF 00 00 UCS-4, unusual octet order BOM (3412) - self.result = {'encoding': "X-ISO-10646-UCS-4-3412", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\x00\x00\xFF\xFE'): - # 00 00 FF FE UCS-4, unusual octet order BOM (2143) - self.result = {'encoding': "X-ISO-10646-UCS-4-2143", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): - # FF FE UTF-16, little endian BOM - # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", - 'confidence': 1.0, - 'language': ''} - - self._got_data = True - if self.result['encoding'] is not None: - self.done = True - return - - # If none of those matched and we've only see ASCII so far, check - # for high bytes and escape sequences - if self._input_state == InputState.PURE_ASCII: - if self.HIGH_BYTE_DETECTOR.search(byte_str): - self._input_state = InputState.HIGH_BYTE - elif self._input_state == InputState.PURE_ASCII and \ - self.ESC_DETECTOR.search(self._last_char + byte_str): - self._input_state = InputState.ESC_ASCII - - self._last_char = byte_str[-1:] - - # If we've seen escape sequences, use the EscCharSetProber, which - # uses a simple state machine to check for known escape sequences in - # HZ and ISO-2022 encodings, since those are the only encodings that - # use such sequences. - if self._input_state == InputState.ESC_ASCII: - if not self._esc_charset_prober: - self._esc_charset_prober = EscCharSetProber(self.lang_filter) - if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': - self._esc_charset_prober.charset_name, - 'confidence': - self._esc_charset_prober.get_confidence(), - 'language': - self._esc_charset_prober.language} - self.done = True - # If we've seen high bytes (i.e., those with values greater than 127), - # we need to do more complicated checks using all our multi-byte and - # single-byte probers that are left. The single-byte probers - # use character bigram distributions to determine the encoding, whereas - # the multi-byte probers use a combination of character unigram and - # bigram distributions. 
- elif self._input_state == InputState.HIGH_BYTE: - if not self._charset_probers: - self._charset_probers = [MBCSGroupProber(self.lang_filter)] - # If we're checking non-CJK encodings, use single-byte prober - if self.lang_filter & LanguageFilter.NON_CJK: - self._charset_probers.append(SBCSGroupProber()) - self._charset_probers.append(Latin1Prober()) - for prober in self._charset_probers: - if prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': prober.charset_name, - 'confidence': prober.get_confidence(), - 'language': prober.language} - self.done = True - break - if self.WIN_BYTE_DETECTOR.search(byte_str): - self._has_win_bytes = True - - def close(self): - """ - Stop analyzing the current document and come up with a final - prediction. - - :returns: The ``result`` attribute, a ``dict`` with the keys - `encoding`, `confidence`, and `language`. - """ - # Don't bother with checks if we're already done - if self.done: - return self.result - self.done = True - - if not self._got_data: - self.logger.debug('no data received!') - - # Default to ASCII if it is all we've seen so far - elif self._input_state == InputState.PURE_ASCII: - self.result = {'encoding': 'ascii', - 'confidence': 1.0, - 'language': ''} - - # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD - elif self._input_state == InputState.HIGH_BYTE: - prober_confidence = None - max_prober_confidence = 0.0 - max_prober = None - for prober in self._charset_probers: - if not prober: - continue - prober_confidence = prober.get_confidence() - if prober_confidence > max_prober_confidence: - max_prober_confidence = prober_confidence - max_prober = prober - if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): - charset_name = max_prober.charset_name - lower_charset_name = max_prober.charset_name.lower() - confidence = max_prober.get_confidence() - # Use Windows encoding name instead of ISO-8859 if we saw any - # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if self._has_win_bytes: - charset_name = self.ISO_WIN_MAP.get(lower_charset_name, - charset_name) - self.result = {'encoding': charset_name, - 'confidence': confidence, - 'language': max_prober.language} - - # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() == logging.DEBUG: - if self.result['encoding'] is None: - self.logger.debug('no probers hit minimum threshold') - for group_prober in self._charset_probers: - if not group_prober: - continue - if isinstance(group_prober, CharSetGroupProber): - for prober in group_prober.probers: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - else: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - return self.result diff --git a/pype/vendor/chardet/utf8prober.py b/pype/vendor/chardet/utf8prober.py deleted file mode 100644 index 6c3196cc2d..0000000000 --- a/pype/vendor/chardet/utf8prober.py +++ /dev/null @@ -1,82 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
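The Windows-byte upgrade at the end of close() deserves a spelled-out example: bytes 0x80-0x9F are control codes in the ISO-8859 family but printable characters in the corresponding Windows codepages, so having seen any of them (tracked via WIN_BYTE_DETECTOR) tips an iso-8859 guess over to the mapped codepage. A small sketch of just that decision, using a trimmed copy of the map above:

# Illustration of the ISO -> Windows remap in close(); the map here is a
# subset of the ISO_WIN_MAP defined above.
ISO_WIN_MAP = {"iso-8859-1": "Windows-1252", "iso-8859-2": "Windows-1250"}

def final_charset(charset_name, has_win_bytes):
    lower = charset_name.lower()
    if lower.startswith("iso-8859") and has_win_bytes:
        return ISO_WIN_MAP.get(lower, charset_name)
    return charset_name

print(final_charset("ISO-8859-1", True))   # Windows-1252
print(final_charset("ISO-8859-1", False))  # ISO-8859-1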
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState -from .codingstatemachine import CodingStateMachine -from .mbcssm import UTF8_SM_MODEL - - - -class UTF8Prober(CharSetProber): - ONE_CHAR_PROB = 0.5 - - def __init__(self): - super(UTF8Prober, self).__init__() - self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) - self._num_mb_chars = None - self.reset() - - def reset(self): - super(UTF8Prober, self).reset() - self.coding_sm.reset() - self._num_mb_chars = 0 - - @property - def charset_name(self): - return "utf-8" - - @property - def language(self): - return "" - - def feed(self, byte_str): - for c in byte_str: - coding_state = self.coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - if self.coding_sm.get_current_charlen() >= 2: - self._num_mb_chars += 1 - - if self.state == ProbingState.DETECTING: - if self.get_confidence() > self.SHORTCUT_THRESHOLD: - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - unlike = 0.99 - if self._num_mb_chars < 6: - unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars - return 1.0 - unlike - else: - return unlike diff --git a/pype/vendor/chardet/version.py b/pype/vendor/chardet/version.py deleted file mode 100644 index bb2a34a70e..0000000000 --- a/pype/vendor/chardet/version.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - -__version__ = "3.0.4" -VERSION = __version__.split('.') diff --git a/pype/vendor/clique/__init__.py b/pype/vendor/clique/__init__.py deleted file mode 100644 index 39f79a6f2e..0000000000 --- a/pype/vendor/clique/__init__.py +++ /dev/null @@ -1,292 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips -# :license: See LICENSE.txt. - -import re -from collections import defaultdict - -from ._version import __version__ -from .collection import Collection -from .error import CollectionError -# was changed for ftrack-api -from six import string_types - - -#: Pattern for matching an index with optional padding. -DIGITS_PATTERN = '(?P(?P0*)\d+)' - -#: Common patterns that can be passed to :py:func:`~clique.assemble`. 
-PATTERNS = { - 'frames': '\.{0}\.\D+\d?$'.format(DIGITS_PATTERN), - 'versions': 'v{0}'.format(DIGITS_PATTERN) -} - - -def split(fname): - '''Split `fname` into ({head}, {index} and {tail}) - - Example: - >>> split('rs_beauty.1000.png') - ('rs_beauty.', '1000', '.png') - >>> split('myRender.0100.png') - ('myRender.', '0100', '.png') - - ''' - - try: - collections, _ = assemble([fname], minimum_items=1) - except IndexError: - raise ValueError("No collection found") - else: - # Search for indexes starting from end, as opposed to start - # E.g. myRender2017.001.png -> myRender2017.%03d.png - # As opposed to -> myRender%d.%001.png - col = collections[-1] - idx = list(col.indexes)[0] - - return (col.head, - str(idx).zfill(col.padding), - col.tail) - - -def assemble(iterable, patterns=None, minimum_items=2, case_sensitive=True): - '''Assemble items in *iterable* into discreet collections. - - *patterns* may be specified as a list of regular expressions to limit - the returned collection possibilities. Use this when interested in - collections that only match specific patterns. Each pattern must contain - the expression from :py:data:`DIGITS_PATTERN` exactly once. - - A selection of common expressions are available in :py:data:`PATTERNS`. - - .. note:: - - If a pattern is supplied as a string it will be automatically compiled - to a :py:class:`re.RegexObject` instance for convenience. - - When *patterns* is not specified, collections are formed by examining all - possible groupings of the items in *iterable* based around common numerical - components. - - *minimum_items* dictates the minimum number of items a collection must have - in order to be included in the result. The default is 2, filtering out - single item collections. - - If *case_sensitive* is False, then items will be treated as part of the same - collection when they only differ in casing. To avoid ambiguity, the - resulting collection will always be lowercase. For example, "item.0001.dpx" - and "Item.0002.dpx" would be part of the same collection, "item.%04d.dpx". - - .. note:: - - Any compiled *patterns* will also respect the set case sensitivity. - - Return tuple of two lists (collections, remainder) where 'collections' is a - list of assembled :py:class:`~clique.collection.Collection` instances and - 'remainder' is a list of items that did not belong to any collection. - - ''' - collection_map = defaultdict(set) - collections = [] - remainder = [] - - # Compile patterns. - flags = 0 - if not case_sensitive: - flags |= re.IGNORECASE - - compiled_patterns = [] - - if patterns is not None: - if not patterns: - return collections, list(iterable) - - for pattern in patterns: - if isinstance(pattern, string_types): - compiled_patterns.append(re.compile(pattern, flags=flags)) - else: - compiled_patterns.append(pattern) - - else: - compiled_patterns.append(re.compile(DIGITS_PATTERN, flags=flags)) - - # Process iterable. - for item in iterable: - matched = False - - for pattern in compiled_patterns: - for match in pattern.finditer(item): - index = match.group('index') - - head = item[:match.start('index')] - tail = item[match.end('index'):] - - if not case_sensitive: - head = head.lower() - tail = tail.lower() - - padding = match.group('padding') - if padding: - padding = len(index) - else: - padding = 0 - - key = (head, tail, padding) - collection_map[key].add(int(index)) - matched = True - - if not matched: - remainder.append(item) - - # Form collections. 
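The loop above has grouped items into collection_map keyed by (head, tail, padding); the merging and filtering that follow turn those groups into the final result. A hedged usage sketch of the whole assemble() API, assuming the standalone `clique` package (the upstream of this vendored copy); the file names are illustrative:

# assemble() returns (collections, remainder): grouped sequences plus
# the items that matched no collection.
import clique

collections, remainder = clique.assemble([
    "render/beauty.1001.exr",
    "render/beauty.1002.exr",
    "render/beauty.1003.exr",
    "notes.txt",
])
for collection in collections:
    print(collection)   # render/beauty.%04d.exr [1001-1003]
print(remainder)        # ['notes.txt']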
- merge_candidates = [] - for (head, tail, padding), indexes in collection_map.items(): - collection = Collection(head, tail, padding, indexes) - collections.append(collection) - - if collection.padding == 0: - merge_candidates.append(collection) - - # Merge together collections that align on padding boundaries. For example, - # 0998-0999 and 1000-1001 can be merged into 0998-1001. Note that only - # indexes within the padding width limit are merged. If a collection is - # entirely merged into another then it will not be included as a separate - # collection in the results. - fully_merged = [] - for collection in collections: - if collection.padding == 0: - continue - - for candidate in merge_candidates: - if (candidate.head == collection.head and - candidate.tail == collection.tail): - - merged_index_count = 0 - for index in candidate.indexes: - if len(str(abs(index))) == collection.padding: - collection.indexes.add(index) - merged_index_count += 1 - - if merged_index_count == len(candidate.indexes): - fully_merged.append(candidate) - - # Filter out fully merged collections. - collections = [collection for collection in collections - if collection not in fully_merged] - - # Filter out collections that do not have at least as many indexes as - # minimum_items. In addition, add any members of a filtered collection, - # which are not members of an unfiltered collection, to the remainder. - filtered = [] - remainder_candidates = [] - for collection in collections: - if len(collection.indexes) >= minimum_items: - filtered.append(collection) - else: - for member in collection: - remainder_candidates.append(member) - - for candidate in remainder_candidates: - # Check if candidate has already been added to remainder to avoid - # duplicate entries. - if candidate in remainder: - continue - - has_membership = False - - for collection in filtered: - if candidate in collection: - has_membership = True - break - - if not has_membership: - remainder.append(candidate) - - return filtered, remainder - - -def parse(value, pattern='{head}{padding}{tail} [{ranges}]'): - '''Parse *value* into a :py:class:`~clique.collection.Collection`. - - Use *pattern* to extract information from *value*. It may make use of the - following keys: - - * *head* - Common leading part of the collection. - * *tail* - Common trailing part of the collection. - * *padding* - Padding value in ``%0d`` format. - * *range* - Total range in the form ``start-end``. - * *ranges* - Comma separated ranges of indexes. - * *holes* - Comma separated ranges of missing indexes. - - .. note:: - - *holes* only makes sense if *range* or *ranges* is also present. - - ''' - # Construct regular expression for given pattern. - expressions = { - 'head': '(?P.*)', - 'tail': '(?P.*)', - 'padding': '%(?P\d*)d', - 'range': '(?P\d+-\d+)?', - 'ranges': '(?P[\d ,\-]+)?', - 'holes': '(?P[\d ,\-]+)' - } - - pattern_regex = re.escape(pattern) - for key, expression in expressions.items(): - pattern_regex = pattern_regex.replace( - '\{{{0}\}}'.format(key), - expression - ) - pattern_regex = '^{0}$'.format(pattern_regex) - - # Match pattern against value and use results to construct collection. - match = re.search(pattern_regex, value) - if match is None: - raise ValueError('Value did not match pattern.') - - groups = match.groupdict() - if 'padding' in groups and groups['padding']: - groups['padding'] = int(groups['padding']) - else: - groups['padding'] = 0 - - # Create collection and then add indexes. 
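Since parse() is the inverse of Collection.format(), a round-trip example makes the pattern keys above concrete; this sketch assumes the standalone `clique` package and uses the default pattern:

# Rebuild a Collection from its default string representation.
import clique

collection = clique.parse("beauty.%04d.exr [1001-1003, 1005]")
print(collection.head, collection.padding, collection.tail)  # beauty. 4 .exr
print(list(collection.indexes))  # [1001, 1002, 1003, 1005]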
- collection = Collection( - groups.get('head', ''), - groups.get('tail', ''), - groups['padding'] - ) - - if groups.get('range', None) is not None: - start, end = map(int, groups['range'].split('-')) - collection.indexes.update(range(start, end + 1)) - - if groups.get('ranges', None) is not None: - parts = [part.strip() for part in groups['ranges'].split(',')] - for part in parts: - index_range = list(map(int, part.split('-', 2))) - - if len(index_range) > 1: - # Index range. - for index in range(index_range[0], index_range[1] + 1): - collection.indexes.add(index) - else: - # Single index. - collection.indexes.add(index_range[0]) - - if 'holes' in groups: - parts = [part.strip() for part in groups['holes'].split(',')] - for part in parts: - index_range = map(int, part.split('-', 2)) - - if len(index_range) > 1: - # Index range. - for index in range(index_range[0], index_range[1] + 1): - collection.indexes.remove(index) - else: - # Single index. - collection.indexes.remove(index_range[0]) - - return collection diff --git a/pype/vendor/clique/_version.py b/pype/vendor/clique/_version.py deleted file mode 100644 index a4dad29899..0000000000 --- a/pype/vendor/clique/_version.py +++ /dev/null @@ -1,2 +0,0 @@ -__version__ = '1.3.1' - diff --git a/pype/vendor/clique/collection.py b/pype/vendor/clique/collection.py deleted file mode 100644 index db9276c99f..0000000000 --- a/pype/vendor/clique/collection.py +++ /dev/null @@ -1,385 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips -# :license: See LICENSE.txt. - -import re - -import clique.descriptor -import clique.error -import clique.sorted_set - - -class Collection(object): - '''Represent group of items that differ only by numerical component.''' - - indexes = clique.descriptor.Unsettable('indexes') - - def __init__(self, head, tail, padding, indexes=None): - '''Initialise collection. - - *head* is the leading common part whilst *tail* is the trailing - common part. - - *padding* specifies the "width" of the numerical component. An index - will be padded with zeros to fill this width. A *padding* of zero - implies no padding and width may be any size so long as no leading - zeros are present. - - *indexes* can specify a set of numerical indexes to initially populate - the collection with. - - .. note:: - - After instantiation, the ``indexes`` attribute cannot be set to a - new value using assignment:: - - >>> collection.indexes = [1, 2, 3] - AttributeError: Cannot set attribute defined as unsettable. 
- - Instead, manipulate it directly:: - - >>> collection.indexes.clear() - >>> collection.indexes.update([1, 2, 3]) - - ''' - super(Collection, self).__init__() - self.__dict__['indexes'] = clique.sorted_set.SortedSet() - self._head = head - self._tail = tail - self.padding = padding - self._update_expression() - - if indexes is not None: - self.indexes.update(indexes) - - @property - def head(self): - '''Return common leading part.''' - return self._head - - @head.setter - def head(self, value): - '''Set common leading part to *value*.''' - self._head = value - self._update_expression() - - @property - def tail(self): - '''Return common trailing part.''' - return self._tail - - @tail.setter - def tail(self, value): - '''Set common trailing part to *value*.''' - self._tail = value - self._update_expression() - - def _update_expression(self): - '''Update internal expression.''' - self._expression = re.compile( - '^{0}(?P(?P0*)\d+?){1}$' - .format(re.escape(self.head), re.escape(self.tail)) - ) - - def __str__(self): - '''Return string represenation.''' - return self.format() - - def __repr__(self): - '''Return representation.''' - return '<{0} "{1}">'.format(self.__class__.__name__, self) - - def __iter__(self): - '''Return iterator over items in collection.''' - for index in self.indexes: - formatted_index = '{0:0{1}d}'.format(index, self.padding) - item = '{0}{1}{2}'.format(self.head, formatted_index, self.tail) - yield item - - def __contains__(self, item): - '''Return whether *item* is present in collection.''' - match = self.match(item) - if not match: - return False - - if not int(match.group('index')) in self.indexes: - return False - - return True - - def __eq__(self, other): - '''Return whether *other* collection is equal.''' - if not isinstance(other, Collection): - return NotImplemented - - return all([ - other.head == self.head, - other.tail == self.tail, - other.padding == self.padding, - other.indexes == self.indexes - ]) - - def __ne__(self, other): - '''Return whether *other* collection is not equal.''' - result = self.__eq__(other) - if result is NotImplemented: - return result - - return not result - - def __gt__(self, other): - '''Return whether *other* collection is greater than.''' - if not isinstance(other, Collection): - return NotImplemented - - a = (self.head, self.tail, self.padding, len(self.indexes)) - b = (other.head, other.tail, other.padding, len(other.indexes)) - - return a > b - - def __lt__(self, other): - '''Return whether *other* collection is less than.''' - result = self.__gt__(other) - if result is NotImplemented: - return result - - return not result - - def __ge__(self, other): - '''Return whether *other* collection is greater than or equal.''' - result = self.__eq__(other) - if result is NotImplemented: - return result - - if result is False: - result = self.__gt__(other) - - return result - - def __le__(self, other): - '''Return whether *other* collection is less than or equal.''' - result = self.__eq__(other) - if result is NotImplemented: - return result - - if result is False: - result = self.__lt__(other) - - return result - - def match(self, item): - '''Return whether *item* matches this collection expression. - - If a match is successful return data about the match otherwise return - None. 
- - ''' - match = self._expression.match(item) - if not match: - return None - - index = match.group('index') - padded = False - if match.group('padding'): - padded = True - - if self.padding == 0: - if padded: - return None - - elif len(index) != self.padding: - return None - - return match - - def add(self, item): - '''Add *item* to collection. - - raise :py:class:`~clique.error.CollectionError` if *item* cannot be - added to the collection. - - ''' - match = self.match(item) - if match is None: - raise clique.error.CollectionError( - 'Item does not match collection expression.' - ) - - self.indexes.add(int(match.group('index'))) - - def remove(self, item): - '''Remove *item* from collection. - - raise :py:class:`~clique.error.CollectionError` if *item* cannot be - removed from the collection. - - ''' - match = self.match(item) - if match is None: - raise clique.error.CollectionError( - 'Item not present in collection.' - ) - - index = int(match.group('index')) - try: - self.indexes.remove(index) - except KeyError: - raise clique.error.CollectionError( - 'Item not present in collection.' - ) - - def format(self, pattern='{head}{padding}{tail} [{ranges}]'): - '''Return string representation as specified by *pattern*. - - Pattern can be any format accepted by Python's standard format function - and will receive the following keyword arguments as context: - - * *head* - Common leading part of the collection. - * *tail* - Common trailing part of the collection. - * *padding* - Padding value in ``%0d`` format. - * *range* - Total range in the form ``start-end`` - * *ranges* - Comma separated ranges of indexes. - * *holes* - Comma separated ranges of missing indexes. - - ''' - data = {} - data['head'] = self.head - data['tail'] = self.tail - - if self.padding: - data['padding'] = '%0{0}d'.format(self.padding) - else: - data['padding'] = '%d' - - if '{holes}' in pattern: - data['holes'] = self.holes().format('{ranges}') - - if '{range}' in pattern or '{ranges}' in pattern: - indexes = list(self.indexes) - indexes_count = len(indexes) - - if indexes_count == 0: - data['range'] = '' - - elif indexes_count == 1: - data['range'] = '{0}'.format(indexes[0]) - - else: - data['range'] = '{0}-{1}'.format( - indexes[0], indexes[-1] - ) - - if '{ranges}' in pattern: - separated = self.separate() - if len(separated) > 1: - ranges = [collection.format('{range}') - for collection in separated] - - else: - ranges = [data['range']] - - data['ranges'] = ', '.join(ranges) - - return pattern.format(**data) - - def is_contiguous(self): - '''Return whether entire collection is contiguous.''' - previous = None - for index in self.indexes: - if previous is None: - previous = index - continue - - if index != (previous + 1): - return False - - previous = index - - return True - - def holes(self): - '''Return holes in collection. - - Return :py:class:`~clique.collection.Collection` of missing indexes. - - ''' - missing = set([]) - previous = None - for index in self.indexes: - if previous is None: - previous = index - continue - - if index != (previous + 1): - missing.update(range(previous + 1, index)) - - previous = index - - return Collection(self.head, self.tail, self.padding, indexes=missing) - - def is_compatible(self, collection): - '''Return whether *collection* is compatible with this collection. - - To be compatible *collection* must have the same head, tail and padding - properties as this collection. 
- - ''' - return all([ - isinstance(collection, Collection), - collection.head == self.head, - collection.tail == self.tail, - collection.padding == self.padding - ]) - - def merge(self, collection): - '''Merge *collection* into this collection. - - If the *collection* is compatible with this collection then update - indexes with all indexes in *collection*. - - raise :py:class:`~clique.error.CollectionError` if *collection* is not - compatible with this collection. - - ''' - if not self.is_compatible(collection): - raise clique.error.CollectionError('Collection is not compatible ' - 'with this collection.') - - self.indexes.update(collection.indexes) - - def separate(self): - '''Return contiguous parts of collection as separate collections. - - Return as list of :py:class:`~clique.collection.Collection` instances. - - ''' - collections = [] - start = None - end = None - - for index in self.indexes: - if start is None: - start = index - end = start - continue - - if index != (end + 1): - collections.append( - Collection(self.head, self.tail, self.padding, - indexes=set(range(start, end + 1))) - ) - start = index - - end = index - - if start is None: - collections.append( - Collection(self.head, self.tail, self.padding) - ) - else: - collections.append( - Collection(self.head, self.tail, self.padding, - indexes=range(start, end + 1)) - ) - - return collections diff --git a/pype/vendor/clique/descriptor.py b/pype/vendor/clique/descriptor.py deleted file mode 100644 index 32affe17ee..0000000000 --- a/pype/vendor/clique/descriptor.py +++ /dev/null @@ -1,44 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips -# :license: See LICENSE.txt. - - -class Unsettable(object): - '''Prevent standard setting of property. - - Example:: - - >>> class Foo(object): - ... - ... x = Unsettable('x') - ... - ... def __init__(self): - ... self.__dict__['x'] = True - ... - >>> foo = Foo() - >>> print foo.x - True - >>> foo.x = False - AttributeError: Cannot set attribute defined as unsettable. - - ''' - - def __init__(self, label): - '''Initialise descriptor with property *label*. - - *label* should match the name of the property being described:: - - x = Unsettable('x') - - ''' - self.label = label - super(Unsettable, self).__init__() - - def __get__(self, instance, owner): - '''Return value of property for *instance*.''' - return instance.__dict__.get(self.label) - - def __set__(self, instance, value): - '''Set *value* for *instance* property.''' - raise AttributeError('Cannot set attribute defined as unsettable.') - diff --git a/pype/vendor/clique/error.py b/pype/vendor/clique/error.py deleted file mode 100644 index 964e55348e..0000000000 --- a/pype/vendor/clique/error.py +++ /dev/null @@ -1,10 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips -# :license: See LICENSE.txt. - -'''Custom error classes.''' - - -class CollectionError(Exception): - '''Raise when a collection error occurs.''' - diff --git a/pype/vendor/clique/sorted_set.py b/pype/vendor/clique/sorted_set.py deleted file mode 100644 index aa7e32157f..0000000000 --- a/pype/vendor/clique/sorted_set.py +++ /dev/null @@ -1,62 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips -# :license: See LICENSE.txt. 
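The Collection class removed above (its unsettable ``indexes`` attribute is backed by the SortedSet defined just below) is what assemble() and parse() return. A short sketch of its API, using hypothetical frame names::

    import clique

    collection = clique.parse('frame.%04d.exr [1-3, 5]')
    print(collection.is_contiguous())             # False
    print(collection.holes().format('{ranges}'))  # 4

    # Items must match head, padding and tail to be added.
    collection.add('frame.0004.exr')
    print(collection.format())  # frame.%04d.exr [1-5]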
- -import collections -import bisect - - -class SortedSet(collections.MutableSet): - '''Maintain sorted collection of unique items.''' - - def __init__(self, iterable=None): - '''Initialise with items from *iterable*.''' - super(SortedSet, self).__init__() - self._members = [] - if iterable: - self.update(iterable) - - def __str__(self): - '''Return string representation.''' - return str(self._members) - - def __repr__(self): - '''Return representation.''' - return '<{0} "{1}">'.format(self.__class__.__name__, self) - - def __contains__(self, item): - '''Return whether *item* is present.''' - return self._index(item) >= 0 - - def __len__(self): - '''Return number of items.''' - return len(self._members) - - def __iter__(self): - '''Return iterator over items.''' - return iter(self._members) - - def add(self, item): - '''Add *item*.''' - if not item in self: - index = bisect.bisect_right(self._members, item) - self._members.insert(index, item) - - def discard(self, item): - '''Remove *item*.''' - index = self._index(item) - if index >= 0: - del self._members[index] - - def update(self, iterable): - '''Update items with those from *iterable*.''' - for item in iterable: - self.add(item) - - def _index(self, item): - '''Return index of *item* in member list or -1 if not present.''' - index = bisect.bisect_left(self._members, item) - if index != len(self) and self._members[index] == item: - return index - - return -1 diff --git a/pype/vendor/dateutil/__init__.py b/pype/vendor/dateutil/__init__.py deleted file mode 100644 index 0defb82e21..0000000000 --- a/pype/vendor/dateutil/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -try: - from ._version import version as __version__ -except ImportError: - __version__ = 'unknown' - -__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', - 'utils', 'zoneinfo'] diff --git a/pype/vendor/dateutil/_common.py b/pype/vendor/dateutil/_common.py deleted file mode 100644 index 4eb2659bd2..0000000000 --- a/pype/vendor/dateutil/_common.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Common code used in multiple modules. -""" - - -class weekday(object): - __slots__ = ["weekday", "n"] - - def __init__(self, weekday, n=None): - self.weekday = weekday - self.n = n - - def __call__(self, n): - if n == self.n: - return self - else: - return self.__class__(self.weekday, n) - - def __eq__(self, other): - try: - if self.weekday != other.weekday or self.n != other.n: - return False - except AttributeError: - return False - return True - - def __hash__(self): - return hash(( - self.weekday, - self.n, - )) - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] - if not self.n: - return s - else: - return "%s(%+d)" % (s, self.n) - -# vim:ts=4:sw=4:et diff --git a/pype/vendor/dateutil/_version.py b/pype/vendor/dateutil/_version.py deleted file mode 100644 index 713fe0dfe5..0000000000 --- a/pype/vendor/dateutil/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# coding: utf-8 -# file generated by setuptools_scm -# don't change, don't track in version control -version = '2.7.3' diff --git a/pype/vendor/dateutil/easter.py b/pype/vendor/dateutil/easter.py deleted file mode 100644 index 53b7c78938..0000000000 --- a/pype/vendor/dateutil/easter.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a generic easter computing method for any given year, using -Western, Orthodox or Julian algorithms. 
-""" - -import datetime - -__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] - -EASTER_JULIAN = 1 -EASTER_ORTHODOX = 2 -EASTER_WESTERN = 3 - - -def easter(year, method=EASTER_WESTERN): - """ - This method was ported from the work done by GM Arts, - on top of the algorithm by Claus Tondering, which was - based in part on the algorithm of Ouding (1940), as - quoted in "Explanatory Supplement to the Astronomical - Almanac", P. Kenneth Seidelmann, editor. - - This algorithm implements three different easter - calculation methods: - - 1 - Original calculation in Julian calendar, valid in - dates after 326 AD - 2 - Original method, with date converted to Gregorian - calendar, valid in years 1583 to 4099 - 3 - Revised method, in Gregorian calendar, valid in - years 1583 to 4099 as well - - These methods are represented by the constants: - - * ``EASTER_JULIAN = 1`` - * ``EASTER_ORTHODOX = 2`` - * ``EASTER_WESTERN = 3`` - - The default method is method 3. - - More about the algorithm may be found at: - - `GM Arts: Easter Algorithms `_ - - and - - `The Calendar FAQ: Easter `_ - - """ - - if not (1 <= method <= 3): - raise ValueError("invalid method") - - # g - Golden year - 1 - # c - Century - # h - (23 - Epact) mod 30 - # i - Number of days from March 21 to Paschal Full Moon - # j - Weekday for PFM (0=Sunday, etc) - # p - Number of days from March 21 to Sunday on or before PFM - # (-6 to 28 methods 1 & 3, to 56 for method 2) - # e - Extra days to add for method 2 (converting Julian - # date to Gregorian date) - - y = year - g = y % 19 - e = 0 - if method < 3: - # Old method - i = (19*g + 15) % 30 - j = (y + y//4 + i) % 7 - if method == 2: - # Extra dates to convert Julian to Gregorian date - e = 10 - if y > 1600: - e = e + y//100 - 16 - (y//100 - 16)//4 - else: - # New method - c = y//100 - h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 - i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) - j = (y + y//4 + i + 2 - c + c//4) % 7 - - # p can be from -6 to 56 corresponding to dates 22 March to 23 May - # (later dates apply to method 2, although 23 May never actually occurs) - p = i - j + e - d = 1 + (p + 27 + (p + 6)//40) % 31 - m = 3 + (p + 26)//30 - return datetime.date(int(y), int(m), int(d)) diff --git a/pype/vendor/dateutil/parser/__init__.py b/pype/vendor/dateutil/parser/__init__.py deleted file mode 100644 index 216762c099..0000000000 --- a/pype/vendor/dateutil/parser/__init__.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -from ._parser import parse, parser, parserinfo -from ._parser import DEFAULTPARSER, DEFAULTTZPARSER -from ._parser import UnknownTimezoneWarning - -from ._parser import __doc__ - -from .isoparser import isoparser, isoparse - -__all__ = ['parse', 'parser', 'parserinfo', - 'isoparse', 'isoparser', - 'UnknownTimezoneWarning'] - - -### -# Deprecate portions of the private interface so that downstream code that -# is improperly relying on it is given *some* notice. 
- - -def __deprecated_private_func(f): - from functools import wraps - import warnings - - msg = ('{name} is a private function and may break without warning, ' - 'it will be moved and or renamed in future versions.') - msg = msg.format(name=f.__name__) - - @wraps(f) - def deprecated_func(*args, **kwargs): - warnings.warn(msg, DeprecationWarning) - return f(*args, **kwargs) - - return deprecated_func - -def __deprecate_private_class(c): - import warnings - - msg = ('{name} is a private class and may break without warning, ' - 'it will be moved and or renamed in future versions.') - msg = msg.format(name=c.__name__) - - class private_class(c): - __doc__ = c.__doc__ - - def __init__(self, *args, **kwargs): - warnings.warn(msg, DeprecationWarning) - super(private_class, self).__init__(*args, **kwargs) - - private_class.__name__ = c.__name__ - - return private_class - - -from ._parser import _timelex, _resultbase -from ._parser import _tzparser, _parsetz - -_timelex = __deprecate_private_class(_timelex) -_tzparser = __deprecate_private_class(_tzparser) -_resultbase = __deprecate_private_class(_resultbase) -_parsetz = __deprecated_private_func(_parsetz) diff --git a/pype/vendor/dateutil/parser/_parser.py b/pype/vendor/dateutil/parser/_parser.py deleted file mode 100644 index 9d2bb795cf..0000000000 --- a/pype/vendor/dateutil/parser/_parser.py +++ /dev/null @@ -1,1578 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a generic date/time string parser which is able to parse -most known formats to represent a date and/or time. - -This module attempts to be forgiving with regards to unlikely input formats, -returning a datetime object even for dates which are ambiguous. If an element -of a date/time stamp is omitted, the following rules are applied: - -- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour - on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is - specified. -- If a time zone is omitted, a timezone-naive datetime is returned. - -If any other elements are missing, they are taken from the -:class:`datetime.datetime` object passed to the parameter ``default``. If this -results in a day number exceeding the valid number of days per month, the -value falls back to the end of the month. - -Additional resources about date/time string formats can be found below: - -- `A summary of the international standard date and time notation - `_ -- `W3C Date and Time Formats `_ -- `Time Formats (Planetary Rings Node) `_ -- `CPAN ParseDate module - `_ -- `Java SimpleDateFormat Class - `_ -""" -from __future__ import unicode_literals - -import datetime -import re -import string -import time -import warnings - -from calendar import monthrange -from io import StringIO - -import six -from six import binary_type, integer_types, text_type - -from decimal import Decimal - -from warnings import warn - -from .. import relativedelta -from .. import tz - -__all__ = ["parse", "parserinfo"] - - -# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth -# making public and/or figuring out if there is something we can -# take off their plate. 
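The module docstring above spells out the fallback rules for omitted date elements; a quick illustration against the unvendored dateutil.parser (the default datetime is illustrative)::

    import datetime
    from dateutil import parser

    default = datetime.datetime(2021, 1, 31)

    # Only the month is given; day 31 is invalid for February, so the
    # result falls back to the end of the month, as documented above.
    print(parser.parse('February', default=default))
    # 2021-02-28 00:00:00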
-class _timelex(object): - # Fractional seconds are sometimes split by a comma - _split_decimal = re.compile("([.,])") - - def __init__(self, instream): - if six.PY2: - # In Python 2, we can't duck type properly because unicode has - # a 'decode' function, and we'd be double-decoding - if isinstance(instream, (binary_type, bytearray)): - instream = instream.decode() - else: - if getattr(instream, 'decode', None) is not None: - instream = instream.decode() - - if isinstance(instream, text_type): - instream = StringIO(instream) - elif getattr(instream, 'read', None) is None: - raise TypeError('Parser must be a string or character stream, not ' - '{itype}'.format(itype=instream.__class__.__name__)) - - self.instream = instream - self.charstack = [] - self.tokenstack = [] - self.eof = False - - def get_token(self): - """ - This function breaks the time string into lexical units (tokens), which - can be parsed by the parser. Lexical units are demarcated by changes in - the character set, so any continuous string of letters is considered - one unit, any continuous string of numbers is considered one unit. - - The main complication arises from the fact that dots ('.') can be used - both as separators (e.g. "Sep.20.2009") or decimal points (e.g. - "4:30:21.447"). As such, it is necessary to read the full context of - any dot-separated strings before breaking it into tokens; as such, this - function maintains a "token stack", for when the ambiguous context - demands that multiple tokens be parsed at once. - """ - if self.tokenstack: - return self.tokenstack.pop(0) - - seenletters = False - token = None - state = None - - while not self.eof: - # We only realize that we've reached the end of a token when we - # find a character that's not part of the current token - since - # that character may be part of the next token, it's stored in the - # charstack. - if self.charstack: - nextchar = self.charstack.pop(0) - else: - nextchar = self.instream.read(1) - while nextchar == '\x00': - nextchar = self.instream.read(1) - - if not nextchar: - self.eof = True - break - elif not state: - # First character of the token - determines if we're starting - # to parse a word, a number or something else. - token = nextchar - if self.isword(nextchar): - state = 'a' - elif self.isnum(nextchar): - state = '0' - elif self.isspace(nextchar): - token = ' ' - break # emit token - else: - break # emit token - elif state == 'a': - # If we've already started reading a word, we keep reading - # letters until we find something that's not part of a word. - seenletters = True - if self.isword(nextchar): - token += nextchar - elif nextchar == '.': - token += nextchar - state = 'a.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == '0': - # If we've already started reading a number, we keep reading - # numbers until we find something that doesn't fit. - if self.isnum(nextchar): - token += nextchar - elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): - token += nextchar - state = '0.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == 'a.': - # If we've seen some letters and a dot separator, continue - # parsing, and the tokens will be broken up later. - seenletters = True - if nextchar == '.' or self.isword(nextchar): - token += nextchar - elif self.isnum(nextchar) and token[-1] == '.': - token += nextchar - state = '0.' 
- else: - self.charstack.append(nextchar) - break # emit token - elif state == '0.': - # If we've seen at least one dot separator, keep going, we'll - # break up the tokens later. - if nextchar == '.' or self.isnum(nextchar): - token += nextchar - elif self.isword(nextchar) and token[-1] == '.': - token += nextchar - state = 'a.' - else: - self.charstack.append(nextchar) - break # emit token - - if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or - token[-1] in '.,')): - l = self._split_decimal.split(token) - token = l[0] - for tok in l[1:]: - if tok: - self.tokenstack.append(tok) - - if state == '0.' and token.count('.') == 0: - token = token.replace(',', '.') - - return token - - def __iter__(self): - return self - - def __next__(self): - token = self.get_token() - if token is None: - raise StopIteration - - return token - - def next(self): - return self.__next__() # Python 2.x support - - @classmethod - def split(cls, s): - return list(cls(s)) - - @classmethod - def isword(cls, nextchar): - """ Whether or not the next character is part of a word """ - return nextchar.isalpha() - - @classmethod - def isnum(cls, nextchar): - """ Whether the next character is part of a number """ - return nextchar.isdigit() - - @classmethod - def isspace(cls, nextchar): - """ Whether the next character is whitespace """ - return nextchar.isspace() - - -class _resultbase(object): - - def __init__(self): - for attr in self.__slots__: - setattr(self, attr, None) - - def _repr(self, classname): - l = [] - for attr in self.__slots__: - value = getattr(self, attr) - if value is not None: - l.append("%s=%s" % (attr, repr(value))) - return "%s(%s)" % (classname, ", ".join(l)) - - def __len__(self): - return (sum(getattr(self, attr) is not None - for attr in self.__slots__)) - - def __repr__(self): - return self._repr(self.__class__.__name__) - - -class parserinfo(object): - """ - Class which handles what inputs are accepted. Subclass this to customize - the language and acceptable values for each parameter. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM - and YMD. Default is ``False``. - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken - to be the year, otherwise the last number is taken to be the year. - Default is ``False``. 
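The dayfirst flag documented above resolves the classic ambiguous three-integer date; a sketch using the docstring's own example value::

    from dateutil import parser

    # Month-first by default...
    print(parser.parse('01/05/09'))  # 2009-01-05 00:00:00

    # ...day-first when requested via a parserinfo instance.
    info = parser.parserinfo(dayfirst=True)
    print(parser.parse('01/05/09', parserinfo=info))  # 2009-05-01 00:00:00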
- """ - - # m from a.m/p.m, t from ISO T separator - JUMP = [" ", ".", ",", ";", "-", "/", "'", - "at", "on", "and", "ad", "m", "t", "of", - "st", "nd", "rd", "th"] - - WEEKDAYS = [("Mon", "Monday"), - ("Tue", "Tuesday"), # TODO: "Tues" - ("Wed", "Wednesday"), - ("Thu", "Thursday"), # TODO: "Thurs" - ("Fri", "Friday"), - ("Sat", "Saturday"), - ("Sun", "Sunday")] - MONTHS = [("Jan", "January"), - ("Feb", "February"), # TODO: "Febr" - ("Mar", "March"), - ("Apr", "April"), - ("May", "May"), - ("Jun", "June"), - ("Jul", "July"), - ("Aug", "August"), - ("Sep", "Sept", "September"), - ("Oct", "October"), - ("Nov", "November"), - ("Dec", "December")] - HMS = [("h", "hour", "hours"), - ("m", "minute", "minutes"), - ("s", "second", "seconds")] - AMPM = [("am", "a"), - ("pm", "p")] - UTCZONE = ["UTC", "GMT", "Z"] - PERTAIN = ["of"] - TZOFFSET = {} - # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", - # "Anno Domini", "Year of Our Lord"] - - def __init__(self, dayfirst=False, yearfirst=False): - self._jump = self._convert(self.JUMP) - self._weekdays = self._convert(self.WEEKDAYS) - self._months = self._convert(self.MONTHS) - self._hms = self._convert(self.HMS) - self._ampm = self._convert(self.AMPM) - self._utczone = self._convert(self.UTCZONE) - self._pertain = self._convert(self.PERTAIN) - - self.dayfirst = dayfirst - self.yearfirst = yearfirst - - self._year = time.localtime().tm_year - self._century = self._year // 100 * 100 - - def _convert(self, lst): - dct = {} - for i, v in enumerate(lst): - if isinstance(v, tuple): - for v in v: - dct[v.lower()] = i - else: - dct[v.lower()] = i - return dct - - def jump(self, name): - return name.lower() in self._jump - - def weekday(self, name): - try: - return self._weekdays[name.lower()] - except KeyError: - pass - return None - - def month(self, name): - try: - return self._months[name.lower()] + 1 - except KeyError: - pass - return None - - def hms(self, name): - try: - return self._hms[name.lower()] - except KeyError: - return None - - def ampm(self, name): - try: - return self._ampm[name.lower()] - except KeyError: - return None - - def pertain(self, name): - return name.lower() in self._pertain - - def utczone(self, name): - return name.lower() in self._utczone - - def tzoffset(self, name): - if name in self._utczone: - return 0 - - return self.TZOFFSET.get(name) - - def convertyear(self, year, century_specified=False): - """ - Converts two-digit years to year within [-50, 49] - range of self._year (current local time) - """ - - # Function contract is that the year is always positive - assert year >= 0 - - if year < 100 and not century_specified: - # assume current century to start - year += self._century - - if year >= self._year + 50: # if too far in future - year -= 100 - elif year < self._year - 50: # if too far in past - year += 100 - - return year - - def validate(self, res): - # move to info - if res.year is not None: - res.year = self.convertyear(res.year, res.century_specified) - - if res.tzoffset == 0 and not res.tzname or res.tzname == 'Z': - res.tzname = "UTC" - res.tzoffset = 0 - elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): - res.tzoffset = 0 - return True - - -class _ymd(list): - def __init__(self, *args, **kwargs): - super(self.__class__, self).__init__(*args, **kwargs) - self.century_specified = False - self.dstridx = None - self.mstridx = None - self.ystridx = None - - @property - def has_year(self): - return self.ystridx is not None - - @property - def has_month(self): - return self.mstridx is not None - - 
@property - def has_day(self): - return self.dstridx is not None - - def could_be_day(self, value): - if self.has_day: - return False - elif not self.has_month: - return 1 <= value <= 31 - elif not self.has_year: - # Be permissive, assume leapyear - month = self[self.mstridx] - return 1 <= value <= monthrange(2000, month)[1] - else: - month = self[self.mstridx] - year = self[self.ystridx] - return 1 <= value <= monthrange(year, month)[1] - - def append(self, val, label=None): - if hasattr(val, '__len__'): - if val.isdigit() and len(val) > 2: - self.century_specified = True - if label not in [None, 'Y']: # pragma: no cover - raise ValueError(label) - label = 'Y' - elif val > 100: - self.century_specified = True - if label not in [None, 'Y']: # pragma: no cover - raise ValueError(label) - label = 'Y' - - super(self.__class__, self).append(int(val)) - - if label == 'M': - if self.has_month: - raise ValueError('Month is already set') - self.mstridx = len(self) - 1 - elif label == 'D': - if self.has_day: - raise ValueError('Day is already set') - self.dstridx = len(self) - 1 - elif label == 'Y': - if self.has_year: - raise ValueError('Year is already set') - self.ystridx = len(self) - 1 - - def _resolve_from_stridxs(self, strids): - """ - Try to resolve the identities of year/month/day elements using - ystridx, mstridx, and dstridx, if enough of these are specified. - """ - if len(self) == 3 and len(strids) == 2: - # we can back out the remaining stridx value - missing = [x for x in range(3) if x not in strids.values()] - key = [x for x in ['y', 'm', 'd'] if x not in strids] - assert len(missing) == len(key) == 1 - key = key[0] - val = missing[0] - strids[key] = val - - assert len(self) == len(strids) # otherwise this should not be called - out = {key: self[strids[key]] for key in strids} - return (out.get('y'), out.get('m'), out.get('d')) - - def resolve_ymd(self, yearfirst, dayfirst): - len_ymd = len(self) - year, month, day = (None, None, None) - - strids = (('y', self.ystridx), - ('m', self.mstridx), - ('d', self.dstridx)) - - strids = {key: val for key, val in strids if val is not None} - if (len(self) == len(strids) > 0 or - (len(self) == 3 and len(strids) == 2)): - return self._resolve_from_stridxs(strids) - - mstridx = self.mstridx - - if len_ymd > 3: - raise ValueError("More than three YMD values") - elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): - # One member, or two members with a month string - if mstridx is not None: - month = self[mstridx] - # since mstridx is 0 or 1, self[mstridx-1] always - # looks up the other element - other = self[mstridx - 1] - else: - other = self[0] - - if len_ymd > 1 or mstridx is None: - if other > 31: - year = other - else: - day = other - - elif len_ymd == 2: - # Two members with numbers - if self[0] > 31: - # 99-01 - year, month = self - elif self[1] > 31: - # 01-99 - month, year = self - elif dayfirst and self[1] <= 12: - # 13-01 - day, month = self - else: - # 01-13 - month, day = self - - elif len_ymd == 3: - # Three members - if mstridx == 0: - if self[1] > 31: - # Apr-2003-25 - month, year, day = self - else: - month, day, year = self - elif mstridx == 1: - if self[0] > 31 or (yearfirst and self[2] <= 31): - # 99-Jan-01 - year, month, day = self - else: - # 01-Jan-01 - # Give precendence to day-first, since - # two-digit years is usually hand-written. - day, month, year = self - - elif mstridx == 2: - # WTF!? 
- if self[1] > 31: - # 01-99-Jan - day, year, month = self - else: - # 99-01-Jan - year, day, month = self - - else: - if (self[0] > 31 or - self.ystridx == 0 or - (yearfirst and self[1] <= 12 and self[2] <= 31)): - # 99-01-01 - if dayfirst and self[2] <= 12: - year, day, month = self - else: - year, month, day = self - elif self[0] > 12 or (dayfirst and self[1] <= 12): - # 13-01-01 - day, month, year = self - else: - # 01-13-01 - month, day, year = self - - return year, month, day - - -class parser(object): - def __init__(self, info=None): - self.info = info or parserinfo() - - def parse(self, timestr, default=None, - ignoretz=False, tzinfos=None, **kwargs): - """ - Parse the date/time string into a :class:`datetime.datetime` object. - - :param timestr: - Any date/time string using the supported formats. - - :param default: - The default datetime object, if this is a datetime object and not - ``None``, elements specified in ``timestr`` replace elements in the - default object. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a - naive :class:`datetime.datetime` object is returned. - - :param tzinfos: - Additional time zone names / aliases which may be present in the - string. This argument maps time zone names (and optionally offsets - from those time zones) to time zones. This parameter can be a - dictionary with timezone aliases mapping time zone names to time - zones or a function taking two parameters (``tzname`` and - ``tzoffset``) and returning a time zone. - - The timezones to which the names are mapped can be an integer - offset from UTC in seconds or a :class:`tzinfo` object. - - .. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from dateutil.parser import parse - >>> from dateutil.tz import gettz - >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} - >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) - >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, - tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) - - This parameter is ignored if ``ignoretz`` is set. - - :param \\*\\*kwargs: - Keyword arguments as passed to ``_parse()``. - - :return: - Returns a :class:`datetime.datetime` object or, if the - ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the - first element being a :class:`datetime.datetime` object, the second - a tuple containing the fuzzy tokens. - - :raises ValueError: - Raised for invalid or unknown string format, if the provided - :class:`tzinfo` is not in a valid format, or if an invalid date - would be created. - - :raises TypeError: - Raised for non-string or character stream input. - - :raises OverflowError: - Raised if the parsed date exceeds the largest valid C integer on - your system. 
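Beyond the tzinfos mapping shown in the doctest above, ignoretz is the blunt alternative when the zone should simply be discarded; a small sketch::

    from dateutil import parser

    # Drop the trailing zone name instead of resolving it.
    dt = parser.parse('2012-01-19 17:21:00 BRST', ignoretz=True)
    print(dt)         # 2012-01-19 17:21:00
    print(dt.tzinfo)  # None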
- """ - - if default is None: - default = datetime.datetime.now().replace(hour=0, minute=0, - second=0, microsecond=0) - - res, skipped_tokens = self._parse(timestr, **kwargs) - - if res is None: - raise ValueError("Unknown string format:", timestr) - - if len(res) == 0: - raise ValueError("String does not contain a date:", timestr) - - ret = self._build_naive(res, default) - - if not ignoretz: - ret = self._build_tzaware(ret, res, tzinfos) - - if kwargs.get('fuzzy_with_tokens', False): - return ret, skipped_tokens - else: - return ret - - class _result(_resultbase): - __slots__ = ["year", "month", "day", "weekday", - "hour", "minute", "second", "microsecond", - "tzname", "tzoffset", "ampm","any_unused_tokens"] - - def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, - fuzzy_with_tokens=False): - """ - Private method which performs the heavy lifting of parsing, called from - ``parse()``, which passes on its ``kwargs`` to this function. - - :param timestr: - The string to parse. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM - and YMD. If set to ``None``, this value is retrieved from the - current :class:`parserinfo` object (which itself defaults to - ``False``). - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken - to be the year, otherwise the last number is taken to be the year. - If this is set to ``None``, the value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param fuzzy: - Whether to allow fuzzy parsing, allowing for string like "Today is - January 1, 2047 at 8:21:00AM". - - :param fuzzy_with_tokens: - If ``True``, ``fuzzy`` is automatically set to True, and the parser - will return a tuple where the first element is the parsed - :class:`datetime.datetime` datetimestamp and the second element is - a tuple containing the portions of the string which were ignored: - - .. 
doctest:: - - >>> from dateutil.parser import parse - >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) - (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) - - """ - if fuzzy_with_tokens: - fuzzy = True - - info = self.info - - if dayfirst is None: - dayfirst = info.dayfirst - - if yearfirst is None: - yearfirst = info.yearfirst - - res = self._result() - l = _timelex.split(timestr) # Splits the timestr into tokens - - skipped_idxs = [] - - # year/month/day list - ymd = _ymd() - - len_l = len(l) - i = 0 - try: - while i < len_l: - - # Check if it's a number - value_repr = l[i] - try: - value = float(value_repr) - except ValueError: - value = None - - if value is not None: - # Numeric token - i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) - - # Check weekday - elif info.weekday(l[i]) is not None: - value = info.weekday(l[i]) - res.weekday = value - - # Check month name - elif info.month(l[i]) is not None: - value = info.month(l[i]) - ymd.append(value, 'M') - - if i + 1 < len_l: - if l[i + 1] in ('-', '/'): - # Jan-01[-99] - sep = l[i + 1] - ymd.append(l[i + 2]) - - if i + 3 < len_l and l[i + 3] == sep: - # Jan-01-99 - ymd.append(l[i + 4]) - i += 2 - - i += 2 - - elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and - info.pertain(l[i + 2])): - # Jan of 01 - # In this case, 01 is clearly year - if l[i + 4].isdigit(): - # Convert it here to become unambiguous - value = int(l[i + 4]) - year = str(info.convertyear(value)) - ymd.append(year, 'Y') - else: - # Wrong guess - pass - # TODO: not hit in tests - i += 4 - - # Check am/pm - elif info.ampm(l[i]) is not None: - value = info.ampm(l[i]) - val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) - - if val_is_ampm: - res.hour = self._adjust_ampm(res.hour, value) - res.ampm = value - - elif fuzzy: - skipped_idxs.append(i) - - # Check for a timezone name - elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): - res.tzname = l[i] - res.tzoffset = info.tzoffset(res.tzname) - - # Check for something like GMT+3, or BRST+3. Notice - # that it doesn't mean "I am 3 hours after GMT", but - # "my time +3 is GMT". If found, we reverse the - # logic so that timezone parsing code will get it - # right. - if i + 1 < len_l and l[i + 1] in ('+', '-'): - l[i + 1] = ('+', '-')[l[i + 1] == '+'] - res.tzoffset = None - if info.utczone(res.tzname): - # With something like GMT+3, the timezone - # is *not* GMT. - res.tzname = None - - # Check for a numbered timezone - elif res.hour is not None and l[i] in ('+', '-'): - signal = (-1, 1)[l[i] == '+'] - len_li = len(l[i + 1]) - - # TODO: check that l[i + 1] is integer? - if len_li == 4: - # -0300 - hour_offset = int(l[i + 1][:2]) - min_offset = int(l[i + 1][2:]) - elif i + 2 < len_l and l[i + 2] == ':': - # -03:00 - hour_offset = int(l[i + 1]) - min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
- i += 2 - elif len_li <= 2: - # -[0]3 - hour_offset = int(l[i + 1][:2]) - min_offset = 0 - else: - raise ValueError(timestr) - - res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) - - # Look for a timezone name between parenthesis - if (i + 5 < len_l and - info.jump(l[i + 2]) and l[i + 3] == '(' and - l[i + 5] == ')' and - 3 <= len(l[i + 4]) and - self._could_be_tzname(res.hour, res.tzname, - None, l[i + 4])): - # -0300 (BRST) - res.tzname = l[i + 4] - i += 4 - - i += 1 - - # Check jumps - elif not (info.jump(l[i]) or fuzzy): - raise ValueError(timestr) - - else: - skipped_idxs.append(i) - i += 1 - - # Process year/month/day - year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) - - res.century_specified = ymd.century_specified - res.year = year - res.month = month - res.day = day - - except (IndexError, ValueError): - return None, None - - if not info.validate(res): - return None, None - - if fuzzy_with_tokens: - skipped_tokens = self._recombine_skipped(l, skipped_idxs) - return res, tuple(skipped_tokens) - else: - return res, None - - def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): - # Token is a number - value_repr = tokens[idx] - try: - value = self._to_decimal(value_repr) - except Exception as e: - six.raise_from(ValueError('Unknown numeric token'), e) - - len_li = len(value_repr) - - len_l = len(tokens) - - if (len(ymd) == 3 and len_li in (2, 4) and - res.hour is None and - (idx + 1 >= len_l or - (tokens[idx + 1] != ':' and - info.hms(tokens[idx + 1]) is None))): - # 19990101T23[59] - s = tokens[idx] - res.hour = int(s[:2]) - - if len_li == 4: - res.minute = int(s[2:]) - - elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): - # YYMMDD or HHMMSS[.ss] - s = tokens[idx] - - if not ymd and '.' not in tokens[idx]: - ymd.append(s[:2]) - ymd.append(s[2:4]) - ymd.append(s[4:]) - else: - # 19990101T235959[.59] - - # TODO: Check if res attributes already set. - res.hour = int(s[:2]) - res.minute = int(s[2:4]) - res.second, res.microsecond = self._parsems(s[4:]) - - elif len_li in (8, 12, 14): - # YYYYMMDD - s = tokens[idx] - ymd.append(s[:4], 'Y') - ymd.append(s[4:6]) - ymd.append(s[6:8]) - - if len_li > 8: - res.hour = int(s[8:10]) - res.minute = int(s[10:12]) - - if len_li > 12: - res.second = int(s[12:]) - - elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: - # HH[ ]h or MM[ ]m or SS[.ss][ ]s - hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) - (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) - if hms is not None: - # TODO: checking that hour/minute/second are not - # already set? - self._assign_hms(res, value_repr, hms) - - elif idx + 2 < len_l and tokens[idx + 1] == ':': - # HH:MM[:SS[.ss]] - res.hour = int(value) - value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
- (res.minute, res.second) = self._parse_min_sec(value) - - if idx + 4 < len_l and tokens[idx + 3] == ':': - res.second, res.microsecond = self._parsems(tokens[idx + 4]) - - idx += 2 - - idx += 2 - - elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): - sep = tokens[idx + 1] - ymd.append(value_repr) - - if idx + 2 < len_l and not info.jump(tokens[idx + 2]): - if tokens[idx + 2].isdigit(): - # 01-01[-01] - ymd.append(tokens[idx + 2]) - else: - # 01-Jan[-01] - value = info.month(tokens[idx + 2]) - - if value is not None: - ymd.append(value, 'M') - else: - raise ValueError() - - if idx + 3 < len_l and tokens[idx + 3] == sep: - # We have three members - value = info.month(tokens[idx + 4]) - - if value is not None: - ymd.append(value, 'M') - else: - ymd.append(tokens[idx + 4]) - idx += 2 - - idx += 1 - idx += 1 - - elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): - if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: - # 12 am - hour = int(value) - res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) - idx += 1 - else: - # Year, month or day - ymd.append(value) - idx += 1 - - elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): - # 12am - hour = int(value) - res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) - idx += 1 - - elif ymd.could_be_day(value): - ymd.append(value) - - elif not fuzzy: - raise ValueError() - - return idx - - def _find_hms_idx(self, idx, tokens, info, allow_jump): - len_l = len(tokens) - - if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: - # There is an "h", "m", or "s" label following this token. We take - # assign the upcoming label to the current token. - # e.g. the "12" in 12h" - hms_idx = idx + 1 - - elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and - info.hms(tokens[idx+2]) is not None): - # There is a space and then an "h", "m", or "s" label. - # e.g. the "12" in "12 h" - hms_idx = idx + 2 - - elif idx > 0 and info.hms(tokens[idx-1]) is not None: - # There is a "h", "m", or "s" preceeding this token. Since neither - # of the previous cases was hit, there is no label following this - # token, so we use the previous label. - # e.g. the "04" in "12h04" - hms_idx = idx-1 - - elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and - info.hms(tokens[idx-2]) is not None): - # If we are looking at the final token, we allow for a - # backward-looking check to skip over a space. - # TODO: Are we sure this is the right condition here? - hms_idx = idx - 2 - - else: - hms_idx = None - - return hms_idx - - def _assign_hms(self, res, value_repr, hms): - # See GH issue #427, fixing float rounding - value = self._to_decimal(value_repr) - - if hms == 0: - # Hour - res.hour = int(value) - if value % 1: - res.minute = int(60*(value % 1)) - - elif hms == 1: - (res.minute, res.second) = self._parse_min_sec(value) - - elif hms == 2: - (res.second, res.microsecond) = self._parsems(value_repr) - - def _could_be_tzname(self, hour, tzname, tzoffset, token): - return (hour is not None and - tzname is None and - tzoffset is None and - len(token) <= 5 and - all(x in string.ascii_uppercase for x in token)) - - def _ampm_valid(self, hour, ampm, fuzzy): - """ - For fuzzy parsing, 'a' or 'am' (both valid English words) - may erroneously trigger the AM/PM flag. Deal with that - here. - """ - val_is_ampm = True - - # If there's already an AM/PM flag, this one isn't one. 
- if fuzzy and ampm is not None: - val_is_ampm = False - - # If AM/PM is found and hour is not, raise a ValueError - if hour is None: - if fuzzy: - val_is_ampm = False - else: - raise ValueError('No hour specified with AM or PM flag.') - elif not 0 <= hour <= 12: - # If AM/PM is found, it's a 12 hour clock, so raise - # an error for invalid range - if fuzzy: - val_is_ampm = False - else: - raise ValueError('Invalid hour specified for 12-hour clock.') - - return val_is_ampm - - def _adjust_ampm(self, hour, ampm): - if hour < 12 and ampm == 1: - hour += 12 - elif hour == 12 and ampm == 0: - hour = 0 - return hour - - def _parse_min_sec(self, value): - # TODO: Every usage of this function sets res.second to the return - # value. Are there any cases where second will be returned as None and - # we *dont* want to set res.second = None? - minute = int(value) - second = None - - sec_remainder = value % 1 - if sec_remainder: - second = int(60 * sec_remainder) - return (minute, second) - - def _parsems(self, value): - """Parse a I[.F] seconds value into (seconds, microseconds).""" - if "." not in value: - return int(value), 0 - else: - i, f = value.split(".") - return int(i), int(f.ljust(6, "0")[:6]) - - def _parse_hms(self, idx, tokens, info, hms_idx): - # TODO: Is this going to admit a lot of false-positives for when we - # just happen to have digits and "h", "m" or "s" characters in non-date - # text? I guess hex hashes won't have that problem, but there's plenty - # of random junk out there. - if hms_idx is None: - hms = None - new_idx = idx - elif hms_idx > idx: - hms = info.hms(tokens[hms_idx]) - new_idx = hms_idx - else: - # Looking backwards, increment one. - hms = info.hms(tokens[hms_idx]) + 1 - new_idx = idx - - return (new_idx, hms) - - def _recombine_skipped(self, tokens, skipped_idxs): - """ - >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] - >>> skipped_idxs = [0, 1, 2, 5] - >>> _recombine_skipped(tokens, skipped_idxs) - ["foo bar", "baz"] - """ - skipped_tokens = [] - for i, idx in enumerate(sorted(skipped_idxs)): - if i > 0 and idx - 1 == skipped_idxs[i - 1]: - skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] - else: - skipped_tokens.append(tokens[idx]) - - return skipped_tokens - - def _build_tzinfo(self, tzinfos, tzname, tzoffset): - if callable(tzinfos): - tzdata = tzinfos(tzname, tzoffset) - else: - tzdata = tzinfos.get(tzname) - # handle case where tzinfo is paased an options that returns None - # eg tzinfos = {'BRST' : None} - if isinstance(tzdata, datetime.tzinfo) or tzdata is None: - tzinfo = tzdata - elif isinstance(tzdata, text_type): - tzinfo = tz.tzstr(tzdata) - elif isinstance(tzdata, integer_types): - tzinfo = tz.tzoffset(tzname, tzdata) - return tzinfo - - def _build_tzaware(self, naive, res, tzinfos): - if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): - tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) - aware = naive.replace(tzinfo=tzinfo) - aware = self._assign_tzname(aware, res.tzname) - - elif res.tzname and res.tzname in time.tzname: - aware = naive.replace(tzinfo=tz.tzlocal()) - - # Handle ambiguous local datetime - aware = self._assign_tzname(aware, res.tzname) - - # This is mostly relevant for winter GMT zones parsed in the UK - if (aware.tzname() != res.tzname and - res.tzname in self.info.UTCZONE): - aware = aware.replace(tzinfo=tz.tzutc()) - - elif res.tzoffset == 0: - aware = naive.replace(tzinfo=tz.tzutc()) - - elif res.tzoffset: - aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) - - elif not 
res.tzname and not res.tzoffset: - # i.e. no timezone information was found. - aware = naive - - elif res.tzname: - # tz-like string was parsed but we don't know what to do - # with it - warnings.warn("tzname {tzname} identified but not understood. " - "Pass `tzinfos` argument in order to correctly " - "return a timezone-aware datetime. In a future " - "version, this will raise an " - "exception.".format(tzname=res.tzname), - category=UnknownTimezoneWarning) - aware = naive - - return aware - - def _build_naive(self, res, default): - repl = {} - for attr in ("year", "month", "day", "hour", - "minute", "second", "microsecond"): - value = getattr(res, attr) - if value is not None: - repl[attr] = value - - if 'day' not in repl: - # If the default day exceeds the last day of the month, fall back - # to the end of the month. - cyear = default.year if res.year is None else res.year - cmonth = default.month if res.month is None else res.month - cday = default.day if res.day is None else res.day - - if cday > monthrange(cyear, cmonth)[1]: - repl['day'] = monthrange(cyear, cmonth)[1] - - naive = default.replace(**repl) - - if res.weekday is not None and not res.day: - naive = naive + relativedelta.relativedelta(weekday=res.weekday) - - return naive - - def _assign_tzname(self, dt, tzname): - if dt.tzname() != tzname: - new_dt = tz.enfold(dt, fold=1) - if new_dt.tzname() == tzname: - return new_dt - - return dt - - def _to_decimal(self, val): - try: - decimal_value = Decimal(val) - # See GH 662, edge case, infinite value should not be converted via `_to_decimal` - if not decimal_value.is_finite(): - raise ValueError("Converted decimal value is infinite or NaN") - except Exception as e: - msg = "Could not convert %s to decimal" % val - six.raise_from(ValueError(msg), e) - else: - return decimal_value - - -DEFAULTPARSER = parser() - - -def parse(timestr, parserinfo=None, **kwargs): - """ - - Parse a string in one of the supported formats, using the - ``parserinfo`` parameters. - - :param timestr: - A string containing a date/time stamp. - - :param parserinfo: - A :class:`parserinfo` object containing parameters for the parser. - If ``None``, the default arguments to the :class:`parserinfo` - constructor are used. - - The ``**kwargs`` parameter takes the following keyword arguments: - - :param default: - The default datetime object, if this is a datetime object and not - ``None``, elements specified in ``timestr`` replace elements in the - default object. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a naive - :class:`datetime` object is returned. - - :param tzinfos: - Additional time zone names / aliases which may be present in the - string. This argument maps time zone names (and optionally offsets - from those time zones) to time zones. This parameter can be a - dictionary with timezone aliases mapping time zone names to time - zones or a function taking two parameters (``tzname`` and - ``tzoffset``) and returning a time zone. - - The timezones to which the names are mapped can be an integer - offset from UTC in seconds or a :class:`tzinfo` object. - - .. 
doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from dateutil.parser import parse - >>> from dateutil.tz import gettz - >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} - >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) - >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, - tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) - - This parameter is ignored if ``ignoretz`` is set. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM and - YMD. If set to ``None``, this value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken to - be the year, otherwise the last number is taken to be the year. If - this is set to ``None``, the value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param fuzzy: - Whether to allow fuzzy parsing, allowing for string like "Today is - January 1, 2047 at 8:21:00AM". - - :param fuzzy_with_tokens: - If ``True``, ``fuzzy`` is automatically set to True, and the parser - will return a tuple where the first element is the parsed - :class:`datetime.datetime` datetimestamp and the second element is - a tuple containing the portions of the string which were ignored: - - .. doctest:: - - >>> from dateutil.parser import parse - >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) - (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) - - :return: - Returns a :class:`datetime.datetime` object or, if the - ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the - first element being a :class:`datetime.datetime` object, the second - a tuple containing the fuzzy tokens. - - :raises ValueError: - Raised for invalid or unknown string format, if the provided - :class:`tzinfo` is not in a valid format, or if an invalid date - would be created. - - :raises OverflowError: - Raised if the parsed date exceeds the largest valid C integer on - your system. 
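yearfirst mirrors dayfirst at the other end of an ambiguous date; a sketch using the docstring's example value::

    from dateutil import parser

    print(parser.parse('01/05/09'))                  # 2009-01-05 00:00:00
    print(parser.parse('01/05/09', yearfirst=True))  # 2001-05-09 00:00:00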
- """ - if parserinfo: - return parser(parserinfo).parse(timestr, **kwargs) - else: - return DEFAULTPARSER.parse(timestr, **kwargs) - - -class _tzparser(object): - - class _result(_resultbase): - - __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", - "start", "end"] - - class _attr(_resultbase): - __slots__ = ["month", "week", "weekday", - "yday", "jyday", "day", "time"] - - def __repr__(self): - return self._repr("") - - def __init__(self): - _resultbase.__init__(self) - self.start = self._attr() - self.end = self._attr() - - def parse(self, tzstr): - res = self._result() - l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] - used_idxs = list() - try: - - len_l = len(l) - - i = 0 - while i < len_l: - # BRST+3[BRDT[+2]] - j = i - while j < len_l and not [x for x in l[j] - if x in "0123456789:,-+"]: - j += 1 - if j != i: - if not res.stdabbr: - offattr = "stdoffset" - res.stdabbr = "".join(l[i:j]) - else: - offattr = "dstoffset" - res.dstabbr = "".join(l[i:j]) - - for ii in range(j): - used_idxs.append(ii) - i = j - if (i < len_l and (l[i] in ('+', '-') or l[i][0] in - "0123456789")): - if l[i] in ('+', '-'): - # Yes, that's right. See the TZ variable - # documentation. - signal = (1, -1)[l[i] == '+'] - used_idxs.append(i) - i += 1 - else: - signal = -1 - len_li = len(l[i]) - if len_li == 4: - # -0300 - setattr(res, offattr, (int(l[i][:2]) * 3600 + - int(l[i][2:]) * 60) * signal) - elif i + 1 < len_l and l[i + 1] == ':': - # -03:00 - setattr(res, offattr, - (int(l[i]) * 3600 + - int(l[i + 2]) * 60) * signal) - used_idxs.append(i) - i += 2 - elif len_li <= 2: - # -[0]3 - setattr(res, offattr, - int(l[i][:2]) * 3600 * signal) - else: - return None - used_idxs.append(i) - i += 1 - if res.dstabbr: - break - else: - break - - - if i < len_l: - for j in range(i, len_l): - if l[j] == ';': - l[j] = ',' - - assert l[i] == ',' - - i += 1 - - if i >= len_l: - pass - elif (8 <= l.count(',') <= 9 and - not [y for x in l[i:] if x != ',' - for y in x if y not in "0123456789+-"]): - # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] - for x in (res.start, res.end): - x.month = int(l[i]) - used_idxs.append(i) - i += 2 - if l[i] == '-': - value = int(l[i + 1]) * -1 - used_idxs.append(i) - i += 1 - else: - value = int(l[i]) - used_idxs.append(i) - i += 2 - if value: - x.week = value - x.weekday = (int(l[i]) - 1) % 7 - else: - x.day = int(l[i]) - used_idxs.append(i) - i += 2 - x.time = int(l[i]) - used_idxs.append(i) - i += 2 - if i < len_l: - if l[i] in ('-', '+'): - signal = (-1, 1)[l[i] == "+"] - used_idxs.append(i) - i += 1 - else: - signal = 1 - used_idxs.append(i) - res.dstoffset = (res.stdoffset + int(l[i]) * signal) - - # This was a made-up format that is not in normal use - warn(('Parsed time zone "%s"' % tzstr) + - 'is in a non-standard dateutil-specific format, which ' + - 'is now deprecated; support for parsing this format ' + - 'will be removed in future versions. 
It is recommended ' + - 'that you switch to a standard format like the GNU ' + - 'TZ variable format.', tz.DeprecatedTzFormatWarning) - elif (l.count(',') == 2 and l[i:].count('/') <= 2 and - not [y for x in l[i:] if x not in (',', '/', 'J', 'M', - '.', '-', ':') - for y in x if y not in "0123456789"]): - for x in (res.start, res.end): - if l[i] == 'J': - # non-leap year day (1 based) - used_idxs.append(i) - i += 1 - x.jyday = int(l[i]) - elif l[i] == 'M': - # month[-.]week[-.]weekday - used_idxs.append(i) - i += 1 - x.month = int(l[i]) - used_idxs.append(i) - i += 1 - assert l[i] in ('-', '.') - used_idxs.append(i) - i += 1 - x.week = int(l[i]) - if x.week == 5: - x.week = -1 - used_idxs.append(i) - i += 1 - assert l[i] in ('-', '.') - used_idxs.append(i) - i += 1 - x.weekday = (int(l[i]) - 1) % 7 - else: - # year day (zero based) - x.yday = int(l[i]) + 1 - - used_idxs.append(i) - i += 1 - - if i < len_l and l[i] == '/': - used_idxs.append(i) - i += 1 - # start time - len_li = len(l[i]) - if len_li == 4: - # -0300 - x.time = (int(l[i][:2]) * 3600 + - int(l[i][2:]) * 60) - elif i + 1 < len_l and l[i + 1] == ':': - # -03:00 - x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 - used_idxs.append(i) - i += 2 - if i + 1 < len_l and l[i + 1] == ':': - used_idxs.append(i) - i += 2 - x.time += int(l[i]) - elif len_li <= 2: - # -[0]3 - x.time = (int(l[i][:2]) * 3600) - else: - return None - used_idxs.append(i) - i += 1 - - assert i == len_l or l[i] == ',' - - i += 1 - - assert i >= len_l - - except (IndexError, ValueError, AssertionError): - return None - - unused_idxs = set(range(len_l)).difference(used_idxs) - res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) - return res - - -DEFAULTTZPARSER = _tzparser() - - -def _parsetz(tzstr): - return DEFAULTTZPARSER.parse(tzstr) - -class UnknownTimezoneWarning(RuntimeWarning): - """Raised when the parser finds a timezone it cannot parse into a tzinfo""" -# vim:ts=4:sw=4:et diff --git a/pype/vendor/dateutil/parser/isoparser.py b/pype/vendor/dateutil/parser/isoparser.py deleted file mode 100644 index cd27f93d96..0000000000 --- a/pype/vendor/dateutil/parser/isoparser.py +++ /dev/null @@ -1,406 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a parser for ISO-8601 strings - -It is intended to support all valid date, time and datetime formats per the -ISO-8601 specification. - -..versionadded:: 2.7.0 -""" -from datetime import datetime, timedelta, time, date -import calendar -from dateutil import tz - -from functools import wraps - -import re -import six - -__all__ = ["isoparse", "isoparser"] - - -def _takes_ascii(f): - @wraps(f) - def func(self, str_in, *args, **kwargs): - # If it's a stream, read the whole thing - str_in = getattr(str_in, 'read', lambda: str_in)() - - # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII - if isinstance(str_in, six.text_type): - # ASCII is the same in UTF-8 - try: - str_in = str_in.encode('ascii') - except UnicodeEncodeError as e: - msg = 'ISO-8601 strings should contain only ASCII characters' - six.raise_from(ValueError(msg), e) - - return f(self, str_in, *args, **kwargs) - - return func - - -class isoparser(object): - def __init__(self, sep=None): - """ - :param sep: - A single character that separates date and time portions. If - ``None``, the parser will accept any single character. - For strict ISO-8601 adherence, pass ``'T'``. 
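Reviewer note: per the separator rule above, the default ``sep=None`` accepts any single-character date/time separator, while ``sep='T'`` enforces strict ISO-8601. A quick sketch against upstream dateutil, which this vendored file mirrors:

    >>> from dateutil.parser import isoparser
    >>> isoparser(sep='T').isoparse('2018-01-01T12:00')
    datetime.datetime(2018, 1, 1, 12, 0)
    >>> isoparser().isoparse('2018-01-01 12:00')  # sep=None: any single character
    datetime.datetime(2018, 1, 1, 12, 0)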
- """ - if sep is not None: - if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): - raise ValueError('Separator must be a single, non-numeric ' + - 'ASCII character') - - sep = sep.encode('ascii') - - self._sep = sep - - @_takes_ascii - def isoparse(self, dt_str): - """ - Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. - - An ISO-8601 datetime string consists of a date portion, followed - optionally by a time portion - the date and time portions are separated - by a single character separator, which is ``T`` in the official - standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be - combined with a time portion. - - Supported date formats are: - - Common: - - - ``YYYY`` - - ``YYYY-MM`` or ``YYYYMM`` - - ``YYYY-MM-DD`` or ``YYYYMMDD`` - - Uncommon: - - - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) - - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day - - The ISO week and day numbering follows the same logic as - :func:`datetime.date.isocalendar`. - - Supported time formats are: - - - ``hh`` - - ``hh:mm`` or ``hhmm`` - - ``hh:mm:ss`` or ``hhmmss`` - - ``hh:mm:ss.sss`` or ``hh:mm:ss.ssssss`` (3-6 sub-second digits) - - Midnight is a special case for `hh`, as the standard supports both - 00:00 and 24:00 as a representation. - - .. caution:: - - Support for fractional components other than seconds is part of the - ISO-8601 standard, but is not currently implemented in this parser. - - Supported time zone offset formats are: - - - `Z` (UTC) - - `±HH:MM` - - `±HHMM` - - `±HH` - - Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, - with the exception of UTC, which will be represented as - :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such - as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. - - :param dt_str: - A string or stream containing only an ISO-8601 datetime string - - :return: - Returns a :class:`datetime.datetime` representing the string. - Unspecified components default to their lowest value. - - .. warning:: - - As of version 2.7.0, the strictness of the parser should not be - considered a stable part of the contract. Any valid ISO-8601 string - that parses correctly with the default settings will continue to - parse correctly in future versions, but invalid strings that - currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not - guaranteed to continue failing in future versions if they encode - a valid date. - - .. versionadded:: 2.7.0 - """ - components, pos = self._parse_isodate(dt_str) - - if len(dt_str) > pos: - if self._sep is None or dt_str[pos:pos + 1] == self._sep: - components += self._parse_isotime(dt_str[pos + 1:]) - else: - raise ValueError('String contains unknown ISO components') - - return datetime(*components) - - @_takes_ascii - def parse_isodate(self, datestr): - """ - Parse the date portion of an ISO string. - - :param datestr: - The string portion of an ISO string, without a separator - - :return: - Returns a :class:`datetime.date` object - """ - components, pos = self._parse_isodate(datestr) - if pos < len(datestr): - raise ValueError('String contains unknown ISO ' + - 'components: {}'.format(datestr)) - return date(*components) - - @_takes_ascii - def parse_isotime(self, timestr): - """ - Parse the time portion of an ISO string. 
- - :param timestr: - The time portion of an ISO string, without a separator - - :return: - Returns a :class:`datetime.time` object - """ - return time(*self._parse_isotime(timestr)) - - @_takes_ascii - def parse_tzstr(self, tzstr, zero_as_utc=True): - """ - Parse a valid ISO time zone string. - - See :func:`isoparser.isoparse` for details on supported formats. - - :param tzstr: - A string representing an ISO time zone offset - - :param zero_as_utc: - Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones - - :return: - Returns :class:`dateutil.tz.tzoffset` for offsets and - :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is - specified) offsets equivalent to UTC. - """ - return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) - - # Constants - _MICROSECOND_END_REGEX = re.compile(b'[-+Z]+') - _DATE_SEP = b'-' - _TIME_SEP = b':' - _MICRO_SEP = b'.' - - def _parse_isodate(self, dt_str): - try: - return self._parse_isodate_common(dt_str) - except ValueError: - return self._parse_isodate_uncommon(dt_str) - - def _parse_isodate_common(self, dt_str): - len_str = len(dt_str) - components = [1, 1, 1] - - if len_str < 4: - raise ValueError('ISO string too short') - - # Year - components[0] = int(dt_str[0:4]) - pos = 4 - if pos >= len_str: - return components, pos - - has_sep = dt_str[pos:pos + 1] == self._DATE_SEP - if has_sep: - pos += 1 - - # Month - if len_str - pos < 2: - raise ValueError('Invalid common month') - - components[1] = int(dt_str[pos:pos + 2]) - pos += 2 - - if pos >= len_str: - if has_sep: - return components, pos - else: - raise ValueError('Invalid ISO format') - - if has_sep: - if dt_str[pos:pos + 1] != self._DATE_SEP: - raise ValueError('Invalid separator in ISO string') - pos += 1 - - # Day - if len_str - pos < 2: - raise ValueError('Invalid common day') - components[2] = int(dt_str[pos:pos + 2]) - return components, pos + 2 - - def _parse_isodate_uncommon(self, dt_str): - if len(dt_str) < 4: - raise ValueError('ISO string too short') - - # All ISO formats start with the year - year = int(dt_str[0:4]) - - has_sep = dt_str[4:5] == self._DATE_SEP - - pos = 4 + has_sep # Skip '-' if it's there - if dt_str[pos:pos + 1] == b'W': - # YYYY-?Www-?D? - pos += 1 - weekno = int(dt_str[pos:pos + 2]) - pos += 2 - - dayno = 1 - if len(dt_str) > pos: - if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: - raise ValueError('Inconsistent use of dash separator') - - pos += has_sep - - dayno = int(dt_str[pos:pos + 1]) - pos += 1 - - base_date = self._calculate_weekdate(year, weekno, dayno) - else: - # YYYYDDD or YYYY-DDD - if len(dt_str) - pos < 3: - raise ValueError('Invalid ordinal day') - - ordinal_day = int(dt_str[pos:pos + 3]) - pos += 3 - - if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): - raise ValueError('Invalid ordinal day' + - ' {} for year {}'.format(ordinal_day, year)) - - base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) - - components = [base_date.year, base_date.month, base_date.day] - return components, pos - - def _calculate_weekdate(self, year, week, day): - """ - Calculate the day of corresponding to the ISO year-week-day calendar. - - This function is effectively the inverse of - :func:`datetime.date.isocalendar`. 
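Reviewer note: ``parse_tzstr`` returns a ``tzoffset`` for non-zero offsets and, with the default ``zero_as_utc`` documented above, collapses ``Z`` and zero offsets to ``tzutc``. A quick sketch:

    >>> from dateutil.parser import isoparser
    >>> isoparser().parse_tzstr('-04:00')
    tzoffset(None, -14400)
    >>> isoparser().parse_tzstr('+00:00')
    tzutc()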
- - :param year: - The year in the ISO calendar - - :param week: - The week in the ISO calendar - range is [1, 53] - - :param day: - The day in the ISO calendar - range is [1 (MON), 7 (SUN)] - - :return: - Returns a :class:`datetime.date` - """ - if not 0 < week < 54: - raise ValueError('Invalid week: {}'.format(week)) - - if not 0 < day < 8: # Range is 1-7 - raise ValueError('Invalid weekday: {}'.format(day)) - - # Get week 1 for the specific year: - jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it - week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) - - # Now add the specific number of weeks and days to get what we want - week_offset = (week - 1) * 7 + (day - 1) - return week_1 + timedelta(days=week_offset) - - def _parse_isotime(self, timestr): - len_str = len(timestr) - components = [0, 0, 0, 0, None] - pos = 0 - comp = -1 - - if len(timestr) < 2: - raise ValueError('ISO time too short') - - has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP - - while pos < len_str and comp < 5: - comp += 1 - - if timestr[pos:pos + 1] in b'-+Z': - # Detect time zone boundary - components[-1] = self._parse_tzstr(timestr[pos:]) - pos = len_str - break - - if comp < 3: - # Hour, minute, second - components[comp] = int(timestr[pos:pos + 2]) - pos += 2 - if (has_sep and pos < len_str and - timestr[pos:pos + 1] == self._TIME_SEP): - pos += 1 - - if comp == 3: - # Microsecond - if timestr[pos:pos + 1] != self._MICRO_SEP: - continue - - pos += 1 - us_str = self._MICROSECOND_END_REGEX.split(timestr[pos:pos + 6], - 1)[0] - - components[comp] = int(us_str) * 10**(6 - len(us_str)) - pos += len(us_str) - - if pos < len_str: - raise ValueError('Unused components in ISO string') - - if components[0] == 24: - # Standard supports 00:00 and 24:00 as representations of midnight - if any(component != 0 for component in components[1:4]): - raise ValueError('Hour may only be 24 at 24:00:00.000') - components[0] = 0 - - return components - - def _parse_tzstr(self, tzstr, zero_as_utc=True): - if tzstr == b'Z': - return tz.tzutc() - - if len(tzstr) not in {3, 5, 6}: - raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') - - if tzstr[0:1] == b'-': - mult = -1 - elif tzstr[0:1] == b'+': - mult = 1 - else: - raise ValueError('Time zone offset requires sign') - - hours = int(tzstr[1:3]) - if len(tzstr) == 3: - minutes = 0 - else: - minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):]) - - if zero_as_utc and hours == 0 and minutes == 0: - return tz.tzutc() - else: - if minutes > 59: - raise ValueError('Invalid minutes in time zone offset') - - if hours > 23: - raise ValueError('Invalid hours in time zone offset') - - return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) - - -DEFAULT_ISOPARSER = isoparser() -isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/pype/vendor/dateutil/relativedelta.py b/pype/vendor/dateutil/relativedelta.py deleted file mode 100644 index 1e0d616532..0000000000 --- a/pype/vendor/dateutil/relativedelta.py +++ /dev/null @@ -1,590 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import calendar - -import operator -from math import copysign - -from six import integer_types -from warnings import warn - -from ._common import weekday - -MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) - -__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] - - -class relativedelta(object): - """ - The relativedelta type is based on the specification of the excellent - work done by M.-A. 
Lemburg in his
-    `mx.DateTime <https://www.egenix.com/products/python/mxBase/mxDateTime/>`_ extension.
-    However, notice that this type does *NOT* implement the same algorithm as
-    his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
-
-    There are two different ways to build a relativedelta instance. The
-    first one is passing it two date/datetime classes::
-
-        relativedelta(datetime1, datetime2)
-
-    The second one is passing it any number of the following keyword arguments::
-
-        relativedelta(arg1=x, arg2=y, arg3=z...)
-
-    year, month, day, hour, minute, second, microsecond:
-        Absolute information (argument is singular); adding or subtracting a
-        relativedelta with absolute information does not perform an arithmetic
-        operation, but rather REPLACES the corresponding value in the
-        original datetime with the value(s) in relativedelta.
-
-    years, months, weeks, days, hours, minutes, seconds, microseconds:
-        Relative information, may be negative (argument is plural); adding
-        or subtracting a relativedelta with relative information performs
-        the corresponding arithmetic operation on the original datetime value
-        with the information in the relativedelta.
-
-    weekday:
-        One of the weekday instances (MO, TU, etc). These
-        instances may receive a parameter N, specifying the Nth
-        weekday, which could be positive or negative (like MO(+1)
-        or MO(-2)). Not specifying it is the same as specifying
-        +1. You can also use an integer, where 0=MO. Notice that
-        if the calculated date is already Monday, for example,
-        using MO(1) or MO(-1) won't change the day.
-
-    leapdays:
-        Will add given days to the date found, if year is a leap
-        year, and the date found is post 28 of February.
-
-    yearday, nlyearday:
-        Set the yearday or the non-leap year day (jump leap days).
-        These are converted to day/month/leapdays information.
-
-    There are relative and absolute forms of the keyword
-    arguments. The plural is relative, and the singular is
-    absolute. For each argument in the order below, the absolute form
-    is applied first (by setting each attribute to that value) and
-    then the relative form (by adding the value to the attribute).
-
-    The order of attributes considered when this relativedelta is
-    added to a datetime is:
-
-    1. Year
-    2. Month
-    3. Day
-    4. Hours
-    5. Minutes
-    6. Seconds
-    7. Microseconds
-
-    Finally, weekday is applied, using the rule described above.
-
-    For example
-
-    >>> dt = datetime(2018, 4, 9, 13, 37, 0)
-    >>> delta = relativedelta(hours=25, day=1, weekday=MO(1))
-    >>> dt + delta
-    datetime.datetime(2018, 4, 2, 14, 37)
-
-    First, the day is set to 1 (the first of the month), then 25 hours
-    are added, to get to the 2nd day and 14th hour, finally the
-    weekday is applied, but since the 2nd is already a Monday there is
-    no effect.
-
-    """
-
-    def __init__(self, dt1=None, dt2=None,
-                 years=0, months=0, days=0, leapdays=0, weeks=0,
-                 hours=0, minutes=0, seconds=0, microseconds=0,
-                 year=None, month=None, day=None, weekday=None,
-                 yearday=None, nlyearday=None,
-                 hour=None, minute=None, second=None, microsecond=None):
-
-        if dt1 and dt2:
-            # datetime is a subclass of date.
So both must be date - if not (isinstance(dt1, datetime.date) and - isinstance(dt2, datetime.date)): - raise TypeError("relativedelta only diffs datetime/date") - - # We allow two dates, or two datetimes, so we coerce them to be - # of the same type - if (isinstance(dt1, datetime.datetime) != - isinstance(dt2, datetime.datetime)): - if not isinstance(dt1, datetime.datetime): - dt1 = datetime.datetime.fromordinal(dt1.toordinal()) - elif not isinstance(dt2, datetime.datetime): - dt2 = datetime.datetime.fromordinal(dt2.toordinal()) - - self.years = 0 - self.months = 0 - self.days = 0 - self.leapdays = 0 - self.hours = 0 - self.minutes = 0 - self.seconds = 0 - self.microseconds = 0 - self.year = None - self.month = None - self.day = None - self.weekday = None - self.hour = None - self.minute = None - self.second = None - self.microsecond = None - self._has_time = 0 - - # Get year / month delta between the two - months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) - self._set_months(months) - - # Remove the year/month delta so the timedelta is just well-defined - # time units (seconds, days and microseconds) - dtm = self.__radd__(dt2) - - # If we've overshot our target, make an adjustment - if dt1 < dt2: - compare = operator.gt - increment = 1 - else: - compare = operator.lt - increment = -1 - - while compare(dt1, dtm): - months += increment - self._set_months(months) - dtm = self.__radd__(dt2) - - # Get the timedelta between the "months-adjusted" date and dt1 - delta = dt1 - dtm - self.seconds = delta.seconds + delta.days * 86400 - self.microseconds = delta.microseconds - else: - # Check for non-integer values in integer-only quantities - if any(x is not None and x != int(x) for x in (years, months)): - raise ValueError("Non-integer years and months are " - "ambiguous and not currently supported.") - - # Relative information - self.years = int(years) - self.months = int(months) - self.days = days + weeks * 7 - self.leapdays = leapdays - self.hours = hours - self.minutes = minutes - self.seconds = seconds - self.microseconds = microseconds - - # Absolute information - self.year = year - self.month = month - self.day = day - self.hour = hour - self.minute = minute - self.second = second - self.microsecond = microsecond - - if any(x is not None and int(x) != x - for x in (year, month, day, hour, - minute, second, microsecond)): - # For now we'll deprecate floats - later it'll be an error. - warn("Non-integer value passed as absolute information. 
" + - "This is not a well-defined condition and will raise " + - "errors in future versions.", DeprecationWarning) - - if isinstance(weekday, integer_types): - self.weekday = weekdays[weekday] - else: - self.weekday = weekday - - yday = 0 - if nlyearday: - yday = nlyearday - elif yearday: - yday = yearday - if yearday > 59: - self.leapdays = -1 - if yday: - ydayidx = [31, 59, 90, 120, 151, 181, 212, - 243, 273, 304, 334, 366] - for idx, ydays in enumerate(ydayidx): - if yday <= ydays: - self.month = idx+1 - if idx == 0: - self.day = yday - else: - self.day = yday-ydayidx[idx-1] - break - else: - raise ValueError("invalid year day (%d)" % yday) - - self._fix() - - def _fix(self): - if abs(self.microseconds) > 999999: - s = _sign(self.microseconds) - div, mod = divmod(self.microseconds * s, 1000000) - self.microseconds = mod * s - self.seconds += div * s - if abs(self.seconds) > 59: - s = _sign(self.seconds) - div, mod = divmod(self.seconds * s, 60) - self.seconds = mod * s - self.minutes += div * s - if abs(self.minutes) > 59: - s = _sign(self.minutes) - div, mod = divmod(self.minutes * s, 60) - self.minutes = mod * s - self.hours += div * s - if abs(self.hours) > 23: - s = _sign(self.hours) - div, mod = divmod(self.hours * s, 24) - self.hours = mod * s - self.days += div * s - if abs(self.months) > 11: - s = _sign(self.months) - div, mod = divmod(self.months * s, 12) - self.months = mod * s - self.years += div * s - if (self.hours or self.minutes or self.seconds or self.microseconds - or self.hour is not None or self.minute is not None or - self.second is not None or self.microsecond is not None): - self._has_time = 1 - else: - self._has_time = 0 - - @property - def weeks(self): - return int(self.days / 7.0) - - @weeks.setter - def weeks(self, value): - self.days = self.days - (self.weeks * 7) + value * 7 - - def _set_months(self, months): - self.months = months - if abs(self.months) > 11: - s = _sign(self.months) - div, mod = divmod(self.months * s, 12) - self.months = mod * s - self.years = div * s - else: - self.years = 0 - - def normalized(self): - """ - Return a version of this object represented entirely using integer - values for the relative attributes. - - >>> relativedelta(days=1.5, hours=2).normalized() - relativedelta(days=1, hours=14) - - :return: - Returns a :class:`dateutil.relativedelta.relativedelta` object. 
- """ - # Cascade remainders down (rounding each to roughly nearest microsecond) - days = int(self.days) - - hours_f = round(self.hours + 24 * (self.days - days), 11) - hours = int(hours_f) - - minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) - minutes = int(minutes_f) - - seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) - seconds = int(seconds_f) - - microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) - - # Constructor carries overflow back up with call to _fix() - return self.__class__(years=self.years, months=self.months, - days=days, hours=hours, minutes=minutes, - seconds=seconds, microseconds=microseconds, - leapdays=self.leapdays, year=self.year, - month=self.month, day=self.day, - weekday=self.weekday, hour=self.hour, - minute=self.minute, second=self.second, - microsecond=self.microsecond) - - def __add__(self, other): - if isinstance(other, relativedelta): - return self.__class__(years=other.years + self.years, - months=other.months + self.months, - days=other.days + self.days, - hours=other.hours + self.hours, - minutes=other.minutes + self.minutes, - seconds=other.seconds + self.seconds, - microseconds=(other.microseconds + - self.microseconds), - leapdays=other.leapdays or self.leapdays, - year=(other.year if other.year is not None - else self.year), - month=(other.month if other.month is not None - else self.month), - day=(other.day if other.day is not None - else self.day), - weekday=(other.weekday if other.weekday is not None - else self.weekday), - hour=(other.hour if other.hour is not None - else self.hour), - minute=(other.minute if other.minute is not None - else self.minute), - second=(other.second if other.second is not None - else self.second), - microsecond=(other.microsecond if other.microsecond - is not None else - self.microsecond)) - if isinstance(other, datetime.timedelta): - return self.__class__(years=self.years, - months=self.months, - days=self.days + other.days, - hours=self.hours, - minutes=self.minutes, - seconds=self.seconds + other.seconds, - microseconds=self.microseconds + other.microseconds, - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - if not isinstance(other, datetime.date): - return NotImplemented - elif self._has_time and not isinstance(other, datetime.datetime): - other = datetime.datetime.fromordinal(other.toordinal()) - year = (self.year or other.year)+self.years - month = self.month or other.month - if self.months: - assert 1 <= abs(self.months) <= 12 - month += self.months - if month > 12: - year += 1 - month -= 12 - elif month < 1: - year -= 1 - month += 12 - day = min(calendar.monthrange(year, month)[1], - self.day or other.day) - repl = {"year": year, "month": month, "day": day} - for attr in ["hour", "minute", "second", "microsecond"]: - value = getattr(self, attr) - if value is not None: - repl[attr] = value - days = self.days - if self.leapdays and month > 2 and calendar.isleap(year): - days += self.leapdays - ret = (other.replace(**repl) - + datetime.timedelta(days=days, - hours=self.hours, - minutes=self.minutes, - seconds=self.seconds, - microseconds=self.microseconds)) - if self.weekday: - weekday, nth = self.weekday.weekday, self.weekday.n or 1 - jumpdays = (abs(nth) - 1) * 7 - if nth > 0: - jumpdays += (7 - ret.weekday() + weekday) % 7 - else: - jumpdays += (ret.weekday() - weekday) % 7 - jumpdays *= -1 - ret += 
datetime.timedelta(days=jumpdays) - return ret - - def __radd__(self, other): - return self.__add__(other) - - def __rsub__(self, other): - return self.__neg__().__radd__(other) - - def __sub__(self, other): - if not isinstance(other, relativedelta): - return NotImplemented # In case the other object defines __rsub__ - return self.__class__(years=self.years - other.years, - months=self.months - other.months, - days=self.days - other.days, - hours=self.hours - other.hours, - minutes=self.minutes - other.minutes, - seconds=self.seconds - other.seconds, - microseconds=self.microseconds - other.microseconds, - leapdays=self.leapdays or other.leapdays, - year=(self.year if self.year is not None - else other.year), - month=(self.month if self.month is not None else - other.month), - day=(self.day if self.day is not None else - other.day), - weekday=(self.weekday if self.weekday is not None else - other.weekday), - hour=(self.hour if self.hour is not None else - other.hour), - minute=(self.minute if self.minute is not None else - other.minute), - second=(self.second if self.second is not None else - other.second), - microsecond=(self.microsecond if self.microsecond - is not None else - other.microsecond)) - - def __abs__(self): - return self.__class__(years=abs(self.years), - months=abs(self.months), - days=abs(self.days), - hours=abs(self.hours), - minutes=abs(self.minutes), - seconds=abs(self.seconds), - microseconds=abs(self.microseconds), - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - def __neg__(self): - return self.__class__(years=-self.years, - months=-self.months, - days=-self.days, - hours=-self.hours, - minutes=-self.minutes, - seconds=-self.seconds, - microseconds=-self.microseconds, - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - def __bool__(self): - return not (not self.years and - not self.months and - not self.days and - not self.hours and - not self.minutes and - not self.seconds and - not self.microseconds and - not self.leapdays and - self.year is None and - self.month is None and - self.day is None and - self.weekday is None and - self.hour is None and - self.minute is None and - self.second is None and - self.microsecond is None) - # Compatibility with Python 2.x - __nonzero__ = __bool__ - - def __mul__(self, other): - try: - f = float(other) - except TypeError: - return NotImplemented - - return self.__class__(years=int(self.years * f), - months=int(self.months * f), - days=int(self.days * f), - hours=int(self.hours * f), - minutes=int(self.minutes * f), - seconds=int(self.seconds * f), - microseconds=int(self.microseconds * f), - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - __rmul__ = __mul__ - - def __eq__(self, other): - if not isinstance(other, relativedelta): - return NotImplemented - if self.weekday or other.weekday: - if not self.weekday or not other.weekday: - return False - if self.weekday.weekday != other.weekday.weekday: - return False - n1, n2 = self.weekday.n, other.weekday.n - if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): - return False - return (self.years == other.years and - 
self.months == other.months and - self.days == other.days and - self.hours == other.hours and - self.minutes == other.minutes and - self.seconds == other.seconds and - self.microseconds == other.microseconds and - self.leapdays == other.leapdays and - self.year == other.year and - self.month == other.month and - self.day == other.day and - self.hour == other.hour and - self.minute == other.minute and - self.second == other.second and - self.microsecond == other.microsecond) - - def __hash__(self): - return hash(( - self.weekday, - self.years, - self.months, - self.days, - self.hours, - self.minutes, - self.seconds, - self.microseconds, - self.leapdays, - self.year, - self.month, - self.day, - self.hour, - self.minute, - self.second, - self.microsecond, - )) - - def __ne__(self, other): - return not self.__eq__(other) - - def __div__(self, other): - try: - reciprocal = 1 / float(other) - except TypeError: - return NotImplemented - - return self.__mul__(reciprocal) - - __truediv__ = __div__ - - def __repr__(self): - l = [] - for attr in ["years", "months", "days", "leapdays", - "hours", "minutes", "seconds", "microseconds"]: - value = getattr(self, attr) - if value: - l.append("{attr}={value:+g}".format(attr=attr, value=value)) - for attr in ["year", "month", "day", "weekday", - "hour", "minute", "second", "microsecond"]: - value = getattr(self, attr) - if value is not None: - l.append("{attr}={value}".format(attr=attr, value=repr(value))) - return "{classname}({attrs})".format(classname=self.__class__.__name__, - attrs=", ".join(l)) - - -def _sign(x): - return int(copysign(1, x)) - -# vim:ts=4:sw=4:et diff --git a/pype/vendor/dateutil/rrule.py b/pype/vendor/dateutil/rrule.py deleted file mode 100644 index 8e9c2af184..0000000000 --- a/pype/vendor/dateutil/rrule.py +++ /dev/null @@ -1,1672 +0,0 @@ -# -*- coding: utf-8 -*- -""" -The rrule module offers a small, complete, and very fast, implementation of -the recurrence rules documented in the -`iCalendar RFC `_, -including support for caching of results. -""" -import itertools -import datetime -import calendar -import re -import sys - -try: - from math import gcd -except ImportError: - from fractions import gcd - -from six import advance_iterator, integer_types -from six.moves import _thread, range -import heapq - -from ._common import weekday as weekdaybase -from .tz import tzutc, tzlocal - -# For warning about deprecation of until and count -from warnings import warn - -__all__ = ["rrule", "rruleset", "rrulestr", - "YEARLY", "MONTHLY", "WEEKLY", "DAILY", - "HOURLY", "MINUTELY", "SECONDLY", - "MO", "TU", "WE", "TH", "FR", "SA", "SU"] - -# Every mask is 7 days longer to handle cross-year weekly periods. 
-M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + - [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) -M365MASK = list(M366MASK) -M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) -MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) -MDAY365MASK = list(MDAY366MASK) -M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) -NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) -NMDAY365MASK = list(NMDAY366MASK) -M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) -M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) -WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 -del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] -MDAY365MASK = tuple(MDAY365MASK) -M365MASK = tuple(M365MASK) - -FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] - -(YEARLY, - MONTHLY, - WEEKLY, - DAILY, - HOURLY, - MINUTELY, - SECONDLY) = list(range(7)) - -# Imported on demand. -easter = None -parser = None - - -class weekday(weekdaybase): - """ - This version of weekday does not allow n = 0. - """ - def __init__(self, wkday, n=None): - if n == 0: - raise ValueError("Can't create weekday with n==0") - - super(weekday, self).__init__(wkday, n) - - -MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) - - -def _invalidates_cache(f): - """ - Decorator for rruleset methods which may invalidate the - cached length. - """ - def inner_func(self, *args, **kwargs): - rv = f(self, *args, **kwargs) - self._invalidate_cache() - return rv - - return inner_func - - -class rrulebase(object): - def __init__(self, cache=False): - if cache: - self._cache = [] - self._cache_lock = _thread.allocate_lock() - self._invalidate_cache() - else: - self._cache = None - self._cache_complete = False - self._len = None - - def __iter__(self): - if self._cache_complete: - return iter(self._cache) - elif self._cache is None: - return self._iter() - else: - return self._iter_cached() - - def _invalidate_cache(self): - if self._cache is not None: - self._cache = [] - self._cache_complete = False - self._cache_gen = self._iter() - - if self._cache_lock.locked(): - self._cache_lock.release() - - self._len = None - - def _iter_cached(self): - i = 0 - gen = self._cache_gen - cache = self._cache - acquire = self._cache_lock.acquire - release = self._cache_lock.release - while gen: - if i == len(cache): - acquire() - if self._cache_complete: - break - try: - for j in range(10): - cache.append(advance_iterator(gen)) - except StopIteration: - self._cache_gen = gen = None - self._cache_complete = True - break - release() - yield cache[i] - i += 1 - while i < self._len: - yield cache[i] - i += 1 - - def __getitem__(self, item): - if self._cache_complete: - return self._cache[item] - elif isinstance(item, slice): - if item.step and item.step < 0: - return list(iter(self))[item] - else: - return list(itertools.islice(self, - item.start or 0, - item.stop or sys.maxsize, - item.step or 1)) - elif item >= 0: - gen = iter(self) - try: - for i in range(item+1): - res = advance_iterator(gen) - except StopIteration: - raise IndexError - return res - else: - return list(iter(self))[item] - - def __contains__(self, item): - if self._cache_complete: - return item in self._cache - else: - for i in self: - if i == item: - return True - elif i > item: - return False - return False - - # __len__() introduces a large performance penality. 
- def count(self): - """ Returns the number of recurrences in this set. It will have go - trough the whole recurrence, if this hasn't been done before. """ - if self._len is None: - for x in self: - pass - return self._len - - def before(self, dt, inc=False): - """ Returns the last recurrence before the given datetime instance. The - inc keyword defines what happens if dt is an occurrence. With - inc=True, if dt itself is an occurrence, it will be returned. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - last = None - if inc: - for i in gen: - if i > dt: - break - last = i - else: - for i in gen: - if i >= dt: - break - last = i - return last - - def after(self, dt, inc=False): - """ Returns the first recurrence after the given datetime instance. The - inc keyword defines what happens if dt is an occurrence. With - inc=True, if dt itself is an occurrence, it will be returned. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - if inc: - for i in gen: - if i >= dt: - return i - else: - for i in gen: - if i > dt: - return i - return None - - def xafter(self, dt, count=None, inc=False): - """ - Generator which yields up to `count` recurrences after the given - datetime instance, equivalent to `after`. - - :param dt: - The datetime at which to start generating recurrences. - - :param count: - The maximum number of recurrences to generate. If `None` (default), - dates are generated until the recurrence rule is exhausted. - - :param inc: - If `dt` is an instance of the rule and `inc` is `True`, it is - included in the output. - - :yields: Yields a sequence of `datetime` objects. - """ - - if self._cache_complete: - gen = self._cache - else: - gen = self - - # Select the comparison function - if inc: - comp = lambda dc, dtc: dc >= dtc - else: - comp = lambda dc, dtc: dc > dtc - - # Generate dates - n = 0 - for d in gen: - if comp(d, dt): - if count is not None: - n += 1 - if n > count: - break - - yield d - - def between(self, after, before, inc=False, count=1): - """ Returns all the occurrences of the rrule between after and before. - The inc keyword defines what happens if after and/or before are - themselves occurrences. With inc=True, they will be included in the - list, if they are found in the recurrence set. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - started = False - l = [] - if inc: - for i in gen: - if i > before: - break - elif not started: - if i >= after: - started = True - l.append(i) - else: - l.append(i) - else: - for i in gen: - if i >= before: - break - elif not started: - if i > after: - started = True - l.append(i) - else: - l.append(i) - return l - - -class rrule(rrulebase): - """ - That's the base of the rrule operation. It accepts all the keywords - defined in the RFC as its constructor parameters (except byday, - which was renamed to byweekday) and more. The constructor prototype is:: - - rrule(freq) - - Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, - or SECONDLY. - - .. note:: - Per RFC section 3.3.10, recurrence instances falling on invalid dates - and times are ignored rather than coerced: - - Recurrence rules may generate recurrence instances with an invalid - date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM - on a day where the local time is moved forward by an hour at 1:00 - AM). Such recurrence instances MUST be ignored and MUST NOT be - counted as part of the recurrence set. 
- - This can lead to possibly surprising behavior when, for example, the - start date occurs at the end of the month: - - >>> from dateutil.rrule import rrule, MONTHLY - >>> from datetime import datetime - >>> start_date = datetime(2014, 12, 31) - >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) - ... # doctest: +NORMALIZE_WHITESPACE - [datetime.datetime(2014, 12, 31, 0, 0), - datetime.datetime(2015, 1, 31, 0, 0), - datetime.datetime(2015, 3, 31, 0, 0), - datetime.datetime(2015, 5, 31, 0, 0)] - - Additionally, it supports the following keyword arguments: - - :param dtstart: - The recurrence start. Besides being the base for the recurrence, - missing parameters in the final recurrence instances will also be - extracted from this date. If not given, datetime.now() will be used - instead. - :param interval: - The interval between each freq iteration. For example, when using - YEARLY, an interval of 2 means once every two years, but with HOURLY, - it means once every two hours. The default interval is 1. - :param wkst: - The week start day. Must be one of the MO, TU, WE constants, or an - integer, specifying the first day of the week. This will affect - recurrences based on weekly periods. The default week start is got - from calendar.firstweekday(), and may be modified by - calendar.setfirstweekday(). - :param count: - How many occurrences will be generated. - - .. note:: - As of version 2.5.0, the use of the ``until`` keyword together - with the ``count`` keyword is deprecated per RFC-5545 Sec. 3.3.10. - :param until: - If given, this must be a datetime instance, that will specify the - limit of the recurrence. The last recurrence in the rule is the greatest - datetime that is less than or equal to the value specified in the - ``until`` parameter. - - .. note:: - As of version 2.5.0, the use of the ``until`` keyword together - with the ``count`` keyword is deprecated per RFC-5545 Sec. 3.3.10. - :param bysetpos: - If given, it must be either an integer, or a sequence of integers, - positive or negative. Each given integer will specify an occurrence - number, corresponding to the nth occurrence of the rule inside the - frequency period. For example, a bysetpos of -1 if combined with a - MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will - result in the last work day of every month. - :param bymonth: - If given, it must be either an integer, or a sequence of integers, - meaning the months to apply the recurrence to. - :param bymonthday: - If given, it must be either an integer, or a sequence of integers, - meaning the month days to apply the recurrence to. - :param byyearday: - If given, it must be either an integer, or a sequence of integers, - meaning the year days to apply the recurrence to. - :param byeaster: - If given, it must be either an integer, or a sequence of integers, - positive or negative. Each integer will define an offset from the - Easter Sunday. Passing the offset 0 to byeaster will yield the Easter - Sunday itself. This is an extension to the RFC specification. - :param byweekno: - If given, it must be either an integer, or a sequence of integers, - meaning the week numbers to apply the recurrence to. Week numbers - have the meaning described in ISO8601, that is, the first week of - the year is that containing at least four days of the new year. - :param byweekday: - If given, it must be either an integer (0 == MO), a sequence of - integers, one of the weekday constants (MO, TU, etc), or a sequence - of these constants. 
When given, these variables will define the - weekdays where the recurrence will be applied. It's also possible to - use an argument n for the weekday instances, which will mean the nth - occurrence of this weekday in the period. For example, with MONTHLY, - or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the - first friday of the month where the recurrence happens. Notice that in - the RFC documentation, this is specified as BYDAY, but was renamed to - avoid the ambiguity of that keyword. - :param byhour: - If given, it must be either an integer, or a sequence of integers, - meaning the hours to apply the recurrence to. - :param byminute: - If given, it must be either an integer, or a sequence of integers, - meaning the minutes to apply the recurrence to. - :param bysecond: - If given, it must be either an integer, or a sequence of integers, - meaning the seconds to apply the recurrence to. - :param cache: - If given, it must be a boolean value specifying to enable or disable - caching of results. If you will use the same rrule instance multiple - times, enabling caching will improve the performance considerably. - """ - def __init__(self, freq, dtstart=None, - interval=1, wkst=None, count=None, until=None, bysetpos=None, - bymonth=None, bymonthday=None, byyearday=None, byeaster=None, - byweekno=None, byweekday=None, - byhour=None, byminute=None, bysecond=None, - cache=False): - super(rrule, self).__init__(cache) - global easter - if not dtstart: - if until and until.tzinfo: - dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) - else: - dtstart = datetime.datetime.now().replace(microsecond=0) - elif not isinstance(dtstart, datetime.datetime): - dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) - else: - dtstart = dtstart.replace(microsecond=0) - self._dtstart = dtstart - self._tzinfo = dtstart.tzinfo - self._freq = freq - self._interval = interval - self._count = count - - # Cache the original byxxx rules, if they are provided, as the _byxxx - # attributes do not necessarily map to the inputs, and this can be - # a problem in generating the strings. Only store things if they've - # been supplied (the string retrieval will just use .get()) - self._original_rule = {} - - if until and not isinstance(until, datetime.datetime): - until = datetime.datetime.fromordinal(until.toordinal()) - self._until = until - - if self._dtstart and self._until: - if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): - # According to RFC5545 Section 3.3.10: - # https://tools.ietf.org/html/rfc5545#section-3.3.10 - # - # > If the "DTSTART" property is specified as a date with UTC - # > time or a date with local time and time zone reference, - # > then the UNTIL rule part MUST be specified as a date with - # > UTC time. - raise ValueError( - 'RRULE UNTIL values must be specified in UTC when DTSTART ' - 'is timezone-aware' - ) - - if count is not None and until: - warn("Using both 'count' and 'until' is inconsistent with RFC 5545" - " and has been deprecated in dateutil. 
Future versions will " - "raise an error.", DeprecationWarning) - - if wkst is None: - self._wkst = calendar.firstweekday() - elif isinstance(wkst, integer_types): - self._wkst = wkst - else: - self._wkst = wkst.weekday - - if bysetpos is None: - self._bysetpos = None - elif isinstance(bysetpos, integer_types): - if bysetpos == 0 or not (-366 <= bysetpos <= 366): - raise ValueError("bysetpos must be between 1 and 366, " - "or between -366 and -1") - self._bysetpos = (bysetpos,) - else: - self._bysetpos = tuple(bysetpos) - for pos in self._bysetpos: - if pos == 0 or not (-366 <= pos <= 366): - raise ValueError("bysetpos must be between 1 and 366, " - "or between -366 and -1") - - if self._bysetpos: - self._original_rule['bysetpos'] = self._bysetpos - - if (byweekno is None and byyearday is None and bymonthday is None and - byweekday is None and byeaster is None): - if freq == YEARLY: - if bymonth is None: - bymonth = dtstart.month - self._original_rule['bymonth'] = None - bymonthday = dtstart.day - self._original_rule['bymonthday'] = None - elif freq == MONTHLY: - bymonthday = dtstart.day - self._original_rule['bymonthday'] = None - elif freq == WEEKLY: - byweekday = dtstart.weekday() - self._original_rule['byweekday'] = None - - # bymonth - if bymonth is None: - self._bymonth = None - else: - if isinstance(bymonth, integer_types): - bymonth = (bymonth,) - - self._bymonth = tuple(sorted(set(bymonth))) - - if 'bymonth' not in self._original_rule: - self._original_rule['bymonth'] = self._bymonth - - # byyearday - if byyearday is None: - self._byyearday = None - else: - if isinstance(byyearday, integer_types): - byyearday = (byyearday,) - - self._byyearday = tuple(sorted(set(byyearday))) - self._original_rule['byyearday'] = self._byyearday - - # byeaster - if byeaster is not None: - if not easter: - from dateutil import easter - if isinstance(byeaster, integer_types): - self._byeaster = (byeaster,) - else: - self._byeaster = tuple(sorted(byeaster)) - - self._original_rule['byeaster'] = self._byeaster - else: - self._byeaster = None - - # bymonthday - if bymonthday is None: - self._bymonthday = () - self._bynmonthday = () - else: - if isinstance(bymonthday, integer_types): - bymonthday = (bymonthday,) - - bymonthday = set(bymonthday) # Ensure it's unique - - self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) - self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) - - # Storing positive numbers first, then negative numbers - if 'bymonthday' not in self._original_rule: - self._original_rule['bymonthday'] = tuple( - itertools.chain(self._bymonthday, self._bynmonthday)) - - # byweekno - if byweekno is None: - self._byweekno = None - else: - if isinstance(byweekno, integer_types): - byweekno = (byweekno,) - - self._byweekno = tuple(sorted(set(byweekno))) - - self._original_rule['byweekno'] = self._byweekno - - # byweekday / bynweekday - if byweekday is None: - self._byweekday = None - self._bynweekday = None - else: - # If it's one of the valid non-sequence types, convert to a - # single-element sequence before the iterator that builds the - # byweekday set. 
- if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): - byweekday = (byweekday,) - - self._byweekday = set() - self._bynweekday = set() - for wday in byweekday: - if isinstance(wday, integer_types): - self._byweekday.add(wday) - elif not wday.n or freq > MONTHLY: - self._byweekday.add(wday.weekday) - else: - self._bynweekday.add((wday.weekday, wday.n)) - - if not self._byweekday: - self._byweekday = None - elif not self._bynweekday: - self._bynweekday = None - - if self._byweekday is not None: - self._byweekday = tuple(sorted(self._byweekday)) - orig_byweekday = [weekday(x) for x in self._byweekday] - else: - orig_byweekday = () - - if self._bynweekday is not None: - self._bynweekday = tuple(sorted(self._bynweekday)) - orig_bynweekday = [weekday(*x) for x in self._bynweekday] - else: - orig_bynweekday = () - - if 'byweekday' not in self._original_rule: - self._original_rule['byweekday'] = tuple(itertools.chain( - orig_byweekday, orig_bynweekday)) - - # byhour - if byhour is None: - if freq < HOURLY: - self._byhour = {dtstart.hour} - else: - self._byhour = None - else: - if isinstance(byhour, integer_types): - byhour = (byhour,) - - if freq == HOURLY: - self._byhour = self.__construct_byset(start=dtstart.hour, - byxxx=byhour, - base=24) - else: - self._byhour = set(byhour) - - self._byhour = tuple(sorted(self._byhour)) - self._original_rule['byhour'] = self._byhour - - # byminute - if byminute is None: - if freq < MINUTELY: - self._byminute = {dtstart.minute} - else: - self._byminute = None - else: - if isinstance(byminute, integer_types): - byminute = (byminute,) - - if freq == MINUTELY: - self._byminute = self.__construct_byset(start=dtstart.minute, - byxxx=byminute, - base=60) - else: - self._byminute = set(byminute) - - self._byminute = tuple(sorted(self._byminute)) - self._original_rule['byminute'] = self._byminute - - # bysecond - if bysecond is None: - if freq < SECONDLY: - self._bysecond = ((dtstart.second,)) - else: - self._bysecond = None - else: - if isinstance(bysecond, integer_types): - bysecond = (bysecond,) - - self._bysecond = set(bysecond) - - if freq == SECONDLY: - self._bysecond = self.__construct_byset(start=dtstart.second, - byxxx=bysecond, - base=60) - else: - self._bysecond = set(bysecond) - - self._bysecond = tuple(sorted(self._bysecond)) - self._original_rule['bysecond'] = self._bysecond - - if self._freq >= HOURLY: - self._timeset = None - else: - self._timeset = [] - for hour in self._byhour: - for minute in self._byminute: - for second in self._bysecond: - self._timeset.append( - datetime.time(hour, minute, second, - tzinfo=self._tzinfo)) - self._timeset.sort() - self._timeset = tuple(self._timeset) - - def __str__(self): - """ - Output a string that would generate this RRULE if passed to rrulestr. - This is mostly compatible with RFC5545, except for the - dateutil-specific extension BYEASTER. 
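Reviewer note: ``__str__`` (below) emits a DTSTART line plus the RRULE line, suitable for feeding back into ``rrulestr``; a quick sketch, assuming upstream semantics:

    >>> from datetime import datetime
    >>> from dateutil.rrule import rrule, WEEKLY
    >>> print(rrule(WEEKLY, count=3, dtstart=datetime(2018, 1, 1)))
    DTSTART:20180101T000000
    RRULE:FREQ=WEEKLY;COUNT=3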
- """ - - output = [] - h, m, s = [None] * 3 - if self._dtstart: - output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) - h, m, s = self._dtstart.timetuple()[3:6] - - parts = ['FREQ=' + FREQNAMES[self._freq]] - if self._interval != 1: - parts.append('INTERVAL=' + str(self._interval)) - - if self._wkst: - parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) - - if self._count is not None: - parts.append('COUNT=' + str(self._count)) - - if self._until: - parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) - - if self._original_rule.get('byweekday') is not None: - # The str() method on weekday objects doesn't generate - # RFC5545-compliant strings, so we should modify that. - original_rule = dict(self._original_rule) - wday_strings = [] - for wday in original_rule['byweekday']: - if wday.n: - wday_strings.append('{n:+d}{wday}'.format( - n=wday.n, - wday=repr(wday)[0:2])) - else: - wday_strings.append(repr(wday)) - - original_rule['byweekday'] = wday_strings - else: - original_rule = self._original_rule - - partfmt = '{name}={vals}' - for name, key in [('BYSETPOS', 'bysetpos'), - ('BYMONTH', 'bymonth'), - ('BYMONTHDAY', 'bymonthday'), - ('BYYEARDAY', 'byyearday'), - ('BYWEEKNO', 'byweekno'), - ('BYDAY', 'byweekday'), - ('BYHOUR', 'byhour'), - ('BYMINUTE', 'byminute'), - ('BYSECOND', 'bysecond'), - ('BYEASTER', 'byeaster')]: - value = original_rule.get(key) - if value: - parts.append(partfmt.format(name=name, vals=(','.join(str(v) - for v in value)))) - - output.append('RRULE:' + ';'.join(parts)) - return '\n'.join(output) - - def replace(self, **kwargs): - """Return new rrule with same attributes except for those attributes given new - values by whichever keyword arguments are specified.""" - new_kwargs = {"interval": self._interval, - "count": self._count, - "dtstart": self._dtstart, - "freq": self._freq, - "until": self._until, - "wkst": self._wkst, - "cache": False if self._cache is None else True } - new_kwargs.update(self._original_rule) - new_kwargs.update(kwargs) - return rrule(**new_kwargs) - - def _iter(self): - year, month, day, hour, minute, second, weekday, yearday, _ = \ - self._dtstart.timetuple() - - # Some local variables to speed things up a bit - freq = self._freq - interval = self._interval - wkst = self._wkst - until = self._until - bymonth = self._bymonth - byweekno = self._byweekno - byyearday = self._byyearday - byweekday = self._byweekday - byeaster = self._byeaster - bymonthday = self._bymonthday - bynmonthday = self._bynmonthday - bysetpos = self._bysetpos - byhour = self._byhour - byminute = self._byminute - bysecond = self._bysecond - - ii = _iterinfo(self) - ii.rebuild(year, month) - - getdayset = {YEARLY: ii.ydayset, - MONTHLY: ii.mdayset, - WEEKLY: ii.wdayset, - DAILY: ii.ddayset, - HOURLY: ii.ddayset, - MINUTELY: ii.ddayset, - SECONDLY: ii.ddayset}[freq] - - if freq < HOURLY: - timeset = self._timeset - else: - gettimeset = {HOURLY: ii.htimeset, - MINUTELY: ii.mtimeset, - SECONDLY: ii.stimeset}[freq] - if ((freq >= HOURLY and - self._byhour and hour not in self._byhour) or - (freq >= MINUTELY and - self._byminute and minute not in self._byminute) or - (freq >= SECONDLY and - self._bysecond and second not in self._bysecond)): - timeset = () - else: - timeset = gettimeset(hour, minute, second) - - total = 0 - count = self._count - while True: - # Get dayset with the right frequency - dayset, start, end = getdayset(year, month, day) - - # Do the "hard" work ;-) - filtered = False - for i in dayset[start:end]: - if ((bymonth and ii.mmask[i] not in 
bymonth) or - (byweekno and not ii.wnomask[i]) or - (byweekday and ii.wdaymask[i] not in byweekday) or - (ii.nwdaymask and not ii.nwdaymask[i]) or - (byeaster and not ii.eastermask[i]) or - ((bymonthday or bynmonthday) and - ii.mdaymask[i] not in bymonthday and - ii.nmdaymask[i] not in bynmonthday) or - (byyearday and - ((i < ii.yearlen and i+1 not in byyearday and - -ii.yearlen+i not in byyearday) or - (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and - -ii.nextyearlen+i-ii.yearlen not in byyearday)))): - dayset[i] = None - filtered = True - - # Output results - if bysetpos and timeset: - poslist = [] - for pos in bysetpos: - if pos < 0: - daypos, timepos = divmod(pos, len(timeset)) - else: - daypos, timepos = divmod(pos-1, len(timeset)) - try: - i = [x for x in dayset[start:end] - if x is not None][daypos] - time = timeset[timepos] - except IndexError: - pass - else: - date = datetime.date.fromordinal(ii.yearordinal+i) - res = datetime.datetime.combine(date, time) - if res not in poslist: - poslist.append(res) - poslist.sort() - for res in poslist: - if until and res > until: - self._len = total - return - elif res >= self._dtstart: - if count is not None: - count -= 1 - if count < 0: - self._len = total - return - total += 1 - yield res - else: - for i in dayset[start:end]: - if i is not None: - date = datetime.date.fromordinal(ii.yearordinal + i) - for time in timeset: - res = datetime.datetime.combine(date, time) - if until and res > until: - self._len = total - return - elif res >= self._dtstart: - if count is not None: - count -= 1 - if count < 0: - self._len = total - return - - total += 1 - yield res - - # Handle frequency and interval - fixday = False - if freq == YEARLY: - year += interval - if year > datetime.MAXYEAR: - self._len = total - return - ii.rebuild(year, month) - elif freq == MONTHLY: - month += interval - if month > 12: - div, mod = divmod(month, 12) - month = mod - year += div - if month == 0: - month = 12 - year -= 1 - if year > datetime.MAXYEAR: - self._len = total - return - ii.rebuild(year, month) - elif freq == WEEKLY: - if wkst > weekday: - day += -(weekday+1+(6-wkst))+self._interval*7 - else: - day += -(weekday-wkst)+self._interval*7 - weekday = wkst - fixday = True - elif freq == DAILY: - day += interval - fixday = True - elif freq == HOURLY: - if filtered: - # Jump to one iteration before next day - hour += ((23-hour)//interval)*interval - - if byhour: - ndays, hour = self.__mod_distance(value=hour, - byxxx=self._byhour, - base=24) - else: - ndays, hour = divmod(hour+interval, 24) - - if ndays: - day += ndays - fixday = True - - timeset = gettimeset(hour, minute, second) - elif freq == MINUTELY: - if filtered: - # Jump to one iteration before next day - minute += ((1439-(hour*60+minute))//interval)*interval - - valid = False - rep_rate = (24*60) - for j in range(rep_rate // gcd(interval, rep_rate)): - if byminute: - nhours, minute = \ - self.__mod_distance(value=minute, - byxxx=self._byminute, - base=60) - else: - nhours, minute = divmod(minute+interval, 60) - - div, hour = divmod(hour+nhours, 24) - if div: - day += div - fixday = True - filtered = False - - if not byhour or hour in byhour: - valid = True - break - - if not valid: - raise ValueError('Invalid combination of interval and ' + - 'byhour resulting in empty rule.') - - timeset = gettimeset(hour, minute, second) - elif freq == SECONDLY: - if filtered: - # Jump to one iteration before next day - second += (((86399 - (hour * 3600 + minute * 60 + second)) - // interval) * interval) - - 
rep_rate = (24 * 3600) - valid = False - for j in range(0, rep_rate // gcd(interval, rep_rate)): - if bysecond: - nminutes, second = \ - self.__mod_distance(value=second, - byxxx=self._bysecond, - base=60) - else: - nminutes, second = divmod(second+interval, 60) - - div, minute = divmod(minute+nminutes, 60) - if div: - hour += div - div, hour = divmod(hour, 24) - if div: - day += div - fixday = True - - if ((not byhour or hour in byhour) and - (not byminute or minute in byminute) and - (not bysecond or second in bysecond)): - valid = True - break - - if not valid: - raise ValueError('Invalid combination of interval, ' + - 'byhour and byminute resulting in empty' + - ' rule.') - - timeset = gettimeset(hour, minute, second) - - if fixday and day > 28: - daysinmonth = calendar.monthrange(year, month)[1] - if day > daysinmonth: - while day > daysinmonth: - day -= daysinmonth - month += 1 - if month == 13: - month = 1 - year += 1 - if year > datetime.MAXYEAR: - self._len = total - return - daysinmonth = calendar.monthrange(year, month)[1] - ii.rebuild(year, month) - - def __construct_byset(self, start, byxxx, base): - """ - If a `BYXXX` sequence is passed to the constructor at the same level as - `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some - specifications which cannot be reached given some starting conditions. - - This occurs whenever the interval is not coprime with the base of a - given unit and the difference between the starting position and the - ending position is not coprime with the greatest common denominator - between the interval and the base. For example, with a FREQ of hourly - starting at 17:00 and an interval of 4, the only valid values for - BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not - coprime. - - :param start: - Specifies the starting position. - :param byxxx: - An iterable containing the list of allowed values. - :param base: - The largest allowable value for the specified frequency (e.g. - 24 hours, 60 minutes). - - This does not preserve the type of the iterable, returning a set, since - the values should be unique and the order is irrelevant, this will - speed up later lookups. - - In the event of an empty set, raises a :exception:`ValueError`, as this - results in an empty rrule. - """ - - cset = set() - - # Support a single byxxx value. - if isinstance(byxxx, integer_types): - byxxx = (byxxx, ) - - for num in byxxx: - i_gcd = gcd(self._interval, base) - # Use divmod rather than % because we need to wrap negative nums. - if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: - cset.add(num) - - if len(cset) == 0: - raise ValueError("Invalid rrule byxxx generates an empty set.") - - return cset - - def __mod_distance(self, value, byxxx, base): - """ - Calculates the next value in a sequence where the `FREQ` parameter is - specified along with a `BYXXX` parameter at the same "level" - (e.g. `HOURLY` specified with `BYHOUR`). - - :param value: - The old value of the component. - :param byxxx: - The `BYXXX` set, which should have been generated by - `rrule._construct_byset`, or something else which checks that a - valid rule is present. - :param base: - The largest allowable value for the specified frequency (e.g. - 24 hours, 60 minutes). - - If a valid value is not found after `base` iterations (the maximum - number before the sequence would start to repeat), this raises a - :exception:`ValueError`, as no valid values were found. 
- - This returns a tuple of `divmod(n*interval, base)`, where `n` is the - smallest number of `interval` repetitions until the next specified - value in `byxxx` is found. - """ - accumulator = 0 - for ii in range(1, base + 1): - # Using divmod() over % to account for negative intervals - div, value = divmod(value + self._interval, base) - accumulator += div - if value in byxxx: - return (accumulator, value) - - -class _iterinfo(object): - __slots__ = ["rrule", "lastyear", "lastmonth", - "yearlen", "nextyearlen", "yearordinal", "yearweekday", - "mmask", "mrange", "mdaymask", "nmdaymask", - "wdaymask", "wnomask", "nwdaymask", "eastermask"] - - def __init__(self, rrule): - for attr in self.__slots__: - setattr(self, attr, None) - self.rrule = rrule - - def rebuild(self, year, month): - # Every mask is 7 days longer to handle cross-year weekly periods. - rr = self.rrule - if year != self.lastyear: - self.yearlen = 365 + calendar.isleap(year) - self.nextyearlen = 365 + calendar.isleap(year + 1) - firstyday = datetime.date(year, 1, 1) - self.yearordinal = firstyday.toordinal() - self.yearweekday = firstyday.weekday() - - wday = datetime.date(year, 1, 1).weekday() - if self.yearlen == 365: - self.mmask = M365MASK - self.mdaymask = MDAY365MASK - self.nmdaymask = NMDAY365MASK - self.wdaymask = WDAYMASK[wday:] - self.mrange = M365RANGE - else: - self.mmask = M366MASK - self.mdaymask = MDAY366MASK - self.nmdaymask = NMDAY366MASK - self.wdaymask = WDAYMASK[wday:] - self.mrange = M366RANGE - - if not rr._byweekno: - self.wnomask = None - else: - self.wnomask = [0]*(self.yearlen+7) - # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) - no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 - if no1wkst >= 4: - no1wkst = 0 - # Number of days in the year, plus the days we got - # from last year. - wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 - else: - # Number of days in the year, minus the days we - # left in last year. - wyearlen = self.yearlen-no1wkst - div, mod = divmod(wyearlen, 7) - numweeks = div+mod//4 - for n in rr._byweekno: - if n < 0: - n += numweeks+1 - if not (0 < n <= numweeks): - continue - if n > 1: - i = no1wkst+(n-1)*7 - if no1wkst != firstwkst: - i -= 7-firstwkst - else: - i = no1wkst - for j in range(7): - self.wnomask[i] = 1 - i += 1 - if self.wdaymask[i] == rr._wkst: - break - if 1 in rr._byweekno: - # Check week number 1 of next year as well - # TODO: Check -numweeks for next year. - i = no1wkst+numweeks*7 - if no1wkst != firstwkst: - i -= 7-firstwkst - if i < self.yearlen: - # If week starts in next year, we - # don't care about it. - for j in range(7): - self.wnomask[i] = 1 - i += 1 - if self.wdaymask[i] == rr._wkst: - break - if no1wkst: - # Check last week number of last year as - # well. If no1wkst is 0, either the year - # started on week start, or week number 1 - # got days from last year, so there are no - # days from last year's last week number in - # this year. 
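
The `__construct_byset`/`__mod_distance` docstrings above describe the coprimality constraint between `INTERVAL` and `BYXXX` values; a small sketch of the observable effect (same PyPI-dateutil assumption):

    from datetime import datetime
    from dateutil.rrule import rrule, HOURLY

    start = datetime(2019, 1, 1, 0, 0)  # hour 0

    # With interval=2, only even hours are reachable from hour 0:
    # gcd(2, 24) == 2 and (11 - 0) % 2 != 0, so BYHOUR=11 is unreachable.
    try:
        rrule(HOURLY, interval=2, byhour=11, dtstart=start)
    except ValueError as exc:
        print(exc)  # Invalid rrule byxxx generates an empty set.

    # BYHOUR=12 is reachable, so the rule iterates normally.
    print(list(rrule(HOURLY, interval=2, byhour=12, count=2, dtstart=start)))
    # 2019-01-01 12:00 and 2019-01-02 12:00
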
- if -1 not in rr._byweekno: - lyearweekday = datetime.date(year-1, 1, 1).weekday() - lno1wkst = (7-lyearweekday+rr._wkst) % 7 - lyearlen = 365+calendar.isleap(year-1) - if lno1wkst >= 4: - lno1wkst = 0 - lnumweeks = 52+(lyearlen + - (lyearweekday-rr._wkst) % 7) % 7//4 - else: - lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 - else: - lnumweeks = -1 - if lnumweeks in rr._byweekno: - for i in range(no1wkst): - self.wnomask[i] = 1 - - if (rr._bynweekday and (month != self.lastmonth or - year != self.lastyear)): - ranges = [] - if rr._freq == YEARLY: - if rr._bymonth: - for month in rr._bymonth: - ranges.append(self.mrange[month-1:month+1]) - else: - ranges = [(0, self.yearlen)] - elif rr._freq == MONTHLY: - ranges = [self.mrange[month-1:month+1]] - if ranges: - # Weekly frequency won't get here, so we may not - # care about cross-year weekly periods. - self.nwdaymask = [0]*self.yearlen - for first, last in ranges: - last -= 1 - for wday, n in rr._bynweekday: - if n < 0: - i = last+(n+1)*7 - i -= (self.wdaymask[i]-wday) % 7 - else: - i = first+(n-1)*7 - i += (7-self.wdaymask[i]+wday) % 7 - if first <= i <= last: - self.nwdaymask[i] = 1 - - if rr._byeaster: - self.eastermask = [0]*(self.yearlen+7) - eyday = easter.easter(year).toordinal()-self.yearordinal - for offset in rr._byeaster: - self.eastermask[eyday+offset] = 1 - - self.lastyear = year - self.lastmonth = month - - def ydayset(self, year, month, day): - return list(range(self.yearlen)), 0, self.yearlen - - def mdayset(self, year, month, day): - dset = [None]*self.yearlen - start, end = self.mrange[month-1:month+1] - for i in range(start, end): - dset[i] = i - return dset, start, end - - def wdayset(self, year, month, day): - # We need to handle cross-year weeks here. - dset = [None]*(self.yearlen+7) - i = datetime.date(year, month, day).toordinal()-self.yearordinal - start = i - for j in range(7): - dset[i] = i - i += 1 - # if (not (0 <= i < self.yearlen) or - # self.wdaymask[i] == self.rrule._wkst): - # This will cross the year boundary, if necessary. - if self.wdaymask[i] == self.rrule._wkst: - break - return dset, start, i - - def ddayset(self, year, month, day): - dset = [None] * self.yearlen - i = datetime.date(year, month, day).toordinal() - self.yearordinal - dset[i] = i - return dset, i, i + 1 - - def htimeset(self, hour, minute, second): - tset = [] - rr = self.rrule - for minute in rr._byminute: - for second in rr._bysecond: - tset.append(datetime.time(hour, minute, second, - tzinfo=rr._tzinfo)) - tset.sort() - return tset - - def mtimeset(self, hour, minute, second): - tset = [] - rr = self.rrule - for second in rr._bysecond: - tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) - tset.sort() - return tset - - def stimeset(self, hour, minute, second): - return (datetime.time(hour, minute, second, - tzinfo=self.rrule._tzinfo),) - - -class rruleset(rrulebase): - """ The rruleset type allows more complex recurrence setups, mixing - multiple rules, dates, exclusion rules, and exclusion dates. The type - constructor takes the following keyword arguments: - - :param cache: If True, caching of results will be enabled, improving - performance of multiple queries considerably. 
""" - - class _genitem(object): - def __init__(self, genlist, gen): - try: - self.dt = advance_iterator(gen) - genlist.append(self) - except StopIteration: - pass - self.genlist = genlist - self.gen = gen - - def __next__(self): - try: - self.dt = advance_iterator(self.gen) - except StopIteration: - if self.genlist[0] is self: - heapq.heappop(self.genlist) - else: - self.genlist.remove(self) - heapq.heapify(self.genlist) - - next = __next__ - - def __lt__(self, other): - return self.dt < other.dt - - def __gt__(self, other): - return self.dt > other.dt - - def __eq__(self, other): - return self.dt == other.dt - - def __ne__(self, other): - return self.dt != other.dt - - def __init__(self, cache=False): - super(rruleset, self).__init__(cache) - self._rrule = [] - self._rdate = [] - self._exrule = [] - self._exdate = [] - - @_invalidates_cache - def rrule(self, rrule): - """ Include the given :py:class:`rrule` instance in the recurrence set - generation. """ - self._rrule.append(rrule) - - @_invalidates_cache - def rdate(self, rdate): - """ Include the given :py:class:`datetime` instance in the recurrence - set generation. """ - self._rdate.append(rdate) - - @_invalidates_cache - def exrule(self, exrule): - """ Include the given rrule instance in the recurrence set exclusion - list. Dates which are part of the given recurrence rules will not - be generated, even if some inclusive rrule or rdate matches them. - """ - self._exrule.append(exrule) - - @_invalidates_cache - def exdate(self, exdate): - """ Include the given datetime instance in the recurrence set - exclusion list. Dates included that way will not be generated, - even if some inclusive rrule or rdate matches them. """ - self._exdate.append(exdate) - - def _iter(self): - rlist = [] - self._rdate.sort() - self._genitem(rlist, iter(self._rdate)) - for gen in [iter(x) for x in self._rrule]: - self._genitem(rlist, gen) - exlist = [] - self._exdate.sort() - self._genitem(exlist, iter(self._exdate)) - for gen in [iter(x) for x in self._exrule]: - self._genitem(exlist, gen) - lastdt = None - total = 0 - heapq.heapify(rlist) - heapq.heapify(exlist) - while rlist: - ritem = rlist[0] - if not lastdt or lastdt != ritem.dt: - while exlist and exlist[0] < ritem: - exitem = exlist[0] - advance_iterator(exitem) - if exlist and exlist[0] is exitem: - heapq.heapreplace(exlist, exitem) - if not exlist or ritem != exlist[0]: - total += 1 - yield ritem.dt - lastdt = ritem.dt - advance_iterator(ritem) - if rlist and rlist[0] is ritem: - heapq.heapreplace(rlist, ritem) - self._len = total - - -class _rrulestr(object): - - _freq_map = {"YEARLY": YEARLY, - "MONTHLY": MONTHLY, - "WEEKLY": WEEKLY, - "DAILY": DAILY, - "HOURLY": HOURLY, - "MINUTELY": MINUTELY, - "SECONDLY": SECONDLY} - - _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, - "FR": 4, "SA": 5, "SU": 6} - - def _handle_int(self, rrkwargs, name, value, **kwargs): - rrkwargs[name.lower()] = int(value) - - def _handle_int_list(self, rrkwargs, name, value, **kwargs): - rrkwargs[name.lower()] = [int(x) for x in value.split(',')] - - _handle_INTERVAL = _handle_int - _handle_COUNT = _handle_int - _handle_BYSETPOS = _handle_int_list - _handle_BYMONTH = _handle_int_list - _handle_BYMONTHDAY = _handle_int_list - _handle_BYYEARDAY = _handle_int_list - _handle_BYEASTER = _handle_int_list - _handle_BYWEEKNO = _handle_int_list - _handle_BYHOUR = _handle_int_list - _handle_BYMINUTE = _handle_int_list - _handle_BYSECOND = _handle_int_list - - def _handle_FREQ(self, rrkwargs, name, value, **kwargs): - 
rrkwargs["freq"] = self._freq_map[value] - - def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): - global parser - if not parser: - from dateutil import parser - try: - rrkwargs["until"] = parser.parse(value, - ignoretz=kwargs.get("ignoretz"), - tzinfos=kwargs.get("tzinfos")) - except ValueError: - raise ValueError("invalid until date") - - def _handle_WKST(self, rrkwargs, name, value, **kwargs): - rrkwargs["wkst"] = self._weekday_map[value] - - def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): - """ - Two ways to specify this: +1MO or MO(+1) - """ - l = [] - for wday in value.split(','): - if '(' in wday: - # If it's of the form TH(+1), etc. - splt = wday.split('(') - w = splt[0] - n = int(splt[1][:-1]) - elif len(wday): - # If it's of the form +1MO - for i in range(len(wday)): - if wday[i] not in '+-0123456789': - break - n = wday[:i] or None - w = wday[i:] - if n: - n = int(n) - else: - raise ValueError("Invalid (empty) BYDAY specification.") - - l.append(weekdays[self._weekday_map[w]](n)) - rrkwargs["byweekday"] = l - - _handle_BYDAY = _handle_BYWEEKDAY - - def _parse_rfc_rrule(self, line, - dtstart=None, - cache=False, - ignoretz=False, - tzinfos=None): - if line.find(':') != -1: - name, value = line.split(':') - if name != "RRULE": - raise ValueError("unknown parameter name") - else: - value = line - rrkwargs = {} - for pair in value.split(';'): - name, value = pair.split('=') - name = name.upper() - value = value.upper() - try: - getattr(self, "_handle_"+name)(rrkwargs, name, value, - ignoretz=ignoretz, - tzinfos=tzinfos) - except AttributeError: - raise ValueError("unknown parameter '%s'" % name) - except (KeyError, ValueError): - raise ValueError("invalid '%s': %s" % (name, value)) - return rrule(dtstart=dtstart, cache=cache, **rrkwargs) - - def _parse_rfc(self, s, - dtstart=None, - cache=False, - unfold=False, - forceset=False, - compatible=False, - ignoretz=False, - tzids=None, - tzinfos=None): - global parser - if compatible: - forceset = True - unfold = True - - TZID_NAMES = dict(map( - lambda x: (x.upper(), x), - re.findall('TZID=(?P[^:]+):', s) - )) - s = s.upper() - if not s.strip(): - raise ValueError("empty string") - if unfold: - lines = s.splitlines() - i = 0 - while i < len(lines): - line = lines[i].rstrip() - if not line: - del lines[i] - elif i > 0 and line[0] == " ": - lines[i-1] += line[1:] - del lines[i] - else: - i += 1 - else: - lines = s.split() - if (not forceset and len(lines) == 1 and (s.find(':') == -1 or - s.startswith('RRULE:'))): - return self._parse_rfc_rrule(lines[0], cache=cache, - dtstart=dtstart, ignoretz=ignoretz, - tzinfos=tzinfos) - else: - rrulevals = [] - rdatevals = [] - exrulevals = [] - exdatevals = [] - for line in lines: - if not line: - continue - if line.find(':') == -1: - name = "RRULE" - value = line - else: - name, value = line.split(':', 1) - parms = name.split(';') - if not parms: - raise ValueError("empty property name") - name = parms[0] - parms = parms[1:] - if name == "RRULE": - for parm in parms: - raise ValueError("unsupported RRULE parm: "+parm) - rrulevals.append(value) - elif name == "RDATE": - for parm in parms: - if parm != "VALUE=DATE-TIME": - raise ValueError("unsupported RDATE parm: "+parm) - rdatevals.append(value) - elif name == "EXRULE": - for parm in parms: - raise ValueError("unsupported EXRULE parm: "+parm) - exrulevals.append(value) - elif name == "EXDATE": - for parm in parms: - if parm != "VALUE=DATE-TIME": - raise ValueError("unsupported EXDATE parm: "+parm) - exdatevals.append(value) - elif 
name == "DTSTART": - # RFC 5445 3.8.2.4: The VALUE parameter is optional, but - # may be found only once. - value_found = False - TZID = None - valid_values = {"VALUE=DATE-TIME", "VALUE=DATE"} - for parm in parms: - if parm.startswith("TZID="): - try: - tzkey = TZID_NAMES[parm.split('TZID=')[-1]] - except KeyError: - continue - if tzids is None: - from . import tz - tzlookup = tz.gettz - elif callable(tzids): - tzlookup = tzids - else: - tzlookup = getattr(tzids, 'get', None) - if tzlookup is None: - msg = ('tzids must be a callable, ' + - 'mapping, or None, ' + - 'not %s' % tzids) - raise ValueError(msg) - - TZID = tzlookup(tzkey) - continue - if parm not in valid_values: - raise ValueError("unsupported DTSTART parm: "+parm) - else: - if value_found: - msg = ("Duplicate value parameter found in " + - "DTSTART: " + parm) - raise ValueError(msg) - value_found = True - if not parser: - from dateutil import parser - dtstart = parser.parse(value, ignoretz=ignoretz, - tzinfos=tzinfos) - if TZID is not None: - if dtstart.tzinfo is None: - dtstart = dtstart.replace(tzinfo=TZID) - else: - raise ValueError('DTSTART specifies multiple timezones') - else: - raise ValueError("unsupported property: "+name) - if (forceset or len(rrulevals) > 1 or rdatevals - or exrulevals or exdatevals): - if not parser and (rdatevals or exdatevals): - from dateutil import parser - rset = rruleset(cache=cache) - for value in rrulevals: - rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in rdatevals: - for datestr in value.split(','): - rset.rdate(parser.parse(datestr, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exrulevals: - rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exdatevals: - for datestr in value.split(','): - rset.exdate(parser.parse(datestr, - ignoretz=ignoretz, - tzinfos=tzinfos)) - if compatible and dtstart: - rset.rdate(dtstart) - return rset - else: - return self._parse_rfc_rrule(rrulevals[0], - dtstart=dtstart, - cache=cache, - ignoretz=ignoretz, - tzinfos=tzinfos) - - def __call__(self, s, **kwargs): - return self._parse_rfc(s, **kwargs) - - -rrulestr = _rrulestr() - -# vim:ts=4:sw=4:et diff --git a/pype/vendor/dateutil/tz/__init__.py b/pype/vendor/dateutil/tz/__init__.py deleted file mode 100644 index 5a2d9cd6e1..0000000000 --- a/pype/vendor/dateutil/tz/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -from .tz import * -from .tz import __doc__ - -#: Convenience constant providing a :class:`tzutc()` instance -#: -#: .. versionadded:: 2.7.0 -UTC = tzutc() - -__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", - "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", - "enfold", "datetime_ambiguous", "datetime_exists", - "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] - - -class DeprecatedTzFormatWarning(Warning): - """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/pype/vendor/dateutil/tz/_common.py b/pype/vendor/dateutil/tz/_common.py deleted file mode 100644 index ccabb7da63..0000000000 --- a/pype/vendor/dateutil/tz/_common.py +++ /dev/null @@ -1,415 +0,0 @@ -from six import PY3 - -from functools import wraps - -from datetime import datetime, timedelta, tzinfo - - -ZERO = timedelta(0) - -__all__ = ['tzname_in_python2', 'enfold'] - - -def tzname_in_python2(namefunc): - """Change unicode output into bytestrings in Python 2 - - tzname() API changed in Python 3. 
It used to return bytes, but was changed - to unicode strings - """ - def adjust_encoding(*args, **kwargs): - name = namefunc(*args, **kwargs) - if name is not None and not PY3: - name = name.encode() - - return name - - return adjust_encoding - - -# The following is adapted from Alexander Belopolsky's tz library -# https://github.com/abalkin/tz -if hasattr(datetime, 'fold'): - # This is the pre-python 3.6 fold situation - def enfold(dt, fold=1): - """ - Provides a unified interface for assigning the ``fold`` attribute to - datetimes both before and after the implementation of PEP-495. - - :param fold: - The value for the ``fold`` attribute in the returned datetime. This - should be either 0 or 1. - - :return: - Returns an object for which ``getattr(dt, 'fold', 0)`` returns - ``fold`` for all versions of Python. In versions prior to - Python 3.6, this is a ``_DatetimeWithFold`` object, which is a - subclass of :py:class:`datetime.datetime` with the ``fold`` - attribute added, if ``fold`` is 1. - - .. versionadded:: 2.6.0 - """ - return dt.replace(fold=fold) - -else: - class _DatetimeWithFold(datetime): - """ - This is a class designed to provide a PEP 495-compliant interface for - Python versions before 3.6. It is used only for dates in a fold, so - the ``fold`` attribute is fixed at ``1``. - - .. versionadded:: 2.6.0 - """ - __slots__ = () - - def replace(self, *args, **kwargs): - """ - Return a datetime with the same attributes, except for those - attributes given new values by whichever keyword arguments are - specified. Note that tzinfo=None can be specified to create a naive - datetime from an aware datetime with no conversion of date and time - data. - - This is reimplemented in ``_DatetimeWithFold`` because pypy3 will - return a ``datetime.datetime`` even if ``fold`` is unchanged. - """ - argnames = ( - 'year', 'month', 'day', 'hour', 'minute', 'second', - 'microsecond', 'tzinfo' - ) - - for arg, argname in zip(args, argnames): - if argname in kwargs: - raise TypeError('Duplicate argument: {}'.format(argname)) - - kwargs[argname] = arg - - for argname in argnames: - if argname not in kwargs: - kwargs[argname] = getattr(self, argname) - - dt_class = self.__class__ if kwargs.get('fold', 1) else datetime - - return dt_class(**kwargs) - - @property - def fold(self): - return 1 - - def enfold(dt, fold=1): - """ - Provides a unified interface for assigning the ``fold`` attribute to - datetimes both before and after the implementation of PEP-495. - - :param fold: - The value for the ``fold`` attribute in the returned datetime. This - should be either 0 or 1. - - :return: - Returns an object for which ``getattr(dt, 'fold', 0)`` returns - ``fold`` for all versions of Python. In versions prior to - Python 3.6, this is a ``_DatetimeWithFold`` object, which is a - subclass of :py:class:`datetime.datetime` with the ``fold`` - attribute added, if ``fold`` is 1. - - .. versionadded:: 2.6.0 - """ - if getattr(dt, 'fold', 0) == fold: - return dt - - args = dt.timetuple()[:6] - args += (dt.microsecond, dt.tzinfo) - - if fold: - return _DatetimeWithFold(*args) - else: - return datetime(*args) - - -def _validate_fromutc_inputs(f): - """ - The CPython version of ``fromutc`` checks that the input is a ``datetime`` - object and that ``self`` is attached as its ``tzinfo``. 
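
`enfold` above backports PEP 495's `fold` attribute to pre-3.6 Pythons; a short sketch of what it is for, disambiguating a wall time that occurs twice (this relies on the host's zoneinfo database being present):

    from datetime import datetime
    from dateutil import tz

    eastern = tz.gettz("America/New_York")

    # 01:30 on 2018-11-04 happened twice in US/Eastern; fold picks which.
    ambiguous = datetime(2018, 11, 4, 1, 30, tzinfo=eastern)
    print(tz.datetime_ambiguous(ambiguous))          # True
    print(tz.enfold(ambiguous, fold=0).utcoffset())  # -1 day, 20:00:00 (EDT)
    print(tz.enfold(ambiguous, fold=1).utcoffset())  # -1 day, 19:00:00 (EST)
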
- """ - @wraps(f) - def fromutc(self, dt): - if not isinstance(dt, datetime): - raise TypeError("fromutc() requires a datetime argument") - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - return f(self, dt) - - return fromutc - - -class _tzinfo(tzinfo): - """ - Base class for all ``dateutil`` ``tzinfo`` objects. - """ - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - - dt = dt.replace(tzinfo=self) - - wall_0 = enfold(dt, fold=0) - wall_1 = enfold(dt, fold=1) - - same_offset = wall_0.utcoffset() == wall_1.utcoffset() - same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) - - return same_dt and not same_offset - - def _fold_status(self, dt_utc, dt_wall): - """ - Determine the fold status of a "wall" datetime, given a representation - of the same datetime as a (naive) UTC datetime. This is calculated based - on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all - datetimes, and that this offset is the actual number of hours separating - ``dt_utc`` and ``dt_wall``. - - :param dt_utc: - Representation of the datetime as UTC - - :param dt_wall: - Representation of the datetime as "wall time". This parameter must - either have a `fold` attribute or have a fold-naive - :class:`datetime.tzinfo` attached, otherwise the calculation may - fail. - """ - if self.is_ambiguous(dt_wall): - delta_wall = dt_wall - dt_utc - _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) - else: - _fold = 0 - - return _fold - - def _fold(self, dt): - return getattr(dt, 'fold', 0) - - def _fromutc(self, dt): - """ - Given a timezone-aware datetime in a given timezone, calculates a - timezone-aware datetime in a new timezone. - - Since this is the one time that we *know* we have an unambiguous - datetime object, we take this opportunity to determine whether the - datetime is ambiguous and in a "fold" state (e.g. if it's the first - occurence, chronologically, of the ambiguous datetime). - - :param dt: - A timezone-aware :class:`datetime.datetime` object. - """ - - # Re-implement the algorithm from Python's datetime.py - dtoff = dt.utcoffset() - if dtoff is None: - raise ValueError("fromutc() requires a non-None utcoffset() " - "result") - - # The original datetime.py code assumes that `dst()` defaults to - # zero during ambiguous times. PEP 495 inverts this presumption, so - # for pre-PEP 495 versions of python, we need to tweak the algorithm. - dtdst = dt.dst() - if dtdst is None: - raise ValueError("fromutc() requires a non-None dst() result") - delta = dtoff - dtdst - - dt += delta - # Set fold=1 so we can default to being in the fold for - # ambiguous dates. - dtdst = enfold(dt, fold=1).dst() - if dtdst is None: - raise ValueError("fromutc(): dt.dst gave inconsistent " - "results; cannot convert") - return dt + dtdst - - @_validate_fromutc_inputs - def fromutc(self, dt): - """ - Given a timezone-aware datetime in a given timezone, calculates a - timezone-aware datetime in a new timezone. - - Since this is the one time that we *know* we have an unambiguous - datetime object, we take this opportunity to determine whether the - datetime is ambiguous and in a "fold" state (e.g. if it's the first - occurance, chronologically, of the ambiguous datetime). 
- - :param dt: - A timezone-aware :class:`datetime.datetime` object. - """ - dt_wall = self._fromutc(dt) - - # Calculate the fold status given the two datetimes. - _fold = self._fold_status(dt, dt_wall) - - # Set the default fold value for ambiguous dates - return enfold(dt_wall, fold=_fold) - - -class tzrangebase(_tzinfo): - """ - This is an abstract base class for time zones represented by an annual - transition into and out of DST. Child classes should implement the following - methods: - - * ``__init__(self, *args, **kwargs)`` - * ``transitions(self, year)`` - this is expected to return a tuple of - datetimes representing the DST on and off transitions in standard - time. - - A fully initialized ``tzrangebase`` subclass should also provide the - following attributes: - * ``hasdst``: Boolean whether or not the zone uses DST. - * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects - representing the respective UTC offsets. - * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short - abbreviations in DST and STD, respectively. - * ``_hasdst``: Whether or not the zone has DST. - - .. versionadded:: 2.6.0 - """ - def __init__(self): - raise NotImplementedError('tzrangebase is an abstract base class') - - def utcoffset(self, dt): - isdst = self._isdst(dt) - - if isdst is None: - return None - elif isdst: - return self._dst_offset - else: - return self._std_offset - - def dst(self, dt): - isdst = self._isdst(dt) - - if isdst is None: - return None - elif isdst: - return self._dst_base_offset - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - if self._isdst(dt): - return self._dst_abbr - else: - return self._std_abbr - - def fromutc(self, dt): - """ Given a datetime in UTC, return local time """ - if not isinstance(dt, datetime): - raise TypeError("fromutc() requires a datetime argument") - - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - # Get transitions - if there are none, fixed offset - transitions = self.transitions(dt.year) - if transitions is None: - return dt + self.utcoffset(dt) - - # Get the transition times in UTC - dston, dstoff = transitions - - dston -= self._std_offset - dstoff -= self._std_offset - - utc_transitions = (dston, dstoff) - dt_utc = dt.replace(tzinfo=None) - - isdst = self._naive_isdst(dt_utc, utc_transitions) - - if isdst: - dt_wall = dt + self._dst_offset - else: - dt_wall = dt + self._std_offset - - _fold = int(not isdst and self.is_ambiguous(dt_wall)) - - return enfold(dt_wall, fold=_fold) - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. 
versionadded:: 2.6.0 - """ - if not self.hasdst: - return False - - start, end = self.transitions(dt.year) - - dt = dt.replace(tzinfo=None) - return (end <= dt < end + self._dst_base_offset) - - def _isdst(self, dt): - if not self.hasdst: - return False - elif dt is None: - return None - - transitions = self.transitions(dt.year) - - if transitions is None: - return False - - dt = dt.replace(tzinfo=None) - - isdst = self._naive_isdst(dt, transitions) - - # Handle ambiguous dates - if not isdst and self.is_ambiguous(dt): - return not self._fold(dt) - else: - return isdst - - def _naive_isdst(self, dt, transitions): - dston, dstoff = transitions - - dt = dt.replace(tzinfo=None) - - if dston < dstoff: - isdst = dston <= dt < dstoff - else: - isdst = not dstoff <= dt < dston - - return isdst - - @property - def _dst_base_offset(self): - return self._dst_offset - self._std_offset - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(...)" % self.__class__.__name__ - - __reduce__ = object.__reduce__ diff --git a/pype/vendor/dateutil/tz/_factories.py b/pype/vendor/dateutil/tz/_factories.py deleted file mode 100644 index de2e0c1de7..0000000000 --- a/pype/vendor/dateutil/tz/_factories.py +++ /dev/null @@ -1,49 +0,0 @@ -from datetime import timedelta - - -class _TzSingleton(type): - def __init__(cls, *args, **kwargs): - cls.__instance = None - super(_TzSingleton, cls).__init__(*args, **kwargs) - - def __call__(cls): - if cls.__instance is None: - cls.__instance = super(_TzSingleton, cls).__call__() - return cls.__instance - -class _TzFactory(type): - def instance(cls, *args, **kwargs): - """Alternate constructor that returns a fresh instance""" - return type.__call__(cls, *args, **kwargs) - - -class _TzOffsetFactory(_TzFactory): - def __init__(cls, *args, **kwargs): - cls.__instances = {} - - def __call__(cls, name, offset): - if isinstance(offset, timedelta): - key = (name, offset.total_seconds()) - else: - key = (name, offset) - - instance = cls.__instances.get(key, None) - if instance is None: - instance = cls.__instances.setdefault(key, - cls.instance(name, offset)) - return instance - - -class _TzStrFactory(_TzFactory): - def __init__(cls, *args, **kwargs): - cls.__instances = {} - - def __call__(cls, s, posix_offset=False): - key = (s, posix_offset) - instance = cls.__instances.get(key, None) - - if instance is None: - instance = cls.__instances.setdefault(key, - cls.instance(s, posix_offset)) - return instance - diff --git a/pype/vendor/dateutil/tz/tz.py b/pype/vendor/dateutil/tz/tz.py deleted file mode 100644 index ac82b9c83d..0000000000 --- a/pype/vendor/dateutil/tz/tz.py +++ /dev/null @@ -1,1785 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers timezone implementations subclassing the abstract -:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format -files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, -etc), TZ environment string (in all known formats), given ranges (with help -from relative deltas), local machine timezone, fixed offset timezone, and UTC -timezone. 
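
The `_factories.py` module deleted above interns `tzutc`/`tzoffset`/`tzstr` instances so repeated construction returns the same object; the observable effect (assuming dateutil >= 2.7, where these factories were introduced):

    from datetime import datetime, timedelta
    from dateutil import tz

    # tzutc() is a singleton; tzoffset() is cached per (name, offset).
    print(tz.tzutc() is tz.tzutc())                # True
    print(tz.tzoffset("CET", 3600) is
          tz.tzoffset("CET", timedelta(hours=1)))  # True, 3600 s == 1 h

    aware = datetime(2019, 6, 1, 12, tzinfo=tz.tzoffset("CET", 3600))
    print(aware.isoformat())                       # 2019-06-01T12:00:00+01:00
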
-""" -import datetime -import struct -import time -import sys -import os -import bisect - -import six -from six import string_types -from six.moves import _thread -from ._common import tzname_in_python2, _tzinfo -from ._common import tzrangebase, enfold -from ._common import _validate_fromutc_inputs - -from ._factories import _TzSingleton, _TzOffsetFactory -from ._factories import _TzStrFactory -try: - from .win import tzwin, tzwinlocal -except ImportError: - tzwin = tzwinlocal = None - -ZERO = datetime.timedelta(0) -EPOCH = datetime.datetime.utcfromtimestamp(0) -EPOCHORDINAL = EPOCH.toordinal() - - -@six.add_metaclass(_TzSingleton) -class tzutc(datetime.tzinfo): - """ - This is a tzinfo object that represents the UTC time zone. - - **Examples:** - - .. doctest:: - - >>> from datetime import * - >>> from dateutil.tz import * - - >>> datetime.now() - datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) - - >>> datetime.now(tzutc()) - datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) - - >>> datetime.now(tzutc()).tzname() - 'UTC' - - .. versionchanged:: 2.7.0 - ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will - always return the same object. - - .. doctest:: - - >>> from dateutil.tz import tzutc, UTC - >>> tzutc() is tzutc() - True - >>> tzutc() is UTC - True - """ - def utcoffset(self, dt): - return ZERO - - def dst(self, dt): - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return "UTC" - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - return False - - @_validate_fromutc_inputs - def fromutc(self, dt): - """ - Fast track version of fromutc() returns the original ``dt`` object for - any valid :py:class:`datetime.datetime` object. - """ - return dt - - def __eq__(self, other): - if not isinstance(other, (tzutc, tzoffset)): - return NotImplemented - - return (isinstance(other, tzutc) or - (isinstance(other, tzoffset) and other._offset == ZERO)) - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s()" % self.__class__.__name__ - - __reduce__ = object.__reduce__ - - -@six.add_metaclass(_TzOffsetFactory) -class tzoffset(datetime.tzinfo): - """ - A simple class for representing a fixed offset from UTC. - - :param name: - The timezone name, to be returned when ``tzname()`` is called. - :param offset: - The time zone offset in seconds, or (since version 2.6.0, represented - as a :py:class:`datetime.timedelta` object). - """ - def __init__(self, name, offset): - self._name = name - - try: - # Allow a timedelta - offset = offset.total_seconds() - except (TypeError, AttributeError): - pass - self._offset = datetime.timedelta(seconds=offset) - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return self._name - - @_validate_fromutc_inputs - def fromutc(self, dt): - return dt + self._offset - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. 
versionadded:: 2.6.0 - """ - return False - - def __eq__(self, other): - if not isinstance(other, tzoffset): - return NotImplemented - - return self._offset == other._offset - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(%s, %s)" % (self.__class__.__name__, - repr(self._name), - int(self._offset.total_seconds())) - - __reduce__ = object.__reduce__ - - -class tzlocal(_tzinfo): - """ - A :class:`tzinfo` subclass built around the ``time`` timezone functions. - """ - def __init__(self): - super(tzlocal, self).__init__() - - self._std_offset = datetime.timedelta(seconds=-time.timezone) - if time.daylight: - self._dst_offset = datetime.timedelta(seconds=-time.altzone) - else: - self._dst_offset = self._std_offset - - self._dst_saved = self._dst_offset - self._std_offset - self._hasdst = bool(self._dst_saved) - self._tznames = tuple(time.tzname) - - def utcoffset(self, dt): - if dt is None and self._hasdst: - return None - - if self._isdst(dt): - return self._dst_offset - else: - return self._std_offset - - def dst(self, dt): - if dt is None and self._hasdst: - return None - - if self._isdst(dt): - return self._dst_offset - self._std_offset - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return self._tznames[self._isdst(dt)] - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - naive_dst = self._naive_is_dst(dt) - return (not naive_dst and - (naive_dst != self._naive_is_dst(dt - self._dst_saved))) - - def _naive_is_dst(self, dt): - timestamp = _datetime_to_timestamp(dt) - return time.localtime(timestamp + time.timezone).tm_isdst - - def _isdst(self, dt, fold_naive=True): - # We can't use mktime here. It is unstable when deciding if - # the hour near to a change is DST or not. 
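
`tzlocal` above derives its offsets from the `time` module rather than a tzfile; minimal usage (output naturally depends on the machine running it):

    from datetime import datetime
    from dateutil import tz

    local = tz.tzlocal()
    now = datetime.now(local)
    # tzname()/utcoffset()/dst() are computed from time.timezone/altzone.
    print(now.tzname(), now.utcoffset(), now.dst())
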
- # - # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, - # dt.minute, dt.second, dt.weekday(), 0, -1)) - # return time.localtime(timestamp).tm_isdst - # - # The code above yields the following result: - # - # >>> import tz, datetime - # >>> t = tz.tzlocal() - # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - # 'BRDT' - # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() - # 'BRST' - # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - # 'BRST' - # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() - # 'BRDT' - # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - # 'BRDT' - # - # Here is a more stable implementation: - # - if not self._hasdst: - return False - - # Check for ambiguous times: - dstval = self._naive_is_dst(dt) - fold = getattr(dt, 'fold', None) - - if self.is_ambiguous(dt): - if fold is not None: - return not self._fold(dt) - else: - return True - - return dstval - - def __eq__(self, other): - if isinstance(other, tzlocal): - return (self._std_offset == other._std_offset and - self._dst_offset == other._dst_offset) - elif isinstance(other, tzutc): - return (not self._hasdst and - self._tznames[0] in {'UTC', 'GMT'} and - self._std_offset == ZERO) - elif isinstance(other, tzoffset): - return (not self._hasdst and - self._tznames[0] == other._name and - self._std_offset == other._offset) - else: - return NotImplemented - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s()" % self.__class__.__name__ - - __reduce__ = object.__reduce__ - - -class _ttinfo(object): - __slots__ = ["offset", "delta", "isdst", "abbr", - "isstd", "isgmt", "dstoffset"] - - def __init__(self): - for attr in self.__slots__: - setattr(self, attr, None) - - def __repr__(self): - l = [] - for attr in self.__slots__: - value = getattr(self, attr) - if value is not None: - l.append("%s=%s" % (attr, repr(value))) - return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) - - def __eq__(self, other): - if not isinstance(other, _ttinfo): - return NotImplemented - - return (self.offset == other.offset and - self.delta == other.delta and - self.isdst == other.isdst and - self.abbr == other.abbr and - self.isstd == other.isstd and - self.isgmt == other.isgmt and - self.dstoffset == other.dstoffset) - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __getstate__(self): - state = {} - for name in self.__slots__: - state[name] = getattr(self, name, None) - return state - - def __setstate__(self, state): - for name in self.__slots__: - if name in state: - setattr(self, name, state[name]) - - -class _tzfile(object): - """ - Lightweight class for holding the relevant transition and time zone - information read from binary tzfiles. - """ - attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', - 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] - - def __init__(self, **kwargs): - for attr in self.attrs: - setattr(self, attr, kwargs.get(attr, None)) - - -class tzfile(_tzinfo): - """ - This is a ``tzinfo`` subclass thant allows one to use the ``tzfile(5)`` - format timezone files to extract current and historical zone information. - - :param fileobj: - This can be an opened file stream or a file name that the time zone - information can be read from. - - :param filename: - This is an optional parameter specifying the source of the time zone - information in the event that ``fileobj`` is a file object. 
If omitted - and ``fileobj`` is a file stream, this parameter will be set either to - ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. - - See `Sources for Time Zone and Daylight Saving Time Data - `_ for more information. - Time zone files can be compiled from the `IANA Time Zone database files - `_ with the `zic time zone compiler - `_ - - .. note:: - - Only construct a ``tzfile`` directly if you have a specific timezone - file on disk that you want to read into a Python ``tzinfo`` object. - If you want to get a ``tzfile`` representing a specific IANA zone, - (e.g. ``'America/New_York'``), you should call - :func:`dateutil.tz.gettz` with the zone identifier. - - - **Examples:** - - Using the US Eastern time zone as an example, we can see that a ``tzfile`` - provides time zone information for the standard Daylight Saving offsets: - - .. testsetup:: tzfile - - from dateutil.tz import gettz - from datetime import datetime - - .. doctest:: tzfile - - >>> NYC = gettz('America/New_York') - >>> NYC - tzfile('/usr/share/zoneinfo/America/New_York') - - >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST - 2016-01-03 00:00:00-05:00 - - >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT - 2016-07-07 00:00:00-04:00 - - - The ``tzfile`` structure contains a fully history of the time zone, - so historical dates will also have the right offsets. For example, before - the adoption of the UTC standards, New York used local solar mean time: - - .. doctest:: tzfile - - >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT - 1901-04-12 00:00:00-04:56 - - And during World War II, New York was on "Eastern War Time", which was a - state of permanent daylight saving time: - - .. doctest:: tzfile - - >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT - 1944-02-07 00:00:00-04:00 - - """ - - def __init__(self, fileobj, filename=None): - super(tzfile, self).__init__() - - file_opened_here = False - if isinstance(fileobj, string_types): - self._filename = fileobj - fileobj = open(fileobj, 'rb') - file_opened_here = True - elif filename is not None: - self._filename = filename - elif hasattr(fileobj, "name"): - self._filename = fileobj.name - else: - self._filename = repr(fileobj) - - if fileobj is not None: - if not file_opened_here: - fileobj = _ContextWrapper(fileobj) - - with fileobj as file_stream: - tzobj = self._read_tzfile(file_stream) - - self._set_tzdata(tzobj) - - def _set_tzdata(self, tzobj): - """ Set the time zone data of this object from a _tzfile object """ - # Copy the relevant attributes over as private attributes - for attr in _tzfile.attrs: - setattr(self, '_' + attr, getattr(tzobj, attr)) - - def _read_tzfile(self, fileobj): - out = _tzfile() - - # From tzfile(5): - # - # The time zone information files used by tzset(3) - # begin with the magic characters "TZif" to identify - # them as time zone information files, followed by - # sixteen bytes reserved for future use, followed by - # six four-byte values of type long, written in a - # ``standard'' byte order (the high-order byte - # of the value is written first). - if fileobj.read(4).decode() != "TZif": - raise ValueError("magic not found") - - fileobj.read(16) - - ( - # The number of UTC/local indicators stored in the file. - ttisgmtcnt, - - # The number of standard/wall indicators stored in the file. - ttisstdcnt, - - # The number of leap seconds for which data is - # stored in the file. - leapcnt, - - # The number of "transition times" for which data - # is stored in the file. 
- timecnt, - - # The number of "local time types" for which data - # is stored in the file (must not be zero). - typecnt, - - # The number of characters of "time zone - # abbreviation strings" stored in the file. - charcnt, - - ) = struct.unpack(">6l", fileobj.read(24)) - - # The above header is followed by tzh_timecnt four-byte - # values of type long, sorted in ascending order. - # These values are written in ``standard'' byte order. - # Each is used as a transition time (as returned by - # time(2)) at which the rules for computing local time - # change. - - if timecnt: - out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, - fileobj.read(timecnt*4))) - else: - out.trans_list_utc = [] - - # Next come tzh_timecnt one-byte values of type unsigned - # char; each one tells which of the different types of - # ``local time'' types described in the file is associated - # with the same-indexed transition time. These values - # serve as indices into an array of ttinfo structures that - # appears next in the file. - - if timecnt: - out.trans_idx = struct.unpack(">%dB" % timecnt, - fileobj.read(timecnt)) - else: - out.trans_idx = [] - - # Each ttinfo structure is written as a four-byte value - # for tt_gmtoff of type long, in a standard byte - # order, followed by a one-byte value for tt_isdst - # and a one-byte value for tt_abbrind. In each - # structure, tt_gmtoff gives the number of - # seconds to be added to UTC, tt_isdst tells whether - # tm_isdst should be set by localtime(3), and - # tt_abbrind serves as an index into the array of - # time zone abbreviation characters that follow the - # ttinfo structure(s) in the file. - - ttinfo = [] - - for i in range(typecnt): - ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) - - abbr = fileobj.read(charcnt).decode() - - # Then there are tzh_leapcnt pairs of four-byte - # values, written in standard byte order; the - # first value of each pair gives the time (as - # returned by time(2)) at which a leap second - # occurs; the second gives the total number of - # leap seconds to be applied after the given time. - # The pairs of values are sorted in ascending order - # by time. - - # Not used, for now (but seek for correct file position) - if leapcnt: - fileobj.seek(leapcnt * 8, os.SEEK_CUR) - - # Then there are tzh_ttisstdcnt standard/wall - # indicators, each stored as a one-byte value; - # they tell whether the transition times associated - # with local time types were specified as standard - # time or wall clock time, and are used when - # a time zone file is used in handling POSIX-style - # time zone environment variables. - - if ttisstdcnt: - isstd = struct.unpack(">%db" % ttisstdcnt, - fileobj.read(ttisstdcnt)) - - # Finally, there are tzh_ttisgmtcnt UTC/local - # indicators, each stored as a one-byte value; - # they tell whether the transition times associated - # with local time types were specified as UTC or - # local time, and are used when a time zone file - # is used in handling POSIX-style time zone envi- - # ronment variables. - - if ttisgmtcnt: - isgmt = struct.unpack(">%db" % ttisgmtcnt, - fileobj.read(ttisgmtcnt)) - - # Build ttinfo list - out.ttinfo_list = [] - for i in range(typecnt): - gmtoff, isdst, abbrind = ttinfo[i] - # Round to full-minutes if that's not the case. Python's - # datetime doesn't accept sub-minute timezones. Check - # http://python.org/sf/1447945 for some information. 
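
The rounding step just below keeps tzfile offsets on whole minutes, since `datetime` rejected sub-minute UTC offsets at the time; the same arithmetic in isolation:

    def round_gmtoff(gmtoff):
        # Round a raw tt_gmtoff (seconds east of UTC) to the nearest
        # whole minute, exactly as the tzfile reader below does.
        return 60 * ((gmtoff + 30) // 60)

    # New York's pre-1883 local mean time is -4:56:02 in the IANA data;
    # it rounds to -4:56, i.e. -17760 seconds.
    print(round_gmtoff(-17762))  # -17760
    print(round_gmtoff(59))      # 60
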
- gmtoff = 60 * ((gmtoff + 30) // 60) - tti = _ttinfo() - tti.offset = gmtoff - tti.dstoffset = datetime.timedelta(0) - tti.delta = datetime.timedelta(seconds=gmtoff) - tti.isdst = isdst - tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] - tti.isstd = (ttisstdcnt > i and isstd[i] != 0) - tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) - out.ttinfo_list.append(tti) - - # Replace ttinfo indexes for ttinfo objects. - out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] - - # Set standard, dst, and before ttinfos. before will be - # used when a given time is before any transitions, - # and will be set to the first non-dst ttinfo, or to - # the first dst, if all of them are dst. - out.ttinfo_std = None - out.ttinfo_dst = None - out.ttinfo_before = None - if out.ttinfo_list: - if not out.trans_list_utc: - out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] - else: - for i in range(timecnt-1, -1, -1): - tti = out.trans_idx[i] - if not out.ttinfo_std and not tti.isdst: - out.ttinfo_std = tti - elif not out.ttinfo_dst and tti.isdst: - out.ttinfo_dst = tti - - if out.ttinfo_std and out.ttinfo_dst: - break - else: - if out.ttinfo_dst and not out.ttinfo_std: - out.ttinfo_std = out.ttinfo_dst - - for tti in out.ttinfo_list: - if not tti.isdst: - out.ttinfo_before = tti - break - else: - out.ttinfo_before = out.ttinfo_list[0] - - # Now fix transition times to become relative to wall time. - # - # I'm not sure about this. In my tests, the tz source file - # is setup to wall time, and in the binary file isstd and - # isgmt are off, so it should be in wall time. OTOH, it's - # always in gmt time. Let me know if you have comments - # about this. - laststdoffset = None - out.trans_list = [] - for i, tti in enumerate(out.trans_idx): - if not tti.isdst: - offset = tti.offset - laststdoffset = offset - else: - if laststdoffset is not None: - # Store the DST offset as well and update it in the list - tti.dstoffset = tti.offset - laststdoffset - out.trans_idx[i] = tti - - offset = laststdoffset or 0 - - out.trans_list.append(out.trans_list_utc[i] + offset) - - # In case we missed any DST offsets on the way in for some reason, make - # a second pass over the list, looking for the /next/ DST offset. - laststdoffset = None - for i in reversed(range(len(out.trans_idx))): - tti = out.trans_idx[i] - if tti.isdst: - if not (tti.dstoffset or laststdoffset is None): - tti.dstoffset = tti.offset - laststdoffset - else: - laststdoffset = tti.offset - - if not isinstance(tti.dstoffset, datetime.timedelta): - tti.dstoffset = datetime.timedelta(seconds=tti.dstoffset) - - out.trans_idx[i] = tti - - out.trans_idx = tuple(out.trans_idx) - out.trans_list = tuple(out.trans_list) - out.trans_list_utc = tuple(out.trans_list_utc) - - return out - - def _find_last_transition(self, dt, in_utc=False): - # If there's no list, there are no transitions to find - if not self._trans_list: - return None - - timestamp = _datetime_to_timestamp(dt) - - # Find where the timestamp fits in the transition list - if the - # timestamp is a transition time, it's part of the "after" period. 
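
Putting the tzfile machinery together, this echoes the doctests from the class docstring above (requires a system zoneinfo database, so availability varies by platform):

    from datetime import datetime
    from dateutil import tz

    nyc = tz.gettz("America/New_York")
    print(datetime(2016, 1, 3, tzinfo=nyc))   # 2016-01-03 00:00:00-05:00 EST
    print(datetime(2016, 7, 7, tzinfo=nyc))   # 2016-07-07 00:00:00-04:00 EDT
    # Historical transitions are preserved as well:
    print(datetime(1901, 4, 12, tzinfo=nyc))  # -04:56, local mean time
    print(datetime(1944, 2, 7, tzinfo=nyc))   # -04:00, Eastern War Time
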
- trans_list = self._trans_list_utc if in_utc else self._trans_list - idx = bisect.bisect_right(trans_list, timestamp) - - # We want to know when the previous transition was, so subtract off 1 - return idx - 1 - - def _get_ttinfo(self, idx): - # For no list or after the last transition, default to _ttinfo_std - if idx is None or (idx + 1) >= len(self._trans_list): - return self._ttinfo_std - - # If there is a list and the time is before it, return _ttinfo_before - if idx < 0: - return self._ttinfo_before - - return self._trans_idx[idx] - - def _find_ttinfo(self, dt): - idx = self._resolve_ambiguous_time(dt) - - return self._get_ttinfo(idx) - - def fromutc(self, dt): - """ - The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. - - :param dt: - A :py:class:`datetime.datetime` object. - - :raises TypeError: - Raised if ``dt`` is not a :py:class:`datetime.datetime` object. - - :raises ValueError: - Raised if this is called with a ``dt`` which does not have this - ``tzinfo`` attached. - - :return: - Returns a :py:class:`datetime.datetime` object representing the - wall time in ``self``'s time zone. - """ - # These isinstance checks are in datetime.tzinfo, so we'll preserve - # them, even if we don't care about duck typing. - if not isinstance(dt, datetime.datetime): - raise TypeError("fromutc() requires a datetime argument") - - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - # First treat UTC as wall time and get the transition we're in. - idx = self._find_last_transition(dt, in_utc=True) - tti = self._get_ttinfo(idx) - - dt_out = dt + datetime.timedelta(seconds=tti.offset) - - fold = self.is_ambiguous(dt_out, idx=idx) - - return enfold(dt_out, fold=int(fold)) - - def is_ambiguous(self, dt, idx=None): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - if idx is None: - idx = self._find_last_transition(dt) - - # Calculate the difference in offsets from current to previous - timestamp = _datetime_to_timestamp(dt) - tti = self._get_ttinfo(idx) - - if idx is None or idx <= 0: - return False - - od = self._get_ttinfo(idx - 1).offset - tti.offset - tt = self._trans_list[idx] # Transition time - - return timestamp < tt + od - - def _resolve_ambiguous_time(self, dt): - idx = self._find_last_transition(dt) - - # If we have no transitions, return the index - _fold = self._fold(dt) - if idx is None or idx == 0: - return idx - - # If it's ambiguous and we're in a fold, shift to a different index. - idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) - - return idx - idx_offset - - def utcoffset(self, dt): - if dt is None: - return None - - if not self._ttinfo_std: - return ZERO - - return self._find_ttinfo(dt).delta - - def dst(self, dt): - if dt is None: - return None - - if not self._ttinfo_dst: - return ZERO - - tti = self._find_ttinfo(dt) - - if not tti.isdst: - return ZERO - - # The documentation says that utcoffset()-dst() must - # be constant for every dt. 
- return tti.dstoffset - - @tzname_in_python2 - def tzname(self, dt): - if not self._ttinfo_std or dt is None: - return None - return self._find_ttinfo(dt).abbr - - def __eq__(self, other): - if not isinstance(other, tzfile): - return NotImplemented - return (self._trans_list == other._trans_list and - self._trans_idx == other._trans_idx and - self._ttinfo_list == other._ttinfo_list) - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) - - def __reduce__(self): - return self.__reduce_ex__(None) - - def __reduce_ex__(self, protocol): - return (self.__class__, (None, self._filename), self.__dict__) - - -class tzrange(tzrangebase): - """ - The ``tzrange`` object is a time zone specified by a set of offsets and - abbreviations, equivalent to the way the ``TZ`` variable can be specified - in POSIX-like systems, but using Python delta objects to specify DST - start, end and offsets. - - :param stdabbr: - The abbreviation for standard time (e.g. ``'EST'``). - - :param stdoffset: - An integer or :class:`datetime.timedelta` object or equivalent - specifying the base offset from UTC. - - If unspecified, +00:00 is used. - - :param dstabbr: - The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). - - If specified, with no other DST information, DST is assumed to occur - and the default behavior or ``dstoffset``, ``start`` and ``end`` is - used. If unspecified and no other DST information is specified, it - is assumed that this zone has no DST. - - If this is unspecified and other DST information is *is* specified, - DST occurs in the zone but the time zone abbreviation is left - unchanged. - - :param dstoffset: - A an integer or :class:`datetime.timedelta` object or equivalent - specifying the UTC offset during DST. If unspecified and any other DST - information is specified, it is assumed to be the STD offset +1 hour. - - :param start: - A :class:`relativedelta.relativedelta` object or equivalent specifying - the time and time of year that daylight savings time starts. To - specify, for example, that DST starts at 2AM on the 2nd Sunday in - March, pass: - - ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` - - If unspecified and any other DST information is specified, the default - value is 2 AM on the first Sunday in April. - - :param end: - A :class:`relativedelta.relativedelta` object or equivalent - representing the time and time of year that daylight savings time - ends, with the same specification method as in ``start``. One note is - that this should point to the first time in the *standard* zone, so if - a transition occurs at 2AM in the DST zone and the clocks are set back - 1 hour to 1AM, set the ``hours`` parameter to +1. - - - **Examples:** - - .. testsetup:: tzrange - - from dateutil.tz import tzrange, tzstr - - .. doctest:: tzrange - - >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") - True - - >>> from dateutil.relativedelta import * - >>> range1 = tzrange("EST", -18000, "EDT") - >>> range2 = tzrange("EST", -18000, "EDT", -14400, - ... relativedelta(hours=+2, month=4, day=1, - ... weekday=SU(+1)), - ... relativedelta(hours=+1, month=10, day=31, - ... 
weekday=SU(-1))) - >>> tzstr('EST5EDT') == range1 == range2 - True - - """ - def __init__(self, stdabbr, stdoffset=None, - dstabbr=None, dstoffset=None, - start=None, end=None): - - global relativedelta - from dateutil import relativedelta - - self._std_abbr = stdabbr - self._dst_abbr = dstabbr - - try: - stdoffset = stdoffset.total_seconds() - except (TypeError, AttributeError): - pass - - try: - dstoffset = dstoffset.total_seconds() - except (TypeError, AttributeError): - pass - - if stdoffset is not None: - self._std_offset = datetime.timedelta(seconds=stdoffset) - else: - self._std_offset = ZERO - - if dstoffset is not None: - self._dst_offset = datetime.timedelta(seconds=dstoffset) - elif dstabbr and stdoffset is not None: - self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) - else: - self._dst_offset = ZERO - - if dstabbr and start is None: - self._start_delta = relativedelta.relativedelta( - hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) - else: - self._start_delta = start - - if dstabbr and end is None: - self._end_delta = relativedelta.relativedelta( - hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) - else: - self._end_delta = end - - self._dst_base_offset_ = self._dst_offset - self._std_offset - self.hasdst = bool(self._start_delta) - - def transitions(self, year): - """ - For a given year, get the DST on and off transition times, expressed - always on the standard time side. For zones with no transitions, this - function returns ``None``. - - :param year: - The year whose transitions you would like to query. - - :return: - Returns a :class:`tuple` of :class:`datetime.datetime` objects, - ``(dston, dstoff)`` for zones with an annual DST transition, or - ``None`` for fixed offset zones. - """ - if not self.hasdst: - return None - - base_year = datetime.datetime(year, 1, 1) - - start = base_year + self._start_delta - end = base_year + self._end_delta - - return (start, end) - - def __eq__(self, other): - if not isinstance(other, tzrange): - return NotImplemented - - return (self._std_abbr == other._std_abbr and - self._dst_abbr == other._dst_abbr and - self._std_offset == other._std_offset and - self._dst_offset == other._dst_offset and - self._start_delta == other._start_delta and - self._end_delta == other._end_delta) - - @property - def _dst_base_offset(self): - return self._dst_base_offset_ - - -@six.add_metaclass(_TzStrFactory) -class tzstr(tzrange): - """ - ``tzstr`` objects are time zone objects specified by a time-zone string as - it would be passed to a ``TZ`` variable on POSIX-style systems (see - the `GNU C Library: TZ Variable`_ for more details). - - There is one notable exception, which is that POSIX-style time zones use an - inverted offset format, so normally ``GMT+3`` would be parsed as an offset - 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an - offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX - behavior, pass a ``True`` value to ``posix_offset``. - - The :class:`tzrange` object provides the same functionality, but is - specified using :class:`relativedelta.relativedelta` objects. rather than - strings. - - :param s: - A time zone string in ``TZ`` variable format. This can be a - :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: - :class:`unicode`) or a stream emitting unicode characters - (e.g. :class:`StringIO`). - - :param posix_offset: - Optional. 
If set to ``True``, interpret strings such as ``GMT+3`` or - ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the - POSIX standard. - - .. caution:: - - Prior to version 2.7.0, this function also supported time zones - in the format: - - * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` - * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` - - This format is non-standard and has been deprecated; this function - will raise a :class:`DeprecatedTZFormatWarning` until - support is removed in a future version. - - .. _`GNU C Library: TZ Variable`: - https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html - """ - def __init__(self, s, posix_offset=False): - global parser - from dateutil.parser import _parser as parser - - self._s = s - - res = parser._parsetz(s) - if res is None or res.any_unused_tokens: - raise ValueError("unknown string format") - - # Here we break the compatibility with the TZ variable handling. - # GMT-3 actually *means* the timezone -3. - if res.stdabbr in ("GMT", "UTC") and not posix_offset: - res.stdoffset *= -1 - - # We must initialize it first, since _delta() needs - # _std_offset and _dst_offset set. Use False in start/end - # to avoid building it two times. - tzrange.__init__(self, res.stdabbr, res.stdoffset, - res.dstabbr, res.dstoffset, - start=False, end=False) - - if not res.dstabbr: - self._start_delta = None - self._end_delta = None - else: - self._start_delta = self._delta(res.start) - if self._start_delta: - self._end_delta = self._delta(res.end, isend=1) - - self.hasdst = bool(self._start_delta) - - def _delta(self, x, isend=0): - from dateutil import relativedelta - kwargs = {} - if x.month is not None: - kwargs["month"] = x.month - if x.weekday is not None: - kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) - if x.week > 0: - kwargs["day"] = 1 - else: - kwargs["day"] = 31 - elif x.day: - kwargs["day"] = x.day - elif x.yday is not None: - kwargs["yearday"] = x.yday - elif x.jyday is not None: - kwargs["nlyearday"] = x.jyday - if not kwargs: - # Default is to start on first sunday of april, and end - # on last sunday of october. - if not isend: - kwargs["month"] = 4 - kwargs["day"] = 1 - kwargs["weekday"] = relativedelta.SU(+1) - else: - kwargs["month"] = 10 - kwargs["day"] = 31 - kwargs["weekday"] = relativedelta.SU(-1) - if x.time is not None: - kwargs["seconds"] = x.time - else: - # Default is 2AM. - kwargs["seconds"] = 7200 - if isend: - # Convert to standard time, to follow the documented way - # of working with the extra hour. See the documentation - # of the tzinfo class. 
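The defaults encoded in ``_delta`` above (first Sunday of April, last Sunday of October, 2 AM) are exactly what a bare ``EST5EDT`` string falls back to. A quick sketch, assuming an installed ``python-dateutil``:

    from datetime import datetime

    from dateutil import tz

    eastern = tz.tzstr("EST5EDT")  # no explicit rules: the defaults above apply
    print(eastern.tzname(datetime(2003, 1, 15)))  # 'EST' (standard time)
    print(eastern.tzname(datetime(2003, 7, 15)))  # 'EDT' (inside the default window)

    # Explicit POSIX rules are also accepted, including southern-hemisphere ones.
    sydney = tz.tzstr("AEST-10AEDT-11,M10.1.0/2,M4.1.0/3")
    print(sydney.tzname(datetime(2019, 1, 15)))   # 'AEDT'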
- delta = self._dst_offset - self._std_offset - kwargs["seconds"] -= delta.seconds + delta.days * 86400 - return relativedelta.relativedelta(**kwargs) - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, repr(self._s)) - - -class _tzicalvtzcomp(object): - def __init__(self, tzoffsetfrom, tzoffsetto, isdst, - tzname=None, rrule=None): - self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) - self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) - self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom - self.isdst = isdst - self.tzname = tzname - self.rrule = rrule - - -class _tzicalvtz(_tzinfo): - def __init__(self, tzid, comps=[]): - super(_tzicalvtz, self).__init__() - - self._tzid = tzid - self._comps = comps - self._cachedate = [] - self._cachecomp = [] - self._cache_lock = _thread.allocate_lock() - - def _find_comp(self, dt): - if len(self._comps) == 1: - return self._comps[0] - - dt = dt.replace(tzinfo=None) - - try: - with self._cache_lock: - return self._cachecomp[self._cachedate.index( - (dt, self._fold(dt)))] - except ValueError: - pass - - lastcompdt = None - lastcomp = None - - for comp in self._comps: - compdt = self._find_compdt(comp, dt) - - if compdt and (not lastcompdt or lastcompdt < compdt): - lastcompdt = compdt - lastcomp = comp - - if not lastcomp: - # RFC says nothing about what to do when a given - # time is before the first onset date. We'll look for the - # first standard component, or the first component, if - # none is found. - for comp in self._comps: - if not comp.isdst: - lastcomp = comp - break - else: - lastcomp = comp[0] - - with self._cache_lock: - self._cachedate.insert(0, (dt, self._fold(dt))) - self._cachecomp.insert(0, lastcomp) - - if len(self._cachedate) > 10: - self._cachedate.pop() - self._cachecomp.pop() - - return lastcomp - - def _find_compdt(self, comp, dt): - if comp.tzoffsetdiff < ZERO and self._fold(dt): - dt -= comp.tzoffsetdiff - - compdt = comp.rrule.before(dt, inc=True) - - return compdt - - def utcoffset(self, dt): - if dt is None: - return None - - return self._find_comp(dt).tzoffsetto - - def dst(self, dt): - comp = self._find_comp(dt) - if comp.isdst: - return comp.tzoffsetdiff - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return self._find_comp(dt).tzname - - def __repr__(self): - return "" % repr(self._tzid) - - __reduce__ = object.__reduce__ - - -class tzical(object): - """ - This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure - as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects. - - :param `fileobj`: - A file or stream in iCalendar format, which should be UTF-8 encoded - with CRLF endings. - - .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545 - """ - def __init__(self, fileobj): - global rrule - from dateutil import rrule - - if isinstance(fileobj, string_types): - self._s = fileobj - # ical should be encoded in UTF-8 with CRLF - fileobj = open(fileobj, 'r') - else: - self._s = getattr(fileobj, 'name', repr(fileobj)) - fileobj = _ContextWrapper(fileobj) - - self._vtz = {} - - with fileobj as fobj: - self._parse_rfc(fobj.read()) - - def keys(self): - """ - Retrieves the available time zones as a list. - """ - return list(self._vtz.keys()) - - def get(self, tzid=None): - """ - Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. - - :param tzid: - If there is exactly one time zone available, omitting ``tzid`` - or passing :py:const:`None` value returns it. 
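For reference, here is how the ``tzical`` parser above is typically driven end to end: a sketch assuming ``python-dateutil``, with a hypothetical ``Custom/Eastern`` TZID and an in-memory stream standing in for a file:

    import io
    from datetime import datetime

    from dateutil.tz import tzical

    # CRLF line endings, per the documented iCalendar expectation.
    VTZ = "\r\n".join([
        "BEGIN:VTIMEZONE",
        "TZID:Custom/Eastern",
        "BEGIN:STANDARD",
        "DTSTART:19671029T020000",
        "RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU",
        "TZOFFSETFROM:-0400",
        "TZOFFSETTO:-0500",
        "TZNAME:EST",
        "END:STANDARD",
        "BEGIN:DAYLIGHT",
        "DTSTART:19870405T020000",
        "RRULE:FREQ=YEARLY;BYMONTH=4;BYDAY=1SU",
        "TZOFFSETFROM:-0500",
        "TZOFFSETTO:-0400",
        "TZNAME:EDT",
        "END:DAYLIGHT",
        "END:VTIMEZONE",
    ])

    ical = tzical(io.StringIO(VTZ))
    print(ical.keys())                            # ['Custom/Eastern']
    eastern = ical.get()                          # tzid optional for a single zone
    print(eastern.tzname(datetime(2003, 7, 15)))  # 'EDT'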
Otherwise a valid - key (which can be retrieved from :func:`keys`) is required. - - :raises ValueError: - Raised if ``tzid`` is not specified but there are either more - or fewer than 1 zone defined. - - :returns: - Returns either a :py:class:`datetime.tzinfo` object representing - the relevant time zone or :py:const:`None` if the ``tzid`` was - not found. - """ - if tzid is None: - if len(self._vtz) == 0: - raise ValueError("no timezones defined") - elif len(self._vtz) > 1: - raise ValueError("more than one timezone available") - tzid = next(iter(self._vtz)) - - return self._vtz.get(tzid) - - def _parse_offset(self, s): - s = s.strip() - if not s: - raise ValueError("empty offset") - if s[0] in ('+', '-'): - signal = (-1, +1)[s[0] == '+'] - s = s[1:] - else: - signal = +1 - if len(s) == 4: - return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal - elif len(s) == 6: - return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal - else: - raise ValueError("invalid offset: " + s) - - def _parse_rfc(self, s): - lines = s.splitlines() - if not lines: - raise ValueError("empty string") - - # Unfold - i = 0 - while i < len(lines): - line = lines[i].rstrip() - if not line: - del lines[i] - elif i > 0 and line[0] == " ": - lines[i-1] += line[1:] - del lines[i] - else: - i += 1 - - tzid = None - comps = [] - invtz = False - comptype = None - for line in lines: - if not line: - continue - name, value = line.split(':', 1) - parms = name.split(';') - if not parms: - raise ValueError("empty property name") - name = parms[0].upper() - parms = parms[1:] - if invtz: - if name == "BEGIN": - if value in ("STANDARD", "DAYLIGHT"): - # Process component - pass - else: - raise ValueError("unknown component: "+value) - comptype = value - founddtstart = False - tzoffsetfrom = None - tzoffsetto = None - rrulelines = [] - tzname = None - elif name == "END": - if value == "VTIMEZONE": - if comptype: - raise ValueError("component not closed: "+comptype) - if not tzid: - raise ValueError("mandatory TZID not found") - if not comps: - raise ValueError( - "at least one component is needed") - # Process vtimezone - self._vtz[tzid] = _tzicalvtz(tzid, comps) - invtz = False - elif value == comptype: - if not founddtstart: - raise ValueError("mandatory DTSTART not found") - if tzoffsetfrom is None: - raise ValueError( - "mandatory TZOFFSETFROM not found") - if tzoffsetto is None: - raise ValueError( - "mandatory TZOFFSETFROM not found") - # Process component - rr = None - if rrulelines: - rr = rrule.rrulestr("\n".join(rrulelines), - compatible=True, - ignoretz=True, - cache=True) - comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, - (comptype == "DAYLIGHT"), - tzname, rr) - comps.append(comp) - comptype = None - else: - raise ValueError("invalid component end: "+value) - elif comptype: - if name == "DTSTART": - # DTSTART in VTIMEZONE takes a subset of valid RRULE - # values under RFC 5545. 
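The ``_parse_offset`` helper above accepts a signed ``HHMM`` or ``HHMMSS`` string. A standalone restatement of that rule (``parse_utc_offset`` is a hypothetical name, not part of dateutil):

    # Hypothetical helper mirroring _parse_offset above, not part of dateutil.
    def parse_utc_offset(s):
        s = s.strip()
        sign = 1
        if s and s[0] in "+-":
            sign = -1 if s[0] == "-" else +1
            s = s[1:]
        if len(s) == 4:    # HHMM
            return sign * (int(s[:2]) * 3600 + int(s[2:]) * 60)
        if len(s) == 6:    # HHMMSS
            return sign * (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:]))
        raise ValueError("invalid offset: " + s)

    print(parse_utc_offset("-0500"))    # -18000
    print(parse_utc_offset("+023030"))  # 9030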
- for parm in parms: - if parm != 'VALUE=DATE-TIME': - msg = ('Unsupported DTSTART param in ' + - 'VTIMEZONE: ' + parm) - raise ValueError(msg) - rrulelines.append(line) - founddtstart = True - elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): - rrulelines.append(line) - elif name == "TZOFFSETFROM": - if parms: - raise ValueError( - "unsupported %s parm: %s " % (name, parms[0])) - tzoffsetfrom = self._parse_offset(value) - elif name == "TZOFFSETTO": - if parms: - raise ValueError( - "unsupported TZOFFSETTO parm: "+parms[0]) - tzoffsetto = self._parse_offset(value) - elif name == "TZNAME": - if parms: - raise ValueError( - "unsupported TZNAME parm: "+parms[0]) - tzname = value - elif name == "COMMENT": - pass - else: - raise ValueError("unsupported property: "+name) - else: - if name == "TZID": - if parms: - raise ValueError( - "unsupported TZID parm: "+parms[0]) - tzid = value - elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): - pass - else: - raise ValueError("unsupported property: "+name) - elif name == "BEGIN" and value == "VTIMEZONE": - tzid = None - comps = [] - invtz = True - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, repr(self._s)) - - -if sys.platform != "win32": - TZFILES = ["/etc/localtime", "localtime"] - TZPATHS = ["/usr/share/zoneinfo", - "/usr/lib/zoneinfo", - "/usr/share/lib/zoneinfo", - "/etc/zoneinfo"] -else: - TZFILES = [] - TZPATHS = [] - - -def __get_gettz(): - tzlocal_classes = (tzlocal,) - if tzwinlocal is not None: - tzlocal_classes += (tzwinlocal,) - - class GettzFunc(object): - """ - Retrieve a time zone object from a string representation - - This function is intended to retrieve the :py:class:`tzinfo` subclass - that best represents the time zone that would be used if a POSIX - `TZ variable`_ were set to the same value. - - If no argument or an empty string is passed to ``gettz``, local time - is returned: - - .. code-block:: python3 - - >>> gettz() - tzfile('/etc/localtime') - - This function is also the preferred way to map IANA tz database keys - to :class:`tzfile` objects: - - .. code-block:: python3 - - >>> gettz('Pacific/Kiritimati') - tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') - - On Windows, the standard is extended to include the Windows-specific - zone names provided by the operating system: - - .. code-block:: python3 - - >>> gettz('Egypt Standard Time') - tzwin('Egypt Standard Time') - - Passing a GNU ``TZ`` style string time zone specification returns a - :class:`tzstr` object: - - .. code-block:: python3 - - >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') - tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') - - :param name: - A time zone name (IANA, or, on Windows, Windows keys), location of - a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone - specifier. An empty string, no argument or ``None`` is interpreted - as local time. - - :return: - Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` - subclasses. - - .. versionchanged:: 2.7.0 - - After version 2.7.0, any two calls to ``gettz`` using the same - input strings will return the same object: - - .. code-block:: python3 - - >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') - True - - In addition to improving performance, this ensures that - `"same zone" semantics`_ are used for datetimes in the same zone. - - - .. _`TZ variable`: - https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html - - .. 
_`"same zone" semantics`: - https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html - """ - def __init__(self): - - self.__instances = {} - self._cache_lock = _thread.allocate_lock() - - def __call__(self, name=None): - with self._cache_lock: - rv = self.__instances.get(name, None) - - if rv is None: - rv = self.nocache(name=name) - if not (name is None or isinstance(rv, tzlocal_classes)): - # tzlocal is slightly more complicated than the other - # time zone providers because it depends on environment - # at construction time, so don't cache that. - self.__instances[name] = rv - - return rv - - def cache_clear(self): - with self._cache_lock: - self.__instances = {} - - @staticmethod - def nocache(name=None): - """A non-cached version of gettz""" - tz = None - if not name: - try: - name = os.environ["TZ"] - except KeyError: - pass - if name is None or name == ":": - for filepath in TZFILES: - if not os.path.isabs(filepath): - filename = filepath - for path in TZPATHS: - filepath = os.path.join(path, filename) - if os.path.isfile(filepath): - break - else: - continue - if os.path.isfile(filepath): - try: - tz = tzfile(filepath) - break - except (IOError, OSError, ValueError): - pass - else: - tz = tzlocal() - else: - if name.startswith(":"): - name = name[1:] - if os.path.isabs(name): - if os.path.isfile(name): - tz = tzfile(name) - else: - tz = None - else: - for path in TZPATHS: - filepath = os.path.join(path, name) - if not os.path.isfile(filepath): - filepath = filepath.replace(' ', '_') - if not os.path.isfile(filepath): - continue - try: - tz = tzfile(filepath) - break - except (IOError, OSError, ValueError): - pass - else: - tz = None - if tzwin is not None: - try: - tz = tzwin(name) - except WindowsError: - tz = None - - if not tz: - from dateutil.zoneinfo import get_zonefile_instance - tz = get_zonefile_instance().get(name) - - if not tz: - for c in name: - # name is not a tzstr unless it has at least - # one offset. For short values of "name", an - # explicit for loop seems to be the fastest way - # To determine if a string contains a digit - if c in "0123456789": - try: - tz = tzstr(name) - except ValueError: - pass - break - else: - if name in ("GMT", "UTC"): - tz = tzutc() - elif name in time.tzname: - tz = tzlocal() - return tz - - return GettzFunc() - - -gettz = __get_gettz() -del __get_gettz - - -def datetime_exists(dt, tz=None): - """ - Given a datetime and a time zone, determine whether or not a given datetime - would fall in a gap. - - :param dt: - A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` - is provided.) - - :param tz: - A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If - ``None`` or not provided, the datetime's own time zone will be used. - - :return: - Returns a boolean value whether or not the "wall time" exists in - ``tz``. - - .. versionadded:: 2.7.0 - """ - if tz is None: - if dt.tzinfo is None: - raise ValueError('Datetime is naive and no time zone provided.') - tz = dt.tzinfo - - dt = dt.replace(tzinfo=None) - - # This is essentially a test of whether or not the datetime can survive - # a round trip to UTC. - dt_rt = dt.replace(tzinfo=tz).astimezone(tzutc()).astimezone(tz) - dt_rt = dt_rt.replace(tzinfo=None) - - return dt == dt_rt - - -def datetime_ambiguous(dt, tz=None): - """ - Given a datetime and a time zone, determine whether or not a given datetime - is ambiguous (i.e if there are two times differentiated only by their DST - status). 
- - :param dt: - A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` - is provided.) - - :param tz: - A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If - ``None`` or not provided, the datetime's own time zone will be used. - - :return: - Returns a boolean value whether or not the "wall time" is ambiguous in - ``tz``. - - .. versionadded:: 2.6.0 - """ - if tz is None: - if dt.tzinfo is None: - raise ValueError('Datetime is naive and no time zone provided.') - - tz = dt.tzinfo - - # If a time zone defines its own "is_ambiguous" function, we'll use that. - is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) - if is_ambiguous_fn is not None: - try: - return tz.is_ambiguous(dt) - except Exception: - pass - - # If it doesn't come out and tell us it's ambiguous, we'll just check if - # the fold attribute has any effect on this particular date and time. - dt = dt.replace(tzinfo=tz) - wall_0 = enfold(dt, fold=0) - wall_1 = enfold(dt, fold=1) - - same_offset = wall_0.utcoffset() == wall_1.utcoffset() - same_dst = wall_0.dst() == wall_1.dst() - - return not (same_offset and same_dst) - - -def resolve_imaginary(dt): - """ - Given a datetime that may be imaginary, return an existing datetime. - - This function assumes that an imaginary datetime represents what the - wall time would be in a zone had the offset transition not occurred, so - it will always fall forward by the transition's change in offset. - - .. doctest:: - - >>> from dateutil import tz - >>> from datetime import datetime - >>> NYC = tz.gettz('America/New_York') - >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) - 2017-03-12 03:30:00-04:00 - - >>> KIR = tz.gettz('Pacific/Kiritimati') - >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) - 1995-01-02 12:30:00+14:00 - - As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, - existing datetime, so a round-trip to and from UTC is sufficient to get - an extant datetime, however, this generally "falls back" to an earlier time - rather than falling forward to the STD side (though no guarantees are made - about this behavior). - - :param dt: - A :class:`datetime.datetime` which may or may not exist. - - :return: - Returns an existing :class:`datetime.datetime`. If ``dt`` was not - imaginary, the datetime returned is guaranteed to be the same object - passed to the function. - - .. versionadded:: 2.7.0 - """ - if dt.tzinfo is not None and not datetime_exists(dt): - - curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() - old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() - - dt += curr_offset - old_offset - - return dt - - -def _datetime_to_timestamp(dt): - """ - Convert a :class:`datetime.datetime` object to an epoch timestamp in - seconds since January 1, 1970, ignoring the time zone. - """ - return (dt.replace(tzinfo=None) - EPOCH).total_seconds() - - -class _ContextWrapper(object): - """ - Class for wrapping contexts so that they are passed through in a - with statement. - """ - def __init__(self, context): - self.context = context - - def __enter__(self): - return self.context - - def __exit__(*args, **kwargs): - pass - -# vim:ts=4:sw=4:et diff --git a/pype/vendor/dateutil/tz/win.py b/pype/vendor/dateutil/tz/win.py deleted file mode 100644 index def4353a64..0000000000 --- a/pype/vendor/dateutil/tz/win.py +++ /dev/null @@ -1,331 +0,0 @@ -# This code was originally contributed by Jeffrey Harris. 
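``resolve_imaginary`` (deleted above) always falls forward by the transition's change in offset. A short sketch, again assuming ``python-dateutil`` with America/New_York available:

    from datetime import datetime

    from dateutil import tz

    NYC = tz.gettz("America/New_York")

    # 02:30 on 2017-03-12 never happened in New York; the gap was one hour,
    # so the imaginary time is shifted forward by exactly that change.
    print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC)))
    # 2017-03-12 03:30:00-04:00

    # Existing datetimes come back unchanged (the very same object).
    real = datetime(2017, 6, 1, 12, 0, tzinfo=NYC)
    print(tz.resolve_imaginary(real) is real)  # True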
-import datetime -import struct - -from six.moves import winreg -from six import text_type - -try: - import ctypes - from ctypes import wintypes -except ValueError: - # ValueError is raised on non-Windows systems for some horrible reason. - raise ImportError("Running tzwin on non-Windows system") - -from ._common import tzrangebase - -__all__ = ["tzwin", "tzwinlocal", "tzres"] - -ONEWEEK = datetime.timedelta(7) - -TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" -TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" -TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" - - -def _settzkeyname(): - handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) - try: - winreg.OpenKey(handle, TZKEYNAMENT).Close() - TZKEYNAME = TZKEYNAMENT - except WindowsError: - TZKEYNAME = TZKEYNAME9X - handle.Close() - return TZKEYNAME - - -TZKEYNAME = _settzkeyname() - - -class tzres(object): - """ - Class for accessing `tzres.dll`, which contains timezone name related - resources. - - .. versionadded:: 2.5.0 - """ - p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char - - def __init__(self, tzres_loc='tzres.dll'): - # Load the user32 DLL so we can load strings from tzres - user32 = ctypes.WinDLL('user32') - - # Specify the LoadStringW function - user32.LoadStringW.argtypes = (wintypes.HINSTANCE, - wintypes.UINT, - wintypes.LPWSTR, - ctypes.c_int) - - self.LoadStringW = user32.LoadStringW - self._tzres = ctypes.WinDLL(tzres_loc) - self.tzres_loc = tzres_loc - - def load_name(self, offset): - """ - Load a timezone name from a DLL offset (integer). - - >>> from dateutil.tzwin import tzres - >>> tzr = tzres() - >>> print(tzr.load_name(112)) - 'Eastern Standard Time' - - :param offset: - A positive integer value referring to a string from the tzres dll. - - ..note: - Offsets found in the registry are generally of the form - `@tzres.dll,-114`. The offset in this case if 114, not -114. - - """ - resource = self.p_wchar() - lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) - nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) - return resource[:nchar] - - def name_from_string(self, tzname_str): - """ - Parse strings as returned from the Windows registry into the time zone - name as defined in the registry. - - >>> from dateutil.tzwin import tzres - >>> tzr = tzres() - >>> print(tzr.name_from_string('@tzres.dll,-251')) - 'Dateline Daylight Time' - >>> print(tzr.name_from_string('Eastern Standard Time')) - 'Eastern Standard Time' - - :param tzname_str: - A timezone name string as returned from a Windows registry key. - - :return: - Returns the localized timezone string from tzres.dll if the string - is of the form `@tzres.dll,-offset`, else returns the input string. - """ - if not tzname_str.startswith('@'): - return tzname_str - - name_splt = tzname_str.split(',-') - try: - offset = int(name_splt[1]) - except: - raise ValueError("Malformed timezone string.") - - return self.load_name(offset) - - -class tzwinbase(tzrangebase): - """tzinfo class based on win32's timezones available in the registry.""" - def __init__(self): - raise NotImplementedError('tzwinbase is an abstract base class') - - def __eq__(self, other): - # Compare on all relevant dimensions, including name. 
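The registry values handled by ``tzres.name_from_string`` above follow the ``@tzres.dll,-offset`` convention. The real resolution needs ``LoadStringW`` against ``tzres.dll`` on Windows, so this platform-independent sketch only restates the string handling, with a hypothetical helper name:

    # Hypothetical helper, not part of dateutil.
    def split_tzres_reference(tzname_str):
        if not tzname_str.startswith("@"):
            return None, tzname_str           # already a literal name
        dll, _, offset = tzname_str.partition(",-")
        if not offset.isdigit():
            raise ValueError("Malformed timezone string.")
        return dll.lstrip("@"), int(offset)   # DLL name and *positive* offset

    print(split_tzres_reference("@tzres.dll,-251"))        # ('tzres.dll', 251)
    print(split_tzres_reference("Eastern Standard Time"))  # (None, 'Eastern Standard Time')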
- if not isinstance(other, tzwinbase): - return NotImplemented - - return (self._std_offset == other._std_offset and - self._dst_offset == other._dst_offset and - self._stddayofweek == other._stddayofweek and - self._dstdayofweek == other._dstdayofweek and - self._stdweeknumber == other._stdweeknumber and - self._dstweeknumber == other._dstweeknumber and - self._stdhour == other._stdhour and - self._dsthour == other._dsthour and - self._stdminute == other._stdminute and - self._dstminute == other._dstminute and - self._std_abbr == other._std_abbr and - self._dst_abbr == other._dst_abbr) - - @staticmethod - def list(): - """Return a list of all time zones known to the system.""" - with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - with winreg.OpenKey(handle, TZKEYNAME) as tzkey: - result = [winreg.EnumKey(tzkey, i) - for i in range(winreg.QueryInfoKey(tzkey)[0])] - return result - - def display(self): - return self._display - - def transitions(self, year): - """ - For a given year, get the DST on and off transition times, expressed - always on the standard time side. For zones with no transitions, this - function returns ``None``. - - :param year: - The year whose transitions you would like to query. - - :return: - Returns a :class:`tuple` of :class:`datetime.datetime` objects, - ``(dston, dstoff)`` for zones with an annual DST transition, or - ``None`` for fixed offset zones. - """ - - if not self.hasdst: - return None - - dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, - self._dsthour, self._dstminute, - self._dstweeknumber) - - dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, - self._stdhour, self._stdminute, - self._stdweeknumber) - - # Ambiguous dates default to the STD side - dstoff -= self._dst_base_offset - - return dston, dstoff - - def _get_hasdst(self): - return self._dstmonth != 0 - - @property - def _dst_base_offset(self): - return self._dst_base_offset_ - - -class tzwin(tzwinbase): - - def __init__(self, name): - self._name = name - - with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) - with winreg.OpenKey(handle, tzkeyname) as tzkey: - keydict = valuestodict(tzkey) - - self._std_abbr = keydict["Std"] - self._dst_abbr = keydict["Dlt"] - - self._display = keydict["Display"] - - # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm - tup = struct.unpack("=3l16h", keydict["TZI"]) - stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 - dstoffset = stdoffset-tup[2] # + DaylightBias * -1 - self._std_offset = datetime.timedelta(minutes=stdoffset) - self._dst_offset = datetime.timedelta(minutes=dstoffset) - - # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs - # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx - (self._stdmonth, - self._stddayofweek, # Sunday = 0 - self._stdweeknumber, # Last = 5 - self._stdhour, - self._stdminute) = tup[4:9] - - (self._dstmonth, - self._dstdayofweek, # Sunday = 0 - self._dstweeknumber, # Last = 5 - self._dsthour, - self._dstminute) = tup[12:17] - - self._dst_base_offset_ = self._dst_offset - self._std_offset - self.hasdst = self._get_hasdst() - - def __repr__(self): - return "tzwin(%s)" % repr(self._name) - - def __reduce__(self): - return (self.__class__, (self._name,)) - - -class tzwinlocal(tzwinbase): - def __init__(self): - with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - with winreg.OpenKey(handle, TZLOCALKEYNAME) as 
tzlocalkey: - keydict = valuestodict(tzlocalkey) - - self._std_abbr = keydict["StandardName"] - self._dst_abbr = keydict["DaylightName"] - - try: - tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, - sn=self._std_abbr) - with winreg.OpenKey(handle, tzkeyname) as tzkey: - _keydict = valuestodict(tzkey) - self._display = _keydict["Display"] - except OSError: - self._display = None - - stdoffset = -keydict["Bias"]-keydict["StandardBias"] - dstoffset = stdoffset-keydict["DaylightBias"] - - self._std_offset = datetime.timedelta(minutes=stdoffset) - self._dst_offset = datetime.timedelta(minutes=dstoffset) - - # For reasons unclear, in this particular key, the day of week has been - # moved to the END of the SYSTEMTIME structure. - tup = struct.unpack("=8h", keydict["StandardStart"]) - - (self._stdmonth, - self._stdweeknumber, # Last = 5 - self._stdhour, - self._stdminute) = tup[1:5] - - self._stddayofweek = tup[7] - - tup = struct.unpack("=8h", keydict["DaylightStart"]) - - (self._dstmonth, - self._dstweeknumber, # Last = 5 - self._dsthour, - self._dstminute) = tup[1:5] - - self._dstdayofweek = tup[7] - - self._dst_base_offset_ = self._dst_offset - self._std_offset - self.hasdst = self._get_hasdst() - - def __repr__(self): - return "tzwinlocal()" - - def __str__(self): - # str will return the standard name, not the daylight name. - return "tzwinlocal(%s)" % repr(self._std_abbr) - - def __reduce__(self): - return (self.__class__, ()) - - -def picknthweekday(year, month, dayofweek, hour, minute, whichweek): - """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ - first = datetime.datetime(year, month, 1, hour, minute) - - # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), - # Because 7 % 7 = 0 - weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) - wd = weekdayone + ((whichweek - 1) * ONEWEEK) - if (wd.month != month): - wd -= ONEWEEK - - return wd - - -def valuestodict(key): - """Convert a registry key's values to a dictionary.""" - dout = {} - size = winreg.QueryInfoKey(key)[1] - tz_res = None - - for i in range(size): - key_name, value, dtype = winreg.EnumValue(key, i) - if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: - # If it's a DWORD (32-bit integer), it's stored as unsigned - convert - # that to a proper signed integer - if value & (1 << 31): - value = value - (1 << 32) - elif dtype == winreg.REG_SZ: - # If it's a reference to the tzres DLL, load the actual string - if value.startswith('@tzres'): - tz_res = tz_res or tzres() - value = tz_res.name_from_string(value) - - value = value.rstrip('\x00') # Remove trailing nulls - - dout[key_name] = value - - return dout diff --git a/pype/vendor/dateutil/tzwin.py b/pype/vendor/dateutil/tzwin.py deleted file mode 100644 index cebc673e40..0000000000 --- a/pype/vendor/dateutil/tzwin.py +++ /dev/null @@ -1,2 +0,0 @@ -# tzwin has moved to dateutil.tz.win -from .tz.win import * diff --git a/pype/vendor/dateutil/utils.py b/pype/vendor/dateutil/utils.py deleted file mode 100644 index ebcce6aa2a..0000000000 --- a/pype/vendor/dateutil/utils.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers general convenience and utility functions for dealing with -datetimes. - -.. 
versionadded:: 2.7.0 -""" -from __future__ import unicode_literals - -from datetime import datetime, time - - -def today(tzinfo=None): - """ - Returns a :py:class:`datetime` representing the current day at midnight - - :param tzinfo: - The time zone to attach (also used to determine the current day). - - :return: - A :py:class:`datetime.datetime` object representing the current day - at midnight. - """ - - dt = datetime.now(tzinfo) - return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) - - -def default_tzinfo(dt, tzinfo): - """ - Sets the the ``tzinfo`` parameter on naive datetimes only - - This is useful for example when you are provided a datetime that may have - either an implicit or explicit time zone, such as when parsing a time zone - string. - - .. doctest:: - - >>> from dateutil.tz import tzoffset - >>> from dateutil.parser import parse - >>> from dateutil.utils import default_tzinfo - >>> dflt_tz = tzoffset("EST", -18000) - >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) - 2014-01-01 12:30:00+00:00 - >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) - 2014-01-01 12:30:00-05:00 - - :param dt: - The datetime on which to replace the time zone - - :param tzinfo: - The :py:class:`datetime.tzinfo` subclass instance to assign to - ``dt`` if (and only if) it is naive. - - :return: - Returns an aware :py:class:`datetime.datetime`. - """ - if dt.tzinfo is not None: - return dt - else: - return dt.replace(tzinfo=tzinfo) - - -def within_delta(dt1, dt2, delta): - """ - Useful for comparing two datetimes that may a negilible difference - to be considered equal. - """ - delta = abs(delta) - difference = dt1 - dt2 - return -delta <= difference <= delta diff --git a/pype/vendor/dateutil/zoneinfo/__init__.py b/pype/vendor/dateutil/zoneinfo/__init__.py deleted file mode 100644 index 34f11ad66c..0000000000 --- a/pype/vendor/dateutil/zoneinfo/__init__.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -import warnings -import json - -from tarfile import TarFile -from pkgutil import get_data -from io import BytesIO - -from dateutil.tz import tzfile as _tzfile - -__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] - -ZONEFILENAME = "dateutil-zoneinfo.tar.gz" -METADATA_FN = 'METADATA' - - -class tzfile(_tzfile): - def __reduce__(self): - return (gettz, (self._filename,)) - - -def getzoneinfofile_stream(): - try: - return BytesIO(get_data(__name__, ZONEFILENAME)) - except IOError as e: # TODO switch to FileNotFoundError? - warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) - return None - - -class ZoneInfoFile(object): - def __init__(self, zonefile_stream=None): - if zonefile_stream is not None: - with TarFile.open(fileobj=zonefile_stream) as tf: - self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) - for zf in tf.getmembers() - if zf.isfile() and zf.name != METADATA_FN} - # deal with links: They'll point to their parent object. Less - # waste of memory - links = {zl.name: self.zones[zl.linkname] - for zl in tf.getmembers() if - zl.islnk() or zl.issym()} - self.zones.update(links) - try: - metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) - metadata_str = metadata_json.read().decode('UTF-8') - self.metadata = json.loads(metadata_str) - except KeyError: - # no metadata in tar file - self.metadata = None - else: - self.zones = {} - self.metadata = None - - def get(self, name, default=None): - """ - Wrapper for :func:`ZoneInfoFile.zones.get`. 
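The ``dateutil.utils`` helpers removed above are small but handy; a usage sketch assuming ``python-dateutil`` is installed:

    from datetime import datetime, timedelta

    from dateutil import utils
    from dateutil.tz import tzoffset

    est = tzoffset("EST", -18000)

    # default_tzinfo() only touches naive datetimes.
    print(utils.default_tzinfo(datetime(2014, 1, 1, 12, 30), est))
    # 2014-01-01 12:30:00-05:00

    # within_delta(): equality up to a tolerance.
    a = datetime(2016, 1, 1, 12, 0, 0)
    b = a + timedelta(seconds=30)
    print(utils.within_delta(a, b, timedelta(minutes=1)))   # True
    print(utils.within_delta(a, b, timedelta(seconds=10)))  # False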
This is a convenience method - for retrieving zones from the zone dictionary. - - :param name: - The name of the zone to retrieve. (Generally IANA zone names) - - :param default: - The value to return in the event of a missing key. - - .. versionadded:: 2.6.0 - - """ - return self.zones.get(name, default) - - -# The current API has gettz as a module function, although in fact it taps into -# a stateful class. So as a workaround for now, without changing the API, we -# will create a new "global" class instance the first time a user requests a -# timezone. Ugly, but adheres to the api. -# -# TODO: Remove after deprecation period. -_CLASS_ZONE_INSTANCE = [] - - -def get_zonefile_instance(new_instance=False): - """ - This is a convenience function which provides a :class:`ZoneInfoFile` - instance using the data provided by the ``dateutil`` package. By default, it - caches a single instance of the ZoneInfoFile object and returns that. - - :param new_instance: - If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and - used as the cached instance for the next call. Otherwise, new instances - are created only as necessary. - - :return: - Returns a :class:`ZoneInfoFile` object. - - .. versionadded:: 2.6 - """ - if new_instance: - zif = None - else: - zif = getattr(get_zonefile_instance, '_cached_instance', None) - - if zif is None: - zif = ZoneInfoFile(getzoneinfofile_stream()) - - get_zonefile_instance._cached_instance = zif - - return zif - - -def gettz(name): - """ - This retrieves a time zone from the local zoneinfo tarball that is packaged - with dateutil. - - :param name: - An IANA-style time zone name, as found in the zoneinfo file. - - :return: - Returns a :class:`dateutil.tz.tzfile` time zone object. - - .. warning:: - It is generally inadvisable to use this function, and it is only - provided for API compatibility with earlier versions. This is *not* - equivalent to ``dateutil.tz.gettz()``, which selects an appropriate - time zone based on the inputs, favoring system zoneinfo. This is ONLY - for accessing the dateutil-specific zoneinfo (which may be out of - date compared to the system zoneinfo). - - .. deprecated:: 2.6 - If you need to use a specific zoneinfofile over the system zoneinfo, - instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call - :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. - - Use :func:`get_zonefile_instance` to retrieve an instance of the - dateutil-provided zoneinfo. - """ - warnings.warn("zoneinfo.gettz() will be removed in future versions, " - "to use the dateutil-provided zoneinfo files, instantiate a " - "ZoneInfoFile object and use ZoneInfoFile.zones.get() " - "instead. See the documentation for details.", - DeprecationWarning) - - if len(_CLASS_ZONE_INSTANCE) == 0: - _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) - return _CLASS_ZONE_INSTANCE[0].zones.get(name) - - -def gettz_db_metadata(): - """ Get the zonefile metadata - - See `zonefile_metadata`_ - - :returns: - A dictionary with the database metadata - - .. deprecated:: 2.6 - See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, - query the attribute ``zoneinfo.ZoneInfoFile.metadata``. - """ - warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " - "versions, to use the dateutil-provided zoneinfo files, " - "ZoneInfoFile object and query the 'metadata' attribute " - "instead. 
See the documentation for details.", - DeprecationWarning) - - if len(_CLASS_ZONE_INSTANCE) == 0: - _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) - return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/pype/vendor/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/pype/vendor/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz deleted file mode 100644 index e86b54fe28..0000000000 Binary files a/pype/vendor/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz and /dev/null differ diff --git a/pype/vendor/dateutil/zoneinfo/rebuild.py b/pype/vendor/dateutil/zoneinfo/rebuild.py deleted file mode 100644 index 78f0d1a0ce..0000000000 --- a/pype/vendor/dateutil/zoneinfo/rebuild.py +++ /dev/null @@ -1,53 +0,0 @@ -import logging -import os -import tempfile -import shutil -import json -from subprocess import check_call -from tarfile import TarFile - -from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME - - -def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): - """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* - - filename is the timezone tarball from ``ftp.iana.org/tz``. - - """ - tmpdir = tempfile.mkdtemp() - zonedir = os.path.join(tmpdir, "zoneinfo") - moduledir = os.path.dirname(__file__) - try: - with TarFile.open(filename) as tf: - for name in zonegroups: - tf.extract(name, tmpdir) - filepaths = [os.path.join(tmpdir, n) for n in zonegroups] - try: - check_call(["zic", "-d", zonedir] + filepaths) - except OSError as e: - _print_on_nosuchfile(e) - raise - # write metadata file - with open(os.path.join(zonedir, METADATA_FN), 'w') as f: - json.dump(metadata, f, indent=4, sort_keys=True) - target = os.path.join(moduledir, ZONEFILENAME) - with TarFile.open(target, "w:%s" % format) as tf: - for entry in os.listdir(zonedir): - entrypath = os.path.join(zonedir, entry) - tf.add(entrypath, entry) - finally: - shutil.rmtree(tmpdir) - - -def _print_on_nosuchfile(e): - """Print helpful troubleshooting message - - e is an exception raised by subprocess.check_call() - - """ - if e.errno == 2: - logging.error( - "Could not find zic. Perhaps you need to install " - "libc-bin or some other package that provides it, " - "or it's not in your PATH?") diff --git a/pype/vendor/ffmpeg/__init__.py b/pype/vendor/ffmpeg/__init__.py deleted file mode 100644 index 7e913d37ae..0000000000 --- a/pype/vendor/ffmpeg/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from __future__ import unicode_literals - -from . import _filters, _ffmpeg, _run, _probe -from ._filters import * -from ._ffmpeg import * -from ._run import * -from ._view import * -from ._probe import * - -__all__ = _filters.__all__ + _ffmpeg.__all__ + _run.__all__ + _view.__all__ + _probe.__all__ diff --git a/pype/vendor/ffmpeg/_ffmpeg.py b/pype/vendor/ffmpeg/_ffmpeg.py deleted file mode 100644 index fa81f5755a..0000000000 --- a/pype/vendor/ffmpeg/_ffmpeg.py +++ /dev/null @@ -1,102 +0,0 @@ -from __future__ import unicode_literals - -#from past.builtins import basestring -from ._utils import basestring - -from .nodes import ( - filter_operator, - GlobalNode, - InputNode, - MergeOutputsNode, - OutputNode, - output_operator, -) - - -def input(filename, **kwargs): - """Input file URL (ffmpeg ``-i`` option) - - Any supplied kwargs are passed to ffmpeg verbatim (e.g. ``t=20``, - ``f='mp4'``, ``acodec='pcm'``, etc.). - - To tell ffmpeg to read from stdin, use ``pipe:`` as the filename. 
- - Official documentation: `Main options `__ - """ - kwargs['filename'] = filename - fmt = kwargs.pop('f', None) - if fmt: - if 'format' in kwargs: - raise ValueError("Can't specify both `format` and `f` kwargs") - kwargs['format'] = fmt - return InputNode(input.__name__, kwargs=kwargs).stream() - - -@output_operator() -def global_args(stream, *args): - """Add extra global command-line argument(s), e.g. ``-progress``. - """ - return GlobalNode(stream, global_args.__name__, args).stream() - - -@output_operator() -def overwrite_output(stream): - """Overwrite output files without asking (ffmpeg ``-y`` option) - - Official documentation: `Main options `__ - """ - return GlobalNode(stream, overwrite_output.__name__, ['-y']).stream() - - -@output_operator() -def merge_outputs(*streams): - """Include all given outputs in one ffmpeg command line - """ - return MergeOutputsNode(streams, merge_outputs.__name__).stream() - - -@filter_operator() -def output(*streams_and_filename, **kwargs): - """Output file URL - - Syntax: - `ffmpeg.output(stream1[, stream2, stream3...], filename, **ffmpeg_args)` - - Any supplied keyword arguments are passed to ffmpeg verbatim (e.g. - ``t=20``, ``f='mp4'``, ``acodec='pcm'``, ``vcodec='rawvideo'``, - etc.). Some keyword-arguments are handled specially, as shown below. - - Args: - video_bitrate: parameter for ``-b:v``, e.g. ``video_bitrate=1000``. - audio_bitrate: parameter for ``-b:a``, e.g. ``audio_bitrate=200``. - format: alias for ``-f`` parameter, e.g. ``format='mp4'`` - (equivalent to ``f='mp4'``). - - If multiple streams are provided, they are mapped to the same - output. - - To tell ffmpeg to write to stdout, use ``pipe:`` as the filename. - - Official documentation: `Synopsis `__ - """ - streams_and_filename = list(streams_and_filename) - if 'filename' not in kwargs: - if not isinstance(streams_and_filename[-1], basestring): - raise ValueError('A filename must be provided') - kwargs['filename'] = streams_and_filename.pop(-1) - streams = streams_and_filename - - fmt = kwargs.pop('f', None) - if fmt: - if 'format' in kwargs: - raise ValueError("Can't specify both `format` and `f` kwargs") - kwargs['format'] = fmt - return OutputNode(streams, output.__name__, kwargs=kwargs).stream() - - -__all__ = [ - 'input', - 'merge_outputs', - 'output', - 'overwrite_output', -] diff --git a/pype/vendor/ffmpeg/_filters.py b/pype/vendor/ffmpeg/_filters.py deleted file mode 100644 index 6ba30d3404..0000000000 --- a/pype/vendor/ffmpeg/_filters.py +++ /dev/null @@ -1,453 +0,0 @@ -from __future__ import unicode_literals - -from .nodes import FilterNode, filter_operator -from ._utils import escape_chars - - -@filter_operator() -def filter_multi_output(stream_spec, filter_name, *args, **kwargs): - """Apply custom filter with one or more outputs. - - This is the same as ``filter_`` except that the filter can produce more than one output. - - To reference an output stream, use either the ``.stream`` operator or bracket shorthand: - - Example: - - ``` - split = ffmpeg.input('in.mp4').filter_multi_output('split') - split0 = split.stream(0) - split1 = split[1] - ffmpeg.concat(split0, split1).output('out.mp4').run() - ``` - """ - return FilterNode(stream_spec, filter_name, args=args, kwargs=kwargs, max_inputs=None) - - -@filter_operator() -def filter(stream_spec, filter_name, *args, **kwargs): - """Apply custom filter. 
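The ``filter_multi_output`` example from the docstring above, spelled out as runnable code (same assumptions and placeholder file names as before):

    import ffmpeg

    split = ffmpeg.input("in.mp4").filter_multi_output("split")
    split0 = split.stream(0)  # explicit selector...
    split1 = split[1]         # ...or the bracket shorthand
    out = ffmpeg.concat(split0, split1).output("out.mp4")
    print(out.get_args())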
- - ``filter_`` is normally used by higher-level filter functions such as ``hflip``, but if a filter implementation - is missing from ``fmpeg-python``, you can call ``filter_`` directly to have ``fmpeg-python`` pass the filter name - and arguments to ffmpeg verbatim. - - Args: - stream_spec: a Stream, list of Streams, or label-to-Stream dictionary mapping - filter_name: ffmpeg filter name, e.g. `colorchannelmixer` - *args: list of args to pass to ffmpeg verbatim - **kwargs: list of keyword-args to pass to ffmpeg verbatim - - The function name is suffixed with ``_`` in order avoid confusion with the standard python ``filter`` function. - - Example: - - ``ffmpeg.input('in.mp4').filter('hflip').output('out.mp4').run()`` - """ - return filter_multi_output(stream_spec, filter_name, *args, **kwargs).stream() - - -@filter_operator() -def filter_(stream_spec, filter_name, *args, **kwargs): - """Alternate name for ``filter``, so as to not collide with the - built-in python ``filter`` operator. - """ - return filter(stream_spec, filter_name, *args, **kwargs) - - -@filter_operator() -def split(stream): - return FilterNode(stream, split.__name__) - - -@filter_operator() -def asplit(stream): - return FilterNode(stream, asplit.__name__) - - -@filter_operator() -def setpts(stream, expr): - """Change the PTS (presentation timestamp) of the input frames. - - Args: - expr: The expression which is evaluated for each frame to construct its timestamp. - - Official documentation: `setpts, asetpts `__ - """ - return FilterNode(stream, setpts.__name__, args=[expr]).stream() - - -@filter_operator() -def trim(stream, **kwargs): - """Trim the input so that the output contains one continuous subpart of the input. - - Args: - start: Specify the time of the start of the kept section, i.e. the frame with the timestamp start will be the - first frame in the output. - end: Specify the time of the first frame that will be dropped, i.e. the frame immediately preceding the one - with the timestamp end will be the last frame in the output. - start_pts: This is the same as start, except this option sets the start timestamp in timebase units instead of - seconds. - end_pts: This is the same as end, except this option sets the end timestamp in timebase units instead of - seconds. - duration: The maximum duration of the output in seconds. - start_frame: The number of the first frame that should be passed to the output. - end_frame: The number of the first frame that should be dropped. - - Official documentation: `trim `__ - """ - return FilterNode(stream, trim.__name__, kwargs=kwargs).stream() - - -@filter_operator() -def overlay(main_parent_node, overlay_parent_node, eof_action='repeat', **kwargs): - """Overlay one video on top of another. - - Args: - x: Set the expression for the x coordinates of the overlaid video on the main video. Default value is 0. In - case the expression is invalid, it is set to a huge value (meaning that the overlay will not be displayed - within the output visible area). - y: Set the expression for the y coordinates of the overlaid video on the main video. Default value is 0. In - case the expression is invalid, it is set to a huge value (meaning that the overlay will not be displayed - within the output visible area). - eof_action: The action to take when EOF is encountered on the secondary input; it accepts one of the following - values: - - * ``repeat``: Repeat the last frame (the default). - * ``endall``: End both streams. - * ``pass``: Pass the main input through. 
- - eval: Set when the expressions for x, and y are evaluated. - It accepts the following values: - - * ``init``: only evaluate expressions once during the filter initialization or when a command is - processed - * ``frame``: evaluate expressions for each incoming frame - - Default value is ``frame``. - shortest: If set to 1, force the output to terminate when the shortest input terminates. Default value is 0. - format: Set the format for the output video. - It accepts the following values: - - * ``yuv420``: force YUV420 output - * ``yuv422``: force YUV422 output - * ``yuv444``: force YUV444 output - * ``rgb``: force packed RGB output - * ``gbrp``: force planar RGB output - - Default value is ``yuv420``. - rgb (deprecated): If set to 1, force the filter to accept inputs in the RGB color space. Default value is 0. - This option is deprecated, use format instead. - repeatlast: If set to 1, force the filter to draw the last overlay frame over the main input until the end of - the stream. A value of 0 disables this behavior. Default value is 1. - - Official documentation: `overlay `__ - """ - kwargs['eof_action'] = eof_action - return FilterNode([main_parent_node, overlay_parent_node], overlay.__name__, kwargs=kwargs, max_inputs=2).stream() - - -@filter_operator() -def hflip(stream): - """Flip the input video horizontally. - - Official documentation: `hflip `__ - """ - return FilterNode(stream, hflip.__name__).stream() - - -@filter_operator() -def vflip(stream): - """Flip the input video vertically. - - Official documentation: `vflip `__ - """ - return FilterNode(stream, vflip.__name__).stream() - - -@filter_operator() -def crop(stream, x, y, width, height, **kwargs): - """Crop the input video. - - Args: - x: The horizontal position, in the input video, of the left edge of - the output video. - y: The vertical position, in the input video, of the top edge of the - output video. - width: The width of the output video. Must be greater than 0. - heigth: The height of the output video. Must be greater than 0. - - Official documentation: `crop `__ - """ - return FilterNode( - stream, - crop.__name__, - args=[width, height, x, y], - kwargs=kwargs - ).stream() - - -@filter_operator() -def drawbox(stream, x, y, width, height, color, thickness=None, **kwargs): - """Draw a colored box on the input image. - - Args: - x: The expression which specifies the top left corner x coordinate of the box. It defaults to 0. - y: The expression which specifies the top left corner y coordinate of the box. It defaults to 0. - width: Specify the width of the box; if 0 interpreted as the input width. It defaults to 0. - heigth: Specify the height of the box; if 0 interpreted as the input height. It defaults to 0. - color: Specify the color of the box to write. For the general syntax of this option, check the "Color" section - in the ffmpeg-utils manual. If the special value invert is used, the box edge color is the same as the - video with inverted luma. - thickness: The expression which sets the thickness of the box edge. Default value is 3. - w: Alias for ``width``. - h: Alias for ``height``. - c: Alias for ``color``. - t: Alias for ``thickness``. 
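The filters documented above (``crop``, ``hflip``, ``overlay``) compose into a single graph through the fluent stream operators; a sketch assuming ``ffmpeg-python``, with placeholder media files:

    import ffmpeg

    main = ffmpeg.input("main.mp4")
    logo = ffmpeg.input("logo.png")

    out = (
        main
        .crop(x=10, y=10, width=640, height=360)
        .hflip()
        .overlay(logo, x=25, y=25, eof_action="pass")
        .output("branded.mp4")
    )
    print(out.get_args())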
- - Official documentation: `drawbox `__ - """ - if thickness: - kwargs['t'] = thickness - return FilterNode(stream, drawbox.__name__, args=[x, y, width, height, color], kwargs=kwargs).stream() - - -@filter_operator() -def drawtext(stream, text=None, x=0, y=0, escape_text=True, **kwargs): - """Draw a text string or text from a specified file on top of a video, using the libfreetype library. - - To enable compilation of this filter, you need to configure FFmpeg with ``--enable-libfreetype``. To enable default - font fallback and the font option you need to configure FFmpeg with ``--enable-libfontconfig``. To enable the - text_shaping option, you need to configure FFmpeg with ``--enable-libfribidi``. - - Args: - box: Used to draw a box around text using the background color. The value must be either 1 (enable) or 0 - (disable). The default value of box is 0. - boxborderw: Set the width of the border to be drawn around the box using boxcolor. The default value of - boxborderw is 0. - boxcolor: The color to be used for drawing box around text. For the syntax of this option, check the "Color" - section in the ffmpeg-utils manual. The default value of boxcolor is "white". - line_spacing: Set the line spacing in pixels of the border to be drawn around the box using box. The default - value of line_spacing is 0. - borderw: Set the width of the border to be drawn around the text using bordercolor. The default value of - borderw is 0. - bordercolor: Set the color to be used for drawing border around text. For the syntax of this option, check the - "Color" section in the ffmpeg-utils manual. The default value of bordercolor is "black". - expansion: Select how the text is expanded. Can be either none, strftime (deprecated) or normal (default). See - the Text expansion section below for details. - basetime: Set a start time for the count. Value is in microseconds. Only applied in the deprecated strftime - expansion mode. To emulate in normal expansion mode use the pts function, supplying the start time (in - seconds) as the second argument. - fix_bounds: If true, check and fix text coords to avoid clipping. - fontcolor: The color to be used for drawing fonts. For the syntax of this option, check the "Color" section in - the ffmpeg-utils manual. The default value of fontcolor is "black". - fontcolor_expr: String which is expanded the same way as text to obtain dynamic fontcolor value. By default - this option has empty value and is not processed. When this option is set, it overrides fontcolor option. - font: The font family to be used for drawing text. By default Sans. - fontfile: The font file to be used for drawing text. The path must be included. This parameter is mandatory if - the fontconfig support is disabled. - alpha: Draw the text applying alpha blending. The value can be a number between 0.0 and 1.0. The expression - accepts the same variables x, y as well. The default value is 1. Please see fontcolor_expr. - fontsize: The font size to be used for drawing text. The default value of fontsize is 16. - text_shaping: If set to 1, attempt to shape the text (for example, reverse the order of right-to-left text and - join Arabic characters) before drawing it. Otherwise, just draw the text exactly as given. By default 1 (if - supported). - ft_load_flags: The flags to be used for loading the fonts. 
The flags map the corresponding flags supported by - libfreetype, and are a combination of the following values: - - * ``default`` - * ``no_scale`` - * ``no_hinting`` - * ``render`` - * ``no_bitmap`` - * ``vertical_layout`` - * ``force_autohint`` - * ``crop_bitmap`` - * ``pedantic`` - * ``ignore_global_advance_width`` - * ``no_recurse`` - * ``ignore_transform`` - * ``monochrome`` - * ``linear_design`` - * ``no_autohint`` - - Default value is "default". For more information consult the documentation for the FT_LOAD_* libfreetype - flags. - shadowcolor: The color to be used for drawing a shadow behind the drawn text. For the syntax of this option, - check the "Color" section in the ffmpeg-utils manual. The default value of shadowcolor is "black". - shadowx: The x offset for the text shadow position with respect to the position of the text. It can be either - positive or negative values. The default value is "0". - shadowy: The y offset for the text shadow position with respect to the position of the text. It can be either - positive or negative values. The default value is "0". - start_number: The starting frame number for the n/frame_num variable. The default value is "0". - tabsize: The size in number of spaces to use for rendering the tab. Default value is 4. - timecode: Set the initial timecode representation in "hh:mm:ss[:;.]ff" format. It can be used with or without - text parameter. timecode_rate option must be specified. - rate: Set the timecode frame rate (timecode only). - timecode_rate: Alias for ``rate``. - r: Alias for ``rate``. - tc24hmax: If set to 1, the output of the timecode option will wrap around at 24 hours. Default is 0 (disabled). - text: The text string to be drawn. The text must be a sequence of UTF-8 encoded characters. This parameter is - mandatory if no file is specified with the parameter textfile. - textfile: A text file containing text to be drawn. The text must be a sequence of UTF-8 encoded characters. - This parameter is mandatory if no text string is specified with the parameter text. If both text and - textfile are specified, an error is thrown. - reload: If set to 1, the textfile will be reloaded before each frame. Be sure to update it atomically, or it - may be read partially, or even fail. - x: The expression which specifies the offset where text will be drawn within the video frame. It is relative to - the left border of the output image. The default value is "0". - y: The expression which specifies the offset where text will be drawn within the video frame. It is relative to - the top border of the output image. The default value is "0". See below for the list of accepted constants - and functions. - - Expression constants: - The parameters for x and y are expressions containing the following constants and functions: - - dar: input display aspect ratio, it is the same as ``(w / h) * sar`` - - hsub: horizontal chroma subsample values. For example for the pixel format "yuv422p" hsub is 2 and vsub - is 1. - - vsub: vertical chroma subsample values. For example for the pixel format "yuv422p" hsub is 2 and vsub - is 1. - - line_h: the height of each text line - - lh: Alias for ``line_h``. - - main_h: the input height - - h: Alias for ``main_h``. - - H: Alias for ``main_h``. - - main_w: the input width - - w: Alias for ``main_w``. - - W: Alias for ``main_w``. - - ascent: the maximum distance from the baseline to the highest/upper grid coordinate used to place a glyph - outline point, for all the rendered glyphs. 
-
-
-@filter_operator()
-def concat(*streams, **kwargs):
-    """Concatenate audio and video streams, joining them together one after the other.
-
-    The filter works on segments of synchronized video and audio streams. All segments must have the same number of
-    streams of each type, and that will also be the number of streams at output.
-
-    Args:
-        unsafe: Activate unsafe mode: do not fail if segments have a different format.
-
-    Related streams do not always have exactly the same duration, for various reasons including codec frame size or
-    sloppy authoring. For that reason, related synchronized streams (e.g. a video and its audio track) should be
-    concatenated at once. The concat filter will use the duration of the longest stream in each segment (except the
-    last one), and if necessary pad shorter audio streams with silence.
-
-    For this filter to work correctly, all segments must start at timestamp 0.
-
-    All corresponding streams must have the same parameters in all segments; the filtering system will automatically
-    select a common pixel format for video streams, and a common sample format, sample rate and channel layout for
-    audio streams, but other settings, such as resolution, must be converted explicitly by the user.
-
-    Different frame rates are acceptable but will result in variable frame rate at output; be sure to configure the
-    output file to handle it.
-
-    Official documentation: `concat <https://ffmpeg.org/ffmpeg-filters.html#concat>`__
-    """
-    video_stream_count = kwargs.get('v', 1)
-    audio_stream_count = kwargs.get('a', 0)
-    stream_count = video_stream_count + audio_stream_count
-    if len(streams) % stream_count != 0:
-        raise ValueError(
-            'Expected concat input streams to have length multiple of {} (v={}, a={}); got {}'
-            .format(stream_count, video_stream_count, audio_stream_count, len(streams)))
-    kwargs['n'] = int(len(streams) / stream_count)
-    return FilterNode(streams, concat.__name__, kwargs=kwargs, max_inputs=None).stream()
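# Sketch of driving the concat wrapper (hypothetical inputs). With v=1 and
# a=1 each segment carries one video and one audio stream, so the four input
# streams below form n=2 segments, mirroring the tests further down:
import ffmpeg

in1 = ffmpeg.input('in1.mp4')
in2 = ffmpeg.input('in2.mp4')
joined = ffmpeg.concat(in1['v'], in1['a'], in2['v'], in2['a'], v=1, a=1).node
ffmpeg.output(joined[0], joined[1], 'out.mp4').run()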
-
-
-@filter_operator()
-def zoompan(stream, **kwargs):
-    """Apply Zoom & Pan effect.
-
-    Args:
-        zoom: Set the zoom expression. Default is 1.
-        x: Set the x expression. Default is 0.
-        y: Set the y expression. Default is 0.
-        d: Set the duration expression in number of frames. This sets how many frames the effect will last for a
-            single input image.
-        s: Set the output image size, default is ``hd720``.
-        fps: Set the output frame rate, default is 25.
-        z: Alias for ``zoom``.
-
-    Official documentation: `zoompan <https://ffmpeg.org/ffmpeg-filters.html#zoompan>`__
-    """
-    return FilterNode(stream, zoompan.__name__, kwargs=kwargs).stream()
-
-
-@filter_operator()
-def hue(stream, **kwargs):
-    """Modify the hue and/or the saturation of the input.
-
-    Args:
-        h: Specify the hue angle as a number of degrees. It accepts an expression, and defaults to "0".
-        s: Specify the saturation in the [-10,10] range. It accepts an expression and defaults to "1".
-        H: Specify the hue angle as a number of radians. It accepts an expression, and defaults to "0".
-        b: Specify the brightness in the [-10,10] range. It accepts an expression and defaults to "0".
-
-    Official documentation: `hue <https://ffmpeg.org/ffmpeg-filters.html#hue>`__
-    """
-    return FilterNode(stream, hue.__name__, kwargs=kwargs).stream()
-
-
-@filter_operator()
-def colorchannelmixer(stream, *args, **kwargs):
-    """Adjust video input frames by re-mixing color channels.
-
-    Official documentation: `colorchannelmixer <https://ffmpeg.org/ffmpeg-filters.html#colorchannelmixer>`__
-    """
-    return FilterNode(stream, colorchannelmixer.__name__, kwargs=kwargs).stream()
-
-
-__all__ = [
-    'colorchannelmixer',
-    'concat',
-    'crop',
-    'drawbox',
-    'drawtext',
-    'filter',
-    'filter_',
-    'filter_multi_output',
-    'hflip',
-    'hue',
-    'overlay',
-    'setpts',
-    'trim',
-    'vflip',
-    'zoompan',
-]
diff --git a/pype/vendor/ffmpeg/_probe.py b/pype/vendor/ffmpeg/_probe.py
deleted file mode 100644
index 2bcf63683e..0000000000
--- a/pype/vendor/ffmpeg/_probe.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import json
-import subprocess
-from ._run import Error
-
-
-def probe(filename, cmd='ffprobe'):
-    """Run ffprobe on the specified file and return a JSON representation of the output.
-
-    Raises:
-        :class:`ffmpeg.Error`: if ffprobe returns a non-zero exit code,
-            an :class:`Error` is raised with a generic error message.
-            The stderr output can be retrieved by accessing the
-            ``stderr`` property of the exception.
- """ - args = [cmd, '-show_format', '-show_streams', '-of', 'json', filename] - p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if p.returncode != 0: - raise Error('ffprobe', out, err) - return json.loads(out.decode('utf-8')) - - -__all__ = [ - 'probe', -] diff --git a/pype/vendor/ffmpeg/_run.py b/pype/vendor/ffmpeg/_run.py deleted file mode 100644 index 77da8d3837..0000000000 --- a/pype/vendor/ffmpeg/_run.py +++ /dev/null @@ -1,222 +0,0 @@ -from __future__ import unicode_literals -from .dag import get_outgoing_edges, topo_sort -from ._utils import basestring -from builtins import str -from functools import reduce -import collections -import copy -import operator -import subprocess - -from ._ffmpeg import ( - input, - output, -) -from .nodes import ( - get_stream_spec_nodes, - FilterNode, - GlobalNode, - InputNode, - OutputNode, - output_operator, -) - - -class Error(Exception): - def __init__(self, cmd, stdout, stderr): - super(Error, self).__init__('{} error (see stderr output for detail)'.format(cmd)) - self.stdout = stdout - self.stderr = stderr - - -def _convert_kwargs_to_cmd_line_args(kwargs): - args = [] - for k in sorted(kwargs.keys()): - v = kwargs[k] - args.append('-{}'.format(k)) - if v is not None: - args.append('{}'.format(v)) - return args - - -def _get_input_args(input_node): - if input_node.name == input.__name__: - kwargs = copy.copy(input_node.kwargs) - filename = kwargs.pop('filename') - fmt = kwargs.pop('format', None) - video_size = kwargs.pop('video_size', None) - args = [] - if fmt: - args += ['-f', fmt] - if video_size: - args += ['-video_size', '{}x{}'.format(video_size[0], video_size[1])] - args += _convert_kwargs_to_cmd_line_args(kwargs) - args += ['-i', filename] - else: - raise ValueError('Unsupported input node: {}'.format(input_node)) - return args - - -def _format_input_stream_name(stream_name_map, edge, is_final_arg=False): - prefix = stream_name_map[edge.upstream_node, edge.upstream_label] - if not edge.upstream_selector: - suffix = '' - else: - suffix = ':{}'.format(edge.upstream_selector) - if is_final_arg and isinstance(edge.upstream_node, InputNode): - ## Special case: `-map` args should not have brackets for input - ## nodes. - fmt = '{}{}' - else: - fmt = '[{}{}]' - return fmt.format(prefix, suffix) - - -def _format_output_stream_name(stream_name_map, edge): - return '[{}]'.format(stream_name_map[edge.upstream_node, edge.upstream_label]) - - -def _get_filter_spec(node, outgoing_edge_map, stream_name_map): - incoming_edges = node.incoming_edges - outgoing_edges = get_outgoing_edges(node, outgoing_edge_map) - inputs = [_format_input_stream_name(stream_name_map, edge) for edge in incoming_edges] - outputs = [_format_output_stream_name(stream_name_map, edge) for edge in outgoing_edges] - filter_spec = '{}{}{}'.format(''.join(inputs), node._get_filter(outgoing_edges), ''.join(outputs)) - return filter_spec - - -def _allocate_filter_stream_names(filter_nodes, outgoing_edge_maps, stream_name_map): - stream_count = 0 - for upstream_node in filter_nodes: - outgoing_edge_map = outgoing_edge_maps[upstream_node] - for upstream_label, downstreams in list(outgoing_edge_map.items()): - if len(downstreams) > 1: - # TODO: automatically insert `splits` ahead of time via graph transformation. 
-
-
-def _allocate_filter_stream_names(filter_nodes, outgoing_edge_maps, stream_name_map):
-    stream_count = 0
-    for upstream_node in filter_nodes:
-        outgoing_edge_map = outgoing_edge_maps[upstream_node]
-        for upstream_label, downstreams in list(outgoing_edge_map.items()):
-            if len(downstreams) > 1:
-                # TODO: automatically insert `splits` ahead of time via graph transformation.
-                raise ValueError(
-                    'Encountered {} with multiple outgoing edges with same upstream label {!r}; a '
-                    '`split` filter is probably required'.format(upstream_node, upstream_label))
-            stream_name_map[upstream_node, upstream_label] = 's{}'.format(stream_count)
-            stream_count += 1
-
-
-def _get_filter_arg(filter_nodes, outgoing_edge_maps, stream_name_map):
-    _allocate_filter_stream_names(filter_nodes, outgoing_edge_maps, stream_name_map)
-    filter_specs = [_get_filter_spec(node, outgoing_edge_maps[node], stream_name_map) for node in filter_nodes]
-    return ';'.join(filter_specs)
-
-
-def _get_global_args(node):
-    return list(node.args)
-
-
-def _get_output_args(node, stream_name_map):
-    if node.name != output.__name__:
-        raise ValueError('Unsupported output node: {}'.format(node))
-    args = []
-
-    if len(node.incoming_edges) == 0:
-        raise ValueError('Output node {} has no mapped streams'.format(node))
-
-    for edge in node.incoming_edges:
-        # edge = node.incoming_edges[0]
-        stream_name = _format_input_stream_name(stream_name_map, edge, is_final_arg=True)
-        if stream_name != '0' or len(node.incoming_edges) > 1:
-            args += ['-map', stream_name]
-
-    kwargs = copy.copy(node.kwargs)
-    filename = kwargs.pop('filename')
-    if 'format' in kwargs:
-        args += ['-f', kwargs.pop('format')]
-    if 'video_bitrate' in kwargs:
-        args += ['-b:v', str(kwargs.pop('video_bitrate'))]
-    if 'audio_bitrate' in kwargs:
-        args += ['-b:a', str(kwargs.pop('audio_bitrate'))]
-    if 'video_size' in kwargs:
-        video_size = kwargs.pop('video_size')
-        if not isinstance(video_size, basestring) and isinstance(video_size, collections.Iterable):
-            video_size = '{}x{}'.format(video_size[0], video_size[1])
-        args += ['-video_size', video_size]
-    args += _convert_kwargs_to_cmd_line_args(kwargs)
-    args += [filename]
-    return args
-
-
-@output_operator()
-def get_args(stream_spec, overwrite_output=False):
-    """Build command-line arguments to be passed to ffmpeg."""
-    nodes = get_stream_spec_nodes(stream_spec)
-    args = []
-    # TODO: group nodes together, e.g. `-i somefile -r somerate`.
-    sorted_nodes, outgoing_edge_maps = topo_sort(nodes)
-    input_nodes = [node for node in sorted_nodes if isinstance(node, InputNode)]
-    output_nodes = [node for node in sorted_nodes if isinstance(node, OutputNode)]
-    global_nodes = [node for node in sorted_nodes if isinstance(node, GlobalNode)]
-    filter_nodes = [node for node in sorted_nodes if isinstance(node, FilterNode)]
-    stream_name_map = {(node, None): str(i) for i, node in enumerate(input_nodes)}
-    filter_arg = _get_filter_arg(filter_nodes, outgoing_edge_maps, stream_name_map)
-    args += reduce(operator.add, [_get_input_args(node) for node in input_nodes])
-    if filter_arg:
-        args += ['-filter_complex', filter_arg]
-    args += reduce(operator.add, [_get_output_args(node, stream_name_map) for node in output_nodes])
-    args += reduce(operator.add, [_get_global_args(node) for node in global_nodes], [])
-    if overwrite_output:
-        args += ['-y']
-    return args
-
-
-@output_operator()
-def compile(stream_spec, cmd='ffmpeg', overwrite_output=False):
-    """Build command-line for invoking ffmpeg.
-
-    The :meth:`run` function uses this to build the command line
-    arguments and should work in most cases, but calling this function
-    directly is useful for debugging or if you need to invoke ffmpeg
-    manually for whatever reason.
-
-    This is the same as calling :meth:`get_args` except that it also
-    includes the ``ffmpeg`` command as the first argument.
-    """
-    if isinstance(cmd, basestring):
-        cmd = [cmd]
-    elif type(cmd) != list:
-        cmd = list(cmd)
-    return cmd + get_args(stream_spec, overwrite_output=overwrite_output)
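# The relationship described in the docstring above, as a sketch with
# hypothetical filenames: compile() is get_args() plus the executable name.
import ffmpeg

stream = ffmpeg.input('dummy.mp4').output('dummy2.mp4')
assert stream.get_args() == ['-i', 'dummy.mp4', 'dummy2.mp4']
assert stream.compile() == ['ffmpeg', '-i', 'dummy.mp4', 'dummy2.mp4']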
- """ - if isinstance(cmd, basestring): - cmd = [cmd] - elif type(cmd) != list: - cmd = list(cmd) - return cmd + get_args(stream_spec, overwrite_output=overwrite_output) - - -@output_operator() -def run( - stream_spec, cmd='ffmpeg', capture_stdout=False, capture_stderr=False, input=None, - quiet=False, overwrite_output=False): - """Ivoke ffmpeg for the supplied node graph. - - Args: - capture_stdout: if True, capture stdout (to be used with - ``pipe:`` ffmpeg outputs). - capture_stderr: if True, capture stderr. - quiet: shorthand for setting ``capture_stdout`` and ``capture_stderr``. - input: text to be sent to stdin (to be used with ``pipe:`` - ffmpeg inputs) - **kwargs: keyword-arguments passed to ``get_args()`` (e.g. - ``overwrite_output=True``). - - Returns: (out, err) tuple containing captured stdout and stderr data. - """ - args = compile(stream_spec, cmd, overwrite_output=overwrite_output) - stdin_stream = subprocess.PIPE if input else None - stdout_stream = subprocess.PIPE if capture_stdout or quiet else None - stderr_stream = subprocess.PIPE if capture_stderr or quiet else None - p = subprocess.Popen(args, stdin=stdin_stream, stdout=stdout_stream, stderr=stderr_stream) - out, err = p.communicate(input) - retcode = p.poll() - if retcode: - raise Error('ffmpeg', out, err) - print err - return out, err - - -__all__ = [ - 'compile', - 'Error', - 'get_args', - 'run', -] diff --git a/pype/vendor/ffmpeg/_utils.py b/pype/vendor/ffmpeg/_utils.py deleted file mode 100644 index c3a6da3210..0000000000 --- a/pype/vendor/ffmpeg/_utils.py +++ /dev/null @@ -1,80 +0,0 @@ -from __future__ import unicode_literals -from builtins import str -#from past.builtins import basestring -import hashlib -import sys - -if sys.version_info.major == 2: - # noinspection PyUnresolvedReferences,PyShadowingBuiltins - str = str - - -# `past.builtins.basestring` module can't be imported on Python3 in some environments (Ubuntu). -# This code is copy-pasted from it to avoid crashes. -class BaseBaseString(type): - def __instancecheck__(cls, instance): - return isinstance(instance, (bytes, str)) - - def __subclasshook__(cls, thing): - # TODO: What should go here? - raise NotImplemented - - -def with_metaclass(meta, *bases): - class metaclass(meta): - __call__ = type.__call__ - __init__ = type.__init__ - - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - - return metaclass('temporary_class', None, {}) - - -if sys.version_info.major >= 3: - class basestring(with_metaclass(BaseBaseString)): - pass -else: - # noinspection PyUnresolvedReferences,PyCompatibility - from builtins import basestring - - -def _recursive_repr(item): - """Hack around python `repr` to deterministically represent dictionaries. - - This is able to represent more things than json.dumps, since it does not require things to be JSON serializable - (e.g. datetimes). 
- """ - if isinstance(item, basestring): - result = str(item) - elif isinstance(item, list): - result = '[{}]'.format(', '.join([_recursive_repr(x) for x in item])) - elif isinstance(item, dict): - kv_pairs = ['{}: {}'.format(_recursive_repr(k), _recursive_repr(item[k])) for k in sorted(item)] - result = '{' + ', '.join(kv_pairs) + '}' - else: - result = repr(item) - return result - - -def get_hash(item): - repr_ = _recursive_repr(item).encode('utf-8') - return hashlib.md5(repr_).hexdigest() - - -def get_hash_int(item): - return int(get_hash(item), base=16) - - -def escape_chars(text, chars): - """Helper function to escape uncomfortable characters.""" - text = str(text) - chars = list(set(chars)) - if '\\' in chars: - chars.remove('\\') - chars.insert(0, '\\') - for ch in chars: - text = text.replace(ch, '\\' + ch) - return text diff --git a/pype/vendor/ffmpeg/_view.py b/pype/vendor/ffmpeg/_view.py deleted file mode 100644 index 06e6ff4cbc..0000000000 --- a/pype/vendor/ffmpeg/_view.py +++ /dev/null @@ -1,98 +0,0 @@ -from __future__ import unicode_literals - -from builtins import str -from .dag import get_outgoing_edges -from ._run import topo_sort -import tempfile - -from ffmpeg.nodes import ( - FilterNode, - get_stream_spec_nodes, - InputNode, - OutputNode, - stream_operator, -) - - -_RIGHT_ARROW = '\u2192' - - -def _get_node_color(node): - if isinstance(node, InputNode): - color = '#99cc00' - elif isinstance(node, OutputNode): - color = '#99ccff' - elif isinstance(node, FilterNode): - color = '#ffcc00' - else: - color = None - return color - - -@stream_operator() -def view(stream_spec, detail=False, filename=None, pipe=False, **kwargs): - try: - import graphviz - except ImportError: - raise ImportError('failed to import graphviz; please make sure graphviz is installed (e.g. 
diff --git a/pype/vendor/ffmpeg/_view.py b/pype/vendor/ffmpeg/_view.py
deleted file mode 100644
index 06e6ff4cbc..0000000000
--- a/pype/vendor/ffmpeg/_view.py
+++ /dev/null
@@ -1,98 +0,0 @@
-from __future__ import unicode_literals
-
-from builtins import str
-from .dag import get_outgoing_edges
-from ._run import topo_sort
-import tempfile
-
-from ffmpeg.nodes import (
-    FilterNode,
-    get_stream_spec_nodes,
-    InputNode,
-    OutputNode,
-    stream_operator,
-)
-
-
-_RIGHT_ARROW = '\u2192'
-
-
-def _get_node_color(node):
-    if isinstance(node, InputNode):
-        color = '#99cc00'
-    elif isinstance(node, OutputNode):
-        color = '#99ccff'
-    elif isinstance(node, FilterNode):
-        color = '#ffcc00'
-    else:
-        color = None
-    return color
-
-
-@stream_operator()
-def view(stream_spec, detail=False, filename=None, pipe=False, **kwargs):
-    try:
-        import graphviz
-    except ImportError:
-        raise ImportError('failed to import graphviz; please make sure graphviz is installed (e.g. `pip install '
-                          'graphviz`)')
-
-    show_labels = kwargs.pop('show_labels', True)
-    if pipe and filename is not None:
-        raise ValueError('Can\'t specify both `filename` and `pipe`')
-    elif not pipe and filename is None:
-        filename = tempfile.mktemp()
-
-    nodes = get_stream_spec_nodes(stream_spec)
-
-    sorted_nodes, outgoing_edge_maps = topo_sort(nodes)
-    graph = graphviz.Digraph(format='png')
-    graph.attr(rankdir='LR')
-    if len(list(kwargs.keys())) != 0:
-        raise ValueError('Invalid kwargs key(s): {}'.format(', '.join(list(kwargs.keys()))))
-
-    for node in sorted_nodes:
-        color = _get_node_color(node)
-
-        if detail:
-            lines = [node.short_repr]
-            lines += ['{!r}'.format(arg) for arg in node.args]
-            lines += ['{}={!r}'.format(key, node.kwargs[key]) for key in sorted(node.kwargs)]
-            node_text = '\n'.join(lines)
-        else:
-            node_text = node.short_repr
-        graph.node(str(hash(node)), node_text, shape='box', style='filled', fillcolor=color)
-        outgoing_edge_map = outgoing_edge_maps.get(node, {})
-
-        for edge in get_outgoing_edges(node, outgoing_edge_map):
-            kwargs = {}
-            up_label = edge.upstream_label
-            down_label = edge.downstream_label
-            up_selector = edge.upstream_selector
-
-            if show_labels and (up_label is not None or down_label is not None or up_selector is not None):
-                if up_label is None:
-                    up_label = ''
-                if up_selector is not None:
-                    up_label += ":" + up_selector
-                if down_label is None:
-                    down_label = ''
-                if up_label != '' and down_label != '':
-                    middle = ' {} '.format(_RIGHT_ARROW)
-                else:
-                    middle = ''
-                kwargs['label'] = '{} {} {}'.format(up_label, middle, down_label)
-            upstream_node_id = str(hash(edge.upstream_node))
-            downstream_node_id = str(hash(edge.downstream_node))
-            graph.edge(upstream_node_id, downstream_node_id, **kwargs)
-
-    if pipe:
-        return graph.pipe()
-    else:
-        graph.view(filename, cleanup=True)
-    return stream_spec
-
-
-__all__ = [
-    'view',
-]
diff --git a/pype/vendor/ffmpeg/dag.py b/pype/vendor/ffmpeg/dag.py
deleted file mode 100644
index fb51c6a1b2..0000000000
--- a/pype/vendor/ffmpeg/dag.py
+++ /dev/null
@@ -1,182 +0,0 @@
-from __future__ import unicode_literals
-
-from ._utils import get_hash, get_hash_int
-from builtins import object
-from collections import namedtuple
-
-
-class DagNode(object):
-    """Node in a directed-acyclic graph (DAG).
-
-    Edges:
-        DagNodes are connected by edges. An edge connects two nodes with a label for each side:
-        - ``upstream_node``: upstream/parent node
-        - ``upstream_label``: label on the outgoing side of the upstream node
-        - ``downstream_node``: downstream/child node
-        - ``downstream_label``: label on the incoming side of the downstream node
-
-        For example, DagNode A may be connected to DagNode B with an edge labelled "foo" on A's side, and "bar" on B's
-        side:
-
-            _____               _____
-           |     |             |     |
-           |  A  >[foo]---[bar]>  B  |
-           |_____|             |_____|
-
-        Edge labels may be integers or strings, and nodes cannot have more than one incoming edge with the same label.
-
-        DagNodes may have any number of incoming edges and any number of outgoing edges. DagNodes keep track only of
-        their incoming edges, but the entire graph structure can be inferred by looking at the furthest downstream
-        nodes and working backwards.
-
-    Hashing:
-        DagNodes must be hashable, and two nodes are considered to be equivalent if they have the same hash value.
-
-        Nodes are immutable, and the hash should remain constant as a result. If a node with new contents is required,
-        create a new node and throw the old one away.
-
-    String representation:
-        In order for graph visualization tools to show useful information, nodes must be representable as strings. The
-        ``repr`` operator should provide a more or less "full" representation of the node, and the ``short_repr``
-        property should be a shortened, concise representation.
-
-        Again, because nodes are immutable, the string representations should remain constant.
-    """
-
-    def __hash__(self):
-        """Return an integer hash of the node."""
-        raise NotImplementedError()
-
-    def __eq__(self, other):
-        """Compare two nodes; implementations should return True if (and only if) hashes match."""
-        raise NotImplementedError()
-
-    def __repr__(self):
-        """Return a full string representation of the node."""
-        raise NotImplementedError()
-
-    @property
-    def short_repr(self):
-        """Return a partial/concise representation of the node."""
-        raise NotImplementedError()
-
-    @property
-    def incoming_edge_map(self):
-        """Provides information about all incoming edges that connect to this node.
-
-        The edge map is a dictionary that maps an ``incoming_label`` to ``(outgoing_node, outgoing_label)``. Note that
-        implicitly, ``incoming_node`` is ``self``. See "Edges" section above.
-        """
-        raise NotImplementedError()
-
-
-DagEdge = namedtuple('DagEdge', ['downstream_node', 'downstream_label', 'upstream_node', 'upstream_label', 'upstream_selector'])
-
-
-def get_incoming_edges(downstream_node, incoming_edge_map):
-    edges = []
-    for downstream_label, upstream_info in list(incoming_edge_map.items()):
-        upstream_node, upstream_label, upstream_selector = upstream_info
-        edges += [DagEdge(downstream_node, downstream_label, upstream_node, upstream_label, upstream_selector)]
-    return edges
-
-
-def get_outgoing_edges(upstream_node, outgoing_edge_map):
-    edges = []
-    for upstream_label, downstream_infos in list(outgoing_edge_map.items()):
-        for downstream_info in downstream_infos:
-            downstream_node, downstream_label, downstream_selector = downstream_info
-            edges += [DagEdge(downstream_node, downstream_label, upstream_node, upstream_label, downstream_selector)]
-    return edges
-
-
-class KwargReprNode(DagNode):
-    """A DagNode that can be represented as a set of args+kwargs.
- """ - - @property - def __upstream_hashes(self): - hashes = [] - for downstream_label, upstream_info in list(self.incoming_edge_map.items()): - upstream_node, upstream_label, upstream_selector = upstream_info - hashes += [hash(x) for x in [downstream_label, upstream_node, upstream_label, upstream_selector]] - return hashes - - @property - def __inner_hash(self): - props = {'args': self.args, 'kwargs': self.kwargs} - return get_hash(props) - - def __get_hash(self): - hashes = self.__upstream_hashes + [self.__inner_hash] - return get_hash_int(hashes) - - def __init__(self, incoming_edge_map, name, args, kwargs): - self.__incoming_edge_map = incoming_edge_map - self.name = name - self.args = args - self.kwargs = kwargs - self.__hash = self.__get_hash() - - def __hash__(self): - return self.__hash - - def __eq__(self, other): - return hash(self) == hash(other) - - @property - def short_hash(self): - return '{:x}'.format(abs(hash(self)))[:12] - - def long_repr(self, include_hash=True): - formatted_props = ['{!r}'.format(arg) for arg in self.args] - formatted_props += ['{}={!r}'.format(key, self.kwargs[key]) for key in sorted(self.kwargs)] - out = '{}({})'.format(self.name, ', '.join(formatted_props)) - if include_hash: - out += ' <{}>'.format(self.short_hash) - return out - - def __repr__(self): - return self.long_repr() - - @property - def incoming_edges(self): - return get_incoming_edges(self, self.incoming_edge_map) - - @property - def incoming_edge_map(self): - return self.__incoming_edge_map - - @property - def short_repr(self): - return self.name - - -def topo_sort(downstream_nodes): - marked_nodes = [] - sorted_nodes = [] - outgoing_edge_maps = {} - - def visit(upstream_node, upstream_label, downstream_node, downstream_label, downstream_selector=None): - if upstream_node in marked_nodes: - raise RuntimeError('Graph is not a DAG') - - if downstream_node is not None: - outgoing_edge_map = outgoing_edge_maps.get(upstream_node, {}) - outgoing_edge_infos = outgoing_edge_map.get(upstream_label, []) - outgoing_edge_infos += [(downstream_node, downstream_label, downstream_selector)] - outgoing_edge_map[upstream_label] = outgoing_edge_infos - outgoing_edge_maps[upstream_node] = outgoing_edge_map - - if upstream_node not in sorted_nodes: - marked_nodes.append(upstream_node) - for edge in upstream_node.incoming_edges: - visit(edge.upstream_node, edge.upstream_label, edge.downstream_node, edge.downstream_label, edge.upstream_selector) - marked_nodes.remove(upstream_node) - sorted_nodes.append(upstream_node) - - unmarked_nodes = [(node, None) for node in downstream_nodes] - while unmarked_nodes: - upstream_node, upstream_label = unmarked_nodes.pop() - visit(upstream_node, upstream_label, None, None) - return sorted_nodes, outgoing_edge_maps diff --git a/pype/vendor/ffmpeg/nodes.py b/pype/vendor/ffmpeg/nodes.py deleted file mode 100644 index 4fa7ff2bbb..0000000000 --- a/pype/vendor/ffmpeg/nodes.py +++ /dev/null @@ -1,289 +0,0 @@ -from __future__ import unicode_literals - -# #from past.builtins import basestring -# import basestring -from .dag import KwargReprNode -from ._utils import escape_chars, get_hash_int -from builtins import object, basestring -import os - - -def _is_of_types(obj, types): - valid = False - for stream_type in types: - if isinstance(obj, stream_type): - valid = True - break - return valid - - -def _get_types_str(types): - return ', '.join(['{}.{}'.format(x.__module__, x.__name__) for x in types]) - - -class Stream(object): - """Represents the outgoing edge of an upstream node; 
may be used to create more downstream nodes.""" - - def __init__(self, upstream_node, upstream_label, node_types, upstream_selector=None): - if not _is_of_types(upstream_node, node_types): - raise TypeError('Expected upstream node to be of one of the following type(s): {}; got {}'.format( - _get_types_str(node_types), type(upstream_node))) - self.node = upstream_node - self.label = upstream_label - self.selector = upstream_selector - - def __hash__(self): - return get_hash_int([hash(self.node), hash(self.label)]) - - def __eq__(self, other): - return hash(self) == hash(other) - - def __repr__(self): - node_repr = self.node.long_repr(include_hash=False) - selector = '' - if self.selector: - selector = ':{}'.format(self.selector) - out = '{}[{!r}{}] <{}>'.format(node_repr, self.label, selector, self.node.short_hash) - return out - - def __getitem__(self, index): - """ - Select a component (audio, video) of the stream. - - Example: - Process the audio and video portions of a stream independently:: - - input = ffmpeg.input('in.mp4') - audio = input[:'a'].filter("aecho", 0.8, 0.9, 1000, 0.3) - video = input[:'v'].hflip() - out = ffmpeg.output(audio, video, 'out.mp4') - """ - if self.selector is not None: - raise ValueError('Stream already has a selector: {}'.format(self)) - elif not isinstance(index, basestring): - raise TypeError("Expected string index (e.g. 'a'); got {!r}".format(index)) - return self.node.stream(label=self.label, selector=index) - - -def get_stream_map(stream_spec): - if stream_spec is None: - stream_map = {} - elif isinstance(stream_spec, Stream): - stream_map = {None: stream_spec} - elif isinstance(stream_spec, (list, tuple)): - stream_map = dict(enumerate(stream_spec)) - elif isinstance(stream_spec, dict): - stream_map = stream_spec - return stream_map - - -def get_stream_map_nodes(stream_map): - nodes = [] - for stream in list(stream_map.values()): - if not isinstance(stream, Stream): - raise TypeError('Expected Stream; got {}'.format(type(stream))) - nodes.append(stream.node) - return nodes - - -def get_stream_spec_nodes(stream_spec): - stream_map = get_stream_map(stream_spec) - return get_stream_map_nodes(stream_map) - - -class Node(KwargReprNode): - """Node base""" - - @classmethod - def __check_input_len(cls, stream_map, min_inputs, max_inputs): - if min_inputs is not None and len(stream_map) < min_inputs: - raise ValueError('Expected at least {} input stream(s); got {}'.format(min_inputs, len(stream_map))) - elif max_inputs is not None and len(stream_map) > max_inputs: - raise ValueError('Expected at most {} input stream(s); got {}'.format(max_inputs, len(stream_map))) - - @classmethod - def __check_input_types(cls, stream_map, incoming_stream_types): - for stream in list(stream_map.values()): - if not _is_of_types(stream, incoming_stream_types): - raise TypeError('Expected incoming stream(s) to be of one of the following types: {}; got {}' - .format(_get_types_str(incoming_stream_types), type(stream))) - - @classmethod - def __get_incoming_edge_map(cls, stream_map): - incoming_edge_map = {} - for downstream_label, upstream in list(stream_map.items()): - incoming_edge_map[downstream_label] = (upstream.node, upstream.label, upstream.selector) - return incoming_edge_map - - def __init__(self, stream_spec, name, incoming_stream_types, outgoing_stream_type, min_inputs, - max_inputs, args=[], kwargs={}): - stream_map = get_stream_map(stream_spec) - self.__check_input_len(stream_map, min_inputs, max_inputs) - self.__check_input_types(stream_map, incoming_stream_types) - 
incoming_edge_map = self.__get_incoming_edge_map(stream_map) - - super(Node, self).__init__(incoming_edge_map, name, args, kwargs) - self.__outgoing_stream_type = outgoing_stream_type - self.__incoming_stream_types = incoming_stream_types - - def stream(self, label=None, selector=None): - """Create an outgoing stream originating from this node. - - More nodes may be attached onto the outgoing stream. - """ - return self.__outgoing_stream_type(self, label, upstream_selector=selector) - - def __getitem__(self, item): - """Create an outgoing stream originating from this node; syntactic sugar for ``self.stream(label)``. - It can also be used to apply a selector: e.g. ``node[0:'a']`` returns a stream with label 0 and - selector ``'a'``, which is the same as ``node.stream(label=0, selector='a')``. - - Example: - Process the audio and video portions of a stream independently:: - - input = ffmpeg.input('in.mp4') - audio = input[:'a'].filter("aecho", 0.8, 0.9, 1000, 0.3) - video = input[:'v'].hflip() - out = ffmpeg.output(audio, video, 'out.mp4') - """ - if isinstance(item, slice): - return self.stream(label=item.start, selector=item.stop) - else: - return self.stream(label=item) - - -class FilterableStream(Stream): - def __init__(self, upstream_node, upstream_label, upstream_selector=None): - super(FilterableStream, self).__init__(upstream_node, upstream_label, {InputNode, FilterNode}, - upstream_selector) - - -# noinspection PyMethodOverriding -class InputNode(Node): - """InputNode type""" - - def __init__(self, name, args=[], kwargs={}): - super(InputNode, self).__init__( - stream_spec=None, - name=name, - incoming_stream_types={}, - outgoing_stream_type=FilterableStream, - min_inputs=0, - max_inputs=0, - args=args, - kwargs=kwargs - ) - - @property - def short_repr(self): - return os.path.basename(self.kwargs['filename']) - - -# noinspection PyMethodOverriding -class FilterNode(Node): - def __init__(self, stream_spec, name, max_inputs=1, args=[], kwargs={}): - super(FilterNode, self).__init__( - stream_spec=stream_spec, - name=name, - incoming_stream_types={FilterableStream}, - outgoing_stream_type=FilterableStream, - min_inputs=1, - max_inputs=max_inputs, - args=args, - kwargs=kwargs - ) - - """FilterNode""" - - def _get_filter(self, outgoing_edges): - args = self.args - kwargs = self.kwargs - if self.name in ('split', 'asplit'): - args = [len(outgoing_edges)] - - out_args = [escape_chars(x, '\\\'=:') for x in args] - out_kwargs = {} - for k, v in list(kwargs.items()): - k = escape_chars(k, '\\\'=:') - v = escape_chars(v, '\\\'=:') - out_kwargs[k] = v - - arg_params = [escape_chars(v, '\\\'=:') for v in out_args] - kwarg_params = ['{}={}'.format(k, out_kwargs[k]) for k in sorted(out_kwargs)] - params = arg_params + kwarg_params - - params_text = escape_chars(self.name, '\\\'=:') - - if params: - params_text += '={}'.format(':'.join(params)) - return escape_chars(params_text, '\\\'[],;') - - -# noinspection PyMethodOverriding -class OutputNode(Node): - def __init__(self, stream, name, args=[], kwargs={}): - super(OutputNode, self).__init__( - stream_spec=stream, - name=name, - incoming_stream_types={FilterableStream}, - outgoing_stream_type=OutputStream, - min_inputs=1, - max_inputs=None, - args=args, - kwargs=kwargs - ) - - @property - def short_repr(self): - return os.path.basename(self.kwargs['filename']) - - -class OutputStream(Stream): - def __init__(self, upstream_node, upstream_label, upstream_selector=None): - super(OutputStream, self).__init__(upstream_node, upstream_label, 
{OutputNode, GlobalNode, MergeOutputsNode}, - upstream_selector=upstream_selector) - - -# noinspection PyMethodOverriding -class MergeOutputsNode(Node): - def __init__(self, streams, name): - super(MergeOutputsNode, self).__init__( - stream_spec=streams, - name=name, - incoming_stream_types={OutputStream}, - outgoing_stream_type=OutputStream, - min_inputs=1, - max_inputs=None - ) - - -# noinspection PyMethodOverriding -class GlobalNode(Node): - def __init__(self, stream, name, args=[], kwargs={}): - super(GlobalNode, self).__init__( - stream_spec=stream, - name=name, - incoming_stream_types={OutputStream}, - outgoing_stream_type=OutputStream, - min_inputs=1, - max_inputs=1, - args=args, - kwargs=kwargs - ) - - -def stream_operator(stream_classes={Stream}, name=None): - def decorator(func): - func_name = name or func.__name__ - [setattr(stream_class, func_name, func) for stream_class in stream_classes] - return func - - return decorator - - -def filter_operator(name=None): - return stream_operator(stream_classes={FilterableStream}, name=name) - - -def output_operator(name=None): - return stream_operator(stream_classes={OutputStream}, name=name) diff --git a/pype/vendor/ffmpeg/tests/__init__.py b/pype/vendor/ffmpeg/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/ffmpeg/tests/sample_data/in1.mp4 b/pype/vendor/ffmpeg/tests/sample_data/in1.mp4 deleted file mode 100644 index 2c7d59e9f5..0000000000 Binary files a/pype/vendor/ffmpeg/tests/sample_data/in1.mp4 and /dev/null differ diff --git a/pype/vendor/ffmpeg/tests/sample_data/overlay.png b/pype/vendor/ffmpeg/tests/sample_data/overlay.png deleted file mode 100644 index 5da0087238..0000000000 Binary files a/pype/vendor/ffmpeg/tests/sample_data/overlay.png and /dev/null differ diff --git a/pype/vendor/ffmpeg/tests/test_ffmpeg.py b/pype/vendor/ffmpeg/tests/test_ffmpeg.py deleted file mode 100644 index b62f2efced..0000000000 --- a/pype/vendor/ffmpeg/tests/test_ffmpeg.py +++ /dev/null @@ -1,629 +0,0 @@ -from __future__ import unicode_literals - -from builtins import str -from builtins import bytes -from builtins import range -import ffmpeg -import os -import pytest -import random -import re -import subprocess - - -TEST_DIR = os.path.dirname(__file__) -SAMPLE_DATA_DIR = os.path.join(TEST_DIR, 'sample_data') -TEST_INPUT_FILE1 = os.path.join(SAMPLE_DATA_DIR, 'in1.mp4') -TEST_OVERLAY_FILE = os.path.join(SAMPLE_DATA_DIR, 'overlay.png') -TEST_OUTPUT_FILE1 = os.path.join(SAMPLE_DATA_DIR, 'out1.mp4') -TEST_OUTPUT_FILE2 = os.path.join(SAMPLE_DATA_DIR, 'out2.mp4') -BOGUS_INPUT_FILE = os.path.join(SAMPLE_DATA_DIR, 'bogus') - - -subprocess.check_call(['ffmpeg', '-version']) - - -def test_escape_chars(): - assert ffmpeg._utils.escape_chars('a:b', ':') == 'a\:b' - assert ffmpeg._utils.escape_chars('a\\:b', ':\\') == 'a\\\\\\:b' - assert ffmpeg._utils.escape_chars('a:b,c[d]e%{}f\'g\'h\\i', '\\\':,[]%') == 'a\\:b\\,c\\[d\\]e\\%{}f\\\'g\\\'h\\\\i' - assert ffmpeg._utils.escape_chars(123, ':\\') == '123' - - -def test_fluent_equality(): - base1 = ffmpeg.input('dummy1.mp4') - base2 = ffmpeg.input('dummy1.mp4') - base3 = ffmpeg.input('dummy2.mp4') - t1 = base1.trim(start_frame=10, end_frame=20) - t2 = base1.trim(start_frame=10, end_frame=20) - t3 = base1.trim(start_frame=10, end_frame=30) - t4 = base2.trim(start_frame=10, end_frame=20) - t5 = base3.trim(start_frame=10, end_frame=20) - assert t1 == t2 - assert t1 != t3 - assert t1 == t4 - assert t1 != t5 - - -def test_fluent_concat(): - base = ffmpeg.input('dummy.mp4') - 
trimmed1 = base.trim(start_frame=10, end_frame=20) - trimmed2 = base.trim(start_frame=30, end_frame=40) - trimmed3 = base.trim(start_frame=50, end_frame=60) - concat1 = ffmpeg.concat(trimmed1, trimmed2, trimmed3) - concat2 = ffmpeg.concat(trimmed1, trimmed2, trimmed3) - concat3 = ffmpeg.concat(trimmed1, trimmed3, trimmed2) - assert concat1 == concat2 - assert concat1 != concat3 - - -def test_fluent_output(): - (ffmpeg - .input('dummy.mp4') - .trim(start_frame=10, end_frame=20) - .output('dummy2.mp4') - ) - - -def test_fluent_complex_filter(): - in_file = ffmpeg.input('dummy.mp4') - return (ffmpeg - .concat( - in_file.trim(start_frame=10, end_frame=20), - in_file.trim(start_frame=30, end_frame=40), - in_file.trim(start_frame=50, end_frame=60) - ) - .output('dummy2.mp4') - ) - - -def test_node_repr(): - in_file = ffmpeg.input('dummy.mp4') - trim1 = ffmpeg.trim(in_file, start_frame=10, end_frame=20) - trim2 = ffmpeg.trim(in_file, start_frame=30, end_frame=40) - trim3 = ffmpeg.trim(in_file, start_frame=50, end_frame=60) - concatted = ffmpeg.concat(trim1, trim2, trim3) - output = ffmpeg.output(concatted, 'dummy2.mp4') - assert repr(in_file.node) == 'input(filename={!r}) <{}>'.format('dummy.mp4', in_file.node.short_hash) - assert repr(trim1.node) == 'trim(end_frame=20, start_frame=10) <{}>'.format(trim1.node.short_hash) - assert repr(trim2.node) == 'trim(end_frame=40, start_frame=30) <{}>'.format(trim2.node.short_hash) - assert repr(trim3.node) == 'trim(end_frame=60, start_frame=50) <{}>'.format(trim3.node.short_hash) - assert repr(concatted.node) == 'concat(n=3) <{}>'.format(concatted.node.short_hash) - assert repr(output.node) == 'output(filename={!r}) <{}>'.format('dummy2.mp4', output.node.short_hash) - - -def test_stream_repr(): - in_file = ffmpeg.input('dummy.mp4') - assert repr(in_file) == 'input(filename={!r})[None] <{}>'.format('dummy.mp4', in_file.node.short_hash) - split0 = in_file.filter_multi_output('split')[0] - assert repr(split0) == 'split()[0] <{}>'.format(split0.node.short_hash) - dummy_out = in_file.filter_multi_output('dummy')['out'] - assert repr(dummy_out) == 'dummy()[{!r}] <{}>'.format(dummy_out.label, dummy_out.node.short_hash) - - -def test__get_args__simple(): - out_file = ffmpeg.input('dummy.mp4').output('dummy2.mp4') - assert out_file.get_args() == ['-i', 'dummy.mp4', 'dummy2.mp4'] - - -def test_global_args(): - out_file = ffmpeg.input('dummy.mp4').output('dummy2.mp4').global_args('-progress', 'someurl') - assert out_file.get_args() == ['-i', 'dummy.mp4', 'dummy2.mp4', '-progress', 'someurl'] - - -def _get_simple_example(): - return ffmpeg.input(TEST_INPUT_FILE1).output(TEST_OUTPUT_FILE1) - - -def _get_complex_filter_example(): - split = (ffmpeg - .input(TEST_INPUT_FILE1) - .vflip() - .split() - ) - split0 = split[0] - split1 = split[1] - - overlay_file = ffmpeg.input(TEST_OVERLAY_FILE) - overlay_file = ffmpeg.crop(overlay_file, 10, 10, 158, 112) - return (ffmpeg - .concat( - split0.trim(start_frame=10, end_frame=20), - split1.trim(start_frame=30, end_frame=40), - ) - .overlay(overlay_file.hflip()) - .drawbox(50, 50, 120, 120, color='red', thickness=5) - .output(TEST_OUTPUT_FILE1) - .overwrite_output() - ) - - -def test__get_args__complex_filter(): - out = _get_complex_filter_example() - args = ffmpeg.get_args(out) - assert args == ['-i', TEST_INPUT_FILE1, - '-i', TEST_OVERLAY_FILE, - '-filter_complex', - '[0]vflip[s0];' \ - '[s0]split=2[s1][s2];' \ - '[s1]trim=end_frame=20:start_frame=10[s3];' \ - '[s2]trim=end_frame=40:start_frame=30[s4];' \ - 
'[s3][s4]concat=n=2[s5];' \ - '[1]crop=158:112:10:10[s6];' \ - '[s6]hflip[s7];' \ - '[s5][s7]overlay=eof_action=repeat[s8];' \ - '[s8]drawbox=50:50:120:120:red:t=5[s9]', - '-map', '[s9]', TEST_OUTPUT_FILE1, - '-y' - ] - - -def test_combined_output(): - i1 = ffmpeg.input(TEST_INPUT_FILE1) - i2 = ffmpeg.input(TEST_OVERLAY_FILE) - out = ffmpeg.output(i1, i2, TEST_OUTPUT_FILE1) - assert out.get_args() == [ - '-i', TEST_INPUT_FILE1, - '-i', TEST_OVERLAY_FILE, - '-map', '0', - '-map', '1', - TEST_OUTPUT_FILE1 - ] - - -def test_filter_with_selector(): - i = ffmpeg.input(TEST_INPUT_FILE1) - v1 = i['v'].hflip() - a1 = i['a'].filter('aecho', 0.8, 0.9, 1000, 0.3) - out = ffmpeg.output(a1, v1, TEST_OUTPUT_FILE1) - assert out.get_args() == [ - '-i', TEST_INPUT_FILE1, - '-filter_complex', - '[0:a]aecho=0.8:0.9:1000:0.3[s0];' \ - '[0:v]hflip[s1]', - '-map', '[s0]', '-map', '[s1]', - TEST_OUTPUT_FILE1 - ] - - - -def test_get_item_with_bad_selectors(): - input = ffmpeg.input(TEST_INPUT_FILE1) - - with pytest.raises(ValueError) as excinfo: - input['a']['a'] - assert str(excinfo.value).startswith('Stream already has a selector:') - - with pytest.raises(TypeError) as excinfo: - input[:'a'] - assert str(excinfo.value).startswith("Expected string index (e.g. 'a')") - - with pytest.raises(TypeError) as excinfo: - input[5] - assert str(excinfo.value).startswith("Expected string index (e.g. 'a')") - - -def _get_complex_filter_asplit_example(): - split = (ffmpeg - .input(TEST_INPUT_FILE1) - .vflip() - .asplit() - ) - split0 = split[0] - split1 = split[1] - - return (ffmpeg - .concat( - split0.filter('atrim', start=10, end=20), - split1.filter('atrim', start=30, end=40), - ) - .output(TEST_OUTPUT_FILE1) - .overwrite_output() - ) - - -def test_filter_concat__video_only(): - in1 = ffmpeg.input('in1.mp4') - in2 = ffmpeg.input('in2.mp4') - args = ( - ffmpeg - .concat(in1, in2) - .output('out.mp4') - .get_args() - ) - assert args == [ - '-i', - 'in1.mp4', - '-i', - 'in2.mp4', - '-filter_complex', - '[0][1]concat=n=2[s0]', - '-map', - '[s0]', - 'out.mp4', - ] - - -def test_filter_concat__audio_only(): - in1 = ffmpeg.input('in1.mp4') - in2 = ffmpeg.input('in2.mp4') - args = ( - ffmpeg - .concat(in1, in2, v=0, a=1) - .output('out.mp4') - .get_args() - ) - assert args == [ - '-i', - 'in1.mp4', - '-i', - 'in2.mp4', - '-filter_complex', - '[0][1]concat=a=1:n=2:v=0[s0]', - '-map', - '[s0]', - 'out.mp4' - ] - - -def test_filter_concat__audio_video(): - in1 = ffmpeg.input('in1.mp4') - in2 = ffmpeg.input('in2.mp4') - joined = ffmpeg.concat(in1['v'], in1['a'], in2.hflip(), in2['a'], v=1, a=1).node - args = ( - ffmpeg - .output(joined[0], joined[1], 'out.mp4') - .get_args() - ) - assert args == [ - '-i', - 'in1.mp4', - '-i', - 'in2.mp4', - '-filter_complex', - '[1]hflip[s0];[0:v][0:a][s0][1:a]concat=a=1:n=2:v=1[s1][s2]', - '-map', - '[s1]', - '-map', - '[s2]', - 'out.mp4', - ] - - -def test_filter_concat__wrong_stream_count(): - in1 = ffmpeg.input('in1.mp4') - in2 = ffmpeg.input('in2.mp4') - with pytest.raises(ValueError) as excinfo: - ffmpeg.concat(in1['v'], in1['a'], in2.hflip(), v=1, a=1).node - assert str(excinfo.value) == \ - 'Expected concat input streams to have length multiple of 2 (v=1, a=1); got 3' - - -def test_filter_asplit(): - out = _get_complex_filter_asplit_example() - args = out.get_args() - assert args == [ - '-i', - TEST_INPUT_FILE1, - '-filter_complex', - '[0]vflip[s0];[s0]asplit=2[s1][s2];[s1]atrim=end=20:start=10[s3];[s2]atrim=end=40:start=30[s4];[s3]' - '[s4]concat=n=2[s5]', - '-map', - '[s5]', - 
TEST_OUTPUT_FILE1, - '-y' - ] - - -def test__output__bitrate(): - args = ( - ffmpeg - .input('in') - .output('out', video_bitrate=1000, audio_bitrate=200) - .get_args() - ) - assert args == ['-i', 'in', '-b:v', '1000', '-b:a', '200', 'out'] - - -@pytest.mark.parametrize('video_size', [(320, 240), '320x240']) -def test__output__video_size(video_size): - args = ( - ffmpeg - .input('in') - .output('out', video_size=video_size) - .get_args() - ) - assert args == ['-i', 'in', '-video_size', '320x240', 'out'] - - -def test_filter_normal_arg_escape(): - """Test string escaping of normal filter args (e.g. ``font`` param of ``drawtext`` filter).""" - def _get_drawtext_font_repr(font): - """Build a command-line arg using drawtext ``font`` param and extract the ``-filter_complex`` arg.""" - args = (ffmpeg - .input('in') - .drawtext('test', font='a{}b'.format(font)) - .output('out') - .get_args() - ) - assert args[:3] == ['-i', 'in', '-filter_complex'] - assert args[4:] == ['-map', '[s0]', 'out'] - match = re.match(r'\[0\]drawtext=font=a((.|\n)*)b:text=test\[s0\]', args[3], re.MULTILINE) - assert match is not None, 'Invalid -filter_complex arg: {!r}'.format(args[3]) - return match.group(1) - - expected_backslash_counts = { - 'x': 0, - '\'': 3, - '\\': 3, - '%': 0, - ':': 2, - ',': 1, - '[': 1, - ']': 1, - '=': 2, - '\n': 0, - } - for ch, expected_backslash_count in list(expected_backslash_counts.items()): - expected = '{}{}'.format('\\' * expected_backslash_count, ch) - actual = _get_drawtext_font_repr(ch) - assert expected == actual - - -def test_filter_text_arg_str_escape(): - """Test string escaping of normal filter args (e.g. ``text`` param of ``drawtext`` filter).""" - def _get_drawtext_text_repr(text): - """Build a command-line arg using drawtext ``text`` param and extract the ``-filter_complex`` arg.""" - args = (ffmpeg - .input('in') - .drawtext('a{}b'.format(text)) - .output('out') - .get_args() - ) - assert args[:3] == ['-i', 'in', '-filter_complex'] - assert args[4:] == ['-map', '[s0]', 'out'] - match = re.match(r'\[0\]drawtext=text=a((.|\n)*)b\[s0\]', args[3], re.MULTILINE) - assert match is not None, 'Invalid -filter_complex arg: {!r}'.format(args[3]) - return match.group(1) - - expected_backslash_counts = { - 'x': 0, - '\'': 7, - '\\': 7, - '%': 4, - ':': 2, - ',': 1, - '[': 1, - ']': 1, - '=': 2, - '\n': 0, - } - for ch, expected_backslash_count in list(expected_backslash_counts.items()): - expected = '{}{}'.format('\\' * expected_backslash_count, ch) - actual = _get_drawtext_text_repr(ch) - assert expected == actual - - -#def test_version(): -# subprocess.check_call(['ffmpeg', '-version']) - - -def test__compile(): - out_file = ffmpeg.input('dummy.mp4').output('dummy2.mp4') - assert out_file.compile() == ['ffmpeg', '-i', 'dummy.mp4', 'dummy2.mp4'] - assert out_file.compile(cmd='ffmpeg.old') == ['ffmpeg.old', '-i', 'dummy.mp4', 'dummy2.mp4'] - - -def test__run(): - stream = _get_complex_filter_example() - out, err = ffmpeg.run(stream) - assert out is None - assert err is None - - -@pytest.mark.parametrize('capture_stdout', [True, False]) -@pytest.mark.parametrize('capture_stderr', [True, False]) -def test__run__capture_out(mocker, capture_stdout, capture_stderr): - mocker.patch.object(ffmpeg._run, 'compile', return_value=['echo', 'test']) - stream = _get_simple_example() - out, err = ffmpeg.run(stream, capture_stdout=capture_stdout, capture_stderr=capture_stderr) - if capture_stdout: - assert out == 'test\n'.encode() - else: - assert out is None - if capture_stderr: - assert err == 
''.encode() - else: - assert err is None - - -def test__run__input_output(mocker): - mocker.patch.object(ffmpeg._run, 'compile', return_value=['cat']) - stream = _get_simple_example() - out, err = ffmpeg.run(stream, input='test'.encode(), capture_stdout=True) - assert out == 'test'.encode() - assert err is None - - -@pytest.mark.parametrize('capture_stdout', [True, False]) -@pytest.mark.parametrize('capture_stderr', [True, False]) -def test__run__error(mocker, capture_stdout, capture_stderr): - mocker.patch.object(ffmpeg._run, 'compile', return_value=['ffmpeg']) - stream = _get_complex_filter_example() - with pytest.raises(ffmpeg.Error) as excinfo: - out, err = ffmpeg.run(stream, capture_stdout=capture_stdout, capture_stderr=capture_stderr) - assert str(excinfo.value) == 'ffmpeg error (see stderr output for detail)' - out = excinfo.value.stdout - err = excinfo.value.stderr - if capture_stdout: - assert out == ''.encode() - else: - assert out is None - if capture_stderr: - assert err.decode().startswith('ffmpeg version') - else: - assert err is None - - -def test__run__multi_output(): - in_ = ffmpeg.input(TEST_INPUT_FILE1) - out1 = in_.output(TEST_OUTPUT_FILE1) - out2 = in_.output(TEST_OUTPUT_FILE2) - ffmpeg.run([out1, out2], overwrite_output=True) - - -def test__run__dummy_cmd(): - stream = _get_complex_filter_example() - ffmpeg.run(stream, cmd='true') - - -def test__run__dummy_cmd_list(): - stream = _get_complex_filter_example() - ffmpeg.run(stream, cmd=['true', 'ignored']) - - -def test__filter__custom(): - stream = ffmpeg.input('dummy.mp4') - stream = ffmpeg.filter(stream, 'custom_filter', 'a', 'b', kwarg1='c') - stream = ffmpeg.output(stream, 'dummy2.mp4') - assert stream.get_args() == [ - '-i', 'dummy.mp4', - '-filter_complex', '[0]custom_filter=a:b:kwarg1=c[s0]', - '-map', '[s0]', - 'dummy2.mp4' - ] - - -def test__filter__custom_fluent(): - stream = (ffmpeg - .input('dummy.mp4') - .filter('custom_filter', 'a', 'b', kwarg1='c') - .output('dummy2.mp4') - ) - assert stream.get_args() == [ - '-i', 'dummy.mp4', - '-filter_complex', '[0]custom_filter=a:b:kwarg1=c[s0]', - '-map', '[s0]', - 'dummy2.mp4' - ] - - -def test__merge_outputs(): - in_ = ffmpeg.input('in.mp4') - out1 = in_.output('out1.mp4') - out2 = in_.output('out2.mp4') - assert ffmpeg.merge_outputs(out1, out2).get_args() == [ - '-i', 'in.mp4', 'out1.mp4', 'out2.mp4' - ] - assert ffmpeg.get_args([out1, out2]) == [ - '-i', 'in.mp4', 'out2.mp4', 'out1.mp4' - ] - - -def test__input__start_time(): - assert ffmpeg.input('in', ss=10.5).output('out').get_args() == ['-ss', '10.5', '-i', 'in', 'out'] - assert ffmpeg.input('in', ss=0.0).output('out').get_args() == ['-ss', '0.0', '-i', 'in', 'out'] - - -def test_multi_passthrough(): - out1 = ffmpeg.input('in1.mp4').output('out1.mp4') - out2 = ffmpeg.input('in2.mp4').output('out2.mp4') - out = ffmpeg.merge_outputs(out1, out2) - assert ffmpeg.get_args(out) == [ - '-i', 'in1.mp4', - '-i', 'in2.mp4', - 'out1.mp4', - '-map', '1', - 'out2.mp4' - ] - assert ffmpeg.get_args([out1, out2]) == [ - '-i', 'in2.mp4', - '-i', 'in1.mp4', - 'out2.mp4', - '-map', '1', - 'out1.mp4' - ] - - -def test_passthrough_selectors(): - i1 = ffmpeg.input(TEST_INPUT_FILE1) - args = ( - ffmpeg - .output(i1['1'], i1['2'], TEST_OUTPUT_FILE1) - .get_args() - ) - assert args == [ - '-i', TEST_INPUT_FILE1, - '-map', '0:1', - '-map', '0:2', - TEST_OUTPUT_FILE1, - ] - - -def test_mixed_passthrough_selectors(): - i1 = ffmpeg.input(TEST_INPUT_FILE1) - args = ( - ffmpeg - .output(i1['1'].hflip(), i1['2'], TEST_OUTPUT_FILE1) - 
.get_args() - ) - assert args == [ - '-i', TEST_INPUT_FILE1, - '-filter_complex', - '[0:1]hflip[s0]', - '-map', '[s0]', - '-map', '0:2', - TEST_OUTPUT_FILE1, - ] - - -def test_pipe(): - width = 32 - height = 32 - frame_size = width * height * 3 # 3 bytes for rgb24 - frame_count = 10 - start_frame = 2 - - out = (ffmpeg - .input('pipe:0', format='rawvideo', pixel_format='rgb24', video_size=(width, height), framerate=10) - .trim(start_frame=start_frame) - .output('pipe:1', format='rawvideo') - ) - - args = out.get_args() - assert args == [ - '-f', 'rawvideo', - '-video_size', '{}x{}'.format(width, height), - '-framerate', '10', - '-pixel_format', 'rgb24', - '-i', 'pipe:0', - '-filter_complex', - '[0]trim=start_frame=2[s0]', - '-map', '[s0]', - '-f', 'rawvideo', - 'pipe:1' - ] - - cmd = ['ffmpeg'] + args - p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - in_data = bytes(bytearray([random.randint(0,255) for _ in range(frame_size * frame_count)])) - p.stdin.write(in_data) # note: this could block, in which case need to use threads - p.stdin.close() - - out_data = p.stdout.read() - assert len(out_data) == frame_size * (frame_count - start_frame) - assert out_data == in_data[start_frame*frame_size:] - - -def test__probe(): - data = ffmpeg.probe(TEST_INPUT_FILE1) - assert set(data.keys()) == {'format', 'streams'} - assert data['format']['duration'] == '7.036000' - - -def test__probe__exception(): - with pytest.raises(ffmpeg.Error) as excinfo: - ffmpeg.probe(BOGUS_INPUT_FILE) - assert str(excinfo.value) == 'ffprobe error (see stderr output for detail)' - assert 'No such file or directory'.encode() in excinfo.value.stderr diff --git a/pype/vendor/ftrack_api/__init__.py b/pype/vendor/ftrack_api/__init__.py deleted file mode 100644 index d8ee30bd8f..0000000000 --- a/pype/vendor/ftrack_api/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from ._version import __version__ -from .session import Session - - -def mixin(instance, mixin_class, name=None): - '''Mixin *mixin_class* to *instance*. - - *name* can be used to specify new class name. If not specified then one will - be generated. - - ''' - if name is None: - name = '{0}{1}'.format( - instance.__class__.__name__, mixin_class.__name__ - ) - - # Check mixin class not already present in mro in order to avoid consistent - # method resolution failure. - if mixin_class in instance.__class__.mro(): - return - - instance.__class__ = type( - name, - ( - mixin_class, - instance.__class__ - ), - {} - ) diff --git a/pype/vendor/ftrack_api/_centralized_storage_scenario.py b/pype/vendor/ftrack_api/_centralized_storage_scenario.py deleted file mode 100644 index 65d4d8622b..0000000000 --- a/pype/vendor/ftrack_api/_centralized_storage_scenario.py +++ /dev/null @@ -1,658 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -from __future__ import absolute_import - -from builtins import str -from builtins import object -import logging -import json -import sys -import os - -import ftrack_api -import ftrack_api.structure.standard as _standard -from ftrack_api.logging import LazyLogMessage as L - - -scenario_name = 'ftrack.centralized-storage' - - -class ConfigureCentralizedStorageScenario(object): - '''Configure a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - @property - def storage_scenario(self): - '''Return storage scenario setting.''' - return self.session.query( - 'select value from Setting ' - 'where name is "storage_scenario" and group is "STORAGE"' - ).one() - - @property - def existing_centralized_storage_configuration(self): - '''Return existing centralized storage configuration.''' - storage_scenario = self.storage_scenario - - try: - configuration = json.loads(storage_scenario['value']) - except (ValueError, TypeError): - return None - - if not isinstance(configuration, dict): - return None - - if configuration.get('scenario') != scenario_name: - return None - - return configuration.get('data', {}) - - def _get_confirmation_text(self, configuration): - '''Return confirmation text from *configuration*.''' - configure_location = configuration.get('configure_location') - select_location = configuration.get('select_location') - select_mount_point = configuration.get('select_mount_point') - - if configure_location: - location_text = str( - 'A new location will be created:\n\n' - '* Label: {location_label}\n' - '* Name: {location_name}\n' - '* Description: {location_description}\n' - ).format(**configure_location) - else: - location = self.session.get( - 'Location', select_location['location_id'] - ) - location_text = ( - u'You have choosen to use an existing location: {0}'.format( - location['label'] - ) - ) - - mount_points_text = str( - '* Linux: {linux}\n' - '* OS X: {osx}\n' - '* Windows: {windows}\n\n' - ).format( - linux=select_mount_point.get('linux_mount_point') or '*Not set*', - osx=select_mount_point.get('osx_mount_point') or '*Not set*', - windows=select_mount_point.get('windows_mount_point') or '*Not set*' - ) - - mount_points_not_set = [] - - if not select_mount_point.get('linux_mount_point'): - mount_points_not_set.append('Linux') - - if not select_mount_point.get('osx_mount_point'): - mount_points_not_set.append('OS X') - - if not select_mount_point.get('windows_mount_point'): - mount_points_not_set.append('Windows') - - if mount_points_not_set: - mount_points_text += str( - 'Please be aware that this location will not be working on ' - '{missing} because the mount points are not set up.' - ).format( - missing=' and '.join(mount_points_not_set) - ) - - text = str( - '#Confirm storage setup#\n\n' - 'Almost there! Please take a moment to verify the settings you ' - 'are about to save. You can always come back later and update the ' - 'configuration.\n' - '##Location##\n\n' - '{location}\n' - '##Mount points##\n\n' - '{mount_points}' - ).format( - location=location_text, - mount_points=mount_points_text - ) - - return text - - def configure_scenario(self, event): - '''Configure scenario based on *event* and return form items.''' - steps = ( - 'select_scenario', - 'select_location', - 'configure_location', - 'select_structure', - 'select_mount_point', - 'confirm_summary', - 'save_configuration' - ) - - warning_message = '' - values = event['data'].get('values', {}) - - # Calculate previous step and the next. - previous_step = values.get('step', 'select_scenario') - next_step = steps[steps.index(previous_step) + 1] - state = 'configuring' - - self.logger.info(L( - u'Configuring scenario, previous step: {0}, next step: {1}. ' - u'Values {2!r}.', - previous_step, next_step, values - )) - - if 'configuration' in values: - configuration = values.pop('configuration') - else: - configuration = {} - - if values: - # Update configuration with values from the previous step. 
- configuration[previous_step] = values - - if previous_step == 'select_location': - values = configuration['select_location'] - if values.get('location_id') != 'create_new_location': - location_exists = self.session.query( - 'Location where id is "{0}"'.format( - values.get('location_id') - ) - ).first() - if not location_exists: - next_step = 'select_location' - warning_message = ( - '**The selected location does not exist. Please choose ' - 'one from the dropdown or create a new one.**' - ) - - if next_step == 'select_location': - try: - location_id = ( - self.existing_centralized_storage_configuration['location_id'] - ) - except (KeyError, TypeError): - location_id = None - - options = [{ - 'label': 'Create new location', - 'value': 'create_new_location' - }] - for location in self.session.query( - 'select name, label, description from Location' - ): - if location['name'] not in ( - 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', - 'ftrack.server', 'ftrack.review' - ): - options.append({ - 'label': u'{label} ({name})'.format( - label=location['label'], name=location['name'] - ), - 'description': location['description'], - 'value': location['id'] - }) - - warning = '' - if location_id is not None: - # If there is already a location configured we must make the - # user aware that changing the location may be problematic. - warning = ( - '\n\n**Be careful if you switch to another location ' - 'for an existing storage scenario. Components that have ' - 'already been published to the previous location will be ' - 'made unavailable for common use.**' - ) - default_value = location_id - elif location_id is None and len(options) == 1: - # No location configured and no existing locations to use. - default_value = 'create_new_location' - else: - # There are existing locations to choose from but non of them - # are currently active in the centralized storage scenario. - default_value = None - - items = [{ - 'type': 'label', - 'value': ( - '#Select location#\n' - 'Choose an already existing location or create a new one ' - 'to represent your centralized storage. {0}'.format( - warning - ) - ) - }, { - 'type': 'enumerator', - 'label': 'Location', - 'name': 'location_id', - 'value': default_value, - 'data': options - }] - - default_location_name = 'studio.central-storage-location' - default_location_label = 'Studio location' - default_location_description = ( - 'The studio central location where all components are ' - 'stored.' - ) - - if previous_step == 'configure_location': - configure_location = configuration.get( - 'configure_location' - ) - - if configure_location: - try: - existing_location = self.session.query( - u'Location where name is "{0}"'.format( - configure_location.get('location_name') - ) - ).first() - except UnicodeEncodeError: - next_step = 'configure_location' - warning_message += ( - '**The location name contains non-ascii characters. ' - 'Please change the name and try again.**' - ) - values = configuration['select_location'] - else: - if existing_location: - next_step = 'configure_location' - warning_message += ( - u'**There is already a location named {0}. 
' - u'Please change the name and try again.**'.format( - configure_location.get('location_name') - ) - ) - values = configuration['select_location'] - - if ( - not configure_location.get('location_name') or - not configure_location.get('location_label') or - not configure_location.get('location_description') - ): - next_step = 'configure_location' - warning_message += ( - '**Location name, label and description cannot ' - 'be empty.**' - ) - values = configuration['select_location'] - - if next_step == 'configure_location': - # Populate form with previous configuration. - default_location_label = configure_location['location_label'] - default_location_name = configure_location['location_name'] - default_location_description = ( - configure_location['location_description'] - ) - - if next_step == 'configure_location': - - if values.get('location_id') == 'create_new_location': - # Add options to create a new location. - items = [{ - 'type': 'label', - 'value': ( - '#Create location#\n' - 'Here you will create a new location to be used ' - 'with your new Storage scenario. For your ' - 'convenience we have already filled in some default ' - 'values. If this is the first time you are configuring ' - 'a storage scenario in ftrack we recommend that you ' - 'stick with these settings.' - ) - }, { - 'label': 'Label', - 'name': 'location_label', - 'value': default_location_label, - 'type': 'text' - }, { - 'label': 'Name', - 'name': 'location_name', - 'value': default_location_name, - 'type': 'text' - }, { - 'label': 'Description', - 'name': 'location_description', - 'value': default_location_description, - 'type': 'text' - }] - - else: - # The user selected an existing location. Move on to next - # step. - next_step = 'select_mount_point' - - if next_step == 'select_structure': - # There is only one structure to choose from, go to next step. - next_step = 'select_mount_point' - # items = [ - # { - # 'type': 'label', - # 'value': ( - # '#Select structure#\n' - # 'Select which structure to use with your location. ' - # 'The structure is used to generate the filesystem ' - # 'path for components that are added to this location.' - # ) - # }, - # { - # 'type': 'enumerator', - # 'label': 'Structure', - # 'name': 'structure_id', - # 'value': 'standard', - # 'data': [{ - # 'label': 'Standard', - # 'value': 'standard', - # 'description': ( - # 'The Standard structure uses the names in your ' - # 'project structure to determine the path.' - # ) - # }] - # } - # ] - - if next_step == 'select_mount_point': - try: - mount_points = ( - self.existing_centralized_storage_configuration['accessor']['mount_points'] - ) - except (KeyError, TypeError): - mount_points = dict() - - items = [ - { - 'value': ( - '#Mount points#\n' - 'Set mount points for your centralized storage ' - 'location. For the location to work as expected each ' - 'platform that you intend to use must have the ' - 'corresponding mount point set and the storage must ' - 'be accessible. If not set correctly files will not be ' - 'saved or read.' - ), - 'type': 'label' - }, { - 'type': 'text', - 'label': 'Linux', - 'name': 'linux_mount_point', - 'empty_text': 'E.g. /usr/mnt/MyStorage ...', - 'value': mount_points.get('linux', '') - }, { - 'type': 'text', - 'label': 'OS X', - 'name': 'osx_mount_point', - 'empty_text': 'E.g. /Volumes/MyStorage ...', - 'value': mount_points.get('osx', '') - }, { - 'type': 'text', - 'label': 'Windows', - 'name': 'windows_mount_point', - 'empty_text': 'E.g. 
\\\\MyStorage ...', - 'value': mount_points.get('windows', '') - } - ] - - if next_step == 'confirm_summary': - items = [{ - 'type': 'label', - 'value': self._get_confirmation_text(configuration) - }] - state = 'confirm' - - if next_step == 'save_configuration': - mount_points = configuration['select_mount_point'] - select_location = configuration['select_location'] - - if select_location['location_id'] == 'create_new_location': - configure_location = configuration['configure_location'] - location = self.session.create( - 'Location', - { - 'name': configure_location['location_name'], - 'label': configure_location['location_label'], - 'description': ( - configure_location['location_description'] - ) - } - ) - - else: - location = self.session.query( - 'Location where id is "{0}"'.format( - select_location['location_id'] - ) - ).one() - - setting_value = json.dumps({ - 'scenario': scenario_name, - 'data': { - 'location_id': location['id'], - 'location_name': location['name'], - 'accessor': { - 'mount_points': { - 'linux': mount_points['linux_mount_point'], - 'osx': mount_points['osx_mount_point'], - 'windows': mount_points['windows_mount_point'] - } - } - } - }) - - self.storage_scenario['value'] = setting_value - self.session.commit() - - # Broadcast an event that storage scenario has been configured. - event = ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.configure-done' - ) - self.session.event_hub.publish(event) - - items = [{ - 'type': 'label', - 'value': ( - '#Done!#\n' - 'Your storage scenario is now configured and ready ' - 'to use. **Note that you may have to restart Connect and ' - 'other applications to start using it.**' - ) - }] - state = 'done' - - if warning_message: - items.insert(0, { - 'type': 'label', - 'value': warning_message - }) - - items.append({ - 'type': 'hidden', - 'value': configuration, - 'name': 'configuration' - }) - items.append({ - 'type': 'hidden', - 'value': next_step, - 'name': 'step' - }) - - return { - 'items': items, - 'state': state - } - - def discover_centralized_scenario(self, event): - '''Return action discover dictionary for *event*.''' - return { - 'id': scenario_name, - 'name': 'Centralized storage scenario', - 'description': ( - '(Recommended) centralized storage scenario where all files ' - 'are kept on a storage that is mounted and available to ' - 'everyone in the studio.' - ) - } - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - #: TODO: Move these to a separate function. - session.event_hub.subscribe( - str( - 'topic=ftrack.storage-scenario.discover ' - 'and source.user.username="{0}"' - ).format( - session.api_user - ), - self.discover_centralized_scenario - ) - session.event_hub.subscribe( - str( - 'topic=ftrack.storage-scenario.configure ' - 'and data.scenario_id="{0}" ' - 'and source.user.username="{1}"' - ).format( - scenario_name, - session.api_user - ), - self.configure_scenario - ) - - -class ActivateCentralizedStorageScenario(object): - '''Activate a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - def activate(self, event): - '''Activate scenario in *event*.''' - storage_scenario = event['data']['storage_scenario'] - - try: - location_data = storage_scenario['data'] - location_name = location_data['location_name'] - location_id = location_data['location_id'] - mount_points = location_data['accessor']['mount_points'] - - except KeyError: - error_message = ( - 'Unable to read storage scenario data.' - ) - self.logger.error(L(error_message)) - raise ftrack_api.exception.LocationError( - 'Unable to configure location based on scenario.' - ) - - else: - location = self.session.create( - 'Location', - data=dict( - name=location_name, - id=location_id - ), - reconstructing=True - ) - - if 'darwin' in sys.platform: - prefix = mount_points['osx'] - elif 'linux' in sys.platform: - prefix = mount_points['linux'] - elif 'win' in sys.platform: - prefix = mount_points['windows'] - else: - raise ftrack_api.exception.LocationError( - ( - 'Unable to find accessor prefix for platform {0}.' - ).format(sys.platform) - ) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=prefix - ) - location.structure = _standard.StandardStructure() - location.priority = 1 - self.logger.info(L( - u'Storage scenario activated. Configured {0!r} from ' - u'{1!r}', - location, storage_scenario - )) - - def _verify_startup(self, event): - '''Verify the storage scenario configuration.''' - storage_scenario = event['data']['storage_scenario'] - location_data = storage_scenario['data'] - mount_points = location_data['accessor']['mount_points'] - - prefix = None - if 'darwin' in sys.platform: - prefix = mount_points['osx'] - elif 'linux' in sys.platform: - prefix = mount_points['linux'] - elif 'win' in sys.platform: - prefix = mount_points['windows'] - - if not prefix: - return ( - u'The storage scenario has not been configured for your ' - u'operating system. ftrack may not be able to ' - u'store and track files correctly.' - ) - - if not os.path.isdir(prefix): - return ( - str( - 'The path {0} does not exist. ftrack may not be able to ' - 'store and track files correctly. \n\nIf the storage is ' - 'newly setup you may want to create necessary folder ' - 'structures. If the storage is a network drive you should ' - 'make sure that it is mounted correctly.' - ).format(prefix) - ) - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - session.event_hub.subscribe( - ( - 'topic=ftrack.storage-scenario.activate ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self.activate - ) - - # Listen to verify startup event from ftrack connect to allow responding - # with a message if something is not working correctly with this - # scenario that the user should be notified about. 
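# --- editor's sketch, not part of this diff: the platform-to-prefix
# --- selection used by activate() and _verify_startup() above, isolated.
# --- Test order matters: 'darwin' itself contains 'win', so macOS must be
# --- checked before Windows. The mount_points dict is a made-up example.
import sys

def resolve_prefix(mount_points):
    '''Return the configured mount point for the current platform, if any.'''
    if 'darwin' in sys.platform:
        return mount_points.get('osx')
    elif 'linux' in sys.platform:
        return mount_points.get('linux')
    elif 'win' in sys.platform:
        return mount_points.get('windows')
    return None

prefix = resolve_prefix({
    'linux': '/usr/mnt/MyStorage',
    'osx': '/Volumes/MyStorage',
    'windows': '\\\\MyStorage',
})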
- self.session.event_hub.subscribe( - ( - 'topic=ftrack.connect.verify-startup ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self._verify_startup - ) - -def register(session): - '''Register storage scenario.''' - scenario = ActivateCentralizedStorageScenario() - scenario.register(session) - - -def register_configuration(session): - '''Register storage scenario.''' - scenario = ConfigureCentralizedStorageScenario() - scenario.register(session) diff --git a/pype/vendor/ftrack_api/_python_ntpath.py b/pype/vendor/ftrack_api/_python_ntpath.py deleted file mode 100644 index 57928e3e31..0000000000 --- a/pype/vendor/ftrack_api/_python_ntpath.py +++ /dev/null @@ -1,535 +0,0 @@ -# pragma: no cover -# Module 'ntpath' -- common operations on WinNT/Win95 pathnames -"""Common pathname manipulations, WindowsNT/95 version. - -Instead of importing this module directly, import os and refer to this -module as os.path. -""" - -from builtins import zip -import os -import sys -import stat -import genericpath -import warnings - -from genericpath import * - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","walk","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames","relpath"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '\\' -pathsep = ';' -altsep = '/' -defpath = '.;C:\\bin' -if 'ce' in sys.builtin_module_names: - defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all slashes into backslashes.""" - return s.replace("/", "\\").lower() - - -# Return whether a path is absolute. -# Trivial in Posix, harder on the Mac or MS-DOS. -# For DOS it is absolute if it starts with a slash or backslash (current -# volume), or if a pathname after the volume letter and colon / UNC resource -# starts with a slash or backslash. - -def isabs(s): - """Test whether a path is absolute""" - s = splitdrive(s)[1] - return s != '' and s[:1] in '/\\' - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting "\\" as needed. - If any component is an absolute path, all previous path components - will be discarded.""" - path = a - for b in p: - b_wins = 0 # set to 1 iff b makes path irrelevant - if path == "": - b_wins = 1 - - elif isabs(b): - # This probably wipes out path so far. However, it's more - # complicated if path begins with a drive letter: - # 1. join('c:', '/a') == 'c:/a' - # 2. join('c:/', '/a') == 'c:/a' - # But - # 3. join('c:/a', '/b') == '/b' - # 4. join('c:', 'd:/') = 'd:/' - # 5. join('c:/', 'd:/') = 'd:/' - if path[1:2] != ":" or b[1:2] == ":": - # Path doesn't start with a drive letter, or cases 4 and 5. - b_wins = 1 - - # Else path has a drive letter, and b doesn't but is absolute. - elif len(path) > 3 or (len(path) == 3 and - path[-1] not in "/\\"): - # case 3 - b_wins = 1 - - if b_wins: - path = b - else: - # Join, and ensure there's a separator. 
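# --- editor's note, not part of this diff: the five drive-letter cases
# --- enumerated above, tabulated. These are the results produced by this
# --- (Python 2-era) copy:
#
#     join('c:', '/a')   -> 'c:/a'   drive kept, path replaced (case 1)
#     join('c:/', '/a')  -> 'c:/a'   as above (case 2)
#     join('c:/a', '/b') -> '/b'     absolute b discards everything (case 3)
#     join('c:', 'd:/')  -> 'd:/'    a different drive always wins (case 4)
#     join('c:/', 'd:/') -> 'd:/'    (case 5)
#
# Note that ntpath.join in current CPython keeps the drive in case 3
# ('c:/b'), so results from this vendored copy and the stdlib can differ.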
- assert len(path) > 0 - if path[-1] in "/\\": - if b and b[0] in "/\\": - path += b[1:] - else: - path += b - elif path[-1] == ":": - path += b - elif b: - if b[0] in "/\\": - path += b - else: - path += "\\" + b - else: - # path is not empty and does not end with a backslash, - # but b is empty; since, e.g., split('a/') produces - # ('a', ''), it's best if join() adds a backslash in - # this case. - path += '\\' - - return path - - -# Split a path in a drive specification (a drive letter followed by a -# colon) and the path specification. -# It is always true that drivespec + pathspec == p -def splitdrive(p): - """Split a pathname into drive and path specifiers. Returns a 2-tuple -"(drive,path)"; either part may be empty""" - if p[1:2] == ':': - return p[0:2], p[2:] - return '', p - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. - If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '//' or firstTwo == '\\\\': - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... - # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('\\', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('\\', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Split a path in head (everything up to the last '/') and tail (the -# rest). After the trailing '/' is stripped, the invariant -# join(head, tail) == p holds. -# The resulting head won't end in '/' unless it is the root. - -def split(p): - """Split a pathname. - - Return tuple (head, tail) where tail is everything after the final slash. - Either part may be empty.""" - - d, p = splitdrive(p) - # set i to index beyond p's last slash - i = len(p) - while i and p[i-1] not in '/\\': - i = i - 1 - head, tail = p[:i], p[i:] # now tail has no slashes - # remove trailing slashes from head, unless it's all slashes - head2 = head - while head2 and head2[-1] in '/\\': - head2 = head2[:-1] - head = head2 or head - return d + head, tail - - -# Split a path in root and extension. -# The extension is everything starting at the last dot in the last -# pathname component; the root is everything before that. -# It is always true that root + ext == p. - -def splitext(p): - return genericpath._splitext(p, sep, altsep, extsep) -splitext.__doc__ = genericpath._splitext.__doc__ - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - -# Is a path a symbolic link? -# This will always return false on systems where posix.lstat doesn't exist. - -def islink(path): - """Test for symbolic link. - On WindowsNT/95 and OS/2 always returns false - """ - return False - -# alias exists to lexists -lexists = exists - -# Is a path a mount point? Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. 
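# --- editor's note, not part of this diff: concrete examples of what the
# --- definition above accepts, per the implementation that follows:
#
#     ismount('c:\\')            -> True   root with a drive letter
#     ismount('\\')              -> True   root without a drive letter
#     ismount('\\\\host\\share') -> True   bare UNC mount point
#     ismount('c:\\dir')         -> False  anything below a root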
- -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Directory tree walk. -# For each directory under top (including top itself, but excluding -# '.' and '..'), func(arg, dirname, filenames) is called, where -# dirname is the name of the directory and filenames is the list -# of files (and subdirectories etc.) in the directory. -# The func may modify the filenames list, to implement a filter, -# or to impose a different order of visiting. - -def walk(top, func, arg): - """Directory tree walk with callback function. - - For each directory in the directory tree rooted at top (including top - itself, but excluding '.' and '..'), call func(arg, dirname, fnames). - dirname is the name of the directory, and fnames a list of the names of - the files and subdirectories in dirname (excluding '.' and '..'). func - may modify the fnames list in-place (e.g. via del or slice assignment), - and walk will only recurse into the subdirectories whose names remain in - fnames; this can be used to implement a filter, or to impose a specific - order of visiting. No semantics are defined for, or required of, arg, - beyond that arg is always passed to func. It can be used, e.g., to pass - a filename pattern, or a mutable object designed to accumulate - statistics. Passing None for arg is common.""" - warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", - stacklevel=2) - try: - names = os.listdir(top) - except os.error: - return - func(arg, top, names) - for name in names: - name = join(top, name) - if isdir(name): - walk(name, func, arg) - - -# Expand paths beginning with '~' or '~user'. -# '~' means $HOME; '~user' means that user's home directory. -# If the path doesn't begin with '~', or if the user or $HOME is unknown, -# the path is returned unchanged (leaving error reporting to whatever -# function is called with the expanded path as argument). -# See also module 'glob' for expansion of *, ? and [...] in pathnames. -# (A function should also be defined to do full *sh-style environment -# variable expansion.) - -def expanduser(path): - """Expand ~ and ~user constructs. - - If user or $HOME is unknown, do nothing.""" - if path[:1] != '~': - return path - i, n = 1, len(path) - while i < n and path[i] not in '/\\': - i = i + 1 - - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif not 'HOMEPATH' in os.environ: - return path - else: - try: - drive = os.environ['HOMEDRIVE'] - except KeyError: - drive = '' - userhome = join(drive, os.environ['HOMEPATH']) - - if i != 1: #~user - userhome = join(dirname(userhome), path[1:i]) - - return userhome + path[i:] - - -# Expand paths containing shell variable substitutions. -# The following rules apply: -# - no expansion within single quotes -# - '$$' is translated into '$' -# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% -# - ${varname} is accepted. -# - $varname is accepted. -# - %varname% is accepted. -# - varnames can be made out of letters, digits and the characters '_-' -# (though is not verified in the ${varname} and %varname% cases) -# XXX With COMMAND.COM you can use any characters in a variable name, -# XXX except '^|<>='. - -def expandvars(path): - """Expand shell variables of the forms $var, ${var} and %var%. 
- - Unknown variables are left unchanged.""" - if '$' not in path and '%' not in path: - return path - import string - varchars = string.ascii_letters + string.digits + '_-' - res = '' - index = 0 - pathlen = len(path) - while index < pathlen: - c = path[index] - if c == '\'': # no expansion within single quotes - path = path[index + 1:] - pathlen = len(path) - try: - index = path.index('\'') - res = res + '\'' + path[:index + 1] - except ValueError: - res = res + path - index = pathlen - 1 - elif c == '%': # variable or '%' - if path[index + 1:index + 2] == '%': - res = res + c - index = index + 1 - else: - path = path[index+1:] - pathlen = len(path) - try: - index = path.index('%') - except ValueError: - res = res + '%' + path - index = pathlen - 1 - else: - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '%' + var + '%' - elif c == '$': # variable or '$$' - if path[index + 1:index + 2] == '$': - res = res + c - index = index + 1 - elif path[index + 1:index + 2] == '{': - path = path[index+2:] - pathlen = len(path) - try: - index = path.index('}') - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '${' + var + '}' - except ValueError: - res = res + '${' + path - index = pathlen - 1 - else: - var = '' - index = index + 1 - c = path[index:index + 1] - while c != '' and c in varchars: - var = var + c - index = index + 1 - c = path[index:index + 1] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '$' + var - if c != '': - index = index - 1 - else: - res = res + c - index = index + 1 - return res - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. -# Previously, this function also truncated pathnames to 8+3 format, -# but as this module is called "ntpath", that's obviously wrong! - -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - # Preserve unicode (if path is unicode) - backslash, dot = (u'\\', u'.') if isinstance(path, str) else ('\\', '.') - if path.startswith(('\\\\.\\', '\\\\?\\')): - # in the case of paths with these prefixes: - # \\.\ -> device names - # \\?\ -> literal paths - # do not do any normalization, but return the path unchanged - return path - path = path.replace("/", "\\") - prefix, path = splitdrive(path) - # We need to be careful here. If the prefix is empty, and the path starts - # with a backslash, it could either be an absolute path on the current - # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It - # is therefore imperative NOT to collapse multiple backslashes blindly in - # that case. - # The code below preserves multiple backslashes when there is no drive - # letter. This means that the invalid filename \\\a\b is preserved - # unchanged, where a\\\b is normalised to a\b. It's not clear that there - # is any better behaviour for such edge cases. - if prefix == '': - # No drive letter - preserve initial backslashes - while path[:1] == "\\": - prefix = prefix + backslash - path = path[1:] - else: - # We have a drive letter - collapse initial backslashes - if path.startswith("\\"): - prefix = prefix + backslash - path = path.lstrip("\\") - comps = path.split("\\") - i = 0 - while i < len(comps): - if comps[i] in ('.', ''): - del comps[i] - elif comps[i] == '..': - if i > 0 and comps[i-1] != '..': - del comps[i-1:i+1] - i -= 1 - elif i == 0 and prefix.endswith("\\"): - del comps[i] - else: - i += 1 - else: - i += 1 - # If the path is now empty, substitute '.' 
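# --- editor's note, not part of this diff: the end-to-end effect of the
# --- normalisation above, matching the examples in the function's header
# --- comment:
#
#     normpath('A//B')        -> 'A\\B'
#     normpath('A/./B')       -> 'A\\B'
#     normpath('A/foo/../B')  -> 'A\\B'
#     normpath('\\\\?\\C:/x') -> returned unchanged (literal-path prefix)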
- if not prefix and not comps: - comps.append(dot) - return prefix + backslash.join(comps) - - -# Return an absolute path. -try: - from nt import _getfullpathname - -except ImportError: # not running on Windows - mock up something sensible - def abspath(path): - """Return the absolute version of a path.""" - if not isabs(path): - if isinstance(path, str): - cwd = os.getcwd() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - -else: # use native Windows method on Windows - def abspath(path): - """Return the absolute version of a path.""" - - if path: # Empty path must return current working directory. - try: - path = _getfullpathname(path) - except WindowsError: - pass # Bad path - return unchanged. - elif isinstance(path, str): - path = os.getcwd() - else: - path = os.getcwd() - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath -# Win9x family and earlier have no Unicode filename support. -supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and - sys.getwindowsversion()[3] >= 2) - -def _abspath_split(path): - abs = abspath(normpath(path)) - prefix, rest = splitunc(abs) - is_unc = bool(prefix) - if not is_unc: - prefix, rest = splitdrive(abs) - return is_unc, prefix, [x for x in rest.split(sep) if x] - -def relpath(path, start=curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_is_unc, start_prefix, start_list = _abspath_split(start) - path_is_unc, path_prefix, path_list = _abspath_split(path) - - if path_is_unc ^ start_is_unc: - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - if path_prefix.lower() != start_prefix.lower(): - if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) - # Work out how much of the filepath is shared by start and path. - i = 0 - for e1, e2 in zip(start_list, path_list): - if e1.lower() != e2.lower(): - break - i += 1 - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -try: - # The genericpath.isdir implementation uses os.stat and checks the mode - # attribute to tell whether or not the path is a directory. - # This is overkill on Windows - just pass the path to GetFileAttributes - # and check the attribute from there. - from nt import _isdir as isdir -except ImportError: - # Use genericpath.isdir as imported above. 
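# --- editor's note, not part of this diff: relpath above follows the
# --- usual contract; for example, on the same drive:
#
#     relpath('c:\\a\\b\\file', start='c:\\a\\x') -> '..\\b\\file'
#
# while mixing UNC and drive-letter paths raises ValueError, as the
# explicit guards in the function spell out.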
- pass diff --git a/pype/vendor/ftrack_api/_version.py b/pype/vendor/ftrack_api/_version.py deleted file mode 100644 index 0e1a38d3c2..0000000000 --- a/pype/vendor/ftrack_api/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '1.7.0' diff --git a/pype/vendor/ftrack_api/accessor/__init__.py b/pype/vendor/ftrack_api/accessor/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/pype/vendor/ftrack_api/accessor/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/pype/vendor/ftrack_api/accessor/base.py b/pype/vendor/ftrack_api/accessor/base.py deleted file mode 100644 index b76e2a6a9c..0000000000 --- a/pype/vendor/ftrack_api/accessor/base.py +++ /dev/null @@ -1,124 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -from builtins import object -import abc - -import ftrack_api.exception -from future.utils import with_metaclass - - -class Accessor(with_metaclass(abc.ABCMeta, object)): - '''Provide data access to a location. - - A location represents a specific storage, but access to that storage may - vary. For example, both local filesystem and FTP access may be possible for - the same storage. An accessor implements these different ways of accessing - the same data location. - - As different accessors may access the same location, only part of a data - path that is commonly understood may be stored in the database. The format - of this path should be a contract between the accessors that require access - to the same location and is left as an implementation detail. As such, this - system provides no guarantee that two different accessors can provide access - to the same location, though this is a clear goal. The path stored centrally - is referred to as the **resource identifier** and should be used when - calling any of the accessor methods that accept a *resource_identifier* - argument. - - ''' - - def __init__(self): - '''Initialise location accessor.''' - super(Accessor, self).__init__() - - @abc.abstractmethod - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. - - ''' - - @abc.abstractmethod - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - - @abc.abstractmethod - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - - @abc.abstractmethod - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - - @abc.abstractmethod - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - - @abc.abstractmethod - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' - - @abc.abstractmethod - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - - @abc.abstractmethod - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. 
- - If *recursive* is True, also make any intermediate containers. - - Should silently ignore existing containers and not recreate them. - - ''' - - @abc.abstractmethod - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` - if container of *resource_identifier* could not be determined. - - ''' - - def remove_container(self, resource_identifier): # pragma: no cover - '''Remove container at *resource_identifier*.''' - return self.remove(resource_identifier) - - def get_filesystem_path(self, resource_identifier): # pragma: no cover - '''Return filesystem path for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - filesystem path could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving filesystem paths is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_filesystem_path', resource_identifier=resource_identifier - ) - - def get_url(self, resource_identifier): - '''Return URL for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_url', resource_identifier=resource_identifier - ) diff --git a/pype/vendor/ftrack_api/accessor/disk.py b/pype/vendor/ftrack_api/accessor/disk.py deleted file mode 100644 index 20a2d4b535..0000000000 --- a/pype/vendor/ftrack_api/accessor/disk.py +++ /dev/null @@ -1,251 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -import sys -import errno -import contextlib - -import ftrack_api._python_ntpath as ntpath -import ftrack_api.accessor.base -import ftrack_api.data -from ftrack_api.exception import ( - AccessorFilesystemPathError, - AccessorUnsupportedOperationError, - AccessorResourceNotFoundError, - AccessorOperationFailedError, - AccessorPermissionDeniedError, - AccessorResourceInvalidError, - AccessorContainerNotEmptyError, - AccessorParentResourceNotFoundError -) - - -class DiskAccessor(ftrack_api.accessor.base.Accessor): - '''Provide disk access to a location. - - Expect resource identifiers to refer to relative filesystem paths. - - ''' - - def __init__(self, prefix, **kw): - '''Initialise location accessor. - - *prefix* specifies the base folder for the disk based structure and - will be prepended to any path. It should be specified in the syntax of - the current OS. - - ''' - if prefix: - prefix = os.path.expanduser(os.path.expandvars(prefix)) - prefix = os.path.abspath(prefix) - self.prefix = prefix - - super(DiskAccessor, self).__init__(**kw) - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. 
- - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='list', resource_identifier=resource_identifier - ): - listing = [] - for entry in os.listdir(filesystem_path): - listing.append(os.path.join(resource_identifier, entry)) - - return listing - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.exists(filesystem_path) - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isfile(filesystem_path) - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isdir(filesystem_path) - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise AccessorUnsupportedOperationError(operation='is_sequence') - - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='open', resource_identifier=resource_identifier - ): - data = ftrack_api.data.File(filesystem_path, mode) - - return data - - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - if self.is_file(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.remove(filesystem_path) - - elif self.is_container(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.rmdir(filesystem_path) - - else: - raise AccessorResourceNotFoundError( - resource_identifier=resource_identifier - ) - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='makeContainer', resource_identifier=resource_identifier - ): - try: - if recursive: - os.makedirs(filesystem_path) - else: - try: - os.mkdir(filesystem_path) - except OSError as error: - if error.errno == errno.ENOENT: - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier - ) - else: - raise - - except OSError as error: - if error.errno != errno.EEXIST: - raise - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if - container of *resource_identifier* could not be determined. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - container = os.path.dirname(filesystem_path) - - if self.prefix: - if not container.startswith(self.prefix): - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier, - message='Could not determine container for ' - '{resource_identifier} as container falls outside ' - 'of configured prefix.' 
- ) - - # Convert container filesystem path into resource identifier. - container = container[len(self.prefix):] - if ntpath.isabs(container): - # Ensure that resulting path is relative by stripping any - # leftover prefixed slashes from string. - # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the - # result will be 'foo/bar'. - container = container.lstrip('\\/') - - return container - - def get_filesystem_path(self, resource_identifier): - '''Return filesystem path for *resource_identifier*. - - For example:: - - >>> accessor = DiskAccessor('my.location', '/mountpoint') - >>> print accessor.get_filesystem_path('test.txt') - /mountpoint/test.txt - >>> print accessor.get_filesystem_path('/mountpoint/test.txt') - /mountpoint/test.txt - - Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem - path could not be determined from *resource_identifier*. - - ''' - filesystem_path = resource_identifier - if filesystem_path: - filesystem_path = os.path.normpath(filesystem_path) - - if self.prefix: - if not os.path.isabs(filesystem_path): - filesystem_path = os.path.normpath( - os.path.join(self.prefix, filesystem_path) - ) - - if not filesystem_path.startswith(self.prefix): - raise AccessorFilesystemPathError( - resource_identifier=resource_identifier, - message='Could not determine access path for ' - 'resource_identifier outside of configured prefix: ' - '{resource_identifier}.' - ) - - return filesystem_path - - -@contextlib.contextmanager -def error_handler(**kw): - '''Conform raised OSError/IOError exception to appropriate FTrack error.''' - try: - yield - - except (OSError, IOError) as error: - (exception_type, exception_value, traceback) = sys.exc_info() - kw.setdefault('error', error) - - - error_code = getattr(error, 'errno') - if not error_code: - raise AccessorOperationFailedError(**kw) - - if error_code == errno.ENOENT: - raise AccessorResourceNotFoundError(**kw) - - elif error_code == errno.EPERM: - raise AccessorPermissionDeniedError(**kw) - - elif error_code == errno.ENOTEMPTY: - raise AccessorContainerNotEmptyError(**kw) - - elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): - raise AccessorResourceInvalidError(**kw) - - else: - raise AccessorOperationFailedError(**kw) - - except Exception: - raise diff --git a/pype/vendor/ftrack_api/accessor/server.py b/pype/vendor/ftrack_api/accessor/server.py deleted file mode 100644 index 0fc5f43f30..0000000000 --- a/pype/vendor/ftrack_api/accessor/server.py +++ /dev/null @@ -1,240 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import hashlib -import base64 -import json - -import requests - -from .base import Accessor -from ..data import String -import ftrack_api.exception -import ftrack_api.symbol - - -class ServerFile(String): - '''Representation of a server file.''' - - def __init__(self, resource_identifier, session, mode='rb'): - '''Initialise file.''' - self.mode = mode - self.resource_identifier = resource_identifier - self._session = session - self._has_read = False - - super(ServerFile, self).__init__() - - def flush(self): - '''Flush all changes.''' - super(ServerFile, self).flush() - - if self.mode == 'wb': - self._write() - - def read(self, limit=None): - '''Read file.''' - if not self._has_read: - self._read() - self._has_read = True - - return super(ServerFile, self).read(limit) - - def _read(self): - '''Read all remote content from key into wrapped_file.''' - position = self.tell() - self.seek(0) - - response = requests.get( - 
'{0}/component/get'.format(self._session.server_url), - params={ - 'id': self.resource_identifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - }, - stream=True - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to read data: {0}.'.format(error) - ) - - for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): - self.wrapped_file.write(block) - - self.flush() - self.seek(position) - - def _write(self): - '''Write current data to remote key.''' - position = self.tell() - self.seek(0) - - # Retrieve component from cache to construct a filename. - component = self._session.get('FileComponent', self.resource_identifier) - if not component: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Unable to retrieve component with id: {0}.'.format( - self.resource_identifier - ) - ) - - # Construct a name from component name and file_type. - name = component['name'] - if component['file_type']: - name = u'{0}.{1}'.format( - name, - component['file_type'].lstrip('.') - ) - - try: - metadata = self._session.get_upload_metadata( - component_id=self.resource_identifier, - file_name=name, - file_size=self._get_size(), - checksum=self._compute_checksum() - ) - except Exception as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to get put metadata: {0}.'.format(error) - ) - - # Ensure at beginning of file before put. - self.seek(0) - - # Put the file based on the metadata. - response = requests.put( - metadata['url'], - data=self.wrapped_file, - headers=metadata['headers'] - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to put file to server: {0}.'.format(error) - ) - - self.seek(position) - - def _get_size(self): - '''Return size of file in bytes.''' - position = self.tell() - self.seek(0, os.SEEK_END) - length = self.tell() - self.seek(position) - return length - - def _compute_checksum(self): - '''Return checksum for file.''' - fp = self.wrapped_file - buf_size = ftrack_api.symbol.CHUNK_SIZE - hash_obj = hashlib.md5() - spos = fp.tell() - - s = fp.read(buf_size) - while s: - hash_obj.update(s) - s = fp.read(buf_size) - - base64_digest = base64.encodebytes(hash_obj.digest()).decode('utf-8') - if base64_digest[-1] == '\n': - base64_digest = base64_digest[0:-1] - - fp.seek(spos) - return base64_digest - - -class _ServerAccessor(Accessor): - '''Provide server location access.''' - - def __init__(self, session, **kw): - '''Initialise location accessor.''' - super(_ServerAccessor, self).__init__(**kw) - - self._session = session - - def open(self, resource_identifier, mode='rb'): - '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' - return ServerFile(resource_identifier, session=self._session, mode=mode) - - def remove(self, resourceIdentifier): - '''Remove *resourceIdentifier*.''' - response = requests.get( - '{0}/component/remove'.format(self._session.server_url), - params={ - 'id': resourceIdentifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - } - ) - if response.status_code != 200: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to remove file.' 
- ) - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*.''' - return None - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*.''' - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container.''' - raise NotImplementedError() - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - return False - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - raise NotImplementedError() - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - raise NotImplementedError() - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise NotImplementedError() - - def get_url(self, resource_identifier): - '''Return url for *resource_identifier*.''' - url_string = ( - u'{url}/component/get?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - return url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - - def get_thumbnail_url(self, resource_identifier, size=None): - '''Return thumbnail url for *resource_identifier*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. - ''' - url_string = ( - u'{url}/component/thumbnail?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - url = url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - if size: - url += u'&size={0}'.format(size) - - return url diff --git a/pype/vendor/ftrack_api/attribute.py b/pype/vendor/ftrack_api/attribute.py deleted file mode 100644 index 8909b52702..0000000000 --- a/pype/vendor/ftrack_api/attribute.py +++ /dev/null @@ -1,698 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -from builtins import object -import collections -import copy -import logging -import functools - -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.collection -import ftrack_api.inspection -import ftrack_api.operation - -logger = logging.getLogger( - __name__ -) - - -def merge_references(function): - '''Decorator to handle merging of references / collections.''' - - @functools.wraps(function) - def get_value(attribute, entity): - '''Merge the attribute with the local cache.''' - - if attribute.name not in entity._inflated: - # Only merge on first access to avoid - # inflating them multiple times. - - logger.debug( - 'Merging potential new data into attached ' - 'entity for attribute {0}.'.format( - attribute.name - ) - ) - - # Local attributes. - local_value = attribute.get_local_value(entity) - if isinstance( - local_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging local value for attribute {0}.'.format(attribute) - ) - - merged_local_value = entity.session._merge( - local_value, merged=dict() - ) - - if merged_local_value is not local_value: - with entity.session.operation_recording(False): - attribute.set_local_value(entity, merged_local_value) - - # Remote attributes. 
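# --- editor's sketch, not part of this diff: the shape of the decorator
# --- above with the session machinery stripped out. The entity's
# --- _inflated set memoises the merge per attribute name, so the
# --- local/remote merging runs only on first access.
import functools

def merge_once(function):
    '''Toy analogue of merge_references: one-time work per attribute.'''
    @functools.wraps(function)
    def get_value(attribute, entity):
        if attribute.name not in entity._inflated:
            # ... merge local and remote values via the session here ...
            entity._inflated.add(attribute.name)
        return function(attribute, entity)
    return get_value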
- remote_value = attribute.get_remote_value(entity) - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - merged_remote_value = entity.session._merge( - remote_value, merged=dict() - ) - - if merged_remote_value is not remote_value: - attribute.set_remote_value(entity, merged_remote_value) - - entity._inflated.add( - attribute.name - ) - - return function( - attribute, entity - ) - - return get_value - - -class Attributes(object): - '''Collection of properties accessible by name.''' - - def __init__(self, attributes=None): - super(Attributes, self).__init__() - self._data = dict() - if attributes is not None: - for attribute in attributes: - self.add(attribute) - - def add(self, attribute): - '''Add *attribute*.''' - existing = self._data.get(attribute.name, None) - if existing: - raise ftrack_api.exception.NotUniqueError( - 'Attribute with name {0} already added as {1}' - .format(attribute.name, existing) - ) - - self._data[attribute.name] = attribute - - def remove(self, attribute): - '''Remove attribute.''' - self._data.pop(attribute.name) - - def get(self, name): - '''Return attribute by *name*. - - If no attribute matches *name* then return None. - - ''' - return self._data.get(name, None) - - def keys(self): - '''Return list of attribute names.''' - return list(self._data.keys()) - - def __contains__(self, item): - '''Return whether *item* present.''' - if not isinstance(item, Attribute): - return False - - return item.name in self._data - - def __iter__(self): - '''Return iterator over attributes.''' - return iter(self._data.values()) - - def __len__(self): - '''Return count of attributes.''' - return len(self._data) - - -class Attribute(object): - '''A name and value pair persisted remotely.''' - - def __init__( - self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True - ): - '''Initialise attribute with *name*. - - *default_value* represents the default value for the attribute. It may - be a callable. It is not used within the attribute when providing - values, but instead exists for other parts of the system to reference. - - If *mutable* is set to False then the local value of the attribute on an - entity can only be set when both the existing local and remote values - are :attr:`ftrack_api.symbol.NOT_SET`. The exception to this is when the - target value is also :attr:`ftrack_api.symbol.NOT_SET`. 
- - ''' - super(Attribute, self).__init__() - self._name = name - self._mutable = mutable - self.default_value = default_value - - self._local_key = 'local' - self._remote_key = 'remote' - - def __repr__(self): - '''Return representation of entity.''' - return '<{0}.{1}({2}) object at {3}>'.format( - self.__module__, - self.__class__.__name__, - self.name, - id(self) - ) - - def get_entity_storage(self, entity): - '''Return attribute storage on *entity* creating if missing.''' - storage_key = '_ftrack_attribute_storage' - storage = getattr(entity, storage_key, None) - if storage is None: - storage = collections.defaultdict( - lambda: - { - self._local_key: ftrack_api.symbol.NOT_SET, - self._remote_key: ftrack_api.symbol.NOT_SET - } - ) - setattr(entity, storage_key, storage) - - return storage - - @property - def name(self): - '''Return name.''' - return self._name - - @property - def mutable(self): - '''Return whether attribute is mutable.''' - return self._mutable - - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - ''' - value = self.get_local_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - value = self.get_remote_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - if not entity.session.auto_populate: - return value - - self.populate_remote_value(entity) - return self.get_remote_value(entity) - - def get_local_value(self, entity): - '''Return locally set value for *entity*.''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._local_key] - - def get_remote_value(self, entity): - '''Return remote value for *entity*. - - .. note:: - - Only return locally stored remote value, do not fetch from remote. - - ''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._remote_key] - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if ( - not self.mutable - and self.is_set(entity) - and value is not ftrack_api.symbol.NOT_SET - ): - raise ftrack_api.exception.ImmutableAttributeError(self) - - old_value = self.get_local_value(entity) - - storage = self.get_entity_storage(entity) - storage[self.name][self._local_key] = value - - # Record operation. - if entity.session.record_operations: - entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity), - self.name, - old_value, - value - ) - ) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - storage = self.get_entity_storage(entity) - storage[self.name][self._remote_key] = value - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*.''' - entity.session.populate([entity], self.name) - - def is_modified(self, entity): - '''Return whether local value set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - return ( - local_value is not ftrack_api.symbol.NOT_SET - and local_value != remote_value - ) - - def is_set(self, entity): - '''Return whether a value is set for *entity*.''' - return any([ - self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, - self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET - ]) - - -class ScalarAttribute(Attribute): - '''Represent a scalar value.''' - - def __init__(self, name, data_type, **kw): - '''Initialise property.''' - super(ScalarAttribute, self).__init__(name, **kw) - self.data_type = data_type - - -class ReferenceAttribute(Attribute): - '''Reference another entity.''' - - def __init__(self, name, entity_type, **kw): - '''Initialise property.''' - super(ReferenceAttribute, self).__init__(name, **kw) - self.entity_type = entity_type - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*. - - As attribute references another entity, use that entity's configured - default projections to auto populate useful attributes when loading. - - ''' - reference_entity_type = entity.session.types[self.entity_type] - default_projections = reference_entity_type.default_projections - - projections = [] - if default_projections: - for projection in default_projections: - projections.append('{0}.{1}'.format(self.name, projection)) - else: - projections.append(self.name) - - entity.session.populate([entity], ', '.join(projections)) - - def is_modified(self, entity): - '''Return whether a local value has been set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. - - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - - if local_value is ftrack_api.symbol.NOT_SET: - return False - - if remote_value is ftrack_api.symbol.NOT_SET: - return True - - if ( - ftrack_api.inspection.identity(local_value) - != ftrack_api.inspection.identity(remote_value) - ): - return True - - return False - - - @merge_references - def get_value(self, entity): - return super(ReferenceAttribute, self).get_value( - entity - ) - -class AbstractCollectionAttribute(Attribute): - '''Base class for collection attributes.''' - - #: Collection class used by attribute. - collection_class = None - - @merge_references - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - .. note:: - - As value is a collection that is mutable, will transfer a remote - value into the local value on access if no local value currently - set. - - ''' - super(AbstractCollectionAttribute, self).get_value(entity) - - # Conditionally, copy remote value into local value so that it can be - # mutated without side effects. 
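# --- editor's note, not part of this diff: the conditional copy below is
# --- the usual copy-on-first-read trick for mutable values. The caller
# --- gets a private shallow copy, so mutations register as local changes
# --- instead of silently editing the cached remote state. In miniature:
#
#     if local is NOT_SET and isinstance(remote, collection_class):
#         local = copy.copy(remote)  # shallow: member entities stay shared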
- local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - if ( - local_value is ftrack_api.symbol.NOT_SET - and isinstance(remote_value, self.collection_class) - ): - try: - with entity.session.operation_recording(False): - self.set_local_value(entity, copy.copy(remote_value)) - except ftrack_api.exception.ImmutableAttributeError: - pass - - value = self.get_local_value(entity) - - # If the local value is still not set then attempt to set it with a - # suitable placeholder collection so that the caller can interact with - # the collection using its normal interface. This is required for a - # newly created entity for example. It *could* be done as a simple - # default value, but that would incur cost for every collection even - # when they are not modified before commit. - if value is ftrack_api.symbol.NOT_SET: - try: - with entity.session.operation_recording(False): - self.set_local_value( - entity, - # None should be treated as empty collection. - None - ) - except ftrack_api.exception.ImmutableAttributeError: - pass - - return self.get_local_value(entity) - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = self.mutable - - super(AbstractCollectionAttribute, self).set_local_value(entity, value) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = False - - super(AbstractCollectionAttribute, self).set_remote_value(entity, value) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to appropriate collection instance for *entity*. - - .. note:: - - If *value* is None then return a suitable empty collection. - - ''' - raise NotImplementedError() - - -class CollectionAttribute(AbstractCollectionAttribute): - '''Represent a collection of other entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api.collection.Collection - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to a Collection instance on *entity*.''' - - if not isinstance(value, ftrack_api.collection.Collection): - - if value is None: - value = ftrack_api.collection.Collection(entity, self) - - elif isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute' - ) - - return value - - -class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped key, value collection of entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy - - def __init__( - self, name, creator, key_attribute, value_attribute, **kw - ): - '''Initialise attribute with *name*. - - *creator* should be a function that accepts a dictionary of data and - is used by the referenced collection to create new entities in the - collection. - - *key_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'key' of the dictionary. 
-
- *value_attribute* should be the name of the attribute on an entity in
- the collection that represents the value for 'value' of the dictionary.
-
- '''
- self.creator = creator
- self.key_attribute = key_attribute
- self.value_attribute = value_attribute
-
- super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw)
-
- def _adapt_to_collection(self, entity, value):
- '''Adapt *value* to an *entity*.'''
- if not isinstance(
- value, ftrack_api.collection.KeyValueMappedCollectionProxy
- ):
-
- if value is None:
- value = ftrack_api.collection.KeyValueMappedCollectionProxy(
- ftrack_api.collection.Collection(entity, self),
- self.creator, self.key_attribute,
- self.value_attribute
- )
-
- elif isinstance(value, (list, ftrack_api.collection.Collection)):
-
- if isinstance(value, list):
- value = ftrack_api.collection.Collection(
- entity, self, data=value
- )
-
- value = ftrack_api.collection.KeyValueMappedCollectionProxy(
- value, self.creator, self.key_attribute,
- self.value_attribute
- )
-
- elif isinstance(value, collections.Mapping):
- # Convert mapping.
- # TODO: When backend model improves, revisit this logic.
- # First get existing value and delete all references. This is
- # needed because otherwise they will not be automatically
- # removed server side.
- # The following should not cause recursion as the internal
- # values should be mapped collections already.
- current_value = self.get_value(entity)
- if not isinstance(
- current_value,
- ftrack_api.collection.KeyValueMappedCollectionProxy
- ):
- raise NotImplementedError(
- 'Cannot adapt mapping to collection as current value '
- 'type is not a KeyValueMappedCollectionProxy.'
- )
-
- # Create the new collection using the existing collection as
- # basis. Then update through proxy interface to ensure all
- # internal operations called consistently (such as entity
- # deletion for key removal).
- collection = ftrack_api.collection.Collection(
- entity, self, data=current_value.collection[:]
- )
- collection_proxy = (
- ftrack_api.collection.KeyValueMappedCollectionProxy(
- collection, self.creator,
- self.key_attribute, self.value_attribute
- )
- )
-
- # Remove expired keys from collection.
- expired_keys = set(current_value.keys()) - set(value.keys())
- for key in expired_keys:
- del collection_proxy[key]
-
- # Set new values for existing keys / add new keys.
- for key, value in list(value.items()):
- collection_proxy[key] = value
-
- value = collection_proxy
-
- else:
- raise NotImplementedError(
- 'Cannot convert {0!r} to collection.'.format(value)
- )
- else:
- if value.attribute is not self:
- raise ftrack_api.exception.AttributeError(
- 'Collection already bound to a different attribute.'
- )
-
- return value
-
-
-class CustomAttributeCollectionAttribute(AbstractCollectionAttribute):
- '''Represent a mapped custom attribute collection of entities.'''
-
- #: Collection class used by attribute.
- collection_class = (
- ftrack_api.collection.CustomAttributeCollectionProxy
- )
-
- def _adapt_to_collection(self, entity, value):
- '''Adapt *value* to an *entity*.'''
- if not isinstance(
- value, ftrack_api.collection.CustomAttributeCollectionProxy
- ):
-
- if value is None:
- value = ftrack_api.collection.CustomAttributeCollectionProxy(
- ftrack_api.collection.Collection(entity, self)
- )
-
- elif isinstance(value, (list, ftrack_api.collection.Collection)):
-
- # Why are we creating a new collection if it is a list? This
- # will cause any merge to create a new proxy and collection.
- if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.CustomAttributeCollectionProxy( - value - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.CustomAttributeCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a MappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). - collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.CustomAttributeCollectionProxy( - collection - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in list(value.items()): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value diff --git a/pype/vendor/ftrack_api/cache.py b/pype/vendor/ftrack_api/cache.py deleted file mode 100644 index 42cccbaba2..0000000000 --- a/pype/vendor/ftrack_api/cache.py +++ /dev/null @@ -1,597 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''Caching framework. - -Defines a standardised :class:`Cache` interface for storing data against -specific keys. Key generation is also standardised using a :class:`KeyMaker` -interface. - -Combining a Cache and KeyMaker allows for memoisation of function calls with -respect to the arguments used by using a :class:`Memoiser`. - -As a convenience a simple :func:`memoise` decorator is included for quick -memoisation of function using a global cache and standard key maker. - -''' - -from future import standard_library -standard_library.install_aliases() -from builtins import str -from past.builtins import basestring -from builtins import object -import collections -import functools -import abc -import copy -import inspect -import re -try: - # Python 2.x - import anydbm -except ImportError: - import dbm as anydbm - - -import contextlib -from future.utils import with_metaclass -try: - import cPickle as pickle - -except: - import pickle - -import ftrack_api.inspection -import ftrack_api.symbol - - -class Cache(with_metaclass(abc.ABCMeta, object)): - '''Cache interface. - - Derive from this to define concrete cache implementations. A cache is - centered around the concept of key:value pairings where the key is unique - across the cache. - - ''' - - @abc.abstractmethod - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. 
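-
- A concrete subclass is expected to behave like the following sketch
- (using the :class:`MemoryCache` defined later in this module)::
-
- >>> cache = MemoryCache()
- >>> cache.set('key', 'value')
- >>> cache.get('key')
- 'value'
- >>> cache.remove('key')
- >>> cache.get('key')
- Traceback (most recent call last):
- ...
- KeyError: 'key'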
- - ''' - - @abc.abstractmethod - def set(self, key, value): - '''Set *value* for *key*.''' - - @abc.abstractmethod - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - raise NotImplementedError() # pragma: no cover - - def values(self): - '''Return values for current keys.''' - values = [] - for key in list(self.keys()): - try: - value = self.get(key) - except KeyError: - continue - else: - values.append(value) - - return values - - def clear(self, pattern=None): - '''Remove all keys matching *pattern*. - - *pattern* should be a regular expression string. - - If *pattern* is None then all keys will be removed. - - ''' - - if pattern is not None: - pattern = re.compile(pattern) - - for key in list(self.keys()): - if pattern is not None: - if not pattern.search(key): - continue - - try: - self.remove(key) - except KeyError: - pass - - -class ProxyCache(Cache): - '''Proxy another cache.''' - - def __init__(self, proxied): - '''Initialise cache with *proxied* cache instance.''' - self.proxied = proxied - super(ProxyCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.get(key) - - def set(self, key, value): - '''Set *value* for *key*.''' - return self.proxied.set(key, value) - - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.remove(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return list(self.proxied.keys()) - - -class LayeredCache(Cache): - '''Layered cache.''' - - def __init__(self, caches): - '''Initialise cache with *caches*.''' - super(LayeredCache, self).__init__() - self.caches = caches - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - Attempt to retrieve from cache layers in turn, starting with shallowest. - If value retrieved, then also set the value in each higher level cache - up from where retrieved. - - ''' - target_caches = [] - value = ftrack_api.symbol.NOT_SET - - for cache in self.caches: - try: - value = cache.get(key) - except KeyError: - target_caches.append(cache) - continue - else: - break - - if value is ftrack_api.symbol.NOT_SET: - raise KeyError(key) - - # Set value on all higher level caches. - for cache in target_caches: - cache.set(key, value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - for cache in self.caches: - cache.set(key, value) - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found in any layer. - - ''' - removed = False - for cache in self.caches: - try: - cache.remove(key) - except KeyError: - pass - else: - removed = True - - if not removed: - raise KeyError(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. 
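-
- Keys are aggregated across all layers, for example (a sketch)::
-
- >>> layered = LayeredCache([MemoryCache(), MemoryCache()])
- >>> layered.caches[0].set('a', 1)
- >>> layered.caches[1].set('b', 2)
- >>> sorted(layered.keys())
- ['a', 'b']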
- - ''' - keys = [] - for cache in self.caches: - keys.extend(list(cache.keys())) - - return list(set(keys)) - - -class MemoryCache(Cache): - '''Memory based cache.''' - - def __init__(self): - '''Initialise cache.''' - self._cache = {} - super(MemoryCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self._cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - self._cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - del self._cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return list(self._cache.keys()) - - -class FileCache(Cache): - '''File based cache that uses :mod:`anydbm` module. - - .. note:: - - No locking of the underlying file is performed. - - ''' - - def __init__(self, path): - '''Initialise cache at *path*.''' - self.path = path - - # Initialise cache. - cache = anydbm.open(self.path, 'c') - cache.close() - - super(FileCache, self).__init__() - - @contextlib.contextmanager - def _database(self): - '''Yield opened database file.''' - cache = anydbm.open(self.path, 'w') - try: - yield cache - finally: - cache.close() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - return cache[key.encode('ascii')].decode('utf-8') - - def set(self, key, value): - '''Set *value* for *key*.''' - with self._database() as cache: - cache[key.encode('ascii')] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - del cache[key.encode('ascii')] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - with self._database() as cache: - return [s.decode('utf-8') for s in cache.keys()] - #return list(map(str, cache.keys())) - - -class SerialisedCache(ProxyCache): - '''Proxied cache that stores values as serialised data.''' - - def __init__(self, proxied, encode=None, decode=None): - '''Initialise cache with *encode* and *decode* callables. - - *proxied* is the underlying cache to use for storage. - - ''' - self.encode = encode - self.decode = decode - super(SerialisedCache, self).__init__(proxied) - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. 
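-
- For example, with JSON codecs (a sketch; any matching encode / decode
- pair works)::
-
- >>> import json
- >>> cache = SerialisedCache(
- ... MemoryCache(), encode=json.dumps, decode=json.loads
- ... )
- >>> cache.set('key', {'a': 1})
- >>> cache.get('key') == {'a': 1}
- True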
- - ''' - value = super(SerialisedCache, self).get(key) - if self.decode: - value = self.decode(value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - if self.encode: - value = self.encode(value) - - super(SerialisedCache, self).set(key, value) - - -class KeyMaker(with_metaclass(abc.ABCMeta, object)): - '''Generate unique keys.''' - - def __init__(self): - '''Initialise key maker.''' - super(KeyMaker, self).__init__() - self.item_separator = '' - - def key(self, *items): - '''Return key for *items*.''' - keys = [] - for item in items: - keys.append(self._key(item)) - - return self.item_separator.join(keys) - - @abc.abstractmethod - def _key(self, obj): - '''Return key for *obj*.''' - - -class StringKeyMaker(KeyMaker): - '''Generate string key.''' - - def _key(self, obj): - '''Return key for *obj*.''' - return str(obj) - - -class ObjectKeyMaker(KeyMaker): - '''Generate unique keys for objects.''' - - def __init__(self): - '''Initialise key maker.''' - super(ObjectKeyMaker, self).__init__() - self.item_separator = b'\0' - self.mapping_identifier = b'\1' - self.mapping_pair_separator = b'\2' - self.iterable_identifier = b'\3' - self.name_identifier = b'\4' - - def _key(self, item): - return self.__key(item) - - def __key(self, item): - '''Return key for *item*. - - Returned key will be a pickle like string representing the *item*. This - allows for typically non-hashable objects to be used in key generation - (such as dictionaries). - - If *item* is iterable then each item in it shall also be passed to this - method to ensure correct key generation. - - Special markers are used to distinguish handling of specific cases in - order to ensure uniqueness of key corresponds directly to *item*. - - Example:: - - >>> key_maker = ObjectKeyMaker() - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... return x + y - ... - >>> key_maker.key(add, (1, 2)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' - >>> key_maker.key(add, (1, 3)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' - - ''' - - - # TODO: Consider using a more robust and comprehensive solution such as - # dill (https://github.com/uqfoundation/dill). - if isinstance(item, collections.Iterable): - - if isinstance(item, basestring): - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - if isinstance(item, collections.Mapping): - contents = self.item_separator.join([ - ( - self._key(key) + - self.mapping_pair_separator + - self._key(value) - ) - for key, value in sorted(item.items()) - ]) - - return ( - self.mapping_identifier + - contents + - self.mapping_identifier - ) - else: - contents = self.item_separator.join([ - self._key(item) for item in item - ]) - return ( - self.iterable_identifier + - contents + - self.iterable_identifier - ) - - elif inspect.ismethod(item): - - return b''.join(( - self.name_identifier, - item.__name__.encode(), - self.item_separator, - item.__self__.__class__.__name__.encode(), - self.item_separator, - item.__module__.encode() - )) - - elif inspect.isfunction(item) or inspect.isclass(item): - return b''.join(( - self.name_identifier, - item.__name__.encode(), - self.item_separator, - item.__module__.encode() - )) - - elif inspect.isbuiltin(item): - return self.name_identifier + item.__name__.encode() - - else: - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - -class Memoiser(object): - '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. 
- - Example:: - - >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... print 'Called' - ... return x + y - ... - >>> memoiser.call(add, (1, 2), {}) - Called - >>> memoiser.call(add, (1, 2), {}) - >>> memoiser.call(add, (1, 3), {}) - Called - - ''' - - def __init__(self, cache=None, key_maker=None, return_copies=True): - '''Initialise with *cache* and *key_maker* to use. - - If *cache* is not specified a default :class:`MemoryCache` will be - used. Similarly, if *key_maker* is not specified a default - :class:`ObjectKeyMaker` will be used. - - If *return_copies* is True then all results returned from the cache will - be deep copies to avoid indirect mutation of cached values. - - ''' - self.cache = cache - if self.cache is None: - self.cache = MemoryCache() - - self.key_maker = key_maker - if self.key_maker is None: - self.key_maker = ObjectKeyMaker() - - self.return_copies = return_copies - super(Memoiser, self).__init__() - - def call(self, function, args=None, kw=None): - '''Call *function* with *args* and *kw* and return result. - - If *function* was previously called with exactly the same arguments - then return cached result if available. - - Store result for call in cache. - - ''' - if args is None: - args = () - - if kw is None: - kw = {} - - # Support arguments being passed as positionals or keywords. - arguments = inspect.getcallargs(function, *args, **kw) - - key = self.key_maker.key(function, arguments) - try: - value = self.cache.get(key) - - except KeyError: - value = function(*args, **kw) - self.cache.set(key, value) - - # If requested, deep copy value to return in order to avoid cached value - # being inadvertently altered by the caller. - if self.return_copies: - value = copy.deepcopy(value) - - return value - - -def memoise_decorator(memoiser): - '''Decorator to memoise function calls using *memoiser*.''' - def outer(function): - - @functools.wraps(function) - def inner(*args, **kw): - return memoiser.call(function, args, kw) - - return inner - - return outer - - -#: Default memoiser. -memoiser = Memoiser() - -#: Default memoise decorator using standard cache and key maker. -memoise = memoise_decorator(memoiser) diff --git a/pype/vendor/ftrack_api/collection.py b/pype/vendor/ftrack_api/collection.py deleted file mode 100644 index c86e8dd49a..0000000000 --- a/pype/vendor/ftrack_api/collection.py +++ /dev/null @@ -1,514 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -from builtins import str -import logging - -import collections -import copy - -import ftrack_api.exception -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.operation -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Collection(collections.MutableSequence): - '''A collection of entities.''' - - def __init__(self, entity, attribute, mutable=True, data=None): - '''Initialise collection.''' - self.entity = entity - self.attribute = attribute - self._data = [] - self._identities = set() - - # Set initial dataset. - # Note: For initialisation, immutability is deferred till after initial - # population as otherwise there would be no public way to initialise an - # immutable collection. The reason self._data is not just set directly - # is to ensure other logic can be applied without special handling. 
- self.mutable = True - try: - if data is None: - data = [] - - with self.entity.session.operation_recording(False): - self.extend(data) - finally: - self.mutable = mutable - - def _identity_key(self, entity): - '''Return identity key for *entity*.''' - return str(ftrack_api.inspection.identity(entity)) - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying data store. - - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance._data = copy.copy(self._data) - copied_instance._identities = copy.copy(self._identities) - - return copied_instance - - def _notify(self, old_value): - '''Notify about modification.''' - # Record operation. - if self.entity.session.record_operations: - self.entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - self.entity.entity_type, - ftrack_api.inspection.primary_key(self.entity), - self.attribute.name, - old_value, - self - ) - ) - - def insert(self, index, item): - '''Insert *item* at *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - if item in self: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - self._data.insert(index, item) - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __contains__(self, value): - '''Return whether *value* present in collection.''' - return self._identity_key(value) in self._identities - - def __getitem__(self, index): - '''Return item at *index*.''' - return self._data[index] - - def __setitem__(self, index, item): - '''Set *item* against *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - try: - existing_index = self.index(item) - except ValueError: - pass - else: - if index != existing_index: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - try: - existing_item = self._data[index] - except IndexError: - pass - else: - self._identities.remove(self._identity_key(existing_item)) - - self._data[index] = item - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __delitem__(self, index): - '''Remove item at *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - old_value = copy.copy(self) - item = self._data[index] - del self._data[index] - self._identities.remove(self._identity_key(item)) - self._notify(old_value) - - def __len__(self): - '''Return count of items.''' - return len(self._data) - - def __eq__(self, other): - '''Return whether this collection is equal to *other*.''' - if not isinstance(other, Collection): - return False - - return sorted(self._identities) == sorted(other._identities) - - def __ne__(self, other): - '''Return whether this collection is not equal to *other*.''' - return not self == other - - -class MappedCollectionProxy(collections.MutableMapping): - '''Common base class for mapped collection of entities.''' - - def __init__(self, collection): - '''Initialise proxy for *collection*.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.collection = collection - super(MappedCollectionProxy, self).__init__() - - def __copy__(self): - '''Return shallow copy. - - .. 
note::
-
- To maintain expectations on usage, the shallow copy will include a
- shallow copy of the underlying collection.
-
- '''
- cls = self.__class__
- copied_instance = cls.__new__(cls)
- copied_instance.__dict__.update(self.__dict__)
- copied_instance.collection = copy.copy(self.collection)
-
- return copied_instance
-
- @property
- def mutable(self):
- '''Return whether collection is mutable.'''
- return self.collection.mutable
-
- @mutable.setter
- def mutable(self, value):
- '''Set whether collection is mutable to *value*.'''
- self.collection.mutable = value
-
- @property
- def attribute(self):
- '''Return attribute bound to.'''
- return self.collection.attribute
-
- @attribute.setter
- def attribute(self, value):
- '''Set bound attribute to *value*.'''
- self.collection.attribute = value
-
-
-class KeyValueMappedCollectionProxy(MappedCollectionProxy):
- '''A mapped collection of key, value entities.
-
- Proxy a standard :class:`Collection` as a mapping where certain attributes
- from the entities in the collection are mapped to key, value pairs.
-
- For example::
-
- >>> collection = [Metadata(key='foo', value='bar'), ...]
- >>> mapped = KeyValueMappedCollectionProxy(
- ... collection, create_metadata,
- ... key_attribute='key', value_attribute='value'
- ... )
- >>> print mapped['foo']
- 'bar'
- >>> mapped['bam'] = 'biz'
- >>> print mapped.collection[-1]
- Metadata(key='bam', value='biz')
-
- '''
-
- def __init__(
- self, collection, creator, key_attribute, value_attribute
- ):
- '''Initialise collection.'''
- self.creator = creator
- self.key_attribute = key_attribute
- self.value_attribute = value_attribute
- super(KeyValueMappedCollectionProxy, self).__init__(collection)
-
- def _get_entity_by_key(self, key):
- '''Return entity instance with matching *key* from collection.'''
- for entity in self.collection:
- if entity[self.key_attribute] == key:
- return entity
-
- raise KeyError(key)
-
- def __getitem__(self, key):
- '''Return value for *key*.'''
- entity = self._get_entity_by_key(key)
- return entity[self.value_attribute]
-
- def __setitem__(self, key, value):
- '''Set *value* for *key*.'''
- try:
- entity = self._get_entity_by_key(key)
- except KeyError:
- data = {
- self.key_attribute: key,
- self.value_attribute: value
- }
- entity = self.creator(self, data)
-
- if (
- ftrack_api.inspection.state(entity) is
- ftrack_api.symbol.CREATED
- ):
- # Persisting this entity will be handled here, record the
- # operation.
- self.collection.append(entity)
-
- else:
- # The entity is created and persisted separately by the
- # creator. Do not record this operation.
- with self.collection.entity.session.operation_recording(False):
- # Do not record this operation since it will trigger
- # redundant and potentially failing operations.
- self.collection.append(entity)
-
- else:
- entity[self.value_attribute] = value
-
- def __delitem__(self, key):
- '''Remove and delete *key*.
-
- .. note::
-
- The associated entity will be deleted as well.
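-
- Building on the mapped metadata example above (a sketch)::
-
- >>> del mapped['foo'] # also marks the Metadata entity for deletion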
-
- '''
- for index, entity in enumerate(self.collection):
- if entity[self.key_attribute] == key:
- break
- else:
- raise KeyError(key)
-
- del self.collection[index]
- entity.session.delete(entity)
-
- def __iter__(self):
- '''Iterate over all keys.'''
- keys = set()
- for entity in self.collection:
- keys.add(entity[self.key_attribute])
-
- return iter(keys)
-
- def __len__(self):
- '''Return count of keys.'''
- keys = set()
- for entity in self.collection:
- keys.add(entity[self.key_attribute])
-
- return len(keys)
-
- def keys(self):
- # COMPAT for unit tests..
- return list(super(
- KeyValueMappedCollectionProxy, self
- ).keys())
-
-
-class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker):
- '''Generate key for session.'''
-
- def _key(self, obj):
- '''Return key for *obj*.'''
- if isinstance(obj, dict):
- session = obj.get('session')
- if session is not None:
- # Key by session only.
- return str(id(session))
-
- return str(obj)
-
-
-#: Memoiser for use with callables that should be called once per session.
-memoise_session = ftrack_api.cache.memoise_decorator(
- ftrack_api.cache.Memoiser(
- key_maker=PerSessionDefaultKeyMaker(), return_copies=False
- )
-)
-
-
-@memoise_session
-def _get_custom_attribute_configurations(session):
- '''Return list of custom attribute configurations.
-
- The configuration objects will have key, project_id, id and object_type_id
- populated.
-
- '''
- return session.query(
- 'select key, project_id, id, object_type_id, entity_type from '
- 'CustomAttributeConfiguration'
- ).all()
-
-
-class CustomAttributeCollectionProxy(MappedCollectionProxy):
- '''A mapped collection of custom attribute value entities.'''
-
- def __init__(
- self, collection
- ):
- '''Initialise collection.'''
- self.key_attribute = 'configuration_id'
- self.value_attribute = 'value'
- super(CustomAttributeCollectionProxy, self).__init__(collection)
-
- def _get_entity_configurations(self):
- '''Return all configurations for current collection entity.'''
- entity = self.collection.entity
- entity_type = None
- project_id = None
- object_type_id = None
-
- if 'object_type_id' in list(entity.keys()):
- project_id = entity['project_id']
- entity_type = 'task'
- object_type_id = entity['object_type_id']
-
- if entity.entity_type == 'AssetVersion':
- project_id = entity['asset']['parent']['project_id']
- entity_type = 'assetversion'
-
- if entity.entity_type == 'Asset':
- project_id = entity['parent']['project_id']
- entity_type = 'asset'
-
- if entity.entity_type == 'Project':
- project_id = entity['id']
- entity_type = 'show'
-
- if entity.entity_type == 'User':
- entity_type = 'user'
-
- if entity_type is None:
- raise ValueError(
- 'Entity {!r} not supported.'.format(entity)
- )
-
- configurations = []
- for configuration in _get_custom_attribute_configurations(
- entity.session
- ):
- if (
- configuration['entity_type'] == entity_type and
- configuration['project_id'] in (project_id, None) and
- configuration['object_type_id'] == object_type_id
- ):
- configurations.append(configuration)
-
- # Return with global configurations at the end of the list. This is done
- # so that global configurations are shadowed by project specific ones if
- # the configurations list is looped when looking for a matching `key`.
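- # For example, a project specific and a global configuration sharing a
- # key sort as [project_specific, global]: the key function below
- # evaluates False (0) for the former and True (1) for the latter.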
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - def _get_keys(self): - '''Return a list of all keys.''' - keys = [] - for configuration in self._get_entity_configurations(): - keys.append(configuration['key']) - - return keys - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - configuration_id = self.get_configuration_id_from_key(key) - for entity in self.collection: - if entity[self.key_attribute] == configuration_id: - return entity - - return None - - def get_configuration_id_from_key(self, key): - '''Return id of configuration with matching *key*. - - Raise :exc:`KeyError` if no configuration with matching *key* found. - - ''' - for configuration in self._get_entity_configurations(): - if key == configuration['key']: - return configuration['id'] - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - - if entity: - return entity[self.value_attribute] - - for configuration in self._get_entity_configurations(): - if configuration['key'] == key: - return configuration['default'] - - raise KeyError(key) - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - custom_attribute_value[self.value_attribute] = value - else: - entity = self.collection.entity - session = entity.session - data = { - self.key_attribute: self.get_configuration_id_from_key(key), - self.value_attribute: value, - 'entity_id': entity['id'] - } - - # Make sure to use the currently active collection. This is - # necessary since a merge might have replaced the current one. - self.collection.entity['custom_attributes'].collection.append( - session.create('CustomAttributeValue', data) - ) - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. 
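-
- For example (attribute key illustrative)::
-
- >>> del task['custom_attributes']['fstart']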
- - ''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - index = self.collection.index(custom_attribute_value) - del self.collection[index] - - custom_attribute_value.session.delete(custom_attribute_value) - else: - self.logger.warning(L( - 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', - key, self.collection.entity - )) - - def __eq__(self, collection): - '''Return True if *collection* equals proxy collection.''' - if collection is ftrack_api.symbol.NOT_SET: - return False - - return collection.collection == self.collection - - def __iter__(self): - '''Iterate over all keys.''' - keys = self._get_keys() - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = self._get_keys() - return len(keys) diff --git a/pype/vendor/ftrack_api/data.py b/pype/vendor/ftrack_api/data.py deleted file mode 100644 index 4992cd7ac2..0000000000 --- a/pype/vendor/ftrack_api/data.py +++ /dev/null @@ -1,133 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -from builtins import object -import os -from abc import ABCMeta, abstractmethod -import tempfile -from future.utils import with_metaclass - - -class Data(with_metaclass(ABCMeta, object)): - '''File-like object for manipulating data.''' - - def __init__(self): - '''Initialise data access.''' - self.closed = False - - @abstractmethod - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - @abstractmethod - def write(self, content): - '''Write content at current position.''' - - def flush(self): - '''Flush buffers ensuring data written.''' - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*. - - The *whence* argument is optional and defaults to os.SEEK_SET or 0 - (absolute file positioning); other values are os.SEEK_CUR or 1 - (seek relative to the current position) and os.SEEK_END or 2 - (seek relative to the file's end). - - ''' - raise NotImplementedError('Seek not supported.') - - def tell(self): - '''Return current position of internal pointer.''' - raise NotImplementedError('Tell not supported.') - - def close(self): - '''Flush buffers and prevent further access.''' - self.flush() - self.closed = True - - -class FileWrapper(Data): - '''Data wrapper for Python file objects.''' - - def __init__(self, wrapped_file): - '''Initialise access to *wrapped_file*.''' - self.wrapped_file = wrapped_file - self._read_since_last_write = False - super(FileWrapper, self).__init__() - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - self._read_since_last_write = True - - if limit is None: - limit = -1 - - return self.wrapped_file.read(limit) - - def write(self, content): - '''Write content at current position.''' - if self._read_since_last_write: - # Windows requires a seek before switching from read to write. 
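- # Seeking to the current offset satisfies that requirement without
- # moving the file position.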
- self.seek(self.tell())
-
- self.wrapped_file.write(content)
- self._read_since_last_write = False
-
- def flush(self):
- '''Flush buffers ensuring data written.'''
- super(FileWrapper, self).flush()
- if hasattr(self.wrapped_file, 'flush'):
- self.wrapped_file.flush()
-
- def seek(self, offset, whence=os.SEEK_SET):
- '''Move internal pointer by *offset*.'''
- self.wrapped_file.seek(offset, whence)
-
- def tell(self):
- '''Return current position of internal pointer.'''
- return self.wrapped_file.tell()
-
- def close(self):
- '''Flush buffers and prevent further access.'''
- if not self.closed:
- super(FileWrapper, self).close()
- if hasattr(self.wrapped_file, 'close'):
- self.wrapped_file.close()
-
-
-class File(FileWrapper):
- '''Data wrapper accepting filepath.'''
-
- def __init__(self, path, mode='rb'):
- '''Open file at *path* with *mode*.'''
- file_object = open(path, mode)
- super(File, self).__init__(file_object)
-
-
-class String(FileWrapper):
- '''Data wrapper using TemporaryFile instance.'''
-
- def __init__(self, content=None):
- '''Initialise data with *content*.'''
- super(String, self).__init__(
- tempfile.TemporaryFile()
- )
-
- if content is not None:
- self.wrapped_file.write(content.encode())
- self.wrapped_file.seek(0)
-
-
- def write(self, content):
- if not isinstance(content, bytes):
- content = content.encode()
-
- super(String, self).write(
- content
- )
-
- def read(self, limit=None):
- return super(String, self).read(
- limit
- ).decode('utf-8')
\ No newline at end of file
diff --git a/pype/vendor/ftrack_api/entity/__init__.py b/pype/vendor/ftrack_api/entity/__init__.py
deleted file mode 100644
index 1d452f2828..0000000000
--- a/pype/vendor/ftrack_api/entity/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
\ No newline at end of file
diff --git a/pype/vendor/ftrack_api/entity/asset_version.py b/pype/vendor/ftrack_api/entity/asset_version.py
deleted file mode 100644
index 859d94e436..0000000000
--- a/pype/vendor/ftrack_api/entity/asset_version.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-import ftrack_api.entity.base
-
-
-class AssetVersion(ftrack_api.entity.base.Entity):
- '''Represent asset version.'''
-
- def create_component(
- self, path, data=None, location=None
- ):
- '''Create a new component from *path* with additional *data*
-
- .. note::
-
- This is a helper method. To create components manually use the
- standard :meth:`Session.create` method.
-
- *path* can be a string representing a filesystem path to the data to
- use for the component. The *path* can also be specified as a sequence
- string, in which case a sequence component with child components for
- each item in the sequence will be created automatically. The accepted
- format for a sequence is '{head}{padding}{tail} [{ranges}]'. For
- example::
-
- '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]'
-
- .. seealso::
-
- `Clique documentation <http://clique.readthedocs.org>`_
-
- *data* should be a dictionary of any additional data to construct the
- component with (as passed to :meth:`Session.create`). This version is
- automatically set as the component's version.
-
- If *location* is specified then automatically add component to that
- location.
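-
- A minimal sketch (path and component name illustrative)::
-
- >>> component = asset_version.create_component(
- ... '/path/to/preview.mov', data={'name': 'preview'}
- ... )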
-
- '''
- if data is None:
- data = {}
-
- data.pop('version_id', None)
- data['version'] = self
-
- return self.session.create_component(path, data=data, location=location)
-
- def encode_media(self, media, keep_original='auto'):
- '''Return a new Job that encodes *media* to make it playable in browsers.
-
- *media* can be a path to a file or a FileComponent in the ftrack.server
- location.
-
- The job will encode *media* based on the file type and the job data
- contains information about the encoding in the following format::
-
- {
- 'output': [{
- 'format': 'video/mp4',
- 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74'
- }, {
- 'format': 'image/jpeg',
- 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b'
- }],
- 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294',
- 'keep_original': True
- }
-
- The output components are associated with the job via the job_components
- relation.
-
- An image component will always be generated if possible, and will be
- set as the version's thumbnail.
-
- The new components will automatically be associated with the version.
- A server version of 3.3.32 or higher is required for this to function
- properly.
-
- If *media* is a file path, a new source component will be created and
- added to the ftrack server location and a call to :meth:`commit` will be
- issued. If *media* is a FileComponent, it will be assumed to be
- available in the ftrack.server location.
-
- If *keep_original* is not set, the original media will be kept if it
- is a FileComponent, and deleted if it is a file path. You can specify
- True or False to change this behavior.
- '''
- return self.session.encode_media(
- media, version_id=self['id'], keep_original=keep_original
- )
diff --git a/pype/vendor/ftrack_api/entity/base.py b/pype/vendor/ftrack_api/entity/base.py
deleted file mode 100644
index 4932372214..0000000000
--- a/pype/vendor/ftrack_api/entity/base.py
+++ /dev/null
@@ -1,407 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-from __future__ import absolute_import
-
-from builtins import str
-import abc
-import collections
-import logging
-
-import ftrack_api.symbol
-import ftrack_api.attribute
-import ftrack_api.inspection
-import ftrack_api.exception
-import ftrack_api.operation
-from ftrack_api.logging import LazyLogMessage as L
-from future.utils import with_metaclass
-
-
-class _EntityBase(object):
- '''Base class to allow for mixins, we need a common base.'''
- pass
-
-
-class DynamicEntityTypeMetaclass(abc.ABCMeta):
- '''Custom metaclass to customise representation of dynamic classes.
-
- .. note::
-
- Derive from same metaclass as derived bases to avoid conflicts.
-
- '''
- def __repr__(self):
- '''Return representation of class.'''
- return '<dynamic ftrack class \'{0}\'>'.format(self.__name__)
-
-
-class Entity(with_metaclass(DynamicEntityTypeMetaclass, _EntityBase, collections.MutableMapping)):
- '''Base class for all entities.'''
-
- entity_type = 'Entity'
- attributes = None
- primary_key_attributes = None
- default_projections = None
-
- def __init__(self, session, data=None, reconstructing=False):
- '''Initialise entity.
-
- *session* is an instance of :class:`ftrack_api.session.Session` that
- this entity instance is bound to.
-
- *data* is a mapping of key, value pairs to apply as initial attribute
- values.
-
- *reconstructing* indicates whether this entity is being reconstructed,
- such as from a query, and therefore should not have any special creation
- logic applied, such as initialising defaults for missing data.
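-
- Instances are normally obtained through the session rather than
- constructed directly, for example (entity type and data
- illustrative)::
-
- >>> task = session.create('Task', {'name': 'compositing'})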
- - ''' - super(Entity, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.session = session - self._inflated = set() - - if data is None: - data = {} - - self.logger.debug(L( - '{0} entity from {1!r}.', - ('Reconstructing' if reconstructing else 'Constructing'), data - )) - - self._ignore_data_keys = ['__entity_type__'] - if not reconstructing: - self._construct(data) - else: - self._reconstruct(data) - - def _construct(self, data): - '''Construct from *data*.''' - # Suspend operation recording so that all modifications can be applied - # in single create operation. In addition, recording a modification - # operation requires a primary key which may not be available yet. - - relational_attributes = dict() - - with self.session.operation_recording(False): - # Set defaults for any unset local attributes. - for attribute in self.__class__.attributes: - if attribute.name not in data: - default_value = attribute.default_value - if callable(default_value): - default_value = default_value(self) - - attribute.set_local_value(self, default_value) - - - # Data represents locally set values. - for key, value in list(data.items()): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such ' - 'attribute found on entity {1!r}.', key, self - )) - continue - - if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - relational_attributes.setdefault( - attribute, value - ) - - else: - attribute.set_local_value(self, value) - - # Record create operation. - # Note: As this operation is recorded *before* any Session.merge takes - # place there is the possibility that the operation will hold references - # to outdated data in entity_data. However, this would be unusual in - # that it would mean the same new entity was created twice and only one - # altered. Conversely, if this operation were recorded *after* - # Session.merge took place, any cache would not be able to determine - # the status of the entity, which could be important if the cache should - # not store newly created entities that have not yet been persisted. Out - # of these two 'evils' this approach is deemed the lesser at this time. - # A third, more involved, approach to satisfy both might be to record - # the operation with a PENDING entity_data value and then update with - # merged values post merge. - if self.session.record_operations: - entity_data = {} - - # Lower level API used here to avoid including any empty - # collections that are automatically generated on access. - for attribute in self.attributes: - value = attribute.get_local_value(self) - if value is not ftrack_api.symbol.NOT_SET: - entity_data[attribute.name] = value - - self.session.recorded_operations.push( - ftrack_api.operation.CreateEntityOperation( - self.entity_type, - ftrack_api.inspection.primary_key(self), - entity_data - ) - ) - - for attribute, value in list(relational_attributes.items()): - # Finally we set values for "relational" attributes, we need - # to do this at the end in order to get the create operations - # in the correct order as the newly created attributes might - # contain references to the newly created entity. - - attribute.set_local_value( - self, value - ) - - def _reconstruct(self, data): - '''Reconstruct from *data*.''' - # Data represents remote values. 
- for key, value in list(data.items()):
- if key in self._ignore_data_keys:
- continue
-
- attribute = self.__class__.attributes.get(key)
- if attribute is None:
- self.logger.debug(L(
- 'Cannot populate {0!r} attribute as no such attribute '
- 'found on entity {1!r}.', key, self
- ))
- continue
-
- attribute.set_remote_value(self, value)
-
- def __repr__(self):
- '''Return representation of instance.'''
- return '<dynamic ftrack {0} object {1}>'.format(
- self.__class__.__name__, id(self)
- )
-
- def __str__(self):
- '''Return string representation of instance.'''
- with self.session.auto_populating(False):
- primary_key = ['Unknown']
- try:
- primary_key = list(ftrack_api.inspection.primary_key(self).values())
- except KeyError:
- pass
-
- return '<{0}({1})>'.format(
- self.__class__.__name__, ', '.join(primary_key)
- )
-
- def __hash__(self):
- '''Return hash representing instance.'''
- return hash(str(ftrack_api.inspection.identity(self)))
-
- def __eq__(self, other):
- '''Return whether *other* is equal to this instance.
-
- .. note::
-
- Equality is determined by both instances having the same identity.
- Values of attributes are not considered.
-
- '''
- try:
- return (
- ftrack_api.inspection.identity(other)
- == ftrack_api.inspection.identity(self)
- )
- except (AttributeError, KeyError):
- return False
-
- def __getitem__(self, key):
- '''Return attribute value for *key*.'''
- attribute = self.__class__.attributes.get(key)
- if attribute is None:
- raise KeyError(key)
-
- return attribute.get_value(self)
-
- def __setitem__(self, key, value):
- '''Set attribute *value* for *key*.'''
- attribute = self.__class__.attributes.get(key)
- if attribute is None:
- raise KeyError(key)
-
- attribute.set_local_value(self, value)
-
- def __delitem__(self, key):
- '''Clear attribute value for *key*.
-
- .. note::
-
- Will not remove the attribute, but instead clear any local value
- and revert to the last known server value.
-
- '''
- attribute = self.__class__.attributes.get(key)
- attribute.set_local_value(self, ftrack_api.symbol.NOT_SET)
-
- def __iter__(self):
- '''Iterate over all attribute keys.'''
- for attribute in self.__class__.attributes:
- yield attribute.name
-
- def __len__(self):
- '''Return count of attributes.'''
- return len(self.__class__.attributes)
-
- def values(self):
- '''Return list of values.'''
- if self.session.auto_populate:
- self._populate_unset_scalar_attributes()
-
- return list(super(Entity, self).values())
-
- def items(self):
- '''Return list of tuples of (key, value) pairs.
-
- .. note::
-
- Will fetch all values from the server if not already fetched or set
- locally.
-
- '''
- if self.session.auto_populate:
- self._populate_unset_scalar_attributes()
-
- return list(super(Entity, self).items())
-
- def clear(self):
- '''Reset all locally modified attribute values.'''
- for attribute in self:
- del self[attribute]
-
- def merge(self, entity, merged=None):
- '''Merge *entity* attribute values and other data into this entity.
-
- Only merge values from *entity* that are not
- :attr:`ftrack_api.symbol.NOT_SET`.
-
- Return a list of changes made with each change being a mapping with
- the keys:
-
- * type - Either 'remote_attribute', 'local_attribute' or 'property'.
- * name - The name of the attribute / property modified.
- * old_value - The previous value.
- * new_value - The new merged value.
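-
- For example, a merge that updates a remote attribute might yield::
-
- [{
- 'type': 'remote_attribute',
- 'name': 'name',
- 'old_value': 'old name',
- 'new_value': 'new name'
- }]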
- - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' - changes = [] - - # Attributes. - - # Prioritise by type so that scalar values are set first. This should - # guarantee that the attributes making up the identity of the entity - # are merged before merging any collections that may have references to - # this entity. - attributes = collections.deque() - for attribute in entity.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - attributes.appendleft(attribute) - else: - attributes.append(attribute) - - for other_attribute in attributes: - attribute = self.attributes.get(other_attribute.name) - - # Local attributes. - other_local_value = other_attribute.get_local_value(entity) - if other_local_value is not ftrack_api.symbol.NOT_SET: - local_value = attribute.get_local_value(self) - if local_value != other_local_value: - merged_local_value = self.session.merge( - other_local_value, merged=merged - ) - - attribute.set_local_value(self, merged_local_value) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_local_value - }) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # Remote attributes. - other_remote_value = other_attribute.get_remote_value(entity) - if other_remote_value is not ftrack_api.symbol.NOT_SET: - remote_value = attribute.get_remote_value(self) - if remote_value != other_remote_value: - merged_remote_value = self.session.merge( - other_remote_value, merged=merged - ) - - attribute.set_remote_value( - self, merged_remote_value - ) - - changes.append({ - 'type': 'remote_attribute', - 'name': attribute.name, - 'old_value': remote_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # We need to handle collections separately since - # they may store a local copy of the remote attribute - # even though it may not be modified. - if not isinstance( - attribute, ftrack_api.attribute.AbstractCollectionAttribute - ): - continue - - local_value = attribute.get_local_value( - self - ) - - # Populated but not modified, update it. 
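- # Otherwise the stale local copy would keep shadowing the freshly
- # merged remote value on subsequent access.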
- if (
- local_value is not ftrack_api.symbol.NOT_SET and
- local_value == remote_value
- ):
- attribute.set_local_value(
- self, merged_remote_value
- )
- changes.append({
- 'type': 'local_attribute',
- 'name': attribute.name,
- 'old_value': local_value,
- 'new_value': merged_remote_value
- })
-
- log_debug and self.logger.debug(
- log_message.format(**changes[-1])
- )
-
- return changes
-
- def _populate_unset_scalar_attributes(self):
- '''Populate all unset scalar attributes in one query.'''
- projections = []
- for attribute in self.attributes:
- if isinstance(attribute, ftrack_api.attribute.ScalarAttribute):
- if attribute.get_remote_value(self) is ftrack_api.symbol.NOT_SET:
- projections.append(attribute.name)
-
- if projections:
- self.session.populate([self], ', '.join(projections))
diff --git a/pype/vendor/ftrack_api/entity/component.py b/pype/vendor/ftrack_api/entity/component.py
deleted file mode 100644
index c08ff7918b..0000000000
--- a/pype/vendor/ftrack_api/entity/component.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-from builtins import object
-import ftrack_api.entity.base
-
-
-class Component(ftrack_api.entity.base.Entity):
- '''Represent a component.'''
-
- def get_availability(self, locations=None):
- '''Return availability in *locations*.
-
- If *locations* is None, all known locations will be checked.
-
- Return a dictionary of {location_id:percentage_availability}
-
- '''
- return self.session.get_component_availability(
- self, locations=locations
- )
-
-
-class CreateThumbnailMixin(object):
- '''Mixin to add create_thumbnail method on entity class.'''
-
- def create_thumbnail(self, path, data=None):
- '''Set entity thumbnail from *path*.
-
- Creates a thumbnail component in the ftrack.server location using
- :meth:`Session.create_component
- <ftrack_api.session.Session.create_component>`. The thumbnail component
- will be created using *data* if specified. If no component name is
- given, `thumbnail` will be used.
-
- The file is expected to be of an appropriate size and valid file
- type.
-
- .. note::
-
- A :meth:`Session.commit <ftrack_api.session.Session.commit>` will be
- automatically issued.
-
- '''
- if data is None:
- data = {}
- if not data.get('name'):
- data['name'] = 'thumbnail'
-
- thumbnail_component = self.session.create_component(
- path, data, location=None
- )
-
- origin_location = self.session.get(
- 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID
- )
- server_location = self.session.get(
- 'Location', ftrack_api.symbol.SERVER_LOCATION_ID
- )
- server_location.add_component(thumbnail_component, [origin_location])
-
- # TODO: This commit can be avoided by reordering the operations in
- # this method so that the component is transferred to ftrack.server
- # after the thumbnail has been set.
- #
- # There is currently a bug in the API backend, causing the operations
- # to *some* times be ordered wrongly, where the update occurs before
- # the component has been created, causing an integrity error.
- #
- # Once this issue has been resolved, this commit can be removed
- # and the update placed between component creation and registration.
diff --git a/pype/vendor/ftrack_api/entity/component.py b/pype/vendor/ftrack_api/entity/component.py
deleted file mode 100644
index c08ff7918b..0000000000
--- a/pype/vendor/ftrack_api/entity/component.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-from builtins import object
-import ftrack_api.entity.base
-
-
-class Component(ftrack_api.entity.base.Entity):
-    '''Represent a component.'''
-
-    def get_availability(self, locations=None):
-        '''Return availability in *locations*.
-
-        If *locations* is None, all known locations will be checked.
-
-        Return a dictionary of {location_id:percentage_availability}
-
-        '''
-        return self.session.get_component_availability(
-            self, locations=locations
-        )
-
-
-class CreateThumbnailMixin(object):
-    '''Mixin to add create_thumbnail method on entity class.'''
-
-    def create_thumbnail(self, path, data=None):
-        '''Set entity thumbnail from *path*.
-
-        Creates a thumbnail component in the ftrack.server location using
-        :meth:`Session.create_component
-        <ftrack_api.session.Session.create_component>`. The thumbnail component
-        will be created using *data* if specified. If no component name is
-        given, `thumbnail` will be used.
-
-        The file is expected to be of an appropriate size and valid file
-        type.
-
-        .. note::
-
-            A :meth:`Session.commit` will be
-            automatically issued.
-
-        '''
-        if data is None:
-            data = {}
-        if not data.get('name'):
-            data['name'] = 'thumbnail'
-
-        thumbnail_component = self.session.create_component(
-            path, data, location=None
-        )
-
-        origin_location = self.session.get(
-            'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID
-        )
-        server_location = self.session.get(
-            'Location', ftrack_api.symbol.SERVER_LOCATION_ID
-        )
-        server_location.add_component(thumbnail_component, [origin_location])
-
-        # TODO: This commit can be avoided by reordering the operations in
-        # this method so that the component is transferred to ftrack.server
-        # after the thumbnail has been set.
-        #
-        # There is currently a bug in the API backend, causing the operations
-        # to *some* times be ordered wrongly, where the update occurs before
-        # the component has been created, causing an integrity error.
-        #
-        # Once this issue has been resolved, this commit can be removed and
-        # the update placed between component creation and registration.
-        self['thumbnail_id'] = thumbnail_component['id']
-        self.session.commit()
-
-        return thumbnail_component
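A hedged sketch of how this mixin is typically used: any entity whose schema exposes `thumbnail_id` (for example a Task or AssetVersion) gains `create_thumbnail`. The entity lookup and file path below are illustrative:

```python
import ftrack_api

session = ftrack_api.Session()

# Illustrative: pick any entity with a `thumbnail_id` property.
task = session.query('Task').first()

# Transfers the file to the server location and commits automatically.
thumbnail_component = task.create_thumbnail('/tmp/thumbnail.jpg')
print(thumbnail_component['id'])
```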
diff --git a/pype/vendor/ftrack_api/entity/factory.py b/pype/vendor/ftrack_api/entity/factory.py
deleted file mode 100644
index 2d86e06055..0000000000
--- a/pype/vendor/ftrack_api/entity/factory.py
+++ /dev/null
@@ -1,439 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-from __future__ import absolute_import
-
-
-from builtins import str
-from builtins import object
-
-import logging
-import uuid
-import functools
-
-import ftrack_api.attribute
-import ftrack_api.entity.base
-import ftrack_api.entity.location
-import ftrack_api.entity.component
-import ftrack_api.entity.asset_version
-import ftrack_api.entity.project_schema
-import ftrack_api.entity.note
-import ftrack_api.entity.job
-import ftrack_api.entity.user
-import ftrack_api.symbol
-import ftrack_api.cache
-from ftrack_api.logging import LazyLogMessage as L
-
-
-class Factory(object):
-    '''Entity class factory.'''
-
-    def __init__(self):
-        '''Initialise factory.'''
-        super(Factory, self).__init__()
-        self.logger = logging.getLogger(
-            __name__ + '.' + self.__class__.__name__
-        )
-
-    def create(self, schema, bases=None):
-        '''Create and return entity class from *schema*.
-
-        *bases* should be a list of bases to give the constructed class. If not
-        specified, default to :class:`ftrack_api.entity.base.Entity`.
-
-        '''
-        entity_type = schema['id']
-        class_name = entity_type
-
-        class_bases = bases
-        if class_bases is None:
-            class_bases = [ftrack_api.entity.base.Entity]
-
-        class_namespace = dict()
-
-        # Build attributes for class.
-        attributes = ftrack_api.attribute.Attributes()
-        immutable = schema.get('immutable', [])
-        for name, fragment in list(schema.get('properties', {}).items()):
-            mutable = name not in immutable
-
-            default = fragment.get('default', ftrack_api.symbol.NOT_SET)
-            if default == '{uid}':
-                default = lambda instance: str(uuid.uuid4())
-
-            data_type = fragment.get('type', ftrack_api.symbol.NOT_SET)
-
-            if data_type is not ftrack_api.symbol.NOT_SET:
-
-                if data_type in (
-                    'string', 'boolean', 'integer', 'number', 'variable'
-                ):
-                    # Basic scalar attribute.
-                    if data_type == 'number':
-                        data_type = 'float'
-
-                    if data_type == 'string':
-                        data_format = fragment.get('format')
-                        if data_format == 'date-time':
-                            data_type = 'datetime'
-
-                    attribute = self.create_scalar_attribute(
-                        class_name, name, mutable, default, data_type
-                    )
-                    if attribute:
-                        attributes.add(attribute)
-
-                elif data_type == 'array':
-                    attribute = self.create_collection_attribute(
-                        class_name, name, mutable
-                    )
-                    if attribute:
-                        attributes.add(attribute)
-
-                elif data_type == 'mapped_array':
-                    reference = fragment.get('items', {}).get('$ref')
-                    if not reference:
-                        self.logger.debug(L(
-                            'Skipping {0}.{1} mapped_array attribute that does '
-                            'not define a schema reference.', class_name, name
-                        ))
-                        continue
-
-                    attribute = self.create_mapped_collection_attribute(
-                        class_name, name, mutable, reference
-                    )
-                    if attribute:
-                        attributes.add(attribute)
-
-                else:
-                    self.logger.debug(L(
-                        'Skipping {0}.{1} attribute with unrecognised data '
-                        'type {2}', class_name, name, data_type
-                    ))
-            else:
-                # Reference attribute.
-                reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET)
-                if reference is ftrack_api.symbol.NOT_SET:
-                    self.logger.debug(L(
-                        'Skipping {0}.{1} mapped_array attribute that does '
-                        'not define a schema reference.', class_name, name
-                    ))
-                    continue
-
-                attribute = self.create_reference_attribute(
-                    class_name, name, mutable, reference
-                )
-                if attribute:
-                    attributes.add(attribute)
-
-        default_projections = schema.get('default_projections', [])
-
-        # Construct class.
-        class_namespace['entity_type'] = entity_type
-        class_namespace['attributes'] = attributes
-        class_namespace['primary_key_attributes'] = schema['primary_key'][:]
-        class_namespace['default_projections'] = default_projections
-
-        from future.utils import (
-            native_str
-        )
-
-        cls = type(
-            native_str(class_name),  # type doesn't accept unicode.
-            tuple(class_bases),
-            class_namespace
-        )
-
-        return cls
-
-    def create_scalar_attribute(
-        self, class_name, name, mutable, default, data_type
-    ):
-        '''Return appropriate scalar attribute instance.'''
-        return ftrack_api.attribute.ScalarAttribute(
-            name, data_type=data_type, default_value=default, mutable=mutable
-        )
-
-    def create_reference_attribute(self, class_name, name, mutable, reference):
-        '''Return appropriate reference attribute instance.'''
-        return ftrack_api.attribute.ReferenceAttribute(
-            name, reference, mutable=mutable
-        )
-
-    def create_collection_attribute(self, class_name, name, mutable):
-        '''Return appropriate collection attribute instance.'''
-        return ftrack_api.attribute.CollectionAttribute(
-            name, mutable=mutable
-        )
-
-    def create_mapped_collection_attribute(
-        self, class_name, name, mutable, reference
-    ):
-        '''Return appropriate mapped collection attribute instance.'''
-        self.logger.debug(L(
-            'Skipping {0}.{1} mapped_array attribute that has '
-            'no implementation defined for reference {2}.',
-            class_name, name, reference
-        ))
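The core trick in `Factory.create` is building entity classes at runtime with `type()`. A stripped-down, self-contained sketch of the same technique, independent of ftrack_api (the schema below is illustrative):

```python
# Minimal sketch of schema-driven class creation.
schema = {
    'id': 'Shot',
    'properties': {'name': {'type': 'string'}},
    'primary_key': ['id'],
}

namespace = {
    'entity_type': schema['id'],
    'primary_key_attributes': schema['primary_key'][:],
}

# type(name, bases, namespace) builds a class object dynamically.
Shot = type(schema['id'], (object,), namespace)

instance = Shot()
print(instance.entity_type)             # 'Shot'
print(instance.primary_key_attributes)  # ['id']
```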
-
-
-class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker):
-    '''Generate key for defaults.'''
-
-    def _key(self, obj):
-        '''Return key for *obj*.'''
-        if isinstance(obj, dict):
-            entity = obj.get('entity')
-            if entity is not None:
-                # Key by session only.
-                return str(id(entity.session))
-
-        return str(obj)
-
-
-#: Memoiser for use with default callables that should only be called once per
-# session.
-memoise_defaults = ftrack_api.cache.memoise_decorator(
-    ftrack_api.cache.Memoiser(
-        key_maker=PerSessionDefaultKeyMaker(), return_copies=False
-    )
-)
-
-#: Memoiser for use with callables that should be called once per session.
-memoise_session = ftrack_api.cache.memoise_decorator(
-    ftrack_api.cache.Memoiser(
-        key_maker=PerSessionDefaultKeyMaker(), return_copies=False
-    )
-)
-
-
-@memoise_session
-def _get_custom_attribute_configurations(session):
-    '''Return list of custom attribute configurations.
-
-    The configuration objects will have key, project_id, id and object_type_id
-    populated.
-
-    '''
-    return session.query(
-        'select key, project_id, id, object_type_id, entity_type, '
-        'is_hierarchical from CustomAttributeConfiguration'
-    ).all()
-
-
-def _get_entity_configurations(entity):
-    '''Return all configurations for current collection entity.'''
-    entity_type = None
-    project_id = None
-    object_type_id = None
-
-    if 'object_type_id' in list(entity.keys()):
-        project_id = entity['project_id']
-        entity_type = 'task'
-        object_type_id = entity['object_type_id']
-
-    if entity.entity_type == 'AssetVersion':
-        project_id = entity['asset']['parent']['project_id']
-        entity_type = 'assetversion'
-
-    if entity.entity_type == 'Project':
-        project_id = entity['id']
-        entity_type = 'show'
-
-    if entity.entity_type == 'User':
-        entity_type = 'user'
-
-    if entity.entity_type == 'Asset':
-        entity_type = 'asset'
-
-    if entity.entity_type in ('TypedContextList', 'AssetVersionList'):
-        entity_type = 'list'
-
-    if entity_type is None:
-        raise ValueError(
-            'Entity {!r} not supported.'.format(entity)
-        )
-
-    configurations = []
-    for configuration in _get_custom_attribute_configurations(
-        entity.session
-    ):
-        if (
-            configuration['entity_type'] == entity_type and
-            configuration['project_id'] in (project_id, None) and
-            configuration['object_type_id'] == object_type_id
-        ):
-            # The custom attribute configuration is for the target entity type.
-            configurations.append(configuration)
-        elif (
-            entity_type in ('asset', 'assetversion', 'show', 'task') and
-            configuration['project_id'] in (project_id, None) and
-            configuration['is_hierarchical']
-        ):
-            # The target entity type allows hierarchical attributes.
-            configurations.append(configuration)
-
-    # Return with global configurations at the end of the list. This is done
-    # so that global configurations are shadowed by project specific ones if
-    # the configurations list is looped when looking for a matching `key`.
-    return sorted(
-        configurations, key=lambda item: item['project_id'] is None
-    )
-
-
-class StandardFactory(Factory):
-    '''Standard entity class factory.'''
-
-    def create(self, schema, bases=None):
-        '''Create and return entity class from *schema*.'''
-        if not bases:
-            bases = []
-
-        extra_bases = []
-        # Customise classes.
-        if schema['id'] == 'ProjectSchema':
-            extra_bases = [ftrack_api.entity.project_schema.ProjectSchema]
-
-        elif schema['id'] == 'Location':
-            extra_bases = [ftrack_api.entity.location.Location]
-
-        elif schema['id'] == 'AssetVersion':
-            extra_bases = [ftrack_api.entity.asset_version.AssetVersion]
-
-        elif schema['id'].endswith('Component'):
-            extra_bases = [ftrack_api.entity.component.Component]
-
-        elif schema['id'] == 'Note':
-            extra_bases = [ftrack_api.entity.note.Note]
-
-        elif schema['id'] == 'Job':
-            extra_bases = [ftrack_api.entity.job.Job]
-
-        elif schema['id'] == 'User':
-            extra_bases = [ftrack_api.entity.user.User]
-
-        bases = extra_bases + bases
-
-        # If bases does not contain any items, add the base entity class.
-        if not bases:
-            bases = [ftrack_api.entity.base.Entity]
-
-        # Add mixins.
-        if 'notes' in schema.get('properties', {}):
-            bases.append(
-                ftrack_api.entity.note.CreateNoteMixin
-            )
-
-        if 'thumbnail_id' in schema.get('properties', {}):
-            bases.append(
-                ftrack_api.entity.component.CreateThumbnailMixin
-            )
-
-        cls = super(StandardFactory, self).create(schema, bases=bases)
-
-        return cls
-
-    def create_mapped_collection_attribute(
-        self, class_name, name, mutable, reference
-    ):
-        '''Return appropriate mapped collection attribute instance.'''
-        if reference == 'Metadata':
-
-            def create_metadata(proxy, data, reference):
-                '''Return metadata for *data*.'''
-                entity = proxy.collection.entity
-                session = entity.session
-                data.update({
-                    'parent_id': entity['id'],
-                    'parent_type': entity.entity_type
-                })
-                return session.create(reference, data)
-
-            creator = functools.partial(
-                create_metadata, reference=reference
-            )
-            key_attribute = 'key'
-            value_attribute = 'value'
-
-            return ftrack_api.attribute.KeyValueMappedCollectionAttribute(
-                name, creator, key_attribute, value_attribute, mutable=mutable
-            )
-
-        elif reference == 'CustomAttributeValue':
-            return (
-                ftrack_api.attribute.CustomAttributeCollectionAttribute(
-                    name, mutable=mutable
-                )
-            )
-
-        elif reference.endswith('CustomAttributeValue'):
-            def creator(proxy, data):
-                '''Create a custom attribute based on *proxy* and *data*.
-
-                Raise :py:exc:`KeyError` if related entity is already persisted
-                to the server. The proxy represents dense custom attribute
-                values and should never create new custom attribute values
-                through the proxy if entity exists on the remote.
-
-                If the entity is not persisted the usual
-                <entity_type>CustomAttributeValue items cannot be updated as
-                the related entity does not exist on remote and values not in
-                the proxy. Instead a <entity_type>CustomAttributeValue will
-                be reconstructed and an update operation will be recorded.
-
-                '''
-                entity = proxy.collection.entity
-                if (
-                    ftrack_api.inspection.state(entity) is not
-                    ftrack_api.symbol.CREATED
-                ):
-                    raise KeyError(
-                        'Custom attributes must be created explicitly for the '
-                        'given entity type before being set.'
-                    )
-
-                configuration = None
-                for candidate in _get_entity_configurations(entity):
-                    if candidate['key'] == data['key']:
-                        configuration = candidate
-                        break
-
-                if configuration is None:
-                    raise ValueError(
-                        u'No valid custom attribute for data {0!r} was found.'
-                        .format(data)
-                    )
-
-                create_data = dict(list(data.items()))
-                create_data['configuration_id'] = configuration['id']
-                create_data['entity_id'] = entity['id']
-
-                session = entity.session
-
-                # Create custom attribute by reconstructing it and update the
-                # value. This will prevent a create operation from being sent
-                # to the remote, as create operations for this entity type are
-                # not allowed. Instead an update operation will be recorded.
-                value = create_data.pop('value')
-                item = session.create(
-                    reference,
-                    create_data,
-                    reconstructing=True
-                )
-
-                # Record update operation.
-                item['value'] = value
-
-                return item
-
-            key_attribute = 'key'
-            value_attribute = 'value'
-
-            return ftrack_api.attribute.KeyValueMappedCollectionAttribute(
-                name, creator, key_attribute, value_attribute, mutable=mutable
-            )
-
-        self.logger.debug(L(
-            'Skipping {0}.{1} mapped_array attribute that has no configuration '
-            'for reference {2}.', class_name, name, reference
-        ))
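A hedged sketch of the reconstruct-then-update pattern used by the creator above. The entity type and the ids are illustrative; in the real code they come from the matched configuration and the target entity:

```python
import ftrack_api

session = ftrack_api.Session()

create_data = {
    'configuration_id': 'attr-config-id',  # hypothetical id
    'entity_id': 'task-id',                # hypothetical id
}

# reconstructing=True registers the entity locally without recording a
# create operation against the server...
item = session.create(
    'ContextCustomAttributeValue', create_data, reconstructing=True
)

# ...so this assignment is recorded as an update operation instead.
item['value'] = 42
session.commit()
```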
- item['value'] = value - - return item - - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has no configuration ' - 'for reference {2}.', class_name, name, reference - )) diff --git a/pype/vendor/ftrack_api/entity/job.py b/pype/vendor/ftrack_api/entity/job.py deleted file mode 100644 index ae37922c51..0000000000 --- a/pype/vendor/ftrack_api/entity/job.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Job(ftrack_api.entity.base.Entity): - '''Represent job.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - To set a job `description` visible in the web interface, *data* can - contain a key called `data` which should be a JSON serialised - dictionary containing description:: - - data = { - 'status': 'running', - 'data': json.dumps(dict(description='My job description.')), - ... - } - - Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` - is set to something not equal to "api_job". - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - - if not reconstructing: - if data.get('type') not in ('api_job', None): - raise ValueError( - 'Invalid job type "{0}". Must be "api_job"'.format( - data.get('type') - ) - ) - - super(Job, self).__init__( - session, data=data, reconstructing=reconstructing - ) diff --git a/pype/vendor/ftrack_api/entity/location.py b/pype/vendor/ftrack_api/entity/location.py deleted file mode 100644 index 6edd9c9b6e..0000000000 --- a/pype/vendor/ftrack_api/entity/location.py +++ /dev/null @@ -1,735 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -from builtins import zip -from past.builtins import basestring -from builtins import object -import collections -import functools - -import ftrack_api.entity.base -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.symbol -import ftrack_api.inspection -from ftrack_api.logging import LazyLogMessage as L - - -class Location(ftrack_api.entity.base.Entity): - '''Represent storage for components.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
diff --git a/pype/vendor/ftrack_api/entity/location.py b/pype/vendor/ftrack_api/entity/location.py
deleted file mode 100644
index 6edd9c9b6e..0000000000
--- a/pype/vendor/ftrack_api/entity/location.py
+++ /dev/null
@@ -1,735 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-from builtins import zip
-from past.builtins import basestring
-from builtins import object
-import collections
-import functools
-
-import ftrack_api.entity.base
-import ftrack_api.exception
-import ftrack_api.event.base
-import ftrack_api.symbol
-import ftrack_api.inspection
-from ftrack_api.logging import LazyLogMessage as L
-
-
-class Location(ftrack_api.entity.base.Entity):
-    '''Represent storage for components.'''
-
-    def __init__(self, session, data=None, reconstructing=False):
-        '''Initialise entity.
-
-        *session* is an instance of :class:`ftrack_api.session.Session` that
-        this entity instance is bound to.
-
-        *data* is a mapping of key, value pairs to apply as initial attribute
-        values.
-
-        *reconstructing* indicates whether this entity is being reconstructed,
-        such as from a query, and therefore should not have any special creation
-        logic applied, such as initialising defaults for missing data.
-
-        '''
-        self.accessor = ftrack_api.symbol.NOT_SET
-        self.structure = ftrack_api.symbol.NOT_SET
-        self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET
-        self.priority = 95
-        super(Location, self).__init__(
-            session, data=data, reconstructing=reconstructing
-        )
-
-    def __str__(self):
-        '''Return string representation of instance.'''
-        representation = super(Location, self).__str__()
-
-        with self.session.auto_populating(False):
-            name = self['name']
-            if name is not ftrack_api.symbol.NOT_SET:
-                representation = representation.replace(
-                    '(', '("{0}", '.format(name)
-                )
-
-        return representation
-
-    def add_component(self, component, source, recursive=True):
-        '''Add *component* to location.
-
-        *component* should be a single component instance.
-
-        *source* should be an instance of another location that acts as the
-        source.
-
-        Raise :exc:`ftrack_api.ComponentInLocationError` if the *component*
-        already exists in this location.
-
-        Raise :exc:`ftrack_api.LocationError` if managing data and the generated
-        target structure for the component already exists according to the
-        accessor. This helps prevent potential data loss by avoiding overwriting
-        existing data. Note that there is a race condition between the check and
-        the write so if another process creates data at the same target during
-        that period it will be overwritten.
-
-        .. note::
-
-            A :meth:`Session.commit` may be
-            automatically issued as part of the component registration.
-
-        '''
-        return self.add_components(
-            [component], sources=source, recursive=recursive
-        )
-    def add_components(self, components, sources, recursive=True, _depth=0):
-        '''Add *components* to location.
-
-        *components* should be a list of component instances.
-
-        *sources* may be either a single source or a list of sources. If a list
-        then each corresponding index in *sources* will be used for each
-        *component*. A source should be an instance of another location.
-
-        Raise :exc:`ftrack_api.exception.ComponentInLocationError` if any
-        component in *components* already exists in this location. In this case,
-        no changes will be made and no data transferred.
-
-        Raise :exc:`ftrack_api.exception.LocationError` if managing data and the
-        generated target structure for the component already exists according to
-        the accessor. This helps prevent potential data loss by avoiding
-        overwriting existing data. Note that there is a race condition between
-        the check and the write so if another process creates data at the same
-        target during that period it will be overwritten.
-
-        .. note::
-
-            A :meth:`Session.commit` may be
-            automatically issued as part of the components registration.
-
-        .. important::
-
-            If this location manages data then the *components* data is first
-            transferred to the target prescribed by the structure plugin, using
-            the configured accessor. If any component fails to transfer then
-            :exc:`ftrack_api.exception.LocationError` is raised and none of the
-            components are registered with the database. In this case it is left
-            up to the caller to decide and act on manually cleaning up any
-            transferred data using the 'transferred' detail in the raised error.
-
-            Likewise, after transfer, all components are registered with the
-            database in a batch call. If any component causes an error then all
-            components will remain unregistered and
-            :exc:`ftrack_api.exception.LocationError` will be raised detailing
-            issues and any transferred data under the 'transferred' detail key.
-
-        '''
-        if (
-            isinstance(sources, basestring)
-            or not isinstance(sources, collections.Sequence)
-        ):
-            sources = [sources]
-
-        sources_count = len(sources)
-        if sources_count not in (1, len(components)):
-            raise ValueError(
-                'sources must be either a single source or a sequence of '
-                'sources with indexes corresponding to passed components.'
-            )
-
-        if not self.structure:
-            raise ftrack_api.exception.LocationError(
-                'No structure defined for location {location}.',
-                details=dict(location=self)
-            )
-
-        if not components:
-            # Optimisation: Return early when no components to process, such as
-            # when called recursively on an empty sequence component.
-            return
-
-        indent = '    ' * (_depth + 1)
-
-        # Check that components not already added to location.
-        existing_components = []
-        try:
-            self.get_resource_identifiers(components)
-
-        except ftrack_api.exception.ComponentNotInLocationError as error:
-            missing_component_ids = [
-                missing_component['id']
-                for missing_component in error.details['components']
-            ]
-            for component in components:
-                if component['id'] not in missing_component_ids:
-                    existing_components.append(component)
-
-        else:
-            existing_components.extend(components)
-
-        if existing_components:
-            # Some of the components already present in location.
-            raise ftrack_api.exception.ComponentInLocationError(
-                existing_components, self
-            )
-
-        # Attempt to transfer each component's data to this location.
-        transferred = []
-
-        for index, component in enumerate(components):
-            try:
-                # Determine appropriate source.
-                if sources_count == 1:
-                    source = sources[0]
-                else:
-                    source = sources[index]
-
-                # Add members first for container components.
-                is_container = 'members' in list(component.keys())
-                if is_container and recursive:
-                    self.add_components(
-                        component['members'], source, recursive=recursive,
-                        _depth=(_depth + 1)
-                    )
-
-                # Add component to this location.
-                context = self._get_context(component, source)
-                resource_identifier = self.structure.get_resource_identifier(
-                    component, context
-                )
-
-                # Manage data transfer.
-                self._add_data(component, resource_identifier, source)
-
-            except Exception as error:
-                raise ftrack_api.exception.LocationError(
-                    'Failed to transfer component {component} data to location '
-                    '{location} due to error:\n{indent}{error}\n{indent}'
-                    'Transferred component data that may require cleanup: '
-                    '{transferred}',
-                    details=dict(
-                        indent=indent,
-                        component=component,
-                        location=self,
-                        error=error,
-                        transferred=transferred
-                    )
-                )
-
-            else:
-                transferred.append((component, resource_identifier))
-
-        # Register all successfully transferred components.
-        components_to_register = []
-        component_resource_identifiers = []
-
-        try:
-            for component, resource_identifier in transferred:
-                if self.resource_identifier_transformer:
-                    # Optionally encode resource identifier before storing.
-                    resource_identifier = (
-                        self.resource_identifier_transformer.encode(
-                            resource_identifier,
-                            context={'component': component}
-                        )
-                    )
-
-                components_to_register.append(component)
-                component_resource_identifiers.append(resource_identifier)
-
-            # Store component in location information.
-            self._register_components_in_location(
-                components, component_resource_identifiers
-            )
-
-        except Exception as error:
-            raise ftrack_api.exception.LocationError(
-                'Failed to register components with location {location} due to '
-                'error:\n{indent}{error}\n{indent}Transferred component data '
-                'that may require cleanup: {transferred}',
-                details=dict(
-                    indent=indent,
-                    location=self,
-                    error=error,
-                    transferred=transferred
-                )
-            )
-
-        # Publish events.
-        for component in components_to_register:
-
-            component_id = list(ftrack_api.inspection.primary_key(
-                component
-            ).values())[0]
-            location_id = list(ftrack_api.inspection.primary_key(self).values())[0]
-
-            self.session.event_hub.publish(
-                ftrack_api.event.base.Event(
-                    topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC,
-                    data=dict(
-                        component_id=component_id,
-                        location_id=location_id
-                    ),
-                ),
-                on_error='ignore'
-            )
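Taken together, a typical use of this transfer-then-register flow from user code looks roughly like the sketch below. The location names are illustrative, not part of this diff:

```python
import ftrack_api

session = ftrack_api.Session()

# Illustrative lookups: a component already registered in the source
# location, plus a managed target location.
component = session.query('FileComponent').first()
source = session.query('Location where name is "ftrack.unmanaged"').one()
target = session.query('Location where name is "studio.disk"').one()

# Transfers the data via the target's accessor/structure, registers the
# ComponentLocation rows in one batch, then publishes
# COMPONENT_ADDED_TO_LOCATION events.
target.add_component(component, source)
```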
-    def _get_context(self, component, source):
-        '''Return context for *component* and *source*.'''
-        context = {}
-        if source:
-            try:
-                source_resource_identifier = source.get_resource_identifier(
-                    component
-                )
-            except ftrack_api.exception.ComponentNotInLocationError:
-                pass
-            else:
-                context.update(dict(
-                    source_resource_identifier=source_resource_identifier
-                ))
-
-        return context
-
-    def _add_data(self, component, resource_identifier, source):
-        '''Manage transfer of *component* data from *source*.
-
-        *resource_identifier* specifies the identifier to use with this
-        location's accessor.
-
-        '''
-        self.logger.debug(L(
-            'Adding data for component {0!r} from source {1!r} to location '
-            '{2!r} using resource identifier {3!r}.',
-            component, resource_identifier, source, self
-        ))
-
-        # Read data from source and write to this location.
-        if not source.accessor:
-            raise ftrack_api.exception.LocationError(
-                'No accessor defined for source location {location}.',
-                details=dict(location=source)
-            )
-
-        if not self.accessor:
-            raise ftrack_api.exception.LocationError(
-                'No accessor defined for target location {location}.',
-                details=dict(location=self)
-            )
-
-        is_container = 'members' in list(component.keys())
-        if is_container:
-            # TODO: Improve this check. Possibly introduce an inspection
-            # such as ftrack_api.inspection.is_sequence_component.
-            if component.entity_type != 'SequenceComponent':
-                self.accessor.make_container(resource_identifier)
-
-        else:
-            # Try to make container of component.
-            try:
-                container = self.accessor.get_container(
-                    resource_identifier
-                )
-
-            except ftrack_api.exception.AccessorParentResourceNotFoundError:
-                # Container could not be retrieved from
-                # resource_identifier. Assume that there is no need to
-                # make the container.
-                pass
-
-            else:
-                # No need for existence check as make_container does not
-                # recreate existing containers.
-                self.accessor.make_container(container)
-
-            if self.accessor.exists(resource_identifier):
-                # Note: There is a race condition here in that the
-                # data may be added externally between the check for
-                # existence and the actual write which would still
-                # result in potential data loss. However, there is no
-                # good cross platform, cross accessor solution for this
-                # at present.
-                raise ftrack_api.exception.LocationError(
-                    'Cannot add component as data already exists and '
-                    'overwriting could result in data loss. Computed '
-                    'target resource identifier was: {0}'
-                    .format(resource_identifier)
-                )
-
-            # Read and write data.
-            source_data = source.accessor.open(
-                source.get_resource_identifier(component), 'rb'
-            )
-            target_data = self.accessor.open(resource_identifier, 'wb')
-
-            # Read/write data in chunks to avoid reading all into memory at the
-            # same time.
-            chunked_read = functools.partial(
-                source_data.read, ftrack_api.symbol.CHUNK_SIZE
-            )
-            for chunk in iter(chunked_read, b''):
-                target_data.write(chunk)
-
-            target_data.close()
-            source_data.close()
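The two-argument `iter()` idiom above is worth calling out: it keeps calling the partial read until it returns the `b''` sentinel, so the whole file never has to fit in memory. A standalone sketch of the same technique (file names and chunk size are illustrative):

```python
import functools

CHUNK_SIZE = 1024 * 1024  # illustrative; ftrack_api.symbol defines its own

with open('source.bin', 'rb') as source, open('target.bin', 'wb') as target:
    # iter(callable, sentinel) produces chunks until read() returns b''.
    chunked_read = functools.partial(source.read, CHUNK_SIZE)
    for chunk in iter(chunked_read, b''):
        target.write(chunk)
```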
-    def _register_component_in_location(self, component, resource_identifier):
-        '''Register *component* in location against *resource_identifier*.'''
-        return self._register_components_in_location(
-            [component], [resource_identifier]
-        )
-
-    def _register_components_in_location(
-        self, components, resource_identifiers
-    ):
-        '''Register *components* in location against *resource_identifiers*.
-
-        Indices of *components* and *resource_identifiers* should align.
-
-        '''
-        for component, resource_identifier in zip(
-            components, resource_identifiers
-        ):
-            self.session.create(
-                'ComponentLocation', data=dict(
-                    component=component,
-                    location=self,
-                    resource_identifier=resource_identifier
-                )
-            )
-
-        self.session.commit()
-
-    def remove_component(self, component, recursive=True):
-        '''Remove *component* from location.
-
-        .. note::
-
-            A :meth:`Session.commit` may be
-            automatically issued as part of the component deregistration.
-
-        '''
-        return self.remove_components([component], recursive=recursive)
-
-    def remove_components(self, components, recursive=True):
-        '''Remove *components* from location.
-
-        .. note::
-
-            A :meth:`Session.commit` may be
-            automatically issued as part of the components deregistration.
-
-        '''
-        for component in components:
-            # Check component is in this location
-            self.get_resource_identifier(component)
-
-            # Remove members first for container components.
-            is_container = 'members' in list(component.keys())
-            if is_container and recursive:
-                self.remove_components(
-                    component['members'], recursive=recursive
-                )
-
-            # Remove data.
-            self._remove_data(component)
-
-            # Remove metadata.
-            self._deregister_component_in_location(component)
-
-            # Emit event.
-            component_id = list(ftrack_api.inspection.primary_key(
-                component
-            ).values())[0]
-            location_id = list(ftrack_api.inspection.primary_key(self).values())[0]
-            self.session.event_hub.publish(
-                ftrack_api.event.base.Event(
-                    topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC,
-                    data=dict(
-                        component_id=component_id,
-                        location_id=location_id
-                    )
-                ),
-                on_error='ignore'
-            )
-
-    def _remove_data(self, component):
-        '''Remove data associated with *component*.'''
-        if not self.accessor:
-            raise ftrack_api.exception.LocationError(
-                'No accessor defined for location {location}.',
-                details=dict(location=self)
-            )
-
-        try:
-            self.accessor.remove(
-                self.get_resource_identifier(component)
-            )
-        except ftrack_api.exception.AccessorResourceNotFoundError:
-            # If accessor does not support detecting sequence paths then an
-            # AccessorResourceNotFoundError is raised. For now, if the
-            # component type is 'SequenceComponent' assume success.
-            if not component.entity_type == 'SequenceComponent':
-                raise
-
-    def _deregister_component_in_location(self, component):
-        '''Deregister *component* from location.'''
-        component_id = list(ftrack_api.inspection.primary_key(component).values())[0]
-        location_id = list(ftrack_api.inspection.primary_key(self).values())[0]
-
-        # TODO: Use session.get for optimisation.
-        component_location = self.session.query(
-            'ComponentLocation where component_id is {0} and location_id is '
-            '{1}'.format(component_id, location_id)
-        )[0]
-
-        self.session.delete(component_location)
-
-        # TODO: Should auto-commit here be optional?
-        self.session.commit()
-
-    def get_component_availability(self, component):
-        '''Return availability of *component* in this location as a float.'''
-        return self.session.get_component_availability(
-            component, locations=[self]
-        )[self['id']]
-
-    def get_component_availabilities(self, components):
-        '''Return availabilities of *components* in this location.
-
-        Return list of float values corresponding to each component.
-
-        '''
-        return [
-            availability[self['id']] for availability in
-            self.session.get_component_availabilities(
-                components, locations=[self]
-            )
-        ]
-
-    def get_resource_identifier(self, component):
-        '''Return resource identifier for *component*.
-
-        Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the
-        component is not present in this location.
-
-        '''
-        return self.get_resource_identifiers([component])[0]
-
-    def get_resource_identifiers(self, components):
-        '''Return resource identifiers for *components*.
-
-        Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any
-        of the components are not present in this location.
-
-        '''
-        resource_identifiers = self._get_resource_identifiers(components)
-
-        # Optionally decode resource identifier.
-        if self.resource_identifier_transformer:
-            for index, resource_identifier in enumerate(resource_identifiers):
-                resource_identifiers[index] = (
-                    self.resource_identifier_transformer.decode(
-                        resource_identifier
-                    )
-                )
-
-        return resource_identifiers
-    def _get_resource_identifiers(self, components):
-        '''Return resource identifiers for *components*.
-
-        Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any
-        of the components are not present in this location.
-
-        '''
-        component_ids_mapping = collections.OrderedDict()
-        for component in components:
-            component_id = list(ftrack_api.inspection.primary_key(
-                component
-            ).values())[0]
-            component_ids_mapping[component_id] = component
-
-        component_locations = self.session.query(
-            'select component_id, resource_identifier from ComponentLocation '
-            'where location_id is {0} and component_id in ({1})'
-            .format(
-                list(ftrack_api.inspection.primary_key(self).values())[0],
-                ', '.join(list(component_ids_mapping.keys()))
-            )
-        )
-
-        resource_identifiers_map = {}
-        for component_location in component_locations:
-            resource_identifiers_map[component_location['component_id']] = (
-                component_location['resource_identifier']
-            )
-
-        resource_identifiers = []
-        missing = []
-        for component_id, component in list(component_ids_mapping.items()):
-            if component_id not in resource_identifiers_map:
-                missing.append(component)
-            else:
-                resource_identifiers.append(
-                    resource_identifiers_map[component_id]
-                )
-
-        if missing:
-            raise ftrack_api.exception.ComponentNotInLocationError(
-                missing, self
-            )
-
-        return resource_identifiers
-
-    def get_filesystem_path(self, component):
-        '''Return filesystem path for *component*.'''
-        return self.get_filesystem_paths([component])[0]
-
-    def get_filesystem_paths(self, components):
-        '''Return filesystem paths for *components*.'''
-        resource_identifiers = self.get_resource_identifiers(components)
-
-        filesystem_paths = []
-        for resource_identifier in resource_identifiers:
-            filesystem_paths.append(
-                self.accessor.get_filesystem_path(resource_identifier)
-            )
-
-        return filesystem_paths
-
-    def get_url(self, component):
-        '''Return url for *component*.
-
-        Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if
-        URL could not be determined from *component* or
-        :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if
-        retrieving URL is not supported by the location's accessor.
-        '''
-        resource_identifier = self.get_resource_identifier(component)
-
-        return self.accessor.get_url(resource_identifier)
-
-
-class MemoryLocationMixin(object):
-    '''Represent storage for components.
-
-    Unlike a standard location, only store metadata for components in this
-    location in memory rather than persisting to the database.
-
-    '''
-
-    @property
-    def _cache(self):
-        '''Return cache.'''
-        try:
-            cache = self.__cache
-        except AttributeError:
-            cache = self.__cache = {}
-
-        return cache
-
-    def _register_component_in_location(self, component, resource_identifier):
-        '''Register *component* in location with *resource_identifier*.'''
-        component_id = list(ftrack_api.inspection.primary_key(component).values())[0]
-        self._cache[component_id] = resource_identifier
-
-    def _register_components_in_location(
-        self, components, resource_identifiers
-    ):
-        '''Register *components* in location against *resource_identifiers*.
-
-        Indices of *components* and *resource_identifiers* should align.
-
-        '''
-        for component, resource_identifier in zip(
-            components, resource_identifiers
-        ):
-            self._register_component_in_location(component, resource_identifier)
-
-    def _deregister_component_in_location(self, component):
-        '''Deregister *component* in location.'''
-        component_id = list(ftrack_api.inspection.primary_key(component).values())[0]
-        self._cache.pop(component_id)
-    def _get_resource_identifiers(self, components):
-        '''Return resource identifiers for *components*.
-
-        Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any
-        of the referenced components are not present in this location.
-
-        '''
-        resource_identifiers = []
-        missing = []
-        for component in components:
-            component_id = list(ftrack_api.inspection.primary_key(
-                component
-            ).values())[0]
-            resource_identifier = self._cache.get(component_id)
-            if resource_identifier is None:
-                missing.append(component)
-            else:
-                resource_identifiers.append(resource_identifier)
-
-        if missing:
-            raise ftrack_api.exception.ComponentNotInLocationError(
-                missing, self
-            )
-
-        return resource_identifiers
-
-
-class UnmanagedLocationMixin(object):
-    '''Location that does not manage data.'''
-
-    def _add_data(self, component, resource_identifier, source):
-        '''Manage transfer of *component* data from *source*.
-
-        *resource_identifier* specifies the identifier to use with this
-        location's accessor.
-
-        Overridden to have no effect.
-
-        '''
-        return
-
-    def _remove_data(self, component):
-        '''Remove data associated with *component*.
-
-        Overridden to have no effect.
-
-        '''
-        return
-
-
-class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin):
-    '''Special origin location that expects sources as filepaths.'''
-
-    def _get_context(self, component, source):
-        '''Return context for *component* and *source*.'''
-        context = {}
-        if source:
-            context.update(dict(
-                source_resource_identifier=source
-            ))
-
-        return context
-
-
-class ServerLocationMixin(object):
-    '''Location representing ftrack server.
-
-    Adds convenience methods to location, specific to ftrack server.
-    '''
-    def get_thumbnail_url(self, component, size=None):
-        '''Return thumbnail url for *component*.
-
-        Optionally, specify *size* to constrain the downscaled image to size
-        x size pixels.
-
-        Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if
-        URL could not be determined from *resource_identifier* or
-        :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if
-        retrieving URL is not supported by the location's accessor.
-        '''
-        resource_identifier = self.get_resource_identifier(component)
-        return self.accessor.get_thumbnail_url(resource_identifier, size)
diff --git a/pype/vendor/ftrack_api/entity/note.py b/pype/vendor/ftrack_api/entity/note.py
deleted file mode 100644
index 5d27b32f63..0000000000
--- a/pype/vendor/ftrack_api/entity/note.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-from builtins import object
-import ftrack_api.entity.base
-
-
-class Note(ftrack_api.entity.base.Entity):
-    '''Represent a note.'''
-
-    def create_reply(
-        self, content, author
-    ):
-        '''Create a reply with *content* and *author*.
-
-        .. note::
-
-            This is a helper method. To create replies manually use the
-            standard :meth:`Session.create` method.
-
-        '''
-        reply = self.session.create(
-            'Note', {
-                'author': author,
-                'content': content
-            }
-        )
-
-        self['replies'].append(reply)
-
-        return reply
-
-
-class CreateNoteMixin(object):
-    '''Mixin to add create_note method on entity class.'''
-
-    def create_note(self, content, author, recipients=None, category=None):
-        '''Create note with *content* and *author*.
-
-        Note category can be set by including *category* and *recipients*
-        can be specified as a list of user or group instances.
-
-        '''
-        if not recipients:
-            recipients = []
-
-        category_id = None
-        if category:
-            category_id = category['id']
-
-        data = {
-            'content': content,
-            'author': author,
-            'category_id': category_id
-        }
-
-        note = self.session.create('Note', data)
-
-        self['notes'].append(note)
-
-        for resource in recipients:
-            recipient = self.session.create('Recipient', {
-                'note_id': note['id'],
-                'resource_id': resource['id']
-            })
-
-            note['recipients'].append(recipient)
-
-        return note
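A hedged sketch of the note helpers in use; the entity and user lookups are illustrative. Any entity whose schema has a `notes` property gains `create_note` through the mixin above:

```python
import ftrack_api

session = ftrack_api.Session()

task = session.query('Task').first()
author = session.query('User').first()

note = task.create_note('Looks good, please publish.', author)
reply = note.create_reply('Published in version 2.', author)
session.commit()
```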
diff --git a/pype/vendor/ftrack_api/entity/project_schema.py b/pype/vendor/ftrack_api/entity/project_schema.py
deleted file mode 100644
index ec6db7c019..0000000000
--- a/pype/vendor/ftrack_api/entity/project_schema.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-import ftrack_api.entity.base
-
-
-class ProjectSchema(ftrack_api.entity.base.Entity):
-    '''Class representing ProjectSchema.'''
-
-    def get_statuses(self, schema, type_id=None):
-        '''Return statuses for *schema* and optional *type_id*.
-
-        *type_id* is the id of the Type for a TypedContext and can be used to
-        get statuses where the workflow has been overridden.
-
-        '''
-        # Task has overrides and needs to be handled separately.
-        if schema == 'Task':
-            if type_id is not None:
-                overrides = self['_overrides']
-                for override in overrides:
-                    if override['type_id'] == type_id:
-                        return override['workflow_schema']['statuses'][:]
-
-            return self['_task_workflow']['statuses'][:]
-
-        elif schema == 'AssetVersion':
-            return self['_version_workflow']['statuses'][:]
-
-        else:
-            try:
-                EntityTypeClass = self.session.types[schema]
-            except KeyError:
-                raise ValueError('Schema {0} does not exist.'.format(schema))
-
-            object_type_id_attribute = EntityTypeClass.attributes.get(
-                'object_type_id'
-            )
-
-            try:
-                object_type_id = object_type_id_attribute.default_value
-            except AttributeError:
-                raise ValueError(
-                    'Schema {0} does not have statuses.'.format(schema)
-                )
-
-            for _schema in self['_schemas']:
-                if _schema['type_id'] == object_type_id:
-                    result = self.session.query(
-                        'select task_status from SchemaStatus '
-                        'where schema_id is {0}'.format(_schema['id'])
-                    )
-                    return [
-                        schema_type['task_status'] for schema_type in result
-                    ]
-
-            raise ValueError(
-                'No valid statuses were found for schema {0}.'.format(schema)
-            )
-
-    def get_types(self, schema):
-        '''Return types for *schema*.'''
-        # Task needs to be handled separately.
-        if schema == 'Task':
-            return self['_task_type_schema']['types'][:]
-
-        else:
-            try:
-                EntityTypeClass = self.session.types[schema]
-            except KeyError:
-                raise ValueError('Schema {0} does not exist.'.format(schema))
-
-            object_type_id_attribute = EntityTypeClass.attributes.get(
-                'object_type_id'
-            )
-
-            try:
-                object_type_id = object_type_id_attribute.default_value
-            except AttributeError:
-                raise ValueError(
-                    'Schema {0} does not have types.'.format(schema)
-                )
-
-            for _schema in self['_schemas']:
-                if _schema['type_id'] == object_type_id:
-                    result = self.session.query(
-                        'select task_type from SchemaType '
-                        'where schema_id is {0}'.format(_schema['id'])
-                    )
-                    return [schema_type['task_type'] for schema_type in result]
-
-            raise ValueError(
-                'No valid types were found for schema {0}.'.format(schema)
-            )
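A short, hedged sketch of reading a project's workflow through these helpers; the query and schema names are illustrative:

```python
import ftrack_api

session = ftrack_api.Session()

project = session.query('select project_schema from Project').first()
schema = project['project_schema']

task_statuses = schema.get_statuses('Task')
shot_statuses = schema.get_statuses('Shot')
task_types = schema.get_types('Task')

print([status['name'] for status in task_statuses])
```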
diff --git a/pype/vendor/ftrack_api/entity/user.py b/pype/vendor/ftrack_api/entity/user.py
deleted file mode 100644
index 49318f8650..0000000000
--- a/pype/vendor/ftrack_api/entity/user.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 ftrack
-
-from builtins import str
-import arrow
-
-import ftrack_api.entity.base
-import ftrack_api.exception
-
-
-class User(ftrack_api.entity.base.Entity):
-    '''Represent a user.'''
-
-    def start_timer(self, context=None, comment='', name=None, force=False):
-        '''Start a timer for *context* and return it.
-
-        *force* can be used to automatically stop an existing timer and create a
-        timelog for it. If you need to get access to the created timelog, use
-        :func:`stop_timer` instead.
-
-        *comment* and *name* are optional but will be set on the timer.
-
-        .. note::
-
-            This method will automatically commit the changes and if *force* is
-            False then it will fail with a
-            :class:`ftrack_api.exception.NotUniqueError` exception if a
-            timer is already running.
-
-        '''
-        if force:
-            try:
-                self.stop_timer()
-            except ftrack_api.exception.NoResultFoundError:
-                self.logger.debug('Failed to stop existing timer.')
-
-        timer = self.session.create('Timer', {
-            'user': self,
-            'context': context,
-            'name': name,
-            'comment': comment
-        })
-
-        # Commit the new timer and try to catch any error that indicates another
-        # timelog already exists and inform the user about it.
-        try:
-            self.session.commit()
-        except ftrack_api.exception.ServerError as error:
-            if 'IntegrityError' in str(error):
-                raise ftrack_api.exception.NotUniqueError(
-                    ('Failed to start a timelog for user with id: {0}, it is '
-                     'likely that a timer is already running. Either use '
-                     'force=True or stop the timer first.').format(self['id'])
-                )
-            else:
-                # Reraise the error as it might be something unrelated.
-                raise
-
-        return timer
-
-    def stop_timer(self):
-        '''Stop the current timer and return a timelog created from it.
-
-        If a timer is not running, a
-        :exc:`ftrack_api.exception.NoResultFoundError` exception will be
-        raised.
-
-        .. note::
-
-            This method will automatically commit the changes.
-
-        '''
-        timer = self.session.query(
-            'Timer where user_id = "{0}"'.format(self['id'])
-        ).one()
-
-        # If the server is running in the same timezone as the local
-        # timezone, we remove the TZ offset to get the correct duration.
-        is_timezone_support_enabled = self.session.server_information.get(
-            'is_timezone_support_enabled', None
-        )
-        if is_timezone_support_enabled is None:
-            self.logger.warning(
-                'Could not identify if server has timezone support enabled. '
-                'Will assume server is running in UTC.'
-            )
-            is_timezone_support_enabled = True
-
-        if is_timezone_support_enabled:
-            now = arrow.now()
-        else:
-            now = arrow.now().replace(tzinfo='utc')
-
-        delta = now - timer['start']
-        duration = delta.days * 24 * 60 * 60 + delta.seconds
-
-        timelog = self.session.create('Timelog', {
-            'user_id': timer['user_id'],
-            'context_id': timer['context_id'],
-            'comment': timer['comment'],
-            'start': timer['start'],
-            'duration': duration,
-            'name': timer['name']
-        })
-
-        self.session.delete(timer)
-        self.session.commit()
-
-        return timelog
-
-    def send_invite(self):
-        '''Send an invitation email to the user.'''
-
-        self.session.send_user_invite(
-            self
-        )
-
-    def reset_api_key(self):
-        '''Reset the user's api key.'''
-
-        response = self.session.reset_remote(
-            'api_key', entity=self
-        )
-
-        return response['api_key']
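A hedged sketch of the timer workflow documented above; the user and task lookups are illustrative:

```python
import ftrack_api

session = ftrack_api.Session()

user = session.query(
    'User where username is "{0}"'.format(session.api_user)
).one()
task = session.query('Task').first()

# force=True stops (and logs) any timer that is already running.
user.start_timer(task, comment='Lighting pass', force=True)

# ... work happens ...

timelog = user.stop_timer()
print('Logged {0} seconds'.format(timelog['duration']))
```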
diff --git a/pype/vendor/ftrack_api/event/__init__.py b/pype/vendor/ftrack_api/event/__init__.py
deleted file mode 100644
index 1aab07ed77..0000000000
--- a/pype/vendor/ftrack_api/event/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
diff --git a/pype/vendor/ftrack_api/event/base.py b/pype/vendor/ftrack_api/event/base.py
deleted file mode 100644
index f6f0a916c7..0000000000
--- a/pype/vendor/ftrack_api/event/base.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-from builtins import str
-import uuid
-import collections
-
-
-class Event(collections.MutableMapping):
-    '''Represent a single event.'''
-
-    def __init__(self, topic, id=None, data=None, sent=None,
-                 source=None, target='', in_reply_to_event=None):
-        '''Initialise event.
-
-        *topic* is the required topic for the event. It can use a dotted
-        notation to demarcate groupings. For example, 'ftrack.update'.
-
-        *id* is the unique id for this event instance. It is primarily used when
-        replying to an event. If not supplied a default uuid based value will
-        be used.
-
-        *data* refers to event specific data. It should be a mapping structure
-        and defaults to an empty dictionary if not supplied.
-
-        *sent* is the timestamp the event is sent. It will be set automatically
-        as send time unless specified here.
-
-        *source* is information about where the event originated. It should be
-        a mapping and include at least a unique id value under an 'id' key. If
-        not specified, senders usually populate the value automatically at
-        publish time.
-
-        *target* can be an expression that targets this event. For example,
-        a reply event would target the event to the sender of the source event.
-        The expression will be tested against subscriber information only.
-
-        *in_reply_to_event* is used when replying to an event and should contain
-        the unique id of the event being replied to.
-
-        '''
-        super(Event, self).__init__()
-        self._data = dict(
-            id=id or uuid.uuid4().hex,
-            data=data or {},
-            topic=topic,
-            sent=sent,
-            source=source or {},
-            target=target,
-            in_reply_to_event=in_reply_to_event
-        )
-        self._stopped = False
-
-    def stop(self):
-        '''Stop further processing of this event.'''
-        self._stopped = True
-
-    def is_stopped(self):
-        '''Return whether event has been stopped.'''
-        return self._stopped
-
-    def __str__(self):
-        '''Return string representation.'''
-        return '<{0} {1}>'.format(
-            self.__class__.__name__, str(self._data)
-        )
-
-    def __getitem__(self, key):
-        '''Return value for *key*.'''
-        return self._data[key]
-
-    def __setitem__(self, key, value):
-        '''Set *value* for *key*.'''
-        self._data[key] = value
-
-    def __delitem__(self, key):
-        '''Remove *key*.'''
-        del self._data[key]
-
-    def __iter__(self):
-        '''Iterate over all keys.'''
-        return iter(self._data)
-
-    def __len__(self):
-        '''Return count of keys.'''
-        return len(self._data)
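Since `Event` is a mutable mapping, it can be exercised standalone. A minimal sketch (the topic and payload are illustrative):

```python
from ftrack_api.event.base import Event

# Construct an event; the mapping interface exposes the payload.
event = Event(
    topic='ftrack.update',
    data={'entity_type': 'Task', 'changes': {'status': 'Approved'}},
)

print(event['topic'])           # 'ftrack.update'
print(event['data']['changes'])

# A handler can halt subsequent lower-priority handlers.
event.stop()
assert event.is_stopped()
```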
diff --git a/pype/vendor/ftrack_api/event/expression.py b/pype/vendor/ftrack_api/event/expression.py
deleted file mode 100644
index 569854d35f..0000000000
--- a/pype/vendor/ftrack_api/event/expression.py
+++ /dev/null
@@ -1,285 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-from builtins import map
-from past.builtins import basestring
-from builtins import object
-from operator import eq, ne, ge, le, gt, lt
-
-from pyparsing import (Group, Word, CaselessKeyword, Forward,
-                       FollowedBy, Suppress, oneOf, OneOrMore, Optional,
-                       alphanums, quotedString, removeQuotes)
-
-import ftrack_api.exception
-
-# Do not enable packrat since it is not thread-safe and will result in parsing
-# exceptions in a multi threaded environment.
-# ParserElement.enablePackrat()
-
-
-class Parser(object):
-    '''Parse string based expression into :class:`Expression` instance.'''
-
-    def __init__(self):
-        '''Initialise parser.'''
-        self._operators = {
-            '=': eq,
-            '!=': ne,
-            '>=': ge,
-            '<=': le,
-            '>': gt,
-            '<': lt
-        }
-        self._parser = self._construct_parser()
-        super(Parser, self).__init__()
-
-    def _construct_parser(self):
-        '''Construct and return parser.'''
-        field = Word(alphanums + '_.')
-        operator = oneOf(list(self._operators.keys()))
-        value = Word(alphanums + '-_,./*@+')
-        quoted_value = quotedString('quoted_value').setParseAction(removeQuotes)
-
-        condition = Group(
-            field + operator + (quoted_value | value)
-        )('condition')
-
-        not_ = Optional(Suppress(CaselessKeyword('not')))('not')
-        and_ = Suppress(CaselessKeyword('and'))('and')
-        or_ = Suppress(CaselessKeyword('or'))('or')
-
-        expression = Forward()
-        parenthesis = Suppress('(') + expression + Suppress(')')
-        previous = condition | parenthesis
-
-        for conjunction in (not_, and_, or_):
-            current = Forward()
-
-            if conjunction in (and_, or_):
-                conjunction_expression = (
-                    FollowedBy(previous + conjunction + previous)
-                    + Group(
-                        previous + OneOrMore(conjunction + previous)
-                    )(conjunction.resultsName)
-                )
-
-            elif conjunction in (not_, ):
-                conjunction_expression = (
-                    FollowedBy(conjunction.expr + current)
-                    + Group(conjunction + current)(conjunction.resultsName)
-                )
-
-            else:  # pragma: no cover
-                raise ValueError('Unrecognised conjunction.')
-
-            current <<= (conjunction_expression | previous)
-            previous = current
-
-        expression <<= previous
-        return expression('expression')
-
-    def parse(self, expression):
-        '''Parse string *expression* into :class:`Expression`.
-
-        Raise :exc:`ftrack_api.exception.ParseError` if *expression* could
-        not be parsed.
-
-        '''
-        result = None
-        expression = expression.strip()
-        if expression:
-            try:
-                result = self._parser.parseString(
-                    expression, parseAll=True
-                )
-            except Exception as error:
-                raise ftrack_api.exception.ParseError(
-                    'Failed to parse: {0}. {1}'.format(expression, error)
-                )
-
-        return self._process(result)
-
-    def _process(self, result):
-        '''Process *result* using appropriate method.
-
-        Method called is determined by the name of the result.
-
-        '''
-        method_name = '_process_{0}'.format(result.getName())
-        method = getattr(self, method_name)
-        return method(result)
-
-    def _process_expression(self, result):
-        '''Process *result* as expression.'''
-        return self._process(result[0])
-
-    def _process_not(self, result):
-        '''Process *result* as NOT operation.'''
-        return Not(self._process(result[0]))
-
-    def _process_and(self, result):
-        '''Process *result* as AND operation.'''
-        return All([self._process(entry) for entry in result])
-
-    def _process_or(self, result):
-        '''Process *result* as OR operation.'''
-        return Any([self._process(entry) for entry in result])
-
-    def _process_condition(self, result):
-        '''Process *result* as condition.'''
-        key, operator, value = result
-        return Condition(key, self._operators[operator], value)
-
-    def _process_quoted_value(self, result):
-        '''Process *result* as quoted value.'''
-        return result
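A quick sketch of the parser in use; the candidate mappings are illustrative. Nested keys are addressed with dots and a trailing wildcard works for equality tests:

```python
from ftrack_api.event.expression import Parser

parser = Parser()
expression = parser.parse('topic=ftrack.update and data.entity_type=Task')

candidate = {'topic': 'ftrack.update', 'data': {'entity_type': 'Task'}}
print(expression.match(candidate))  # True

wildcard = parser.parse('topic=ftrack.*')
print(wildcard.match(candidate))    # True
```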
-
-
-class Expression(object):
-    '''Represent a structured expression to test candidates against.'''
-
-    def __str__(self):
-        '''Return string representation.'''
-        return '<{0}>'.format(self.__class__.__name__)
-
-    def match(self, candidate):
-        '''Return whether *candidate* satisfies this expression.'''
-        return True
-
-
-class All(Expression):
-    '''Match candidate that matches all of the specified expressions.
-
-    .. note::
-
-        If no expressions are supplied then will always match.
-
-    '''
-
-    def __init__(self, expressions=None):
-        '''Initialise with list of *expressions* to match against.'''
-        self._expressions = expressions or []
-        super(All, self).__init__()
-
-    def __str__(self):
-        '''Return string representation.'''
-        return '<{0} [{1}]>'.format(
-            self.__class__.__name__,
-            ' '.join(map(str, self._expressions))
-        )
-
-    def match(self, candidate):
-        '''Return whether *candidate* satisfies this expression.'''
-        return all([
-            expression.match(candidate) for expression in self._expressions
-        ])
-
-
-class Any(Expression):
-    '''Match candidate that matches any of the specified expressions.
-
-    .. note::
-
-        If no expressions are supplied then will never match.
-
-    '''
-
-    def __init__(self, expressions=None):
-        '''Initialise with list of *expressions* to match against.'''
-        self._expressions = expressions or []
-        super(Any, self).__init__()
-
-    def __str__(self):
-        '''Return string representation.'''
-        return '<{0} [{1}]>'.format(
-            self.__class__.__name__,
-            ' '.join(map(str, self._expressions))
-        )
-
-    def match(self, candidate):
-        '''Return whether *candidate* satisfies this expression.'''
-        return any([
-            expression.match(candidate) for expression in self._expressions
-        ])
-
-
-class Not(Expression):
-    '''Negate expression.'''
-
-    def __init__(self, expression):
-        '''Initialise with *expression* to negate.'''
-        self._expression = expression
-        super(Not, self).__init__()
-
-    def __str__(self):
-        '''Return string representation.'''
-        return '<{0} {1}>'.format(
-            self.__class__.__name__,
-            self._expression
-        )
-
-    def match(self, candidate):
-        '''Return whether *candidate* satisfies this expression.'''
-        return not self._expression.match(candidate)
-
-
-class Condition(Expression):
-    '''Represent condition.'''
-
-    def __init__(self, key, operator, value):
-        '''Initialise condition.
-
-        *key* is the key to check on the data when matching. It can be a nested
-        key represented by dots. For example, 'data.eventType' would attempt to
-        match candidate['data']['eventType']. If the candidate is missing any
-        of the requested keys then the match fails immediately.
-
-        *operator* is the operator function to use to perform the match between
-        the retrieved candidate value and the conditional *value*.
-
-        If *value* is a string, it can use a wildcard '*' at the end to denote
-        that any values matching the substring portion are valid when matching
-        equality only.
-
-        '''
-        self._key = key
-        self._operator = operator
-        self._value = value
-        self._wildcard = '*'
-        self._operatorMapping = {
-            eq: '=',
-            ne: '!=',
-            ge: '>=',
-            le: '<=',
-            gt: '>',
-            lt: '<'
-        }
-
-    def __str__(self):
-        '''Return string representation.'''
-        return '<{0} {1}{2}{3}>'.format(
-            self.__class__.__name__,
-            self._key,
-            self._operatorMapping.get(self._operator, self._operator),
-            self._value
-        )
-
-    def match(self, candidate):
-        '''Return whether *candidate* satisfies this expression.'''
-        key_parts = self._key.split('.')
-
-        try:
-            value = candidate
-            for keyPart in key_parts:
-                value = value[keyPart]
-        except (KeyError, TypeError):
-            return False
-
-        if (
-            self._operator is eq
-            and isinstance(self._value, basestring)
-            and self._value[-1] == self._wildcard
-        ):
-            return self._value[:-1] in value
-        else:
-            return self._operator(value, self._value)
diff --git a/pype/vendor/ftrack_api/event/hub.py b/pype/vendor/ftrack_api/event/hub.py
deleted file mode 100644
index d5bbb98012..0000000000
--- a/pype/vendor/ftrack_api/event/hub.py
+++ /dev/null
@@ -1,1085 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2013 ftrack
-
-from __future__ import absolute_import
-
-from future import standard_library
-standard_library.install_aliases()
-from builtins import str
-from builtins import range
-from builtins import object
-import collections
-import urllib.parse
-import threading
-import queue as queue
-import logging
-import time
-import uuid
-import operator
-import functools
-import json
-import socket
-import warnings
-
-import requests
-import requests.exceptions
-import websocket
-
-import ftrack_api.exception
-import ftrack_api.event.base
-import ftrack_api.event.subscriber
-import ftrack_api.event.expression
-from ftrack_api.logging import LazyLogMessage as L
-
-
-SocketIoSession = collections.namedtuple('SocketIoSession', [
-    'id',
-    'heartbeatTimeout',
-    'supportedTransports',
-])
-
-
-ServerDetails = collections.namedtuple('ServerDetails', [
-    'scheme',
-    'hostname',
-    'port',
-])
-
-
-class EventHub(object):
-    '''Manage routing of events.'''
-
-    _future_signature_warning = (
-        'When constructing your Session object you did not explicitly define '
-        'auto_connect_event_hub as True even though you appear to be publishing '
-        'and / or subscribing to asynchronous events. In version 2.0 of '
-        'the ftrack-python-api the default behavior will change from True '
-        'to False. Please make sure to update your tools. You can read more at '
-        'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html'
-    )
-
-    def __init__(self, server_url, api_user, api_key):
-        '''Initialise hub, connecting to ftrack *server_url*.
-
-        *api_user* is the user to authenticate as and *api_key* is the API key
-        to authenticate with.
-
-        '''
-        super(EventHub, self).__init__()
-        self.logger = logging.getLogger(
-            __name__ + '.' + self.__class__.__name__
-        )
-        self.id = uuid.uuid4().hex
-        self._connection = None
-
-        self._unique_packet_id = 0
-        self._packet_callbacks = {}
-        self._lock = threading.RLock()
-
-        self._wait_timeout = 4
-
-        self._subscribers = []
-        self._reply_callbacks = {}
-        self._intentional_disconnect = False
-
-        self._event_queue = queue.Queue()
-        self._event_namespace = 'ftrack.event'
-        self._expression_parser = ftrack_api.event.expression.Parser()
-
-        # Default values for auto reconnection timeout on unintentional
-        # disconnection. Equates to 5 minutes.
-        self._auto_reconnect_attempts = 30
-        self._auto_reconnect_delay = 10
-
-        self._deprecation_warning_auto_connect = False
-
-        # Mapping of Socket.IO codes to meaning.
-        self._code_name_mapping = {
-            '0': 'disconnect',
-            '1': 'connect',
-            '2': 'heartbeat',
-            '3': 'message',
-            '4': 'json',
-            '5': 'event',
-            '6': 'acknowledge',
-            '7': 'error'
-        }
-        self._code_name_mapping.update(
-            dict((name, code) for code, name in list(self._code_name_mapping.items()))
-        )
-
-        self._server_url = server_url
-        self._api_user = api_user
-        self._api_key = api_key
-
-        # Parse server URL and store server details.
-        url_parse_result = urllib.parse.urlparse(self._server_url)
-        if not url_parse_result.scheme:
-            raise ValueError('Could not determine scheme from server url.')
-
-        if not url_parse_result.hostname:
-            raise ValueError('Could not determine hostname from server url.')
-
-        self.server = ServerDetails(
-            url_parse_result.scheme,
-            url_parse_result.hostname,
-            url_parse_result.port
-        )
-
-    def get_server_url(self):
-        '''Return URL to server.'''
-        return '{0}://{1}'.format(
-            self.server.scheme, self.get_network_location()
-        )
-
-    def get_network_location(self):
-        '''Return network location part of url (hostname with optional port).'''
-        if self.server.port:
-            return '{0}:{1}'.format(self.server.hostname, self.server.port)
-        else:
-            return self.server.hostname
-
-    @property
-    def secure(self):
-        '''Return whether secure connection used.'''
-        return self.server.scheme == 'https'
-
-    def connect(self):
-        '''Initialise connection to server.
-
-        Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already
-        connected or connection fails.
-
-        '''
-
-        self._deprecation_warning_auto_connect = False
-
-        if self.connected:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Already connected.'
-            )
-
-        # Reset flag tracking whether disconnection was intentional.
- self._intentional_disconnect = False - - try: - # Connect to socket.io server using websocket transport. - session = self._get_socket_io_session() - - if 'websocket' not in session.supportedTransports: - raise ValueError( - 'Server does not support websocket sessions.' - ) - - scheme = 'wss' if self.secure else 'ws' - url = '{0}://{1}/socket.io/1/websocket/{2}'.format( - scheme, self.get_network_location(), session.id - ) - - # timeout is set to 60 seconds to avoid the issue where the socket - # ends up in a bad state where it is reported as connected but the - # connection has been closed. The issue happens often when connected - # to a secure socket and the computer goes to sleep. - # More information on how the timeout works can be found here: - # https://docs.python.org/2/library/socket.html#socket.socket.setblocking - self._connection = websocket.create_connection(url, timeout=60) - - except Exception: - self.logger.debug( - L( - 'Error connecting to event server at {0}.', - self.get_server_url() - ), - exc_info=1 - ) - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to connect to event server at {0}.' - .format(self.get_server_url()) - ) - - # Start background processing thread. - self._processor_thread = _ProcessorThread(self) - self._processor_thread.start() - - # Subscribe to reply events if not already. Note: Only adding the - # subscriber locally as the following block will notify server of all - # existing subscribers, which would cause the server to report a - # duplicate subscriber error if EventHub.subscribe was called here. - try: - self._add_subscriber( - 'topic=ftrack.meta.reply', - self._handle_reply, - subscriber=dict( - id=self.id - ) - ) - except ftrack_api.exception.NotUniqueError: - pass - - # Now resubscribe any existing stored subscribers. This can happen when - # reconnecting automatically for example. - for subscriber in self._subscribers[:]: - self._notify_server_about_subscriber(subscriber) - - @property - def connected(self): - '''Return if connected.''' - return self._connection is not None and self._connection.connected - - def disconnect(self, unsubscribe=True): - '''Disconnect from server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - If *unsubscribe* is True then unsubscribe all current subscribers - automatically before disconnecting. - - ''' - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Not currently connected.' - ) - - else: - # Set flag to indicate disconnection was intentional. - self._intentional_disconnect = True - - # Set blocking to true on socket to make sure unsubscribe events - # are emitted before closing the connection. - self._connection.sock.setblocking(1) - - # Unsubscribe all subscribers. - if unsubscribe: - for subscriber in self._subscribers[:]: - self.unsubscribe(subscriber.metadata['id']) - - # Now disconnect. - self._connection.close() - self._connection = None - - # Shutdown background processing thread. - self._processor_thread.cancel() - - # Join to it if it is not current thread to help ensure a clean - # shutdown. - if threading.current_thread() != self._processor_thread: - self._processor_thread.join(self._wait_timeout) - - def reconnect(self, attempts=10, delay=5): - '''Reconnect to server. - - Make *attempts* number of attempts with *delay* in seconds between each - attempt. - - .. note:: - - All current subscribers will be automatically resubscribed after - successful reconnection. 
-
-        Raise :exc:`ftrack_api.exception.EventHubConnectionError` if unable
-        to reconnect.
-
-        '''
-        try:
-            self.disconnect(unsubscribe=False)
-        except ftrack_api.exception.EventHubConnectionError:
-            pass
-
-        for attempt in range(attempts):
-            self.logger.debug(L(
-                'Reconnect attempt {0} of {1}', attempt, attempts
-            ))
-
-            # Silence logging temporarily to avoid lots of failed connection
-            # related information.
-            try:
-                logging.disable(logging.CRITICAL)
-
-                try:
-                    self.connect()
-                except ftrack_api.exception.EventHubConnectionError:
-                    time.sleep(delay)
-                else:
-                    break
-
-            finally:
-                logging.disable(logging.NOTSET)
-
-        if not self.connected:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Failed to reconnect to event server at {0} after {1} attempts.'
-                .format(self.get_server_url(), attempts)
-            )
-
-    def wait(self, duration=None):
-        '''Wait for events and handle as they arrive.
-
-        If *duration* is specified, then only process events until duration is
-        reached. *duration* is in seconds though float values can be used for
-        smaller values.
-
-        '''
-        started = time.time()
-
-        while True:
-            try:
-                event = self._event_queue.get(timeout=0.1)
-            except queue.Empty:
-                pass
-            else:
-                self._handle(event)
-
-                # Additional special processing of events.
-                if event['topic'] == 'ftrack.meta.disconnected':
-                    break
-
-            if duration is not None:
-                if (time.time() - started) > duration:
-                    break
-
-    def get_subscriber_by_identifier(self, identifier):
-        '''Return subscriber with matching *identifier*.
-
-        Return None if no subscriber with *identifier* found.
-
-        '''
-        for subscriber in self._subscribers[:]:
-            if subscriber.metadata.get('id') == identifier:
-                return subscriber
-
-        return None
-
-    def subscribe(self, subscription, callback, subscriber=None, priority=100):
-        '''Register *callback* for *subscription*.
-
-        A *subscription* is a string that can specify in detail which events the
-        callback should receive. The filtering is applied against each event
-        object. Nested references are supported using '.' separators.
-        For example, 'topic=foo and data.eventType=Shot' would match the
-        following event::
-
-            <Event {'topic': 'foo', 'data': {'eventType': 'Shot'}}>
-
-        The *callback* should accept an instance of
-        :class:`ftrack_api.event.base.Event` as its sole argument.
-
-        Callbacks are called in order of *priority*. The lower the priority
-        number the sooner it will be called, with 0 being the first. The
-        default priority is 100. Note that priority only applies against other
-        callbacks registered with this hub and not as a global priority.
-
-        An earlier callback can prevent processing of subsequent callbacks by
-        calling :meth:`Event.stop` on the passed `event` before
-        returning.
-
-        .. warning::
-
-            Handlers block processing of other received events. For long
-            running callbacks it is advisable to delegate the main work to
-            another process or thread.
-
-        A *callback* can be attached to *subscriber* information that details
-        the subscriber context. A subscriber context will be generated
-        automatically if not supplied.
-
-        .. note::
-
-            The subscription will be stored locally, but until the server
-            receives notification of the subscription it is possible the
-            callback will not be called.
-
-        Return subscriber identifier.
-
-        Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with
-        the same identifier already exists.
-
-        '''
-        # Add subscriber locally.
-        subscriber = self._add_subscriber(
-            subscription, callback, subscriber, priority
-        )
-
-        # Notify server now if possible.
- try: - self._notify_server_about_subscriber(subscriber) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server about new subscriber {0} ' - 'as server not currently reachable.', subscriber.metadata['id'] - )) - - return subscriber.metadata['id'] - - def _add_subscriber( - self, subscription, callback, subscriber=None, priority=100 - ): - '''Add subscriber locally. - - See :meth:`subscribe` for argument descriptions. - - Return :class:`ftrack_api.event.subscriber.Subscriber` instance. - - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - if subscriber is None: - subscriber = {} - - subscriber.setdefault('id', uuid.uuid4().hex) - - # Check subscriber not already subscribed. - existing_subscriber = self.get_subscriber_by_identifier( - subscriber['id'] - ) - - if existing_subscriber is not None: - raise ftrack_api.exception.NotUniqueError( - 'Subscriber with identifier {0} already exists.' - .format(subscriber['id']) - ) - - subscriber = ftrack_api.event.subscriber.Subscriber( - subscription=subscription, - callback=callback, - metadata=subscriber, - priority=priority - ) - - self._subscribers.append(subscriber) - - return subscriber - - def _notify_server_about_subscriber(self, subscriber): - '''Notify server of new *subscriber*.''' - subscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.subscribe', - data=dict( - subscriber=subscriber.metadata, - subscription=str(subscriber.subscription) - ) - ) - - self._publish( - subscribe_event, - callback=functools.partial(self._on_subscribed, subscriber) - ) - - def _on_subscribed(self, subscriber, response): - '''Handle acknowledgement of subscription.''' - if response.get('success') is False: - self.logger.warning(L( - 'Server failed to subscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def unsubscribe(self, subscriber_identifier): - '''Unsubscribe subscriber with *subscriber_identifier*. - - .. note:: - - If the server is not reachable then it won't be notified of the - unsubscription. However, the subscriber will be removed locally - regardless. - - ''' - subscriber = self.get_subscriber_by_identifier(subscriber_identifier) - - if subscriber is None: - raise ftrack_api.exception.NotFoundError( - 'Cannot unsubscribe missing subscriber with identifier {0}' - .format(subscriber_identifier) - ) - - self._subscribers.pop(self._subscribers.index(subscriber)) - - # Notify the server if possible. 
- unsubscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.unsubscribe', - data=dict(subscriber=subscriber.metadata) - ) - - try: - self._publish( - unsubscribe_event, - callback=functools.partial(self._on_unsubscribed, subscriber) - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server to unsubscribe subscriber {0} as ' - 'server not currently reachable.', subscriber.metadata['id'] - )) - - def _on_unsubscribed(self, subscriber, response): - '''Handle acknowledgement of unsubscribing *subscriber*.''' - if response.get('success') is not True: - self.logger.warning(L( - 'Server failed to unsubscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def _prepare_event(self, event): - '''Prepare *event* for sending.''' - event['source'].setdefault('id', self.id) - event['source'].setdefault('user', { - 'username': self._api_user - }) - - def _prepare_reply_event(self, event, source_event, source=None): - '''Prepare *event* as a reply to another *source_event*. - - Modify *event*, setting appropriate values to target event correctly as - a reply. - - ''' - event['target'] = 'id={0}'.format(source_event['source']['id']) - event['in_reply_to_event'] = source_event['id'] - if source is not None: - event['source'] = source - - def publish( - self, event, synchronous=False, on_reply=None, on_error='raise' - ): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. - - .. note:: - - Will not be called when *synchronous* is True. - - If *on_error* is set to 'ignore' then errors raised during publish of - event will be caught by this method and ignored. - - ''' - if self._deprecation_warning_auto_connect and not synchronous: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - try: - return self._publish( - event, synchronous=synchronous, on_reply=on_reply - ) - except Exception: - if on_error == 'ignore': - pass - else: - raise - - def publish_reply(self, source_event, data, source=None): - '''Publish a reply event to *source_event* with supplied *data*. - - If *source* is specified it will be used for the source value of the - sent event. - - ''' - reply_event = ftrack_api.event.base.Event( - 'ftrack.meta.reply', - data=data - ) - self._prepare_reply_event(reply_event, source_event, source=source) - self.publish(reply_event) - - def _publish(self, event, synchronous=False, callback=None, on_reply=None): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - A *callback* can also be specified. This callback will be called once - the server acknowledges receipt of the sent event. A default callback - that checks for errors from the server will be used if not specified. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. 
Note that there is no - guarantee that a reply will be sent. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - ''' - # Prepare event adding any relevant additional information. - self._prepare_event(event) - - if synchronous: - # Bypass emitting event to server and instead call locally - # registered handlers directly, collecting and returning results. - return self._handle(event, synchronous=synchronous) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Cannot publish event asynchronously as not connected to ' - 'server.' - ) - - # Use standard callback if none specified. - if callback is None: - callback = functools.partial(self._on_published, event) - - # Emit event to central server for asynchronous processing. - try: - # Register on reply callback if specified. - if on_reply is not None: - # TODO: Add cleanup process that runs after a set duration to - # garbage collect old reply callbacks and prevent dictionary - # growing too large. - self._reply_callbacks[event['id']] = on_reply - - try: - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except ftrack_api.exception.EventHubConnectionError: - # Connection may have dropped temporarily. Wait a few moments to - # see if background thread reconnects automatically. - time.sleep(15) - - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except: - raise - - except Exception: - # Failure to send event should not cause caller to fail. - # TODO: This behaviour is inconsistent with the failing earlier on - # lack of connection and also with the error handling parameter of - # EventHub.publish. Consider refactoring. - self.logger.exception(L('Error sending event {0}.', event)) - - def _on_published(self, event, response): - '''Handle acknowledgement of published event.''' - if response.get('success', False) is False: - self.logger.error(L( - 'Server responded with error while publishing event {0}. ' - 'Error was: {1}', event, response.get('message') - )) - - def _handle(self, event, synchronous=False): - '''Handle *event*. - - If *synchronous* is True, do not send any automatic reply events. - - ''' - # Sort by priority, lower is higher. - # TODO: Use a sorted list to avoid sorting each time in order to improve - # performance. - subscribers = sorted( - self._subscribers, key=operator.attrgetter('priority') - ) - - results = [] - - target = event.get('target', None) - target_expression = None - if target: - try: - target_expression = self._expression_parser.parse(target) - except Exception: - self.logger.exception(L( - 'Cannot handle event as failed to parse event target ' - 'information: {0}', event - )) - return - - for subscriber in subscribers: - # Check if event is targeted to the subscriber. - if ( - target_expression is not None - and not target_expression.match(subscriber.metadata) - ): - continue - - # Check if subscriber interested in the event. - if not subscriber.interested_in(event): - continue - - response = None - - try: - response = subscriber.callback(event) - results.append(response) - except Exception: - self.logger.exception(L( - 'Error calling subscriber {0} for event {1}.', - subscriber, event - )) - - # Automatically publish a non None response as a reply when not in - # synchronous mode. 
-            if not synchronous:
-                if self._deprecation_warning_auto_connect:
-                    warnings.warn(
-                        self._future_signature_warning, FutureWarning
-                    )
-
-                if response is not None:
-                    try:
-                        self.publish_reply(
-                            event, data=response, source=subscriber.metadata
-                        )
-
-                    except Exception:
-                        self.logger.exception(L(
-                            'Error publishing response {0} from subscriber {1} '
-                            'for event {2}.', response, subscriber, event
-                        ))
-
-            # Check whether to continue processing topic event.
-            if event.is_stopped():
-                self.logger.debug(L(
-                    'Subscriber {0} stopped event {1}. Will not process '
-                    'subsequent subscriber callbacks for this event.',
-                    subscriber, event
-                ))
-                break
-
-        return results
-
-    def _handle_reply(self, event):
-        '''Handle reply *event*, passing it to any registered callback.'''
-        callback = self._reply_callbacks.get(event['in_reply_to_event'], None)
-        if callback is not None:
-            callback(event)
-
-    def subscription(self, subscription, callback, subscriber=None,
-                     priority=100):
-        '''Return context manager with *callback* subscribed to *subscription*.
-
-        The subscribed callback will be automatically unsubscribed on exit
-        of the context manager.
-
-        '''
-        return _SubscriptionContext(
-            self, subscription, callback, subscriber=subscriber,
-            priority=priority,
-        )
-
-    # Socket.IO interface.
-    #
-
-    def _get_socket_io_session(self):
-        '''Connect to server and retrieve session information.'''
-        socket_io_url = (
-            '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}'
-        ).format(
-            self.server.scheme,
-            self.get_network_location(),
-            self._api_user,
-            self._api_key
-        )
-        try:
-            response = requests.get(
-                socket_io_url,
-                timeout=60  # 60 seconds timeout to receive errors faster.
-            )
-        except requests.exceptions.Timeout as error:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Timed out connecting to server: {0}.'.format(error)
-            )
-        except requests.exceptions.SSLError as error:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Failed to negotiate SSL with server: {0}.'.format(error)
-            )
-        except requests.exceptions.ConnectionError as error:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Failed to connect to server: {0}.'.format(error)
-            )
-        else:
-            status = response.status_code
-            if status != 200:
-                raise ftrack_api.exception.EventHubConnectionError(
-                    'Received unexpected status code {0}.'.format(status)
-                )
-
-        # Parse result and return session information.
-        parts = response.text.split(':')
-        return SocketIoSession(
-            parts[0],
-            parts[1],
-            parts[3].split(',')
-        )
-
-    def _add_packet_callback(self, callback):
-        '''Store callback against a new unique packet ID.
-
-        Return the unique packet ID.
-
-        '''
-        with self._lock:
-            self._unique_packet_id += 1
-            unique_identifier = self._unique_packet_id
-
-        self._packet_callbacks[unique_identifier] = callback
-
-        return '{0}+'.format(unique_identifier)
-
-    def _pop_packet_callback(self, packet_identifier):
-        '''Pop and return callback for *packet_identifier*.'''
-        return self._packet_callbacks.pop(packet_identifier)
-
-    def _emit_event_packet(self, namespace, event, callback):
-        '''Send *event* packet under *namespace*.'''
-        data = self._encode(
-            dict(name=namespace, args=[event])
-        )
-        self._send_packet(
-            self._code_name_mapping['event'], data=data, callback=callback
-        )
-
-    def _acknowledge_packet(self, packet_identifier, *args):
-        '''Send acknowledgement of packet with *packet_identifier*.'''
-        packet_identifier = packet_identifier.rstrip('+')
-        data = str(packet_identifier)
-        if args:
-            data += '+{0}'.format(self._encode(args))
-
-        self._send_packet(self._code_name_mapping['acknowledge'], data=data)
-
-    def _send_packet(self, code, data='', callback=None):
-        '''Send packet via connection.'''
-        path = ''
-        packet_identifier = (
-            self._add_packet_callback(callback) if callback else ''
-        )
-        packet_parts = (str(code), packet_identifier, path, data)
-        packet = ':'.join(packet_parts)
-
-        try:
-            self._connection.send(packet)
-            self.logger.debug(L(u'Sent packet: {0}', packet))
-        except socket.error as error:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Failed to send packet: {0}'.format(error)
-            )
-
-    def _receive_packet(self):
-        '''Receive and return packet via connection.'''
-        try:
-            packet = self._connection.recv()
-        except Exception as error:
-            raise ftrack_api.exception.EventHubConnectionError(
-                'Error receiving packet: {0}'.format(error)
-            )
-
-        try:
-            parts = packet.split(':', 3)
-        except AttributeError:
-            raise ftrack_api.exception.EventHubPacketError(
-                'Received invalid packet {0}'.format(packet)
-            )
-
-        code, packet_identifier, path, data = None, None, None, None
-
-        count = len(parts)
-        if count == 4:
-            code, packet_identifier, path, data = parts
-        elif count == 3:
-            code, packet_identifier, path = parts
-        elif count == 1:
-            code = parts[0]
-        else:
-            raise ftrack_api.exception.EventHubPacketError(
-                'Received invalid packet {0}'.format(packet)
-            )
-
-        self.logger.debug(L('Received packet: {0}', packet))
-        return code, packet_identifier, path, data
-
-    def _handle_packet(self, code, packet_identifier, path, data):
-        '''Handle packet received from server.'''
-        code_name = self._code_name_mapping[code]
-
-        if code_name == 'connect':
-            self.logger.debug('Connected to event server.')
-            event = ftrack_api.event.base.Event('ftrack.meta.connected')
-            self._event_queue.put(event)
-
-        elif code_name == 'disconnect':
-            self.logger.debug('Disconnected from event server.')
-            if not self._intentional_disconnect:
-                self.logger.debug(
-                    'Disconnected unexpectedly. Attempting to reconnect.'
-                )
-                try:
-                    self.reconnect(
-                        attempts=self._auto_reconnect_attempts,
-                        delay=self._auto_reconnect_delay
-                    )
-                except ftrack_api.exception.EventHubConnectionError:
-                    self.logger.debug('Failed to reconnect automatically.')
-                else:
-                    self.logger.debug('Reconnected successfully.')
-
-            if not self.connected:
-                event = ftrack_api.event.base.Event('ftrack.meta.disconnected')
-                self._event_queue.put(event)
-
-        elif code_name == 'heartbeat':
-            # Reply with heartbeat.
- self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == 'message': - self.logger.debug(L('Message received: {0}', data)) - - elif code_name == 'event': - payload = self._decode(data) - args = payload.get('args', []) - - if len(args) == 1: - event_payload = args[0] - if isinstance(event_payload, collections.Mapping): - try: - event = ftrack_api.event.base.Event(**event_payload) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_payload - )) - return - - self._event_queue.put(event) - - elif code_name == 'acknowledge': - parts = data.split('+', 1) - acknowledged_packet_identifier = int(parts[0]) - args = [] - if len(parts) == 2: - args = self._decode(parts[1]) - - try: - callback = self._pop_packet_callback( - acknowledged_packet_identifier - ) - except KeyError: - pass - else: - callback(*args) - - elif code_name == 'error': - self.logger.error(L('Event server reported error: {0}.', data)) - - else: - self.logger.debug(L('{0}: {1}', code_name, data)) - - def _encode(self, data): - '''Return *data* encoded as JSON formatted string.''' - return json.dumps( - data, - default=self._encode_object_hook, - ensure_ascii=False - ) - - def _encode_object_hook(self, item): - '''Return *item* transformed for encoding.''' - if isinstance(item, ftrack_api.event.base.Event): - # Convert to dictionary for encoding. - item = dict(**item) - - if 'in_reply_to_event' in item: - # Convert keys to server convention. - item['inReplyToEvent'] = item.pop('in_reply_to_event') - - return item - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def _decode(self, string): - '''Return decoded JSON *string* as Python object.''' - return json.loads(string, object_hook=self._decode_object_hook) - - def _decode_object_hook(self, item): - '''Return *item* transformed.''' - if isinstance(item, collections.Mapping): - if 'inReplyToEvent' in item: - item['in_reply_to_event'] = item.pop('inReplyToEvent') - - return item - - -class _SubscriptionContext(object): - '''Context manager for a one-off subscription.''' - - def __init__(self, hub, subscription, callback, subscriber, priority): - '''Initialise context.''' - self._hub = hub - self._subscription = subscription - self._callback = callback - self._subscriber = subscriber - self._priority = priority - self._subscriberIdentifier = None - - def __enter__(self): - '''Enter context subscribing callback to topic.''' - self._subscriberIdentifier = self._hub.subscribe( - self._subscription, self._callback, subscriber=self._subscriber, - priority=self._priority - ) - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context unsubscribing callback from topic.''' - self._hub.unsubscribe(self._subscriberIdentifier) - - -class _ProcessorThread(threading.Thread): - '''Process messages from server.''' - - daemon = True - - def __init__(self, client): - '''Initialise thread with Socket.IO *client* instance.''' - super(_ProcessorThread, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.client = client - self.done = threading.Event() - - def run(self): - '''Perform work in thread.''' - while not self.done.is_set(): - try: - code, packet_identifier, path, data = self.client._receive_packet() - self.client._handle_packet(code, packet_identifier, path, data) - - except ftrack_api.exception.EventHubPacketError as error: - self.logger.debug(L('Ignoring invalid packet: {0}', error)) - continue - - except ftrack_api.exception.EventHubConnectionError: - self.cancel() - - # Fake a disconnection event in order to trigger reconnection - # when necessary. - self.client._handle_packet('0', '', '', '') - - break - - except Exception as error: - self.logger.debug(L('Aborting processor thread: {0}', error)) - self.cancel() - break - - def cancel(self): - '''Cancel work as soon as possible.''' - self.done.set() diff --git a/pype/vendor/ftrack_api/event/subscriber.py b/pype/vendor/ftrack_api/event/subscriber.py deleted file mode 100644 index 2afdaa7877..0000000000 --- a/pype/vendor/ftrack_api/event/subscriber.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from builtins import object -import ftrack_api.event.subscription - - -class Subscriber(object): - '''Represent event subscriber.''' - - def __init__(self, subscription, callback, metadata, priority): - '''Initialise subscriber.''' - self.subscription = ftrack_api.event.subscription.Subscription( - subscription - ) - self.callback = callback - self.metadata = metadata - self.priority = priority - - def __str__(self): - '''Return string representation.''' - return '<{0} metadata={1} subscription="{2}">'.format( - self.__class__.__name__, self.metadata, self.subscription - ) - - def interested_in(self, event): - '''Return whether subscriber interested in *event*.''' - return self.subscription.includes(event) diff --git a/pype/vendor/ftrack_api/event/subscription.py b/pype/vendor/ftrack_api/event/subscription.py deleted file mode 100644 index f3a839f586..0000000000 --- a/pype/vendor/ftrack_api/event/subscription.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from builtins import object -import ftrack_api.event.expression - - -class Subscription(object): - '''Represent a subscription.''' - - parser = ftrack_api.event.expression.Parser() - - def __init__(self, subscription): - '''Initialise with *subscription*.''' - self._subscription = subscription - self._expression = self.parser.parse(subscription) - - def __str__(self): - '''Return string representation.''' - return self._subscription - - def includes(self, event): - '''Return whether subscription includes *event*.''' - return self._expression.match(event) diff --git a/pype/vendor/ftrack_api/exception.py b/pype/vendor/ftrack_api/exception.py deleted file mode 100644 index 77bdf5b1ae..0000000000 --- a/pype/vendor/ftrack_api/exception.py +++ /dev/null @@ -1,393 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from builtins import str -import sys -import traceback - -import ftrack_api.entity.base - - -class Error(Exception): - '''ftrack specific error.''' - - default_message = 'Unspecified error occurred.' - - def __init__(self, message=None, details=None): - '''Initialise exception with *message*. - - If *message* is None, the class 'default_message' will be used. - - *details* should be a mapping of extra information that can be used in - the message and also to provide more context. 
- - ''' - if message is None: - message = self.default_message - - self.message = message - self.details = details - if self.details is None: - self.details = {} - - self.traceback = traceback.format_exc() - - def __str__(self): - '''Return string representation.''' - keys = {} - for key, value in self.details.items(): - if isinstance(value, str): - value = value.encode(sys.getfilesystemencoding()) - keys[key] = value - - return str(self.message.format(**keys)) - - -class AuthenticationError(Error): - '''Raise when an authentication error occurs.''' - - default_message = 'Authentication error.' - - -class ServerError(Error): - '''Raise when the server reports an error.''' - - default_message = 'Server reported error processing request.' - - -class ServerCompatibilityError(ServerError): - '''Raise when server appears incompatible.''' - - default_message = 'Server incompatible.' - - -class NotFoundError(Error): - '''Raise when something that should exist is not found.''' - - default_message = 'Not found.' - - -class NotUniqueError(Error): - '''Raise when unique value required and duplicate detected.''' - - default_message = 'Non-unique value detected.' - - -class IncorrectResultError(Error): - '''Raise when a result is incorrect.''' - - default_message = 'Incorrect result detected.' - - -class NoResultFoundError(IncorrectResultError): - '''Raise when a result was expected but no result was found.''' - - default_message = 'Expected result, but no result was found.' - - -class MultipleResultsFoundError(IncorrectResultError): - '''Raise when a single result expected, but multiple results found.''' - - default_message = 'Expected single result, but received multiple results.' - - -class EntityTypeError(Error): - '''Raise when an entity type error occurs.''' - - default_message = 'Entity type error.' - - -class UnrecognisedEntityTypeError(EntityTypeError): - '''Raise when an unrecognised entity type detected.''' - - default_message = 'Entity type "{entity_type}" not recognised.' - - def __init__(self, entity_type, **kw): - '''Initialise with *entity_type* that is unrecognised.''' - kw.setdefault('details', {}).update(dict( - entity_type=entity_type - )) - super(UnrecognisedEntityTypeError, self).__init__(**kw) - - -class OperationError(Error): - '''Raise when an operation error occurs.''' - - default_message = 'Operation error.' - - -class InvalidStateError(Error): - '''Raise when an invalid state detected.''' - - default_message = 'Invalid state.' - - -class InvalidStateTransitionError(InvalidStateError): - '''Raise when an invalid state transition detected.''' - - default_message = ( - 'Invalid transition from {current_state!r} to {target_state!r} state ' - 'for entity {entity!r}' - ) - - def __init__(self, current_state, target_state, entity, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - current_state=current_state, - target_state=target_state, - entity=entity - )) - super(InvalidStateTransitionError, self).__init__(**kw) - - -class AttributeError(Error): - '''Raise when an error related to an attribute occurs.''' - - default_message = 'Attribute error.' - - -class ImmutableAttributeError(AttributeError): - '''Raise when modification of immutable attribute attempted.''' - - default_message = ( - 'Cannot modify value of immutable {attribute.name!r} attribute.' 
- ) - - def __init__(self, attribute, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - attribute=attribute - )) - super(ImmutableAttributeError, self).__init__(**kw) - - -class CollectionError(Error): - '''Raise when an error related to collections occurs.''' - - default_message = 'Collection error.' - - def __init__(self, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - collection=collection - )) - super(CollectionError, self).__init__(**kw) - - -class ImmutableCollectionError(CollectionError): - '''Raise when modification of immutable collection attempted.''' - - default_message = ( - 'Cannot modify value of immutable collection {collection!r}.' - ) - - -class DuplicateItemInCollectionError(CollectionError): - '''Raise when duplicate item in collection detected.''' - - default_message = ( - 'Item {item!r} already exists in collection {collection!r}.' - ) - - def __init__(self, item, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - item=item - )) - super(DuplicateItemInCollectionError, self).__init__(collection, **kw) - - -class ParseError(Error): - '''Raise when a parsing error occurs.''' - - default_message = 'Failed to parse.' - - -class EventHubError(Error): - '''Raise when issues related to event hub occur.''' - - default_message = 'Event hub error occurred.' - - -class EventHubConnectionError(EventHubError): - '''Raise when event hub encounters connection problem.''' - - default_message = 'Event hub is not connected.' - - -class EventHubPacketError(EventHubError): - '''Raise when event hub encounters an issue with a packet.''' - - default_message = 'Invalid packet.' - - -class PermissionDeniedError(Error): - '''Raise when permission is denied.''' - - default_message = 'Permission denied.' - - -class LocationError(Error): - '''Base for errors associated with locations.''' - - default_message = 'Unspecified location error' - - -class ComponentNotInAnyLocationError(LocationError): - '''Raise when component not available in any location.''' - - default_message = 'Component not available in any location.' - - -class ComponentNotInLocationError(LocationError): - '''Raise when component(s) not in location.''' - - default_message = ( - 'Component(s) {formatted_components} not found in location {location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentNotInLocationError, self).__init__(**kw) - - -class ComponentInLocationError(LocationError): - '''Raise when component(s) already exists in location.''' - - default_message = ( - 'Component(s) {formatted_components} already exist in location ' - '{location}.' 
-    )
-
-    def __init__(self, components, location, **kw):
-        '''Initialise with *components* and *location*.'''
-        if isinstance(components, ftrack_api.entity.base.Entity):
-            components = [components]
-
-        kw.setdefault('details', {}).update(dict(
-            components=components,
-            formatted_components=', '.join(
-                [str(component) for component in components]
-            ),
-            location=location
-        ))
-
-        super(ComponentInLocationError, self).__init__(**kw)
-
-
-class AccessorError(Error):
-    '''Base for errors associated with accessors.'''
-
-    default_message = 'Unspecified accessor error'
-
-
-class AccessorOperationFailedError(AccessorError):
-    '''Base for failed operations on accessors.'''
-
-    default_message = 'Operation {operation} failed: {error}'
-
-    def __init__(
-        self, operation='', resource_identifier=None, error=None, **kw
-    ):
-        kw.setdefault('details', {}).update(dict(
-            operation=operation,
-            resource_identifier=resource_identifier,
-            error=error
-        ))
-        super(AccessorOperationFailedError, self).__init__(**kw)
-
-
-class AccessorUnsupportedOperationError(AccessorOperationFailedError):
-    '''Raise when operation is unsupported.'''
-
-    default_message = 'Operation {operation} unsupported.'
-
-
-class AccessorPermissionDeniedError(AccessorOperationFailedError):
-    '''Raise when permission denied.'''
-
-    default_message = (
-        'Cannot {operation} {resource_identifier}. Permission denied.'
-    )
-
-
-class AccessorResourceIdentifierError(AccessorError):
-    '''Raise when an error related to a resource_identifier occurs.'''
-
-    default_message = 'Resource identifier is invalid: {resource_identifier}.'
-
-    def __init__(self, resource_identifier, **kw):
-        kw.setdefault('details', {}).update(dict(
-            resource_identifier=resource_identifier
-        ))
-        super(AccessorResourceIdentifierError, self).__init__(**kw)
-
-
-class AccessorFilesystemPathError(AccessorResourceIdentifierError):
-    '''Raise when an error related to an accessor filesystem path occurs.'''
-
-    default_message = (
-        'Could not determine filesystem path from resource identifier: '
-        '{resource_identifier}.'
-    )
-
-
-class AccessorResourceError(AccessorError):
-    '''Base for errors associated with specific resource.'''
-
-    default_message = 'Unspecified resource error: {resource_identifier}'
-
-    def __init__(self, operation='', resource_identifier=None, error=None,
-                 **kw):
-        kw.setdefault('details', {}).update(dict(
-            operation=operation,
-            resource_identifier=resource_identifier
-        ))
-        super(AccessorResourceError, self).__init__(**kw)
-
-
-class AccessorResourceNotFoundError(AccessorResourceError):
-    '''Raise when a required resource is not found.'''
-
-    default_message = 'Resource not found: {resource_identifier}'
-
-
-class AccessorParentResourceNotFoundError(AccessorResourceError):
-    '''Raise when a parent resource (such as directory) is not found.'''
-
-    default_message = 'Parent resource is missing: {resource_identifier}'
-
-
-class AccessorResourceInvalidError(AccessorResourceError):
-    '''Raise when a resource is not the right type.'''
-
-    default_message = 'Resource invalid: {resource_identifier}'
-
-
-class AccessorContainerNotEmptyError(AccessorResourceError):
-    '''Raise when container is not empty.'''
-
-    default_message = 'Container is not empty: {resource_identifier}'
-
-
-class StructureError(Error):
-    '''Base for errors associated with structures.'''
-
-    default_message = 'Unspecified structure error'
-
-
-class ConnectionClosedError(Error):
-    '''Raise when attempt to use closed connection detected.'''
-
-    default_message = "Connection closed."
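Reviewer note: every class in the exception module removed above funnels through one mechanism. `Error.__init__` stores a *details* mapping and `Error.__str__` formats `default_message` with it, so each subclass only declares a message template and the context it captures. A minimal sketch of that contract; `RenderError` and its `attempts` field are hypothetical, introduced here purely for illustration against any copy of ftrack_api:

    import ftrack_api.exception

    class RenderError(ftrack_api.exception.Error):
        '''Hypothetical error type demonstrating the details contract.'''

        default_message = 'Render failed after {attempts} attempt(s).'

    # The details mapping supplies values for the placeholders in
    # default_message; __str__ performs the substitution on demand.
    error = RenderError(details={'attempts': 3})
    print(str(error))  # Render failed after 3 attempt(s).

Passing an explicit *message* to `__init__` overrides `default_message`, which is why subclasses such as `AccessorResourceError` only populate `details` and leave the formatting to the base class.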
diff --git a/pype/vendor/ftrack_api/formatter.py b/pype/vendor/ftrack_api/formatter.py
deleted file mode 100644
index 619c39c6eb..0000000000
--- a/pype/vendor/ftrack_api/formatter.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-from builtins import str
-import termcolor
-
-import ftrack_api.entity.base
-import ftrack_api.collection
-import ftrack_api.symbol
-import ftrack_api.inspection
-
-
-#: Useful filters to pass to :func:`format`.
-FILTER = {
-    'ignore_unset': (
-        lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET
-    )
-}
-
-
-def format(
-    entity, formatters=None, attribute_filter=None, recursive=False,
-    indent=0, indent_first_line=True, _seen=None
-):
-    '''Return formatted string representing *entity*.
-
-    *formatters* can be used to customise formatting of elements. It should be a
-    mapping with one or more of the following keys:
-
-    * header - Used to format entity type.
-    * label - Used to format attribute names.
-
-    Specify an *attribute_filter* to control which attributes to include. By
-    default all attributes are included. The *attribute_filter* should be a
-    callable that accepts `(entity, attribute_name, attribute_value)` and
-    returns True if the attribute should be included in the output. For example,
-    to filter out all unset values::
-
-        attribute_filter=ftrack_api.formatter.FILTER['ignore_unset']
-
-    If *recursive* is True then recurse into Collections and format each entity
-    present.
-
-    *indent* specifies the overall indentation in spaces of the formatted text,
-    whilst *indent_first_line* determines whether to apply that indent to the
-    first generated line.
-
-    .. warning::
-
-        Iterates over all *entity* attributes which may cause multiple queries
-        to the server. Turn off auto populating in the session to prevent this.
-
-    '''
-    # Initialise default formatters.
-    if formatters is None:
-        formatters = dict()
-
-    formatters.setdefault(
-        'header', lambda text: termcolor.colored(
-            text, 'white', 'on_blue', attrs=['bold']
-        )
-    )
-    formatters.setdefault(
-        'label', lambda text: termcolor.colored(
-            text, 'blue', attrs=['bold']
-        )
-    )
-
-    # Determine indents.
-    spacer = ' ' * indent
-    if indent_first_line:
-        first_line_spacer = spacer
-    else:
-        first_line_spacer = ''
-
-    # Avoid infinite recursion on circular references.
- if _seen is None: - _seen = set() - - identifier = str(ftrack_api.inspection.identity(entity)) - if identifier in _seen: - return ( - first_line_spacer + - formatters['header'](entity.entity_type) + '{...}' - ) - - _seen.add(identifier) - information = list() - - information.append( - first_line_spacer + formatters['header'](entity.entity_type) - ) - for key, value in sorted(entity.items()): - if attribute_filter is not None: - if not attribute_filter(entity, key, value): - continue - - child_indent = indent + len(key) + 3 - - if isinstance(value, ftrack_api.entity.base.Entity): - value = format( - value, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=False, - _seen=_seen.copy() - ) - - if isinstance(value, ftrack_api.collection.Collection): - if recursive: - child_values = [] - for index, child in enumerate(value): - child_value = format( - child, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=index != 0, - _seen=_seen.copy() - ) - child_values.append(child_value) - - value = '\n'.join(child_values) - - information.append( - spacer + u' {0}: {1}'.format(formatters['label'](key), value) - ) - - return '\n'.join(information) diff --git a/pype/vendor/ftrack_api/inspection.py b/pype/vendor/ftrack_api/inspection.py deleted file mode 100644 index 71c01f50c0..0000000000 --- a/pype/vendor/ftrack_api/inspection.py +++ /dev/null @@ -1,141 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -from builtins import str -from future.utils import native_str -import collections - -import ftrack_api.symbol -import ftrack_api.operation - - -def identity(entity): - '''Return unique identity of *entity*.''' - return ( - str(entity.entity_type), - list(primary_key(entity).values()) - ) - - -def primary_key(entity): - '''Return primary key of *entity* as an ordered mapping of {field: value}. - - To get just the primary key values:: - - primary_key(entity).values() - - ''' - primary_key = collections.OrderedDict() - for name in entity.primary_key_attributes: - value = entity[name] - if value is ftrack_api.symbol.NOT_SET: - raise KeyError( - 'Missing required value for primary key attribute "{0}" on ' - 'entity {1!r}.'.format(name, entity) - ) - - - # This is something I am not happy about. - # COMPAT! - - primary_key[native_str(name)] = native_str(value) - - return primary_key - - -def _state(operation, state): - '''Return state following *operation* against current *state*.''' - if ( - isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.CREATED - - elif ( - isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.MODIFIED - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - state = ftrack_api.symbol.DELETED - - return state - - -def state(entity): - '''Return current *entity* state. - - .. seealso:: :func:`ftrack_api.inspection.states`. - - ''' - value = ftrack_api.symbol.NOT_SET - - for operation in entity.session.recorded_operations: - # Determine if operation refers to an entity and whether that entity - # is *entity*. 
- if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - and operation.entity_type == entity.entity_type - and operation.entity_key == primary_key(entity) - ): - value = _state(operation, value) - - return value - - -def states(entities): - '''Return current states of *entities*. - - An optimised function for determining states of multiple entities in one - go. - - .. note:: - - All *entities* should belong to the same session. - - .. seealso:: :func:`ftrack_api.inspection.state`. - - ''' - if not entities: - return [] - - session = entities[0].session - - entities_by_identity = collections.OrderedDict() - for entity in entities: - key = (entity.entity_type, str(list(primary_key(entity).values()))) - entities_by_identity[key] = ftrack_api.symbol.NOT_SET - - for operation in session.recorded_operations: - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - ): - key = (operation.entity_type, str(list(operation.entity_key.values()))) - if key not in entities_by_identity: - continue - - value = _state(operation, entities_by_identity[key]) - entities_by_identity[key] = value - - return list(entities_by_identity.values()) diff --git a/pype/vendor/ftrack_api/logging.py b/pype/vendor/ftrack_api/logging.py deleted file mode 100644 index 49944b2034..0000000000 --- a/pype/vendor/ftrack_api/logging.py +++ /dev/null @@ -1,27 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - - -from builtins import object -class LazyLogMessage(object): - '''A log message that can be evaluated lazily for improved performance. - - Example:: - - # Formatting of string will not occur unless debug logging enabled. - logger.debug(LazyLogMessage( - 'Hello {0}', 'world' - )) - - ''' - - def __init__(self, message, *args, **kwargs): - '''Initialise with *message* format string and arguments.''' - self.message = message - self.args = args - self.kwargs = kwargs - - def __str__(self): - '''Return string representation.''' - return self.message.format(*self.args, **self.kwargs) - diff --git a/pype/vendor/ftrack_api/operation.py b/pype/vendor/ftrack_api/operation.py deleted file mode 100644 index 521712c046..0000000000 --- a/pype/vendor/ftrack_api/operation.py +++ /dev/null @@ -1,116 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -from builtins import object -import copy - - -class Operations(object): - '''Stack of operations.''' - - def __init__(self): - '''Initialise stack.''' - self._stack = [] - super(Operations, self).__init__() - - def clear(self): - '''Clear all operations.''' - del self._stack[:] - - def push(self, operation): - '''Push *operation* onto stack.''' - self._stack.append(operation) - - def pop(self): - '''Pop and return most recent operation from stack.''' - return self._stack.pop() - - def __len__(self): - '''Return count of operations.''' - return len(self._stack) - - def __iter__(self): - '''Return iterator over operations.''' - return iter(self._stack) - - -class Operation(object): - '''Represent an operation.''' - - -class CreateEntityOperation(Operation): - '''Represent create entity operation.''' - - def __init__(self, entity_type, entity_key, entity_data): - '''Initialise operation. 
- - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. - - *entity_data* should be a mapping of the initial data to populate the - entity with when creating. - - .. note:: - - Shallow copies will be made of each value in *entity_data*. - - ''' - super(CreateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.entity_data = {} - for key, value in list(entity_data.items()): - self.entity_data[key] = copy.copy(value) - - -class UpdateEntityOperation(Operation): - '''Represent update entity operation.''' - - def __init__( - self, entity_type, entity_key, attribute_name, old_value, new_value - ): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. - - *attribute_name* should be the string name of the attribute being - modified and *old_value* and *new_value* should reflect the change in - value. - - .. note:: - - Shallow copies will be made of both *old_value* and *new_value*. - - ''' - super(UpdateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.attribute_name = attribute_name - self.old_value = copy.copy(old_value) - self.new_value = copy.copy(new_value) - - -class DeleteEntityOperation(Operation): - '''Represent delete entity operation.''' - - def __init__(self, entity_type, entity_key): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. - - ''' - super(DeleteEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - diff --git a/pype/vendor/ftrack_api/plugin.py b/pype/vendor/ftrack_api/plugin.py deleted file mode 100644 index 8c164edf23..0000000000 --- a/pype/vendor/ftrack_api/plugin.py +++ /dev/null @@ -1,121 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import os -import uuid -import imp -import inspect - - -def discover(paths, positional_arguments=None, keyword_arguments=None): - '''Find and load plugins in search *paths*. - - Each discovered module should implement a register function that accepts - *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs - respectively. - - If a register function does not accept variable arguments, then attempt to - only pass accepted arguments to the function by inspecting its signature. - - ''' - logger = logging.getLogger(__name__ + '.discover') - - if positional_arguments is None: - positional_arguments = [] - - if keyword_arguments is None: - keyword_arguments = {} - - for path in paths: - # Ignore empty paths that could resolve to current directory. 
-        path = path.strip()
-        if not path:
-            continue
-
-        for base, directories, filenames in os.walk(path):
-            for filename in filenames:
-                name, extension = os.path.splitext(filename)
-                if extension != '.py':
-                    continue
-
-                module_path = os.path.join(base, filename)
-                unique_name = uuid.uuid4().hex
-
-                try:
-                    module = imp.load_source(unique_name, module_path)
-                except Exception as error:
-                    logger.warning(
-                        'Failed to load plugin from "{0}": {1}'
                        .format(module_path, error)
-                    )
-                    continue
-
-                try:
-                    module.register
-                except AttributeError:
-                    logger.warning(
-                        'Failed to load plugin that did not define a '
-                        '"register" function at the module level: {0}'
-                        .format(module_path)
-                    )
-                else:
-                    # Attempt to only pass arguments that are accepted by the
-                    # register function.
-                    specification = inspect.getargspec(module.register)
-
-                    selected_positional_arguments = positional_arguments
-                    selected_keyword_arguments = keyword_arguments
-
-                    if (
-                        not specification.varargs and
-                        len(positional_arguments) > len(specification.args)
-                    ):
-                        logger.warning(
-                            'Culling passed arguments to match register '
-                            'function signature.'
-                        )
-
-                        selected_positional_arguments = positional_arguments[
-                            :len(specification.args)
-                        ]
-                        selected_keyword_arguments = {}
-
-                    elif not specification.keywords:
-                        # Remove arguments that have been passed as positionals.
-                        remainder = specification.args[
-                            len(positional_arguments):
-                        ]
-
-                        # Determine remaining available keyword arguments.
-                        defined_keyword_arguments = []
-                        if specification.defaults:
-                            defined_keyword_arguments = specification.args[
-                                -len(specification.defaults):
-                            ]
-
-                        remaining_keyword_arguments = set([
-                            keyword_argument for keyword_argument
-                            in defined_keyword_arguments
-                            if keyword_argument in remainder
-                        ])
-
-                        if not set(keyword_arguments.keys()).issubset(
-                            remaining_keyword_arguments
-                        ):
-                            logger.warning(
-                                'Culling passed arguments to match register '
-                                'function signature.'
-                            )
-                            selected_keyword_arguments = {
-                                key: value
-                                for key, value in list(keyword_arguments.items())
-                                if key in remaining_keyword_arguments
-                            }
-
-                    module.register(
-                        *selected_positional_arguments,
-                        **selected_keyword_arguments
-                    )
diff --git a/pype/vendor/ftrack_api/query.py b/pype/vendor/ftrack_api/query.py
deleted file mode 100644
index ea101a29d4..0000000000
--- a/pype/vendor/ftrack_api/query.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2014 ftrack
-
-import re
-import collections
-
-import ftrack_api.exception
-
-
-class QueryResult(collections.Sequence):
-    '''Results from a query.'''
-
-    OFFSET_EXPRESSION = re.compile('(?P<offset>offset (?P<value>\d+))')
-    LIMIT_EXPRESSION = re.compile('(?P<limit>limit (?P<value>\d+))')
-
-    def __init__(self, session, expression, page_size=500):
-        '''Initialise result set.
-
-        *session* should be an instance of :class:`ftrack_api.session.Session`
-        that will be used for executing the query *expression*.
-
-        *page_size* should be an integer specifying the maximum number of
-        records to fetch in one request allowing the results to be fetched
-        incrementally in a transparent manner for optimal performance. Any
-        offset or limit specified in *expression* is honoured for the final
-        result set, but intermediate queries may be issued with different
-        offsets and limits in order to fetch pages. When an embedded limit is
-        smaller than the given *page_size* it will be used instead and no
-        paging will take place.
-
-        .. 
warning:: - - Setting *page_size* to a very large amount may negatively impact - performance of not only the caller, but the server in general. - - ''' - super(QueryResult, self).__init__() - self._session = session - self._results = [] - - ( - self._expression, - self._offset, - self._limit - ) = self._extract_offset_and_limit(expression) - - self._page_size = page_size - if self._limit is not None and self._limit < self._page_size: - # Optimise case where embedded limit is less than fetching a - # single page. - self._page_size = self._limit - - self._next_offset = self._offset - if self._next_offset is None: - # Initialise with zero offset. - self._next_offset = 0 - - def _extract_offset_and_limit(self, expression): - '''Process *expression* extracting offset and limit. - - Return (expression, offset, limit). - - ''' - offset = None - match = self.OFFSET_EXPRESSION.search(expression) - if match: - offset = int(match.group('value')) - expression = ( - expression[:match.start('offset')] + - expression[match.end('offset'):] - ) - - limit = None - match = self.LIMIT_EXPRESSION.search(expression) - if match: - limit = int(match.group('value')) - expression = ( - expression[:match.start('limit')] + - expression[match.end('limit'):] - ) - - return expression.strip(), offset, limit - - def __getitem__(self, index): - '''Return value at *index*.''' - while self._can_fetch_more() and index >= len(self._results): - self._fetch_more() - - return self._results[index] - - def __len__(self): - '''Return number of items.''' - while self._can_fetch_more(): - self._fetch_more() - - return len(self._results) - - def _can_fetch_more(self): - '''Return whether more results are available to fetch.''' - return self._next_offset is not None - - def _fetch_more(self): - '''Fetch next page of results if available.''' - if not self._can_fetch_more(): - return - - expression = '{0} offset {1} limit {2}'.format( - self._expression, self._next_offset, self._page_size - ) - records, metadata = self._session._query(expression) - self._results.extend(records) - - if self._limit is not None and (len(self._results) >= self._limit): - # Original limit reached. - self._next_offset = None - del self._results[self._limit:] - else: - # Retrieve next page offset from returned metadata. - self._next_offset = metadata.get('next', {}).get('offset', None) - - def all(self): - '''Fetch and return all data.''' - return list(self) - - def one(self): - '''Return exactly one single result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - Raise :exc:`ValueError` if an existing offset is already present in the - expression as offset is inappropriate when expecting a single item. - - Raise :exc:`~ftrack_api.exception.MultipleResultsFoundError` if more - than one result was available or - :exc:`~ftrack_api.exception.NoResultFoundError` if no results were - available. - - .. note:: - - Both errors subclass - :exc:`~ftrack_api.exception.IncorrectResultError` if you want to - catch only one error type. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - if self._offset is not None: - raise ValueError( - 'Expression contains an offset clause which does not make ' - 'sense when selecting a single item.' - ) - - # Apply custom limit as optimisation. A limit of 2 is used rather than - # 1 so that it is possible to test for multiple matching entries - # case. 
- expression += ' limit 2' - - results, metadata = self._session._query(expression) - - if not results: - raise ftrack_api.exception.NoResultFoundError() - - if len(results) != 1: - raise ftrack_api.exception.MultipleResultsFoundError() - - return results[0] - - def first(self): - '''Return first matching result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - If no matching result available return None. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - # Apply custom offset if present. - if self._offset is not None: - expression += ' offset {0}'.format(self._offset) - - # Apply custom limit as optimisation. - expression += ' limit 1' - - results, metadata = self._session._query(expression) - - if results: - return results[0] - - return None diff --git a/pype/vendor/ftrack_api/resource_identifier_transformer/__init__.py b/pype/vendor/ftrack_api/resource_identifier_transformer/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/pype/vendor/ftrack_api/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/pype/vendor/ftrack_api/resource_identifier_transformer/base.py b/pype/vendor/ftrack_api/resource_identifier_transformer/base.py deleted file mode 100644 index b4cbbc3a2a..0000000000 --- a/pype/vendor/ftrack_api/resource_identifier_transformer/base.py +++ /dev/null @@ -1,51 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - - -from builtins import object -class ResourceIdentifierTransformer(object): - '''Transform resource identifiers. - - Provide ability to modify resource identifier before it is stored centrally - (:meth:`encode`), or after it has been retrieved, but before it is used - locally (:meth:`decode`). - - For example, you might want to decompose paths into a set of key, value - pairs to store centrally and then compose a path from those values when - reading back. - - .. note:: - - This is separate from any transformations an - :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted - towards common transformations. - - ''' - - def __init__(self, session): - '''Initialise resource identifier transformer. - - *session* should be the :class:`ftrack_api.session.Session` instance - to use for communication with the server. - - ''' - self.session = session - super(ResourceIdentifierTransformer, self).__init__() - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return resource_identifier - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. 
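``encode`` and ``decode`` are identity operations by default, so the class is only useful subclassed. A hypothetical transformer that swaps a site specific mount point for a portable token (paths are illustrative)::

    class MountPointTransformer(ResourceIdentifierTransformer):
        '''Store identifiers using a portable {storage} token.'''

        def encode(self, resource_identifier, context=None):
            # Applied before the identifier is stored centrally.
            return resource_identifier.replace('/mnt/projects', '{storage}')

        def decode(self, resource_identifier, context=None):
            # Applied after retrieval, before local use.
            return resource_identifier.replace('{storage}', '/mnt/projects')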
- - ''' - return resource_identifier diff --git a/pype/vendor/ftrack_api/session.py b/pype/vendor/ftrack_api/session.py deleted file mode 100644 index 517acd8576..0000000000 --- a/pype/vendor/ftrack_api/session.py +++ /dev/null @@ -1,2494 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import -from __future__ import division - -from builtins import zip -from builtins import map -from builtins import str -from past.builtins import basestring -from builtins import object -from past.utils import old_div -import json -import logging -import collections -import datetime -import os -import getpass -import functools -import itertools -import distutils.version -import hashlib -import tempfile -import threading -import atexit -import warnings - -import requests -import requests.auth -import arrow -import clique - -import ftrack_api -import ftrack_api.exception -import ftrack_api.entity.factory -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.cache -import ftrack_api.symbol -import ftrack_api.query -import ftrack_api.attribute -import ftrack_api.collection -import ftrack_api.event.hub -import ftrack_api.event.base -import ftrack_api.plugin -import ftrack_api.inspection -import ftrack_api.operation -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.entity_id -import ftrack_api.accessor.server -import ftrack_api._centralized_storage_scenario -from ftrack_api.logging import LazyLogMessage as L - - -class SessionAuthentication(requests.auth.AuthBase): - '''Attach ftrack session authentication information to requests.''' - - def __init__(self, api_key, api_user): - '''Initialise with *api_key* and *api_user*.''' - self.api_key = api_key - self.api_user = api_user - super(SessionAuthentication, self).__init__() - - def __call__(self, request): - '''Modify *request* to have appropriate headers.''' - request.headers.update({ - 'ftrack-api-key': self.api_key, - 'ftrack-user': self.api_user - }) - return request - - -class Session(object): - '''An isolated session for interaction with an ftrack server.''' - - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. 
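A cache factory along those lines might look as follows; ``FileCache`` and ``SerialisedCache`` come from ``ftrack_api.cache`` and the path is a placeholder::

    import ftrack_api.cache

    def make_cache(session):
        '''Return a file backed cache, or None if one cannot be created.'''
        try:
            return ftrack_api.cache.SerialisedCache(
                ftrack_api.cache.FileCache('/tmp/ftrack_session_cache'),
                encode=session.encode,
                decode=session.decode
            )
        except OSError:
            return None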
The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. 
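In practice the three credentials are usually supplied through the environment, so construction reduces to the following (values are placeholders)::

    import os
    import ftrack_api

    os.environ['FTRACK_SERVER'] = 'https://example.ftrackapp.com'
    os.environ['FTRACK_API_KEY'] = '<api key>'
    os.environ['FTRACK_API_USER'] = 'martin'

    session = ftrack_api.Session()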
- self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ftrack_api.event.hub.EventHub( - self._server_url, - self._api_user, - self._api_key, - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def __enter__(self): - '''Return session as context manager.''' - return self - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit session context, closing session in process.''' - self.close() - - @property - def _request(self): - '''Return request session. - - Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has - been closed and connection unavailable. 
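Thanks to the ``__enter__``/``__exit__`` pair above, a session can also be scoped with a ``with`` block so that ``close`` always runs::

    import ftrack_api

    with ftrack_api.Session() as session:
        print(session.server_url)
    # Connection closed; pending operations, if any, were discarded.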
- - ''' - if self._managed_request is None: - raise ftrack_api.exception.ConnectionClosedError() - - return self._managed_request - - @_request.setter - def _request(self, value): - '''Set request session to *value*.''' - self._managed_request = value - - @property - def closed(self): - '''Return whether session has been closed.''' - return self._closed - - @property - def server_information(self): - '''Return server information such as server version.''' - return self._server_information.copy() - - @property - def server_url(self): - '''Return server ulr used for session.''' - return self._server_url - - @property - def api_user(self): - '''Return username used for session.''' - return self._api_user - - @property - def api_key(self): - '''Return API key used for session.''' - return self._api_key - - @property - def event_hub(self): - '''Return event hub.''' - return self._event_hub - - @property - def _local_cache(self): - '''Return top level memory cache.''' - return self.cache.caches[0] - - def check_server_compatibility(self): - '''Check compatibility with connected server.''' - server_version = self.server_information.get('version') - if server_version is None: - raise ftrack_api.exception.ServerCompatibilityError( - 'Could not determine server version.' - ) - - # Perform basic version check. - if server_version != 'dev': - min_server_version = '3.3.11' - if ( - distutils.version.LooseVersion(min_server_version) - > distutils.version.LooseVersion(server_version) - ): - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0} incompatible with this version of the ' - 'API which requires a server version >= {1}'.format( - server_version, - min_server_version - ) - ) - - def close(self): - '''Close session. - - Close connections to server. Clear any pending operations and local - cache. - - Use this to ensure that session is cleaned up properly after use. - - ''' - if self.closed: - self.logger.debug('Session already closed.') - return - - self._closed = True - - self.logger.debug('Closing session.') - if self.recorded_operations: - self.logger.warning( - 'Closing session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Close connections. - self._request.close() - self._request = None - - try: - self.event_hub.disconnect() - if self._auto_connect_event_hub_thread: - self._auto_connect_event_hub_thread.join() - except ftrack_api.exception.EventHubConnectionError: - pass - - self.logger.debug('Session closed.') - - def reset(self): - '''Reset session clearing local state. - - Clear all pending operations and expunge all entities from session. - - Also clear the local cache. If the cache used by the session is a - :class:`~ftrack_api.cache.LayeredCache` then only clear top level cache. - Otherwise, clear the entire cache. - - Plugins are not rediscovered or reinitialised, but certain plugin events - are re-emitted to properly configure session aspects that are dependant - on cache (such as location plugins). - - .. warning:: - - Previously attached entities are not reset in memory and will retain - their state, but should not be used. Doing so will cause errors. - - ''' - if self.recorded_operations: - self.logger.warning( - 'Resetting session with pending operations not persisted.' - ) - - # Clear pending operations. 
- self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Re-configure certain session aspects that may be dependant on cache. - self._configure_locations() - - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.reset', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def auto_populating(self, auto_populate): - '''Temporarily set auto populate to *auto_populate*. - - The current setting will be restored automatically when done. - - Example:: - - with session.auto_populating(False): - print entity['name'] - - ''' - return AutoPopulatingContext(self, auto_populate) - - def operation_recording(self, record_operations): - '''Temporarily set operation recording to *record_operations*. - - The current setting will be restored automatically when done. - - Example:: - - with session.operation_recording(False): - entity['name'] = 'change_not_recorded' - - ''' - return OperationRecordingContext(self, record_operations) - - @property - def created(self): - '''Return list of newly created entities.''' - entities = list(self._local_cache.values()) - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in zip(entities, states) - if state is ftrack_api.symbol.CREATED - ] - - @property - def modified(self): - '''Return list of locally modified entities.''' - entities = list(self._local_cache.values()) - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in zip(entities, states) - if state is ftrack_api.symbol.MODIFIED - ] - - @property - def deleted(self): - '''Return list of deleted entities.''' - entities = list(self._local_cache.values()) - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in zip(entities, states) - if state is ftrack_api.symbol.DELETED - ] - - def reset_remote(self, reset_type, entity=None): - '''Perform a server side reset. - - *reset_type* is a server side supported reset type, - passing the optional *entity* to perform the option upon. - - Please refer to ftrack documentation for a complete list of - supported server side reset types. - ''' - - payload = { - 'action': 'reset_remote', - 'reset_type': reset_type - } - - if entity is not None: - payload.update({ - 'entity_type': entity.entity_type, - 'entity_key': entity.get('id') - }) - - result = self._call( - [payload] - ) - - return result[0]['data'] - - def create(self, entity_type, data=None, reconstructing=False): - '''Create and return an entity of *entity_type* with initial *data*. - - If specified, *data* should be a dictionary of key, value pairs that - should be used to populate attributes on the entity. - - If *reconstructing* is False then create a new entity setting - appropriate defaults for missing data. If True then reconstruct an - existing entity. - - Constructed entity will be automatically :meth:`merged ` - into the session. 
- - ''' - entity = self._create(entity_type, data, reconstructing=reconstructing) - entity = self.merge(entity) - return entity - - def _create(self, entity_type, data, reconstructing): - '''Create and return an entity of *entity_type* with initial *data*.''' - try: - EntityTypeClass = self.types[entity_type] - except KeyError: - raise ftrack_api.exception.UnrecognisedEntityTypeError(entity_type) - - return EntityTypeClass(self, data=data, reconstructing=reconstructing) - - def ensure(self, entity_type, data, identifying_keys=None): - '''Retrieve entity of *entity_type* with *data*, creating if necessary. - - *data* should be a dictionary of the same form passed to :meth:`create`. - - By default, check for an entity that has matching *data*. If - *identifying_keys* is specified as a list of keys then only consider the - values from *data* for those keys when searching for existing entity. If - *data* is missing an identifying key then raise :exc:`KeyError`. - - If no *identifying_keys* specified then use all of the keys from the - passed *data*. Raise :exc:`ValueError` if no *identifying_keys* can be - determined. - - Each key should be a string. - - .. note:: - - Currently only top level scalars supported. To ensure an entity by - looking at relationships, manually issue the :meth:`query` and - :meth:`create` calls. - - If more than one entity matches the determined filter criteria then - raise :exc:`~ftrack_api.exception.MultipleResultsFoundError`. - - If no matching entity found then create entity using supplied *data*. - - If a matching entity is found, then update it if necessary with *data*. - - .. note:: - - If entity created or updated then a :meth:`commit` will be issued - automatically. If this behaviour is undesired, perform the - :meth:`query` and :meth:`create` calls manually. - - Return retrieved or created entity. - - Example:: - - # First time, a new entity with `username=martin` is created. - entity = session.ensure('User', {'username': 'martin'}) - - # After that, the existing entity is retrieved. - entity = session.ensure('User', {'username': 'martin'}) - - # When existing entity retrieved, entity may also be updated to - # match supplied data. - entity = session.ensure( - 'User', {'username': 'martin', 'email': 'martin@example.com'} - ) - - ''' - if not identifying_keys: - identifying_keys = list(data.keys()) - - self.logger.debug(L( - 'Ensuring entity {0!r} with data {1!r} using identifying keys ' - '{2!r}', entity_type, data, identifying_keys - )) - - if not identifying_keys: - raise ValueError( - 'Could not determine any identifying data to check against ' - 'when ensuring {0!r} with data {1!r}. Identifying keys: {2!r}' - .format(entity_type, data, identifying_keys) - ) - - expression = '{0} where'.format(entity_type) - criteria = [] - for identifying_key in identifying_keys: - value = data[identifying_key] - - if isinstance(value, basestring): - value = '"{0}"'.format(value) - - elif isinstance( - value, (arrow.Arrow, datetime.datetime, datetime.date) - ): - # Server does not store microsecond or timezone currently so - # need to strip from query. - # TODO: When datetime handling improved, update this logic. 
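One behaviour worth underlining from the docstring above: when *identifying_keys* is restricted, the remaining values are synchronised onto whatever entity is matched, with a commit issued automatically if anything changed::

    # Lookup is by `username` only; `email` is then written to the found
    # (or created) entity and committed if it differed.
    user = session.ensure(
        'User',
        {'username': 'martin', 'email': 'martin@example.com'},
        identifying_keys=['username']
    )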
- value = ( - arrow.get(value).naive.replace(microsecond=0).isoformat() - ) - value = '"{0}"'.format(value) - - criteria.append('{0} is {1}'.format(identifying_key, value)) - - expression = '{0} {1}'.format( - expression, ' and '.join(criteria) - ) - - try: - entity = self.query(expression).one() - - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Creating entity as did not already exist.') - - # Create entity. - entity = self.create(entity_type, data) - self.commit() - - else: - self.logger.debug('Retrieved matching existing entity.') - - # Update entity if required. - updated = False - for key, target_value in list(data.items()): - if entity[key] != target_value: - entity[key] = target_value - updated = True - - if updated: - self.logger.debug('Updating existing entity to match new data.') - self.commit() - - return entity - - def delete(self, entity): - '''Mark *entity* for deletion.''' - if self.record_operations: - self.recorded_operations.push( - ftrack_api.operation.DeleteEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity) - ) - ) - - def get(self, entity_type, entity_key): - '''Return entity of *entity_type* with unique *entity_key*. - - First check for an existing entry in the configured cache, otherwise - issue a query to the server. - - If no matching entity found, return None. - - ''' - self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) - - primary_key_definition = self.types[entity_type].primary_key_attributes - if isinstance(entity_key, basestring): - entity_key = [entity_key] - - if len(entity_key) != len(primary_key_definition): - raise ValueError( - 'Incompatible entity_key {0!r} supplied. Entity type {1} ' - 'expects a primary key composed of {2} values ({3}).' - .format( - entity_key, entity_type, len(primary_key_definition), - ', '.join(primary_key_definition) - ) - ) - - entity = None - try: - entity = self._get(entity_type, entity_key) - - except KeyError: - - # Query for matching entity. - self.logger.debug( - 'Entity not present in cache. Issuing new query.' - ) - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - expression = '{0} where ({1})'.format( - entity_type, ' and '.join(condition) - ) - - results = self.query(expression).all() - if results: - entity = results[0] - - return entity - - def _get(self, entity_type, entity_key): - '''Return cached entity of *entity_type* with unique *entity_key*. - - Raise :exc:`KeyError` if no such entity in the cache. - - ''' - # Check cache for existing entity emulating - # ftrack_api.inspection.identity result object to pass to key maker. - cache_key = self.cache_key_maker.key( - (str(entity_type), list(map(str, entity_key))) - ) - self.logger.debug(L( - 'Checking cache for entity with key {0}', cache_key - )) - entity = self.cache.get(cache_key) - self.logger.debug(L( - 'Retrieved existing entity from cache: {0} at {1}', - entity, id(entity) - )) - - return entity - - def query(self, expression, page_size=500): - '''Query against remote data according to *expression*. - - *expression* is not executed directly. Instead return an - :class:`ftrack_api.query.QueryResult` instance that will execute remote - call on access. - - *page_size* specifies the maximum page size that the returned query - result object should be configured with. - - .. seealso:: :ref:`querying` - - ''' - self.logger.debug(L('Query {0!r}', expression)) - - # Add in sensible projections if none specified. 
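Given this projection defaulting, the two calls below are roughly equivalent, assuming ``id`` and ``name`` are among the type's default projections; the second also sets an explicit page size::

    tasks = session.query('Task where status.name is "In Progress"')
    tasks = session.query(
        'select id, name from Task where status.name is "In Progress"',
        page_size=200
    )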
Note that this is - # done here rather than on the server to allow local modification of the - # schema setting to include commonly used custom attributes for example. - # TODO: Use a proper parser perhaps? - if not expression.startswith('select'): - entity_type = expression.split(' ', 1)[0] - EntityTypeClass = self.types[entity_type] - projections = EntityTypeClass.default_projections - - expression = 'select {0} from {1}'.format( - ', '.join(projections), - expression - ) - - query_result = ftrack_api.query.QueryResult( - self, expression, page_size=page_size - ) - return query_result - - def _query(self, expression): - '''Execute *query* and return (records, metadata). - - Records will be a list of entities retrieved via the query and metadata - a dictionary of accompanying information about the result set. - - ''' - # TODO: Actually support batching several queries together. - # TODO: Should batches have unique ids to match them up later. - batch = [{ - 'action': 'query', - 'expression': expression - }] - - # TODO: When should this execute? How to handle background=True? - results = self._call(batch) - - # Merge entities into local cache and return merged entities. - data = [] - merged = dict() - for entity in results[0]['data']: - data.append(self._merge_recursive(entity, merged)) - - return data, results[0]['metadata'] - - def merge(self, value, merged=None): - '''Merge *value* into session and return merged value. - - *merged* should be a mapping to record merges during run and should be - used to avoid infinite recursion. If not set will default to a - dictionary. - - ''' - if merged is None: - merged = {} - - with self.operation_recording(False): - return self._merge(value, merged) - - def _merge(self, value, merged): - '''Return merged *value*.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if isinstance(value, ftrack_api.entity.base.Entity): - log_debug and self.logger.debug( - 'Merging entity into session: {0} at {1}' - .format(value, id(value)) - ) - - return self._merge_entity(value, merged=merged) - - elif isinstance(value, ftrack_api.collection.Collection): - log_debug and self.logger.debug( - 'Merging collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - elif isinstance(value, ftrack_api.collection.MappedCollectionProxy): - log_debug and self.logger.debug( - 'Merging mapped collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value.collection: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - else: - return value - - def _merge_recursive(self, entity, merged=None): - '''Merge *entity* and all its attributes recursivly.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - attached = self.merge(entity, merged) - - for attribute in entity.attributes: - # Remote attributes. 
- remote_value = attribute.get_remote_value(entity) - - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - log_debug and self.logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - if isinstance(remote_value, ftrack_api.entity.base.Entity): - self._merge_recursive(remote_value, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.Collection - ): - for entry in remote_value: - self._merge_recursive(entry, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.MappedCollectionProxy - ): - for entry in remote_value.collection: - self._merge_recursive(entry, merged=merged) - - return attached - - def _merge_entity(self, entity, merged=None): - '''Merge *entity* into session returning merged entity. - - Merge is recursive so any references to other entities will also be - merged. - - *entity* will never be modified in place. Ensure that the returned - merged entity instance is used. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - with self.auto_populating(False): - entity_key = self.cache_key_maker.key( - ftrack_api.inspection.identity(entity) - ) - - # Check whether this entity has already been processed. - attached_entity = merged.get(entity_key) - if attached_entity is not None: - log_debug and self.logger.debug( - 'Entity already processed for key {0} as {1} at {2}' - .format(entity_key, attached_entity, id(attached_entity)) - ) - - return attached_entity - else: - log_debug and self.logger.debug( - 'Entity not already processed for key {0}.' - .format(entity_key) - ) - - # Check for existing instance of entity in cache. - log_debug and self.logger.debug( - 'Checking for entity in cache with key {0}'.format(entity_key) - ) - try: - attached_entity = self.cache.get(entity_key) - - log_debug and self.logger.debug( - 'Retrieved existing entity from cache: {0} at {1}' - .format(attached_entity, id(attached_entity)) - ) - - except KeyError: - # Construct new minimal instance to store in cache. - attached_entity = self._create( - entity.entity_type, {}, reconstructing=True - ) - - log_debug and self.logger.debug( - 'Entity not present in cache. Constructed new instance: ' - '{0} at {1}'.format(attached_entity, id(attached_entity)) - ) - - # Mark entity as seen to avoid infinite loops. - merged[entity_key] = attached_entity - - changes = attached_entity.merge(entity, merged=merged) - if changes: - self.cache.set(entity_key, attached_entity) - self.logger.debug('Cache updated with merged entity.') - - else: - self.logger.debug( - 'Cache not updated with merged entity as no differences ' - 'detected.' - ) - - return attached_entity - - def populate(self, entities, projections): - '''Populate *entities* with attributes specified by *projections*. - - Any locally set values included in the *projections* will not be - overwritten with the retrieved remote value. If this 'synchronise' - behaviour is required, first clear the relevant values on the entity by - setting them to :attr:`ftrack_api.symbol.NOT_SET`. Deleting the key will - have the same effect:: - - >>> print(user['username']) - martin - >>> del user['username'] - >>> print(user['username']) - Symbol(NOT_SET) - - .. note:: - - Entities that have been created and not yet persisted will be - skipped as they have no remote values to fetch. 
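A typical population round trip, following the docstring above: one server query fetches the requested attributes for every entity at once::

    users = session.query('User').all()
    session.populate(users, 'username, email')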
- - ''' - self.logger.debug(L( - 'Populate {0!r} projections for {1}.', projections, entities - )) - - if not isinstance( - entities, (list, tuple, ftrack_api.query.QueryResult) - ): - entities = [entities] - - # TODO: How to handle a mixed collection of different entity types - # Should probably fail, but need to consider handling hierarchies such - # as User and Group both deriving from Resource. Actually, could just - # proceed and ignore projections that are not present in entity type. - - entities_to_process = [] - - for entity in entities: - if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: - # Created entities that are not yet persisted have no remote - # values. Don't raise an error here as it is reasonable to - # iterate over an entities properties and see that some of them - # are NOT_SET. - self.logger.debug(L( - 'Skipping newly created entity {0!r} for population as no ' - 'data will exist in the remote for this entity yet.', entity - )) - continue - - entities_to_process.append(entity) - - if entities_to_process: - reference_entity = entities_to_process[0] - entity_type = reference_entity.entity_type - query = 'select {0} from {1}'.format(projections, entity_type) - - primary_key_definition = reference_entity.primary_key_attributes - entity_keys = [ - list(ftrack_api.inspection.primary_key(entity).values()) - for entity in entities_to_process - ] - - if len(primary_key_definition) > 1: - # Composite keys require full OR syntax unfortunately. - conditions = [] - for entity_key in entity_keys: - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - conditions.append('({0})'.format('and '.join(condition))) - - query = '{0} where {1}'.format(query, ' or '.join(conditions)) - - else: - primary_key = primary_key_definition[0] - - if len(entity_keys) > 1: - query = '{0} where {1} in ({2})'.format( - query, primary_key, - ','.join([ - str(entity_key[0]) for entity_key in entity_keys - ]) - ) - else: - query = '{0} where {1} is {2}'.format( - query, primary_key, str(entity_keys[0][0]) - ) - - result = self.query(query) - - # Fetch all results now. Doing so will cause them to populate the - # relevant entities in the cache. - result.all() - - # TODO: Should we check that all requested attributes were - # actually populated? If some weren't would we mark that to avoid - # repeated calls or perhaps raise an error? - - # TODO: Make atomic. - def commit(self): - '''Commit all local changes to the server.''' - batch = [] - - with self.auto_populating(False): - for operation in self.recorded_operations: - - # Convert operation to payload. - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - # At present, data payload requires duplicating entity - # type in data and also ensuring primary key added. - entity_data = { - '__entity_type__': operation.entity_type, - } - entity_data.update(operation.entity_key) - entity_data.update(operation.entity_data) - - payload = OperationPayload({ - 'action': 'create', - 'entity_type': operation.entity_type, - 'entity_key': list(operation.entity_key.values()), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ): - entity_data = { - # At present, data payload requires duplicating entity - # type. 
- '__entity_type__': operation.entity_type, - operation.attribute_name: operation.new_value - } - - payload = OperationPayload({ - 'action': 'update', - 'entity_type': operation.entity_type, - 'entity_key': list(operation.entity_key.values()), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - payload = OperationPayload({ - 'action': 'delete', - 'entity_type': operation.entity_type, - 'entity_key': list(operation.entity_key.values()) - }) - - else: - raise ValueError( - 'Cannot commit. Unrecognised operation type {0} ' - 'detected.'.format(type(operation)) - ) - - batch.append(payload) - - # Optimise batch. - # TODO: Might be better to perform these on the operations list instead - # so all operation contextual information available. - - # If entity was created and deleted in one batch then remove all - # payloads for that entity. - created = set() - deleted = set() - - for payload in batch: - if payload['action'] == 'create': - created.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - elif payload['action'] == 'delete': - deleted.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - created_then_deleted = deleted.intersection(created) - if created_then_deleted: - optimised_batch = [] - for payload in batch: - entity_type = payload.get('entity_type') - entity_key = str(payload.get('entity_key')) - - if (entity_type, entity_key) in created_then_deleted: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Remove early update operations so that only last operation on - # attribute is applied server side. - updates_map = set() - for payload in reversed(batch): - if payload['action'] in ('update', ): - for key, value in list(payload['entity_data'].items()): - if key == '__entity_type__': - continue - - identity = ( - payload['entity_type'], str(payload['entity_key']), key - ) - if identity in updates_map: - del payload['entity_data'][key] - else: - updates_map.add(identity) - - # Remove NOT_SET values from entity_data. - for payload in batch: - entity_data = payload.get('entity_data', {}) - for key, value in list(entity_data.items()): - if value is ftrack_api.symbol.NOT_SET: - del entity_data[key] - - # Remove payloads with redundant entity_data. - optimised_batch = [] - for payload in batch: - entity_data = payload.get('entity_data') - if entity_data is not None: - keys = list(entity_data.keys()) - if not keys or keys == ['__entity_type__']: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Collapse updates that are consecutive into one payload. Also, collapse - # updates that occur immediately after creation into the create payload. - optimised_batch = [] - previous_payload = None - - for payload in batch: - if ( - previous_payload is not None - and payload['action'] == 'update' - and previous_payload['action'] in ('create', 'update') - and previous_payload['entity_type'] == payload['entity_type'] - and previous_payload['entity_key'] == payload['entity_key'] - ): - previous_payload['entity_data'].update(payload['entity_data']) - continue - - else: - optimised_batch.append(payload) - previous_payload = payload - - batch = optimised_batch - - # Process batch. - if batch: - result = self._call(batch) - - # Clear recorded operations. - self.recorded_operations.clear() - - # As optimisation, clear local values which are not primary keys to - # avoid redundant merges when merging references. 
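Seen from the call site, these optimisation passes mean chatty local code still produces a compact batch (payload shapes only; a real ``Task`` would need more data to pass server validation)::

    # Create followed by updates collapses into one `create` payload
    # carrying the final values.
    task = session.create('Task', {'name': 'anim'})
    task['name'] = 'animation'
    task['description'] = 'blocking pass'
    session.commit()

    # Create followed by delete in the same batch sends nothing at all.
    scratch = session.create('Task', {'name': 'scratch'})
    session.delete(scratch)
    session.commit()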
Note: primary keys - # remain as needed for cache retrieval on new entities. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in list(self._local_cache.values()): - for attribute in entity: - if attribute not in entity.primary_key_attributes: - del entity[attribute] - - # Process results merging into cache relevant data. - for entry in result: - - if entry['action'] in ('create', 'update'): - # Merge returned entities into local cache. - self.merge(entry['data']) - - elif entry['action'] == 'delete': - # TODO: Detach entity - need identity returned? - # TODO: Expunge entity from cache. - pass - # Clear remaining local state, including local values for primary - # keys on entities that were merged. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in list(self._local_cache.values()): - entity.clear() - - def rollback(self): - '''Clear all recorded operations and local state. - - Typically this would be used following a failed :meth:`commit` in order - to revert the session to a known good state. - - Newly created entities not yet persisted will be detached from the - session / purged from cache and no longer contribute, but the actual - objects are not deleted from memory. They should no longer be used and - doing so could cause errors. - - ''' - with self.auto_populating(False): - with self.operation_recording(False): - - # Detach all newly created entities and remove from cache. This - # is done because simply clearing the local values of newly - # created entities would result in entities with no identity as - # primary key was local while not persisted. In addition, it - # makes no sense for failed created entities to exist in session - # or cache. - for operation in self.recorded_operations: - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - entity_key = str(( - str(operation.entity_type), - list(operation.entity_key.values()) - )) - try: - self.cache.remove(entity_key) - except KeyError: - pass - - # Clear locally stored modifications on remaining entities. - for entity in list(self._local_cache.values()): - entity.clear() - - self.recorded_operations.clear() - - def _fetch_server_information(self): - '''Return server information.''' - result = self._call([{'action': 'query_server_information'}]) - return result[0] - - def _discover_plugins(self, plugin_arguments=None): - '''Find and load plugins in search paths. - - Each discovered module should implement a register function that - accepts this session as first argument. Typically the function should - register appropriate event listeners against the session's event hub. - - def register(session): - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) - - *plugin_arguments* should be an optional mapping of keyword arguments - and values to pass to plugin register functions upon discovery. - - ''' - plugin_arguments = plugin_arguments or {} - ftrack_api.plugin.discover( - self._plugin_paths, [self], plugin_arguments - ) - - def _read_schemas_from_cache(self, schema_cache_path): - '''Return schemas and schema hash from *schema_cache_path*. - - *schema_cache_path* should be the path to the file containing the - schemas in JSON format. 
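``rollback`` pairs naturally with a failed ``commit``::

    try:
        session.commit()
    except ftrack_api.exception.ServerError:
        # Revert to a known good state; failed creates are purged from
        # the cache rather than left behind without an identity.
        session.rollback()
        raise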
- - ''' - self.logger.debug(L( - 'Reading schemas from cache {0!r}', schema_cache_path - )) - - if not os.path.exists(schema_cache_path): - self.logger.info(L( - 'Cache file not found at {0!r}.', schema_cache_path - )) - - return [], None - - with open(schema_cache_path, 'r') as schema_file: - schemas = json.load(schema_file) - hash_ = hashlib.md5( - json.dumps(schemas, sort_keys=True).encode('utf-8') - ).hexdigest() - - return schemas, hash_ - - def _write_schemas_to_cache(self, schemas, schema_cache_path): - '''Write *schemas* to *schema_cache_path*. - - *schema_cache_path* should be a path to a file that the schemas can be - written to in JSON format. - - ''' - self.logger.debug(L( - 'Updating schema cache {0!r} with new schemas.', schema_cache_path - )) - - with open(schema_cache_path, 'w') as local_cache_file: - json.dump(schemas, local_cache_file, indent=4) - - def _load_schemas(self, schema_cache_path): - '''Load schemas. - - First try to load schemas from cache at *schema_cache_path*. If the - cache is not available or the cache appears outdated then load schemas - from server and store fresh copy in cache. - - If *schema_cache_path* is set to `False`, always load schemas from - server bypassing cache. - - ''' - local_schema_hash = None - schemas = [] - - if schema_cache_path: - try: - schemas, local_schema_hash = self._read_schemas_from_cache( - schema_cache_path - ) - except (IOError, TypeError, AttributeError, ValueError): - # Catch any known exceptions when trying to read the local - # schema cache to prevent API from being unusable. - self.logger.exception(L( - 'Schema cache could not be loaded from {0!r}', - schema_cache_path - )) - - # Use `dictionary.get` to retrieve hash to support older version of - # ftrack server not returning a schema hash. - server_hash = self._server_information.get( - 'schema_hash', False - ) - if local_schema_hash != server_hash: - self.logger.debug(L( - 'Loading schemas from server due to hash not matching.' - 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash - )) - schemas = self._call([{'action': 'query_schemas'}])[0] - - if schema_cache_path: - try: - self._write_schemas_to_cache(schemas, schema_cache_path) - except (IOError, TypeError): - self.logger.exception(L( - 'Failed to update schema cache {0!r}.', - schema_cache_path - )) - - else: - self.logger.debug(L( - 'Using cached schemas from {0!r}', schema_cache_path - )) - - return schemas - - def _build_entity_type_classes(self, schemas): - '''Build default entity type classes.''' - fallback_factory = ftrack_api.entity.factory.StandardFactory() - classes = {} - - for schema in schemas: - results = self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ), - synchronous=True - ) - - results = [result for result in results if result is not None] - - if not results: - self.logger.debug(L( - 'Using default StandardFactory to construct entity type ' - 'class for "{0}"', schema['id'] - )) - entity_type_class = fallback_factory.create(schema) - - elif len(results) > 1: - raise ValueError( - 'Expected single entity type to represent schema "{0}" but ' - 'received {1} entity types instead.' 
- .format(schema['id'], len(results)) - ) - - else: - entity_type_class = results[0] - - classes[entity_type_class.entity_type] = entity_type_class - - return classes - - def _configure_locations(self): - '''Configure locations.''' - # First configure builtin locations, by injecting them into local cache. - - # Origin. - location = self.create( - 'Location', - data=dict( - name='ftrack.origin', - id=ftrack_api.symbol.ORIGIN_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.OriginLocationMixin, - name='OriginLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 100 - - # Unmanaged. - location = self.create( - 'Location', - data=dict( - name='ftrack.unmanaged', - id=ftrack_api.symbol.UNMANAGED_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - # location.resource_identifier_transformer = ( - # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) - # ) - location.priority = 90 - - # Review. - location = self.create( - 'Location', - data=dict( - name='ftrack.review', - id=ftrack_api.symbol.REVIEW_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 110 - - # Server. - location = self.create( - 'Location', - data=dict( - name='ftrack.server', - id=ftrack_api.symbol.SERVER_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.ServerLocationMixin, - name='ServerLocation' - ) - location.accessor = ftrack_api.accessor.server._ServerAccessor( - session=self - ) - location.structure = ftrack_api.structure.entity_id.EntityIdStructure() - location.priority = 150 - - # Master location based on server scenario. - storage_scenario = self.server_information.get('storage_scenario') - - if ( - storage_scenario and - storage_scenario.get('scenario') - ): - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.activate', - data=dict( - storage_scenario=storage_scenario - ) - ), - synchronous=True - ) - - # Next, allow further configuration of locations via events. - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def _call(self, data): - '''Make request to server with *data*.''' - url = self._server_url + '/api' - headers = { - 'content-type': 'application/json', - 'accept': 'application/json' - } - data = self.encode(data, entity_attribute_strategy='modified_only') - - self.logger.debug(L('Calling server {0} with {1!r}', url, data)) - - response = self._request.post( - url, - headers=headers, - data=data - ) - - self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) - - self.logger.debug(L('Response: {0!r}', response.text)) - try: - result = self.decode(response.text) - - except Exception: - error_message = ( - 'Server reported error in unexpected format. 
Raw error was: {0}' - .format(response.text) - ) - self.logger.error(error_message) - raise ftrack_api.exception.ServerError(error_message) - - else: - if 'exception' in result: - # Handle exceptions. - error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] - ) - self.logger.error(error_message) - raise ftrack_api.exception.ServerError(error_message) - - return result - - def encode(self, data, entity_attribute_strategy='set_only'): - '''Return *data* encoded as JSON formatted string. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. The following strategies are available: - - * *all* - Encode all attributes, loading any that are currently NOT_SET. - * *set_only* - Encode only attributes that are currently set without - loading any from the remote. - * *modified_only* - Encode only attributes that have been modified - locally. - * *persisted_only* - Encode only remote (persisted) attribute values. - - ''' - entity_attribute_strategies = ( - 'all', 'set_only', 'modified_only', 'persisted_only' - ) - if entity_attribute_strategy not in entity_attribute_strategies: - raise ValueError( - 'Unsupported entity_attribute_strategy "{0}". Must be one of ' - '{1}'.format( - entity_attribute_strategy, - ', '.join(entity_attribute_strategies) - ) - ) - - return json.dumps( - data, - sort_keys=True, - default=functools.partial( - self._encode, - entity_attribute_strategy=entity_attribute_strategy - ) - ) - - def _encode(self, item, entity_attribute_strategy='set_only'): - '''Return JSON encodable version of *item*. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. See :meth:`Session.encode` for available strategies. - - ''' - if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): - return { - '__type__': 'datetime', - 'value': item.isoformat() - } - - if isinstance(item, OperationPayload): - data = dict(list(item.items())) - if "entity_data" in data: - for key, value in list(data["entity_data"].items()): - if isinstance(value, ftrack_api.entity.base.Entity): - data["entity_data"][key] = self.entity_reference(value) - - return data - - if isinstance(item, ftrack_api.entity.base.Entity): - data = self.entity_reference(item) - - with self.auto_populating(True): - - for attribute in item.attributes: - value = ftrack_api.symbol.NOT_SET - - if entity_attribute_strategy == 'all': - value = attribute.get_value(item) - - elif entity_attribute_strategy == 'set_only': - if attribute.is_set(item): - value = attribute.get_local_value(item) - if value is ftrack_api.symbol.NOT_SET: - value = attribute.get_remote_value(item) - - elif entity_attribute_strategy == 'modified_only': - if attribute.is_modified(item): - value = attribute.get_local_value(item) - - elif entity_attribute_strategy == 'persisted_only': - value = attribute.get_remote_value(item) - - if value is not ftrack_api.symbol.NOT_SET: - if isinstance( - attribute, ftrack_api.attribute.ReferenceAttribute - ): - if isinstance(value, ftrack_api.entity.base.Entity): - value = self.entity_reference(value) - - data[attribute.name] = value - - return data - - if isinstance( - item, ftrack_api.collection.MappedCollectionProxy - ): - # Use proxied collection for serialisation. 
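The chosen strategy decides what travels over the wire; ``commit`` itself serialises with ``modified_only``. A sketch, assuming ``task`` is an attached entity::

    # Serialise only attributes changed locally since the last commit.
    payload = session.encode(task, entity_attribute_strategy='modified_only')

    # Decoding reconstructs entities but does not merge them into the
    # session cache.
    data = session.decode(payload)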
- item = item.collection - - if isinstance(item, ftrack_api.collection.Collection): - data = [] - for entity in item: - data.append(self.entity_reference(entity)) - - return data - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. - - Return a mapping containing the __entity_type__ of the entity along with - the key, value pairs that make up it's primary key. - - ''' - reference = { - '__entity_type__': entity.entity_type - } - with self.auto_populating(False): - reference.update(ftrack_api.inspection.primary_key(entity)) - - return reference - - def _entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. - - Return a mapping containing the __entity_type__ of the entity along with - the key, value pairs that make up it's primary key. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - warnings.warn( - ( - "Session._entity_reference is now available as public method " - "Session.entity_reference. The private method will be removed " - "in version 2.0." - ), - PendingDeprecationWarning - ) - return self.entity_reference(entity) - - def decode(self, string): - '''Return decoded JSON *string* as Python object.''' - with self.operation_recording(False): - return json.loads(string, object_hook=self._decode) - - def _decode(self, item): - '''Return *item* transformed into appropriate representation.''' - if isinstance(item, collections.Mapping): - if '__type__' in item: - if item['__type__'] == 'datetime': - item = arrow.get(item['value']) - - elif '__entity_type__' in item: - item = self._create( - item['__entity_type__'], item, reconstructing=True - ) - - return item - - def _get_locations(self, filter_inaccessible=True): - '''Helper to returns locations ordered by priority. - - If *filter_inaccessible* is True then only accessible locations will be - included in result. - - ''' - # Optimise this call. - locations = self.query('Location') - - # Filter. - if filter_inaccessible: - locations = [location for location in locations if location.accessor] - - # Sort by priority. - locations = sorted( - locations, key=lambda location: location.priority - ) - - return locations - - def pick_location(self, component=None): - '''Return suitable location to use. - - If no *component* specified then return highest priority accessible - location. Otherwise, return highest priority accessible location that - *component* is available in. - - Return None if no suitable location could be picked. - - ''' - if component: - return self.pick_locations([component])[0] - - else: - locations = self._get_locations() - if locations: - return locations[0] - else: - return None - - def pick_locations(self, components): - '''Return suitable locations for *components*. - - Return list of locations corresponding to *components* where each - picked location is the highest priority accessible location for that - component. If a component has no location available then its - corresponding entry will be None. 
- - ''' - candidate_locations = self._get_locations() - availabilities = self.get_component_availabilities( - components, locations=candidate_locations - ) - - locations = [] - for component, availability in zip(components, availabilities): - location = None - - for candidate_location in candidate_locations: - if availability.get(candidate_location['id']) > 0.0: - location = candidate_location - break - - locations.append(location) - - return locations - - def create_component( - self, path, data=None, location='auto' - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). - - If *location* is specified then automatically add component to that - location. The default of 'auto' will automatically pick a suitable - location to add the component to if one is available. To not add to any - location specifiy locations as None. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration in the - location. - ''' - if data is None: - data = {} - - if location == 'auto': - # Check if the component name matches one of the ftrackreview - # specific names. Add the component to the ftrack.review location if - # so. This is used to not break backwards compatibility. - if data.get('name') in ( - 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' - ): - location = self.get( - 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID - ) - - else: - location = self.pick_location() - - try: - collection = clique.parse(path) - - except ValueError: - # Assume is a single file. - if 'size' not in data: - data['size'] = self._get_filesystem_size(path) - - data.setdefault('file_type', os.path.splitext(path)[-1]) - - return self._create_component( - 'FileComponent', path, data, location - ) - - else: - # Calculate size of container and members. - member_sizes = {} - container_size = data.get('size') - - if container_size is not None: - if len(collection.indexes) > 0: - member_size = int( - round(old_div(container_size, len(collection.indexes))) - ) - for item in collection: - member_sizes[item] = member_size - - else: - container_size = 0 - for item in collection: - member_sizes[item] = self._get_filesystem_size(item) - container_size += member_sizes[item] - - # Create sequence component - container_path = collection.format('{head}{padding}{tail}') - data.setdefault('padding', collection.padding) - data.setdefault('file_type', os.path.splitext(container_path)[-1]) - data.setdefault('size', container_size) - - container = self._create_component( - 'SequenceComponent', container_path, data, location=None - ) - - # Create member components for sequence. 
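Assuming a ``session`` and a rendered image sequence on disk, registering the container and all member components is a single call (path and component name are illustrative)::

    component = session.create_component(
        '/renders/shot010/beauty.%04d.exr [1001-1120]',
        data={'name': 'beauty'}
    )
    session.commit()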
- for member_path in collection: - member_data = { - 'name': collection.match(member_path).group('index'), - 'container': container, - 'size': member_sizes[member_path], - 'file_type': os.path.splitext(member_path)[-1] - } - - component = self._create_component( - 'FileComponent', member_path, member_data, location=None - ) - container['members'].append(component) - - if location: - origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - location.add_component( - container, origin_location, recursive=True - ) - - return container - - def _create_component(self, entity_type, path, data, location): - '''Create and return component. - - See public function :py:func:`createComponent` for argument details. - - ''' - component = self.create(entity_type, data) - - # Add to special origin location so that it is possible to add to other - # locations. - origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component(component, path, recursive=False) - - if location: - location.add_component(component, origin_location, recursive=False) - - return component - - def _get_filesystem_size(self, path): - '''Return size from *path*''' - try: - size = os.path.getsize(path) - except OSError: - size = 0 - - return size - - def get_component_availability(self, component, locations=None): - '''Return availability of *component*. - - If *locations* is set then limit result to availability of *component* - in those *locations*. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.get_component_availabilities( - [component], locations=locations - )[0] - - def get_component_availabilities(self, components, locations=None): - '''Return availabilities of *components*. - - If *locations* is set then limit result to availabilities of - *components* in those *locations*. - - Return a list of dictionaries of {location_id:percentage_availability}. - The list indexes correspond to those of *components*. - - ''' - availabilities = [] - - if locations is None: - locations = self.query('Location') - - # Separate components into two lists, those that are containers and - # those that are not, so that queries can be optimised. - standard_components = [] - container_components = [] - - for component in components: - if 'members' in list(component.keys()): - container_components.append(component) - else: - standard_components.append(component) - - # Perform queries. 
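# Sketch: reading the availability mapping returned by
# get_component_availability above; values are percentages keyed by
# location id.
availability = session.get_component_availability(component)
for location_id, percentage in availability.items():
    if percentage == 100.0:
        print('Component fully available in location', location_id)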
- if standard_components: - self.populate( - standard_components, 'component_locations.location_id' - ) - - if container_components: - self.populate( - container_components, - 'members, component_locations.location_id' - ) - - base_availability = {} - for location in locations: - base_availability[location['id']] = 0.0 - - for component in components: - availability = base_availability.copy() - availabilities.append(availability) - - is_container = 'members' in list(component.keys()) - if is_container and len(component['members']): - member_availabilities = self.get_component_availabilities( - component['members'], locations=locations - ) - multiplier = old_div(1.0, len(component['members'])) - for member, member_availability in zip( - component['members'], member_availabilities - ): - for location_id, ratio in list(member_availability.items()): - availability[location_id] += ( - ratio * multiplier - ) - else: - for component_location in component['component_locations']: - location_id = component_location['location_id'] - if location_id in availability: - availability[location_id] = 100.0 - - for location_id, percentage in list(availability.items()): - # Avoid quantization error by rounding percentage and clamping - # to range 0-100. - adjusted_percentage = round(percentage, 9) - adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) - availability[location_id] = adjusted_percentage - - return availabilities - - def delayed_job(self, job_type): - '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. - - *job_type* should be one of the allowed job types. There is currently - only one remote job type "SYNC_USERS_LDAP". - ''' - if job_type not in (ftrack_api.symbol.JOB_SYNC_USERS_LDAP, ): - raise ValueError( - u'Invalid Job type: {0}.'.format(job_type) - ) - - operation = { - 'action': 'delayed_job', - 'job_type': job_type.name - } - - try: - result = self._call( - [operation] - )[0] - - except ftrack_api.exception.ServerError as error: - raise - - return result['data'] - - def get_widget_url(self, name, entity=None, theme=None): - '''Return an authenticated URL for widget with *name* and given options. - - The returned URL will be authenticated using a token which will expire - after 6 minutes. - - *name* should be the name of the widget to return and should be one of - 'info', 'tasks' or 'tasks_browser'. - - Certain widgets require an entity to be specified. If so, specify it by - setting *entity* to a valid entity instance. - - *theme* sets the theme of the widget and can be either 'light' or 'dark' - (defaulting to 'dark' if an invalid option given). - - ''' - operation = { - 'action': 'get_widget_url', - 'name': name, - 'theme': theme - } - if entity: - operation['entity_type'] = entity.entity_type - operation['entity_key'] = ( - list(ftrack_api.inspection.primary_key(entity).values()) - ) - - try: - result = self._call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'get_widget_url\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "get_widget_url", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - else: - return result[0]['widget_url'] - - def encode_media(self, media, version_id=None, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. 
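# Sketch: requesting a short-lived authenticated widget URL as described in
# get_widget_url above; 'tasks' and the theme are illustrative choices.
url = session.get_widget_url('tasks', entity=task, theme='light')
print(url)  # token expires after roughly six minutes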
- - *media* can be a path to a file or a FileComponent in the ftrack.server - location. - - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible that can be used - as a thumbnail. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *version_id* is specified, the new components will automatically be - associated with the AssetVersion. Otherwise, the components will not - be associated to a version even if the supplied *media* belongs to one. - A server version of 3.3.32 or higher is required for the version_id - argument to function properly. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. - ''' - if isinstance(media, basestring): - # Media is a path to a file. - server_location = self.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - if keep_original == 'auto': - keep_original = False - - component_data = None - if keep_original: - component_data = dict(version_id=version_id) - - component = self.create_component( - path=media, - data=component_data, - location=server_location - ) - - # Auto commit to ensure component exists when sent to server. - self.commit() - - elif ( - hasattr(media, 'entity_type') and - media.entity_type in ('FileComponent',) - ): - # Existing file component. - component = media - if keep_original == 'auto': - keep_original = True - - else: - raise ValueError( - 'Unable to encode media of type: {0}'.format(type(media)) - ) - - operation = { - 'action': 'encode_media', - 'component_id': component['id'], - 'version_id': version_id, - 'keep_original': keep_original - } - - try: - result = self._call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'encode_media\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "encode_media", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return self.get('Job', result[0]['job_id']) - - def get_upload_metadata( - self, component_id, file_name, file_size, checksum=None - ): - '''Return URL and headers used to upload data for *component_id*. - - *file_name* and *file_size* should match the components details. - - The returned URL should be requested using HTTP PUT with the specified - headers. - - The *checksum* is used as the Content-MD5 header and should contain - the base64-encoded 128-bit MD5 digest of the message (without the - headers) according to RFC 1864. This can be used as a message integrity - check to verify that the data is the same data that was originally sent. 
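# Sketch: submitting a file for web encoding via encode_media above; the
# result is a Job entity whose 'data' attribute carries the JSON documented
# in the docstring (path is illustrative).
import json

job = session.encode_media('/renders/sh010/preview.mov')
job_data = json.loads(job['data'])
print(job_data.get('source_component_id'))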
- ''' - operation = { - 'action': 'get_upload_metadata', - 'component_id': component_id, - 'file_name': file_name, - 'file_size': file_size, - 'checksum': checksum - } - - try: - result = self._call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'get_upload_metadata\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"get_upload_metadata", please update server and try ' - 'again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return result[0] - - def send_user_invite(self, user): - '''Send a invitation to the provided *user*. - - *user* is a User instance - - ''' - - self.send_user_invites( - [user] - ) - - def send_user_invites(self, users): - '''Send a invitation to the provided *user*. - - *users* is a list of User instances - - ''' - - operations = [] - - for user in users: - operations.append( - { - 'action': 'send_user_invite', - 'user_id': user['id'] - } - ) - - try: - self._call(operations) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_user_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_user_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - def send_review_session_invite(self, invitee): - '''Send an invite to a review session to *invitee*. - - *invitee* is a instance of ReviewSessionInvitee. - - .. note:: - - The *invitee* must be committed. - - ''' - self.send_review_session_invites([invitee]) - - def send_review_session_invites(self, invitees): - '''Send an invite to a review session to a list of *invitees*. - - *invitee* is a list of ReviewSessionInvitee objects. - - .. note:: - - All *invitees* must be committed. - - ''' - operations = [] - - for invitee in invitees: - operations.append( - { - 'action': 'send_review_session_invite', - 'review_session_invitee_id': invitee['id'] - } - ) - - try: - self._call(operations) - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. 
- if 'Invalid action u\'send_review_session_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_review_session_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - -class AutoPopulatingContext(object): - '''Context manager for temporary change of session auto_populate value.''' - - def __init__(self, session, auto_populate): - '''Initialise context.''' - super(AutoPopulatingContext, self).__init__() - self._session = session - self._auto_populate = auto_populate - self._current_auto_populate = None - - def __enter__(self): - '''Enter context switching to desired auto populate setting.''' - self._current_auto_populate = self._session.auto_populate - self._session.auto_populate = self._auto_populate - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context resetting auto populate to original setting.''' - self._session.auto_populate = self._current_auto_populate - - -class OperationRecordingContext(object): - '''Context manager for temporary change of session record_operations.''' - - def __init__(self, session, record_operations): - '''Initialise context.''' - super(OperationRecordingContext, self).__init__() - self._session = session - self._record_operations = record_operations - self._current_record_operations = None - - def __enter__(self): - '''Enter context.''' - self._current_record_operations = self._session.record_operations - self._session.record_operations = self._record_operations - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context.''' - self._session.record_operations = self._current_record_operations - - -class OperationPayload(collections.MutableMapping): - '''Represent operation payload.''' - - def __init__(self, *args, **kwargs): - '''Initialise payload.''' - super(OperationPayload, self).__init__() - self._data = dict() - self.update(dict(*args, **kwargs)) - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/pype/vendor/ftrack_api/structure/__init__.py b/pype/vendor/ftrack_api/structure/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/pype/vendor/ftrack_api/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/pype/vendor/ftrack_api/structure/base.py b/pype/vendor/ftrack_api/structure/base.py deleted file mode 100644 index de3335f9f4..0000000000 --- a/pype/vendor/ftrack_api/structure/base.py +++ /dev/null @@ -1,38 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from builtins import object -from abc import ABCMeta, abstractmethod -from future.utils import with_metaclass - - -class Structure(with_metaclass(ABCMeta, object)): - '''Structure plugin interface. - - A structure plugin should compute appropriate paths for data. 
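# Sketch of how the two context managers above are reached through the
# Session helpers (auto_populating / operation_recording) used earlier:
with session.auto_populating(False):
    # Attribute access returns NOT_SET rather than querying the server.
    print(task['name'])

with session.operation_recording(False):
    task['name'] = 'scratch'  # not recorded, so a later commit() skips it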
- - ''' - - def __init__(self, prefix=''): - '''Initialise structure.''' - self.prefix = prefix - self.path_separator = '/' - super(Structure, self).__init__() - - @abstractmethod - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - - def _get_sequence_expression(self, sequence): - '''Return a sequence expression for *sequence* component.''' - padding = sequence['padding'] - if padding: - expression = '%0{0}d'.format(padding) - else: - expression = '%d' - - return expression diff --git a/pype/vendor/ftrack_api/structure/entity_id.py b/pype/vendor/ftrack_api/structure/entity_id.py deleted file mode 100644 index ae466bf6d9..0000000000 --- a/pype/vendor/ftrack_api/structure/entity_id.py +++ /dev/null @@ -1,12 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.structure.base - - -class EntityIdStructure(ftrack_api.structure.base.Structure): - '''Entity id pass-through structure.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a *resourceIdentifier* for supplied *entity*.''' - return entity['id'] diff --git a/pype/vendor/ftrack_api/structure/id.py b/pype/vendor/ftrack_api/structure/id.py deleted file mode 100644 index acc3e21b02..0000000000 --- a/pype/vendor/ftrack_api/structure/id.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class IdStructure(ftrack_api.structure.base.Structure): - '''Id based structure supporting Components only. - - A components unique id will be used to form a path to store the data at. - To avoid millions of entries in one directory each id is chunked into four - prefix directories with the remainder used to name the file:: - - /prefix/1/2/3/4/56789 - - If the component has a defined filetype it will be added to the path:: - - /prefix/1/2/3/4/56789.exr - - Components that are children of container components will be placed inside - the id structure of their parent:: - - /prefix/1/2/3/4/56789/355827648d.exr - /prefix/1/2/3/4/56789/ajf24215b5.exr - - However, sequence children will be named using their label as an index and - a common prefix of 'file.':: - - /prefix/1/2/3/4/56789/file.0001.exr - /prefix/1/2/3/4/56789/file.0002.exr - - ''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - if entity.entity_type in ('FileComponent',): - # When in a container, place the file inside a directory named - # after the container. - container = entity['container'] - if container and container is not ftrack_api.symbol.NOT_SET: - path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Label doubles as index for now. - name = 'file.{0}{1}'.format( - entity['name'], entity['file_type'] - ) - parts = [os.path.dirname(path), name] - - else: - # Just place uniquely identified file into directory - name = entity['id'] + entity['file_type'] - parts = [path, name] - - else: - name = entity['id'][4:] + entity['file_type'] - parts = ([self.prefix] + list(entity['id'][:4]) + [name]) - - elif entity.entity_type in ('SequenceComponent',): - name = 'file' - - # Add a sequence identifier. 
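# Illustrative subclass of the abstract Structure base above (hypothetical,
# not shipped with ftrack_api): place every component flat, named by its id.
import ftrack_api.structure.base


class FlatStructure(ftrack_api.structure.base.Structure):
    '''Place each component directly under the prefix, keyed by id.'''

    def get_resource_identifier(self, entity, context=None):
        return self.path_separator.join(
            [self.prefix, entity['id']]
        ).strip('/')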
- sequence_expression = self._get_sequence_expression(entity) - name += '.{0}'.format(sequence_expression) - - if ( - entity['file_type'] and - entity['file_type'] is not ftrack_api.symbol.NOT_SET - ): - name += entity['file_type'] - - parts = ([self.prefix] + list(entity['id'][:4]) - + [entity['id'][4:]] + [name]) - - elif entity.entity_type in ('ContainerComponent',): - # Just an id directory - parts = ([self.prefix] + - list(entity['id'][:4]) + [entity['id'][4:]]) - - else: - raise NotImplementedError('Cannot generate path for unsupported ' - 'entity {0}'.format(entity)) - - return self.path_separator.join(parts).strip('/') diff --git a/pype/vendor/ftrack_api/structure/origin.py b/pype/vendor/ftrack_api/structure/origin.py deleted file mode 100644 index 0d4d3a57f5..0000000000 --- a/pype/vendor/ftrack_api/structure/origin.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from .base import Structure - - -class OriginStructure(Structure): - '''Origin structure that passes through existing resource identifier.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* should be a mapping that includes at least a - 'source_resource_identifier' key that refers to the resource identifier - to pass through. - - ''' - if context is None: - context = {} - - resource_identifier = context.get('source_resource_identifier') - if resource_identifier is None: - raise ValueError( - 'Could not generate resource identifier as no source resource ' - 'identifier found in passed context.' - ) - - return resource_identifier diff --git a/pype/vendor/ftrack_api/structure/standard.py b/pype/vendor/ftrack_api/structure/standard.py deleted file mode 100644 index f1833f3f16..0000000000 --- a/pype/vendor/ftrack_api/structure/standard.py +++ /dev/null @@ -1,215 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -from builtins import str -import os -import re -import unicodedata - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class StandardStructure(ftrack_api.structure.base.Structure): - '''Project hierarchy based structure that only supports Components. - - The resource identifier is generated from the project code, the name - of objects in the project structure, asset name and version number:: - - my_project/folder_a/folder_b/asset_name/v003 - - If the component is a `FileComponent` then the name of the component and the - file type are used as filename in the resource_identifier:: - - my_project/folder_a/folder_b/asset_name/v003/foo.jpg - - If the component is a `SequenceComponent` then a sequence expression, - `%04d`, is used. E.g. a component with the name `foo` yields:: - - my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg - - For the member components their index in the sequence is used:: - - my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg - - The name of the component is added to the resource identifier if the - component is a `ContainerComponent`. E.g. a container component with the - name `bar` yields:: - - my_project/folder_a/folder_b/asset_name/v003/bar - - For a member of that container the file name is based on the component name - and file type:: - - my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf - - ''' - - def __init__( - self, project_versions_prefix=None, illegal_character_substitute='_' - ): - '''Initialise structure. 
-
-        If *project_versions_prefix* is defined, insert after the project code
-        for versions published directly under the project::
-
-            my_project/<project_versions_prefix>/v001/foo.jpg
-
-        Replace illegal characters with *illegal_character_substitute* if
-        defined.
-
-        .. note::
-
-            Nested component containers/sequences are not supported.
-
-        '''
-        super(StandardStructure, self).__init__()
-        self.project_versions_prefix = project_versions_prefix
-        self.illegal_character_substitute = illegal_character_substitute
-
-    def _get_parts(self, entity):
-        '''Return resource identifier parts from *entity*.'''
-        session = entity.session
-
-        version = entity['version']
-
-        if version is ftrack_api.symbol.NOT_SET and entity['version_id']:
-            version = session.get('AssetVersion', entity['version_id'])
-
-        error_message = (
-            'Component {0!r} must be attached to a committed '
-            'version and a committed asset with a parent context.'.format(
-                entity
-            )
-        )
-
-        if (
-            version is ftrack_api.symbol.NOT_SET or
-            version in session.created
-        ):
-            raise ftrack_api.exception.StructureError(error_message)
-
-        link = version['link']
-
-        if not link:
-            raise ftrack_api.exception.StructureError(error_message)
-
-        structure_names = [
-            item['name']
-            for item in link[1:-1]
-        ]
-
-        project_id = link[0]['id']
-        project = session.get('Project', project_id)
-        asset = version['asset']
-
-        version_number = self._format_version(version['version'])
-
-        parts = []
-        parts.append(project['name'])
-
-        if structure_names:
-            parts.extend(structure_names)
-        elif self.project_versions_prefix:
-            # Add *project_versions_prefix* if configured and the version is
-            # published directly under the project.
-            parts.append(self.project_versions_prefix)
-
-        parts.append(asset['name'])
-        parts.append(version_number)
-
-        return [self.sanitise_for_filesystem(part) for part in parts]
-
-    def _format_version(self, number):
-        '''Return a formatted string representing version *number*.'''
-        return 'v{0:03d}'.format(number)
-
-    def sanitise_for_filesystem(self, value):
-        '''Return *value* with illegal filesystem characters replaced.
-
-        An illegal character is one that is not typically valid for filesystem
-        usage, such as non ascii characters, or can be awkward to use in a
-        filesystem, such as spaces. Replace these characters with
-        the character specified by *illegal_character_substitute* on
-        initialisation. If no character was specified as substitute then return
-        *value* unmodified.
-
-        '''
-        if self.illegal_character_substitute is None:
-            return value
-
-        value = unicodedata.normalize('NFKD', str(value)).encode('ascii', 'ignore')
-        value = re.sub('[^\w\.-]', self.illegal_character_substitute, value.decode('utf-8'))
-        return str(value.strip().lower())
-
-    def get_resource_identifier(self, entity, context=None):
-        '''Return a resource identifier for supplied *entity*.
-
-        *context* can be a mapping that supplies additional information, but
-        is unused in this implementation.
-
-
-        Raise a :py:exc:`ftrack_api.exeption.StructureError` if *entity* is not
-        attached to a committed version and a committed asset with a parent
-        context.
-
-        '''
-        if entity.entity_type in ('FileComponent',):
-            container = entity['container']
-
-            if container:
-                # Get resource identifier for container.
-                container_path = self.get_resource_identifier(container)
-
-                if container.entity_type in ('SequenceComponent',):
-                    # Strip the sequence component expression from the parent
-                    # container and back the correct filename, i.e.
-                    # /sequence/component/sequence_component_name.0012.exr.
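# Behaviour sketch for sanitise_for_filesystem above; the outputs shown are
# what the NFKD-normalise / substitute / lowercase pipeline is expected to
# produce, not verified against this exact vendored copy.
structure = StandardStructure(illegal_character_substitute='_')
print(structure.sanitise_for_filesystem('Héllo World.jpg'))  # hello_world.jpg
print(structure.sanitise_for_filesystem('Shot 010'))         # shot_010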
- name = '{0}.{1}{2}'.format( - container['name'], entity['name'], entity['file_type'] - ) - parts = [ - os.path.dirname(container_path), - self.sanitise_for_filesystem(name) - ] - - else: - # Container is not a sequence component so add it as a - # normal component inside the container. - name = entity['name'] + entity['file_type'] - parts = [ - container_path, self.sanitise_for_filesystem(name) - ] - - else: - # File component does not have a container, construct name from - # component name and file type. - parts = self._get_parts(entity) - name = entity['name'] + entity['file_type'] - parts.append(self.sanitise_for_filesystem(name)) - - elif entity.entity_type in ('SequenceComponent',): - # Create sequence expression for the sequence component and add it - # to the parts. - parts = self._get_parts(entity) - sequence_expression = self._get_sequence_expression(entity) - parts.append( - '{0}.{1}{2}'.format( - self.sanitise_for_filesystem(entity['name']), - sequence_expression, - self.sanitise_for_filesystem(entity['file_type']) - ) - ) - - elif entity.entity_type in ('ContainerComponent',): - # Add the name of the container to the resource identifier parts. - parts = self._get_parts(entity) - parts.append(self.sanitise_for_filesystem(entity['name'])) - - else: - raise NotImplementedError( - 'Cannot generate resource identifier for unsupported ' - 'entity {0!r}'.format(entity) - ) - - return self.path_separator.join(parts) diff --git a/pype/vendor/ftrack_api/symbol.py b/pype/vendor/ftrack_api/symbol.py deleted file mode 100644 index acc074f08e..0000000000 --- a/pype/vendor/ftrack_api/symbol.py +++ /dev/null @@ -1,76 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - - -from builtins import object -class Symbol(object): - '''A constant symbol.''' - - def __init__(self, name, value=True): - '''Initialise symbol with unique *name* and *value*. - - *value* is used for nonzero testing. - - ''' - self.name = name - self.value = value - - def __str__(self): - '''Return string representation.''' - return self.name - - def __repr__(self): - '''Return representation.''' - return '{0}({1})'.format(self.__class__.__name__, self.name) - - def __bool__(self): - '''Return whether symbol represents non-zero value.''' - return bool(self.value) - - def __copy__(self): - '''Return shallow copy. - - Overridden to always return same instance. - - ''' - return self - - -#: Symbol representing that no value has been set or loaded. -NOT_SET = Symbol('NOT_SET', False) - -#: Symbol representing created state. -CREATED = Symbol('CREATED') - -#: Symbol representing modified state. -MODIFIED = Symbol('MODIFIED') - -#: Symbol representing deleted state. -DELETED = Symbol('DELETED') - -#: Topic published when component added to a location. -COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' - -#: Topic published when component removed from a location. -COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' - -#: Identifier of builtin origin location. -ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' - -#: Identifier of builtin unmanaged location. -UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' - -#: Identifier of builtin review location. -REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' - -#: Identifier of builtin connect location. -CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' - -#: Identifier of builtin server location. 
-SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b'
-
-#: Chunk size used when working with data.
-CHUNK_SIZE = 8192
-
-#: Symbol representing syncing users with ldap
-JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP')
diff --git a/pype/vendor/future/__init__.py b/pype/vendor/future/__init__.py
deleted file mode 100644
index 8139aa33a9..0000000000
--- a/pype/vendor/future/__init__.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""
-future: Easy, safe support for Python 2/3 compatibility
-=======================================================
-
-``future`` is the missing compatibility layer between Python 2 and Python
-3. It allows you to use a single, clean Python 3.x-compatible codebase to
-support both Python 2 and Python 3 with minimal overhead.
-
-It is designed to be used as follows::
-
-    from __future__ import (absolute_import, division,
-                            print_function, unicode_literals)
-    from builtins import (
-        bytes, dict, int, list, object, range, str,
-        ascii, chr, hex, input, next, oct, open,
-        pow, round, super,
-        filter, map, zip)
-
-followed by predominantly standard, idiomatic Python 3 code that then runs
-similarly on Python 2.6/2.7 and Python 3.3+.
-
-The imports have no effect on Python 3. On Python 2, they shadow the
-corresponding builtins, which normally have different semantics on Python 3
-versus 2, to provide their Python 3 semantics.
-
-
-Standard library reorganization
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-``future`` supports the standard library reorganization (PEP 3108) through the
-following Py3 interfaces:
-
-    >>> # Top-level packages with Py3 names provided on Py2:
-    >>> import html.parser
-    >>> import queue
-    >>> import tkinter.dialog
-    >>> import xmlrpc.client
-    >>> # etc.
-
-    >>> # Aliases provided for extensions to existing Py2 module names:
-    >>> from future.standard_library import install_aliases
-    >>> install_aliases()
-
-    >>> from collections import Counter, OrderedDict   # backported to Py2.6
-    >>> from collections import UserDict, UserList, UserString
-    >>> import urllib.request
-    >>> from itertools import filterfalse, zip_longest
-    >>> from subprocess import getoutput, getstatusoutput
-
-
-Automatic conversion
---------------------
-
-An included script called `futurize
-<http://python-future.org/automatic_conversion.html>`_ aids in converting
-code (from either Python 2 or Python 3) to code compatible with both
-platforms. It is similar to ``python-modernize`` but goes further in
-providing Python 3 compatibility through the use of the backported types
-and builtin functions in ``future``.
-
-
-Documentation
--------------
-
-See: http://python-future.org
-
-
-Credits
--------
-
-:Author:  Ed Schofield
-:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte
-          Ltd, Singapore. http://pythoncharmers.com
-:Others:  See docs/credits.rst or http://python-future.org/credits.html
-
-
-Licensing
----------
-Copyright 2013-2016 Python Charmers Pty Ltd, Australia.
-The software is distributed under an MIT licence. See LICENSE.txt.
- -""" - -__title__ = 'future' -__author__ = 'Ed Schofield' -__license__ = 'MIT' -__copyright__ = 'Copyright 2013-2016 Python Charmers Pty Ltd' -__ver_major__ = 0 -__ver_minor__ = 16 -__ver_patch__ = 0 -__ver_sub__ = '' -__version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__, - __ver_patch__, __ver_sub__) diff --git a/pype/vendor/future/backports/__init__.py b/pype/vendor/future/backports/__init__.py deleted file mode 100644 index 68291141c9..0000000000 --- a/pype/vendor/future/backports/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -future.backports package -""" - -from __future__ import absolute_import - -import sys - -__future_module__ = True -from future.standard_library import import_top_level_modules - - -if sys.version_info[0] == 3: - import_top_level_modules() - - -from .misc import (ceil, - OrderedDict, - Counter, - ChainMap, - check_output, - count, - recursive_repr, - _count_elements, - cmp_to_key - ) diff --git a/pype/vendor/future/backports/_markupbase.py b/pype/vendor/future/backports/_markupbase.py deleted file mode 100644 index d51bfc7ef1..0000000000 --- a/pype/vendor/future/backports/_markupbase.py +++ /dev/null @@ -1,422 +0,0 @@ -"""Shared support for scanning document type declarations in HTML and XHTML. - -Backported for python-future from Python 3.3. Reason: ParserBase is an -old-style class in the Python 2.7 source of markupbase.py, which I suspect -might be the cause of sporadic unit-test failures on travis-ci.org with -test_htmlparser.py. The test failures look like this: - - ====================================================================== - -ERROR: test_attr_entity_replacement (future.tests.test_htmlparser.AttributesStrictTestCase) - ----------------------------------------------------------------------- - -Traceback (most recent call last): - File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 661, in test_attr_entity_replacement - [("starttag", "a", [("b", "&><\"'")])]) - File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 93, in _run_check - collector = self.get_collector() - File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 617, in get_collector - return EventCollector(strict=True) - File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 27, in __init__ - html.parser.HTMLParser.__init__(self, *args, **kw) - File "/home/travis/build/edschofield/python-future/future/backports/html/parser.py", line 135, in __init__ - self.reset() - File "/home/travis/build/edschofield/python-future/future/backports/html/parser.py", line 143, in reset - _markupbase.ParserBase.reset(self) - -TypeError: unbound method reset() must be called with ParserBase instance as first argument (got EventCollector instance instead) - -This module is used as a foundation for the html.parser module. It has no -documented public API and should not be used directly. 
-
-"""
-
-import re
-
-_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
-_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
-_commentclose = re.compile(r'--\s*>')
-_markedsectionclose = re.compile(r']\s*]\s*>')
-
-# An analysis of the MS-Word extensions is available at
-# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
-
-_msmarkedsectionclose = re.compile(r']\s*>')
-
-del re
-
-
-class ParserBase(object):
-    """Parser base class which provides some common support methods used
-    by the SGML/HTML and XHTML parsers."""
-
-    def __init__(self):
-        if self.__class__ is ParserBase:
-            raise RuntimeError(
-                "_markupbase.ParserBase must be subclassed")
-
-    def error(self, message):
-        raise NotImplementedError(
-            "subclasses of ParserBase must override error()")
-
-    def reset(self):
-        self.lineno = 1
-        self.offset = 0
-
-    def getpos(self):
-        """Return current line number and offset."""
-        return self.lineno, self.offset
-
-    # Internal -- update line number and offset.  This should be
-    # called for each piece of data exactly once, in order -- in other
-    # words the concatenation of all the input strings to this
-    # function should be exactly the entire input.
-    def updatepos(self, i, j):
-        if i >= j:
-            return j
-        rawdata = self.rawdata
-        nlines = rawdata.count("\n", i, j)
-        if nlines:
-            self.lineno = self.lineno + nlines
-            pos = rawdata.rindex("\n", i, j) # Should not fail
-            self.offset = j-(pos+1)
-        else:
-            self.offset = self.offset + j-i
-        return j
-
-    _decl_otherchars = ''
-
-    # Internal -- parse declaration (for use by subclasses).
-    def parse_declaration(self, i):
-        # This is some sort of declaration; in "HTML as
-        # deployed," this should only be the document type
-        # declaration ("<!DOCTYPE html...>").
-        # ISO 8879:1986, however, has more complex
-        # declaration syntax for elements in <!...>, including:
-        # --comment--
-        # [marked section]
-        # name in the following list: ENTITY, DOCTYPE, ELEMENT,
-        # ATTLIST, NOTATION, SHORTREF, USEMAP,
-        # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
-        rawdata = self.rawdata
-        j = i + 2
-        assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
-        if rawdata[j:j+1] == ">":
-            # the empty comment <!>
-            return j + 1
-        if rawdata[j:j+1] in ("-", ""):
-            # Start of comment followed by buffer boundary,
-            # or just a buffer boundary.
-            return -1
-        # A simple, practical version could look like: ((name|stringlit) S*) + '>'
-        n = len(rawdata)
-        if rawdata[j:j+2] == '--': #comment
-            # Locate --.*-- as the body of the comment
-            return self.parse_comment(i)
-        elif rawdata[j] == '[': #marked section
-            # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
-            # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
-            # Note that this is extended by Microsoft Office "Save as Web" function
-            # to include [if...] and [endif].
-            return self.parse_marked_section(i)
-        else: #all other declaration elements
-            decltype, j = self._scan_name(j, i)
-            if j < 0:
-                return j
-            if decltype == "doctype":
-                self._decl_otherchars = ''
-            while j < n:
-                c = rawdata[j]
-                if c == ">":
-                    # end of declaration syntax
-                    data = rawdata[i+2:j]
-                    if decltype == "doctype":
-                        self.handle_decl(data)
-                    else:
-                        # According to the HTML5 specs sections "8.2.4.44 Bogus
-                        # comment state" and "8.2.4.45 Markup declaration open
-                        # state", a comment token should be emitted.
-                        # Calling unknown_decl provides more flexibility though.
-                        self.unknown_decl(data)
-                    return j + 1
-                if c in "\"'":
-                    m = _declstringlit_match(rawdata, j)
-                    if not m:
-                        return -1 # incomplete
-                    j = m.end()
-                elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
-                    name, j = self._scan_name(j, i)
-                elif c in self._decl_otherchars:
-                    j = j + 1
-                elif c == "[":
-                    # this could be handled in a separate doctype parser
-                    if decltype == "doctype":
-                        j = self._parse_doctype_subset(j + 1, i)
-                    elif decltype in set(["attlist", "linktype", "link", "element"]):
-                        # must tolerate []'d groups in a content model in an element declaration
-                        # also in data attribute specifications of attlist declaration
-                        # also link type declaration subsets in linktype declarations
-                        # also link attribute specification lists in link declarations
-                        self.error("unsupported '[' char in %s declaration" % decltype)
-                    else:
-                        self.error("unexpected '[' char in declaration")
-                else:
-                    self.error(
-                        "unexpected %r char in declaration" % rawdata[j])
-                if j < 0:
-                    return j
-            return -1 # incomplete
-
-    # Internal -- parse a marked section
-    # Override this to handle MS-word extension syntax content
-    def parse_marked_section(self, i, report=1):
-        rawdata= self.rawdata
-        assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
-        sectName, j = self._scan_name( i+3, i )
-        if j < 0:
-            return j
-        if sectName in set(["temp", "cdata", "ignore", "include", "rcdata"]):
-            # look for standard ]]> ending
-            match= _markedsectionclose.search(rawdata, i+3)
-        elif sectName in set(["if", "else", "endif"]):
-            # look for MS Office ]> ending
-            match= _msmarkedsectionclose.search(rawdata, i+3)
-        else:
-            self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
-        if not match:
-            return -1
-        if report:
-            j = match.start(0)
-            self.unknown_decl(rawdata[i+3: j])
-        return match.end(0)
-
-    # Internal -- parse comment, return length or -1 if not terminated
-    def parse_comment(self, i, report=1):
-        rawdata = self.rawdata
-        if rawdata[i:i+4] != '<!--':
-            self.error('unexpected call to parse_comment()')
-        match = _commentclose.search(rawdata, i+4)
-        if not match:
-            return -1
-        if report:
-            j = match.start(0)
-            self.handle_comment(rawdata[i+4: j])
-        return match.end(0)
diff --git a/pype/vendor/future/backports/email/generator.py b/pype/vendor/future/backports/email/generator.py
deleted file mode 100644
--- a/pype/vendor/future/backports/email/generator.py
+++ /dev/null
-        # --> delimiter transport-padding
-        # --> CRLF body-part
-        for body_part in msgtexts:
-            # delimiter transport-padding CRLF
-            self.write(self._NL + '--' + boundary + self._NL)
-            # body-part
-            self._fp.write(body_part)
-        # close-delimiter transport-padding
-        self.write(self._NL + '--' + boundary + '--')
-        if msg.epilogue is not None:
-            self.write(self._NL)
-            if self._mangle_from_:
-                epilogue = fcre.sub('>From ', msg.epilogue)
-            else:
-                epilogue = msg.epilogue
-            self._write_lines(epilogue)
-
-    def _handle_multipart_signed(self, msg):
-        # The contents of signed parts has to stay unmodified in order to keep
-        # the signature intact per RFC1847 2.1, so we disable header wrapping.
-        # RDM: This isn't enough to completely preserve the part, but it helps.
-        p = self.policy
-        self.policy = p.clone(max_line_length=0)
-        try:
-            self._handle_multipart(msg)
-        finally:
-            self.policy = p
-
-    def _handle_message_delivery_status(self, msg):
-        # We can't just write the headers directly to self's file object
-        # because this will leave an extra newline between the last header
-        # block and the boundary.  Sigh.
-        blocks = []
-        for part in msg.get_payload():
-            s = self._new_buffer()
-            g = self.clone(s)
-            g.flatten(part, unixfrom=False, linesep=self._NL)
-            text = s.getvalue()
-            lines = text.split(self._encoded_NL)
-            # Strip off the unnecessary trailing empty line
-            if lines and lines[-1] == self._encoded_EMPTY:
-                blocks.append(self._encoded_NL.join(lines[:-1]))
-            else:
-                blocks.append(text)
-        # Now join all the blocks with an empty line.  This has the lovely
-        # effect of separating each block with an empty line, but not adding
-        # an extra one after the last one.
- self._fp.write(self._encoded_NL.join(blocks)) - - def _handle_message(self, msg): - s = self._new_buffer() - g = self.clone(s) - # The payload of a message/rfc822 part should be a multipart sequence - # of length 1. The zeroth element of the list should be the Message - # object for the subpart. Extract that object, stringify it, and - # write it out. - # Except, it turns out, when it's a string instead, which happens when - # and only when HeaderParser is used on a message of mime type - # message/rfc822. Such messages are generated by, for example, - # Groupwise when forwarding unadorned messages. (Issue 7970.) So - # in that case we just emit the string body. - payload = msg._payload - if isinstance(payload, list): - g.flatten(msg.get_payload(0), unixfrom=False, linesep=self._NL) - payload = s.getvalue() - else: - payload = self._encode(payload) - self._fp.write(payload) - - # This used to be a module level function; we use a classmethod for this - # and _compile_re so we can continue to provide the module level function - # for backward compatibility by doing - # _make_boudary = Generator._make_boundary - # at the end of the module. It *is* internal, so we could drop that... - @classmethod - def _make_boundary(cls, text=None): - # Craft a random boundary. If text is given, ensure that the chosen - # boundary doesn't appear in the text. - token = random.randrange(sys.maxsize) - boundary = ('=' * 15) + (_fmt % token) + '==' - if text is None: - return boundary - b = boundary - counter = 0 - while True: - cre = cls._compile_re('^--' + re.escape(b) + '(--)?$', re.MULTILINE) - if not cre.search(text): - break - b = boundary + '.' + str(counter) - counter += 1 - return b - - @classmethod - def _compile_re(cls, s, flags): - return re.compile(s, flags) - -class BytesGenerator(Generator): - """Generates a bytes version of a Message object tree. - - Functionally identical to the base Generator except that the output is - bytes and not string. When surrogates were used in the input to encode - bytes, these are decoded back to bytes for output. If the policy has - cte_type set to 7bit, then the message is transformed such that the - non-ASCII bytes are properly content transfer encoded, using the charset - unknown-8bit. - - The outfp object must accept bytes in its write method. - """ - - # Bytes versions of this constant for use in manipulating data from - # the BytesIO buffer. - _encoded_EMPTY = b'' - - def write(self, s): - self._fp.write(str(s).encode('ascii', 'surrogateescape')) - - def _new_buffer(self): - return BytesIO() - - def _encode(self, s): - return s.encode('ascii') - - def _write_headers(self, msg): - # This is almost the same as the string version, except for handling - # strings with 8bit bytes. - for h, v in msg.raw_items(): - self._fp.write(self.policy.fold_binary(h, v)) - # A blank line always separates headers from body - self.write(self._NL) - - def _handle_text(self, msg): - # If the string has surrogates the original source was bytes, so - # just write it back out. 
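# The backport above mirrors the standard library's email.generator; a quick
# stdlib-equivalent sketch of flattening a message through a Generator:
import io
from email.message import Message
from email.generator import Generator

msg = Message()
msg['Subject'] = 'Render finished'
msg.set_payload('sh010 beauty pass is done.')

buffer = io.StringIO()
Generator(buffer, mangle_from_=False, maxheaderlen=60).flatten(msg)
print(buffer.getvalue())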
-        if msg._payload is None:
-            return
-        if _has_surrogates(msg._payload) and not self.policy.cte_type=='7bit':
-            if self._mangle_from_:
-                msg._payload = fcre.sub(">From ", msg._payload)
-            self._write_lines(msg._payload)
-        else:
-            super(BytesGenerator,self)._handle_text(msg)
-
-    # Default body handler
-    _writeBody = _handle_text
-
-    @classmethod
-    def _compile_re(cls, s, flags):
-        return re.compile(s.encode('ascii'), flags)
-
-
-_FMT = '[Non-text (%(type)s) part of message omitted, filename %(filename)s]'
-
-class DecodedGenerator(Generator):
-    """Generates a text representation of a message.
-
-    Like the Generator base class, except that non-text parts are substituted
-    with a format string representing the part.
-    """
-    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None):
-        """Like Generator.__init__() except that an additional optional
-        argument is allowed.
-
-        Walks through all subparts of a message.  If the subpart is of main
-        type `text', then it prints the decoded payload of the subpart.
-
-        Otherwise, fmt is a format string that is used instead of the message
-        payload.  fmt is expanded with the following keywords (in
-        %(keyword)s format):
-
-        type       : Full MIME type of the non-text part
-        maintype   : Main MIME type of the non-text part
-        subtype    : Sub-MIME type of the non-text part
-        filename   : Filename of the non-text part
-        description: Description associated with the non-text part
-        encoding   : Content transfer encoding of the non-text part
-
-        The default value for fmt is None, meaning
-
-        [Non-text (%(type)s) part of message omitted, filename %(filename)s]
-        """
-        Generator.__init__(self, outfp, mangle_from_, maxheaderlen)
-        if fmt is None:
-            self._fmt = _FMT
-        else:
-            self._fmt = fmt
-
-    def _dispatch(self, msg):
-        for part in msg.walk():
-            maintype = part.get_content_maintype()
-            if maintype == 'text':
-                print(part.get_payload(decode=False), file=self)
-            elif maintype == 'multipart':
-                # Just skip this
-                pass
-            else:
-                print(self._fmt % {
-                    'type'       : part.get_content_type(),
-                    'maintype'   : part.get_content_maintype(),
-                    'subtype'    : part.get_content_subtype(),
-                    'filename'   : part.get_filename('[no filename]'),
-                    'description': part.get('Content-Description',
-                                            '[no description]'),
-                    'encoding'   : part.get('Content-Transfer-Encoding',
-                                            '[no encoding]'),
-                    }, file=self)
-
-
-# Helper used by Generator._make_boundary
-_width = len(repr(sys.maxsize-1))
-_fmt = '%%0%dd' % _width
-
-# Backward compatibility
-_make_boundary = Generator._make_boundary
diff --git a/pype/vendor/future/backports/email/header.py b/pype/vendor/future/backports/email/header.py
deleted file mode 100644
index 63bf038c02..0000000000
--- a/pype/vendor/future/backports/email/header.py
+++ /dev/null
@@ -1,581 +0,0 @@
-# Copyright (C) 2002-2007 Python Software Foundation
-# Author: Ben Gertzfield, Barry Warsaw
-# Contact: email-sig@python.org
-
-"""Header encoding and decoding functionality."""
-from __future__ import unicode_literals
-from __future__ import division
-from __future__ import absolute_import
-from future.builtins import bytes, range, str, super, zip
-
-__all__ = [
-    'Header',
-    'decode_header',
-    'make_header',
-    ]
-
-import re
-import binascii
-
-from future.backports import email
-from future.backports.email import base64mime
-from future.backports.email.errors import HeaderParseError
-import future.backports.email.charset as _charset
-
-# Helpers
-from future.backports.email.quoprimime import _max_append, header_decode
-
-Charset = _charset.Charset
-
-NL = '\n'
-SPACE = ' '
-BSPACE = b' '
-SPACE8 = ' ' * 8
-EMPTYSTRING = ''
-MAXLINELEN = 78
-FWS = ' \t'
-
-USASCII = Charset('us-ascii')
-UTF8 = Charset('utf-8')
-
-# Match encoded-word strings in the form =?charset?q?Hello_World?=
-ecre = re.compile(r'''
-  =\?                   # literal =?
-  (?P<charset>[^?]*?)   # non-greedy up to the next ? is the charset
-  \?                    # literal ?
-  (?P<encoding>[qb])    # either a "q" or a "b", case insensitive
-  \?                    # literal ?
-  (?P<encoded>.*?)      # non-greedy up to the next ?= is the encoded string
-  \?=                   # literal ?=
-  ''', re.VERBOSE | re.IGNORECASE | re.MULTILINE)
-
-# Field name regexp, including trailing colon, but not separating whitespace,
-# according to RFC 2822.  Character range is from tilde to exclamation mark.
-# For use with .match()
-fcre = re.compile(r'[\041-\176]+:$')
-
-# Find a header embedded in a putative header value.  Used to check for
-# header injection attack.
-_embeded_header = re.compile(r'\n[^ \t]+:')
-
-
-def decode_header(header):
-    """Decode a message header value without converting charset.
-
-    Returns a list of (string, charset) pairs containing each of the decoded
-    parts of the header.  Charset is None for non-encoded parts of the header,
-    otherwise a lower-case string containing the name of the character set
-    specified in the encoded string.
-
-    header may be a string that may or may not contain RFC2047 encoded words,
-    or it may be a Header object.
-
-    An email.errors.HeaderParseError may be raised when certain decoding error
-    occurs (e.g. a base64 decoding exception).
-    """
-    # If it is a Header object, we can just return the encoded chunks.
-    if hasattr(header, '_chunks'):
-        return [(_charset._encode(string, str(charset)), str(charset))
-                    for string, charset in header._chunks]
-    # If no encoding, just return the header with no charset.
-    if not ecre.search(header):
-        return [(header, None)]
-    # First step is to parse all the encoded parts into triplets of the form
-    # (encoded_string, encoding, charset).  For unencoded strings, the last
-    # two parts will be None.
-    words = []
-    for line in header.splitlines():
-        parts = ecre.split(line)
-        first = True
-        while parts:
-            unencoded = parts.pop(0)
-            if first:
-                unencoded = unencoded.lstrip()
-                first = False
-            if unencoded:
-                words.append((unencoded, None, None))
-            if parts:
-                charset = parts.pop(0).lower()
-                encoding = parts.pop(0).lower()
-                encoded = parts.pop(0)
-                words.append((encoded, encoding, charset))
-    # Now loop over words and remove words that consist of whitespace
-    # between two encoded strings.
-    import sys
-    droplist = []
-    for n, w in enumerate(words):
-        if n>1 and w[1] and words[n-2][1] and words[n-1][0].isspace():
-            droplist.append(n-1)
-    for d in reversed(droplist):
-        del words[d]
-
-    # The next step is to decode each encoded word by applying the reverse
-    # base64 or quopri transformation.  decoded_words is now a list of the
-    # form (decoded_word, charset).
-    decoded_words = []
-    for encoded_string, encoding, charset in words:
-        if encoding is None:
-            # This is an unencoded word.
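# Stdlib-equivalent sketch of the decode_header contract described above:
# each returned pair is (decoded chunk, charset-or-None).
from email.header import decode_header, make_header

pairs = decode_header('=?utf-8?b?SGVsbG8gd29ybGQ=?= (plain)')
subject = str(make_header(pairs))  # 'Hello world (plain)'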
- decoded_words.append((encoded_string, charset)) - elif encoding == 'q': - word = header_decode(encoded_string) - decoded_words.append((word, charset)) - elif encoding == 'b': - paderr = len(encoded_string) % 4 # Postel's law: add missing padding - if paderr: - encoded_string += '==='[:4 - paderr] - try: - word = base64mime.decode(encoded_string) - except binascii.Error: - raise HeaderParseError('Base64 decoding error') - else: - decoded_words.append((word, charset)) - else: - raise AssertionError('Unexpected encoding: ' + encoding) - # Now convert all words to bytes and collapse consecutive runs of - # similarly encoded words. - collapsed = [] - last_word = last_charset = None - for word, charset in decoded_words: - if isinstance(word, str): - word = bytes(word, 'raw-unicode-escape') - if last_word is None: - last_word = word - last_charset = charset - elif charset != last_charset: - collapsed.append((last_word, last_charset)) - last_word = word - last_charset = charset - elif last_charset is None: - last_word += BSPACE + word - else: - last_word += word - collapsed.append((last_word, last_charset)) - return collapsed - - -def make_header(decoded_seq, maxlinelen=None, header_name=None, - continuation_ws=' '): - """Create a Header from a sequence of pairs as returned by decode_header() - - decode_header() takes a header value string and returns a sequence of - pairs of the format (decoded_string, charset) where charset is the string - name of the character set. - - This function takes one of those sequence of pairs and returns a Header - instance. Optional maxlinelen, header_name, and continuation_ws are as in - the Header constructor. - """ - h = Header(maxlinelen=maxlinelen, header_name=header_name, - continuation_ws=continuation_ws) - for s, charset in decoded_seq: - # None means us-ascii but we can simply pass it on to h.append() - if charset is not None and not isinstance(charset, Charset): - charset = Charset(charset) - h.append(s, charset) - return h - - -class Header(object): - def __init__(self, s=None, charset=None, - maxlinelen=None, header_name=None, - continuation_ws=' ', errors='strict'): - """Create a MIME-compliant header that can contain many character sets. - - Optional s is the initial header value. If None, the initial header - value is not set. You can later append to the header with .append() - method calls. s may be a byte string or a Unicode string, but see the - .append() documentation for semantics. - - Optional charset serves two purposes: it has the same meaning as the - charset argument to the .append() method. It also sets the default - character set for all subsequent .append() calls that omit the charset - argument. If charset is not provided in the constructor, the us-ascii - charset is used both as s's initial charset and as the default for - subsequent .append() calls. - - The maximum line length can be specified explicitly via maxlinelen. For - splitting the first line to a shorter value (to account for the field - header which isn't included in s, e.g. `Subject') pass in the name of - the field in header_name. The default maxlinelen is 78 as recommended - by RFC 2822. - - continuation_ws must be RFC 2822 compliant folding whitespace (usually - either a space or a hard tab) which will be prepended to continuation - lines. - - errors is passed through to the .append() call. 
- """ - if charset is None: - charset = USASCII - elif not isinstance(charset, Charset): - charset = Charset(charset) - self._charset = charset - self._continuation_ws = continuation_ws - self._chunks = [] - if s is not None: - self.append(s, charset, errors) - if maxlinelen is None: - maxlinelen = MAXLINELEN - self._maxlinelen = maxlinelen - if header_name is None: - self._headerlen = 0 - else: - # Take the separating colon and space into account. - self._headerlen = len(header_name) + 2 - - def __str__(self): - """Return the string value of the header.""" - self._normalize() - uchunks = [] - lastcs = None - lastspace = None - for string, charset in self._chunks: - # We must preserve spaces between encoded and non-encoded word - # boundaries, which means for us we need to add a space when we go - # from a charset to None/us-ascii, or from None/us-ascii to a - # charset. Only do this for the second and subsequent chunks. - # Don't add a space if the None/us-ascii string already has - # a space (trailing or leading depending on transition) - nextcs = charset - if nextcs == _charset.UNKNOWN8BIT: - original_bytes = string.encode('ascii', 'surrogateescape') - string = original_bytes.decode('ascii', 'replace') - if uchunks: - hasspace = string and self._nonctext(string[0]) - if lastcs not in (None, 'us-ascii'): - if nextcs in (None, 'us-ascii') and not hasspace: - uchunks.append(SPACE) - nextcs = None - elif nextcs not in (None, 'us-ascii') and not lastspace: - uchunks.append(SPACE) - lastspace = string and self._nonctext(string[-1]) - lastcs = nextcs - uchunks.append(string) - return EMPTYSTRING.join(uchunks) - - # Rich comparison operators for equality only. BAW: does it make sense to - # have or explicitly disable <, <=, >, >= operators? - def __eq__(self, other): - # other may be a Header or a string. Both are fine so coerce - # ourselves to a unicode (of the unencoded header value), swap the - # args and do another comparison. - return other == str(self) - - def __ne__(self, other): - return not self == other - - def append(self, s, charset=None, errors='strict'): - """Append a string to the MIME header. - - Optional charset, if given, should be a Charset instance or the name - of a character set (which will be converted to a Charset instance). A - value of None (the default) means that the charset given in the - constructor is used. - - s may be a byte string or a Unicode string. If it is a byte string - (i.e. isinstance(s, str) is false), then charset is the encoding of - that byte string, and a UnicodeError will be raised if the string - cannot be decoded with that charset. If s is a Unicode string, then - charset is a hint specifying the character set of the characters in - the string. In either case, when producing an RFC 2822 compliant - header using RFC 2047 rules, the string will be encoded using the - output codec of the charset. If the string cannot be encoded to the - output codec, a UnicodeError will be raised. - - Optional `errors' is passed as the errors argument to the decode - call if s is a byte string. - """ - if charset is None: - charset = self._charset - elif not isinstance(charset, Charset): - charset = Charset(charset) - if not isinstance(s, str): - input_charset = charset.input_codec or 'us-ascii' - if input_charset == _charset.UNKNOWN8BIT: - s = s.decode('us-ascii', 'surrogateescape') - else: - s = s.decode(input_charset, errors) - # Ensure that the bytes we're storing can be decoded to the output - # character set, otherwise an early error is raised. 
- output_charset = charset.output_codec or 'us-ascii' - if output_charset != _charset.UNKNOWN8BIT: - try: - s.encode(output_charset, errors) - except UnicodeEncodeError: - if output_charset!='us-ascii': - raise - charset = UTF8 - self._chunks.append((s, charset)) - - def _nonctext(self, s): - """True if string s is not a ctext character of RFC822. - """ - return s.isspace() or s in ('(', ')', '\\') - - def encode(self, splitchars=';, \t', maxlinelen=None, linesep='\n'): - r"""Encode a message header into an RFC-compliant format. - - There are many issues involved in converting a given string for use in - an email header. Only certain character sets are readable in most - email clients, and as header strings can only contain a subset of - 7-bit ASCII, care must be taken to properly convert and encode (with - Base64 or quoted-printable) header strings. In addition, there is a - 75-character length limit on any given encoded header field, so - line-wrapping must be performed, even with double-byte character sets. - - Optional maxlinelen specifies the maximum length of each generated - line, exclusive of the linesep string. Individual lines may be longer - than maxlinelen if a folding point cannot be found. The first line - will be shorter by the length of the header name plus ": " if a header - name was specified at Header construction time. The default value for - maxlinelen is determined at header construction time. - - Optional splitchars is a string containing characters which should be - given extra weight by the splitting algorithm during normal header - wrapping. This is in very rough support of RFC 2822's `higher level - syntactic breaks': split points preceded by a splitchar are preferred - during line splitting, with the characters preferred in the order in - which they appear in the string. Space and tab may be included in the - string to indicate whether preference should be given to one over the - other as a split point when other split chars do not appear in the line - being split. Splitchars does not affect RFC 2047 encoded lines. - - Optional linesep is a string to be used to separate the lines of - the value. The default value is the most useful for typical - Python applications, but it can be set to \r\n to produce RFC-compliant - line separators when needed. - """ - self._normalize() - if maxlinelen is None: - maxlinelen = self._maxlinelen - # A maxlinelen of 0 means don't wrap. For all practical purposes, - # choosing a huge number here accomplishes that and makes the - # _ValueFormatter algorithm much simpler. 
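# Illustrative folding sketch, not part of the original module (assumes the
# stdlib-compatible email.header API): a small maxlinelen forces the
# wrapping described above, with continuation_ws prepended to each
# continuation line.
#
#   >>> from email.header import Header
#   >>> h = Header('A subject far too long to fit on a single header line',
#   ...            header_name='Subject', maxlinelen=40)
#   >>> print(h.encode(linesep='\r\n'))   # RFC-compliant line separators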
-        if maxlinelen == 0:
-            maxlinelen = 1000000
-        formatter = _ValueFormatter(self._headerlen, maxlinelen,
-                                    self._continuation_ws, splitchars)
-        lastcs = None
-        hasspace = lastspace = None
-        for string, charset in self._chunks:
-            if hasspace is not None:
-                hasspace = string and self._nonctext(string[0])
-            if lastcs not in (None, 'us-ascii'):
-                if not hasspace or charset not in (None, 'us-ascii'):
-                    formatter.add_transition()
-            elif charset not in (None, 'us-ascii') and not lastspace:
-                formatter.add_transition()
-            lastspace = string and self._nonctext(string[-1])
-            lastcs = charset
-            hasspace = False
-            lines = string.splitlines()
-            if lines:
-                formatter.feed('', lines[0], charset)
-            else:
-                formatter.feed('', '', charset)
-            for line in lines[1:]:
-                formatter.newline()
-                if charset.header_encoding is not None:
-                    formatter.feed(self._continuation_ws, ' ' + line.lstrip(),
-                                   charset)
-                else:
-                    sline = line.lstrip()
-                    fws = line[:len(line)-len(sline)]
-                    formatter.feed(fws, sline, charset)
-            if len(lines) > 1:
-                formatter.newline()
-        if self._chunks:
-            formatter.add_transition()
-        value = formatter._str(linesep)
-        if _embeded_header.search(value):
-            raise HeaderParseError("header value appears to contain "
-                                   "an embedded header: {!r}".format(value))
-        return value
-
-    def _normalize(self):
-        # Step 1: Normalize the chunks so that all runs of identical charsets
-        # get collapsed into a single unicode string.
-        chunks = []
-        last_charset = None
-        last_chunk = []
-        for string, charset in self._chunks:
-            if charset == last_charset:
-                last_chunk.append(string)
-            else:
-                if last_charset is not None:
-                    chunks.append((SPACE.join(last_chunk), last_charset))
-                last_chunk = [string]
-                last_charset = charset
-        if last_chunk:
-            chunks.append((SPACE.join(last_chunk), last_charset))
-        self._chunks = chunks
-
-
-class _ValueFormatter(object):
-    def __init__(self, headerlen, maxlen, continuation_ws, splitchars):
-        self._maxlen = maxlen
-        self._continuation_ws = continuation_ws
-        self._continuation_ws_len = len(continuation_ws)
-        self._splitchars = splitchars
-        self._lines = []
-        self._current_line = _Accumulator(headerlen)
-
-    def _str(self, linesep):
-        self.newline()
-        return linesep.join(self._lines)
-
-    def __str__(self):
-        return self._str(NL)
-
-    def newline(self):
-        end_of_line = self._current_line.pop()
-        if end_of_line != (' ', ''):
-            self._current_line.push(*end_of_line)
-        if len(self._current_line) > 0:
-            if self._current_line.is_onlyws():
-                self._lines[-1] += str(self._current_line)
-            else:
-                self._lines.append(str(self._current_line))
-        self._current_line.reset()
-
-    def add_transition(self):
-        self._current_line.push(' ', '')
-
-    def feed(self, fws, string, charset):
-        # If the charset has no header encoding (i.e. it is an ASCII encoding)
-        # then we must split the header at the "highest level syntactic break"
-        # possible.  Note that we don't have a lot of smarts about field
-        # syntax; we just try to break on semi-colons, then commas, then
-        # whitespace.  Eventually, this should be pluggable.
-        if charset.header_encoding is None:
-            self._ascii_split(fws, string, self._splitchars)
-            return
-        # Otherwise, we're doing either a Base64 or a quoted-printable
-        # encoding which means we don't need to split the line on syntactic
-        # breaks.  We can basically just find enough characters to fit on the
-        # current line, minus the RFC 2047 chrome.
-        # What makes this trickier
-        # though is that we have to split at octet boundaries, not character
-        # boundaries but it's only safe to split at character boundaries so at
-        # best we can only get close.
-        encoded_lines = charset.header_encode_lines(string, self._maxlengths())
-        # The first element extends the current line, but if it's None then
-        # nothing more fit on the current line so start a new line.
-        try:
-            first_line = encoded_lines.pop(0)
-        except IndexError:
-            # There are no encoded lines, so we're done.
-            return
-        if first_line is not None:
-            self._append_chunk(fws, first_line)
-        try:
-            last_line = encoded_lines.pop()
-        except IndexError:
-            # There was only one line.
-            return
-        self.newline()
-        self._current_line.push(self._continuation_ws, last_line)
-        # Everything else are full lines in themselves.
-        for line in encoded_lines:
-            self._lines.append(self._continuation_ws + line)
-
-    def _maxlengths(self):
-        # The first line's length.
-        yield self._maxlen - len(self._current_line)
-        while True:
-            yield self._maxlen - self._continuation_ws_len
-
-    def _ascii_split(self, fws, string, splitchars):
-        # The RFC 2822 header folding algorithm is simple in principle but
-        # complex in practice.  Lines may be folded any place where "folding
-        # white space" appears by inserting a linesep character in front of the
-        # FWS.  The complication is that not all spaces or tabs qualify as FWS,
-        # and we are also supposed to prefer to break at "higher level
-        # syntactic breaks".  We can't do either of these without intimate
-        # knowledge of the structure of structured headers, which we don't have
-        # here.  So the best we can do here is prefer to break at the specified
-        # splitchars, and hope that we don't choose any spaces or tabs that
-        # aren't legal FWS.  (This is at least better than the old algorithm,
-        # where we would sometimes *introduce* FWS after a splitchar, or the
-        # algorithm before that, where we would turn all white space runs into
-        # single spaces or tabs.)
-        parts = re.split("(["+FWS+"]+)", fws+string)
-        if parts[0]:
-            parts[:0] = ['']
-        else:
-            parts.pop(0)
-        for fws, part in zip(*[iter(parts)]*2):
-            self._append_chunk(fws, part)
-
-    def _append_chunk(self, fws, string):
-        self._current_line.push(fws, string)
-        if len(self._current_line) > self._maxlen:
-            # Find the best split point, working backward from the end.
-            # There might be none, on a long first line.
-            for ch in self._splitchars:
-                for i in range(self._current_line.part_count()-1, 0, -1):
-                    if ch.isspace():
-                        fws = self._current_line[i][0]
-                        if fws and fws[0]==ch:
-                            break
-                    prevpart = self._current_line[i-1][1]
-                    if prevpart and prevpart[-1]==ch:
-                        break
-                else:
-                    continue
-                break
-            else:
-                fws, part = self._current_line.pop()
-                if self._current_line._initial_size > 0:
-                    # There will be a header, so leave it on a line by itself.
-                    self.newline()
-                    if not fws:
-                        # We don't use continuation_ws here because the whitespace
-                        # after a header should always be a space.
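# Illustrative sketch, not part of the original module (assumes the
# stdlib-compatible email.header API): the splitchars preference that
# _append_chunk implements means ';' is tried before ',', and ',' before
# whitespace, when a fold point is needed.
#
#   >>> from email.header import Header
#   >>> Header('text/plain; charset=us-ascii, format=flowed',
#   ...        maxlinelen=30).encode(splitchars=';, ')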
-                        fws = ' '
-                self._current_line.push(fws, part)
-                return
-            remainder = self._current_line.pop_from(i)
-            self._lines.append(str(self._current_line))
-            self._current_line.reset(remainder)
-
-
-class _Accumulator(list):
-
-    def __init__(self, initial_size=0):
-        self._initial_size = initial_size
-        super().__init__()
-
-    def push(self, fws, string):
-        self.append((fws, string))
-
-    def pop_from(self, i=0):
-        popped = self[i:]
-        self[i:] = []
-        return popped
-
-    def pop(self):
-        if self.part_count()==0:
-            return ('', '')
-        return super().pop()
-
-    def __len__(self):
-        return sum((len(fws)+len(part) for fws, part in self),
-                   self._initial_size)
-
-    def __str__(self):
-        return EMPTYSTRING.join((EMPTYSTRING.join((fws, part))
-                                for fws, part in self))
-
-    def reset(self, startval=None):
-        if startval is None:
-            startval = []
-        self[:] = startval
-        self._initial_size = 0
-
-    def is_onlyws(self):
-        return self._initial_size==0 and (not self or str(self).isspace())
-
-    def part_count(self):
-        return super().__len__()
diff --git a/pype/vendor/future/backports/email/headerregistry.py b/pype/vendor/future/backports/email/headerregistry.py
deleted file mode 100644
index 9aaad65a14..0000000000
--- a/pype/vendor/future/backports/email/headerregistry.py
+++ /dev/null
@@ -1,592 +0,0 @@
-"""Representing and manipulating email headers via custom objects.
-
-This module provides an implementation of the HeaderRegistry API.
-The implementation is designed to flexibly follow RFC5322 rules.
-
-Eventually HeaderRegistry will be a public API, but it isn't yet,
-and will probably change some before that happens.
-
-"""
-from __future__ import unicode_literals
-from __future__ import division
-from __future__ import absolute_import
-
-from future.builtins import super
-from future.builtins import str
-from future.utils import text_to_native_str
-from future.backports.email import utils
-from future.backports.email import errors
-from future.backports.email import _header_value_parser as parser
-
-class Address(object):
-
-    def __init__(self, display_name='', username='', domain='', addr_spec=None):
-        """Create an object representing a full email address.
-
-        An address can have a 'display_name', a 'username', and a 'domain'.  In
-        addition to specifying the username and domain separately, they may be
-        specified together by using the addr_spec keyword *instead of* the
-        username and domain keywords.  If an addr_spec string is specified it
-        must be properly quoted according to RFC 5322 rules; an error will be
-        raised if it is not.
-
-        An Address object has display_name, username, domain, and addr_spec
-        attributes, all of which are read-only.  The addr_spec and the string
-        value of the object are both quoted according to RFC5322 rules, but
-        without any Content Transfer Encoding.
-
-        """
-        # This clause with its potential 'raise' may only happen when an
-        # application program creates an Address object using an addr_spec
-        # keyword.  The email library code itself must always supply username
-        # and domain.
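# Illustrative sketch, not part of the original module (assumes the
# stdlib-compatible email.headerregistry API):
#
#   >>> from email.headerregistry import Address
#   >>> Address(display_name='Jane Doe', addr_spec='jane@example.com')
#   Address(display_name='Jane Doe', username='jane', domain='example.com')
#   >>> str(Address(display_name='Jane Doe', addr_spec='jane@example.com'))
#   'Jane Doe <jane@example.com>'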
-        if addr_spec is not None:
-            if username or domain:
-                raise TypeError("addrspec specified when username and/or "
-                                "domain also specified")
-            a_s, rest = parser.get_addr_spec(addr_spec)
-            if rest:
-                raise ValueError("Invalid addr_spec; only '{}' "
-                                 "could be parsed from '{}'".format(
-                                    a_s, addr_spec))
-            if a_s.all_defects:
-                raise a_s.all_defects[0]
-            username = a_s.local_part
-            domain = a_s.domain
-        self._display_name = display_name
-        self._username = username
-        self._domain = domain
-
-    @property
-    def display_name(self):
-        return self._display_name
-
-    @property
-    def username(self):
-        return self._username
-
-    @property
-    def domain(self):
-        return self._domain
-
-    @property
-    def addr_spec(self):
-        """The addr_spec (username@domain) portion of the address, quoted
-        according to RFC 5322 rules, but with no Content Transfer Encoding.
-        """
-        nameset = set(self.username)
-        if len(nameset) > len(nameset-parser.DOT_ATOM_ENDS):
-            lp = parser.quote_string(self.username)
-        else:
-            lp = self.username
-        if self.domain:
-            return lp + '@' + self.domain
-        if not lp:
-            return '<>'
-        return lp
-
-    def __repr__(self):
-        return "Address(display_name={!r}, username={!r}, domain={!r})".format(
-            self.display_name, self.username, self.domain)
-
-    def __str__(self):
-        nameset = set(self.display_name)
-        if len(nameset) > len(nameset-parser.SPECIALS):
-            disp = parser.quote_string(self.display_name)
-        else:
-            disp = self.display_name
-        if disp:
-            addr_spec = '' if self.addr_spec=='<>' else self.addr_spec
-            return "{} <{}>".format(disp, addr_spec)
-        return self.addr_spec
-
-    def __eq__(self, other):
-        if type(other) != type(self):
-            return False
-        return (self.display_name == other.display_name and
-                self.username == other.username and
-                self.domain == other.domain)
-
-
-class Group(object):
-
-    def __init__(self, display_name=None, addresses=None):
-        """Create an object representing an address group.
-
-        An address group consists of a display_name followed by a colon and a
-        list of addresses (see Address) terminated by a semi-colon.  The Group
-        is created by specifying a display_name and a possibly empty list of
-        Address objects.  A Group can also be used to represent a single
-        address that is not in a group, which is convenient when manipulating
-        lists that are a combination of Groups and individual Addresses.  In
-        this case the display_name should be set to None.  In particular, the
-        string representation of a Group whose display_name is None is the same
-        as the Address object, if there is one and only one Address object in
-        the addresses list.
-
-        """
-        self._display_name = display_name
-        self._addresses = tuple(addresses) if addresses else tuple()
-
-    @property
-    def display_name(self):
-        return self._display_name
-
-    @property
-    def addresses(self):
-        return self._addresses
-
-    def __repr__(self):
-        return "Group(display_name={!r}, addresses={!r})".format(
-            self.display_name, self.addresses)
-
-    def __str__(self):
-        if self.display_name is None and len(self.addresses)==1:
-            return str(self.addresses[0])
-        disp = self.display_name
-        if disp is not None:
-            nameset = set(disp)
-            if len(nameset) > len(nameset-parser.SPECIALS):
-                disp = parser.quote_string(disp)
-        adrstr = ", ".join(str(x) for x in self.addresses)
-        adrstr = ' ' + adrstr if adrstr else adrstr
-        return "{}:{};".format(disp, adrstr)
-
-    def __eq__(self, other):
-        if type(other) != type(self):
-            return False
-        return (self.display_name == other.display_name and
-                self.addresses == other.addresses)
-
-
-# Header Classes #
-
-class BaseHeader(str):
-
-    """Base class for message headers.
-
-    Implements generic behavior and provides tools for subclasses.
-
-    A subclass must define a classmethod named 'parse' that takes an unfolded
-    value string and a dictionary as its arguments.  The dictionary will
-    contain one key, 'defects', initialized to an empty list.  After the call
-    the dictionary must contain two additional keys: 'parse_tree', set to the
-    parse tree obtained from parsing the header, and 'decoded', set to the
-    string value of the idealized representation of the data from the value.
-    (That is, encoded words are decoded, and values that have canonical
-    representations are so represented.)
-
-    The defects key is intended to collect parsing defects, which the message
-    parser will subsequently dispose of as appropriate.  The parser should not,
-    insofar as practical, raise any errors.  Defects should be added to the
-    list instead.  The standard header parsers register defects for RFC
-    compliance issues, for obsolete RFC syntax, and for unrecoverable parsing
-    errors.
-
-    The parse method may add additional keys to the dictionary.  In this case
-    the subclass must define an 'init' method, which will be passed the
-    dictionary as its keyword arguments.  The method should use (usually by
-    setting them as the value of similarly named attributes) and remove all the
-    extra keys added by its parse method, and then use super to call its parent
-    class with the remaining arguments and keywords.
-
-    The subclass should also make sure that a 'max_count' attribute is defined
-    that is either None or 1.  XXX: need to better define this API.
-
-    """
-
-    def __new__(cls, name, value):
-        kwds = {'defects': []}
-        cls.parse(value, kwds)
-        if utils._has_surrogates(kwds['decoded']):
-            kwds['decoded'] = utils._sanitize(kwds['decoded'])
-        self = str.__new__(cls, kwds['decoded'])
-        # del kwds['decoded']
-        self.init(name, **kwds)
-        return self
-
-    def init(self, name, **_3to2kwargs):
-        defects = _3to2kwargs['defects']; del _3to2kwargs['defects']
-        parse_tree = _3to2kwargs['parse_tree']; del _3to2kwargs['parse_tree']
-        self._name = name
-        self._parse_tree = parse_tree
-        self._defects = defects
-
-    @property
-    def name(self):
-        return self._name
-
-    @property
-    def defects(self):
-        return tuple(self._defects)
-
-    def __reduce__(self):
-        return (
-            _reconstruct_header,
-            (
-                self.__class__.__name__,
-                self.__class__.__bases__,
-                str(self),
-            ),
-            self.__dict__)
-
-    @classmethod
-    def _reconstruct(cls, value):
-        return str.__new__(cls, value)
-
-    def fold(self, **_3to2kwargs):
-        policy = _3to2kwargs['policy']; del _3to2kwargs['policy']
-        """Fold header according to policy.
-
-        The parsed representation of the header is folded according to
-        RFC5322 rules, as modified by the policy.  If the parse tree
-        contains surrogateescaped bytes, the bytes are CTE encoded using
-        the charset 'unknown-8bit'.
-
-        Any non-ASCII characters in the parse tree are CTE encoded using
-        charset utf-8.  XXX: make this a policy setting.
-
-        The returned value is an ASCII-only string possibly containing linesep
-        characters, and ending with a linesep character.  The string includes
-        the header name and the ': ' separator.
-
-        """
-        # At some point we need to only put fws here if it was in the source.
-        header = parser.Header([
-            parser.HeaderLabel([
-                parser.ValueTerminal(self.name, 'header-name'),
-                parser.ValueTerminal(':', 'header-sep')]),
-            parser.CFWSList([parser.WhiteSpaceTerminal(' ', 'fws')]),
-            self._parse_tree])
-        return header.fold(policy=policy)
-
-
-def _reconstruct_header(cls_name, bases, value):
-    return type(text_to_native_str(cls_name), bases, {})._reconstruct(value)
-
-
-class UnstructuredHeader(object):
-
-    max_count = None
-    value_parser = staticmethod(parser.get_unstructured)
-
-    @classmethod
-    def parse(cls, value, kwds):
-        kwds['parse_tree'] = cls.value_parser(value)
-        kwds['decoded'] = str(kwds['parse_tree'])
-
-
-class UniqueUnstructuredHeader(UnstructuredHeader):
-
-    max_count = 1
-
-
-class DateHeader(object):
-
-    """Header whose value consists of a single timestamp.
-
-    Provides an additional attribute, datetime, which is either an aware
-    datetime using a timezone, or a naive datetime if the timezone
-    in the input string is -0000.  Also accepts a datetime as input.
-    The 'value' attribute is the normalized form of the timestamp,
-    which means it is the output of format_datetime on the datetime.
-    """
-
-    max_count = None
-
-    # This is used only for folding, not for creating 'decoded'.
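# Illustrative sketch, not part of the original module (assumes the
# stdlib-compatible HeaderRegistry API):
#
#   >>> from email.headerregistry import HeaderRegistry
#   >>> h = HeaderRegistry()('Date', 'Tue, 01 Jan 2019 10:00:00 +0000')
#   >>> h.datetime
#   datetime.datetime(2019, 1, 1, 10, 0, tzinfo=datetime.timezone.utc)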
- value_parser = staticmethod(parser.get_unstructured) - - @classmethod - def parse(cls, value, kwds): - if not value: - kwds['defects'].append(errors.HeaderMissingRequiredValue()) - kwds['datetime'] = None - kwds['decoded'] = '' - kwds['parse_tree'] = parser.TokenList() - return - if isinstance(value, str): - value = utils.parsedate_to_datetime(value) - kwds['datetime'] = value - kwds['decoded'] = utils.format_datetime(kwds['datetime']) - kwds['parse_tree'] = cls.value_parser(kwds['decoded']) - - def init(self, *args, **kw): - self._datetime = kw.pop('datetime') - super().init(*args, **kw) - - @property - def datetime(self): - return self._datetime - - -class UniqueDateHeader(DateHeader): - - max_count = 1 - - -class AddressHeader(object): - - max_count = None - - @staticmethod - def value_parser(value): - address_list, value = parser.get_address_list(value) - assert not value, 'this should not happen' - return address_list - - @classmethod - def parse(cls, value, kwds): - if isinstance(value, str): - # We are translating here from the RFC language (address/mailbox) - # to our API language (group/address). - kwds['parse_tree'] = address_list = cls.value_parser(value) - groups = [] - for addr in address_list.addresses: - groups.append(Group(addr.display_name, - [Address(mb.display_name or '', - mb.local_part or '', - mb.domain or '') - for mb in addr.all_mailboxes])) - defects = list(address_list.all_defects) - else: - # Assume it is Address/Group stuff - if not hasattr(value, '__iter__'): - value = [value] - groups = [Group(None, [item]) if not hasattr(item, 'addresses') - else item - for item in value] - defects = [] - kwds['groups'] = groups - kwds['defects'] = defects - kwds['decoded'] = ', '.join([str(item) for item in groups]) - if 'parse_tree' not in kwds: - kwds['parse_tree'] = cls.value_parser(kwds['decoded']) - - def init(self, *args, **kw): - self._groups = tuple(kw.pop('groups')) - self._addresses = None - super().init(*args, **kw) - - @property - def groups(self): - return self._groups - - @property - def addresses(self): - if self._addresses is None: - self._addresses = tuple([address for group in self._groups - for address in group.addresses]) - return self._addresses - - -class UniqueAddressHeader(AddressHeader): - - max_count = 1 - - -class SingleAddressHeader(AddressHeader): - - @property - def address(self): - if len(self.addresses)!=1: - raise ValueError(("value of single address header {} is not " - "a single address").format(self.name)) - return self.addresses[0] - - -class UniqueSingleAddressHeader(SingleAddressHeader): - - max_count = 1 - - -class MIMEVersionHeader(object): - - max_count = 1 - - value_parser = staticmethod(parser.parse_mime_version) - - @classmethod - def parse(cls, value, kwds): - kwds['parse_tree'] = parse_tree = cls.value_parser(value) - kwds['decoded'] = str(parse_tree) - kwds['defects'].extend(parse_tree.all_defects) - kwds['major'] = None if parse_tree.minor is None else parse_tree.major - kwds['minor'] = parse_tree.minor - if parse_tree.minor is not None: - kwds['version'] = '{}.{}'.format(kwds['major'], kwds['minor']) - else: - kwds['version'] = None - - def init(self, *args, **kw): - self._version = kw.pop('version') - self._major = kw.pop('major') - self._minor = kw.pop('minor') - super().init(*args, **kw) - - @property - def major(self): - return self._major - - @property - def minor(self): - return self._minor - - @property - def version(self): - return self._version - - -class ParameterizedMIMEHeader(object): - - # Mixin that handles 
-    # the params dict.  Must be subclassed and
-    # a property value_parser for the specific header provided.
-
-    max_count = 1
-
-    @classmethod
-    def parse(cls, value, kwds):
-        kwds['parse_tree'] = parse_tree = cls.value_parser(value)
-        kwds['decoded'] = str(parse_tree)
-        kwds['defects'].extend(parse_tree.all_defects)
-        if parse_tree.params is None:
-            kwds['params'] = {}
-        else:
-            # The MIME RFCs specify that parameter ordering is arbitrary.
-            kwds['params'] = dict((utils._sanitize(name).lower(),
-                                   utils._sanitize(value))
-                                  for name, value in parse_tree.params)
-
-    def init(self, *args, **kw):
-        self._params = kw.pop('params')
-        super().init(*args, **kw)
-
-    @property
-    def params(self):
-        return self._params.copy()
-
-
-class ContentTypeHeader(ParameterizedMIMEHeader):
-
-    value_parser = staticmethod(parser.parse_content_type_header)
-
-    def init(self, *args, **kw):
-        super().init(*args, **kw)
-        self._maintype = utils._sanitize(self._parse_tree.maintype)
-        self._subtype = utils._sanitize(self._parse_tree.subtype)
-
-    @property
-    def maintype(self):
-        return self._maintype
-
-    @property
-    def subtype(self):
-        return self._subtype
-
-    @property
-    def content_type(self):
-        return self.maintype + '/' + self.subtype
-
-
-class ContentDispositionHeader(ParameterizedMIMEHeader):
-
-    value_parser = staticmethod(parser.parse_content_disposition_header)
-
-    def init(self, *args, **kw):
-        super().init(*args, **kw)
-        cd = self._parse_tree.content_disposition
-        self._content_disposition = cd if cd is None else utils._sanitize(cd)
-
-    @property
-    def content_disposition(self):
-        return self._content_disposition
-
-
-class ContentTransferEncodingHeader(object):
-
-    max_count = 1
-
-    value_parser = staticmethod(parser.parse_content_transfer_encoding_header)
-
-    @classmethod
-    def parse(cls, value, kwds):
-        kwds['parse_tree'] = parse_tree = cls.value_parser(value)
-        kwds['decoded'] = str(parse_tree)
-        kwds['defects'].extend(parse_tree.all_defects)
-
-    def init(self, *args, **kw):
-        super().init(*args, **kw)
-        self._cte = utils._sanitize(self._parse_tree.cte)
-
-    @property
-    def cte(self):
-        return self._cte
-
-
-# The header factory #
-
-_default_header_map = {
-    'subject': UniqueUnstructuredHeader,
-    'date': UniqueDateHeader,
-    'resent-date': DateHeader,
-    'orig-date': UniqueDateHeader,
-    'sender': UniqueSingleAddressHeader,
-    'resent-sender': SingleAddressHeader,
-    'to': UniqueAddressHeader,
-    'resent-to': AddressHeader,
-    'cc': UniqueAddressHeader,
-    'resent-cc': AddressHeader,
-    'bcc': UniqueAddressHeader,
-    'resent-bcc': AddressHeader,
-    'from': UniqueAddressHeader,
-    'resent-from': AddressHeader,
-    'reply-to': UniqueAddressHeader,
-    'mime-version': MIMEVersionHeader,
-    'content-type': ContentTypeHeader,
-    'content-disposition': ContentDispositionHeader,
-    'content-transfer-encoding': ContentTransferEncodingHeader,
-    }
-
-class HeaderRegistry(object):
-
-    """A header_factory and header registry."""
-
-    def __init__(self, base_class=BaseHeader, default_class=UnstructuredHeader,
-                 use_default_map=True):
-        """Create a header_factory that works with the Policy API.
-
-        base_class is the class that will be the last class in the created
-        header class's __bases__ list.  default_class is the class that will be
-        used if "name" (see __call__) does not appear in the registry.
-        use_default_map controls whether or not the default mapping of names to
-        specialized classes is copied in to the registry when the factory is
-        created.  The default is True.
- - """ - self.registry = {} - self.base_class = base_class - self.default_class = default_class - if use_default_map: - self.registry.update(_default_header_map) - - def map_to_type(self, name, cls): - """Register cls as the specialized class for handling "name" headers. - - """ - self.registry[name.lower()] = cls - - def __getitem__(self, name): - cls = self.registry.get(name.lower(), self.default_class) - return type(text_to_native_str('_'+cls.__name__), (cls, self.base_class), {}) - - def __call__(self, name, value): - """Create a header instance for header 'name' from 'value'. - - Creates a header instance by creating a specialized class for parsing - and representing the specified header by combining the factory - base_class with a specialized class from the registry or the - default_class, and passing the name and value to the constructed - class's constructor. - - """ - return self[name](name, value) diff --git a/pype/vendor/future/backports/email/iterators.py b/pype/vendor/future/backports/email/iterators.py deleted file mode 100644 index 82d320f814..0000000000 --- a/pype/vendor/future/backports/email/iterators.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Various types of useful iterators and generators.""" -from __future__ import print_function -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = [ - 'body_line_iterator', - 'typed_subpart_iterator', - 'walk', - # Do not include _structure() since it's part of the debugging API. - ] - -import sys -from io import StringIO - - -# This function will become a method of the Message class -def walk(self): - """Walk over the message tree, yielding each subpart. - - The walk is performed in depth-first order. This method is a - generator. - """ - yield self - if self.is_multipart(): - for subpart in self.get_payload(): - for subsubpart in subpart.walk(): - yield subsubpart - - -# These two functions are imported into the Iterators.py interface module. -def body_line_iterator(msg, decode=False): - """Iterate over the parts, returning string payloads line-by-line. - - Optional decode (default False) is passed through to .get_payload(). - """ - for subpart in msg.walk(): - payload = subpart.get_payload(decode=decode) - if isinstance(payload, str): - for line in StringIO(payload): - yield line - - -def typed_subpart_iterator(msg, maintype='text', subtype=None): - """Iterate over the subparts with a given MIME type. - - Use `maintype' as the main MIME type to match against; this defaults to - "text". Optional `subtype' is the MIME subtype to match against; if - omitted, only the main type is matched. 
- """ - for subpart in msg.walk(): - if subpart.get_content_maintype() == maintype: - if subtype is None or subpart.get_content_subtype() == subtype: - yield subpart - - -def _structure(msg, fp=None, level=0, include_default=False): - """A handy debugging aid""" - if fp is None: - fp = sys.stdout - tab = ' ' * (level * 4) - print(tab + msg.get_content_type(), end='', file=fp) - if include_default: - print(' [%s]' % msg.get_default_type(), file=fp) - else: - print(file=fp) - if msg.is_multipart(): - for subpart in msg.get_payload(): - _structure(subpart, fp, level+1, include_default) diff --git a/pype/vendor/future/backports/email/message.py b/pype/vendor/future/backports/email/message.py deleted file mode 100644 index 99715fcc67..0000000000 --- a/pype/vendor/future/backports/email/message.py +++ /dev/null @@ -1,882 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2001-2007 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Basic message object for the email package object model.""" -from __future__ import absolute_import, division, unicode_literals -from future.builtins import list, range, str, zip - -__all__ = ['Message'] - -import re -import uu -import base64 -import binascii -from io import BytesIO, StringIO - -# Intrapackage imports -from future.utils import as_native_str -from future.backports.email import utils -from future.backports.email import errors -from future.backports.email._policybase import compat32 -from future.backports.email import charset as _charset -from future.backports.email._encoded_words import decode_b -Charset = _charset.Charset - -SEMISPACE = '; ' - -# Regular expression that matches `special' characters in parameters, the -# existence of which force quoting of the parameter value. -tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]') - - -def _splitparam(param): - # Split header parameters. BAW: this may be too simple. It isn't - # strictly RFC 2045 (section 5.1) compliant, but it catches most headers - # found in the wild. We may eventually need a full fledged parser. - # RDM: we might have a Header here; for now just stringify it. - a, sep, b = str(param).partition(';') - if not sep: - return a.strip(), None - return a.strip(), b.strip() - -def _formatparam(param, value=None, quote=True): - """Convenience function to format and return a key=value pair. - - This will quote the value if needed or if quote is true. If value is a - three tuple (charset, language, value), it will be encoded according - to RFC2231 rules. If it contains non-ascii characters it will likewise - be encoded according to RFC2231 rules, using the utf-8 charset and - a null language. - """ - if value is not None and len(value) > 0: - # A tuple is used for RFC 2231 encoded parameter values where items - # are (charset, language, value). charset is a string, not a Charset - # instance. RFC 2231 encoded values are never quoted, per RFC. - if isinstance(value, tuple): - # Encode as per RFC 2231 - param += '*' - value = utils.encode_rfc2231(value[2], value[0], value[1]) - return '%s=%s' % (param, value) - else: - try: - value.encode('ascii') - except UnicodeEncodeError: - param += '*' - value = utils.encode_rfc2231(value, 'utf-8', '') - return '%s=%s' % (param, value) - # BAW: Please check this. I think that if quote is set it should - # force quoting even if not necessary. 
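# Illustrative sketch of this private helper's documented behaviour, not
# part of the original module and not a public API:
#
#   >>> _formatparam('filename', 'a b.txt')    # tspecials force quoting
#   'filename="a b.txt"'
#   >>> _formatparam('filename', ('utf-8', '', 'Fußballer.ppt'))
#   "filename*=utf-8''Fu%C3%9Fballer.ppt"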
- if quote or tspecials.search(value): - return '%s="%s"' % (param, utils.quote(value)) - else: - return '%s=%s' % (param, value) - else: - return param - -def _parseparam(s): - # RDM This might be a Header, so for now stringify it. - s = ';' + str(s) - plist = [] - while s[:1] == ';': - s = s[1:] - end = s.find(';') - while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: - end = s.find(';', end + 1) - if end < 0: - end = len(s) - f = s[:end] - if '=' in f: - i = f.index('=') - f = f[:i].strip().lower() + '=' + f[i+1:].strip() - plist.append(f.strip()) - s = s[end:] - return plist - - -def _unquotevalue(value): - # This is different than utils.collapse_rfc2231_value() because it doesn't - # try to convert the value to a unicode. Message.get_param() and - # Message.get_params() are both currently defined to return the tuple in - # the face of RFC 2231 parameters. - if isinstance(value, tuple): - return value[0], value[1], utils.unquote(value[2]) - else: - return utils.unquote(value) - - -class Message(object): - """Basic message object. - - A message object is defined as something that has a bunch of RFC 2822 - headers and a payload. It may optionally have an envelope header - (a.k.a. Unix-From or From_ header). If the message is a container (i.e. a - multipart or a message/rfc822), then the payload is a list of Message - objects, otherwise it is a string. - - Message objects implement part of the `mapping' interface, which assumes - there is exactly one occurrence of the header per message. Some headers - do in fact appear multiple times (e.g. Received) and for those headers, - you must use the explicit API to set or get all the headers. Not all of - the mapping methods are implemented. - """ - def __init__(self, policy=compat32): - self.policy = policy - self._headers = list() - self._unixfrom = None - self._payload = None - self._charset = None - # Defaults for multipart messages - self.preamble = self.epilogue = None - self.defects = [] - # Default content type - self._default_type = 'text/plain' - - @as_native_str(encoding='utf-8') - def __str__(self): - """Return the entire formatted message as a string. - This includes the headers, body, and envelope header. - """ - return self.as_string() - - def as_string(self, unixfrom=False, maxheaderlen=0): - """Return the entire formatted message as a (unicode) string. - Optional `unixfrom' when True, means include the Unix From_ envelope - header. - - This is a convenience method and may not generate the message exactly - as you intend. For more flexibility, use the flatten() method of a - Generator instance. - """ - from future.backports.email.generator import Generator - fp = StringIO() - g = Generator(fp, mangle_from_=False, maxheaderlen=maxheaderlen) - g.flatten(self, unixfrom=unixfrom) - return fp.getvalue() - - def is_multipart(self): - """Return True if the message consists of multiple parts.""" - return isinstance(self._payload, list) - - # - # Unix From_ line - # - def set_unixfrom(self, unixfrom): - self._unixfrom = unixfrom - - def get_unixfrom(self): - return self._unixfrom - - # - # Payload manipulation. - # - def attach(self, payload): - """Add the given payload to the current payload. - - The current payload will always be a list of objects after this method - is called. If you want to set the payload to a scalar object, use - set_payload() instead. 
- """ - if self._payload is None: - self._payload = [payload] - else: - self._payload.append(payload) - - def get_payload(self, i=None, decode=False): - """Return a reference to the payload. - - The payload will either be a list object or a string. If you mutate - the list object, you modify the message's payload in place. Optional - i returns that index into the payload. - - Optional decode is a flag indicating whether the payload should be - decoded or not, according to the Content-Transfer-Encoding header - (default is False). - - When True and the message is not a multipart, the payload will be - decoded if this header's value is `quoted-printable' or `base64'. If - some other encoding is used, or the header is missing, or if the - payload has bogus data (i.e. bogus base64 or uuencoded data), the - payload is returned as-is. - - If the message is a multipart and the decode flag is True, then None - is returned. - """ - # Here is the logic table for this code, based on the email5.0.0 code: - # i decode is_multipart result - # ------ ------ ------------ ------------------------------ - # None True True None - # i True True None - # None False True _payload (a list) - # i False True _payload element i (a Message) - # i False False error (not a list) - # i True False error (not a list) - # None False False _payload - # None True False _payload decoded (bytes) - # Note that Barry planned to factor out the 'decode' case, but that - # isn't so easy now that we handle the 8 bit data, which needs to be - # converted in both the decode and non-decode path. - if self.is_multipart(): - if decode: - return None - if i is None: - return self._payload - else: - return self._payload[i] - # For backward compatibility, Use isinstance and this error message - # instead of the more logical is_multipart test. - if i is not None and not isinstance(self._payload, list): - raise TypeError('Expected list, got %s' % type(self._payload)) - payload = self._payload - # cte might be a Header, so for now stringify it. - cte = str(self.get('content-transfer-encoding', '')).lower() - # payload may be bytes here. - if isinstance(payload, str): - payload = str(payload) # for Python-Future, so surrogateescape works - if utils._has_surrogates(payload): - bpayload = payload.encode('ascii', 'surrogateescape') - if not decode: - try: - payload = bpayload.decode(self.get_param('charset', 'ascii'), 'replace') - except LookupError: - payload = bpayload.decode('ascii', 'replace') - elif decode: - try: - bpayload = payload.encode('ascii') - except UnicodeError: - # This won't happen for RFC compliant messages (messages - # containing only ASCII codepoints in the unicode input). - # If it does happen, turn the string into bytes in a way - # guaranteed not to fail. - bpayload = payload.encode('raw-unicode-escape') - if not decode: - return payload - if cte == 'quoted-printable': - return utils._qdecode(bpayload) - elif cte == 'base64': - # XXX: this is a bit of a hack; decode_b should probably be factored - # out somewhere, but I haven't figured out where yet. 
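# Illustrative sketch, not part of the original module (assumes the
# stdlib-compatible Message API):
#
#   >>> from email import message_from_string
#   >>> m = message_from_string('Content-Transfer-Encoding: base64\n\naGVsbG8=\n')
#   >>> m.get_payload(decode=True)
#   b'hello'
#   >>> m.get_payload()                  # undecoded, as stored
#   'aGVsbG8=\n'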
- value, defects = decode_b(b''.join(bpayload.splitlines())) - for defect in defects: - self.policy.handle_defect(self, defect) - return value - elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'): - in_file = BytesIO(bpayload) - out_file = BytesIO() - try: - uu.decode(in_file, out_file, quiet=True) - return out_file.getvalue() - except uu.Error: - # Some decoding problem - return bpayload - if isinstance(payload, str): - return bpayload - return payload - - def set_payload(self, payload, charset=None): - """Set the payload to the given value. - - Optional charset sets the message's default character set. See - set_charset() for details. - """ - self._payload = payload - if charset is not None: - self.set_charset(charset) - - def set_charset(self, charset): - """Set the charset of the payload to a given character set. - - charset can be a Charset instance, a string naming a character set, or - None. If it is a string it will be converted to a Charset instance. - If charset is None, the charset parameter will be removed from the - Content-Type field. Anything else will generate a TypeError. - - The message will be assumed to be of type text/* encoded with - charset.input_charset. It will be converted to charset.output_charset - and encoded properly, if needed, when generating the plain text - representation of the message. MIME headers (MIME-Version, - Content-Type, Content-Transfer-Encoding) will be added as needed. - """ - if charset is None: - self.del_param('charset') - self._charset = None - return - if not isinstance(charset, Charset): - charset = Charset(charset) - self._charset = charset - if 'MIME-Version' not in self: - self.add_header('MIME-Version', '1.0') - if 'Content-Type' not in self: - self.add_header('Content-Type', 'text/plain', - charset=charset.get_output_charset()) - else: - self.set_param('charset', charset.get_output_charset()) - if charset != charset.get_output_charset(): - self._payload = charset.body_encode(self._payload) - if 'Content-Transfer-Encoding' not in self: - cte = charset.get_body_encoding() - try: - cte(self) - except TypeError: - self._payload = charset.body_encode(self._payload) - self.add_header('Content-Transfer-Encoding', cte) - - def get_charset(self): - """Return the Charset instance associated with the message's payload. - """ - return self._charset - - # - # MAPPING INTERFACE (partial) - # - def __len__(self): - """Return the total number of headers, including duplicates.""" - return len(self._headers) - - def __getitem__(self, name): - """Get a header value. - - Return None if the header is missing instead of raising an exception. - - Note that if the header appeared multiple times, exactly which - occurrence gets returned is undefined. Use get_all() to get all - the values matching a header field name. - """ - return self.get(name) - - def __setitem__(self, name, val): - """Set the value of a header. - - Note: this does not overwrite an existing header with the same field - name. Use __delitem__() first to delete any existing headers. - """ - max_count = self.policy.header_max_count(name) - if max_count: - lname = name.lower() - found = 0 - for k, v in self._headers: - if k.lower() == lname: - found += 1 - if found >= max_count: - raise ValueError("There may be at most {} {} headers " - "in a message".format(max_count, name)) - self._headers.append(self.policy.header_store_parse(name, val)) - - def __delitem__(self, name): - """Delete all occurrences of a header, if present. - - Does not raise an exception if the header is missing. 
- """ - name = name.lower() - newheaders = list() - for k, v in self._headers: - if k.lower() != name: - newheaders.append((k, v)) - self._headers = newheaders - - def __contains__(self, name): - return name.lower() in [k.lower() for k, v in self._headers] - - def __iter__(self): - for field, value in self._headers: - yield field - - def keys(self): - """Return a list of all the message's header field names. - - These will be sorted in the order they appeared in the original - message, or were added to the message, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - return [k for k, v in self._headers] - - def values(self): - """Return a list of all the message's header values. - - These will be sorted in the order they appeared in the original - message, or were added to the message, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - return [self.policy.header_fetch_parse(k, v) - for k, v in self._headers] - - def items(self): - """Get all the message's header fields and values. - - These will be sorted in the order they appeared in the original - message, or were added to the message, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - return [(k, self.policy.header_fetch_parse(k, v)) - for k, v in self._headers] - - def get(self, name, failobj=None): - """Get a header value. - - Like __getitem__() but return failobj instead of None when the field - is missing. - """ - name = name.lower() - for k, v in self._headers: - if k.lower() == name: - return self.policy.header_fetch_parse(k, v) - return failobj - - # - # "Internal" methods (public API, but only intended for use by a parser - # or generator, not normal application code. - # - - def set_raw(self, name, value): - """Store name and value in the model without modification. - - This is an "internal" API, intended only for use by a parser. - """ - self._headers.append((name, value)) - - def raw_items(self): - """Return the (name, value) header pairs without modification. - - This is an "internal" API, intended only for use by a generator. - """ - return iter(self._headers.copy()) - - # - # Additional useful stuff - # - - def get_all(self, name, failobj=None): - """Return a list of all the values for the named field. - - These will be sorted in the order they appeared in the original - message, and may contain duplicates. Any fields deleted and - re-inserted are always appended to the header list. - - If no such fields exist, failobj is returned (defaults to None). - """ - values = [] - name = name.lower() - for k, v in self._headers: - if k.lower() == name: - values.append(self.policy.header_fetch_parse(k, v)) - if not values: - return failobj - return values - - def add_header(self, _name, _value, **_params): - """Extended header setting. - - name is the header field to add. keyword arguments can be used to set - additional parameters for the header field, with underscores converted - to dashes. Normally the parameter will be added as key="value" unless - value is None, in which case only the key will be added. If a - parameter value contains non-ASCII characters it can be specified as a - three-tuple of (charset, language, value), in which case it will be - encoded according to RFC2231 rules. Otherwise it will be encoded using - the utf-8 charset and a language of ''. 
-
-        Examples:
-
-        msg.add_header('content-disposition', 'attachment', filename='bud.gif')
-        msg.add_header('content-disposition', 'attachment',
-                       filename=('utf-8', '', 'Fußballer.ppt'))
-        msg.add_header('content-disposition', 'attachment',
-                       filename='Fußballer.ppt')
-        """
-        parts = []
-        for k, v in _params.items():
-            if v is None:
-                parts.append(k.replace('_', '-'))
-            else:
-                parts.append(_formatparam(k.replace('_', '-'), v))
-        if _value is not None:
-            parts.insert(0, _value)
-        self[_name] = SEMISPACE.join(parts)
-
-    def replace_header(self, _name, _value):
-        """Replace a header.
-
-        Replace the first matching header found in the message, retaining
-        header order and case.  If no matching header was found, a KeyError is
-        raised.
-        """
-        _name = _name.lower()
-        for i, (k, v) in zip(range(len(self._headers)), self._headers):
-            if k.lower() == _name:
-                self._headers[i] = self.policy.header_store_parse(k, _value)
-                break
-        else:
-            raise KeyError(_name)
-
-    #
-    # Use these three methods instead of the three above.
-    #
-
-    def get_content_type(self):
-        """Return the message's content type.
-
-        The returned string is coerced to lower case of the form
-        `maintype/subtype'.  If there was no Content-Type header in the
-        message, the default type as given by get_default_type() will be
-        returned.  Since according to RFC 2045, messages always have a default
-        type this will always return a value.
-
-        RFC 2045 defines a message's default type to be text/plain unless it
-        appears inside a multipart/digest container, in which case it would be
-        message/rfc822.
-        """
-        missing = object()
-        value = self.get('content-type', missing)
-        if value is missing:
-            # This should have no parameters
-            return self.get_default_type()
-        ctype = _splitparam(value)[0].lower()
-        # RFC 2045, section 5.2 says if it's invalid, use text/plain
-        if ctype.count('/') != 1:
-            return 'text/plain'
-        return ctype
-
-    def get_content_maintype(self):
-        """Return the message's main content type.
-
-        This is the `maintype' part of the string returned by
-        get_content_type().
-        """
-        ctype = self.get_content_type()
-        return ctype.split('/')[0]
-
-    def get_content_subtype(self):
-        """Returns the message's sub-content type.
-
-        This is the `subtype' part of the string returned by
-        get_content_type().
-        """
-        ctype = self.get_content_type()
-        return ctype.split('/')[1]
-
-    def get_default_type(self):
-        """Return the `default' content type.
-
-        Most messages have a default content type of text/plain, except for
-        messages that are subparts of multipart/digest containers.  Such
-        subparts have a default content type of message/rfc822.
-        """
-        return self._default_type
-
-    def set_default_type(self, ctype):
-        """Set the `default' content type.
-
-        ctype should be either "text/plain" or "message/rfc822", although this
-        is not enforced.  The default content type is not stored in the
-        Content-Type header.
-        """
-        self._default_type = ctype
-
-    def _get_params_preserve(self, failobj, header):
-        # Like get_params() but preserves the quoting of values.  BAW:
-        # should this be part of the public interface?
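# Illustrative sketch for the parameter accessors that follow, not part
# of the original module (assumes the stdlib-compatible Message API):
#
#   >>> from email.message import Message
#   >>> m = Message()
#   >>> m['Content-Type'] = 'text/plain; charset="utf-8"'
#   >>> m.get_params()
#   [('text/plain', ''), ('charset', 'utf-8')]
#   >>> m.get_param('charset')
#   'utf-8'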
-        missing = object()
-        value = self.get(header, missing)
-        if value is missing:
-            return failobj
-        params = []
-        for p in _parseparam(value):
-            try:
-                name, val = p.split('=', 1)
-                name = name.strip()
-                val = val.strip()
-            except ValueError:
-                # Must have been a bare attribute
-                name = p.strip()
-                val = ''
-            params.append((name, val))
-        params = utils.decode_params(params)
-        return params
-
-    def get_params(self, failobj=None, header='content-type', unquote=True):
-        """Return the message's Content-Type parameters, as a list.
-
-        The elements of the returned list are 2-tuples of key/value pairs, as
-        split on the `=' sign.  The left hand side of the `=' is the key,
-        while the right hand side is the value.  If there is no `=' sign in
-        the parameter the value is the empty string.  The value is as
-        described in the get_param() method.
-
-        Optional failobj is the object to return if there is no Content-Type
-        header.  Optional header is the header to search instead of
-        Content-Type.  If unquote is True, the value is unquoted.
-        """
-        missing = object()
-        params = self._get_params_preserve(missing, header)
-        if params is missing:
-            return failobj
-        if unquote:
-            return [(k, _unquotevalue(v)) for k, v in params]
-        else:
-            return params
-
-    def get_param(self, param, failobj=None, header='content-type',
-                  unquote=True):
-        """Return the parameter value if found in the Content-Type header.
-
-        Optional failobj is the object to return if there is no Content-Type
-        header, or the Content-Type header has no such parameter.  Optional
-        header is the header to search instead of Content-Type.
-
-        Parameter keys are always compared case insensitively.  The return
-        value can either be a string, or a 3-tuple if the parameter was RFC
-        2231 encoded.  When it's a 3-tuple, the elements of the value are of
-        the form (CHARSET, LANGUAGE, VALUE).  Note that both CHARSET and
-        LANGUAGE can be None, in which case you should consider VALUE to be
-        encoded in the us-ascii charset.  You can usually ignore LANGUAGE.
-        The parameter value (either the returned string, or the VALUE item in
-        the 3-tuple) is always unquoted, unless unquote is set to False.
-
-        If your application doesn't care whether the parameter was RFC 2231
-        encoded, it can turn the return value into a string as follows:
-
-            rawparam = msg.get_param('foo')
-            param = email.utils.collapse_rfc2231_value(rawparam)
-
-        """
-        if header not in self:
-            return failobj
-        for k, v in self._get_params_preserve(failobj, header):
-            if k.lower() == param.lower():
-                if unquote:
-                    return _unquotevalue(v)
-                else:
-                    return v
-        return failobj
-
-    def set_param(self, param, value, header='Content-Type', requote=True,
-                  charset=None, language=''):
-        """Set a parameter in the Content-Type header.
-
-        If the parameter already exists in the header, its value will be
-        replaced with the new value.
-
-        If header is Content-Type and has not yet been defined for this
-        message, it will be set to "text/plain" and the new parameter and
-        value will be appended as per RFC 2045.
-
-        An alternate header can be specified in the header argument, and all
-        parameters will be quoted as necessary unless requote is False.
-
-        If charset is specified, the parameter will be encoded according to RFC
-        2231.  Optional language specifies the RFC 2231 language, defaulting
-        to the empty string.  Both charset and language should be strings.
- """ - if not isinstance(value, tuple) and charset: - value = (charset, language, value) - - if header not in self and header.lower() == 'content-type': - ctype = 'text/plain' - else: - ctype = self.get(header) - if not self.get_param(param, header=header): - if not ctype: - ctype = _formatparam(param, value, requote) - else: - ctype = SEMISPACE.join( - [ctype, _formatparam(param, value, requote)]) - else: - ctype = '' - for old_param, old_value in self.get_params(header=header, - unquote=requote): - append_param = '' - if old_param.lower() == param.lower(): - append_param = _formatparam(param, value, requote) - else: - append_param = _formatparam(old_param, old_value, requote) - if not ctype: - ctype = append_param - else: - ctype = SEMISPACE.join([ctype, append_param]) - if ctype != self.get(header): - del self[header] - self[header] = ctype - - def del_param(self, param, header='content-type', requote=True): - """Remove the given parameter completely from the Content-Type header. - - The header will be re-written in place without the parameter or its - value. All values will be quoted as necessary unless requote is - False. Optional header specifies an alternative to the Content-Type - header. - """ - if header not in self: - return - new_ctype = '' - for p, v in self.get_params(header=header, unquote=requote): - if p.lower() != param.lower(): - if not new_ctype: - new_ctype = _formatparam(p, v, requote) - else: - new_ctype = SEMISPACE.join([new_ctype, - _formatparam(p, v, requote)]) - if new_ctype != self.get(header): - del self[header] - self[header] = new_ctype - - def set_type(self, type, header='Content-Type', requote=True): - """Set the main type and subtype for the Content-Type header. - - type must be a string in the form "maintype/subtype", otherwise a - ValueError is raised. - - This method replaces the Content-Type header, keeping all the - parameters in place. If requote is False, this leaves the existing - header's quoting as is. Otherwise, the parameters will be quoted (the - default). - - An alternative header can be specified in the header argument. When - the Content-Type header is set, we'll always also add a MIME-Version - header. - """ - # BAW: should we be strict? - if not type.count('/') == 1: - raise ValueError - # Set the Content-Type, you get a MIME-Version - if header.lower() == 'content-type': - del self['mime-version'] - self['MIME-Version'] = '1.0' - if header not in self: - self[header] = type - return - params = self.get_params(header=header, unquote=requote) - del self[header] - self[header] = type - # Skip the first param; it's the old type. - for p, v in params[1:]: - self.set_param(p, v, header, requote) - - def get_filename(self, failobj=None): - """Return the filename associated with the payload if present. - - The filename is extracted from the Content-Disposition header's - `filename' parameter, and it is unquoted. If that header is missing - the `filename' parameter, this method falls back to looking for the - `name' parameter. - """ - missing = object() - filename = self.get_param('filename', missing, 'content-disposition') - if filename is missing: - filename = self.get_param('name', missing, 'content-type') - if filename is missing: - return failobj - return utils.collapse_rfc2231_value(filename).strip() - - def get_boundary(self, failobj=None): - """Return the boundary associated with the payload if present. - - The boundary is extracted from the Content-Type header's `boundary' - parameter, and it is unquoted. 
- """ - missing = object() - boundary = self.get_param('boundary', missing) - if boundary is missing: - return failobj - # RFC 2046 says that boundaries may begin but not end in w/s - return utils.collapse_rfc2231_value(boundary).rstrip() - - def set_boundary(self, boundary): - """Set the boundary parameter in Content-Type to 'boundary'. - - This is subtly different than deleting the Content-Type header and - adding a new one with a new boundary parameter via add_header(). The - main difference is that using the set_boundary() method preserves the - order of the Content-Type header in the original message. - - HeaderParseError is raised if the message has no Content-Type header. - """ - missing = object() - params = self._get_params_preserve(missing, 'content-type') - if params is missing: - # There was no Content-Type header, and we don't know what type - # to set it to, so raise an exception. - raise errors.HeaderParseError('No Content-Type header found') - newparams = [] - foundp = False - for pk, pv in params: - if pk.lower() == 'boundary': - newparams.append(('boundary', '"%s"' % boundary)) - foundp = True - else: - newparams.append((pk, pv)) - if not foundp: - # The original Content-Type header had no boundary attribute. - # Tack one on the end. BAW: should we raise an exception - # instead??? - newparams.append(('boundary', '"%s"' % boundary)) - # Replace the existing Content-Type header with the new value - newheaders = [] - for h, v in self._headers: - if h.lower() == 'content-type': - parts = [] - for k, v in newparams: - if v == '': - parts.append(k) - else: - parts.append('%s=%s' % (k, v)) - val = SEMISPACE.join(parts) - newheaders.append(self.policy.header_store_parse(h, val)) - - else: - newheaders.append((h, v)) - self._headers = newheaders - - def get_content_charset(self, failobj=None): - """Return the charset parameter of the Content-Type header. - - The returned string is always coerced to lower case. If there is no - Content-Type header, or if that header has no charset parameter, - failobj is returned. - """ - missing = object() - charset = self.get_param('charset', missing) - if charset is missing: - return failobj - if isinstance(charset, tuple): - # RFC 2231 encoded, so decode it, and it better end up as ascii. - pcharset = charset[0] or 'us-ascii' - try: - # LookupError will be raised if the charset isn't known to - # Python. UnicodeError will be raised if the encoded text - # contains a character not in the charset. - as_bytes = charset[2].encode('raw-unicode-escape') - charset = str(as_bytes, pcharset) - except (LookupError, UnicodeError): - charset = charset[2] - # charset characters must be in us-ascii range - try: - charset.encode('us-ascii') - except UnicodeError: - return failobj - # RFC 2046, $4.1.2 says charsets are not case sensitive - return charset.lower() - - def get_charsets(self, failobj=None): - """Return a list containing the charset(s) used in this message. - - The returned list of items describes the Content-Type headers' - charset parameter for this message and all the subparts in its - payload. - - Each item will either be a string (the value of the charset parameter - in the Content-Type header of that part) or the value of the - 'failobj' parameter (defaults to None), if the part does not have a - main MIME type of "text", or the charset is not defined. - - The list will contain one string for each part of the message, plus - one for the container message (i.e. self), so that a non-multipart - message will still return a list of length 1. 
- """ - return [part.get_content_charset(failobj) for part in self.walk()] - - # I.e. def walk(self): ... - from future.backports.email.iterators import walk diff --git a/pype/vendor/future/backports/email/mime/__init__.py b/pype/vendor/future/backports/email/mime/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/future/backports/email/mime/application.py b/pype/vendor/future/backports/email/mime/application.py deleted file mode 100644 index 5cbfb174af..0000000000 --- a/pype/vendor/future/backports/email/mime/application.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Keith Dart -# Contact: email-sig@python.org - -"""Class representing application/* type MIME documents.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -from future.backports.email import encoders -from future.backports.email.mime.nonmultipart import MIMENonMultipart - -__all__ = ["MIMEApplication"] - - -class MIMEApplication(MIMENonMultipart): - """Class for generating application/* MIME documents.""" - - def __init__(self, _data, _subtype='octet-stream', - _encoder=encoders.encode_base64, **_params): - """Create an application/* type MIME document. - - _data is a string containing the raw application data. - - _subtype is the MIME content type subtype, defaulting to - 'octet-stream'. - - _encoder is a function which will perform the actual encoding for - transport of the application data, defaulting to base64 encoding. - - Any additional keyword arguments are passed to the base class - constructor, which turns them into parameters on the Content-Type - header. - """ - if _subtype is None: - raise TypeError('Invalid application MIME subtype') - MIMENonMultipart.__init__(self, 'application', _subtype, **_params) - self.set_payload(_data) - _encoder(self) diff --git a/pype/vendor/future/backports/email/mime/audio.py b/pype/vendor/future/backports/email/mime/audio.py deleted file mode 100644 index 4989c11420..0000000000 --- a/pype/vendor/future/backports/email/mime/audio.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (C) 2001-2007 Python Software Foundation -# Author: Anthony Baxter -# Contact: email-sig@python.org - -"""Class representing audio/* type MIME documents.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['MIMEAudio'] - -import sndhdr - -from io import BytesIO -from future.backports.email import encoders -from future.backports.email.mime.nonmultipart import MIMENonMultipart - - -_sndhdr_MIMEmap = {'au' : 'basic', - 'wav' :'x-wav', - 'aiff':'x-aiff', - 'aifc':'x-aiff', - } - -# There are others in sndhdr that don't have MIME types. :( -# Additional ones to be added to sndhdr? midi, mp3, realaudio, wma?? -def _whatsnd(data): - """Try to identify a sound file type. - - sndhdr.what() has a pretty cruddy interface, unfortunately. This is why - we re-do it here. It would be easier to reverse engineer the Unix 'file' - command and use the standard 'magic' file, as shipped with a modern Unix. 
- """ - hdr = data[:512] - fakefile = BytesIO(hdr) - for testfn in sndhdr.tests: - res = testfn(hdr, fakefile) - if res is not None: - return _sndhdr_MIMEmap.get(res[0]) - return None - - -class MIMEAudio(MIMENonMultipart): - """Class for generating audio/* MIME documents.""" - - def __init__(self, _audiodata, _subtype=None, - _encoder=encoders.encode_base64, **_params): - """Create an audio/* type MIME document. - - _audiodata is a string containing the raw audio data. If this data - can be decoded by the standard Python `sndhdr' module, then the - subtype will be automatically included in the Content-Type header. - Otherwise, you can specify the specific audio subtype via the - _subtype parameter. If _subtype is not given, and no subtype can be - guessed, a TypeError is raised. - - _encoder is a function which will perform the actual encoding for - transport of the image data. It takes one argument, which is this - Image instance. It should use get_payload() and set_payload() to - change the payload to the encoded form. It should also add any - Content-Transfer-Encoding or other headers to the message as - necessary. The default encoding is Base64. - - Any additional keyword arguments are passed to the base class - constructor, which turns them into parameters on the Content-Type - header. - """ - if _subtype is None: - _subtype = _whatsnd(_audiodata) - if _subtype is None: - raise TypeError('Could not find audio MIME subtype') - MIMENonMultipart.__init__(self, 'audio', _subtype, **_params) - self.set_payload(_audiodata) - _encoder(self) diff --git a/pype/vendor/future/backports/email/mime/base.py b/pype/vendor/future/backports/email/mime/base.py deleted file mode 100644 index e77f3ca4ae..0000000000 --- a/pype/vendor/future/backports/email/mime/base.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Base class for MIME specializations.""" -from __future__ import absolute_import, division, unicode_literals -from future.backports.email import message - -__all__ = ['MIMEBase'] - - -class MIMEBase(message.Message): - """Base class for MIME specializations.""" - - def __init__(self, _maintype, _subtype, **_params): - """This constructor adds a Content-Type: and a MIME-Version: header. - - The Content-Type: header is taken from the _maintype and _subtype - arguments. Additional parameters for this header are taken from the - keyword arguments. 
- """ - message.Message.__init__(self) - ctype = '%s/%s' % (_maintype, _subtype) - self.add_header('Content-Type', ctype, **_params) - self['MIME-Version'] = '1.0' diff --git a/pype/vendor/future/backports/email/mime/image.py b/pype/vendor/future/backports/email/mime/image.py deleted file mode 100644 index a03602464a..0000000000 --- a/pype/vendor/future/backports/email/mime/image.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Class representing image/* type MIME documents.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['MIMEImage'] - -import imghdr - -from future.backports.email import encoders -from future.backports.email.mime.nonmultipart import MIMENonMultipart - - -class MIMEImage(MIMENonMultipart): - """Class for generating image/* type MIME documents.""" - - def __init__(self, _imagedata, _subtype=None, - _encoder=encoders.encode_base64, **_params): - """Create an image/* type MIME document. - - _imagedata is a string containing the raw image data. If this data - can be decoded by the standard Python `imghdr' module, then the - subtype will be automatically included in the Content-Type header. - Otherwise, you can specify the specific image subtype via the _subtype - parameter. - - _encoder is a function which will perform the actual encoding for - transport of the image data. It takes one argument, which is this - Image instance. It should use get_payload() and set_payload() to - change the payload to the encoded form. It should also add any - Content-Transfer-Encoding or other headers to the message as - necessary. The default encoding is Base64. - - Any additional keyword arguments are passed to the base class - constructor, which turns them into parameters on the Content-Type - header. - """ - if _subtype is None: - _subtype = imghdr.what(None, _imagedata) - if _subtype is None: - raise TypeError('Could not guess image MIME subtype') - MIMENonMultipart.__init__(self, 'image', _subtype, **_params) - self.set_payload(_imagedata) - _encoder(self) diff --git a/pype/vendor/future/backports/email/mime/message.py b/pype/vendor/future/backports/email/mime/message.py deleted file mode 100644 index 7f92075150..0000000000 --- a/pype/vendor/future/backports/email/mime/message.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Class representing message/* MIME documents.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['MIMEMessage'] - -from future.backports.email import message -from future.backports.email.mime.nonmultipart import MIMENonMultipart - - -class MIMEMessage(MIMENonMultipart): - """Class representing message/* MIME documents.""" - - def __init__(self, _msg, _subtype='rfc822'): - """Create a message/* type MIME document. - - _msg is a message object and must be an instance of Message, or a - derived class of Message, otherwise a TypeError is raised. - - Optional _subtype defines the subtype of the contained message. The - default is "rfc822" (this is defined by the MIME standard, even though - the term "rfc822" is technically outdated by RFC 2822). 
- """ - MIMENonMultipart.__init__(self, 'message', _subtype) - if not isinstance(_msg, message.Message): - raise TypeError('Argument is not an instance of Message') - # It's convenient to use this base class method. We need to do it - # this way or we'll get an exception - message.Message.attach(self, _msg) - # And be sure our default type is set correctly - self.set_default_type('message/rfc822') diff --git a/pype/vendor/future/backports/email/mime/multipart.py b/pype/vendor/future/backports/email/mime/multipart.py deleted file mode 100644 index 6d7ed3dcb9..0000000000 --- a/pype/vendor/future/backports/email/mime/multipart.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (C) 2002-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Base class for MIME multipart/* type messages.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['MIMEMultipart'] - -from future.backports.email.mime.base import MIMEBase - - -class MIMEMultipart(MIMEBase): - """Base class for MIME multipart/* type messages.""" - - def __init__(self, _subtype='mixed', boundary=None, _subparts=None, - **_params): - """Creates a multipart/* type message. - - By default, creates a multipart/mixed message, with proper - Content-Type and MIME-Version headers. - - _subtype is the subtype of the multipart content type, defaulting to - `mixed'. - - boundary is the multipart boundary string. By default it is - calculated as needed. - - _subparts is a sequence of initial subparts for the payload. It - must be an iterable object, such as a list. You can always - attach new subparts to the message by using the attach() method. - - Additional parameters for the Content-Type header are taken from the - keyword arguments (or passed into the _params argument). - """ - MIMEBase.__init__(self, 'multipart', _subtype, **_params) - - # Initialise _payload to an empty list as the Message superclass's - # implementation of is_multipart assumes that _payload is a list for - # multipart messages. 
- self._payload = [] - - if _subparts: - for p in _subparts: - self.attach(p) - if boundary: - self.set_boundary(boundary) diff --git a/pype/vendor/future/backports/email/mime/nonmultipart.py b/pype/vendor/future/backports/email/mime/nonmultipart.py deleted file mode 100644 index 08c37c36d1..0000000000 --- a/pype/vendor/future/backports/email/mime/nonmultipart.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2002-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Base class for MIME type messages that are not multipart.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['MIMENonMultipart'] - -from future.backports.email import errors -from future.backports.email.mime.base import MIMEBase - - -class MIMENonMultipart(MIMEBase): - """Base class for MIME multipart/* type messages.""" - - def attach(self, payload): - # The public API prohibits attaching multiple subparts to MIMEBase - # derived subtypes since none of them are, by definition, of content - # type multipart/* - raise errors.MultipartConversionError( - 'Cannot attach additional subparts to non-multipart/*') diff --git a/pype/vendor/future/backports/email/mime/text.py b/pype/vendor/future/backports/email/mime/text.py deleted file mode 100644 index 6269f4a68a..0000000000 --- a/pype/vendor/future/backports/email/mime/text.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Class representing text/* type MIME documents.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['MIMEText'] - -from future.backports.email.encoders import encode_7or8bit -from future.backports.email.mime.nonmultipart import MIMENonMultipart - - -class MIMEText(MIMENonMultipart): - """Class for generating text/* type MIME documents.""" - - def __init__(self, _text, _subtype='plain', _charset=None): - """Create a text/* type MIME document. - - _text is the string for this message object. - - _subtype is the MIME sub content type, defaulting to "plain". - - _charset is the character set parameter added to the Content-Type - header. This defaults to "us-ascii". Note that as a side-effect, the - Content-Transfer-Encoding header will also be set. - """ - - # If no _charset was specified, check to see if there are non-ascii - # characters present. If not, use 'us-ascii', otherwise use utf-8. - # XXX: This can be removed once #7304 is fixed. 
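MIMEMultipart and MIMEText, both deleted here, combine in the usual stdlib pattern: subparts can be passed to the constructor or attached later, and the boundary can be fixed up front. A minimal sketch:

    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText

    msg = MIMEMultipart('alternative', boundary='BOUNDARY')
    msg.attach(MIMEText('plain body'))                    # charset us-ascii
    msg.attach(MIMEText('<b>html</b>', 'html', 'utf-8'))
    assert msg.get_boundary() == 'BOUNDARY'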
- if _charset is None: - try: - _text.encode('us-ascii') - _charset = 'us-ascii' - except UnicodeEncodeError: - _charset = 'utf-8' - - MIMENonMultipart.__init__(self, 'text', _subtype, - **{'charset': _charset}) - - self.set_payload(_text, _charset) diff --git a/pype/vendor/future/backports/email/parser.py b/pype/vendor/future/backports/email/parser.py deleted file mode 100644 index df1c6e2868..0000000000 --- a/pype/vendor/future/backports/email/parser.py +++ /dev/null @@ -1,135 +0,0 @@ -# Copyright (C) 2001-2007 Python Software Foundation -# Author: Barry Warsaw, Thomas Wouters, Anthony Baxter -# Contact: email-sig@python.org - -"""A parser of RFC 2822 and MIME email messages.""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import - -__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser'] - -import warnings -from io import StringIO, TextIOWrapper - -from future.backports.email.feedparser import FeedParser, BytesFeedParser -from future.backports.email.message import Message -from future.backports.email._policybase import compat32 - - -class Parser(object): - def __init__(self, _class=Message, **_3to2kwargs): - """Parser of RFC 2822 and MIME email messages. - - Creates an in-memory object tree representing the email message, which - can then be manipulated and turned over to a Generator to return the - textual representation of the message. - - The string must be formatted as a block of RFC 2822 headers and header - continuation lines, optionally preceeded by a `Unix-from' header. The - header block is terminated either by the end of the string or by a - blank line. - - _class is the class to instantiate for new message objects when they - must be created. This class must have a constructor that can take - zero arguments. Default is Message.Message. - - The policy keyword specifies a policy object that controls a number of - aspects of the parser's operation. The default policy maintains - backward compatibility. - - """ - if 'policy' in _3to2kwargs: policy = _3to2kwargs['policy']; del _3to2kwargs['policy'] - else: policy = compat32 - self._class = _class - self.policy = policy - - def parse(self, fp, headersonly=False): - """Create a message structure from the data in a file. - - Reads all the data from the file and returns the root of the message - structure. Optional headersonly is a flag specifying whether to stop - parsing after reading the headers or not. The default is False, - meaning it parses the entire contents of the file. - """ - feedparser = FeedParser(self._class, policy=self.policy) - if headersonly: - feedparser._set_headersonly() - while True: - data = fp.read(8192) - if not data: - break - feedparser.feed(data) - return feedparser.close() - - def parsestr(self, text, headersonly=False): - """Create a message structure from a string. - - Returns the root of the message structure. Optional headersonly is a - flag specifying whether to stop parsing after reading the headers or - not. The default is False, meaning it parses the entire contents of - the file. - """ - return self.parse(StringIO(text), headersonly=headersonly) - - - -class HeaderParser(Parser): - def parse(self, fp, headersonly=True): - return Parser.parse(self, fp, True) - - def parsestr(self, text, headersonly=True): - return Parser.parsestr(self, text, True) - - -class BytesParser(object): - - def __init__(self, *args, **kw): - """Parser of binary RFC 2822 and MIME email messages. 
- - Creates an in-memory object tree representing the email message, which - can then be manipulated and turned over to a Generator to return the - textual representation of the message. - - The input must be formatted as a block of RFC 2822 headers and header - continuation lines, optionally preceeded by a `Unix-from' header. The - header block is terminated either by the end of the input or by a - blank line. - - _class is the class to instantiate for new message objects when they - must be created. This class must have a constructor that can take - zero arguments. Default is Message.Message. - """ - self.parser = Parser(*args, **kw) - - def parse(self, fp, headersonly=False): - """Create a message structure from the data in a binary file. - - Reads all the data from the file and returns the root of the message - structure. Optional headersonly is a flag specifying whether to stop - parsing after reading the headers or not. The default is False, - meaning it parses the entire contents of the file. - """ - fp = TextIOWrapper(fp, encoding='ascii', errors='surrogateescape') - with fp: - return self.parser.parse(fp, headersonly) - - - def parsebytes(self, text, headersonly=False): - """Create a message structure from a byte string. - - Returns the root of the message structure. Optional headersonly is a - flag specifying whether to stop parsing after reading the headers or - not. The default is False, meaning it parses the entire contents of - the file. - """ - text = text.decode('ASCII', errors='surrogateescape') - return self.parser.parsestr(text, headersonly) - - -class BytesHeaderParser(BytesParser): - def parse(self, fp, headersonly=True): - return BytesParser.parse(self, fp, headersonly=True) - - def parsebytes(self, text, headersonly=True): - return BytesParser.parsebytes(self, text, headersonly=True) diff --git a/pype/vendor/future/backports/email/policy.py b/pype/vendor/future/backports/email/policy.py deleted file mode 100644 index 2f609a23ae..0000000000 --- a/pype/vendor/future/backports/email/policy.py +++ /dev/null @@ -1,193 +0,0 @@ -"""This will be the home for the policy that hooks in the new -code that adds all the email6 features. -""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import -from future.builtins import super - -from future.standard_library.email._policybase import (Policy, Compat32, - compat32, _extend_docstrings) -from future.standard_library.email.utils import _has_surrogates -from future.standard_library.email.headerregistry import HeaderRegistry as HeaderRegistry - -__all__ = [ - 'Compat32', - 'compat32', - 'Policy', - 'EmailPolicy', - 'default', - 'strict', - 'SMTP', - 'HTTP', - ] - -@_extend_docstrings -class EmailPolicy(Policy): - - """+ - PROVISIONAL - - The API extensions enabled by this policy are currently provisional. - Refer to the documentation for details. - - This policy adds new header parsing and folding algorithms. Instead of - simple strings, headers are custom objects with custom attributes - depending on the type of the field. The folding algorithm fully - implements RFCs 2047 and 5322. - - In addition to the settable attributes listed above that apply to - all Policies, this policy adds the following additional attributes: - - refold_source -- if the value for a header in the Message object - came from the parsing of some source, this attribute - indicates whether or not a generator should refold - that value when transforming the message back into - stream form. 
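The Parser/BytesParser pair removed above still exists verbatim in the stdlib email.parser module; headersonly stops after the header block, and BytesParser decodes binary input with surrogateescape so undecodable bytes survive a round trip. A sketch:

    from email.parser import BytesParser

    raw = b'Subject: hello\r\nTo: a@example.com\r\n\r\nbody\r\n'
    msg = BytesParser().parsebytes(raw)
    print(msg['Subject'])                   # hello
    headers_only = BytesParser().parsebytes(raw, headersonly=True)
    print(headers_only.get_payload())       # raw body text, left unparsed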
The possible values are: - - none -- all source values use original folding - long -- source values that have any line that is - longer than max_line_length will be - refolded - all -- all values are refolded. - - The default is 'long'. - - header_factory -- a callable that takes two arguments, 'name' and - 'value', where 'name' is a header field name and - 'value' is an unfolded header field value, and - returns a string-like object that represents that - header. A default header_factory is provided that - understands some of the RFC5322 header field types. - (Currently address fields and date fields have - special treatment, while all other fields are - treated as unstructured. This list will be - completed before the extension is marked stable.) - """ - - refold_source = 'long' - header_factory = HeaderRegistry() - - def __init__(self, **kw): - # Ensure that each new instance gets a unique header factory - # (as opposed to clones, which share the factory). - if 'header_factory' not in kw: - object.__setattr__(self, 'header_factory', HeaderRegistry()) - super().__init__(**kw) - - def header_max_count(self, name): - """+ - The implementation for this class returns the max_count attribute from - the specialized header class that would be used to construct a header - of type 'name'. - """ - return self.header_factory[name].max_count - - # The logic of the next three methods is chosen such that it is possible to - # switch a Message object between a Compat32 policy and a policy derived - # from this class and have the results stay consistent. This allows a - # Message object constructed with this policy to be passed to a library - # that only handles Compat32 objects, or to receive such an object and - # convert it to use the newer style by just changing its policy. It is - # also chosen because it postpones the relatively expensive full rfc5322 - # parse until as late as possible when parsing from source, since in many - # applications only a few headers will actually be inspected. - - def header_source_parse(self, sourcelines): - """+ - The name is parsed as everything up to the ':' and returned unmodified. - The value is determined by stripping leading whitespace off the - remainder of the first line, joining all subsequent lines together, and - stripping any trailing carriage return or linefeed characters. (This - is the same as Compat32). - - """ - name, value = sourcelines[0].split(':', 1) - value = value.lstrip(' \t') + ''.join(sourcelines[1:]) - return (name, value.rstrip('\r\n')) - - def header_store_parse(self, name, value): - """+ - The name is returned unchanged. If the input value has a 'name' - attribute and it matches the name ignoring case, the value is returned - unchanged. Otherwise the name and value are passed to header_factory - method, and the resulting custom header object is returned as the - value. In this case a ValueError is raised if the input value contains - CR or LF characters. - - """ - if hasattr(value, 'name') and value.name.lower() == name.lower(): - return (name, value) - if isinstance(value, str) and len(value.splitlines())>1: - raise ValueError("Header values may not contain linefeed " - "or carriage return characters") - return (name, self.header_factory(name, value)) - - def header_fetch_parse(self, name, value): - """+ - If the value has a 'name' attribute, it is returned to unmodified. - Otherwise the name and the value with any linesep characters removed - are passed to the header_factory method, and the resulting custom - header object is returned. 
Any surrogateescaped bytes get turned - into the unicode unknown-character glyph. - - """ - if hasattr(value, 'name'): - return value - return self.header_factory(name, ''.join(value.splitlines())) - - def fold(self, name, value): - """+ - Header folding is controlled by the refold_source policy setting. A - value is considered to be a 'source value' if and only if it does not - have a 'name' attribute (having a 'name' attribute means it is a header - object of some sort). If a source value needs to be refolded according - to the policy, it is converted into a custom header object by passing - the name and the value with any linesep characters removed to the - header_factory method. Folding of a custom header object is done by - calling its fold method with the current policy. - - Source values are split into lines using splitlines. If the value is - not to be refolded, the lines are rejoined using the linesep from the - policy and returned. The exception is lines containing non-ascii - binary data. In that case the value is refolded regardless of the - refold_source setting, which causes the binary data to be CTE encoded - using the unknown-8bit charset. - - """ - return self._fold(name, value, refold_binary=True) - - def fold_binary(self, name, value): - """+ - The same as fold if cte_type is 7bit, except that the returned value is - bytes. - - If cte_type is 8bit, non-ASCII binary data is converted back into - bytes. Headers with binary data are not refolded, regardless of the - refold_header setting, since there is no way to know whether the binary - data consists of single byte characters or multibyte characters. - - """ - folded = self._fold(name, value, refold_binary=self.cte_type=='7bit') - return folded.encode('ascii', 'surrogateescape') - - def _fold(self, name, value, refold_binary=False): - if hasattr(value, 'name'): - return value.fold(policy=self) - maxlen = self.max_line_length if self.max_line_length else float('inf') - lines = value.splitlines() - refold = (self.refold_source == 'all' or - self.refold_source == 'long' and - (lines and len(lines[0])+len(name)+2 > maxlen or - any(len(x) > maxlen for x in lines[1:]))) - if refold or refold_binary and _has_surrogates(value): - return self.header_factory(name, ''.join(lines)).fold(policy=self) - return name + ': ' + self.linesep.join(lines) + self.linesep - - -default = EmailPolicy() -# Make the default policy use the class default header_factory -del default.header_factory -strict = default.clone(raise_on_defect=True) -SMTP = default.clone(linesep='\r\n') -HTTP = default.clone(linesep='\r\n', max_line_length=None) diff --git a/pype/vendor/future/backports/email/quoprimime.py b/pype/vendor/future/backports/email/quoprimime.py deleted file mode 100644 index b69d158bc4..0000000000 --- a/pype/vendor/future/backports/email/quoprimime.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright (C) 2001-2006 Python Software Foundation -# Author: Ben Gertzfield -# Contact: email-sig@python.org - -"""Quoted-printable content transfer encoding per RFCs 2045-2047. - -This module handles the content transfer encoding method defined in RFC 2045 -to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to -safely encode text that is in a character set similar to the 7-bit US ASCII -character set, but that includes some 8-bit characters that are normally not -allowed in email bodies or headers. - -Quoted-printable is very space-inefficient for encoding binary files; use the -email.base64mime module for that instead. 
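The EmailPolicy behaviour documented above is what eventually stabilised as email.policy.default in the stdlib: headers come back as typed objects rather than plain strings, with encoded words already decoded. A sketch against the stdlib descendant of this backport:

    import email
    from email import policy

    raw = 'To: =?utf-8?q?Andr=C3=A9?= <andre@example.com>\n\nhi\n'
    msg = email.message_from_string(raw, policy=policy.default)
    addr = msg['To'].addresses[0]               # structured header object
    print(addr.display_name, addr.addr_spec)    # André andre@example.com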
- -This module provides an interface to encode and decode both headers and bodies -with quoted-printable encoding. - -RFC 2045 defines a method for including character set information in an -`encoded-word' in a header. This method is commonly used for 8-bit real names -in To:/From:/Cc: etc. fields, as well as Subject: lines. - -This module does not do the line wrapping or end-of-line character -conversion necessary for proper internationalized headers; it only -does dumb encoding and decoding. To deal with the various line -wrapping issues, use the email.header module. -""" -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import -from future.builtins import bytes, chr, dict, int, range, super - -__all__ = [ - 'body_decode', - 'body_encode', - 'body_length', - 'decode', - 'decodestring', - 'header_decode', - 'header_encode', - 'header_length', - 'quote', - 'unquote', - ] - -import re -import io - -from string import ascii_letters, digits, hexdigits - -CRLF = '\r\n' -NL = '\n' -EMPTYSTRING = '' - -# Build a mapping of octets to the expansion of that octet. Since we're only -# going to have 256 of these things, this isn't terribly inefficient -# space-wise. Remember that headers and bodies have different sets of safe -# characters. Initialize both maps with the full expansion, and then override -# the safe bytes with the more compact form. -_QUOPRI_HEADER_MAP = dict((c, '=%02X' % c) for c in range(256)) -_QUOPRI_BODY_MAP = _QUOPRI_HEADER_MAP.copy() - -# Safe header bytes which need no encoding. -for c in bytes(b'-!*+/' + ascii_letters.encode('ascii') + digits.encode('ascii')): - _QUOPRI_HEADER_MAP[c] = chr(c) -# Headers have one other special encoding; spaces become underscores. -_QUOPRI_HEADER_MAP[ord(' ')] = '_' - -# Safe body bytes which need no encoding. -for c in bytes(b' !"#$%&\'()*+,-./0123456789:;<>' - b'?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`' - b'abcdefghijklmnopqrstuvwxyz{|}~\t'): - _QUOPRI_BODY_MAP[c] = chr(c) - - - -# Helpers -def header_check(octet): - """Return True if the octet should be escaped with header quopri.""" - return chr(octet) != _QUOPRI_HEADER_MAP[octet] - - -def body_check(octet): - """Return True if the octet should be escaped with body quopri.""" - return chr(octet) != _QUOPRI_BODY_MAP[octet] - - -def header_length(bytearray): - """Return a header quoted-printable encoding length. - - Note that this does not include any RFC 2047 chrome added by - `header_encode()`. - - :param bytearray: An array of bytes (a.k.a. octets). - :return: The length in bytes of the byte array when it is encoded with - quoted-printable for headers. - """ - return sum(len(_QUOPRI_HEADER_MAP[octet]) for octet in bytearray) - - -def body_length(bytearray): - """Return a body quoted-printable encoding length. - - :param bytearray: An array of bytes (a.k.a. octets). - :return: The length in bytes of the byte array when it is encoded with - quoted-printable for bodies. 
- """ - return sum(len(_QUOPRI_BODY_MAP[octet]) for octet in bytearray) - - -def _max_append(L, s, maxlen, extra=''): - if not isinstance(s, str): - s = chr(s) - if not L: - L.append(s.lstrip()) - elif len(L[-1]) + len(s) <= maxlen: - L[-1] += extra + s - else: - L.append(s.lstrip()) - - -def unquote(s): - """Turn a string in the form =AB to the ASCII character with value 0xab""" - return chr(int(s[1:3], 16)) - - -def quote(c): - return '=%02X' % ord(c) - - - -def header_encode(header_bytes, charset='iso-8859-1'): - """Encode a single header line with quoted-printable (like) encoding. - - Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but - used specifically for email header fields to allow charsets with mostly 7 - bit characters (and some 8 bit) to remain more or less readable in non-RFC - 2045 aware mail clients. - - charset names the character set to use in the RFC 2046 header. It - defaults to iso-8859-1. - """ - # Return empty headers as an empty string. - if not header_bytes: - return '' - # Iterate over every byte, encoding if necessary. - encoded = [] - for octet in header_bytes: - encoded.append(_QUOPRI_HEADER_MAP[octet]) - # Now add the RFC chrome to each encoded chunk and glue the chunks - # together. - return '=?%s?q?%s?=' % (charset, EMPTYSTRING.join(encoded)) - - -class _body_accumulator(io.StringIO): - - def __init__(self, maxlinelen, eol, *args, **kw): - super().__init__(*args, **kw) - self.eol = eol - self.maxlinelen = self.room = maxlinelen - - def write_str(self, s): - """Add string s to the accumulated body.""" - self.write(s) - self.room -= len(s) - - def newline(self): - """Write eol, then start new line.""" - self.write_str(self.eol) - self.room = self.maxlinelen - - def write_soft_break(self): - """Write a soft break, then start a new line.""" - self.write_str('=') - self.newline() - - def write_wrapped(self, s, extra_room=0): - """Add a soft line break if needed, then write s.""" - if self.room < len(s) + extra_room: - self.write_soft_break() - self.write_str(s) - - def write_char(self, c, is_last_char): - if not is_last_char: - # Another character follows on this line, so we must leave - # extra room, either for it or a soft break, and whitespace - # need not be quoted. - self.write_wrapped(c, extra_room=1) - elif c not in ' \t': - # For this and remaining cases, no more characters follow, - # so there is no need to reserve extra room (since a hard - # break will immediately follow). - self.write_wrapped(c) - elif self.room >= 3: - # It's a whitespace character at end-of-line, and we have room - # for the three-character quoted encoding. - self.write(quote(c)) - elif self.room == 2: - # There's room for the whitespace character and a soft break. - self.write(c) - self.write_soft_break() - else: - # There's room only for a soft break. The quoted whitespace - # will be the only content on the subsequent line. - self.write_soft_break() - self.write(quote(c)) - - -def body_encode(body, maxlinelen=76, eol=NL): - """Encode with quoted-printable, wrapping at maxlinelen characters. - - Each line of encoded text will end with eol, which defaults to "\\n". Set - this to "\\r\\n" if you will be using the result of this function directly - in an email. - - Each line will be wrapped at, at most, maxlinelen characters before the - eol string (maxlinelen defaults to 76 characters, the maximum value - permitted by RFC 2045). 
Long lines will have the 'soft line break' - quoted-printable character "=" appended to them, so the decoded text will - be identical to the original text. - - The minimum maxlinelen is 4 to have room for a quoted character ("=XX") - followed by a soft line break. Smaller values will generate a - ValueError. - - """ - - if maxlinelen < 4: - raise ValueError("maxlinelen must be at least 4") - if not body: - return body - - # The last line may or may not end in eol, but all other lines do. - last_has_eol = (body[-1] in '\r\n') - - # This accumulator will make it easier to build the encoded body. - encoded_body = _body_accumulator(maxlinelen, eol) - - lines = body.splitlines() - last_line_no = len(lines) - 1 - for line_no, line in enumerate(lines): - last_char_index = len(line) - 1 - for i, c in enumerate(line): - if body_check(ord(c)): - c = quote(c) - encoded_body.write_char(c, i==last_char_index) - # Add an eol if input line had eol. All input lines have eol except - # possibly the last one. - if line_no < last_line_no or last_has_eol: - encoded_body.newline() - - return encoded_body.getvalue() - - - -# BAW: I'm not sure if the intent was for the signature of this function to be -# the same as base64MIME.decode() or not... -def decode(encoded, eol=NL): - """Decode a quoted-printable string. - - Lines are separated with eol, which defaults to \\n. - """ - if not encoded: - return encoded - # BAW: see comment in encode() above. Again, we're building up the - # decoded string with string concatenation, which could be done much more - # efficiently. - decoded = '' - - for line in encoded.splitlines(): - line = line.rstrip() - if not line: - decoded += eol - continue - - i = 0 - n = len(line) - while i < n: - c = line[i] - if c != '=': - decoded += c - i += 1 - # Otherwise, c == "=". Are we at the end of the line? If so, add - # a soft line break. - elif i+1 == n: - i += 1 - continue - # Decode if in form =AB - elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits: - decoded += unquote(line[i:i+3]) - i += 3 - # Otherwise, not in form =AB, pass literally - else: - decoded += c - i += 1 - - if i == n: - decoded += eol - # Special case if original string did not end with eol - if encoded[-1] not in '\r\n' and decoded.endswith(eol): - decoded = decoded[:-1] - return decoded - - -# For convenience and backwards compatibility w/ standard base64 module -body_decode = decode -decodestring = decode - - - -def _unquote_match(match): - """Turn a match in the form =AB to the ASCII character with value 0xab""" - s = match.group(0) - return unquote(s) - - -# Header decoding is done a bit differently -def header_decode(s): - """Decode a string encoded with RFC 2045 MIME header `Q' encoding. - - This function does not parse a full MIME header value encoded with - quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use - the high level email.header class for that functionality. 
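The encoder/decoder pair deleted here implements the same transformation as the stdlib quopri module, which is the easiest way to see the =XX escapes in action:

    import quopri

    encoded = quopri.encodestring('héllo wörld'.encode('utf-8'))
    print(encoded)                                        # b'h=C3=A9llo w=C3=B6rld'
    print(quopri.decodestring(encoded).decode('utf-8'))   # héllo wörld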
- """ - s = s.replace('_', ' ') - return re.sub(r'=[a-fA-F0-9]{2}', _unquote_match, s, re.ASCII) diff --git a/pype/vendor/future/backports/email/utils.py b/pype/vendor/future/backports/email/utils.py deleted file mode 100644 index 4abebf7cb6..0000000000 --- a/pype/vendor/future/backports/email/utils.py +++ /dev/null @@ -1,400 +0,0 @@ -# Copyright (C) 2001-2010 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org - -"""Miscellaneous utilities.""" - -from __future__ import unicode_literals -from __future__ import division -from __future__ import absolute_import -from future import utils -from future.builtins import bytes, int, str - -__all__ = [ - 'collapse_rfc2231_value', - 'decode_params', - 'decode_rfc2231', - 'encode_rfc2231', - 'formataddr', - 'formatdate', - 'format_datetime', - 'getaddresses', - 'make_msgid', - 'mktime_tz', - 'parseaddr', - 'parsedate', - 'parsedate_tz', - 'parsedate_to_datetime', - 'unquote', - ] - -import os -import re -if utils.PY2: - re.ASCII = 0 -import time -import base64 -import random -import socket -from future.backports import datetime -from future.backports.urllib.parse import quote as url_quote, unquote as url_unquote -import warnings -from io import StringIO - -from future.backports.email._parseaddr import quote -from future.backports.email._parseaddr import AddressList as _AddressList -from future.backports.email._parseaddr import mktime_tz - -from future.backports.email._parseaddr import parsedate, parsedate_tz, _parsedate_tz - -from quopri import decodestring as _qdecode - -# Intrapackage imports -from future.backports.email.encoders import _bencode, _qencode -from future.backports.email.charset import Charset - -COMMASPACE = ', ' -EMPTYSTRING = '' -UEMPTYSTRING = '' -CRLF = '\r\n' -TICK = "'" - -specialsre = re.compile(r'[][\\()<>@,:;".]') -escapesre = re.compile(r'[\\"]') - -# How to figure out if we are processing strings that come from a byte -# source with undecodable characters. -_has_surrogates = re.compile( - '([^\ud800-\udbff]|\A)[\udc00-\udfff]([^\udc00-\udfff]|\Z)').search - -# How to deal with a string containing bytes before handing it to the -# application through the 'normal' interface. -def _sanitize(string): - # Turn any escaped bytes into unicode 'unknown' char. - original_bytes = string.encode('ascii', 'surrogateescape') - return original_bytes.decode('ascii', 'replace') - - -# Helpers - -def formataddr(pair, charset='utf-8'): - """The inverse of parseaddr(), this takes a 2-tuple of the form - (realname, email_address) and returns the string value suitable - for an RFC 2822 From, To or Cc header. - - If the first element of pair is false, then the second element is - returned unmodified. - - Optional charset if given is the character set that is used to encode - realname in case realname is not ASCII safe. Can be an instance of str or - a Charset-like object which has a header_encode method. Default is - 'utf-8'. - """ - name, address = pair - # The address MUST (per RFC) be ascii, so raise an UnicodeError if it isn't. 
- address.encode('ascii') - if name: - try: - name.encode('ascii') - except UnicodeEncodeError: - if isinstance(charset, str): - charset = Charset(charset) - encoded_name = charset.header_encode(name) - return "%s <%s>" % (encoded_name, address) - else: - quotes = '' - if specialsre.search(name): - quotes = '"' - name = escapesre.sub(r'\\\g<0>', name) - return '%s%s%s <%s>' % (quotes, name, quotes, address) - return address - - - -def getaddresses(fieldvalues): - """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" - all = COMMASPACE.join(fieldvalues) - a = _AddressList(all) - return a.addresslist - - - -ecre = re.compile(r''' - =\? # literal =? - (?P[^?]*?) # non-greedy up to the next ? is the charset - \? # literal ? - (?P[qb]) # either a "q" or a "b", case insensitive - \? # literal ? - (?P.*?) # non-greedy up to the next ?= is the atom - \?= # literal ?= - ''', re.VERBOSE | re.IGNORECASE) - - -def _format_timetuple_and_zone(timetuple, zone): - return '%s, %02d %s %04d %02d:%02d:%02d %s' % ( - ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timetuple[6]], - timetuple[2], - ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timetuple[1] - 1], - timetuple[0], timetuple[3], timetuple[4], timetuple[5], - zone) - -def formatdate(timeval=None, localtime=False, usegmt=False): - """Returns a date string as specified by RFC 2822, e.g.: - - Fri, 09 Nov 2001 01:08:47 -0000 - - Optional timeval if given is a floating point time value as accepted by - gmtime() and localtime(), otherwise the current time is used. - - Optional localtime is a flag that when True, interprets timeval, and - returns a date relative to the local timezone instead of UTC, properly - taking daylight savings time into account. - - Optional argument usegmt means that the timezone is written out as - an ascii string, not numeric one (so "GMT" instead of "+0000"). This - is needed for HTTP, and is only used when localtime==False. - """ - # Note: we cannot use strftime() because that honors the locale and RFC - # 2822 requires that day and month names be the English abbreviations. - if timeval is None: - timeval = time.time() - if localtime: - now = time.localtime(timeval) - # Calculate timezone offset, based on whether the local zone has - # daylight savings time, and whether DST is in effect. - if time.daylight and now[-1]: - offset = time.altzone - else: - offset = time.timezone - hours, minutes = divmod(abs(offset), 3600) - # Remember offset is in seconds west of UTC, but the timezone is in - # minutes east of UTC, so the signs differ. - if offset > 0: - sign = '-' - else: - sign = '+' - zone = '%s%02d%02d' % (sign, hours, minutes // 60) - else: - now = time.gmtime(timeval) - # Timezone offset is always -0000 - if usegmt: - zone = 'GMT' - else: - zone = '-0000' - return _format_timetuple_and_zone(now, zone) - -def format_datetime(dt, usegmt=False): - """Turn a datetime into a date string as specified in RFC 2822. - - If usegmt is True, dt must be an aware datetime with an offset of zero. In - this case 'GMT' will be rendered instead of the normal +0000 required by - RFC2822. This is to support HTTP headers involving date stamps. 
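formataddr, parseaddr, getaddresses and formatdate all survive unchanged in the stdlib email.utils module; a quick sketch of the round trip:

    from email.utils import formataddr, formatdate, getaddresses, parseaddr

    print(formataddr(('Ann Example', 'ann@example.com')))
    # Ann Example <ann@example.com>
    print(parseaddr('Ann Example <ann@example.com>'))
    # ('Ann Example', 'ann@example.com')
    print(getaddresses(['a@x.org, B <b@y.org>']))
    # [('', 'a@x.org'), ('B', 'b@y.org')]
    print(formatdate(0, usegmt=True))
    # Thu, 01 Jan 1970 00:00:00 GMT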
- """ - now = dt.timetuple() - if usegmt: - if dt.tzinfo is None or dt.tzinfo != datetime.timezone.utc: - raise ValueError("usegmt option requires a UTC datetime") - zone = 'GMT' - elif dt.tzinfo is None: - zone = '-0000' - else: - zone = dt.strftime("%z") - return _format_timetuple_and_zone(now, zone) - - -def make_msgid(idstring=None, domain=None): - """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: - - <20020201195627.33539.96671@nightshade.la.mastaler.com> - - Optional idstring if given is a string used to strengthen the - uniqueness of the message id. Optional domain if given provides the - portion of the message id after the '@'. It defaults to the locally - defined hostname. - """ - timeval = time.time() - utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval)) - pid = os.getpid() - randint = random.randrange(100000) - if idstring is None: - idstring = '' - else: - idstring = '.' + idstring - if domain is None: - domain = socket.getfqdn() - msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, domain) - return msgid - - -def parsedate_to_datetime(data): - _3to2list = list(_parsedate_tz(data)) - dtuple, tz, = [_3to2list[:-1]] + _3to2list[-1:] - if tz is None: - return datetime.datetime(*dtuple[:6]) - return datetime.datetime(*dtuple[:6], - tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) - - -def parseaddr(addr): - addrs = _AddressList(addr).addresslist - if not addrs: - return '', '' - return addrs[0] - - -# rfc822.unquote() doesn't properly de-backslash-ify in Python pre-2.3. -def unquote(str): - """Remove quotes from a string.""" - if len(str) > 1: - if str.startswith('"') and str.endswith('"'): - return str[1:-1].replace('\\\\', '\\').replace('\\"', '"') - if str.startswith('<') and str.endswith('>'): - return str[1:-1] - return str - - - -# RFC2231-related functions - parameter encoding and decoding -def decode_rfc2231(s): - """Decode string according to RFC 2231""" - parts = s.split(TICK, 2) - if len(parts) <= 2: - return None, None, s - return parts - - -def encode_rfc2231(s, charset=None, language=None): - """Encode string according to RFC 2231. - - If neither charset nor language is given, then s is returned as-is. If - charset is given but not language, the string is encoded using the empty - string for language. - """ - s = url_quote(s, safe='', encoding=charset or 'ascii') - if charset is None and language is None: - return s - if language is None: - language = '' - return "%s'%s'%s" % (charset, language, s) - - -rfc2231_continuation = re.compile(r'^(?P\w+)\*((?P[0-9]+)\*?)?$', - re.ASCII) - -def decode_params(params): - """Decode parameters list according to RFC 2231. - - params is a sequence of 2-tuples containing (param name, string value). - """ - # Copy params so we don't mess with the original - params = params[:] - new_params = [] - # Map parameter's name to a list of continuations. The values are a - # 3-tuple of the continuation number, the string value, and a flag - # specifying whether a particular segment is %-encoded. 
- rfc2231_params = {} - name, value = params.pop(0) - new_params.append((name, value)) - while params: - name, value = params.pop(0) - if name.endswith('*'): - encoded = True - else: - encoded = False - value = unquote(value) - mo = rfc2231_continuation.match(name) - if mo: - name, num = mo.group('name', 'num') - if num is not None: - num = int(num) - rfc2231_params.setdefault(name, []).append((num, value, encoded)) - else: - new_params.append((name, '"%s"' % quote(value))) - if rfc2231_params: - for name, continuations in rfc2231_params.items(): - value = [] - extended = False - # Sort by number - continuations.sort() - # And now append all values in numerical order, converting - # %-encodings for the encoded segments. If any of the - # continuation names ends in a *, then the entire string, after - # decoding segments and concatenating, must have the charset and - # language specifiers at the beginning of the string. - for num, s, encoded in continuations: - if encoded: - # Decode as "latin-1", so the characters in s directly - # represent the percent-encoded octet values. - # collapse_rfc2231_value treats this as an octet sequence. - s = url_unquote(s, encoding="latin-1") - extended = True - value.append(s) - value = quote(EMPTYSTRING.join(value)) - if extended: - charset, language, value = decode_rfc2231(value) - new_params.append((name, (charset, language, '"%s"' % value))) - else: - new_params.append((name, '"%s"' % value)) - return new_params - -def collapse_rfc2231_value(value, errors='replace', - fallback_charset='us-ascii'): - if not isinstance(value, tuple) or len(value) != 3: - return unquote(value) - # While value comes to us as a unicode string, we need it to be a bytes - # object. We do not want bytes() normal utf-8 decoder, we want a straight - # interpretation of the string as character bytes. - charset, language, text = value - rawbytes = bytes(text, 'raw-unicode-escape') - try: - return str(rawbytes, charset, errors) - except LookupError: - # charset is not a known codec. - return unquote(text) - - -# -# datetime doesn't provide a localtime function yet, so provide one. Code -# adapted from the patch in issue 9527. This may not be perfect, but it is -# better than not having it. -# - -def localtime(dt=None, isdst=-1): - """Return local time as an aware datetime object. - - If called without arguments, return current time. Otherwise *dt* - argument should be a datetime instance, and it is converted to the - local time zone according to the system time zone database. If *dt* is - naive (that is, dt.tzinfo is None), it is assumed to be in local time. - In this case, a positive or zero value for *isdst* causes localtime to - presume initially that summer time (for example, Daylight Saving Time) - is or is not (respectively) in effect for the specified time. A - negative value for *isdst* causes the localtime() function to attempt - to divine whether summer time is in effect for the specified time. - - """ - if dt is None: - return datetime.datetime.now(datetime.timezone.utc).astimezone() - if dt.tzinfo is not None: - return dt.astimezone() - # We have a naive datetime. Convert to a (localtime) timetuple and pass to - # system mktime together with the isdst hint. System mktime will return - # seconds since epoch. 
-    tm = dt.timetuple()[:-1] + (isdst,)
-    seconds = time.mktime(tm)
-    localtm = time.localtime(seconds)
-    try:
-        delta = datetime.timedelta(seconds=localtm.tm_gmtoff)
-        tz = datetime.timezone(delta, localtm.tm_zone)
-    except AttributeError:
-        # Compute UTC offset and compare with the value implied by tm_isdst.
-        # If the values match, use the zone name implied by tm_isdst.
-        delta = dt - datetime.datetime(*time.gmtime(seconds)[:6])
-        dst = time.daylight and localtm.tm_isdst > 0
-        gmtoff = -(time.altzone if dst else time.timezone)
-        if delta == datetime.timedelta(seconds=gmtoff):
-            tz = datetime.timezone(delta, time.tzname[dst])
-        else:
-            tz = datetime.timezone(delta)
-    return dt.replace(tzinfo=tz)
diff --git a/pype/vendor/future/backports/html/__init__.py b/pype/vendor/future/backports/html/__init__.py
deleted file mode 100644
index 837afce1ba..0000000000
--- a/pype/vendor/future/backports/html/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-General functions for HTML manipulation, backported from Py3.
-
-Note that this uses Python 2.7 code with the corresponding Python 3
-module names and locations.
-"""
-
-from __future__ import unicode_literals
-
-
-_escape_map = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;'}
-_escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;',
-                    ord('"'): '&quot;', ord('\''): '&#x27;'}
-
-# NB: this is a candidate for a bytes/string polymorphic interface
-
-def escape(s, quote=True):
-    """
-    Replace special characters "&", "<" and ">" to HTML-safe sequences.
-    If the optional flag quote is true (the default), the quotation mark
-    characters, both double quote (") and single quote (') characters are also
-    translated.
-    """
-    assert not isinstance(s, bytes), 'Pass a unicode string'
-    if quote:
-        return s.translate(_escape_map_full)
-    return s.translate(_escape_map)
-
diff --git a/pype/vendor/future/backports/html/entities.py b/pype/vendor/future/backports/html/entities.py
deleted file mode 100644
index 6798187c6a..0000000000
--- a/pype/vendor/future/backports/html/entities.py
+++ /dev/null
@@ -1,2515 +0,0 @@
-"""HTML character entity references.
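The escape() helper in the html/__init__.py deletion just above matches the stdlib html.escape(), including the quote flag; a sketch:

    import html

    print(html.escape('<a href="x">Q&A</a>'))
    # &lt;a href=&quot;x&quot;&gt;Q&amp;A&lt;/a&gt;
    print(html.escape("it's", quote=False))
    # it's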
- -Backported for python-future from Python 3.3 -""" - -from __future__ import (absolute_import, division, - print_function, unicode_literals) -from future.builtins import * - - -# maps the HTML entity name to the Unicode codepoint -name2codepoint = { - 'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1 - 'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1 - 'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1 - 'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1 - 'Alpha': 0x0391, # greek capital letter alpha, U+0391 - 'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1 - 'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1 - 'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1 - 'Beta': 0x0392, # greek capital letter beta, U+0392 - 'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1 - 'Chi': 0x03a7, # greek capital letter chi, U+03A7 - 'Dagger': 0x2021, # double dagger, U+2021 ISOpub - 'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3 - 'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1 - 'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1 - 'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1 - 'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1 - 'Epsilon': 0x0395, # greek capital letter epsilon, U+0395 - 'Eta': 0x0397, # greek capital letter eta, U+0397 - 'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1 - 'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3 - 'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1 - 'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1 - 'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1 - 'Iota': 0x0399, # greek capital letter iota, U+0399 - 'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1 - 'Kappa': 0x039a, # greek capital letter kappa, U+039A - 'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3 - 'Mu': 0x039c, # greek capital letter mu, U+039C - 'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1 - 'Nu': 0x039d, # greek capital letter nu, U+039D - 'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2 - 'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1 - 'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1 - 'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1 - 'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3 - 'Omicron': 0x039f, # greek capital letter omicron, U+039F - 'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1 - 'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1 - 'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1 - 'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3 - 'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3 - 'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech - 'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3 - 'Rho': 0x03a1, # greek capital letter rho, U+03A1 - 'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2 - 'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3 - 'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1 - 'Tau': 0x03a4, # greek 
capital letter tau, U+03A4 - 'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3 - 'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1 - 'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1 - 'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1 - 'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3 - 'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1 - 'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3 - 'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1 - 'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2 - 'Zeta': 0x0396, # greek capital letter zeta, U+0396 - 'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1 - 'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1 - 'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia - 'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1 - 'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1 - 'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW - 'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3 - 'amp': 0x0026, # ampersand, U+0026 ISOnum - 'and': 0x2227, # logical and = wedge, U+2227 ISOtech - 'ang': 0x2220, # angle, U+2220 ISOamso - 'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1 - 'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr - 'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1 - 'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1 - 'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW - 'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3 - 'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum - 'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub - 'cap': 0x2229, # intersection = cap, U+2229 ISOtech - 'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1 - 'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia - 'cent': 0x00a2, # cent sign, U+00A2 ISOnum - 'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3 - 'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub - 'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub - 'cong': 0x2245, # approximately equal to, U+2245 ISOtech - 'copy': 0x00a9, # copyright sign, U+00A9 ISOnum - 'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW - 'cup': 0x222a, # union = cup, U+222A ISOtech - 'curren': 0x00a4, # currency sign, U+00A4 ISOnum - 'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa - 'dagger': 0x2020, # dagger, U+2020 ISOpub - 'darr': 0x2193, # downwards arrow, U+2193 ISOnum - 'deg': 0x00b0, # degree sign, U+00B0 ISOnum - 'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3 - 'diams': 0x2666, # black diamond suit, U+2666 ISOpub - 'divide': 0x00f7, # division sign, U+00F7 ISOnum - 'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1 - 'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1 - 'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1 - 'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso - 'emsp': 0x2003, # em space, U+2003 ISOpub - 'ensp': 0x2002, # en space, U+2002 ISOpub - 'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3 - 'equiv': 0x2261, # identical to, U+2261 ISOtech - 'eta': 0x03b7, # greek small 
letter eta, U+03B7 ISOgrk3 - 'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1 - 'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1 - 'euro': 0x20ac, # euro sign, U+20AC NEW - 'exist': 0x2203, # there exists, U+2203 ISOtech - 'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech - 'forall': 0x2200, # for all, U+2200 ISOtech - 'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum - 'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum - 'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum - 'frasl': 0x2044, # fraction slash, U+2044 NEW - 'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3 - 'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech - 'gt': 0x003e, # greater-than sign, U+003E ISOnum - 'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa - 'harr': 0x2194, # left right arrow, U+2194 ISOamsa - 'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub - 'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub - 'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1 - 'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1 - 'iexcl': 0x00a1, # inverted exclamation mark, U+00A1 ISOnum - 'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1 - 'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso - 'infin': 0x221e, # infinity, U+221E ISOtech - 'int': 0x222b, # integral, U+222B ISOtech - 'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3 - 'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum - 'isin': 0x2208, # element of, U+2208 ISOtech - 'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1 - 'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3 - 'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech - 'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3 - 'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech - 'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum - 'larr': 0x2190, # leftwards arrow, U+2190 ISOnum - 'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc - 'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum - 'le': 0x2264, # less-than or equal to, U+2264 ISOtech - 'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc - 'lowast': 0x2217, # asterisk operator, U+2217 ISOtech - 'loz': 0x25ca, # lozenge, U+25CA ISOpub - 'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070 - 'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed - 'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum - 'lt': 0x003c, # less-than sign, U+003C ISOnum - 'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia - 'mdash': 0x2014, # em dash, U+2014 ISOpub - 'micro': 0x00b5, # micro sign, U+00B5 ISOnum - 'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum - 'minus': 0x2212, # minus sign, U+2212 ISOtech - 'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3 - 'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech - 'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum - 'ndash': 0x2013, # en dash, U+2013 ISOpub - 'ne': 0x2260, # not equal to, U+2260 ISOtech - 'ni': 0x220b, # contains as member, U+220B ISOtech - 'not': 0x00ac, # not sign, U+00AC ISOnum - 'notin': 0x2209, # not an element of, 
U+2209 ISOtech - 'nsub': 0x2284, # not a subset of, U+2284 ISOamsn - 'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1 - 'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3 - 'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1 - 'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1 - 'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2 - 'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1 - 'oline': 0x203e, # overline = spacing overscore, U+203E NEW - 'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3 - 'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW - 'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb - 'or': 0x2228, # logical or = vee, U+2228 ISOtech - 'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum - 'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum - 'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1 - 'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1 - 'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb - 'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1 - 'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum - 'part': 0x2202, # partial differential, U+2202 ISOtech - 'permil': 0x2030, # per mille sign, U+2030 ISOtech - 'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech - 'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3 - 'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3 - 'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3 - 'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum - 'pound': 0x00a3, # pound sign, U+00A3 ISOnum - 'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech - 'prod': 0x220f, # n-ary product = product sign, U+220F ISOamsb - 'prop': 0x221d, # proportional to, U+221D ISOtech - 'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3 - 'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum - 'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech - 'radic': 0x221a, # square root = radical sign, U+221A ISOtech - 'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech - 'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum - 'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum - 'rceil': 0x2309, # right ceiling, U+2309 ISOamsc - 'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum - 'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso - 'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum - 'rfloor': 0x230b, # right floor, U+230B ISOamsc - 'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3 - 'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070 - 'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed - 'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum - 'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW - 'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2 - 'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb - 'sect': 0x00a7, # section sign, U+00A7 ISOnum - 'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum - 'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3 - 'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3 - 'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech - 'spades': 0x2660, # black spade suit, U+2660 ISOpub - 'sub': 
0x2282, # subset of, U+2282 ISOtech - 'sube': 0x2286, # subset of or equal to, U+2286 ISOtech - 'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb - 'sup': 0x2283, # superset of, U+2283 ISOtech - 'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum - 'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum - 'sup3': 0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum - 'supe': 0x2287, # superset of or equal to, U+2287 ISOtech - 'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1 - 'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3 - 'there4': 0x2234, # therefore, U+2234 ISOtech - 'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3 - 'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW - 'thinsp': 0x2009, # thin space, U+2009 ISOpub - 'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1 - 'tilde': 0x02dc, # small tilde, U+02DC ISOdia - 'times': 0x00d7, # multiplication sign, U+00D7 ISOnum - 'trade': 0x2122, # trade mark sign, U+2122 ISOnum - 'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa - 'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1 - 'uarr': 0x2191, # upwards arrow, U+2191 ISOnum - 'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1 - 'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1 - 'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia - 'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW - 'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3 - 'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1 - 'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso - 'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3 - 'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1 - 'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum - 'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1 - 'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3 - 'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070 - 'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070 -} - - -# maps the HTML5 named character references to the equivalent Unicode character(s) -html5 = { - 'Aacute': '\xc1', - 'aacute': '\xe1', - 'Aacute;': '\xc1', - 'aacute;': '\xe1', - 'Abreve;': '\u0102', - 'abreve;': '\u0103', - 'ac;': '\u223e', - 'acd;': '\u223f', - 'acE;': '\u223e\u0333', - 'Acirc': '\xc2', - 'acirc': '\xe2', - 'Acirc;': '\xc2', - 'acirc;': '\xe2', - 'acute': '\xb4', - 'acute;': '\xb4', - 'Acy;': '\u0410', - 'acy;': '\u0430', - 'AElig': '\xc6', - 'aelig': '\xe6', - 'AElig;': '\xc6', - 'aelig;': '\xe6', - 'af;': '\u2061', - 'Afr;': '\U0001d504', - 'afr;': '\U0001d51e', - 'Agrave': '\xc0', - 'agrave': '\xe0', - 'Agrave;': '\xc0', - 'agrave;': '\xe0', - 'alefsym;': '\u2135', - 'aleph;': '\u2135', - 'Alpha;': '\u0391', - 'alpha;': '\u03b1', - 'Amacr;': '\u0100', - 'amacr;': '\u0101', - 'amalg;': '\u2a3f', - 'AMP': '&', - 'amp': '&', - 'AMP;': '&', - 'amp;': '&', - 'And;': '\u2a53', - 'and;': '\u2227', - 'andand;': '\u2a55', - 'andd;': '\u2a5c', - 'andslope;': '\u2a58', - 'andv;': '\u2a5a', - 'ang;': '\u2220', - 'ange;': '\u29a4', - 'angle;': '\u2220', - 'angmsd;': '\u2221', - 'angmsdaa;': '\u29a8', - 'angmsdab;': '\u29a9', - 'angmsdac;': '\u29aa', - 'angmsdad;': '\u29ab', - 'angmsdae;': '\u29ac', - 'angmsdaf;': '\u29ad', - 'angmsdag;': '\u29ae', - 'angmsdah;': '\u29af', - 'angrt;': '\u221f', - 'angrtvb;': '\u22be', - 'angrtvbd;': 
'\u299d', - 'angsph;': '\u2222', - 'angst;': '\xc5', - 'angzarr;': '\u237c', - 'Aogon;': '\u0104', - 'aogon;': '\u0105', - 'Aopf;': '\U0001d538', - 'aopf;': '\U0001d552', - 'ap;': '\u2248', - 'apacir;': '\u2a6f', - 'apE;': '\u2a70', - 'ape;': '\u224a', - 'apid;': '\u224b', - 'apos;': "'", - 'ApplyFunction;': '\u2061', - 'approx;': '\u2248', - 'approxeq;': '\u224a', - 'Aring': '\xc5', - 'aring': '\xe5', - 'Aring;': '\xc5', - 'aring;': '\xe5', - 'Ascr;': '\U0001d49c', - 'ascr;': '\U0001d4b6', - 'Assign;': '\u2254', - 'ast;': '*', - 'asymp;': '\u2248', - 'asympeq;': '\u224d', - 'Atilde': '\xc3', - 'atilde': '\xe3', - 'Atilde;': '\xc3', - 'atilde;': '\xe3', - 'Auml': '\xc4', - 'auml': '\xe4', - 'Auml;': '\xc4', - 'auml;': '\xe4', - 'awconint;': '\u2233', - 'awint;': '\u2a11', - 'backcong;': '\u224c', - 'backepsilon;': '\u03f6', - 'backprime;': '\u2035', - 'backsim;': '\u223d', - 'backsimeq;': '\u22cd', - 'Backslash;': '\u2216', - 'Barv;': '\u2ae7', - 'barvee;': '\u22bd', - 'Barwed;': '\u2306', - 'barwed;': '\u2305', - 'barwedge;': '\u2305', - 'bbrk;': '\u23b5', - 'bbrktbrk;': '\u23b6', - 'bcong;': '\u224c', - 'Bcy;': '\u0411', - 'bcy;': '\u0431', - 'bdquo;': '\u201e', - 'becaus;': '\u2235', - 'Because;': '\u2235', - 'because;': '\u2235', - 'bemptyv;': '\u29b0', - 'bepsi;': '\u03f6', - 'bernou;': '\u212c', - 'Bernoullis;': '\u212c', - 'Beta;': '\u0392', - 'beta;': '\u03b2', - 'beth;': '\u2136', - 'between;': '\u226c', - 'Bfr;': '\U0001d505', - 'bfr;': '\U0001d51f', - 'bigcap;': '\u22c2', - 'bigcirc;': '\u25ef', - 'bigcup;': '\u22c3', - 'bigodot;': '\u2a00', - 'bigoplus;': '\u2a01', - 'bigotimes;': '\u2a02', - 'bigsqcup;': '\u2a06', - 'bigstar;': '\u2605', - 'bigtriangledown;': '\u25bd', - 'bigtriangleup;': '\u25b3', - 'biguplus;': '\u2a04', - 'bigvee;': '\u22c1', - 'bigwedge;': '\u22c0', - 'bkarow;': '\u290d', - 'blacklozenge;': '\u29eb', - 'blacksquare;': '\u25aa', - 'blacktriangle;': '\u25b4', - 'blacktriangledown;': '\u25be', - 'blacktriangleleft;': '\u25c2', - 'blacktriangleright;': '\u25b8', - 'blank;': '\u2423', - 'blk12;': '\u2592', - 'blk14;': '\u2591', - 'blk34;': '\u2593', - 'block;': '\u2588', - 'bne;': '=\u20e5', - 'bnequiv;': '\u2261\u20e5', - 'bNot;': '\u2aed', - 'bnot;': '\u2310', - 'Bopf;': '\U0001d539', - 'bopf;': '\U0001d553', - 'bot;': '\u22a5', - 'bottom;': '\u22a5', - 'bowtie;': '\u22c8', - 'boxbox;': '\u29c9', - 'boxDL;': '\u2557', - 'boxDl;': '\u2556', - 'boxdL;': '\u2555', - 'boxdl;': '\u2510', - 'boxDR;': '\u2554', - 'boxDr;': '\u2553', - 'boxdR;': '\u2552', - 'boxdr;': '\u250c', - 'boxH;': '\u2550', - 'boxh;': '\u2500', - 'boxHD;': '\u2566', - 'boxHd;': '\u2564', - 'boxhD;': '\u2565', - 'boxhd;': '\u252c', - 'boxHU;': '\u2569', - 'boxHu;': '\u2567', - 'boxhU;': '\u2568', - 'boxhu;': '\u2534', - 'boxminus;': '\u229f', - 'boxplus;': '\u229e', - 'boxtimes;': '\u22a0', - 'boxUL;': '\u255d', - 'boxUl;': '\u255c', - 'boxuL;': '\u255b', - 'boxul;': '\u2518', - 'boxUR;': '\u255a', - 'boxUr;': '\u2559', - 'boxuR;': '\u2558', - 'boxur;': '\u2514', - 'boxV;': '\u2551', - 'boxv;': '\u2502', - 'boxVH;': '\u256c', - 'boxVh;': '\u256b', - 'boxvH;': '\u256a', - 'boxvh;': '\u253c', - 'boxVL;': '\u2563', - 'boxVl;': '\u2562', - 'boxvL;': '\u2561', - 'boxvl;': '\u2524', - 'boxVR;': '\u2560', - 'boxVr;': '\u255f', - 'boxvR;': '\u255e', - 'boxvr;': '\u251c', - 'bprime;': '\u2035', - 'Breve;': '\u02d8', - 'breve;': '\u02d8', - 'brvbar': '\xa6', - 'brvbar;': '\xa6', - 'Bscr;': '\u212c', - 'bscr;': '\U0001d4b7', - 'bsemi;': '\u204f', - 'bsim;': '\u223d', - 'bsime;': '\u22cd', - 'bsol;': '\\', 
- 'bsolb;': '\u29c5', - 'bsolhsub;': '\u27c8', - 'bull;': '\u2022', - 'bullet;': '\u2022', - 'bump;': '\u224e', - 'bumpE;': '\u2aae', - 'bumpe;': '\u224f', - 'Bumpeq;': '\u224e', - 'bumpeq;': '\u224f', - 'Cacute;': '\u0106', - 'cacute;': '\u0107', - 'Cap;': '\u22d2', - 'cap;': '\u2229', - 'capand;': '\u2a44', - 'capbrcup;': '\u2a49', - 'capcap;': '\u2a4b', - 'capcup;': '\u2a47', - 'capdot;': '\u2a40', - 'CapitalDifferentialD;': '\u2145', - 'caps;': '\u2229\ufe00', - 'caret;': '\u2041', - 'caron;': '\u02c7', - 'Cayleys;': '\u212d', - 'ccaps;': '\u2a4d', - 'Ccaron;': '\u010c', - 'ccaron;': '\u010d', - 'Ccedil': '\xc7', - 'ccedil': '\xe7', - 'Ccedil;': '\xc7', - 'ccedil;': '\xe7', - 'Ccirc;': '\u0108', - 'ccirc;': '\u0109', - 'Cconint;': '\u2230', - 'ccups;': '\u2a4c', - 'ccupssm;': '\u2a50', - 'Cdot;': '\u010a', - 'cdot;': '\u010b', - 'cedil': '\xb8', - 'cedil;': '\xb8', - 'Cedilla;': '\xb8', - 'cemptyv;': '\u29b2', - 'cent': '\xa2', - 'cent;': '\xa2', - 'CenterDot;': '\xb7', - 'centerdot;': '\xb7', - 'Cfr;': '\u212d', - 'cfr;': '\U0001d520', - 'CHcy;': '\u0427', - 'chcy;': '\u0447', - 'check;': '\u2713', - 'checkmark;': '\u2713', - 'Chi;': '\u03a7', - 'chi;': '\u03c7', - 'cir;': '\u25cb', - 'circ;': '\u02c6', - 'circeq;': '\u2257', - 'circlearrowleft;': '\u21ba', - 'circlearrowright;': '\u21bb', - 'circledast;': '\u229b', - 'circledcirc;': '\u229a', - 'circleddash;': '\u229d', - 'CircleDot;': '\u2299', - 'circledR;': '\xae', - 'circledS;': '\u24c8', - 'CircleMinus;': '\u2296', - 'CirclePlus;': '\u2295', - 'CircleTimes;': '\u2297', - 'cirE;': '\u29c3', - 'cire;': '\u2257', - 'cirfnint;': '\u2a10', - 'cirmid;': '\u2aef', - 'cirscir;': '\u29c2', - 'ClockwiseContourIntegral;': '\u2232', - 'CloseCurlyDoubleQuote;': '\u201d', - 'CloseCurlyQuote;': '\u2019', - 'clubs;': '\u2663', - 'clubsuit;': '\u2663', - 'Colon;': '\u2237', - 'colon;': ':', - 'Colone;': '\u2a74', - 'colone;': '\u2254', - 'coloneq;': '\u2254', - 'comma;': ',', - 'commat;': '@', - 'comp;': '\u2201', - 'compfn;': '\u2218', - 'complement;': '\u2201', - 'complexes;': '\u2102', - 'cong;': '\u2245', - 'congdot;': '\u2a6d', - 'Congruent;': '\u2261', - 'Conint;': '\u222f', - 'conint;': '\u222e', - 'ContourIntegral;': '\u222e', - 'Copf;': '\u2102', - 'copf;': '\U0001d554', - 'coprod;': '\u2210', - 'Coproduct;': '\u2210', - 'COPY': '\xa9', - 'copy': '\xa9', - 'COPY;': '\xa9', - 'copy;': '\xa9', - 'copysr;': '\u2117', - 'CounterClockwiseContourIntegral;': '\u2233', - 'crarr;': '\u21b5', - 'Cross;': '\u2a2f', - 'cross;': '\u2717', - 'Cscr;': '\U0001d49e', - 'cscr;': '\U0001d4b8', - 'csub;': '\u2acf', - 'csube;': '\u2ad1', - 'csup;': '\u2ad0', - 'csupe;': '\u2ad2', - 'ctdot;': '\u22ef', - 'cudarrl;': '\u2938', - 'cudarrr;': '\u2935', - 'cuepr;': '\u22de', - 'cuesc;': '\u22df', - 'cularr;': '\u21b6', - 'cularrp;': '\u293d', - 'Cup;': '\u22d3', - 'cup;': '\u222a', - 'cupbrcap;': '\u2a48', - 'CupCap;': '\u224d', - 'cupcap;': '\u2a46', - 'cupcup;': '\u2a4a', - 'cupdot;': '\u228d', - 'cupor;': '\u2a45', - 'cups;': '\u222a\ufe00', - 'curarr;': '\u21b7', - 'curarrm;': '\u293c', - 'curlyeqprec;': '\u22de', - 'curlyeqsucc;': '\u22df', - 'curlyvee;': '\u22ce', - 'curlywedge;': '\u22cf', - 'curren': '\xa4', - 'curren;': '\xa4', - 'curvearrowleft;': '\u21b6', - 'curvearrowright;': '\u21b7', - 'cuvee;': '\u22ce', - 'cuwed;': '\u22cf', - 'cwconint;': '\u2232', - 'cwint;': '\u2231', - 'cylcty;': '\u232d', - 'Dagger;': '\u2021', - 'dagger;': '\u2020', - 'daleth;': '\u2138', - 'Darr;': '\u21a1', - 'dArr;': '\u21d3', - 'darr;': '\u2193', - 'dash;': '\u2010', - 
'Dashv;': '\u2ae4', - 'dashv;': '\u22a3', - 'dbkarow;': '\u290f', - 'dblac;': '\u02dd', - 'Dcaron;': '\u010e', - 'dcaron;': '\u010f', - 'Dcy;': '\u0414', - 'dcy;': '\u0434', - 'DD;': '\u2145', - 'dd;': '\u2146', - 'ddagger;': '\u2021', - 'ddarr;': '\u21ca', - 'DDotrahd;': '\u2911', - 'ddotseq;': '\u2a77', - 'deg': '\xb0', - 'deg;': '\xb0', - 'Del;': '\u2207', - 'Delta;': '\u0394', - 'delta;': '\u03b4', - 'demptyv;': '\u29b1', - 'dfisht;': '\u297f', - 'Dfr;': '\U0001d507', - 'dfr;': '\U0001d521', - 'dHar;': '\u2965', - 'dharl;': '\u21c3', - 'dharr;': '\u21c2', - 'DiacriticalAcute;': '\xb4', - 'DiacriticalDot;': '\u02d9', - 'DiacriticalDoubleAcute;': '\u02dd', - 'DiacriticalGrave;': '`', - 'DiacriticalTilde;': '\u02dc', - 'diam;': '\u22c4', - 'Diamond;': '\u22c4', - 'diamond;': '\u22c4', - 'diamondsuit;': '\u2666', - 'diams;': '\u2666', - 'die;': '\xa8', - 'DifferentialD;': '\u2146', - 'digamma;': '\u03dd', - 'disin;': '\u22f2', - 'div;': '\xf7', - 'divide': '\xf7', - 'divide;': '\xf7', - 'divideontimes;': '\u22c7', - 'divonx;': '\u22c7', - 'DJcy;': '\u0402', - 'djcy;': '\u0452', - 'dlcorn;': '\u231e', - 'dlcrop;': '\u230d', - 'dollar;': '$', - 'Dopf;': '\U0001d53b', - 'dopf;': '\U0001d555', - 'Dot;': '\xa8', - 'dot;': '\u02d9', - 'DotDot;': '\u20dc', - 'doteq;': '\u2250', - 'doteqdot;': '\u2251', - 'DotEqual;': '\u2250', - 'dotminus;': '\u2238', - 'dotplus;': '\u2214', - 'dotsquare;': '\u22a1', - 'doublebarwedge;': '\u2306', - 'DoubleContourIntegral;': '\u222f', - 'DoubleDot;': '\xa8', - 'DoubleDownArrow;': '\u21d3', - 'DoubleLeftArrow;': '\u21d0', - 'DoubleLeftRightArrow;': '\u21d4', - 'DoubleLeftTee;': '\u2ae4', - 'DoubleLongLeftArrow;': '\u27f8', - 'DoubleLongLeftRightArrow;': '\u27fa', - 'DoubleLongRightArrow;': '\u27f9', - 'DoubleRightArrow;': '\u21d2', - 'DoubleRightTee;': '\u22a8', - 'DoubleUpArrow;': '\u21d1', - 'DoubleUpDownArrow;': '\u21d5', - 'DoubleVerticalBar;': '\u2225', - 'DownArrow;': '\u2193', - 'Downarrow;': '\u21d3', - 'downarrow;': '\u2193', - 'DownArrowBar;': '\u2913', - 'DownArrowUpArrow;': '\u21f5', - 'DownBreve;': '\u0311', - 'downdownarrows;': '\u21ca', - 'downharpoonleft;': '\u21c3', - 'downharpoonright;': '\u21c2', - 'DownLeftRightVector;': '\u2950', - 'DownLeftTeeVector;': '\u295e', - 'DownLeftVector;': '\u21bd', - 'DownLeftVectorBar;': '\u2956', - 'DownRightTeeVector;': '\u295f', - 'DownRightVector;': '\u21c1', - 'DownRightVectorBar;': '\u2957', - 'DownTee;': '\u22a4', - 'DownTeeArrow;': '\u21a7', - 'drbkarow;': '\u2910', - 'drcorn;': '\u231f', - 'drcrop;': '\u230c', - 'Dscr;': '\U0001d49f', - 'dscr;': '\U0001d4b9', - 'DScy;': '\u0405', - 'dscy;': '\u0455', - 'dsol;': '\u29f6', - 'Dstrok;': '\u0110', - 'dstrok;': '\u0111', - 'dtdot;': '\u22f1', - 'dtri;': '\u25bf', - 'dtrif;': '\u25be', - 'duarr;': '\u21f5', - 'duhar;': '\u296f', - 'dwangle;': '\u29a6', - 'DZcy;': '\u040f', - 'dzcy;': '\u045f', - 'dzigrarr;': '\u27ff', - 'Eacute': '\xc9', - 'eacute': '\xe9', - 'Eacute;': '\xc9', - 'eacute;': '\xe9', - 'easter;': '\u2a6e', - 'Ecaron;': '\u011a', - 'ecaron;': '\u011b', - 'ecir;': '\u2256', - 'Ecirc': '\xca', - 'ecirc': '\xea', - 'Ecirc;': '\xca', - 'ecirc;': '\xea', - 'ecolon;': '\u2255', - 'Ecy;': '\u042d', - 'ecy;': '\u044d', - 'eDDot;': '\u2a77', - 'Edot;': '\u0116', - 'eDot;': '\u2251', - 'edot;': '\u0117', - 'ee;': '\u2147', - 'efDot;': '\u2252', - 'Efr;': '\U0001d508', - 'efr;': '\U0001d522', - 'eg;': '\u2a9a', - 'Egrave': '\xc8', - 'egrave': '\xe8', - 'Egrave;': '\xc8', - 'egrave;': '\xe8', - 'egs;': '\u2a96', - 'egsdot;': '\u2a98', - 'el;': '\u2a99', - 
'Element;': '\u2208', - 'elinters;': '\u23e7', - 'ell;': '\u2113', - 'els;': '\u2a95', - 'elsdot;': '\u2a97', - 'Emacr;': '\u0112', - 'emacr;': '\u0113', - 'empty;': '\u2205', - 'emptyset;': '\u2205', - 'EmptySmallSquare;': '\u25fb', - 'emptyv;': '\u2205', - 'EmptyVerySmallSquare;': '\u25ab', - 'emsp13;': '\u2004', - 'emsp14;': '\u2005', - 'emsp;': '\u2003', - 'ENG;': '\u014a', - 'eng;': '\u014b', - 'ensp;': '\u2002', - 'Eogon;': '\u0118', - 'eogon;': '\u0119', - 'Eopf;': '\U0001d53c', - 'eopf;': '\U0001d556', - 'epar;': '\u22d5', - 'eparsl;': '\u29e3', - 'eplus;': '\u2a71', - 'epsi;': '\u03b5', - 'Epsilon;': '\u0395', - 'epsilon;': '\u03b5', - 'epsiv;': '\u03f5', - 'eqcirc;': '\u2256', - 'eqcolon;': '\u2255', - 'eqsim;': '\u2242', - 'eqslantgtr;': '\u2a96', - 'eqslantless;': '\u2a95', - 'Equal;': '\u2a75', - 'equals;': '=', - 'EqualTilde;': '\u2242', - 'equest;': '\u225f', - 'Equilibrium;': '\u21cc', - 'equiv;': '\u2261', - 'equivDD;': '\u2a78', - 'eqvparsl;': '\u29e5', - 'erarr;': '\u2971', - 'erDot;': '\u2253', - 'Escr;': '\u2130', - 'escr;': '\u212f', - 'esdot;': '\u2250', - 'Esim;': '\u2a73', - 'esim;': '\u2242', - 'Eta;': '\u0397', - 'eta;': '\u03b7', - 'ETH': '\xd0', - 'eth': '\xf0', - 'ETH;': '\xd0', - 'eth;': '\xf0', - 'Euml': '\xcb', - 'euml': '\xeb', - 'Euml;': '\xcb', - 'euml;': '\xeb', - 'euro;': '\u20ac', - 'excl;': '!', - 'exist;': '\u2203', - 'Exists;': '\u2203', - 'expectation;': '\u2130', - 'ExponentialE;': '\u2147', - 'exponentiale;': '\u2147', - 'fallingdotseq;': '\u2252', - 'Fcy;': '\u0424', - 'fcy;': '\u0444', - 'female;': '\u2640', - 'ffilig;': '\ufb03', - 'fflig;': '\ufb00', - 'ffllig;': '\ufb04', - 'Ffr;': '\U0001d509', - 'ffr;': '\U0001d523', - 'filig;': '\ufb01', - 'FilledSmallSquare;': '\u25fc', - 'FilledVerySmallSquare;': '\u25aa', - 'fjlig;': 'fj', - 'flat;': '\u266d', - 'fllig;': '\ufb02', - 'fltns;': '\u25b1', - 'fnof;': '\u0192', - 'Fopf;': '\U0001d53d', - 'fopf;': '\U0001d557', - 'ForAll;': '\u2200', - 'forall;': '\u2200', - 'fork;': '\u22d4', - 'forkv;': '\u2ad9', - 'Fouriertrf;': '\u2131', - 'fpartint;': '\u2a0d', - 'frac12': '\xbd', - 'frac12;': '\xbd', - 'frac13;': '\u2153', - 'frac14': '\xbc', - 'frac14;': '\xbc', - 'frac15;': '\u2155', - 'frac16;': '\u2159', - 'frac18;': '\u215b', - 'frac23;': '\u2154', - 'frac25;': '\u2156', - 'frac34': '\xbe', - 'frac34;': '\xbe', - 'frac35;': '\u2157', - 'frac38;': '\u215c', - 'frac45;': '\u2158', - 'frac56;': '\u215a', - 'frac58;': '\u215d', - 'frac78;': '\u215e', - 'frasl;': '\u2044', - 'frown;': '\u2322', - 'Fscr;': '\u2131', - 'fscr;': '\U0001d4bb', - 'gacute;': '\u01f5', - 'Gamma;': '\u0393', - 'gamma;': '\u03b3', - 'Gammad;': '\u03dc', - 'gammad;': '\u03dd', - 'gap;': '\u2a86', - 'Gbreve;': '\u011e', - 'gbreve;': '\u011f', - 'Gcedil;': '\u0122', - 'Gcirc;': '\u011c', - 'gcirc;': '\u011d', - 'Gcy;': '\u0413', - 'gcy;': '\u0433', - 'Gdot;': '\u0120', - 'gdot;': '\u0121', - 'gE;': '\u2267', - 'ge;': '\u2265', - 'gEl;': '\u2a8c', - 'gel;': '\u22db', - 'geq;': '\u2265', - 'geqq;': '\u2267', - 'geqslant;': '\u2a7e', - 'ges;': '\u2a7e', - 'gescc;': '\u2aa9', - 'gesdot;': '\u2a80', - 'gesdoto;': '\u2a82', - 'gesdotol;': '\u2a84', - 'gesl;': '\u22db\ufe00', - 'gesles;': '\u2a94', - 'Gfr;': '\U0001d50a', - 'gfr;': '\U0001d524', - 'Gg;': '\u22d9', - 'gg;': '\u226b', - 'ggg;': '\u22d9', - 'gimel;': '\u2137', - 'GJcy;': '\u0403', - 'gjcy;': '\u0453', - 'gl;': '\u2277', - 'gla;': '\u2aa5', - 'glE;': '\u2a92', - 'glj;': '\u2aa4', - 'gnap;': '\u2a8a', - 'gnapprox;': '\u2a8a', - 'gnE;': '\u2269', - 'gne;': '\u2a88', - 
'gneq;': '\u2a88', - 'gneqq;': '\u2269', - 'gnsim;': '\u22e7', - 'Gopf;': '\U0001d53e', - 'gopf;': '\U0001d558', - 'grave;': '`', - 'GreaterEqual;': '\u2265', - 'GreaterEqualLess;': '\u22db', - 'GreaterFullEqual;': '\u2267', - 'GreaterGreater;': '\u2aa2', - 'GreaterLess;': '\u2277', - 'GreaterSlantEqual;': '\u2a7e', - 'GreaterTilde;': '\u2273', - 'Gscr;': '\U0001d4a2', - 'gscr;': '\u210a', - 'gsim;': '\u2273', - 'gsime;': '\u2a8e', - 'gsiml;': '\u2a90', - 'GT': '>', - 'gt': '>', - 'GT;': '>', - 'Gt;': '\u226b', - 'gt;': '>', - 'gtcc;': '\u2aa7', - 'gtcir;': '\u2a7a', - 'gtdot;': '\u22d7', - 'gtlPar;': '\u2995', - 'gtquest;': '\u2a7c', - 'gtrapprox;': '\u2a86', - 'gtrarr;': '\u2978', - 'gtrdot;': '\u22d7', - 'gtreqless;': '\u22db', - 'gtreqqless;': '\u2a8c', - 'gtrless;': '\u2277', - 'gtrsim;': '\u2273', - 'gvertneqq;': '\u2269\ufe00', - 'gvnE;': '\u2269\ufe00', - 'Hacek;': '\u02c7', - 'hairsp;': '\u200a', - 'half;': '\xbd', - 'hamilt;': '\u210b', - 'HARDcy;': '\u042a', - 'hardcy;': '\u044a', - 'hArr;': '\u21d4', - 'harr;': '\u2194', - 'harrcir;': '\u2948', - 'harrw;': '\u21ad', - 'Hat;': '^', - 'hbar;': '\u210f', - 'Hcirc;': '\u0124', - 'hcirc;': '\u0125', - 'hearts;': '\u2665', - 'heartsuit;': '\u2665', - 'hellip;': '\u2026', - 'hercon;': '\u22b9', - 'Hfr;': '\u210c', - 'hfr;': '\U0001d525', - 'HilbertSpace;': '\u210b', - 'hksearow;': '\u2925', - 'hkswarow;': '\u2926', - 'hoarr;': '\u21ff', - 'homtht;': '\u223b', - 'hookleftarrow;': '\u21a9', - 'hookrightarrow;': '\u21aa', - 'Hopf;': '\u210d', - 'hopf;': '\U0001d559', - 'horbar;': '\u2015', - 'HorizontalLine;': '\u2500', - 'Hscr;': '\u210b', - 'hscr;': '\U0001d4bd', - 'hslash;': '\u210f', - 'Hstrok;': '\u0126', - 'hstrok;': '\u0127', - 'HumpDownHump;': '\u224e', - 'HumpEqual;': '\u224f', - 'hybull;': '\u2043', - 'hyphen;': '\u2010', - 'Iacute': '\xcd', - 'iacute': '\xed', - 'Iacute;': '\xcd', - 'iacute;': '\xed', - 'ic;': '\u2063', - 'Icirc': '\xce', - 'icirc': '\xee', - 'Icirc;': '\xce', - 'icirc;': '\xee', - 'Icy;': '\u0418', - 'icy;': '\u0438', - 'Idot;': '\u0130', - 'IEcy;': '\u0415', - 'iecy;': '\u0435', - 'iexcl': '\xa1', - 'iexcl;': '\xa1', - 'iff;': '\u21d4', - 'Ifr;': '\u2111', - 'ifr;': '\U0001d526', - 'Igrave': '\xcc', - 'igrave': '\xec', - 'Igrave;': '\xcc', - 'igrave;': '\xec', - 'ii;': '\u2148', - 'iiiint;': '\u2a0c', - 'iiint;': '\u222d', - 'iinfin;': '\u29dc', - 'iiota;': '\u2129', - 'IJlig;': '\u0132', - 'ijlig;': '\u0133', - 'Im;': '\u2111', - 'Imacr;': '\u012a', - 'imacr;': '\u012b', - 'image;': '\u2111', - 'ImaginaryI;': '\u2148', - 'imagline;': '\u2110', - 'imagpart;': '\u2111', - 'imath;': '\u0131', - 'imof;': '\u22b7', - 'imped;': '\u01b5', - 'Implies;': '\u21d2', - 'in;': '\u2208', - 'incare;': '\u2105', - 'infin;': '\u221e', - 'infintie;': '\u29dd', - 'inodot;': '\u0131', - 'Int;': '\u222c', - 'int;': '\u222b', - 'intcal;': '\u22ba', - 'integers;': '\u2124', - 'Integral;': '\u222b', - 'intercal;': '\u22ba', - 'Intersection;': '\u22c2', - 'intlarhk;': '\u2a17', - 'intprod;': '\u2a3c', - 'InvisibleComma;': '\u2063', - 'InvisibleTimes;': '\u2062', - 'IOcy;': '\u0401', - 'iocy;': '\u0451', - 'Iogon;': '\u012e', - 'iogon;': '\u012f', - 'Iopf;': '\U0001d540', - 'iopf;': '\U0001d55a', - 'Iota;': '\u0399', - 'iota;': '\u03b9', - 'iprod;': '\u2a3c', - 'iquest': '\xbf', - 'iquest;': '\xbf', - 'Iscr;': '\u2110', - 'iscr;': '\U0001d4be', - 'isin;': '\u2208', - 'isindot;': '\u22f5', - 'isinE;': '\u22f9', - 'isins;': '\u22f4', - 'isinsv;': '\u22f3', - 'isinv;': '\u2208', - 'it;': '\u2062', - 'Itilde;': '\u0128', - 'itilde;': 
'\u0129', - 'Iukcy;': '\u0406', - 'iukcy;': '\u0456', - 'Iuml': '\xcf', - 'iuml': '\xef', - 'Iuml;': '\xcf', - 'iuml;': '\xef', - 'Jcirc;': '\u0134', - 'jcirc;': '\u0135', - 'Jcy;': '\u0419', - 'jcy;': '\u0439', - 'Jfr;': '\U0001d50d', - 'jfr;': '\U0001d527', - 'jmath;': '\u0237', - 'Jopf;': '\U0001d541', - 'jopf;': '\U0001d55b', - 'Jscr;': '\U0001d4a5', - 'jscr;': '\U0001d4bf', - 'Jsercy;': '\u0408', - 'jsercy;': '\u0458', - 'Jukcy;': '\u0404', - 'jukcy;': '\u0454', - 'Kappa;': '\u039a', - 'kappa;': '\u03ba', - 'kappav;': '\u03f0', - 'Kcedil;': '\u0136', - 'kcedil;': '\u0137', - 'Kcy;': '\u041a', - 'kcy;': '\u043a', - 'Kfr;': '\U0001d50e', - 'kfr;': '\U0001d528', - 'kgreen;': '\u0138', - 'KHcy;': '\u0425', - 'khcy;': '\u0445', - 'KJcy;': '\u040c', - 'kjcy;': '\u045c', - 'Kopf;': '\U0001d542', - 'kopf;': '\U0001d55c', - 'Kscr;': '\U0001d4a6', - 'kscr;': '\U0001d4c0', - 'lAarr;': '\u21da', - 'Lacute;': '\u0139', - 'lacute;': '\u013a', - 'laemptyv;': '\u29b4', - 'lagran;': '\u2112', - 'Lambda;': '\u039b', - 'lambda;': '\u03bb', - 'Lang;': '\u27ea', - 'lang;': '\u27e8', - 'langd;': '\u2991', - 'langle;': '\u27e8', - 'lap;': '\u2a85', - 'Laplacetrf;': '\u2112', - 'laquo': '\xab', - 'laquo;': '\xab', - 'Larr;': '\u219e', - 'lArr;': '\u21d0', - 'larr;': '\u2190', - 'larrb;': '\u21e4', - 'larrbfs;': '\u291f', - 'larrfs;': '\u291d', - 'larrhk;': '\u21a9', - 'larrlp;': '\u21ab', - 'larrpl;': '\u2939', - 'larrsim;': '\u2973', - 'larrtl;': '\u21a2', - 'lat;': '\u2aab', - 'lAtail;': '\u291b', - 'latail;': '\u2919', - 'late;': '\u2aad', - 'lates;': '\u2aad\ufe00', - 'lBarr;': '\u290e', - 'lbarr;': '\u290c', - 'lbbrk;': '\u2772', - 'lbrace;': '{', - 'lbrack;': '[', - 'lbrke;': '\u298b', - 'lbrksld;': '\u298f', - 'lbrkslu;': '\u298d', - 'Lcaron;': '\u013d', - 'lcaron;': '\u013e', - 'Lcedil;': '\u013b', - 'lcedil;': '\u013c', - 'lceil;': '\u2308', - 'lcub;': '{', - 'Lcy;': '\u041b', - 'lcy;': '\u043b', - 'ldca;': '\u2936', - 'ldquo;': '\u201c', - 'ldquor;': '\u201e', - 'ldrdhar;': '\u2967', - 'ldrushar;': '\u294b', - 'ldsh;': '\u21b2', - 'lE;': '\u2266', - 'le;': '\u2264', - 'LeftAngleBracket;': '\u27e8', - 'LeftArrow;': '\u2190', - 'Leftarrow;': '\u21d0', - 'leftarrow;': '\u2190', - 'LeftArrowBar;': '\u21e4', - 'LeftArrowRightArrow;': '\u21c6', - 'leftarrowtail;': '\u21a2', - 'LeftCeiling;': '\u2308', - 'LeftDoubleBracket;': '\u27e6', - 'LeftDownTeeVector;': '\u2961', - 'LeftDownVector;': '\u21c3', - 'LeftDownVectorBar;': '\u2959', - 'LeftFloor;': '\u230a', - 'leftharpoondown;': '\u21bd', - 'leftharpoonup;': '\u21bc', - 'leftleftarrows;': '\u21c7', - 'LeftRightArrow;': '\u2194', - 'Leftrightarrow;': '\u21d4', - 'leftrightarrow;': '\u2194', - 'leftrightarrows;': '\u21c6', - 'leftrightharpoons;': '\u21cb', - 'leftrightsquigarrow;': '\u21ad', - 'LeftRightVector;': '\u294e', - 'LeftTee;': '\u22a3', - 'LeftTeeArrow;': '\u21a4', - 'LeftTeeVector;': '\u295a', - 'leftthreetimes;': '\u22cb', - 'LeftTriangle;': '\u22b2', - 'LeftTriangleBar;': '\u29cf', - 'LeftTriangleEqual;': '\u22b4', - 'LeftUpDownVector;': '\u2951', - 'LeftUpTeeVector;': '\u2960', - 'LeftUpVector;': '\u21bf', - 'LeftUpVectorBar;': '\u2958', - 'LeftVector;': '\u21bc', - 'LeftVectorBar;': '\u2952', - 'lEg;': '\u2a8b', - 'leg;': '\u22da', - 'leq;': '\u2264', - 'leqq;': '\u2266', - 'leqslant;': '\u2a7d', - 'les;': '\u2a7d', - 'lescc;': '\u2aa8', - 'lesdot;': '\u2a7f', - 'lesdoto;': '\u2a81', - 'lesdotor;': '\u2a83', - 'lesg;': '\u22da\ufe00', - 'lesges;': '\u2a93', - 'lessapprox;': '\u2a85', - 'lessdot;': '\u22d6', - 'lesseqgtr;': '\u22da', - 
'lesseqqgtr;': '\u2a8b', - 'LessEqualGreater;': '\u22da', - 'LessFullEqual;': '\u2266', - 'LessGreater;': '\u2276', - 'lessgtr;': '\u2276', - 'LessLess;': '\u2aa1', - 'lesssim;': '\u2272', - 'LessSlantEqual;': '\u2a7d', - 'LessTilde;': '\u2272', - 'lfisht;': '\u297c', - 'lfloor;': '\u230a', - 'Lfr;': '\U0001d50f', - 'lfr;': '\U0001d529', - 'lg;': '\u2276', - 'lgE;': '\u2a91', - 'lHar;': '\u2962', - 'lhard;': '\u21bd', - 'lharu;': '\u21bc', - 'lharul;': '\u296a', - 'lhblk;': '\u2584', - 'LJcy;': '\u0409', - 'ljcy;': '\u0459', - 'Ll;': '\u22d8', - 'll;': '\u226a', - 'llarr;': '\u21c7', - 'llcorner;': '\u231e', - 'Lleftarrow;': '\u21da', - 'llhard;': '\u296b', - 'lltri;': '\u25fa', - 'Lmidot;': '\u013f', - 'lmidot;': '\u0140', - 'lmoust;': '\u23b0', - 'lmoustache;': '\u23b0', - 'lnap;': '\u2a89', - 'lnapprox;': '\u2a89', - 'lnE;': '\u2268', - 'lne;': '\u2a87', - 'lneq;': '\u2a87', - 'lneqq;': '\u2268', - 'lnsim;': '\u22e6', - 'loang;': '\u27ec', - 'loarr;': '\u21fd', - 'lobrk;': '\u27e6', - 'LongLeftArrow;': '\u27f5', - 'Longleftarrow;': '\u27f8', - 'longleftarrow;': '\u27f5', - 'LongLeftRightArrow;': '\u27f7', - 'Longleftrightarrow;': '\u27fa', - 'longleftrightarrow;': '\u27f7', - 'longmapsto;': '\u27fc', - 'LongRightArrow;': '\u27f6', - 'Longrightarrow;': '\u27f9', - 'longrightarrow;': '\u27f6', - 'looparrowleft;': '\u21ab', - 'looparrowright;': '\u21ac', - 'lopar;': '\u2985', - 'Lopf;': '\U0001d543', - 'lopf;': '\U0001d55d', - 'loplus;': '\u2a2d', - 'lotimes;': '\u2a34', - 'lowast;': '\u2217', - 'lowbar;': '_', - 'LowerLeftArrow;': '\u2199', - 'LowerRightArrow;': '\u2198', - 'loz;': '\u25ca', - 'lozenge;': '\u25ca', - 'lozf;': '\u29eb', - 'lpar;': '(', - 'lparlt;': '\u2993', - 'lrarr;': '\u21c6', - 'lrcorner;': '\u231f', - 'lrhar;': '\u21cb', - 'lrhard;': '\u296d', - 'lrm;': '\u200e', - 'lrtri;': '\u22bf', - 'lsaquo;': '\u2039', - 'Lscr;': '\u2112', - 'lscr;': '\U0001d4c1', - 'Lsh;': '\u21b0', - 'lsh;': '\u21b0', - 'lsim;': '\u2272', - 'lsime;': '\u2a8d', - 'lsimg;': '\u2a8f', - 'lsqb;': '[', - 'lsquo;': '\u2018', - 'lsquor;': '\u201a', - 'Lstrok;': '\u0141', - 'lstrok;': '\u0142', - 'LT': '<', - 'lt': '<', - 'LT;': '<', - 'Lt;': '\u226a', - 'lt;': '<', - 'ltcc;': '\u2aa6', - 'ltcir;': '\u2a79', - 'ltdot;': '\u22d6', - 'lthree;': '\u22cb', - 'ltimes;': '\u22c9', - 'ltlarr;': '\u2976', - 'ltquest;': '\u2a7b', - 'ltri;': '\u25c3', - 'ltrie;': '\u22b4', - 'ltrif;': '\u25c2', - 'ltrPar;': '\u2996', - 'lurdshar;': '\u294a', - 'luruhar;': '\u2966', - 'lvertneqq;': '\u2268\ufe00', - 'lvnE;': '\u2268\ufe00', - 'macr': '\xaf', - 'macr;': '\xaf', - 'male;': '\u2642', - 'malt;': '\u2720', - 'maltese;': '\u2720', - 'Map;': '\u2905', - 'map;': '\u21a6', - 'mapsto;': '\u21a6', - 'mapstodown;': '\u21a7', - 'mapstoleft;': '\u21a4', - 'mapstoup;': '\u21a5', - 'marker;': '\u25ae', - 'mcomma;': '\u2a29', - 'Mcy;': '\u041c', - 'mcy;': '\u043c', - 'mdash;': '\u2014', - 'mDDot;': '\u223a', - 'measuredangle;': '\u2221', - 'MediumSpace;': '\u205f', - 'Mellintrf;': '\u2133', - 'Mfr;': '\U0001d510', - 'mfr;': '\U0001d52a', - 'mho;': '\u2127', - 'micro': '\xb5', - 'micro;': '\xb5', - 'mid;': '\u2223', - 'midast;': '*', - 'midcir;': '\u2af0', - 'middot': '\xb7', - 'middot;': '\xb7', - 'minus;': '\u2212', - 'minusb;': '\u229f', - 'minusd;': '\u2238', - 'minusdu;': '\u2a2a', - 'MinusPlus;': '\u2213', - 'mlcp;': '\u2adb', - 'mldr;': '\u2026', - 'mnplus;': '\u2213', - 'models;': '\u22a7', - 'Mopf;': '\U0001d544', - 'mopf;': '\U0001d55e', - 'mp;': '\u2213', - 'Mscr;': '\u2133', - 'mscr;': '\U0001d4c2', - 'mstpos;': 
'\u223e', - 'Mu;': '\u039c', - 'mu;': '\u03bc', - 'multimap;': '\u22b8', - 'mumap;': '\u22b8', - 'nabla;': '\u2207', - 'Nacute;': '\u0143', - 'nacute;': '\u0144', - 'nang;': '\u2220\u20d2', - 'nap;': '\u2249', - 'napE;': '\u2a70\u0338', - 'napid;': '\u224b\u0338', - 'napos;': '\u0149', - 'napprox;': '\u2249', - 'natur;': '\u266e', - 'natural;': '\u266e', - 'naturals;': '\u2115', - 'nbsp': '\xa0', - 'nbsp;': '\xa0', - 'nbump;': '\u224e\u0338', - 'nbumpe;': '\u224f\u0338', - 'ncap;': '\u2a43', - 'Ncaron;': '\u0147', - 'ncaron;': '\u0148', - 'Ncedil;': '\u0145', - 'ncedil;': '\u0146', - 'ncong;': '\u2247', - 'ncongdot;': '\u2a6d\u0338', - 'ncup;': '\u2a42', - 'Ncy;': '\u041d', - 'ncy;': '\u043d', - 'ndash;': '\u2013', - 'ne;': '\u2260', - 'nearhk;': '\u2924', - 'neArr;': '\u21d7', - 'nearr;': '\u2197', - 'nearrow;': '\u2197', - 'nedot;': '\u2250\u0338', - 'NegativeMediumSpace;': '\u200b', - 'NegativeThickSpace;': '\u200b', - 'NegativeThinSpace;': '\u200b', - 'NegativeVeryThinSpace;': '\u200b', - 'nequiv;': '\u2262', - 'nesear;': '\u2928', - 'nesim;': '\u2242\u0338', - 'NestedGreaterGreater;': '\u226b', - 'NestedLessLess;': '\u226a', - 'NewLine;': '\n', - 'nexist;': '\u2204', - 'nexists;': '\u2204', - 'Nfr;': '\U0001d511', - 'nfr;': '\U0001d52b', - 'ngE;': '\u2267\u0338', - 'nge;': '\u2271', - 'ngeq;': '\u2271', - 'ngeqq;': '\u2267\u0338', - 'ngeqslant;': '\u2a7e\u0338', - 'nges;': '\u2a7e\u0338', - 'nGg;': '\u22d9\u0338', - 'ngsim;': '\u2275', - 'nGt;': '\u226b\u20d2', - 'ngt;': '\u226f', - 'ngtr;': '\u226f', - 'nGtv;': '\u226b\u0338', - 'nhArr;': '\u21ce', - 'nharr;': '\u21ae', - 'nhpar;': '\u2af2', - 'ni;': '\u220b', - 'nis;': '\u22fc', - 'nisd;': '\u22fa', - 'niv;': '\u220b', - 'NJcy;': '\u040a', - 'njcy;': '\u045a', - 'nlArr;': '\u21cd', - 'nlarr;': '\u219a', - 'nldr;': '\u2025', - 'nlE;': '\u2266\u0338', - 'nle;': '\u2270', - 'nLeftarrow;': '\u21cd', - 'nleftarrow;': '\u219a', - 'nLeftrightarrow;': '\u21ce', - 'nleftrightarrow;': '\u21ae', - 'nleq;': '\u2270', - 'nleqq;': '\u2266\u0338', - 'nleqslant;': '\u2a7d\u0338', - 'nles;': '\u2a7d\u0338', - 'nless;': '\u226e', - 'nLl;': '\u22d8\u0338', - 'nlsim;': '\u2274', - 'nLt;': '\u226a\u20d2', - 'nlt;': '\u226e', - 'nltri;': '\u22ea', - 'nltrie;': '\u22ec', - 'nLtv;': '\u226a\u0338', - 'nmid;': '\u2224', - 'NoBreak;': '\u2060', - 'NonBreakingSpace;': '\xa0', - 'Nopf;': '\u2115', - 'nopf;': '\U0001d55f', - 'not': '\xac', - 'Not;': '\u2aec', - 'not;': '\xac', - 'NotCongruent;': '\u2262', - 'NotCupCap;': '\u226d', - 'NotDoubleVerticalBar;': '\u2226', - 'NotElement;': '\u2209', - 'NotEqual;': '\u2260', - 'NotEqualTilde;': '\u2242\u0338', - 'NotExists;': '\u2204', - 'NotGreater;': '\u226f', - 'NotGreaterEqual;': '\u2271', - 'NotGreaterFullEqual;': '\u2267\u0338', - 'NotGreaterGreater;': '\u226b\u0338', - 'NotGreaterLess;': '\u2279', - 'NotGreaterSlantEqual;': '\u2a7e\u0338', - 'NotGreaterTilde;': '\u2275', - 'NotHumpDownHump;': '\u224e\u0338', - 'NotHumpEqual;': '\u224f\u0338', - 'notin;': '\u2209', - 'notindot;': '\u22f5\u0338', - 'notinE;': '\u22f9\u0338', - 'notinva;': '\u2209', - 'notinvb;': '\u22f7', - 'notinvc;': '\u22f6', - 'NotLeftTriangle;': '\u22ea', - 'NotLeftTriangleBar;': '\u29cf\u0338', - 'NotLeftTriangleEqual;': '\u22ec', - 'NotLess;': '\u226e', - 'NotLessEqual;': '\u2270', - 'NotLessGreater;': '\u2278', - 'NotLessLess;': '\u226a\u0338', - 'NotLessSlantEqual;': '\u2a7d\u0338', - 'NotLessTilde;': '\u2274', - 'NotNestedGreaterGreater;': '\u2aa2\u0338', - 'NotNestedLessLess;': '\u2aa1\u0338', - 'notni;': '\u220c', - 'notniva;': 
'\u220c', - 'notnivb;': '\u22fe', - 'notnivc;': '\u22fd', - 'NotPrecedes;': '\u2280', - 'NotPrecedesEqual;': '\u2aaf\u0338', - 'NotPrecedesSlantEqual;': '\u22e0', - 'NotReverseElement;': '\u220c', - 'NotRightTriangle;': '\u22eb', - 'NotRightTriangleBar;': '\u29d0\u0338', - 'NotRightTriangleEqual;': '\u22ed', - 'NotSquareSubset;': '\u228f\u0338', - 'NotSquareSubsetEqual;': '\u22e2', - 'NotSquareSuperset;': '\u2290\u0338', - 'NotSquareSupersetEqual;': '\u22e3', - 'NotSubset;': '\u2282\u20d2', - 'NotSubsetEqual;': '\u2288', - 'NotSucceeds;': '\u2281', - 'NotSucceedsEqual;': '\u2ab0\u0338', - 'NotSucceedsSlantEqual;': '\u22e1', - 'NotSucceedsTilde;': '\u227f\u0338', - 'NotSuperset;': '\u2283\u20d2', - 'NotSupersetEqual;': '\u2289', - 'NotTilde;': '\u2241', - 'NotTildeEqual;': '\u2244', - 'NotTildeFullEqual;': '\u2247', - 'NotTildeTilde;': '\u2249', - 'NotVerticalBar;': '\u2224', - 'npar;': '\u2226', - 'nparallel;': '\u2226', - 'nparsl;': '\u2afd\u20e5', - 'npart;': '\u2202\u0338', - 'npolint;': '\u2a14', - 'npr;': '\u2280', - 'nprcue;': '\u22e0', - 'npre;': '\u2aaf\u0338', - 'nprec;': '\u2280', - 'npreceq;': '\u2aaf\u0338', - 'nrArr;': '\u21cf', - 'nrarr;': '\u219b', - 'nrarrc;': '\u2933\u0338', - 'nrarrw;': '\u219d\u0338', - 'nRightarrow;': '\u21cf', - 'nrightarrow;': '\u219b', - 'nrtri;': '\u22eb', - 'nrtrie;': '\u22ed', - 'nsc;': '\u2281', - 'nsccue;': '\u22e1', - 'nsce;': '\u2ab0\u0338', - 'Nscr;': '\U0001d4a9', - 'nscr;': '\U0001d4c3', - 'nshortmid;': '\u2224', - 'nshortparallel;': '\u2226', - 'nsim;': '\u2241', - 'nsime;': '\u2244', - 'nsimeq;': '\u2244', - 'nsmid;': '\u2224', - 'nspar;': '\u2226', - 'nsqsube;': '\u22e2', - 'nsqsupe;': '\u22e3', - 'nsub;': '\u2284', - 'nsubE;': '\u2ac5\u0338', - 'nsube;': '\u2288', - 'nsubset;': '\u2282\u20d2', - 'nsubseteq;': '\u2288', - 'nsubseteqq;': '\u2ac5\u0338', - 'nsucc;': '\u2281', - 'nsucceq;': '\u2ab0\u0338', - 'nsup;': '\u2285', - 'nsupE;': '\u2ac6\u0338', - 'nsupe;': '\u2289', - 'nsupset;': '\u2283\u20d2', - 'nsupseteq;': '\u2289', - 'nsupseteqq;': '\u2ac6\u0338', - 'ntgl;': '\u2279', - 'Ntilde': '\xd1', - 'ntilde': '\xf1', - 'Ntilde;': '\xd1', - 'ntilde;': '\xf1', - 'ntlg;': '\u2278', - 'ntriangleleft;': '\u22ea', - 'ntrianglelefteq;': '\u22ec', - 'ntriangleright;': '\u22eb', - 'ntrianglerighteq;': '\u22ed', - 'Nu;': '\u039d', - 'nu;': '\u03bd', - 'num;': '#', - 'numero;': '\u2116', - 'numsp;': '\u2007', - 'nvap;': '\u224d\u20d2', - 'nVDash;': '\u22af', - 'nVdash;': '\u22ae', - 'nvDash;': '\u22ad', - 'nvdash;': '\u22ac', - 'nvge;': '\u2265\u20d2', - 'nvgt;': '>\u20d2', - 'nvHarr;': '\u2904', - 'nvinfin;': '\u29de', - 'nvlArr;': '\u2902', - 'nvle;': '\u2264\u20d2', - 'nvlt;': '<\u20d2', - 'nvltrie;': '\u22b4\u20d2', - 'nvrArr;': '\u2903', - 'nvrtrie;': '\u22b5\u20d2', - 'nvsim;': '\u223c\u20d2', - 'nwarhk;': '\u2923', - 'nwArr;': '\u21d6', - 'nwarr;': '\u2196', - 'nwarrow;': '\u2196', - 'nwnear;': '\u2927', - 'Oacute': '\xd3', - 'oacute': '\xf3', - 'Oacute;': '\xd3', - 'oacute;': '\xf3', - 'oast;': '\u229b', - 'ocir;': '\u229a', - 'Ocirc': '\xd4', - 'ocirc': '\xf4', - 'Ocirc;': '\xd4', - 'ocirc;': '\xf4', - 'Ocy;': '\u041e', - 'ocy;': '\u043e', - 'odash;': '\u229d', - 'Odblac;': '\u0150', - 'odblac;': '\u0151', - 'odiv;': '\u2a38', - 'odot;': '\u2299', - 'odsold;': '\u29bc', - 'OElig;': '\u0152', - 'oelig;': '\u0153', - 'ofcir;': '\u29bf', - 'Ofr;': '\U0001d512', - 'ofr;': '\U0001d52c', - 'ogon;': '\u02db', - 'Ograve': '\xd2', - 'ograve': '\xf2', - 'Ograve;': '\xd2', - 'ograve;': '\xf2', - 'ogt;': '\u29c1', - 'ohbar;': '\u29b5', - 'ohm;': 
'\u03a9', - 'oint;': '\u222e', - 'olarr;': '\u21ba', - 'olcir;': '\u29be', - 'olcross;': '\u29bb', - 'oline;': '\u203e', - 'olt;': '\u29c0', - 'Omacr;': '\u014c', - 'omacr;': '\u014d', - 'Omega;': '\u03a9', - 'omega;': '\u03c9', - 'Omicron;': '\u039f', - 'omicron;': '\u03bf', - 'omid;': '\u29b6', - 'ominus;': '\u2296', - 'Oopf;': '\U0001d546', - 'oopf;': '\U0001d560', - 'opar;': '\u29b7', - 'OpenCurlyDoubleQuote;': '\u201c', - 'OpenCurlyQuote;': '\u2018', - 'operp;': '\u29b9', - 'oplus;': '\u2295', - 'Or;': '\u2a54', - 'or;': '\u2228', - 'orarr;': '\u21bb', - 'ord;': '\u2a5d', - 'order;': '\u2134', - 'orderof;': '\u2134', - 'ordf': '\xaa', - 'ordf;': '\xaa', - 'ordm': '\xba', - 'ordm;': '\xba', - 'origof;': '\u22b6', - 'oror;': '\u2a56', - 'orslope;': '\u2a57', - 'orv;': '\u2a5b', - 'oS;': '\u24c8', - 'Oscr;': '\U0001d4aa', - 'oscr;': '\u2134', - 'Oslash': '\xd8', - 'oslash': '\xf8', - 'Oslash;': '\xd8', - 'oslash;': '\xf8', - 'osol;': '\u2298', - 'Otilde': '\xd5', - 'otilde': '\xf5', - 'Otilde;': '\xd5', - 'otilde;': '\xf5', - 'Otimes;': '\u2a37', - 'otimes;': '\u2297', - 'otimesas;': '\u2a36', - 'Ouml': '\xd6', - 'ouml': '\xf6', - 'Ouml;': '\xd6', - 'ouml;': '\xf6', - 'ovbar;': '\u233d', - 'OverBar;': '\u203e', - 'OverBrace;': '\u23de', - 'OverBracket;': '\u23b4', - 'OverParenthesis;': '\u23dc', - 'par;': '\u2225', - 'para': '\xb6', - 'para;': '\xb6', - 'parallel;': '\u2225', - 'parsim;': '\u2af3', - 'parsl;': '\u2afd', - 'part;': '\u2202', - 'PartialD;': '\u2202', - 'Pcy;': '\u041f', - 'pcy;': '\u043f', - 'percnt;': '%', - 'period;': '.', - 'permil;': '\u2030', - 'perp;': '\u22a5', - 'pertenk;': '\u2031', - 'Pfr;': '\U0001d513', - 'pfr;': '\U0001d52d', - 'Phi;': '\u03a6', - 'phi;': '\u03c6', - 'phiv;': '\u03d5', - 'phmmat;': '\u2133', - 'phone;': '\u260e', - 'Pi;': '\u03a0', - 'pi;': '\u03c0', - 'pitchfork;': '\u22d4', - 'piv;': '\u03d6', - 'planck;': '\u210f', - 'planckh;': '\u210e', - 'plankv;': '\u210f', - 'plus;': '+', - 'plusacir;': '\u2a23', - 'plusb;': '\u229e', - 'pluscir;': '\u2a22', - 'plusdo;': '\u2214', - 'plusdu;': '\u2a25', - 'pluse;': '\u2a72', - 'PlusMinus;': '\xb1', - 'plusmn': '\xb1', - 'plusmn;': '\xb1', - 'plussim;': '\u2a26', - 'plustwo;': '\u2a27', - 'pm;': '\xb1', - 'Poincareplane;': '\u210c', - 'pointint;': '\u2a15', - 'Popf;': '\u2119', - 'popf;': '\U0001d561', - 'pound': '\xa3', - 'pound;': '\xa3', - 'Pr;': '\u2abb', - 'pr;': '\u227a', - 'prap;': '\u2ab7', - 'prcue;': '\u227c', - 'prE;': '\u2ab3', - 'pre;': '\u2aaf', - 'prec;': '\u227a', - 'precapprox;': '\u2ab7', - 'preccurlyeq;': '\u227c', - 'Precedes;': '\u227a', - 'PrecedesEqual;': '\u2aaf', - 'PrecedesSlantEqual;': '\u227c', - 'PrecedesTilde;': '\u227e', - 'preceq;': '\u2aaf', - 'precnapprox;': '\u2ab9', - 'precneqq;': '\u2ab5', - 'precnsim;': '\u22e8', - 'precsim;': '\u227e', - 'Prime;': '\u2033', - 'prime;': '\u2032', - 'primes;': '\u2119', - 'prnap;': '\u2ab9', - 'prnE;': '\u2ab5', - 'prnsim;': '\u22e8', - 'prod;': '\u220f', - 'Product;': '\u220f', - 'profalar;': '\u232e', - 'profline;': '\u2312', - 'profsurf;': '\u2313', - 'prop;': '\u221d', - 'Proportion;': '\u2237', - 'Proportional;': '\u221d', - 'propto;': '\u221d', - 'prsim;': '\u227e', - 'prurel;': '\u22b0', - 'Pscr;': '\U0001d4ab', - 'pscr;': '\U0001d4c5', - 'Psi;': '\u03a8', - 'psi;': '\u03c8', - 'puncsp;': '\u2008', - 'Qfr;': '\U0001d514', - 'qfr;': '\U0001d52e', - 'qint;': '\u2a0c', - 'Qopf;': '\u211a', - 'qopf;': '\U0001d562', - 'qprime;': '\u2057', - 'Qscr;': '\U0001d4ac', - 'qscr;': '\U0001d4c6', - 'quaternions;': '\u210d', - 'quatint;': 
'\u2a16', - 'quest;': '?', - 'questeq;': '\u225f', - 'QUOT': '"', - 'quot': '"', - 'QUOT;': '"', - 'quot;': '"', - 'rAarr;': '\u21db', - 'race;': '\u223d\u0331', - 'Racute;': '\u0154', - 'racute;': '\u0155', - 'radic;': '\u221a', - 'raemptyv;': '\u29b3', - 'Rang;': '\u27eb', - 'rang;': '\u27e9', - 'rangd;': '\u2992', - 'range;': '\u29a5', - 'rangle;': '\u27e9', - 'raquo': '\xbb', - 'raquo;': '\xbb', - 'Rarr;': '\u21a0', - 'rArr;': '\u21d2', - 'rarr;': '\u2192', - 'rarrap;': '\u2975', - 'rarrb;': '\u21e5', - 'rarrbfs;': '\u2920', - 'rarrc;': '\u2933', - 'rarrfs;': '\u291e', - 'rarrhk;': '\u21aa', - 'rarrlp;': '\u21ac', - 'rarrpl;': '\u2945', - 'rarrsim;': '\u2974', - 'Rarrtl;': '\u2916', - 'rarrtl;': '\u21a3', - 'rarrw;': '\u219d', - 'rAtail;': '\u291c', - 'ratail;': '\u291a', - 'ratio;': '\u2236', - 'rationals;': '\u211a', - 'RBarr;': '\u2910', - 'rBarr;': '\u290f', - 'rbarr;': '\u290d', - 'rbbrk;': '\u2773', - 'rbrace;': '}', - 'rbrack;': ']', - 'rbrke;': '\u298c', - 'rbrksld;': '\u298e', - 'rbrkslu;': '\u2990', - 'Rcaron;': '\u0158', - 'rcaron;': '\u0159', - 'Rcedil;': '\u0156', - 'rcedil;': '\u0157', - 'rceil;': '\u2309', - 'rcub;': '}', - 'Rcy;': '\u0420', - 'rcy;': '\u0440', - 'rdca;': '\u2937', - 'rdldhar;': '\u2969', - 'rdquo;': '\u201d', - 'rdquor;': '\u201d', - 'rdsh;': '\u21b3', - 'Re;': '\u211c', - 'real;': '\u211c', - 'realine;': '\u211b', - 'realpart;': '\u211c', - 'reals;': '\u211d', - 'rect;': '\u25ad', - 'REG': '\xae', - 'reg': '\xae', - 'REG;': '\xae', - 'reg;': '\xae', - 'ReverseElement;': '\u220b', - 'ReverseEquilibrium;': '\u21cb', - 'ReverseUpEquilibrium;': '\u296f', - 'rfisht;': '\u297d', - 'rfloor;': '\u230b', - 'Rfr;': '\u211c', - 'rfr;': '\U0001d52f', - 'rHar;': '\u2964', - 'rhard;': '\u21c1', - 'rharu;': '\u21c0', - 'rharul;': '\u296c', - 'Rho;': '\u03a1', - 'rho;': '\u03c1', - 'rhov;': '\u03f1', - 'RightAngleBracket;': '\u27e9', - 'RightArrow;': '\u2192', - 'Rightarrow;': '\u21d2', - 'rightarrow;': '\u2192', - 'RightArrowBar;': '\u21e5', - 'RightArrowLeftArrow;': '\u21c4', - 'rightarrowtail;': '\u21a3', - 'RightCeiling;': '\u2309', - 'RightDoubleBracket;': '\u27e7', - 'RightDownTeeVector;': '\u295d', - 'RightDownVector;': '\u21c2', - 'RightDownVectorBar;': '\u2955', - 'RightFloor;': '\u230b', - 'rightharpoondown;': '\u21c1', - 'rightharpoonup;': '\u21c0', - 'rightleftarrows;': '\u21c4', - 'rightleftharpoons;': '\u21cc', - 'rightrightarrows;': '\u21c9', - 'rightsquigarrow;': '\u219d', - 'RightTee;': '\u22a2', - 'RightTeeArrow;': '\u21a6', - 'RightTeeVector;': '\u295b', - 'rightthreetimes;': '\u22cc', - 'RightTriangle;': '\u22b3', - 'RightTriangleBar;': '\u29d0', - 'RightTriangleEqual;': '\u22b5', - 'RightUpDownVector;': '\u294f', - 'RightUpTeeVector;': '\u295c', - 'RightUpVector;': '\u21be', - 'RightUpVectorBar;': '\u2954', - 'RightVector;': '\u21c0', - 'RightVectorBar;': '\u2953', - 'ring;': '\u02da', - 'risingdotseq;': '\u2253', - 'rlarr;': '\u21c4', - 'rlhar;': '\u21cc', - 'rlm;': '\u200f', - 'rmoust;': '\u23b1', - 'rmoustache;': '\u23b1', - 'rnmid;': '\u2aee', - 'roang;': '\u27ed', - 'roarr;': '\u21fe', - 'robrk;': '\u27e7', - 'ropar;': '\u2986', - 'Ropf;': '\u211d', - 'ropf;': '\U0001d563', - 'roplus;': '\u2a2e', - 'rotimes;': '\u2a35', - 'RoundImplies;': '\u2970', - 'rpar;': ')', - 'rpargt;': '\u2994', - 'rppolint;': '\u2a12', - 'rrarr;': '\u21c9', - 'Rrightarrow;': '\u21db', - 'rsaquo;': '\u203a', - 'Rscr;': '\u211b', - 'rscr;': '\U0001d4c7', - 'Rsh;': '\u21b1', - 'rsh;': '\u21b1', - 'rsqb;': ']', - 'rsquo;': '\u2019', - 'rsquor;': '\u2019', - 'rthree;': 
'\u22cc', - 'rtimes;': '\u22ca', - 'rtri;': '\u25b9', - 'rtrie;': '\u22b5', - 'rtrif;': '\u25b8', - 'rtriltri;': '\u29ce', - 'RuleDelayed;': '\u29f4', - 'ruluhar;': '\u2968', - 'rx;': '\u211e', - 'Sacute;': '\u015a', - 'sacute;': '\u015b', - 'sbquo;': '\u201a', - 'Sc;': '\u2abc', - 'sc;': '\u227b', - 'scap;': '\u2ab8', - 'Scaron;': '\u0160', - 'scaron;': '\u0161', - 'sccue;': '\u227d', - 'scE;': '\u2ab4', - 'sce;': '\u2ab0', - 'Scedil;': '\u015e', - 'scedil;': '\u015f', - 'Scirc;': '\u015c', - 'scirc;': '\u015d', - 'scnap;': '\u2aba', - 'scnE;': '\u2ab6', - 'scnsim;': '\u22e9', - 'scpolint;': '\u2a13', - 'scsim;': '\u227f', - 'Scy;': '\u0421', - 'scy;': '\u0441', - 'sdot;': '\u22c5', - 'sdotb;': '\u22a1', - 'sdote;': '\u2a66', - 'searhk;': '\u2925', - 'seArr;': '\u21d8', - 'searr;': '\u2198', - 'searrow;': '\u2198', - 'sect': '\xa7', - 'sect;': '\xa7', - 'semi;': ';', - 'seswar;': '\u2929', - 'setminus;': '\u2216', - 'setmn;': '\u2216', - 'sext;': '\u2736', - 'Sfr;': '\U0001d516', - 'sfr;': '\U0001d530', - 'sfrown;': '\u2322', - 'sharp;': '\u266f', - 'SHCHcy;': '\u0429', - 'shchcy;': '\u0449', - 'SHcy;': '\u0428', - 'shcy;': '\u0448', - 'ShortDownArrow;': '\u2193', - 'ShortLeftArrow;': '\u2190', - 'shortmid;': '\u2223', - 'shortparallel;': '\u2225', - 'ShortRightArrow;': '\u2192', - 'ShortUpArrow;': '\u2191', - 'shy': '\xad', - 'shy;': '\xad', - 'Sigma;': '\u03a3', - 'sigma;': '\u03c3', - 'sigmaf;': '\u03c2', - 'sigmav;': '\u03c2', - 'sim;': '\u223c', - 'simdot;': '\u2a6a', - 'sime;': '\u2243', - 'simeq;': '\u2243', - 'simg;': '\u2a9e', - 'simgE;': '\u2aa0', - 'siml;': '\u2a9d', - 'simlE;': '\u2a9f', - 'simne;': '\u2246', - 'simplus;': '\u2a24', - 'simrarr;': '\u2972', - 'slarr;': '\u2190', - 'SmallCircle;': '\u2218', - 'smallsetminus;': '\u2216', - 'smashp;': '\u2a33', - 'smeparsl;': '\u29e4', - 'smid;': '\u2223', - 'smile;': '\u2323', - 'smt;': '\u2aaa', - 'smte;': '\u2aac', - 'smtes;': '\u2aac\ufe00', - 'SOFTcy;': '\u042c', - 'softcy;': '\u044c', - 'sol;': '/', - 'solb;': '\u29c4', - 'solbar;': '\u233f', - 'Sopf;': '\U0001d54a', - 'sopf;': '\U0001d564', - 'spades;': '\u2660', - 'spadesuit;': '\u2660', - 'spar;': '\u2225', - 'sqcap;': '\u2293', - 'sqcaps;': '\u2293\ufe00', - 'sqcup;': '\u2294', - 'sqcups;': '\u2294\ufe00', - 'Sqrt;': '\u221a', - 'sqsub;': '\u228f', - 'sqsube;': '\u2291', - 'sqsubset;': '\u228f', - 'sqsubseteq;': '\u2291', - 'sqsup;': '\u2290', - 'sqsupe;': '\u2292', - 'sqsupset;': '\u2290', - 'sqsupseteq;': '\u2292', - 'squ;': '\u25a1', - 'Square;': '\u25a1', - 'square;': '\u25a1', - 'SquareIntersection;': '\u2293', - 'SquareSubset;': '\u228f', - 'SquareSubsetEqual;': '\u2291', - 'SquareSuperset;': '\u2290', - 'SquareSupersetEqual;': '\u2292', - 'SquareUnion;': '\u2294', - 'squarf;': '\u25aa', - 'squf;': '\u25aa', - 'srarr;': '\u2192', - 'Sscr;': '\U0001d4ae', - 'sscr;': '\U0001d4c8', - 'ssetmn;': '\u2216', - 'ssmile;': '\u2323', - 'sstarf;': '\u22c6', - 'Star;': '\u22c6', - 'star;': '\u2606', - 'starf;': '\u2605', - 'straightepsilon;': '\u03f5', - 'straightphi;': '\u03d5', - 'strns;': '\xaf', - 'Sub;': '\u22d0', - 'sub;': '\u2282', - 'subdot;': '\u2abd', - 'subE;': '\u2ac5', - 'sube;': '\u2286', - 'subedot;': '\u2ac3', - 'submult;': '\u2ac1', - 'subnE;': '\u2acb', - 'subne;': '\u228a', - 'subplus;': '\u2abf', - 'subrarr;': '\u2979', - 'Subset;': '\u22d0', - 'subset;': '\u2282', - 'subseteq;': '\u2286', - 'subseteqq;': '\u2ac5', - 'SubsetEqual;': '\u2286', - 'subsetneq;': '\u228a', - 'subsetneqq;': '\u2acb', - 'subsim;': '\u2ac7', - 'subsub;': '\u2ad5', - 'subsup;': 
'\u2ad3', - 'succ;': '\u227b', - 'succapprox;': '\u2ab8', - 'succcurlyeq;': '\u227d', - 'Succeeds;': '\u227b', - 'SucceedsEqual;': '\u2ab0', - 'SucceedsSlantEqual;': '\u227d', - 'SucceedsTilde;': '\u227f', - 'succeq;': '\u2ab0', - 'succnapprox;': '\u2aba', - 'succneqq;': '\u2ab6', - 'succnsim;': '\u22e9', - 'succsim;': '\u227f', - 'SuchThat;': '\u220b', - 'Sum;': '\u2211', - 'sum;': '\u2211', - 'sung;': '\u266a', - 'sup1': '\xb9', - 'sup1;': '\xb9', - 'sup2': '\xb2', - 'sup2;': '\xb2', - 'sup3': '\xb3', - 'sup3;': '\xb3', - 'Sup;': '\u22d1', - 'sup;': '\u2283', - 'supdot;': '\u2abe', - 'supdsub;': '\u2ad8', - 'supE;': '\u2ac6', - 'supe;': '\u2287', - 'supedot;': '\u2ac4', - 'Superset;': '\u2283', - 'SupersetEqual;': '\u2287', - 'suphsol;': '\u27c9', - 'suphsub;': '\u2ad7', - 'suplarr;': '\u297b', - 'supmult;': '\u2ac2', - 'supnE;': '\u2acc', - 'supne;': '\u228b', - 'supplus;': '\u2ac0', - 'Supset;': '\u22d1', - 'supset;': '\u2283', - 'supseteq;': '\u2287', - 'supseteqq;': '\u2ac6', - 'supsetneq;': '\u228b', - 'supsetneqq;': '\u2acc', - 'supsim;': '\u2ac8', - 'supsub;': '\u2ad4', - 'supsup;': '\u2ad6', - 'swarhk;': '\u2926', - 'swArr;': '\u21d9', - 'swarr;': '\u2199', - 'swarrow;': '\u2199', - 'swnwar;': '\u292a', - 'szlig': '\xdf', - 'szlig;': '\xdf', - 'Tab;': '\t', - 'target;': '\u2316', - 'Tau;': '\u03a4', - 'tau;': '\u03c4', - 'tbrk;': '\u23b4', - 'Tcaron;': '\u0164', - 'tcaron;': '\u0165', - 'Tcedil;': '\u0162', - 'tcedil;': '\u0163', - 'Tcy;': '\u0422', - 'tcy;': '\u0442', - 'tdot;': '\u20db', - 'telrec;': '\u2315', - 'Tfr;': '\U0001d517', - 'tfr;': '\U0001d531', - 'there4;': '\u2234', - 'Therefore;': '\u2234', - 'therefore;': '\u2234', - 'Theta;': '\u0398', - 'theta;': '\u03b8', - 'thetasym;': '\u03d1', - 'thetav;': '\u03d1', - 'thickapprox;': '\u2248', - 'thicksim;': '\u223c', - 'ThickSpace;': '\u205f\u200a', - 'thinsp;': '\u2009', - 'ThinSpace;': '\u2009', - 'thkap;': '\u2248', - 'thksim;': '\u223c', - 'THORN': '\xde', - 'thorn': '\xfe', - 'THORN;': '\xde', - 'thorn;': '\xfe', - 'Tilde;': '\u223c', - 'tilde;': '\u02dc', - 'TildeEqual;': '\u2243', - 'TildeFullEqual;': '\u2245', - 'TildeTilde;': '\u2248', - 'times': '\xd7', - 'times;': '\xd7', - 'timesb;': '\u22a0', - 'timesbar;': '\u2a31', - 'timesd;': '\u2a30', - 'tint;': '\u222d', - 'toea;': '\u2928', - 'top;': '\u22a4', - 'topbot;': '\u2336', - 'topcir;': '\u2af1', - 'Topf;': '\U0001d54b', - 'topf;': '\U0001d565', - 'topfork;': '\u2ada', - 'tosa;': '\u2929', - 'tprime;': '\u2034', - 'TRADE;': '\u2122', - 'trade;': '\u2122', - 'triangle;': '\u25b5', - 'triangledown;': '\u25bf', - 'triangleleft;': '\u25c3', - 'trianglelefteq;': '\u22b4', - 'triangleq;': '\u225c', - 'triangleright;': '\u25b9', - 'trianglerighteq;': '\u22b5', - 'tridot;': '\u25ec', - 'trie;': '\u225c', - 'triminus;': '\u2a3a', - 'TripleDot;': '\u20db', - 'triplus;': '\u2a39', - 'trisb;': '\u29cd', - 'tritime;': '\u2a3b', - 'trpezium;': '\u23e2', - 'Tscr;': '\U0001d4af', - 'tscr;': '\U0001d4c9', - 'TScy;': '\u0426', - 'tscy;': '\u0446', - 'TSHcy;': '\u040b', - 'tshcy;': '\u045b', - 'Tstrok;': '\u0166', - 'tstrok;': '\u0167', - 'twixt;': '\u226c', - 'twoheadleftarrow;': '\u219e', - 'twoheadrightarrow;': '\u21a0', - 'Uacute': '\xda', - 'uacute': '\xfa', - 'Uacute;': '\xda', - 'uacute;': '\xfa', - 'Uarr;': '\u219f', - 'uArr;': '\u21d1', - 'uarr;': '\u2191', - 'Uarrocir;': '\u2949', - 'Ubrcy;': '\u040e', - 'ubrcy;': '\u045e', - 'Ubreve;': '\u016c', - 'ubreve;': '\u016d', - 'Ucirc': '\xdb', - 'ucirc': '\xfb', - 'Ucirc;': '\xdb', - 'ucirc;': '\xfb', - 'Ucy;': '\u0423', - 
'ucy;': '\u0443', - 'udarr;': '\u21c5', - 'Udblac;': '\u0170', - 'udblac;': '\u0171', - 'udhar;': '\u296e', - 'ufisht;': '\u297e', - 'Ufr;': '\U0001d518', - 'ufr;': '\U0001d532', - 'Ugrave': '\xd9', - 'ugrave': '\xf9', - 'Ugrave;': '\xd9', - 'ugrave;': '\xf9', - 'uHar;': '\u2963', - 'uharl;': '\u21bf', - 'uharr;': '\u21be', - 'uhblk;': '\u2580', - 'ulcorn;': '\u231c', - 'ulcorner;': '\u231c', - 'ulcrop;': '\u230f', - 'ultri;': '\u25f8', - 'Umacr;': '\u016a', - 'umacr;': '\u016b', - 'uml': '\xa8', - 'uml;': '\xa8', - 'UnderBar;': '_', - 'UnderBrace;': '\u23df', - 'UnderBracket;': '\u23b5', - 'UnderParenthesis;': '\u23dd', - 'Union;': '\u22c3', - 'UnionPlus;': '\u228e', - 'Uogon;': '\u0172', - 'uogon;': '\u0173', - 'Uopf;': '\U0001d54c', - 'uopf;': '\U0001d566', - 'UpArrow;': '\u2191', - 'Uparrow;': '\u21d1', - 'uparrow;': '\u2191', - 'UpArrowBar;': '\u2912', - 'UpArrowDownArrow;': '\u21c5', - 'UpDownArrow;': '\u2195', - 'Updownarrow;': '\u21d5', - 'updownarrow;': '\u2195', - 'UpEquilibrium;': '\u296e', - 'upharpoonleft;': '\u21bf', - 'upharpoonright;': '\u21be', - 'uplus;': '\u228e', - 'UpperLeftArrow;': '\u2196', - 'UpperRightArrow;': '\u2197', - 'Upsi;': '\u03d2', - 'upsi;': '\u03c5', - 'upsih;': '\u03d2', - 'Upsilon;': '\u03a5', - 'upsilon;': '\u03c5', - 'UpTee;': '\u22a5', - 'UpTeeArrow;': '\u21a5', - 'upuparrows;': '\u21c8', - 'urcorn;': '\u231d', - 'urcorner;': '\u231d', - 'urcrop;': '\u230e', - 'Uring;': '\u016e', - 'uring;': '\u016f', - 'urtri;': '\u25f9', - 'Uscr;': '\U0001d4b0', - 'uscr;': '\U0001d4ca', - 'utdot;': '\u22f0', - 'Utilde;': '\u0168', - 'utilde;': '\u0169', - 'utri;': '\u25b5', - 'utrif;': '\u25b4', - 'uuarr;': '\u21c8', - 'Uuml': '\xdc', - 'uuml': '\xfc', - 'Uuml;': '\xdc', - 'uuml;': '\xfc', - 'uwangle;': '\u29a7', - 'vangrt;': '\u299c', - 'varepsilon;': '\u03f5', - 'varkappa;': '\u03f0', - 'varnothing;': '\u2205', - 'varphi;': '\u03d5', - 'varpi;': '\u03d6', - 'varpropto;': '\u221d', - 'vArr;': '\u21d5', - 'varr;': '\u2195', - 'varrho;': '\u03f1', - 'varsigma;': '\u03c2', - 'varsubsetneq;': '\u228a\ufe00', - 'varsubsetneqq;': '\u2acb\ufe00', - 'varsupsetneq;': '\u228b\ufe00', - 'varsupsetneqq;': '\u2acc\ufe00', - 'vartheta;': '\u03d1', - 'vartriangleleft;': '\u22b2', - 'vartriangleright;': '\u22b3', - 'Vbar;': '\u2aeb', - 'vBar;': '\u2ae8', - 'vBarv;': '\u2ae9', - 'Vcy;': '\u0412', - 'vcy;': '\u0432', - 'VDash;': '\u22ab', - 'Vdash;': '\u22a9', - 'vDash;': '\u22a8', - 'vdash;': '\u22a2', - 'Vdashl;': '\u2ae6', - 'Vee;': '\u22c1', - 'vee;': '\u2228', - 'veebar;': '\u22bb', - 'veeeq;': '\u225a', - 'vellip;': '\u22ee', - 'Verbar;': '\u2016', - 'verbar;': '|', - 'Vert;': '\u2016', - 'vert;': '|', - 'VerticalBar;': '\u2223', - 'VerticalLine;': '|', - 'VerticalSeparator;': '\u2758', - 'VerticalTilde;': '\u2240', - 'VeryThinSpace;': '\u200a', - 'Vfr;': '\U0001d519', - 'vfr;': '\U0001d533', - 'vltri;': '\u22b2', - 'vnsub;': '\u2282\u20d2', - 'vnsup;': '\u2283\u20d2', - 'Vopf;': '\U0001d54d', - 'vopf;': '\U0001d567', - 'vprop;': '\u221d', - 'vrtri;': '\u22b3', - 'Vscr;': '\U0001d4b1', - 'vscr;': '\U0001d4cb', - 'vsubnE;': '\u2acb\ufe00', - 'vsubne;': '\u228a\ufe00', - 'vsupnE;': '\u2acc\ufe00', - 'vsupne;': '\u228b\ufe00', - 'Vvdash;': '\u22aa', - 'vzigzag;': '\u299a', - 'Wcirc;': '\u0174', - 'wcirc;': '\u0175', - 'wedbar;': '\u2a5f', - 'Wedge;': '\u22c0', - 'wedge;': '\u2227', - 'wedgeq;': '\u2259', - 'weierp;': '\u2118', - 'Wfr;': '\U0001d51a', - 'wfr;': '\U0001d534', - 'Wopf;': '\U0001d54e', - 'wopf;': '\U0001d568', - 'wp;': '\u2118', - 'wr;': '\u2240', - 'wreath;': 
'\u2240', - 'Wscr;': '\U0001d4b2', - 'wscr;': '\U0001d4cc', - 'xcap;': '\u22c2', - 'xcirc;': '\u25ef', - 'xcup;': '\u22c3', - 'xdtri;': '\u25bd', - 'Xfr;': '\U0001d51b', - 'xfr;': '\U0001d535', - 'xhArr;': '\u27fa', - 'xharr;': '\u27f7', - 'Xi;': '\u039e', - 'xi;': '\u03be', - 'xlArr;': '\u27f8', - 'xlarr;': '\u27f5', - 'xmap;': '\u27fc', - 'xnis;': '\u22fb', - 'xodot;': '\u2a00', - 'Xopf;': '\U0001d54f', - 'xopf;': '\U0001d569', - 'xoplus;': '\u2a01', - 'xotime;': '\u2a02', - 'xrArr;': '\u27f9', - 'xrarr;': '\u27f6', - 'Xscr;': '\U0001d4b3', - 'xscr;': '\U0001d4cd', - 'xsqcup;': '\u2a06', - 'xuplus;': '\u2a04', - 'xutri;': '\u25b3', - 'xvee;': '\u22c1', - 'xwedge;': '\u22c0', - 'Yacute': '\xdd', - 'yacute': '\xfd', - 'Yacute;': '\xdd', - 'yacute;': '\xfd', - 'YAcy;': '\u042f', - 'yacy;': '\u044f', - 'Ycirc;': '\u0176', - 'ycirc;': '\u0177', - 'Ycy;': '\u042b', - 'ycy;': '\u044b', - 'yen': '\xa5', - 'yen;': '\xa5', - 'Yfr;': '\U0001d51c', - 'yfr;': '\U0001d536', - 'YIcy;': '\u0407', - 'yicy;': '\u0457', - 'Yopf;': '\U0001d550', - 'yopf;': '\U0001d56a', - 'Yscr;': '\U0001d4b4', - 'yscr;': '\U0001d4ce', - 'YUcy;': '\u042e', - 'yucy;': '\u044e', - 'yuml': '\xff', - 'Yuml;': '\u0178', - 'yuml;': '\xff', - 'Zacute;': '\u0179', - 'zacute;': '\u017a', - 'Zcaron;': '\u017d', - 'zcaron;': '\u017e', - 'Zcy;': '\u0417', - 'zcy;': '\u0437', - 'Zdot;': '\u017b', - 'zdot;': '\u017c', - 'zeetrf;': '\u2128', - 'ZeroWidthSpace;': '\u200b', - 'Zeta;': '\u0396', - 'zeta;': '\u03b6', - 'Zfr;': '\u2128', - 'zfr;': '\U0001d537', - 'ZHcy;': '\u0416', - 'zhcy;': '\u0436', - 'zigrarr;': '\u21dd', - 'Zopf;': '\u2124', - 'zopf;': '\U0001d56b', - 'Zscr;': '\U0001d4b5', - 'zscr;': '\U0001d4cf', - 'zwj;': '\u200d', - 'zwnj;': '\u200c', -} - -# maps the Unicode codepoint to the HTML entity name -codepoint2name = {} - -# maps the HTML entity name to the character -# (or a character reference if the character is outside the Latin-1 range) -entitydefs = {} - -for (name, codepoint) in name2codepoint.items(): - codepoint2name[codepoint] = name - entitydefs[name] = chr(codepoint) - -del name, codepoint - diff --git a/pype/vendor/future/backports/html/parser.py b/pype/vendor/future/backports/html/parser.py deleted file mode 100644 index 7b8cdba613..0000000000 --- a/pype/vendor/future/backports/html/parser.py +++ /dev/null @@ -1,537 +0,0 @@ -"""A parser for HTML and XHTML. - -Backported for python-future from Python 3.3. -""" - -# This file is based on sgmllib.py, but the API is slightly different. - -# XXX There should be a way to distinguish between PCDATA (parsed -# character data -- the normal case), RCDATA (replaceable character -# data -- only char and entity references and end tags are special) -# and CDATA (character data -- only end tags are special). 
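For reference, the entities backport deleted above is nothing but the large name-to-codepoint table plus the two derived tables built from it at the end (codepoint2name and entitydefs). The same data ships with the standard library, so a minimal lookup sketch against html.entities (the module that replaces this vendored copy) looks like:

    from html.entities import name2codepoint, codepoint2name, entitydefs, html5

    name2codepoint["sum"]   # 8721, i.e. U+2211 N-ARY SUMMATION
    codepoint2name[0x2211]  # 'sum' -- the inverse table built in the loop above
    entitydefs["sum"]       # '\u2211' -- entity name mapped to its character
    html5["sup1;"]          # '\xb9' -- keys in the HTML5 table keep the ';'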
- -from __future__ import (absolute_import, division, - print_function, unicode_literals) -from future.builtins import * -from future.backports import _markupbase -import re -import warnings - -# Regular expressions used for parsing - -interesting_normal = re.compile('[&<]') -incomplete = re.compile('&[a-zA-Z#]') - -entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]') -charref = re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]') - -starttagopen = re.compile('<[a-zA-Z]') -piclose = re.compile('>') -commentclose = re.compile(r'--\s*>') -tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*') -# see http://www.w3.org/TR/html5/tokenization.html#tag-open-state -# and http://www.w3.org/TR/html5/tokenization.html#tag-name-state -tagfind_tolerant = re.compile('[a-zA-Z][^\t\n\r\f />\x00]*') -# Note: -# 1) the strict attrfind isn't really strict, but we can't make it -# correctly strict without breaking backward compatibility; -# 2) if you change attrfind remember to update locatestarttagend too; -# 3) if you change attrfind and/or locatestarttagend the parser will -# explode, so don't do it. -attrfind = re.compile( - r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*' - r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?') -attrfind_tolerant = re.compile( - r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*' - r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*') -locatestarttagend = re.compile(r""" - <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name - (?:\s+ # whitespace before attribute name - (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name - (?:\s*=\s* # value indicator - (?:'[^']*' # LITA-enclosed value - |\"[^\"]*\" # LIT-enclosed value - |[^'\">\s]+ # bare value - ) - )? - ) - )* - \s* # trailing whitespace -""", re.VERBOSE) -locatestarttagend_tolerant = re.compile(r""" - <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name - (?:[\s/]* # optional whitespace before attribute name - (?:(?<=['"\s/])[^\s/>][^\s/=>]* # attribute name - (?:\s*=+\s* # value indicator - (?:'[^']*' # LITA-enclosed value - |"[^"]*" # LIT-enclosed value - |(?!['"])[^>\s]* # bare value - ) - (?:\s*,)* # possibly followed by a comma - )?(?:\s|/(?!>))* - )* - )? - \s* # trailing whitespace -""", re.VERBOSE) -endendtag = re.compile('>') -# the HTML 5 spec, section 8.1.2.2, doesn't allow spaces between -# ') - - -class HTMLParseError(Exception): - """Exception raised for all parse errors.""" - - def __init__(self, msg, position=(None, None)): - assert msg - self.msg = msg - self.lineno = position[0] - self.offset = position[1] - - def __str__(self): - result = self.msg - if self.lineno is not None: - result = result + ", at line %d" % self.lineno - if self.offset is not None: - result = result + ", column %d" % (self.offset + 1) - return result - - -class HTMLParser(_markupbase.ParserBase): - """Find tags and other markup and call handler functions. - - Usage: - p = HTMLParser() - p.feed(data) - ... - p.close() - - Start tags are handled by calling self.handle_starttag() or - self.handle_startendtag(); end tags by self.handle_endtag(). The - data between tags is passed from the parser to the derived class - by calling self.handle_data() with the data as argument (the data - may be split up in arbitrary chunks). Entity references are - passed by calling self.handle_entityref() with the entity - reference as the argument. Numeric character references are - passed to self.handle_charref() with the string containing the - reference as the argument. 
- """ - - CDATA_CONTENT_ELEMENTS = ("script", "style") - - def __init__(self, strict=False): - """Initialize and reset this instance. - - If strict is set to False (the default) the parser will parse invalid - markup, otherwise it will raise an error. Note that the strict mode - is deprecated. - """ - if strict: - warnings.warn("The strict mode is deprecated.", - DeprecationWarning, stacklevel=2) - self.strict = strict - self.reset() - - def reset(self): - """Reset this instance. Loses all unprocessed data.""" - self.rawdata = '' - self.lasttag = '???' - self.interesting = interesting_normal - self.cdata_elem = None - _markupbase.ParserBase.reset(self) - - def feed(self, data): - r"""Feed data to the parser. - - Call this as often as you want, with as little or as much text - as you want (may include '\n'). - """ - self.rawdata = self.rawdata + data - self.goahead(0) - - def close(self): - """Handle any buffered data.""" - self.goahead(1) - - def error(self, message): - raise HTMLParseError(message, self.getpos()) - - __starttag_text = None - - def get_starttag_text(self): - """Return full source of start tag: '<...>'.""" - return self.__starttag_text - - def set_cdata_mode(self, elem): - self.cdata_elem = elem.lower() - self.interesting = re.compile(r'' % self.cdata_elem, re.I) - - def clear_cdata_mode(self): - self.interesting = interesting_normal - self.cdata_elem = None - - # Internal -- handle data as far as reasonable. May leave state - # and data to be processed by a subsequent call. If 'end' is - # true, force handling all data as if followed by EOF marker. - def goahead(self, end): - rawdata = self.rawdata - i = 0 - n = len(rawdata) - while i < n: - match = self.interesting.search(rawdata, i) # < or & - if match: - j = match.start() - else: - if self.cdata_elem: - break - j = n - if i < j: self.handle_data(rawdata[i:j]) - i = self.updatepos(i, j) - if i == n: break - startswith = rawdata.startswith - if startswith('<', i): - if starttagopen.match(rawdata, i): # < + letter - k = self.parse_starttag(i) - elif startswith("', i + 1) - if k < 0: - k = rawdata.find('<', i + 1) - if k < 0: - k = i + 1 - else: - k += 1 - self.handle_data(rawdata[i:k]) - i = self.updatepos(i, k) - elif startswith("&#", i): - match = charref.match(rawdata, i) - if match: - name = match.group()[2:-1] - self.handle_charref(name) - k = match.end() - if not startswith(';', k-1): - k = k - 1 - i = self.updatepos(i, k) - continue - else: - if ";" in rawdata[i:]: #bail by consuming &# - self.handle_data(rawdata[0:2]) - i = self.updatepos(i, 2) - break - elif startswith('&', i): - match = entityref.match(rawdata, i) - if match: - name = match.group(1) - self.handle_entityref(name) - k = match.end() - if not startswith(';', k-1): - k = k - 1 - i = self.updatepos(i, k) - continue - match = incomplete.match(rawdata, i) - if match: - # match.group() will contain at least 2 chars - if end and match.group() == rawdata[i:]: - if self.strict: - self.error("EOF in middle of entity or char ref") - else: - if k <= i: - k = n - i = self.updatepos(i, i + 1) - # incomplete - break - elif (i + 1) < n: - # not the end of the buffer, and can't be confused - # with some other construct - self.handle_data("&") - i = self.updatepos(i, i + 1) - else: - break - else: - assert 0, "interesting.search() lied" - # end while - if end and i < n and not self.cdata_elem: - self.handle_data(rawdata[i:n]) - i = self.updatepos(i, n) - self.rawdata = rawdata[i:] - - # Internal -- parse html declarations, return length or -1 if not terminated - # 
See w3.org/TR/html5/tokenization.html#markup-declaration-open-state - # See also parse_declaration in _markupbase - def parse_html_declaration(self, i): - rawdata = self.rawdata - assert rawdata[i:i+2] == ' - gtpos = rawdata.find('>', i+9) - if gtpos == -1: - return -1 - self.handle_decl(rawdata[i+2:gtpos]) - return gtpos+1 - else: - return self.parse_bogus_comment(i) - - # Internal -- parse bogus comment, return length or -1 if not terminated - # see http://www.w3.org/TR/html5/tokenization.html#bogus-comment-state - def parse_bogus_comment(self, i, report=1): - rawdata = self.rawdata - assert rawdata[i:i+2] in ('', i+2) - if pos == -1: - return -1 - if report: - self.handle_comment(rawdata[i+2:pos]) - return pos + 1 - - # Internal -- parse processing instr, return end or -1 if not terminated - def parse_pi(self, i): - rawdata = self.rawdata - assert rawdata[i:i+2] == ' - if not match: - return -1 - j = match.start() - self.handle_pi(rawdata[i+2: j]) - j = match.end() - return j - - # Internal -- handle starttag, return end or -1 if not terminated - def parse_starttag(self, i): - self.__starttag_text = None - endpos = self.check_for_whole_start_tag(i) - if endpos < 0: - return endpos - rawdata = self.rawdata - self.__starttag_text = rawdata[i:endpos] - - # Now parse the data between i+1 and j into a tag and attrs - attrs = [] - match = tagfind.match(rawdata, i+1) - assert match, 'unexpected call to parse_starttag()' - k = match.end() - self.lasttag = tag = match.group(1).lower() - while k < endpos: - if self.strict: - m = attrfind.match(rawdata, k) - else: - m = attrfind_tolerant.match(rawdata, k) - if not m: - break - attrname, rest, attrvalue = m.group(1, 2, 3) - if not rest: - attrvalue = None - elif attrvalue[:1] == '\'' == attrvalue[-1:] or \ - attrvalue[:1] == '"' == attrvalue[-1:]: - attrvalue = attrvalue[1:-1] - if attrvalue: - attrvalue = self.unescape(attrvalue) - attrs.append((attrname.lower(), attrvalue)) - k = m.end() - - end = rawdata[k:endpos].strip() - if end not in (">", "/>"): - lineno, offset = self.getpos() - if "\n" in self.__starttag_text: - lineno = lineno + self.__starttag_text.count("\n") - offset = len(self.__starttag_text) \ - - self.__starttag_text.rfind("\n") - else: - offset = offset + len(self.__starttag_text) - if self.strict: - self.error("junk characters in start tag: %r" - % (rawdata[k:endpos][:20],)) - self.handle_data(rawdata[i:endpos]) - return endpos - if end.endswith('/>'): - # XHTML-style empty tag: - self.handle_startendtag(tag, attrs) - else: - self.handle_starttag(tag, attrs) - if tag in self.CDATA_CONTENT_ELEMENTS: - self.set_cdata_mode(tag) - return endpos - - # Internal -- check to see if we have a complete starttag; return end - # or -1 if incomplete. 
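The tag-parsing internals above ultimately drive a small set of overridable callbacks (handle_starttag, handle_data, and friends). A minimal sketch of that callback API, written against the standard library's html.parser that replaces this vendored copy (the stdlib version has since dropped the deprecated strict mode seen above):

    from html.parser import HTMLParser

    class LinkCollector(HTMLParser):
        def __init__(self):
            super().__init__()
            self.links = []

        def handle_starttag(self, tag, attrs):
            # attrs is a list of (name, value) pairs, values already unescaped
            if tag == "a":
                self.links.extend(v for k, v in attrs if k == "href")

    p = LinkCollector()
    p.feed('<p>See <a href="https://example.com/">the example</a>.</p>')
    p.close()
    print(p.links)  # ['https://example.com/']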
- def check_for_whole_start_tag(self, i): - rawdata = self.rawdata - if self.strict: - m = locatestarttagend.match(rawdata, i) - else: - m = locatestarttagend_tolerant.match(rawdata, i) - if m: - j = m.end() - next = rawdata[j:j+1] - if next == ">": - return j + 1 - if next == "/": - if rawdata.startswith("/>", j): - return j + 2 - if rawdata.startswith("/", j): - # buffer boundary - return -1 - # else bogus input - if self.strict: - self.updatepos(i, j + 1) - self.error("malformed empty start tag") - if j > i: - return j - else: - return i + 1 - if next == "": - # end of input - return -1 - if next in ("abcdefghijklmnopqrstuvwxyz=/" - "ABCDEFGHIJKLMNOPQRSTUVWXYZ"): - # end of input in or before attribute value, or we have the - # '/' from a '/>' ending - return -1 - if self.strict: - self.updatepos(i, j) - self.error("malformed start tag") - if j > i: - return j - else: - return i + 1 - raise AssertionError("we should not get here!") - - # Internal -- parse endtag, return end or -1 if incomplete - def parse_endtag(self, i): - rawdata = self.rawdata - assert rawdata[i:i+2] == " - if not match: - return -1 - gtpos = match.end() - match = endtagfind.match(rawdata, i) # - if not match: - if self.cdata_elem is not None: - self.handle_data(rawdata[i:gtpos]) - return gtpos - if self.strict: - self.error("bad end tag: %r" % (rawdata[i:gtpos],)) - # find the name: w3.org/TR/html5/tokenization.html#tag-name-state - namematch = tagfind_tolerant.match(rawdata, i+2) - if not namematch: - # w3.org/TR/html5/tokenization.html#end-tag-open-state - if rawdata[i:i+3] == '': - return i+3 - else: - return self.parse_bogus_comment(i) - tagname = namematch.group().lower() - # consume and ignore other stuff between the name and the > - # Note: this is not 100% correct, since we might have things like - # , but looking for > after tha name should cover - # most of the cases and is much simpler - gtpos = rawdata.find('>', namematch.end()) - self.handle_endtag(tagname) - return gtpos+1 - - elem = match.group(1).lower() # script or style - if self.cdata_elem is not None: - if elem != self.cdata_elem: - self.handle_data(rawdata[i:gtpos]) - return gtpos - - self.handle_endtag(elem.lower()) - self.clear_cdata_mode() - return gtpos - - # Overridable -- finish processing of start+end tag: - def handle_startendtag(self, tag, attrs): - self.handle_starttag(tag, attrs) - self.handle_endtag(tag) - - # Overridable -- handle start tag - def handle_starttag(self, tag, attrs): - pass - - # Overridable -- handle end tag - def handle_endtag(self, tag): - pass - - # Overridable -- handle character reference - def handle_charref(self, name): - pass - - # Overridable -- handle entity reference - def handle_entityref(self, name): - pass - - # Overridable -- handle data - def handle_data(self, data): - pass - - # Overridable -- handle comment - def handle_comment(self, data): - pass - - # Overridable -- handle declaration - def handle_decl(self, decl): - pass - - # Overridable -- handle processing instruction - def handle_pi(self, data): - pass - - def unknown_decl(self, data): - if self.strict: - self.error("unknown declaration: %r" % (data,)) - - # Internal -- helper to remove special character quoting - def unescape(self, s): - if '&' not in s: - return s - def replaceEntities(s): - s = s.groups()[0] - try: - if s[0] == "#": - s = s[1:] - if s[0] in ['x','X']: - c = int(s[1:].rstrip(';'), 16) - else: - c = int(s.rstrip(';')) - return chr(c) - except ValueError: - return '&#' + s - else: - from future.backports.html.entities import 
html5 - if s in html5: - return html5[s] - elif s.endswith(';'): - return '&' + s - for x in range(2, len(s)): - if s[:x] in html5: - return html5[s[:x]] + s[x:] - else: - return '&' + s - - return re.sub(r"&(#?[xX]?(?:[0-9a-fA-F]+;|\w{1,32};?))", - replaceEntities, s) - diff --git a/pype/vendor/future/backports/http/__init__.py b/pype/vendor/future/backports/http/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/future/backports/http/client.py b/pype/vendor/future/backports/http/client.py deleted file mode 100644 index 5dd983d808..0000000000 --- a/pype/vendor/future/backports/http/client.py +++ /dev/null @@ -1,1332 +0,0 @@ -"""HTTP/1.1 client library - -A backport of the Python 3.3 http/client.py module for python-future. - - - - -HTTPConnection goes through a number of "states", which define when a client -may legally make another request or fetch the response for a particular -request. This diagram details these state transitions: - - (null) - | - | HTTPConnection() - v - Idle - | - | putrequest() - v - Request-started - | - | ( putheader() )* endheaders() - v - Request-sent - | - | response = getresponse() - v - Unread-response [Response-headers-read] - |\____________________ - | | - | response.read() | putrequest() - v v - Idle Req-started-unread-response - ______/| - / | - response.read() | | ( putheader() )* endheaders() - v v - Request-started Req-sent-unread-response - | - | response.read() - v - Request-sent - -This diagram presents the following rules: - -- a second request may not be started until {response-headers-read} - -- a response [object] cannot be retrieved until {request-sent} - -- there is no differentiation between an unread response body and a - partially read response body - -Note: this enforcement is applied by the HTTPConnection class. The - HTTPResponse class does not enforce this state machine, which - implies sophisticated clients may accelerate the request/response - pipeline. Caution should be taken, though: accelerating the states - beyond the above pattern may imply knowledge of the server's - connection-close behavior for certain requests. For example, it - is impossible to tell whether the server will close the connection - UNTIL the response headers have been read; this means that further - requests cannot be placed into the pipeline until it is known that - the server will NOT be closing the connection. 
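In practice the putrequest()/putheader()/endheaders()/getresponse() transitions described above are usually driven through the request() convenience wrapper. A minimal sketch of one full Idle -> Request-sent -> Idle cycle, using the standard library's http.client that this backport tracks:

    from http.client import HTTPSConnection

    conn = HTTPSConnection("example.com", timeout=10)
    # request() performs putrequest(), putheader()s and endheaders() in one step
    conn.request("GET", "/", headers={"Accept": "text/html"})
    resp = conn.getresponse()        # Request-sent -> Unread-response
    print(resp.status, resp.reason)  # e.g. 200 OK
    body = resp.read()               # draining the body returns the state to Idle
    conn.close()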
- -Logical State __state __response -------------- ------- ---------- -Idle _CS_IDLE None -Request-started _CS_REQ_STARTED None -Request-sent _CS_REQ_SENT None -Unread-response _CS_IDLE -Req-started-unread-response _CS_REQ_STARTED -Req-sent-unread-response _CS_REQ_SENT -""" - -from __future__ import (absolute_import, division, - print_function, unicode_literals) -from future.builtins import bytes, int, str, super -from future.utils import PY2 - -from future.backports.email import parser as email_parser -from future.backports.email import message as email_message -from future.backports.misc import create_connection as socket_create_connection -import io -import os -import socket -import collections -from future.backports.urllib.parse import urlsplit -import warnings -from array import array - -__all__ = ["HTTPResponse", "HTTPConnection", - "HTTPException", "NotConnected", "UnknownProtocol", - "UnknownTransferEncoding", "UnimplementedFileMode", - "IncompleteRead", "InvalidURL", "ImproperConnectionState", - "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", - "BadStatusLine", "error", "responses"] - -HTTP_PORT = 80 -HTTPS_PORT = 443 - -_UNKNOWN = 'UNKNOWN' - -# connection states -_CS_IDLE = 'Idle' -_CS_REQ_STARTED = 'Request-started' -_CS_REQ_SENT = 'Request-sent' - -# status codes -# informational -CONTINUE = 100 -SWITCHING_PROTOCOLS = 101 -PROCESSING = 102 - -# successful -OK = 200 -CREATED = 201 -ACCEPTED = 202 -NON_AUTHORITATIVE_INFORMATION = 203 -NO_CONTENT = 204 -RESET_CONTENT = 205 -PARTIAL_CONTENT = 206 -MULTI_STATUS = 207 -IM_USED = 226 - -# redirection -MULTIPLE_CHOICES = 300 -MOVED_PERMANENTLY = 301 -FOUND = 302 -SEE_OTHER = 303 -NOT_MODIFIED = 304 -USE_PROXY = 305 -TEMPORARY_REDIRECT = 307 - -# client error -BAD_REQUEST = 400 -UNAUTHORIZED = 401 -PAYMENT_REQUIRED = 402 -FORBIDDEN = 403 -NOT_FOUND = 404 -METHOD_NOT_ALLOWED = 405 -NOT_ACCEPTABLE = 406 -PROXY_AUTHENTICATION_REQUIRED = 407 -REQUEST_TIMEOUT = 408 -CONFLICT = 409 -GONE = 410 -LENGTH_REQUIRED = 411 -PRECONDITION_FAILED = 412 -REQUEST_ENTITY_TOO_LARGE = 413 -REQUEST_URI_TOO_LONG = 414 -UNSUPPORTED_MEDIA_TYPE = 415 -REQUESTED_RANGE_NOT_SATISFIABLE = 416 -EXPECTATION_FAILED = 417 -UNPROCESSABLE_ENTITY = 422 -LOCKED = 423 -FAILED_DEPENDENCY = 424 -UPGRADE_REQUIRED = 426 -PRECONDITION_REQUIRED = 428 -TOO_MANY_REQUESTS = 429 -REQUEST_HEADER_FIELDS_TOO_LARGE = 431 - -# server error -INTERNAL_SERVER_ERROR = 500 -NOT_IMPLEMENTED = 501 -BAD_GATEWAY = 502 -SERVICE_UNAVAILABLE = 503 -GATEWAY_TIMEOUT = 504 -HTTP_VERSION_NOT_SUPPORTED = 505 -INSUFFICIENT_STORAGE = 507 -NOT_EXTENDED = 510 -NETWORK_AUTHENTICATION_REQUIRED = 511 - -# Mapping status codes to official W3C names -responses = { - 100: 'Continue', - 101: 'Switching Protocols', - - 200: 'OK', - 201: 'Created', - 202: 'Accepted', - 203: 'Non-Authoritative Information', - 204: 'No Content', - 205: 'Reset Content', - 206: 'Partial Content', - - 300: 'Multiple Choices', - 301: 'Moved Permanently', - 302: 'Found', - 303: 'See Other', - 304: 'Not Modified', - 305: 'Use Proxy', - 306: '(Unused)', - 307: 'Temporary Redirect', - - 400: 'Bad Request', - 401: 'Unauthorized', - 402: 'Payment Required', - 403: 'Forbidden', - 404: 'Not Found', - 405: 'Method Not Allowed', - 406: 'Not Acceptable', - 407: 'Proxy Authentication Required', - 408: 'Request Timeout', - 409: 'Conflict', - 410: 'Gone', - 411: 'Length Required', - 412: 'Precondition Failed', - 413: 'Request Entity Too Large', - 414: 'Request-URI Too Long', - 415: 'Unsupported Media Type', - 416: 'Requested Range Not 
Satisfiable', - 417: 'Expectation Failed', - 428: 'Precondition Required', - 429: 'Too Many Requests', - 431: 'Request Header Fields Too Large', - - 500: 'Internal Server Error', - 501: 'Not Implemented', - 502: 'Bad Gateway', - 503: 'Service Unavailable', - 504: 'Gateway Timeout', - 505: 'HTTP Version Not Supported', - 511: 'Network Authentication Required', -} - -# maximal amount of data to read at one time in _safe_read -MAXAMOUNT = 1048576 - -# maximal line length when calling readline(). -_MAXLINE = 65536 -_MAXHEADERS = 100 - - -class HTTPMessage(email_message.Message): - # XXX The only usage of this method is in - # http.server.CGIHTTPRequestHandler. Maybe move the code there so - # that it doesn't need to be part of the public API. The API has - # never been defined so this could cause backwards compatibility - # issues. - - def getallmatchingheaders(self, name): - """Find all header lines matching a given header name. - - Look through the list of headers and find all lines matching a given - header name (and their continuation lines). A list of the lines is - returned, without interpretation. If the header does not occur, an - empty list is returned. If the header occurs multiple times, all - occurrences are returned. Case is not important in the header name. - - """ - name = name.lower() + ':' - n = len(name) - lst = [] - hit = 0 - for line in self.keys(): - if line[:n].lower() == name: - hit = 1 - elif not line[:1].isspace(): - hit = 0 - if hit: - lst.append(line) - return lst - -def parse_headers(fp, _class=HTTPMessage): - """Parses only RFC2822 headers from a file pointer. - - email Parser wants to see strings rather than bytes. - But a TextIOWrapper around self.rfile would buffer too many bytes - from the stream, bytes which we later need to read as bytes. - So we read the correct bytes here, as bytes, for email Parser - to parse. - - """ - headers = [] - while True: - line = fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("header line") - headers.append(line) - if len(headers) > _MAXHEADERS: - raise HTTPException("got more than %d headers" % _MAXHEADERS) - if line in (b'\r\n', b'\n', b''): - break - hstring = bytes(b'').join(headers).decode('iso-8859-1') - return email_parser.Parser(_class=_class).parsestr(hstring) - - -_strict_sentinel = object() - -class HTTPResponse(io.RawIOBase): - - # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. - - # The bytes from the socket object are iso-8859-1 strings. - # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded - # text following RFC 2047. The basic status line parsing only - # accepts iso-8859-1. - - def __init__(self, sock, debuglevel=0, strict=_strict_sentinel, method=None, url=None): - # If the response includes a content-length header, we need to - # make sure that the client doesn't read more than the - # specified number of bytes. If it does, it will block until - # the server times out and closes the connection. This will - # happen if a self.fp.read() is done (without a size) whether - # self.fp is buffered or not. So, no self.fp.read() by - # clients unless they know what they are doing. - self.fp = sock.makefile("rb") - self.debuglevel = debuglevel - if strict is not _strict_sentinel: - warnings.warn("the 'strict' argument isn't supported anymore; " - "http.client now always assumes HTTP/1.x compliant servers.", - DeprecationWarning, 2) - self._method = method - - # The HTTPResponse object is returned via urllib. 
The clients - # of http and urllib expect different attributes for the - # headers. headers is used here and supports urllib. msg is - # provided as a backwards compatibility layer for http - # clients. - - self.headers = self.msg = None - - # from the Status-Line of the response - self.version = _UNKNOWN # HTTP-Version - self.status = _UNKNOWN # Status-Code - self.reason = _UNKNOWN # Reason-Phrase - - self.chunked = _UNKNOWN # is "chunked" being used? - self.chunk_left = _UNKNOWN # bytes left to read in current chunk - self.length = _UNKNOWN # number of bytes left in response - self.will_close = _UNKNOWN # conn will close at end of response - - def _read_status(self): - line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1") - if len(line) > _MAXLINE: - raise LineTooLong("status line") - if self.debuglevel > 0: - print("reply:", repr(line)) - if not line: - # Presumably, the server closed the connection before - # sending a valid response. - raise BadStatusLine(line) - try: - version, status, reason = line.split(None, 2) - except ValueError: - try: - version, status = line.split(None, 1) - reason = "" - except ValueError: - # empty version will cause next test to fail. - version = "" - if not version.startswith("HTTP/"): - self._close_conn() - raise BadStatusLine(line) - - # The status code is a three-digit number - try: - status = int(status) - if status < 100 or status > 999: - raise BadStatusLine(line) - except ValueError: - raise BadStatusLine(line) - return version, status, reason - - def begin(self): - if self.headers is not None: - # we've already started reading the response - return - - # read until we get a non-100 response - while True: - version, status, reason = self._read_status() - if status != CONTINUE: - break - # skip the header from the 100 response - while True: - skip = self.fp.readline(_MAXLINE + 1) - if len(skip) > _MAXLINE: - raise LineTooLong("header line") - skip = skip.strip() - if not skip: - break - if self.debuglevel > 0: - print("header:", skip) - - self.code = self.status = status - self.reason = reason.strip() - if version in ("HTTP/1.0", "HTTP/0.9"): - # Some servers might still return "0.9", treat it as 1.0 anyway - self.version = 10 - elif version.startswith("HTTP/1."): - self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1 - else: - raise UnknownProtocol(version) - - self.headers = self.msg = parse_headers(self.fp) - - if self.debuglevel > 0: - for hdr in self.headers: - print("header:", hdr, end=" ") - - # are we using the chunked-style of transfer encoding? - tr_enc = self.headers.get("transfer-encoding") - if tr_enc and tr_enc.lower() == "chunked": - self.chunked = True - self.chunk_left = None - else: - self.chunked = False - - # will the connection close at the end of the response? - self.will_close = self._check_close() - - # do we have a Content-Length? - # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" - self.length = None - length = self.headers.get("content-length") - - # are we using the chunked-style of transfer encoding? - tr_enc = self.headers.get("transfer-encoding") - if length and not self.chunked: - try: - self.length = int(length) - except ValueError: - self.length = None - else: - if self.length < 0: # ignore nonsensical negative lengths - self.length = None - else: - self.length = None - - # does the body have a fixed length? 
(of zero) - if (status == NO_CONTENT or status == NOT_MODIFIED or - 100 <= status < 200 or # 1xx codes - self._method == "HEAD"): - self.length = 0 - - # if the connection remains open, and we aren't using chunked, and - # a content-length was not provided, then assume that the connection - # WILL close. - if (not self.will_close and - not self.chunked and - self.length is None): - self.will_close = True - - def _check_close(self): - conn = self.headers.get("connection") - if self.version == 11: - # An HTTP/1.1 proxy is assumed to stay open unless - # explicitly closed. - conn = self.headers.get("connection") - if conn and "close" in conn.lower(): - return True - return False - - # Some HTTP/1.0 implementations have support for persistent - # connections, using rules different than HTTP/1.1. - - # For older HTTP, Keep-Alive indicates persistent connection. - if self.headers.get("keep-alive"): - return False - - # At least Akamai returns a "Connection: Keep-Alive" header, - # which was supposed to be sent by the client. - if conn and "keep-alive" in conn.lower(): - return False - - # Proxy-Connection is a netscape hack. - pconn = self.headers.get("proxy-connection") - if pconn and "keep-alive" in pconn.lower(): - return False - - # otherwise, assume it will close - return True - - def _close_conn(self): - fp = self.fp - self.fp = None - fp.close() - - def close(self): - super().close() # set "closed" flag - if self.fp: - self._close_conn() - - # These implementations are for the benefit of io.BufferedReader. - - # XXX This class should probably be revised to act more like - # the "raw stream" that BufferedReader expects. - - def flush(self): - super().flush() - if self.fp: - self.fp.flush() - - def readable(self): - return True - - # End of "raw stream" methods - - def isclosed(self): - """True if the connection is closed.""" - # NOTE: it is possible that we will not ever call self.close(). This - # case occurs when will_close is TRUE, length is None, and we - # read up to the last byte, but NOT past it. - # - # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be - # called, meaning self.isclosed() is meaningful. 
- return self.fp is None - - def read(self, amt=None): - if self.fp is None: - return bytes(b"") - - if self._method == "HEAD": - self._close_conn() - return bytes(b"") - - if amt is not None: - # Amount is given, so call base class version - # (which is implemented in terms of self.readinto) - return bytes(super(HTTPResponse, self).read(amt)) - else: - # Amount is not given (unbounded read) so we must check self.length - # and self.chunked - - if self.chunked: - return self._readall_chunked() - - if self.length is None: - s = self.fp.read() - else: - try: - s = self._safe_read(self.length) - except IncompleteRead: - self._close_conn() - raise - self.length = 0 - self._close_conn() # we read everything - return bytes(s) - - def readinto(self, b): - if self.fp is None: - return 0 - - if self._method == "HEAD": - self._close_conn() - return 0 - - if self.chunked: - return self._readinto_chunked(b) - - if self.length is not None: - if len(b) > self.length: - # clip the read to the "end of response" - b = memoryview(b)[0:self.length] - - # we do not use _safe_read() here because this may be a .will_close - # connection, and the user is reading more bytes than will be provided - # (for example, reading in 1k chunks) - - if PY2: - data = self.fp.read(len(b)) - n = len(data) - b[:n] = data - else: - n = self.fp.readinto(b) - - if not n and b: - # Ideally, we would raise IncompleteRead if the content-length - # wasn't satisfied, but it might break compatibility. - self._close_conn() - elif self.length is not None: - self.length -= n - if not self.length: - self._close_conn() - return n - - def _read_next_chunk_size(self): - # Read the next chunk size from the file - line = self.fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("chunk size") - i = line.find(b";") - if i >= 0: - line = line[:i] # strip chunk-extensions - try: - return int(line, 16) - except ValueError: - # close the connection as protocol synchronisation is - # probably lost - self._close_conn() - raise - - def _read_and_discard_trailer(self): - # read and discard trailer up to the CRLF terminator - ### note: we shouldn't have any trailers! 
- while True: - line = self.fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("trailer line") - if not line: - # a vanishingly small number of sites EOF without - # sending the trailer - break - if line in (b'\r\n', b'\n', b''): - break - - def _readall_chunked(self): - assert self.chunked != _UNKNOWN - chunk_left = self.chunk_left - value = [] - while True: - if chunk_left is None: - try: - chunk_left = self._read_next_chunk_size() - if chunk_left == 0: - break - except ValueError: - raise IncompleteRead(bytes(b'').join(value)) - value.append(self._safe_read(chunk_left)) - - # we read the whole chunk, get another - self._safe_read(2) # toss the CRLF at the end of the chunk - chunk_left = None - - self._read_and_discard_trailer() - - # we read everything; close the "file" - self._close_conn() - - return bytes(b'').join(value) - - def _readinto_chunked(self, b): - assert self.chunked != _UNKNOWN - chunk_left = self.chunk_left - - total_bytes = 0 - mvb = memoryview(b) - while True: - if chunk_left is None: - try: - chunk_left = self._read_next_chunk_size() - if chunk_left == 0: - break - except ValueError: - raise IncompleteRead(bytes(b[0:total_bytes])) - - if len(mvb) < chunk_left: - n = self._safe_readinto(mvb) - self.chunk_left = chunk_left - n - return total_bytes + n - elif len(mvb) == chunk_left: - n = self._safe_readinto(mvb) - self._safe_read(2) # toss the CRLF at the end of the chunk - self.chunk_left = None - return total_bytes + n - else: - temp_mvb = mvb[0:chunk_left] - n = self._safe_readinto(temp_mvb) - mvb = mvb[n:] - total_bytes += n - - # we read the whole chunk, get another - self._safe_read(2) # toss the CRLF at the end of the chunk - chunk_left = None - - self._read_and_discard_trailer() - - # we read everything; close the "file" - self._close_conn() - - return total_bytes - - def _safe_read(self, amt): - """Read the number of bytes requested, compensating for partial reads. - - Normally, we have a blocking socket, but a read() can be interrupted - by a signal (resulting in a partial read). - - Note that we cannot distinguish between EOF and an interrupt when zero - bytes have been read. IncompleteRead() will be raised in this - situation. - - This function should be used when bytes "should" be present for - reading. If the bytes are truly not available (due to EOF), then the - IncompleteRead exception can be used to detect the problem. 
- """ - s = [] - while amt > 0: - chunk = self.fp.read(min(amt, MAXAMOUNT)) - if not chunk: - raise IncompleteRead(bytes(b'').join(s), amt) - s.append(chunk) - amt -= len(chunk) - return bytes(b"").join(s) - - def _safe_readinto(self, b): - """Same as _safe_read, but for reading into a buffer.""" - total_bytes = 0 - mvb = memoryview(b) - while total_bytes < len(b): - if MAXAMOUNT < len(mvb): - temp_mvb = mvb[0:MAXAMOUNT] - n = self.fp.readinto(temp_mvb) - else: - n = self.fp.readinto(mvb) - if not n: - raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) - mvb = mvb[n:] - total_bytes += n - return total_bytes - - def fileno(self): - return self.fp.fileno() - - def getheader(self, name, default=None): - if self.headers is None: - raise ResponseNotReady() - headers = self.headers.get_all(name) or default - if isinstance(headers, str) or not hasattr(headers, '__iter__'): - return headers - else: - return ', '.join(headers) - - def getheaders(self): - """Return list of (header, value) tuples.""" - if self.headers is None: - raise ResponseNotReady() - return list(self.headers.items()) - - # We override IOBase.__iter__ so that it doesn't check for closed-ness - - def __iter__(self): - return self - - # For compatibility with old-style urllib responses. - - def info(self): - return self.headers - - def geturl(self): - return self.url - - def getcode(self): - return self.status - -class HTTPConnection(object): - - _http_vsn = 11 - _http_vsn_str = 'HTTP/1.1' - - response_class = HTTPResponse - default_port = HTTP_PORT - auto_open = 1 - debuglevel = 0 - - def __init__(self, host, port=None, strict=_strict_sentinel, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None): - if strict is not _strict_sentinel: - warnings.warn("the 'strict' argument isn't supported anymore; " - "http.client now always assumes HTTP/1.x compliant servers.", - DeprecationWarning, 2) - self.timeout = timeout - self.source_address = source_address - self.sock = None - self._buffer = [] - self.__response = None - self.__state = _CS_IDLE - self._method = None - self._tunnel_host = None - self._tunnel_port = None - self._tunnel_headers = {} - - self._set_hostport(host, port) - - def set_tunnel(self, host, port=None, headers=None): - """ Sets up the host and the port for the HTTP CONNECT Tunnelling. - - The headers argument should be a mapping of extra HTTP headers - to send with the CONNECT request. - """ - self._tunnel_host = host - self._tunnel_port = port - if headers: - self._tunnel_headers = headers - else: - self._tunnel_headers.clear() - - def _set_hostport(self, host, port): - if port is None: - i = host.rfind(':') - j = host.rfind(']') # ipv6 addresses have [...] 
- if i > j: - try: - port = int(host[i+1:]) - except ValueError: - if host[i+1:] == "": # http://foo.com:/ == http://foo.com/ - port = self.default_port - else: - raise InvalidURL("nonnumeric port: '%s'" % host[i+1:]) - host = host[:i] - else: - port = self.default_port - if host and host[0] == '[' and host[-1] == ']': - host = host[1:-1] - self.host = host - self.port = port - - def set_debuglevel(self, level): - self.debuglevel = level - - def _tunnel(self): - self._set_hostport(self._tunnel_host, self._tunnel_port) - connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port) - connect_bytes = connect_str.encode("ascii") - self.send(connect_bytes) - for header, value in self._tunnel_headers.items(): - header_str = "%s: %s\r\n" % (header, value) - header_bytes = header_str.encode("latin-1") - self.send(header_bytes) - self.send(bytes(b'\r\n')) - - response = self.response_class(self.sock, method=self._method) - (version, code, message) = response._read_status() - - if code != 200: - self.close() - raise socket.error("Tunnel connection failed: %d %s" % (code, - message.strip())) - while True: - line = response.fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("header line") - if not line: - # for sites which EOF without sending a trailer - break - if line in (b'\r\n', b'\n', b''): - break - - def connect(self): - """Connect to the host and port specified in __init__.""" - self.sock = socket_create_connection((self.host,self.port), - self.timeout, self.source_address) - if self._tunnel_host: - self._tunnel() - - def close(self): - """Close the connection to the HTTP server.""" - if self.sock: - self.sock.close() # close it manually... there may be other refs - self.sock = None - if self.__response: - self.__response.close() - self.__response = None - self.__state = _CS_IDLE - - def send(self, data): - """Send `data' to the server. - ``data`` can be a string object, a bytes object, an array object, a - file-like object that supports a .read() method, or an iterable object. - """ - - if self.sock is None: - if self.auto_open: - self.connect() - else: - raise NotConnected() - - if self.debuglevel > 0: - print("send:", repr(data)) - blocksize = 8192 - # Python 2.7 array objects have a read method which is incompatible - # with the 2-arg calling syntax below. - if hasattr(data, "read") and not isinstance(data, array): - if self.debuglevel > 0: - print("sendIng a read()able") - encode = False - try: - mode = data.mode - except AttributeError: - # io.BytesIO and other file-like objects don't have a `mode` - # attribute. - pass - else: - if "b" not in mode: - encode = True - if self.debuglevel > 0: - print("encoding file using iso-8859-1") - while 1: - datablock = data.read(blocksize) - if not datablock: - break - if encode: - datablock = datablock.encode("iso-8859-1") - self.sock.sendall(datablock) - return - try: - self.sock.sendall(data) - except TypeError: - if isinstance(data, collections.Iterable): - for d in data: - self.sock.sendall(d) - else: - raise TypeError("data should be a bytes-like object " - "or an iterable, got %r" % type(data)) - - def _output(self, s): - """Add a line of output to the current request buffer. - - Assumes that the line does *not* end with \\r\\n. - """ - self._buffer.append(s) - - def _send_output(self, message_body=None): - """Send the currently buffered request and clear the buffer. - - Appends an extra \\r\\n to the buffer. - A message_body may be specified, to be appended to the request. 
- """ - self._buffer.extend((bytes(b""), bytes(b""))) - msg = bytes(b"\r\n").join(self._buffer) - del self._buffer[:] - # If msg and message_body are sent in a single send() call, - # it will avoid performance problems caused by the interaction - # between delayed ack and the Nagle algorithm. - if isinstance(message_body, bytes): - msg += message_body - message_body = None - self.send(msg) - if message_body is not None: - # message_body was not a string (i.e. it is a file), and - # we must run the risk of Nagle. - self.send(message_body) - - def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): - """Send a request to the server. - - `method' specifies an HTTP request method, e.g. 'GET'. - `url' specifies the object being requested, e.g. '/index.html'. - `skip_host' if True does not add automatically a 'Host:' header - `skip_accept_encoding' if True does not add automatically an - 'Accept-Encoding:' header - """ - - # if a prior response has been completed, then forget about it. - if self.__response and self.__response.isclosed(): - self.__response = None - - - # in certain cases, we cannot issue another request on this connection. - # this occurs when: - # 1) we are in the process of sending a request. (_CS_REQ_STARTED) - # 2) a response to a previous request has signalled that it is going - # to close the connection upon completion. - # 3) the headers for the previous response have not been read, thus - # we cannot determine whether point (2) is true. (_CS_REQ_SENT) - # - # if there is no prior response, then we can request at will. - # - # if point (2) is true, then we will have passed the socket to the - # response (effectively meaning, "there is no prior response"), and - # will open a new one when a new request is made. - # - # Note: if a prior response exists, then we *can* start a new request. - # We are not allowed to begin fetching the response to this new - # request, however, until that prior response is complete. - # - if self.__state == _CS_IDLE: - self.__state = _CS_REQ_STARTED - else: - raise CannotSendRequest(self.__state) - - # Save the method we use, we need it later in the response phase - self._method = method - if not url: - url = '/' - request = '%s %s %s' % (method, url, self._http_vsn_str) - - # Non-ASCII characters should have been eliminated earlier - self._output(request.encode('ascii')) - - if self._http_vsn == 11: - # Issue some standard headers for better HTTP/1.1 compliance - - if not skip_host: - # this header is issued *only* for HTTP/1.1 - # connections. more specifically, this means it is - # only issued when the client uses the new - # HTTPConnection() class. backwards-compat clients - # will be using HTTP/1.0 and those clients may be - # issuing this header themselves. we should NOT issue - # it twice; some web servers (such as Apache) barf - # when they see two Host: headers - - # If we need a non-standard port,include it in the - # header. If the request is going through a proxy, - # but the host of the actual URL, not the host of the - # proxy. 
- - netloc = '' - if url.startswith('http'): - nil, netloc, nil, nil, nil = urlsplit(url) - - if netloc: - try: - netloc_enc = netloc.encode("ascii") - except UnicodeEncodeError: - netloc_enc = netloc.encode("idna") - self.putheader('Host', netloc_enc) - else: - try: - host_enc = self.host.encode("ascii") - except UnicodeEncodeError: - host_enc = self.host.encode("idna") - - # As per RFC 273, IPv6 address should be wrapped with [] - # when used as Host header - - if self.host.find(':') >= 0: - host_enc = bytes(b'[' + host_enc + b']') - - if self.port == self.default_port: - self.putheader('Host', host_enc) - else: - host_enc = host_enc.decode("ascii") - self.putheader('Host', "%s:%s" % (host_enc, self.port)) - - # note: we are assuming that clients will not attempt to set these - # headers since *this* library must deal with the - # consequences. this also means that when the supporting - # libraries are updated to recognize other forms, then this - # code should be changed (removed or updated). - - # we only want a Content-Encoding of "identity" since we don't - # support encodings such as x-gzip or x-deflate. - if not skip_accept_encoding: - self.putheader('Accept-Encoding', 'identity') - - # we can accept "chunked" Transfer-Encodings, but no others - # NOTE: no TE header implies *only* "chunked" - #self.putheader('TE', 'chunked') - - # if TE is supplied in the header, then it must appear in a - # Connection header. - #self.putheader('Connection', 'TE') - - else: - # For HTTP/1.0, the server will assume "not chunked" - pass - - def putheader(self, header, *values): - """Send a request header line to the server. - - For example: h.putheader('Accept', 'text/html') - """ - if self.__state != _CS_REQ_STARTED: - raise CannotSendHeader() - - if hasattr(header, 'encode'): - header = header.encode('ascii') - values = list(values) - for i, one_value in enumerate(values): - if hasattr(one_value, 'encode'): - values[i] = one_value.encode('latin-1') - elif isinstance(one_value, int): - values[i] = str(one_value).encode('ascii') - value = bytes(b'\r\n\t').join(values) - header = header + bytes(b': ') + value - self._output(header) - - def endheaders(self, message_body=None): - """Indicate that the last header line has been sent to the server. - - This method sends the request to the server. The optional message_body - argument can be used to pass a message body associated with the - request. The message body will be sent in the same packet as the - message headers if it is a string, otherwise it is sent as a separate - packet. - """ - if self.__state == _CS_REQ_STARTED: - self.__state = _CS_REQ_SENT - else: - raise CannotSendHeader() - self._send_output(message_body) - - def request(self, method, url, body=None, headers={}): - """Send a complete request to the server.""" - self._send_request(method, url, body, headers) - - def _set_content_length(self, body): - # Set the content-length based on the body. - thelen = None - try: - thelen = str(len(body)) - except TypeError as te: - # If this is a file-like object, try to - # fstat its file descriptor - try: - thelen = str(os.fstat(body.fileno()).st_size) - except (AttributeError, OSError): - # Don't send a length if this failed - if self.debuglevel > 0: print("Cannot stat!!") - - if thelen is not None: - self.putheader('Content-Length', thelen) - - def _send_request(self, method, url, body, headers): - # Honor explicitly requested Host: and Accept-Encoding: headers. 
- header_names = dict.fromkeys([k.lower() for k in headers]) - skips = {} - if 'host' in header_names: - skips['skip_host'] = 1 - if 'accept-encoding' in header_names: - skips['skip_accept_encoding'] = 1 - - self.putrequest(method, url, **skips) - - if body is not None and ('content-length' not in header_names): - self._set_content_length(body) - for hdr, value in headers.items(): - self.putheader(hdr, value) - if isinstance(body, str): - # RFC 2616 Section 3.7.1 says that text default has a - # default charset of iso-8859-1. - body = body.encode('iso-8859-1') - self.endheaders(body) - - def getresponse(self): - """Get the response from the server. - - If the HTTPConnection is in the correct state, returns an - instance of HTTPResponse or of whatever object is returned by - class the response_class variable. - - If a request has not been sent or if a previous response has - not be handled, ResponseNotReady is raised. If the HTTP - response indicates that the connection should be closed, then - it will be closed before the response is returned. When the - connection is closed, the underlying socket is closed. - """ - - # if a prior response has been completed, then forget about it. - if self.__response and self.__response.isclosed(): - self.__response = None - - # if a prior response exists, then it must be completed (otherwise, we - # cannot read this response's header to determine the connection-close - # behavior) - # - # note: if a prior response existed, but was connection-close, then the - # socket and response were made independent of this HTTPConnection - # object since a new request requires that we open a whole new - # connection - # - # this means the prior response had one of two states: - # 1) will_close: this connection was reset and the prior socket and - # response operate independently - # 2) persistent: the response was retained and we await its - # isclosed() status to become true. - # - if self.__state != _CS_REQ_SENT or self.__response: - raise ResponseNotReady(self.__state) - - if self.debuglevel > 0: - response = self.response_class(self.sock, self.debuglevel, - method=self._method) - else: - response = self.response_class(self.sock, method=self._method) - - response.begin() - assert response.will_close != _UNKNOWN - self.__state = _CS_IDLE - - if response.will_close: - # this effectively passes the connection to the response - self.close() - else: - # remember this, so we can tell when it is complete - self.__response = response - - return response - -try: - import ssl - from ssl import SSLContext -except ImportError: - pass -else: - class HTTPSConnection(HTTPConnection): - "This class allows communication via SSL." - - default_port = HTTPS_PORT - - # XXX Should key_file and cert_file be deprecated in favour of context? 
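The XXX question above was eventually answered upstream: key_file and cert_file are deprecated in modern Python in favour of an ssl.SSLContext. A minimal sketch of the context-based style ("client.pem" is a hypothetical client-certificate path):

    import ssl
    from http.client import HTTPSConnection

    ctx = ssl.create_default_context()  # certificate verification and sane TLS defaults on
    ctx.load_cert_chain("client.pem")   # hypothetical client certificate chain
    conn = HTTPSConnection("example.com", context=ctx)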
- - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=_strict_sentinel, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, **_3to2kwargs): - if 'check_hostname' in _3to2kwargs: check_hostname = _3to2kwargs['check_hostname']; del _3to2kwargs['check_hostname'] - else: check_hostname = None - if 'context' in _3to2kwargs: context = _3to2kwargs['context']; del _3to2kwargs['context'] - else: context = None - super(HTTPSConnection, self).__init__(host, port, strict, timeout, - source_address) - self.key_file = key_file - self.cert_file = cert_file - if context is None: - # Some reasonable defaults - context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - context.options |= ssl.OP_NO_SSLv2 - will_verify = context.verify_mode != ssl.CERT_NONE - if check_hostname is None: - check_hostname = will_verify - elif check_hostname and not will_verify: - raise ValueError("check_hostname needs a SSL context with " - "either CERT_OPTIONAL or CERT_REQUIRED") - if key_file or cert_file: - context.load_cert_chain(cert_file, key_file) - self._context = context - self._check_hostname = check_hostname - - def connect(self): - "Connect to a host on a given (SSL) port." - - sock = socket_create_connection((self.host, self.port), - self.timeout, self.source_address) - - if self._tunnel_host: - self.sock = sock - self._tunnel() - - server_hostname = self.host if ssl.HAS_SNI else None - self.sock = self._context.wrap_socket(sock, - server_hostname=server_hostname) - try: - if self._check_hostname: - ssl.match_hostname(self.sock.getpeercert(), self.host) - except Exception: - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - - __all__.append("HTTPSConnection") - - - # ###################################### - # # We use the old HTTPSConnection class from Py2.7, because ssl.SSLContext - # # doesn't exist in the Py2.7 stdlib - # class HTTPSConnection(HTTPConnection): - # "This class allows communication via SSL." - - # default_port = HTTPS_PORT - - # def __init__(self, host, port=None, key_file=None, cert_file=None, - # strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - # source_address=None): - # HTTPConnection.__init__(self, host, port, strict, timeout, - # source_address) - # self.key_file = key_file - # self.cert_file = cert_file - - # def connect(self): - # "Connect to a host on a given (SSL) port." - - # sock = socket_create_connection((self.host, self.port), - # self.timeout, self.source_address) - # if self._tunnel_host: - # self.sock = sock - # self._tunnel() - # self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) - - # __all__.append("HTTPSConnection") - # ###################################### - - -class HTTPException(Exception): - # Subclasses that define an __init__ must call Exception.__init__ - # or define self.args. Otherwise, str() will fail. 
- pass - -class NotConnected(HTTPException): - pass - -class InvalidURL(HTTPException): - pass - -class UnknownProtocol(HTTPException): - def __init__(self, version): - self.args = version, - self.version = version - -class UnknownTransferEncoding(HTTPException): - pass - -class UnimplementedFileMode(HTTPException): - pass - -class IncompleteRead(HTTPException): - def __init__(self, partial, expected=None): - self.args = partial, - self.partial = partial - self.expected = expected - def __repr__(self): - if self.expected is not None: - e = ', %i more expected' % self.expected - else: - e = '' - return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), e) - def __str__(self): - return repr(self) - -class ImproperConnectionState(HTTPException): - pass - -class CannotSendRequest(ImproperConnectionState): - pass - -class CannotSendHeader(ImproperConnectionState): - pass - -class ResponseNotReady(ImproperConnectionState): - pass - -class BadStatusLine(HTTPException): - def __init__(self, line): - if not line: - line = repr(line) - self.args = line, - self.line = line - -class LineTooLong(HTTPException): - def __init__(self, line_type): - HTTPException.__init__(self, "got more than %d bytes when reading %s" - % (_MAXLINE, line_type)) - -# for backwards compatibility -error = HTTPException diff --git a/pype/vendor/future/backports/http/cookiejar.py b/pype/vendor/future/backports/http/cookiejar.py deleted file mode 100644 index cad72f9b4a..0000000000 --- a/pype/vendor/future/backports/http/cookiejar.py +++ /dev/null @@ -1,2109 +0,0 @@ -r"""HTTP cookie handling for web clients. - -This is a backport of the Py3.3 ``http.cookiejar`` module for -python-future. - -This module has (now fairly distant) origins in Gisle Aas' Perl module -HTTP::Cookies, from the libwww-perl library. - -Docstrings, comments and debug strings in this code refer to the -attributes of the HTTP cookie system as cookie-attributes, to distinguish -them clearly from Python attributes. 
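A minimal sketch of the jar's typical role -- capturing Set-Cookie headers from responses and replaying matching cookies on later requests -- using the standard library's http.cookiejar and urllib that replace this backport:

    import urllib.request
    from http.cookiejar import CookieJar

    jar = CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
    resp = opener.open("http://example.com/")  # Set-Cookie headers land in the jar
    for cookie in jar:                         # the jar iterates over Cookie objects
        print(cookie.name, cookie.value, cookie.domain)
    # subsequent opener.open() calls send back whatever cookies match the request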
- -Class diagram (note that BSDDBCookieJar and the MSIE* classes are not -distributed with the Python standard library, but are available from -http://wwwsearch.sf.net/): - - CookieJar____ - / \ \ - FileCookieJar \ \ - / | \ \ \ - MozillaCookieJar | LWPCookieJar \ \ - | | \ - | ---MSIEBase | \ - | / | | \ - | / MSIEDBCookieJar BSDDBCookieJar - |/ - MSIECookieJar - -""" - -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future.builtins import filter, int, map, open, str -from future.utils import as_native_str - -__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy', - 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar'] - -import copy -import datetime -import re -re.ASCII = 0 -import time -from future.backports.urllib.parse import urlparse, urlsplit, quote -from future.backports.http.client import HTTP_PORT -try: - import threading as _threading -except ImportError: - import dummy_threading as _threading -from calendar import timegm - -debug = False # set to True to enable debugging via the logging module -logger = None - -def _debug(*args): - if not debug: - return - global logger - if not logger: - import logging - logger = logging.getLogger("http.cookiejar") - return logger.debug(*args) - - -DEFAULT_HTTP_PORT = str(HTTP_PORT) -MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar " - "instance initialised with one)") - -def _warn_unhandled_exception(): - # There are a few catch-all except: statements in this module, for - # catching input that's bad in unexpected ways. Warn if any - # exceptions are caught there. - import io, warnings, traceback - f = io.StringIO() - traceback.print_exc(None, f) - msg = f.getvalue() - warnings.warn("http.cookiejar bug!\n%s" % msg, stacklevel=2) - - -# Date/time conversion -# ----------------------------------------------------------------------------- - -EPOCH_YEAR = 1970 -def _timegm(tt): - year, month, mday, hour, min, sec = tt[:6] - if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and - (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)): - return timegm(tt) - else: - return None - -DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] -MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] -MONTHS_LOWER = [] -for month in MONTHS: MONTHS_LOWER.append(month.lower()) - -def time2isoz(t=None): - """Return a string representing time in seconds since epoch, t. - - If the function is called without an argument, it will use the current - time. - - The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ", - representing Universal Time (UTC, aka GMT). An example of this format is: - - 1994-11-24 08:49:37Z - - """ - if t is None: - dt = datetime.datetime.utcnow() - else: - dt = datetime.datetime.utcfromtimestamp(t) - return "%04d-%02d-%02d %02d:%02d:%02dZ" % ( - dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) - -def time2netscape(t=None): - """Return a string representing time in seconds since epoch, t. - - If the function is called without an argument, it will use the current - time. 
- - The format of the returned string is like this: - - Wed, DD-Mon-YYYY HH:MM:SS GMT - - """ - if t is None: - dt = datetime.datetime.utcnow() - else: - dt = datetime.datetime.utcfromtimestamp(t) - return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % ( - DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], - dt.year, dt.hour, dt.minute, dt.second) - - -UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None} - -TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$", re.ASCII) -def offset_from_tz_string(tz): - offset = None - if tz in UTC_ZONES: - offset = 0 - else: - m = TIMEZONE_RE.search(tz) - if m: - offset = 3600 * int(m.group(2)) - if m.group(3): - offset = offset + 60 * int(m.group(3)) - if m.group(1) == '-': - offset = -offset - return offset - -def _str2time(day, mon, yr, hr, min, sec, tz): - # translate month name to number - # month numbers start with 1 (January) - try: - mon = MONTHS_LOWER.index(mon.lower())+1 - except ValueError: - # maybe it's already a number - try: - imon = int(mon) - except ValueError: - return None - if 1 <= imon <= 12: - mon = imon - else: - return None - - # make sure clock elements are defined - if hr is None: hr = 0 - if min is None: min = 0 - if sec is None: sec = 0 - - yr = int(yr) - day = int(day) - hr = int(hr) - min = int(min) - sec = int(sec) - - if yr < 1000: - # find "obvious" year - cur_yr = time.localtime(time.time())[0] - m = cur_yr % 100 - tmp = yr - yr = yr + cur_yr - m - m = m - tmp - if abs(m) > 50: - if m > 0: yr = yr + 100 - else: yr = yr - 100 - - # convert UTC time tuple to seconds since epoch (not timezone-adjusted) - t = _timegm((yr, mon, day, hr, min, sec, tz)) - - if t is not None: - # adjust time using timezone string, to get absolute time since epoch - if tz is None: - tz = "UTC" - tz = tz.upper() - offset = offset_from_tz_string(tz) - if offset is None: - return None - t = t - offset - - return t - -STRICT_DATE_RE = re.compile( - r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) " - "(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$", re.ASCII) -WEEKDAY_RE = re.compile( - r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I | re.ASCII) -LOOSE_HTTP_DATE_RE = re.compile( - r"""^ - (\d\d?) # day - (?:\s+|[-\/]) - (\w+) # month - (?:\s+|[-\/]) - (\d+) # year - (?: - (?:\s+|:) # separator before clock - (\d\d?):(\d\d) # hour:min - (?::(\d\d))? # optional seconds - )? # optional clock - \s* - ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone - \s* - (?:\(\w+\))? # ASCII representation of timezone in parens. - \s*$""", re.X | re.ASCII) -def http2time(text): - """Returns time in seconds since epoch of time represented by a string. - - Return value is an integer. - - None is returned if the format of str is unrecognized, the time is outside - the representable range, or the timezone string is not recognized. If the - string contains no timezone, UTC is assumed. - - The timezone in the string may be numerical (like "-0800" or "+0100") or a - string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the - timezone strings equivalent to UTC (zero offset) are known to the function. - - The function loosely parses the following formats: - - Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format - Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format - Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format - 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday) - 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday) - 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday) - - The parser ignores leading and trailing whitespace. 
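A short sketch of the loose parsing http2time implements, using the stdlib equivalent; the expected value follows from the UTC conversion in _timegm above:

    from http.cookiejar import http2time

    # Strict RFC 1123 form takes the fast path through STRICT_DATE_RE.
    print(int(http2time("Wed, 09 Feb 1994 22:23:32 GMT")))  # 760832612 (epoch seconds, UTC)
    # The loose regexp also accepts rfc850-style dates with two-digit years.
    print(http2time("08-Feb-94 14:15:29 GMT"))  # 1994-02-08 14:15:29 UTC as epoch seconds
    print(http2time("not a date"))              # None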
The time may be - absent. - - If the year is given with only 2 digits, the function will select the - century that makes the year closest to the current date. - - """ - # fast exit for strictly conforming string - m = STRICT_DATE_RE.search(text) - if m: - g = m.groups() - mon = MONTHS_LOWER.index(g[1].lower()) + 1 - tt = (int(g[2]), mon, int(g[0]), - int(g[3]), int(g[4]), float(g[5])) - return _timegm(tt) - - # No, we need some messy parsing... - - # clean up - text = text.lstrip() - text = WEEKDAY_RE.sub("", text, 1) # Useless weekday - - # tz is time zone specifier string - day, mon, yr, hr, min, sec, tz = [None]*7 - - # loose regexp parse - m = LOOSE_HTTP_DATE_RE.search(text) - if m is not None: - day, mon, yr, hr, min, sec, tz = m.groups() - else: - return None # bad format - - return _str2time(day, mon, yr, hr, min, sec, tz) - -ISO_DATE_RE = re.compile( - """^ - (\d{4}) # year - [-\/]? - (\d\d?) # numerical month - [-\/]? - (\d\d?) # day - (?: - (?:\s+|[-:Tt]) # separator before clock - (\d\d?):?(\d\d) # hour:min - (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional) - )? # optional clock - \s* - ([-+]?\d\d?:?(:?\d\d)? - |Z|z)? # timezone (Z is "zero meridian", i.e. GMT) - \s*$""", re.X | re. ASCII) -def iso2time(text): - """ - As for http2time, but parses the ISO 8601 formats: - - 1994-02-03 14:15:29 -0100 -- ISO 8601 format - 1994-02-03 14:15:29 -- zone is optional - 1994-02-03 -- only date - 1994-02-03T14:15:29 -- Use T as separator - 19940203T141529Z -- ISO 8601 compact format - 19940203 -- only date - - """ - # clean up - text = text.lstrip() - - # tz is time zone specifier string - day, mon, yr, hr, min, sec, tz = [None]*7 - - # loose regexp parse - m = ISO_DATE_RE.search(text) - if m is not None: - # XXX there's an extra bit of the timezone I'm ignoring here: is - # this the right thing to do? - yr, mon, day, hr, min, sec, tz, _ = m.groups() - else: - return None # bad format - - return _str2time(day, mon, yr, hr, min, sec, tz) - - -# Header parsing -# ----------------------------------------------------------------------------- - -def unmatched(match): - """Return unmatched part of re.Match object.""" - start, end = match.span(0) - return match.string[:start]+match.string[end:] - -HEADER_TOKEN_RE = re.compile(r"^\s*([^=\s;,]+)") -HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"") -HEADER_VALUE_RE = re.compile(r"^\s*=\s*([^\s;,]*)") -HEADER_ESCAPE_RE = re.compile(r"\\(.)") -def split_header_words(header_values): - r"""Parse header values into a list of lists containing key,value pairs. - - The function knows how to deal with ",", ";" and "=" as well as quoted - values after "=". A list of space separated tokens are parsed as if they - were separated by ";". - - If the header_values passed as argument contains multiple values, then they - are treated as if they were a single value separated by comma ",". - - This means that this function is useful for parsing header fields that - follow this syntax (BNF as from the HTTP/1.1 specification, but we relax - the requirement for tokens). - - headers = #header - header = (token | parameter) *( [";"] (token | parameter)) - - token = 1* - separators = "(" | ")" | "<" | ">" | "@" - | "," | ";" | ":" | "\" | <"> - | "/" | "[" | "]" | "?" 
| "=" - | "{" | "}" | SP | HT - - quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) - qdtext = > - quoted-pair = "\" CHAR - - parameter = attribute "=" value - attribute = token - value = token | quoted-string - - Each header is represented by a list of key/value pairs. The value for a - simple token (not part of a parameter) is None. Syntactically incorrect - headers will not necessarily be parsed as you would want. - - This is easier to describe with some examples: - - >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) - [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] - >>> split_header_words(['text/html; charset="iso-8859-1"']) - [[('text/html', None), ('charset', 'iso-8859-1')]] - >>> split_header_words([r'Basic realm="\"foo\bar\""']) - [[('Basic', None), ('realm', '"foobar"')]] - - """ - assert not isinstance(header_values, str) - result = [] - for text in header_values: - orig_text = text - pairs = [] - while text: - m = HEADER_TOKEN_RE.search(text) - if m: - text = unmatched(m) - name = m.group(1) - m = HEADER_QUOTED_VALUE_RE.search(text) - if m: # quoted value - text = unmatched(m) - value = m.group(1) - value = HEADER_ESCAPE_RE.sub(r"\1", value) - else: - m = HEADER_VALUE_RE.search(text) - if m: # unquoted value - text = unmatched(m) - value = m.group(1) - value = value.rstrip() - else: - # no value, a lone token - value = None - pairs.append((name, value)) - elif text.lstrip().startswith(","): - # concatenated headers, as per RFC 2616 section 4.2 - text = text.lstrip()[1:] - if pairs: result.append(pairs) - pairs = [] - else: - # skip junk - non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text) - assert nr_junk_chars > 0, ( - "split_header_words bug: '%s', '%s', %s" % - (orig_text, text, pairs)) - text = non_junk - if pairs: result.append(pairs) - return result - -HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])") -def join_header_words(lists): - """Do the inverse (almost) of the conversion done by split_header_words. - - Takes a list of lists of (key, value) pairs and produces a single header - value. Attribute values are quoted if needed. - - >>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]]) - 'text/plain; charset="iso-8859/1"' - >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]]) - 'text/plain, charset="iso-8859/1"' - - """ - headers = [] - for pairs in lists: - attr = [] - for k, v in pairs: - if v is not None: - if not re.search(r"^\w+$", v): - v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v) # escape " and \ - v = '"%s"' % v - k = "%s=%s" % (k, v) - attr.append(k) - if attr: headers.append("; ".join(attr)) - return ", ".join(headers) - -def strip_quotes(text): - if text.startswith('"'): - text = text[1:] - if text.endswith('"'): - text = text[:-1] - return text - -def parse_ns_headers(ns_headers): - """Ad-hoc parser for Netscape protocol cookie-attributes. - - The old Netscape cookie format for Set-Cookie can for instance contain - an unquoted "," in the expires field, so we have to use this ad-hoc - parser instead of split_header_words. - - XXX This may not make the best possible effort to parse all the crap - that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient - parser is probably better, so could do worse than following that if - this ever gives any trouble. - - Currently, this is also used for parsing RFC 2109 cookies. 
- - """ - known_attrs = ("expires", "domain", "path", "secure", - # RFC 2109 attrs (may turn up in Netscape cookies, too) - "version", "port", "max-age") - - result = [] - for ns_header in ns_headers: - pairs = [] - version_set = False - for ii, param in enumerate(re.split(r";\s*", ns_header)): - param = param.rstrip() - if param == "": continue - if "=" not in param: - k, v = param, None - else: - k, v = re.split(r"\s*=\s*", param, 1) - k = k.lstrip() - if ii != 0: - lc = k.lower() - if lc in known_attrs: - k = lc - if k == "version": - # This is an RFC 2109 cookie. - v = strip_quotes(v) - version_set = True - if k == "expires": - # convert expires date to seconds since epoch - v = http2time(strip_quotes(v)) # None if invalid - pairs.append((k, v)) - - if pairs: - if not version_set: - pairs.append(("version", "0")) - result.append(pairs) - - return result - - -IPV4_RE = re.compile(r"\.\d+$", re.ASCII) -def is_HDN(text): - """Return True if text is a host domain name.""" - # XXX - # This may well be wrong. Which RFC is HDN defined in, if any (for - # the purposes of RFC 2965)? - # For the current implementation, what about IPv6? Remember to look - # at other uses of IPV4_RE also, if change this. - if IPV4_RE.search(text): - return False - if text == "": - return False - if text[0] == "." or text[-1] == ".": - return False - return True - -def domain_match(A, B): - """Return True if domain A domain-matches domain B, according to RFC 2965. - - A and B may be host domain names or IP addresses. - - RFC 2965, section 1: - - Host names can be specified either as an IP address or a HDN string. - Sometimes we compare one host name with another. (Such comparisons SHALL - be case-insensitive.) Host A's name domain-matches host B's if - - * their host name strings string-compare equal; or - - * A is a HDN string and has the form NB, where N is a non-empty - name string, B has the form .B', and B' is a HDN string. (So, - x.y.com domain-matches .Y.com but not Y.com.) - - Note that domain-match is not a commutative operation: a.b.c.com - domain-matches .c.com, but not the reverse. - - """ - # Note that, if A or B are IP addresses, the only relevant part of the - # definition of the domain-match algorithm is the direct string-compare. - A = A.lower() - B = B.lower() - if A == B: - return True - if not is_HDN(A): - return False - i = A.rfind(B) - if i == -1 or i == 0: - # A does not have form NB, or N is the empty string - return False - if not B.startswith("."): - return False - if not is_HDN(B[1:]): - return False - return True - -def liberal_is_HDN(text): - """Return True if text is a sort-of-like a host domain name. - - For accepting/blocking domains. - - """ - if IPV4_RE.search(text): - return False - return True - -def user_domain_match(A, B): - """For blocking/accepting domains. - - A and B may be host domain names or IP addresses. - - """ - A = A.lower() - B = B.lower() - if not (liberal_is_HDN(A) and liberal_is_HDN(B)): - if A == B: - # equal IP addresses - return True - return False - initial_dot = B.startswith(".") - if initial_dot and A.endswith(B): - return True - if not initial_dot and A == B: - return True - return False - -cut_port_re = re.compile(r":\d+$", re.ASCII) -def request_host(request): - """Return request-host, as defined by RFC 2965. - - Variation from RFC: returned value is lowercased, for convenient - comparison. 
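The asymmetry described in the domain_match docstring is easy to miss; a quick sketch (stdlib equivalent):

    from http.cookiejar import domain_match

    print(domain_match("x.y.com", ".y.com"))  # True  (A has the form N + B)
    print(domain_match("y.com", ".y.com"))    # False (A does not have the form N + B)
    print(domain_match(".y.com", "x.y.com"))  # False (not commutative)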
- - """ - url = request.get_full_url() - host = urlparse(url)[1] - if host == "": - host = request.get_header("Host", "") - - # remove port, if present - host = cut_port_re.sub("", host, 1) - return host.lower() - -def eff_request_host(request): - """Return a tuple (request-host, effective request-host name). - - As defined by RFC 2965, except both are lowercased. - - """ - erhn = req_host = request_host(request) - if req_host.find(".") == -1 and not IPV4_RE.search(req_host): - erhn = req_host + ".local" - return req_host, erhn - -def request_path(request): - """Path component of request-URI, as defined by RFC 2965.""" - url = request.get_full_url() - parts = urlsplit(url) - path = escape_path(parts.path) - if not path.startswith("/"): - # fix bad RFC 2396 absoluteURI - path = "/" + path - return path - -def request_port(request): - host = request.host - i = host.find(':') - if i >= 0: - port = host[i+1:] - try: - int(port) - except ValueError: - _debug("nonnumeric port: '%s'", port) - return None - else: - port = DEFAULT_HTTP_PORT - return port - -# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't -# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738). -HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()" -ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])") -def uppercase_escaped_char(match): - return "%%%s" % match.group(1).upper() -def escape_path(path): - """Escape any invalid characters in HTTP URL, and uppercase all escapes.""" - # There's no knowing what character encoding was used to create URLs - # containing %-escapes, but since we have to pick one to escape invalid - # path characters, we pick UTF-8, as recommended in the HTML 4.0 - # specification: - # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1 - # And here, kind of: draft-fielding-uri-rfc2396bis-03 - # (And in draft IRI specification: draft-duerst-iri-05) - # (And here, for new URI schemes: RFC 2718) - path = quote(path, HTTP_PATH_SAFE) - path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path) - return path - -def reach(h): - """Return reach of host h, as defined by RFC 2965, section 1. - - The reach R of a host name H is defined as follows: - - * If - - - H is the host domain name of a host; and, - - - H has the form A.B; and - - - A has no embedded (that is, interior) dots; and - - - B has at least one embedded dot, or B is the string "local". - then the reach of H is .B. - - * Otherwise, the reach of H is H. - - >>> reach("www.acme.com") - '.acme.com' - >>> reach("acme.com") - 'acme.com' - >>> reach("acme.local") - '.local' - - """ - i = h.find(".") - if i >= 0: - #a = h[:i] # this line is only here to show what a is - b = h[i+1:] - i = b.find(".") - if is_HDN(h) and (i >= 0 or b == "local"): - return "."+b - return h - -def is_third_party(request): - """ - - RFC 2965, section 3.3.6: - - An unverifiable transaction is to a third-party host if its request- - host U does not domain-match the reach R of the request-host O in the - origin transaction. - - """ - req_host = request_host(request) - if not domain_match(req_host, reach(request.get_origin_req_host())): - return True - else: - return False - - -class Cookie(object): - """HTTP Cookie. - - This class represents both Netscape and RFC 2965 cookies. - - This is deliberately a very simple class. It just holds attributes. It's - possible to construct Cookie instances that don't comply with the cookie - standards. 
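For reference, escape_path above both quotes unsafe characters and uppercases existing %-escapes; a two-line sketch against the stdlib version:

    from http.cookiejar import escape_path

    print(escape_path("/foo bar/"))  # /foo%20bar/  (space quoted, UTF-8 per the comment above)
    print(escape_path("/a%2fb"))     # /a%2Fb       (existing escape normalised to uppercase)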
CookieJar.make_cookies is the factory function for Cookie - objects -- it deals with cookie parsing, supplying defaults, and - normalising to the representation used in this class. CookiePolicy is - responsible for checking them to see whether they should be accepted from - and returned to the server. - - Note that the port may be present in the headers, but unspecified ("Port" - rather than"Port=80", for example); if this is the case, port is None. - - """ - - def __init__(self, version, name, value, - port, port_specified, - domain, domain_specified, domain_initial_dot, - path, path_specified, - secure, - expires, - discard, - comment, - comment_url, - rest, - rfc2109=False, - ): - - if version is not None: version = int(version) - if expires is not None: expires = int(expires) - if port is None and port_specified is True: - raise ValueError("if port is None, port_specified must be false") - - self.version = version - self.name = name - self.value = value - self.port = port - self.port_specified = port_specified - # normalise case, as per RFC 2965 section 3.3.3 - self.domain = domain.lower() - self.domain_specified = domain_specified - # Sigh. We need to know whether the domain given in the - # cookie-attribute had an initial dot, in order to follow RFC 2965 - # (as clarified in draft errata). Needed for the returned $Domain - # value. - self.domain_initial_dot = domain_initial_dot - self.path = path - self.path_specified = path_specified - self.secure = secure - self.expires = expires - self.discard = discard - self.comment = comment - self.comment_url = comment_url - self.rfc2109 = rfc2109 - - self._rest = copy.copy(rest) - - def has_nonstandard_attr(self, name): - return name in self._rest - def get_nonstandard_attr(self, name, default=None): - return self._rest.get(name, default) - def set_nonstandard_attr(self, name, value): - self._rest[name] = value - - def is_expired(self, now=None): - if now is None: now = time.time() - if (self.expires is not None) and (self.expires <= now): - return True - return False - - def __str__(self): - if self.port is None: p = "" - else: p = ":"+self.port - limit = self.domain + p + self.path - if self.value is not None: - namevalue = "%s=%s" % (self.name, self.value) - else: - namevalue = self.name - return "" % (namevalue, limit) - - @as_native_str() - def __repr__(self): - args = [] - for name in ("version", "name", "value", - "port", "port_specified", - "domain", "domain_specified", "domain_initial_dot", - "path", "path_specified", - "secure", "expires", "discard", "comment", "comment_url", - ): - attr = getattr(self, name) - ### Python-Future: - # Avoid u'...' prefixes for unicode strings: - if isinstance(attr, str): - attr = str(attr) - ### - args.append(str("%s=%s") % (name, repr(attr))) - args.append("rest=%s" % repr(self._rest)) - args.append("rfc2109=%s" % repr(self.rfc2109)) - return "Cookie(%s)" % ", ".join(args) - - -class CookiePolicy(object): - """Defines which cookies get accepted from and returned to server. - - May also modify cookies, though this is probably a bad idea. - - The subclass DefaultCookiePolicy defines the standard rules for Netscape - and RFC 2965 cookies -- override that if you want a customised policy. - - """ - def set_ok(self, cookie, request): - """Return true if (and only if) cookie should be accepted from server. - - Currently, pre-expired cookies never get this far -- the CookieJar - class deletes such cookies itself. 
- - """ - raise NotImplementedError() - - def return_ok(self, cookie, request): - """Return true if (and only if) cookie should be returned to server.""" - raise NotImplementedError() - - def domain_return_ok(self, domain, request): - """Return false if cookies should not be returned, given cookie domain. - """ - return True - - def path_return_ok(self, path, request): - """Return false if cookies should not be returned, given cookie path. - """ - return True - - -class DefaultCookiePolicy(CookiePolicy): - """Implements the standard rules for accepting and returning cookies.""" - - DomainStrictNoDots = 1 - DomainStrictNonDomain = 2 - DomainRFC2965Match = 4 - - DomainLiberal = 0 - DomainStrict = DomainStrictNoDots|DomainStrictNonDomain - - def __init__(self, - blocked_domains=None, allowed_domains=None, - netscape=True, rfc2965=False, - rfc2109_as_netscape=None, - hide_cookie2=False, - strict_domain=False, - strict_rfc2965_unverifiable=True, - strict_ns_unverifiable=False, - strict_ns_domain=DomainLiberal, - strict_ns_set_initial_dollar=False, - strict_ns_set_path=False, - ): - """Constructor arguments should be passed as keyword arguments only.""" - self.netscape = netscape - self.rfc2965 = rfc2965 - self.rfc2109_as_netscape = rfc2109_as_netscape - self.hide_cookie2 = hide_cookie2 - self.strict_domain = strict_domain - self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable - self.strict_ns_unverifiable = strict_ns_unverifiable - self.strict_ns_domain = strict_ns_domain - self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar - self.strict_ns_set_path = strict_ns_set_path - - if blocked_domains is not None: - self._blocked_domains = tuple(blocked_domains) - else: - self._blocked_domains = () - - if allowed_domains is not None: - allowed_domains = tuple(allowed_domains) - self._allowed_domains = allowed_domains - - def blocked_domains(self): - """Return the sequence of blocked domains (as a tuple).""" - return self._blocked_domains - def set_blocked_domains(self, blocked_domains): - """Set the sequence of blocked domains.""" - self._blocked_domains = tuple(blocked_domains) - - def is_blocked(self, domain): - for blocked_domain in self._blocked_domains: - if user_domain_match(domain, blocked_domain): - return True - return False - - def allowed_domains(self): - """Return None, or the sequence of allowed domains (as a tuple).""" - return self._allowed_domains - def set_allowed_domains(self, allowed_domains): - """Set the sequence of allowed domains, or None.""" - if allowed_domains is not None: - allowed_domains = tuple(allowed_domains) - self._allowed_domains = allowed_domains - - def is_not_allowed(self, domain): - if self._allowed_domains is None: - return False - for allowed_domain in self._allowed_domains: - if user_domain_match(domain, allowed_domain): - return False - return True - - def set_ok(self, cookie, request): - """ - If you override .set_ok(), be sure to call this method. If it returns - false, so should your subclass (assuming your subclass wants to be more - strict about which cookies to accept). 
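The note above ("if you override .set_ok(), be sure to call this method") suggests the intended extension pattern; a minimal sketch using a hypothetical stricter policy:

    from http.cookiejar import DefaultCookiePolicy

    class NoTrackersPolicy(DefaultCookiePolicy):
        # Hypothetical example: reject everything the base checks reject,
        # plus any cookie whose name looks like a tracking id.
        def set_ok(self, cookie, request):
            if not DefaultCookiePolicy.set_ok(self, cookie, request):
                return False
            return not cookie.name.startswith("_track")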
- - """ - _debug(" - checking cookie %s=%s", cookie.name, cookie.value) - - assert cookie.name is not None - - for n in "version", "verifiability", "name", "path", "domain", "port": - fn_name = "set_ok_"+n - fn = getattr(self, fn_name) - if not fn(cookie, request): - return False - - return True - - def set_ok_version(self, cookie, request): - if cookie.version is None: - # Version is always set to 0 by parse_ns_headers if it's a Netscape - # cookie, so this must be an invalid RFC 2965 cookie. - _debug(" Set-Cookie2 without version attribute (%s=%s)", - cookie.name, cookie.value) - return False - if cookie.version > 0 and not self.rfc2965: - _debug(" RFC 2965 cookies are switched off") - return False - elif cookie.version == 0 and not self.netscape: - _debug(" Netscape cookies are switched off") - return False - return True - - def set_ok_verifiability(self, cookie, request): - if request.unverifiable and is_third_party(request): - if cookie.version > 0 and self.strict_rfc2965_unverifiable: - _debug(" third-party RFC 2965 cookie during " - "unverifiable transaction") - return False - elif cookie.version == 0 and self.strict_ns_unverifiable: - _debug(" third-party Netscape cookie during " - "unverifiable transaction") - return False - return True - - def set_ok_name(self, cookie, request): - # Try and stop servers setting V0 cookies designed to hack other - # servers that know both V0 and V1 protocols. - if (cookie.version == 0 and self.strict_ns_set_initial_dollar and - cookie.name.startswith("$")): - _debug(" illegal name (starts with '$'): '%s'", cookie.name) - return False - return True - - def set_ok_path(self, cookie, request): - if cookie.path_specified: - req_path = request_path(request) - if ((cookie.version > 0 or - (cookie.version == 0 and self.strict_ns_set_path)) and - not req_path.startswith(cookie.path)): - _debug(" path attribute %s is not a prefix of request " - "path %s", cookie.path, req_path) - return False - return True - - def set_ok_domain(self, cookie, request): - if self.is_blocked(cookie.domain): - _debug(" domain %s is in user block-list", cookie.domain) - return False - if self.is_not_allowed(cookie.domain): - _debug(" domain %s is not in user allow-list", cookie.domain) - return False - if cookie.domain_specified: - req_host, erhn = eff_request_host(request) - domain = cookie.domain - if self.strict_domain and (domain.count(".") >= 2): - # XXX This should probably be compared with the Konqueror - # (kcookiejar.cpp) and Mozilla implementations, but it's a - # losing battle. 
- i = domain.rfind(".") - j = domain.rfind(".", 0, i) - if j == 0: # domain like .foo.bar - tld = domain[i+1:] - sld = domain[j+1:i] - if sld.lower() in ("co", "ac", "com", "edu", "org", "net", - "gov", "mil", "int", "aero", "biz", "cat", "coop", - "info", "jobs", "mobi", "museum", "name", "pro", - "travel", "eu") and len(tld) == 2: - # domain like .co.uk - _debug(" country-code second level domain %s", domain) - return False - if domain.startswith("."): - undotted_domain = domain[1:] - else: - undotted_domain = domain - embedded_dots = (undotted_domain.find(".") >= 0) - if not embedded_dots and domain != ".local": - _debug(" non-local domain %s contains no embedded dot", - domain) - return False - if cookie.version == 0: - if (not erhn.endswith(domain) and - (not erhn.startswith(".") and - not ("."+erhn).endswith(domain))): - _debug(" effective request-host %s (even with added " - "initial dot) does not end with %s", - erhn, domain) - return False - if (cookie.version > 0 or - (self.strict_ns_domain & self.DomainRFC2965Match)): - if not domain_match(erhn, domain): - _debug(" effective request-host %s does not domain-match " - "%s", erhn, domain) - return False - if (cookie.version > 0 or - (self.strict_ns_domain & self.DomainStrictNoDots)): - host_prefix = req_host[:-len(domain)] - if (host_prefix.find(".") >= 0 and - not IPV4_RE.search(req_host)): - _debug(" host prefix %s for domain %s contains a dot", - host_prefix, domain) - return False - return True - - def set_ok_port(self, cookie, request): - if cookie.port_specified: - req_port = request_port(request) - if req_port is None: - req_port = "80" - else: - req_port = str(req_port) - for p in cookie.port.split(","): - try: - int(p) - except ValueError: - _debug(" bad port %s (not numeric)", p) - return False - if p == req_port: - break - else: - _debug(" request port (%s) not found in %s", - req_port, cookie.port) - return False - return True - - def return_ok(self, cookie, request): - """ - If you override .return_ok(), be sure to call this method. If it - returns false, so should your subclass (assuming your subclass wants to - be more strict about which cookies to return). - - """ - # Path has already been checked by .path_return_ok(), and domain - # blocking done by .domain_return_ok(). 
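The user block/allow lists consulted in set_ok_domain above use user_domain_match semantics; a minimal sketch (stdlib equivalent, domain names hypothetical):

    from http.cookiejar import DefaultCookiePolicy

    policy = DefaultCookiePolicy(blocked_domains=[".doubleclick.net"])
    print(policy.is_blocked("ad.doubleclick.net"))  # True  (initial dot matches any subdomain)
    print(policy.is_blocked("doubleclick.net"))     # False (no exact, dotless entry for it)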
- _debug(" - checking cookie %s=%s", cookie.name, cookie.value) - - for n in "version", "verifiability", "secure", "expires", "port", "domain": - fn_name = "return_ok_"+n - fn = getattr(self, fn_name) - if not fn(cookie, request): - return False - return True - - def return_ok_version(self, cookie, request): - if cookie.version > 0 and not self.rfc2965: - _debug(" RFC 2965 cookies are switched off") - return False - elif cookie.version == 0 and not self.netscape: - _debug(" Netscape cookies are switched off") - return False - return True - - def return_ok_verifiability(self, cookie, request): - if request.unverifiable and is_third_party(request): - if cookie.version > 0 and self.strict_rfc2965_unverifiable: - _debug(" third-party RFC 2965 cookie during unverifiable " - "transaction") - return False - elif cookie.version == 0 and self.strict_ns_unverifiable: - _debug(" third-party Netscape cookie during unverifiable " - "transaction") - return False - return True - - def return_ok_secure(self, cookie, request): - if cookie.secure and request.type != "https": - _debug(" secure cookie with non-secure request") - return False - return True - - def return_ok_expires(self, cookie, request): - if cookie.is_expired(self._now): - _debug(" cookie expired") - return False - return True - - def return_ok_port(self, cookie, request): - if cookie.port: - req_port = request_port(request) - if req_port is None: - req_port = "80" - for p in cookie.port.split(","): - if p == req_port: - break - else: - _debug(" request port %s does not match cookie port %s", - req_port, cookie.port) - return False - return True - - def return_ok_domain(self, cookie, request): - req_host, erhn = eff_request_host(request) - domain = cookie.domain - - # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't - if (cookie.version == 0 and - (self.strict_ns_domain & self.DomainStrictNonDomain) and - not cookie.domain_specified and domain != erhn): - _debug(" cookie with unspecified domain does not string-compare " - "equal to request domain") - return False - - if cookie.version > 0 and not domain_match(erhn, domain): - _debug(" effective request-host name %s does not domain-match " - "RFC 2965 cookie domain %s", erhn, domain) - return False - if cookie.version == 0 and not ("."+erhn).endswith(domain): - _debug(" request-host %s does not match Netscape cookie domain " - "%s", req_host, domain) - return False - return True - - def domain_return_ok(self, domain, request): - # Liberal check of. This is here as an optimization to avoid - # having to load lots of MSIE cookie files unless necessary. 
- req_host, erhn = eff_request_host(request) - if not req_host.startswith("."): - req_host = "."+req_host - if not erhn.startswith("."): - erhn = "."+erhn - if not (req_host.endswith(domain) or erhn.endswith(domain)): - #_debug(" request domain %s does not match cookie domain %s", - # req_host, domain) - return False - - if self.is_blocked(domain): - _debug(" domain %s is in user block-list", domain) - return False - if self.is_not_allowed(domain): - _debug(" domain %s is not in user allow-list", domain) - return False - - return True - - def path_return_ok(self, path, request): - _debug("- checking cookie path=%s", path) - req_path = request_path(request) - if not req_path.startswith(path): - _debug(" %s does not path-match %s", req_path, path) - return False - return True - - -def vals_sorted_by_key(adict): - keys = sorted(adict.keys()) - return map(adict.get, keys) - -def deepvalues(mapping): - """Iterates over nested mapping, depth-first, in sorted order by key.""" - values = vals_sorted_by_key(mapping) - for obj in values: - mapping = False - try: - obj.items - except AttributeError: - pass - else: - mapping = True - for subobj in deepvalues(obj): - yield subobj - if not mapping: - yield obj - - -# Used as second parameter to dict.get() method, to distinguish absent -# dict key from one with a None value. -class Absent(object): pass - -class CookieJar(object): - """Collection of HTTP cookies. - - You may not need to know about this class: try - urllib.request.build_opener(HTTPCookieProcessor).open(url). - """ - - non_word_re = re.compile(r"\W") - quote_re = re.compile(r"([\"\\])") - strict_domain_re = re.compile(r"\.?[^.]*") - domain_re = re.compile(r"[^.]*") - dots_re = re.compile(r"^\.+") - - magic_re = re.compile(r"^\#LWP-Cookies-(\d+\.\d+)", re.ASCII) - - def __init__(self, policy=None): - if policy is None: - policy = DefaultCookiePolicy() - self._policy = policy - - self._cookies_lock = _threading.RLock() - self._cookies = {} - - def set_policy(self, policy): - self._policy = policy - - def _cookies_for_domain(self, domain, request): - cookies = [] - if not self._policy.domain_return_ok(domain, request): - return [] - _debug("Checking %s for cookies to return", domain) - cookies_by_path = self._cookies[domain] - for path in cookies_by_path.keys(): - if not self._policy.path_return_ok(path, request): - continue - cookies_by_name = cookies_by_path[path] - for cookie in cookies_by_name.values(): - if not self._policy.return_ok(cookie, request): - _debug(" not returning cookie") - continue - _debug(" it's a match") - cookies.append(cookie) - return cookies - - def _cookies_for_request(self, request): - """Return a list of cookies to be returned to server.""" - cookies = [] - for domain in self._cookies.keys(): - cookies.extend(self._cookies_for_domain(domain, request)) - return cookies - - def _cookie_attrs(self, cookies): - """Return a list of cookie-attributes to be returned to server. - - like ['foo="bar"; $Path="/"', ...] - - The $Version attribute is also added when appropriate (currently only - once per request). - - """ - # add cookies in order of most specific (ie. longest) path first - cookies.sort(key=lambda a: len(a.path), reverse=True) - - version_set = False - - attrs = [] - for cookie in cookies: - # set version of Cookie header - # XXX - # What should it be if multiple matching Set-Cookie headers have - # different versions themselves? - # Answer: there is no answer; was supposed to be settled by - # RFC 2965 errata, but that may never appear... 
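As the CookieJar docstring above hints, most callers never touch this machinery directly; the usual wiring, sketched with the stdlib equivalent (URL hypothetical):

    import urllib.request
    from http.cookiejar import CookieJar

    jar = CookieJar()  # DefaultCookiePolicy() is installed when policy is None
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
    # opener.open("http://example.com")  # extract_cookies()/add_cookie_header() run automatically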
- version = cookie.version - if not version_set: - version_set = True - if version > 0: - attrs.append("$Version=%s" % version) - - # quote cookie value if necessary - # (not for Netscape protocol, which already has any quotes - # intact, due to the poorly-specified Netscape Cookie: syntax) - if ((cookie.value is not None) and - self.non_word_re.search(cookie.value) and version > 0): - value = self.quote_re.sub(r"\\\1", cookie.value) - else: - value = cookie.value - - # add cookie-attributes to be returned in Cookie header - if cookie.value is None: - attrs.append(cookie.name) - else: - attrs.append("%s=%s" % (cookie.name, value)) - if version > 0: - if cookie.path_specified: - attrs.append('$Path="%s"' % cookie.path) - if cookie.domain.startswith("."): - domain = cookie.domain - if (not cookie.domain_initial_dot and - domain.startswith(".")): - domain = domain[1:] - attrs.append('$Domain="%s"' % domain) - if cookie.port is not None: - p = "$Port" - if cookie.port_specified: - p = p + ('="%s"' % cookie.port) - attrs.append(p) - - return attrs - - def add_cookie_header(self, request): - """Add correct Cookie: header to request (urllib.request.Request object). - - The Cookie2 header is also added unless policy.hide_cookie2 is true. - - """ - _debug("add_cookie_header") - self._cookies_lock.acquire() - try: - - self._policy._now = self._now = int(time.time()) - - cookies = self._cookies_for_request(request) - - attrs = self._cookie_attrs(cookies) - if attrs: - if not request.has_header("Cookie"): - request.add_unredirected_header( - "Cookie", "; ".join(attrs)) - - # if necessary, advertise that we know RFC 2965 - if (self._policy.rfc2965 and not self._policy.hide_cookie2 and - not request.has_header("Cookie2")): - for cookie in cookies: - if cookie.version != 1: - request.add_unredirected_header("Cookie2", '$Version="1"') - break - - finally: - self._cookies_lock.release() - - self.clear_expired_cookies() - - def _normalized_cookie_tuples(self, attrs_set): - """Return list of tuples containing normalised cookie information. - - attrs_set is the list of lists of key,value pairs extracted from - the Set-Cookie or Set-Cookie2 headers. - - Tuples are name, value, standard, rest, where name and value are the - cookie name and value, standard is a dictionary containing the standard - cookie-attributes (discard, secure, version, expires or max-age, - domain, path and port) and rest is a dictionary containing the rest of - the cookie-attributes. - - """ - cookie_tuples = [] - - boolean_attrs = "discard", "secure" - value_attrs = ("version", - "expires", "max-age", - "domain", "path", "port", - "comment", "commenturl") - - for cookie_attrs in attrs_set: - name, value = cookie_attrs[0] - - # Build dictionary of standard cookie-attributes (standard) and - # dictionary of other cookie-attributes (rest). - - # Note: expiry time is normalised to seconds since epoch. V0 - # cookies should have the Expires cookie-attribute, and V1 cookies - # should have Max-Age, but since V1 includes RFC 2109 cookies (and - # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we - # accept either (but prefer Max-Age). 
- max_age_set = False - - bad_cookie = False - - standard = {} - rest = {} - for k, v in cookie_attrs[1:]: - lc = k.lower() - # don't lose case distinction for unknown fields - if lc in value_attrs or lc in boolean_attrs: - k = lc - if k in boolean_attrs and v is None: - # boolean cookie-attribute is present, but has no value - # (like "discard", rather than "port=80") - v = True - if k in standard: - # only first value is significant - continue - if k == "domain": - if v is None: - _debug(" missing value for domain attribute") - bad_cookie = True - break - # RFC 2965 section 3.3.3 - v = v.lower() - if k == "expires": - if max_age_set: - # Prefer max-age to expires (like Mozilla) - continue - if v is None: - _debug(" missing or invalid value for expires " - "attribute: treating as session cookie") - continue - if k == "max-age": - max_age_set = True - try: - v = int(v) - except ValueError: - _debug(" missing or invalid (non-numeric) value for " - "max-age attribute") - bad_cookie = True - break - # convert RFC 2965 Max-Age to seconds since epoch - # XXX Strictly you're supposed to follow RFC 2616 - # age-calculation rules. Remember that zero Max-Age is a - # is a request to discard (old and new) cookie, though. - k = "expires" - v = self._now + v - if (k in value_attrs) or (k in boolean_attrs): - if (v is None and - k not in ("port", "comment", "commenturl")): - _debug(" missing value for %s attribute" % k) - bad_cookie = True - break - standard[k] = v - else: - rest[k] = v - - if bad_cookie: - continue - - cookie_tuples.append((name, value, standard, rest)) - - return cookie_tuples - - def _cookie_from_cookie_tuple(self, tup, request): - # standard is dict of standard cookie-attributes, rest is dict of the - # rest of them - name, value, standard, rest = tup - - domain = standard.get("domain", Absent) - path = standard.get("path", Absent) - port = standard.get("port", Absent) - expires = standard.get("expires", Absent) - - # set the easy defaults - version = standard.get("version", None) - if version is not None: - try: - version = int(version) - except ValueError: - return None # invalid version, ignore cookie - secure = standard.get("secure", False) - # (discard is also set if expires is Absent) - discard = standard.get("discard", False) - comment = standard.get("comment", None) - comment_url = standard.get("commenturl", None) - - # set default path - if path is not Absent and path != "": - path_specified = True - path = escape_path(path) - else: - path_specified = False - path = request_path(request) - i = path.rfind("/") - if i != -1: - if version == 0: - # Netscape spec parts company from reality here - path = path[:i] - else: - path = path[:i+1] - if len(path) == 0: path = "/" - - # set default domain - domain_specified = domain is not Absent - # but first we have to remember whether it starts with a dot - domain_initial_dot = False - if domain_specified: - domain_initial_dot = bool(domain.startswith(".")) - if domain is Absent: - req_host, erhn = eff_request_host(request) - domain = erhn - elif not domain.startswith("."): - domain = "."+domain - - # set default port - port_specified = False - if port is not Absent: - if port is None: - # Port attr present, but has no value: default to request port. - # Cookie should then only be sent back on that port. - port = request_port(request) - else: - port_specified = True - port = re.sub(r"\s+", "", port) - else: - # No port attr present. Cookie can be sent back on any port. 
- port = None - - # set default expires and discard - if expires is Absent: - expires = None - discard = True - elif expires <= self._now: - # Expiry date in past is request to delete cookie. This can't be - # in DefaultCookiePolicy, because can't delete cookies there. - try: - self.clear(domain, path, name) - except KeyError: - pass - _debug("Expiring cookie, domain='%s', path='%s', name='%s'", - domain, path, name) - return None - - return Cookie(version, - name, value, - port, port_specified, - domain, domain_specified, domain_initial_dot, - path, path_specified, - secure, - expires, - discard, - comment, - comment_url, - rest) - - def _cookies_from_attrs_set(self, attrs_set, request): - cookie_tuples = self._normalized_cookie_tuples(attrs_set) - - cookies = [] - for tup in cookie_tuples: - cookie = self._cookie_from_cookie_tuple(tup, request) - if cookie: cookies.append(cookie) - return cookies - - def _process_rfc2109_cookies(self, cookies): - rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None) - if rfc2109_as_ns is None: - rfc2109_as_ns = not self._policy.rfc2965 - for cookie in cookies: - if cookie.version == 1: - cookie.rfc2109 = True - if rfc2109_as_ns: - # treat 2109 cookies as Netscape cookies rather than - # as RFC2965 cookies - cookie.version = 0 - - def make_cookies(self, response, request): - """Return sequence of Cookie objects extracted from response object.""" - # get cookie-attributes for RFC 2965 and Netscape protocols - headers = response.info() - rfc2965_hdrs = headers.get_all("Set-Cookie2", []) - ns_hdrs = headers.get_all("Set-Cookie", []) - - rfc2965 = self._policy.rfc2965 - netscape = self._policy.netscape - - if ((not rfc2965_hdrs and not ns_hdrs) or - (not ns_hdrs and not rfc2965) or - (not rfc2965_hdrs and not netscape) or - (not netscape and not rfc2965)): - return [] # no relevant cookie headers: quick exit - - try: - cookies = self._cookies_from_attrs_set( - split_header_words(rfc2965_hdrs), request) - except Exception: - _warn_unhandled_exception() - cookies = [] - - if ns_hdrs and netscape: - try: - # RFC 2109 and Netscape cookies - ns_cookies = self._cookies_from_attrs_set( - parse_ns_headers(ns_hdrs), request) - except Exception: - _warn_unhandled_exception() - ns_cookies = [] - self._process_rfc2109_cookies(ns_cookies) - - # Look for Netscape cookies (from Set-Cookie headers) that match - # corresponding RFC 2965 cookies (from Set-Cookie2 headers). - # For each match, keep the RFC 2965 cookie and ignore the Netscape - # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are - # bundled in with the Netscape cookies for this purpose, which is - # reasonable behaviour. 
- if rfc2965: - lookup = {} - for cookie in cookies: - lookup[(cookie.domain, cookie.path, cookie.name)] = None - - def no_matching_rfc2965(ns_cookie, lookup=lookup): - key = ns_cookie.domain, ns_cookie.path, ns_cookie.name - return key not in lookup - ns_cookies = filter(no_matching_rfc2965, ns_cookies) - - if ns_cookies: - cookies.extend(ns_cookies) - - return cookies - - def set_cookie_if_ok(self, cookie, request): - """Set a cookie if policy says it's OK to do so.""" - self._cookies_lock.acquire() - try: - self._policy._now = self._now = int(time.time()) - - if self._policy.set_ok(cookie, request): - self.set_cookie(cookie) - - - finally: - self._cookies_lock.release() - - def set_cookie(self, cookie): - """Set a cookie, without checking whether or not it should be set.""" - c = self._cookies - self._cookies_lock.acquire() - try: - if cookie.domain not in c: c[cookie.domain] = {} - c2 = c[cookie.domain] - if cookie.path not in c2: c2[cookie.path] = {} - c3 = c2[cookie.path] - c3[cookie.name] = cookie - finally: - self._cookies_lock.release() - - def extract_cookies(self, response, request): - """Extract cookies from response, where allowable given the request.""" - _debug("extract_cookies: %s", response.info()) - self._cookies_lock.acquire() - try: - self._policy._now = self._now = int(time.time()) - - for cookie in self.make_cookies(response, request): - if self._policy.set_ok(cookie, request): - _debug(" setting cookie: %s", cookie) - self.set_cookie(cookie) - finally: - self._cookies_lock.release() - - def clear(self, domain=None, path=None, name=None): - """Clear some cookies. - - Invoking this method without arguments will clear all cookies. If - given a single argument, only cookies belonging to that domain will be - removed. If given two arguments, cookies belonging to the specified - path within that domain are removed. If given three arguments, then - the cookie with the specified name, path and domain is removed. - - Raises KeyError if no matching cookie exists. - - """ - if name is not None: - if (domain is None) or (path is None): - raise ValueError( - "domain and path must be given to remove a cookie by name") - del self._cookies[domain][path][name] - elif path is not None: - if domain is None: - raise ValueError( - "domain must be given to remove cookies by path") - del self._cookies[domain][path] - elif domain is not None: - del self._cookies[domain] - else: - self._cookies = {} - - def clear_session_cookies(self): - """Discard all session cookies. - - Note that the .save() method won't save session cookies anyway, unless - you ask otherwise by passing a true ignore_discard argument. - - """ - self._cookies_lock.acquire() - try: - for cookie in self: - if cookie.discard: - self.clear(cookie.domain, cookie.path, cookie.name) - finally: - self._cookies_lock.release() - - def clear_expired_cookies(self): - """Discard all expired cookies. - - You probably don't need to call this method: expired cookies are never - sent back to the server (provided you're using DefaultCookiePolicy), - this method is called by CookieJar itself every so often, and the - .save() method won't save expired cookies anyway (unless you ask - otherwise by passing a true ignore_expires argument). 
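The three clear() signatures described above, sketched against a stdlib jar (domain/path/name values hypothetical):

    from http.cookiejar import CookieJar

    jar = CookieJar()
    # After some responses have populated the jar:
    # jar.clear(".example.com", "/", "session")  # one named cookie; KeyError if absent
    # jar.clear(".example.com", "/")             # every cookie on that path
    # jar.clear(".example.com")                  # every cookie for the domain
    jar.clear()                                   # everything; never raises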
- - """ - self._cookies_lock.acquire() - try: - now = time.time() - for cookie in self: - if cookie.is_expired(now): - self.clear(cookie.domain, cookie.path, cookie.name) - finally: - self._cookies_lock.release() - - def __iter__(self): - return deepvalues(self._cookies) - - def __len__(self): - """Return number of contained cookies.""" - i = 0 - for cookie in self: i = i + 1 - return i - - @as_native_str() - def __repr__(self): - r = [] - for cookie in self: r.append(repr(cookie)) - return "<%s[%s]>" % (self.__class__, ", ".join(r)) - - def __str__(self): - r = [] - for cookie in self: r.append(str(cookie)) - return "<%s[%s]>" % (self.__class__, ", ".join(r)) - - -# derives from IOError for backwards-compatibility with Python 2.4.0 -class LoadError(IOError): pass - -class FileCookieJar(CookieJar): - """CookieJar that can be loaded from and saved to a file.""" - - def __init__(self, filename=None, delayload=False, policy=None): - """ - Cookies are NOT loaded from the named file until either the .load() or - .revert() method is called. - - """ - CookieJar.__init__(self, policy) - if filename is not None: - try: - filename+"" - except: - raise ValueError("filename must be string-like") - self.filename = filename - self.delayload = bool(delayload) - - def save(self, filename=None, ignore_discard=False, ignore_expires=False): - """Save cookies to a file.""" - raise NotImplementedError() - - def load(self, filename=None, ignore_discard=False, ignore_expires=False): - """Load cookies from a file.""" - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - f = open(filename) - try: - self._really_load(f, filename, ignore_discard, ignore_expires) - finally: - f.close() - - def revert(self, filename=None, - ignore_discard=False, ignore_expires=False): - """Clear all cookies and reload cookies from a saved file. - - Raises LoadError (or IOError) if reversion is not successful; the - object's state will not be altered if this happens. - - """ - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - self._cookies_lock.acquire() - try: - - old_state = copy.deepcopy(self._cookies) - self._cookies = {} - try: - self.load(filename, ignore_discard, ignore_expires) - except (LoadError, IOError): - self._cookies = old_state - raise - - finally: - self._cookies_lock.release() - - -def lwp_cookie_str(cookie): - """Return string representation of Cookie in an the LWP cookie file format. - - Actually, the format is extended a bit -- see module docstring. - - """ - h = [(cookie.name, cookie.value), - ("path", cookie.path), - ("domain", cookie.domain)] - if cookie.port is not None: h.append(("port", cookie.port)) - if cookie.path_specified: h.append(("path_spec", None)) - if cookie.port_specified: h.append(("port_spec", None)) - if cookie.domain_initial_dot: h.append(("domain_dot", None)) - if cookie.secure: h.append(("secure", None)) - if cookie.expires: h.append(("expires", - time2isoz(float(cookie.expires)))) - if cookie.discard: h.append(("discard", None)) - if cookie.comment: h.append(("comment", cookie.comment)) - if cookie.comment_url: h.append(("commenturl", cookie.comment_url)) - - keys = sorted(cookie._rest.keys()) - for k in keys: - h.append((k, str(cookie._rest[k]))) - - h.append(("version", str(cookie.version))) - - return join_header_words([h]) - -class LWPCookieJar(FileCookieJar): - """ - The LWPCookieJar saves a sequence of "Set-Cookie3" lines. 
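A minimal save/load round-trip for the FileCookieJar subclasses, using the stdlib LWPCookieJar (filename hypothetical):

    from http.cookiejar import LWPCookieJar

    jar = LWPCookieJar("cookies.lwp")   # not loaded until .load() or .revert() is called
    jar.save(ignore_discard=True)       # writes "#LWP-Cookies-2.0" plus Set-Cookie3 lines
    jar.load(ignore_discard=True)       # .revert() restores old state on failure; .load() does not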
- "Set-Cookie3" is the format used by the libwww-perl libary, not known - to be compatible with any browser, but which is easy to read and - doesn't lose information about RFC 2965 cookies. - - Additional methods - - as_lwp_str(ignore_discard=True, ignore_expired=True) - - """ - - def as_lwp_str(self, ignore_discard=True, ignore_expires=True): - """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. - - ignore_discard and ignore_expires: see docstring for FileCookieJar.save - - """ - now = time.time() - r = [] - for cookie in self: - if not ignore_discard and cookie.discard: - continue - if not ignore_expires and cookie.is_expired(now): - continue - r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie)) - return "\n".join(r+[""]) - - def save(self, filename=None, ignore_discard=False, ignore_expires=False): - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - f = open(filename, "w") - try: - # There really isn't an LWP Cookies 2.0 format, but this indicates - # that there is extra information in here (domain_dot and - # port_spec) while still being compatible with libwww-perl, I hope. - f.write("#LWP-Cookies-2.0\n") - f.write(self.as_lwp_str(ignore_discard, ignore_expires)) - finally: - f.close() - - def _really_load(self, f, filename, ignore_discard, ignore_expires): - magic = f.readline() - if not self.magic_re.search(magic): - msg = ("%r does not look like a Set-Cookie3 (LWP) format " - "file" % filename) - raise LoadError(msg) - - now = time.time() - - header = "Set-Cookie3:" - boolean_attrs = ("port_spec", "path_spec", "domain_dot", - "secure", "discard") - value_attrs = ("version", - "port", "path", "domain", - "expires", - "comment", "commenturl") - - try: - while 1: - line = f.readline() - if line == "": break - if not line.startswith(header): - continue - line = line[len(header):].strip() - - for data in split_header_words([line]): - name, value = data[0] - standard = {} - rest = {} - for k in boolean_attrs: - standard[k] = False - for k, v in data[1:]: - if k is not None: - lc = k.lower() - else: - lc = None - # don't lose case distinction for unknown fields - if (lc in value_attrs) or (lc in boolean_attrs): - k = lc - if k in boolean_attrs: - if v is None: v = True - standard[k] = v - elif k in value_attrs: - standard[k] = v - else: - rest[k] = v - - h = standard.get - expires = h("expires") - discard = h("discard") - if expires is not None: - expires = iso2time(expires) - if expires is None: - discard = True - domain = h("domain") - domain_specified = domain.startswith(".") - c = Cookie(h("version"), name, value, - h("port"), h("port_spec"), - domain, domain_specified, h("domain_dot"), - h("path"), h("path_spec"), - h("secure"), - expires, - discard, - h("comment"), - h("commenturl"), - rest) - if not ignore_discard and c.discard: - continue - if not ignore_expires and c.is_expired(now): - continue - self.set_cookie(c) - - except IOError: - raise - except Exception: - _warn_unhandled_exception() - raise LoadError("invalid Set-Cookie3 format file %r: %r" % - (filename, line)) - - -class MozillaCookieJar(FileCookieJar): - """ - - WARNING: you may want to backup your browser's cookies file if you use - this class to save cookies. I *think* it works, but there have been - bugs in the past! - - This class differs from CookieJar only in the format it uses to save and - load cookies to and from a file. This class uses the Mozilla/Netscape - `cookies.txt' format. 
lynx uses this file format, too. - - Don't expect cookies saved while the browser is running to be noticed by - the browser (in fact, Mozilla on unix will overwrite your saved cookies if - you change them on disk while it's running; on Windows, you probably can't - save at all while the browser is running). - - Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to - Netscape cookies on saving. - - In particular, the cookie version and port number information is lost, - together with information about whether or not Path, Port and Discard were - specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the - domain as set in the HTTP header started with a dot (yes, I'm aware some - domains in Netscape files start with a dot and some don't -- trust me, you - really don't want to know any more about this). - - Note that though Mozilla and Netscape use the same format, they use - slightly different headers. The class saves cookies using the Netscape - header by default (Mozilla can cope with that). - - """ - magic_re = re.compile("#( Netscape)? HTTP Cookie File") - header = """\ -# Netscape HTTP Cookie File -# http://www.netscape.com/newsref/std/cookie_spec.html -# This is a generated file! Do not edit. - -""" - - def _really_load(self, f, filename, ignore_discard, ignore_expires): - now = time.time() - - magic = f.readline() - if not self.magic_re.search(magic): - f.close() - raise LoadError( - "%r does not look like a Netscape format cookies file" % - filename) - - try: - while 1: - line = f.readline() - if line == "": break - - # last field may be absent, so keep any trailing tab - if line.endswith("\n"): line = line[:-1] - - # skip comments and blank lines XXX what is $ for? - if (line.strip().startswith(("#", "$")) or - line.strip() == ""): - continue - - domain, domain_specified, path, secure, expires, name, value = \ - line.split("\t") - secure = (secure == "TRUE") - domain_specified = (domain_specified == "TRUE") - if name == "": - # cookies.txt regards 'Set-Cookie: foo' as a cookie - # with no name, whereas http.cookiejar regards it as a - # cookie with no value. 
- name = value - value = None - - initial_dot = domain.startswith(".") - assert domain_specified == initial_dot - - discard = False - if expires == "": - expires = None - discard = True - - # assume path_specified is false - c = Cookie(0, name, value, - None, False, - domain, domain_specified, initial_dot, - path, False, - secure, - expires, - discard, - None, - None, - {}) - if not ignore_discard and c.discard: - continue - if not ignore_expires and c.is_expired(now): - continue - self.set_cookie(c) - - except IOError: - raise - except Exception: - _warn_unhandled_exception() - raise LoadError("invalid Netscape format cookies file %r: %r" % - (filename, line)) - - def save(self, filename=None, ignore_discard=False, ignore_expires=False): - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - f = open(filename, "w") - try: - f.write(self.header) - now = time.time() - for cookie in self: - if not ignore_discard and cookie.discard: - continue - if not ignore_expires and cookie.is_expired(now): - continue - if cookie.secure: secure = "TRUE" - else: secure = "FALSE" - if cookie.domain.startswith("."): initial_dot = "TRUE" - else: initial_dot = "FALSE" - if cookie.expires is not None: - expires = str(cookie.expires) - else: - expires = "" - if cookie.value is None: - # cookies.txt regards 'Set-Cookie: foo' as a cookie - # with no name, whereas http.cookiejar regards it as a - # cookie with no value. - name = "" - value = cookie.name - else: - name = cookie.name - value = cookie.value - f.write( - "\t".join([cookie.domain, initial_dot, cookie.path, - secure, expires, name, value])+ - "\n") - finally: - f.close() diff --git a/pype/vendor/future/backports/http/cookies.py b/pype/vendor/future/backports/http/cookies.py deleted file mode 100644 index ae32ed7e71..0000000000 --- a/pype/vendor/future/backports/http/cookies.py +++ /dev/null @@ -1,597 +0,0 @@ -#### -# Copyright 2000 by Timothy O'Malley -# -# All Rights Reserved -# -# Permission to use, copy, modify, and distribute this software -# and its documentation for any purpose and without fee is hereby -# granted, provided that the above copyright notice appear in all -# copies and that both that copyright notice and this permission -# notice appear in supporting documentation, and that the name of -# Timothy O'Malley not be used in advertising or publicity -# pertaining to distribution of the software without specific, written -# prior permission. -# -# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS -# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR -# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS -# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -#### -# -# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp -# by Timothy O'Malley -# -# Cookie.py is a Python module for the handling of HTTP -# cookies as a Python dictionary. See RFC 2109 for more -# information on cookies. -# -# The original idea to treat Cookies as a dictionary came from -# Dave Mitchell (davem@magnet.com) in 1995, when he released the -# first version of nscookie.py. -# -#### - -r""" -http.cookies module ported to python-future from Py3.3 - -Here's a sample session to show how to use this module. 
-At the moment, this is the only documentation. - -The Basics ----------- - -Importing is easy... - - >>> from http import cookies - -Most of the time you start by creating a cookie. - - >>> C = cookies.SimpleCookie() - -Once you've created your Cookie, you can add values just as if it were -a dictionary. - - >>> C = cookies.SimpleCookie() - >>> C["fig"] = "newton" - >>> C["sugar"] = "wafer" - >>> C.output() - 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' - -Notice that the printable representation of a Cookie is the -appropriate format for a Set-Cookie: header. This is the -default behavior. You can change the header and printed -attributes by using the .output() function - - >>> C = cookies.SimpleCookie() - >>> C["rocky"] = "road" - >>> C["rocky"]["path"] = "/cookie" - >>> print(C.output(header="Cookie:")) - Cookie: rocky=road; Path=/cookie - >>> print(C.output(attrs=[], header="Cookie:")) - Cookie: rocky=road - -The load() method of a Cookie extracts cookies from a string. In a -CGI script, you would use this method to extract the cookies from the -HTTP_COOKIE environment variable. - - >>> C = cookies.SimpleCookie() - >>> C.load("chips=ahoy; vienna=finger") - >>> C.output() - 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' - -The load() method is darn-tootin smart about identifying cookies -within a string. Escaped quotation marks, nested semicolons, and other -such trickeries do not confuse it. - - >>> C = cookies.SimpleCookie() - >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') - >>> print(C) - Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" - -Each element of the Cookie also supports all of the RFC 2109 -Cookie attributes. Here's an example which sets the Path -attribute. - - >>> C = cookies.SimpleCookie() - >>> C["oreo"] = "doublestuff" - >>> C["oreo"]["path"] = "/" - >>> print(C) - Set-Cookie: oreo=doublestuff; Path=/ - -Each dictionary element has a 'value' attribute, which gives you -back the value associated with the key. - - >>> C = cookies.SimpleCookie() - >>> C["twix"] = "none for you" - >>> C["twix"].value - 'none for you' - -The SimpleCookie expects that all values should be standard strings. -Just to be sure, SimpleCookie invokes the str() builtin to convert -the value to a string, when the values are set dictionary-style. - - >>> C = cookies.SimpleCookie() - >>> C["number"] = 7 - >>> C["string"] = "seven" - >>> C["number"].value - '7' - >>> C["string"].value - 'seven' - >>> C.output() - 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' - -Finis. -""" -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future.builtins import chr, dict, int, str -from future.utils import PY2, as_native_str - -# -# Import our required modules -# -import re -re.ASCII = 0 # for py2 compatibility -import string - -__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] - -_nulljoin = ''.join -_semispacejoin = '; '.join -_spacejoin = ' '.join - -# -# Define an exception visible to External modules -# -class CookieError(Exception): - pass - - -# These quoting routines conform to the RFC2109 specification, which in -# turn references the character definitions from RFC2068. They provide -# a two-way quoting algorithm. Any non-text character is translated -# into a 4 character sequence: a forward-slash followed by the -# three-digit octal equivalent of the character. Any '\' or '"' is -# quoted with a preceeding '\' slash. 
-# -# These are taken from RFC2068 and RFC2109. -# _LegalChars is the list of chars which don't require "'s -# _Translator hash-table for fast quoting -# -_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" -_Translator = { - '\000' : '\\000', '\001' : '\\001', '\002' : '\\002', - '\003' : '\\003', '\004' : '\\004', '\005' : '\\005', - '\006' : '\\006', '\007' : '\\007', '\010' : '\\010', - '\011' : '\\011', '\012' : '\\012', '\013' : '\\013', - '\014' : '\\014', '\015' : '\\015', '\016' : '\\016', - '\017' : '\\017', '\020' : '\\020', '\021' : '\\021', - '\022' : '\\022', '\023' : '\\023', '\024' : '\\024', - '\025' : '\\025', '\026' : '\\026', '\027' : '\\027', - '\030' : '\\030', '\031' : '\\031', '\032' : '\\032', - '\033' : '\\033', '\034' : '\\034', '\035' : '\\035', - '\036' : '\\036', '\037' : '\\037', - - # Because of the way browsers really handle cookies (as opposed - # to what the RFC says) we also encode , and ; - - ',' : '\\054', ';' : '\\073', - - '"' : '\\"', '\\' : '\\\\', - - '\177' : '\\177', '\200' : '\\200', '\201' : '\\201', - '\202' : '\\202', '\203' : '\\203', '\204' : '\\204', - '\205' : '\\205', '\206' : '\\206', '\207' : '\\207', - '\210' : '\\210', '\211' : '\\211', '\212' : '\\212', - '\213' : '\\213', '\214' : '\\214', '\215' : '\\215', - '\216' : '\\216', '\217' : '\\217', '\220' : '\\220', - '\221' : '\\221', '\222' : '\\222', '\223' : '\\223', - '\224' : '\\224', '\225' : '\\225', '\226' : '\\226', - '\227' : '\\227', '\230' : '\\230', '\231' : '\\231', - '\232' : '\\232', '\233' : '\\233', '\234' : '\\234', - '\235' : '\\235', '\236' : '\\236', '\237' : '\\237', - '\240' : '\\240', '\241' : '\\241', '\242' : '\\242', - '\243' : '\\243', '\244' : '\\244', '\245' : '\\245', - '\246' : '\\246', '\247' : '\\247', '\250' : '\\250', - '\251' : '\\251', '\252' : '\\252', '\253' : '\\253', - '\254' : '\\254', '\255' : '\\255', '\256' : '\\256', - '\257' : '\\257', '\260' : '\\260', '\261' : '\\261', - '\262' : '\\262', '\263' : '\\263', '\264' : '\\264', - '\265' : '\\265', '\266' : '\\266', '\267' : '\\267', - '\270' : '\\270', '\271' : '\\271', '\272' : '\\272', - '\273' : '\\273', '\274' : '\\274', '\275' : '\\275', - '\276' : '\\276', '\277' : '\\277', '\300' : '\\300', - '\301' : '\\301', '\302' : '\\302', '\303' : '\\303', - '\304' : '\\304', '\305' : '\\305', '\306' : '\\306', - '\307' : '\\307', '\310' : '\\310', '\311' : '\\311', - '\312' : '\\312', '\313' : '\\313', '\314' : '\\314', - '\315' : '\\315', '\316' : '\\316', '\317' : '\\317', - '\320' : '\\320', '\321' : '\\321', '\322' : '\\322', - '\323' : '\\323', '\324' : '\\324', '\325' : '\\325', - '\326' : '\\326', '\327' : '\\327', '\330' : '\\330', - '\331' : '\\331', '\332' : '\\332', '\333' : '\\333', - '\334' : '\\334', '\335' : '\\335', '\336' : '\\336', - '\337' : '\\337', '\340' : '\\340', '\341' : '\\341', - '\342' : '\\342', '\343' : '\\343', '\344' : '\\344', - '\345' : '\\345', '\346' : '\\346', '\347' : '\\347', - '\350' : '\\350', '\351' : '\\351', '\352' : '\\352', - '\353' : '\\353', '\354' : '\\354', '\355' : '\\355', - '\356' : '\\356', '\357' : '\\357', '\360' : '\\360', - '\361' : '\\361', '\362' : '\\362', '\363' : '\\363', - '\364' : '\\364', '\365' : '\\365', '\366' : '\\366', - '\367' : '\\367', '\370' : '\\370', '\371' : '\\371', - '\372' : '\\372', '\373' : '\\373', '\374' : '\\374', - '\375' : '\\375', '\376' : '\\376', '\377' : '\\377' - } - -def _quote(str, LegalChars=_LegalChars): - r"""Quote a string for use in a cookie header. 
- - If the string does not need to be double-quoted, then just return the - string. Otherwise, surround the string in doublequotes and quote - (with a \) special characters. - """ - if all(c in LegalChars for c in str): - return str - else: - return '"' + _nulljoin(_Translator.get(s, s) for s in str) + '"' - - -_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") -_QuotePatt = re.compile(r"[\\].") - -def _unquote(mystr): - # If there aren't any doublequotes, - # then there can't be any special characters. See RFC 2109. - if len(mystr) < 2: - return mystr - if mystr[0] != '"' or mystr[-1] != '"': - return mystr - - # We have to assume that we must decode this string. - # Down to work. - - # Remove the "s - mystr = mystr[1:-1] - - # Check for special sequences. Examples: - # \012 --> \n - # \" --> " - # - i = 0 - n = len(mystr) - res = [] - while 0 <= i < n: - o_match = _OctalPatt.search(mystr, i) - q_match = _QuotePatt.search(mystr, i) - if not o_match and not q_match: # Neither matched - res.append(mystr[i:]) - break - # else: - j = k = -1 - if o_match: - j = o_match.start(0) - if q_match: - k = q_match.start(0) - if q_match and (not o_match or k < j): # QuotePatt matched - res.append(mystr[i:k]) - res.append(mystr[k+1]) - i = k + 2 - else: # OctalPatt matched - res.append(mystr[i:j]) - res.append(chr(int(mystr[j+1:j+4], 8))) - i = j + 4 - return _nulljoin(res) - -# The _getdate() routine is used to set the expiration time in the cookie's HTTP -# header. By default, _getdate() returns the current time in the appropriate -# "expires" format for a Set-Cookie header. The one optional argument is an -# offset from now, in seconds. For example, an offset of -3600 means "one hour -# ago". The offset may be a floating point number. -# - -_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - -_monthname = [None, - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - -def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname): - from time import gmtime, time - now = time() - year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future) - return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \ - (weekdayname[wd], day, monthname[month], year, hh, mm, ss) - - -class Morsel(dict): - """A class to hold ONE (key, value) pair. - - In a cookie, each such pair may have several attributes, so this class is - used to keep the attributes associated with the appropriate key,value pair. - This class also includes a coded_value attribute, which is used to hold - the network representation of the value. This is most useful when Python - objects are pickled for network transit. - """ - # RFC 2109 lists these attributes as reserved: - # path comment domain - # max-age secure version - # - # For historical reasons, these attributes are also reserved: - # expires - # - # This is an extension from Microsoft: - # httponly - # - # This dictionary provides a mapping from the lowercase - # variant on the left to the appropriate traditional - # formatting on the right. 
-    _reserved = {
-        "expires"  : "expires",
-        "path"     : "Path",
-        "comment"  : "Comment",
-        "domain"   : "Domain",
-        "max-age"  : "Max-Age",
-        "secure"   : "secure",
-        "httponly" : "httponly",
-        "version"  : "Version",
-        }
-
-    _flags = set(['secure', 'httponly'])
-
-    def __init__(self):
-        # Set defaults
-        self.key = self.value = self.coded_value = None
-
-        # Set default attributes
-        for key in self._reserved:
-            dict.__setitem__(self, key, "")
-
-    def __setitem__(self, K, V):
-        K = K.lower()
-        if not K in self._reserved:
-            raise CookieError("Invalid Attribute %s" % K)
-        dict.__setitem__(self, K, V)
-
-    def isReservedKey(self, K):
-        return K.lower() in self._reserved
-
-    def set(self, key, val, coded_val, LegalChars=_LegalChars):
-        # First we verify that the key isn't a reserved word
-        # Second we make sure it only contains legal characters
-        if key.lower() in self._reserved:
-            raise CookieError("Attempt to set a reserved key: %s" % key)
-        if any(c not in LegalChars for c in key):
-            raise CookieError("Illegal key value: %s" % key)
-
-        # It's a good key, so save it.
-        self.key = key
-        self.value = val
-        self.coded_value = coded_val
-
-    def output(self, attrs=None, header="Set-Cookie:"):
-        return "%s %s" % (header, self.OutputString(attrs))
-
-    __str__ = output
-
-    @as_native_str()
-    def __repr__(self):
-        if PY2 and isinstance(self.value, unicode):
-            val = str(self.value)    # make it a newstr to remove the u prefix
-        else:
-            val = self.value
-        return '<%s: %s=%s>' % (self.__class__.__name__,
-                                str(self.key), repr(val))
-
-    def js_output(self, attrs=None):
-        # Print javascript
-        return """
-        <script type="text/javascript">
-        <!-- begin hiding
-        document.cookie = \"%s\";
-        // end hiding -->
-        </script>
-        """ % (self.OutputString(attrs).replace('"', r'\"'))
-
-    def OutputString(self, attrs=None):
-        # Build up our result
-        #
-        result = []
-        append = result.append
-
-        # First, the key=value pair
-        append("%s=%s" % (self.key, self.coded_value))
-
-        # Now add any defined attributes
-        if attrs is None:
-            attrs = self._reserved
-        items = sorted(self.items())
-        for key, value in items:
-            if value == "":
-                continue
-            if key not in attrs:
-                continue
-            if key == "expires" and isinstance(value, int):
-                append("%s=%s" % (self._reserved[key], _getdate(value)))
-            elif key == "max-age" and isinstance(value, int):
-                append("%s=%d" % (self._reserved[key], value))
-            elif key == "secure":
-                append(str(self._reserved[key]))
-            elif key == "httponly":
-                append(str(self._reserved[key]))
-            else:
-                append("%s=%s" % (self._reserved[key], value))
-
-        # Return the result
-        return _semispacejoin(result)
-
-
-#
-# Pattern for finding cookie
-#
-# This used to be strict parsing based on the RFC2109 and RFC2068
-# specifications.  I have since discovered that MSIE 3.0x doesn't
-# follow the character rules outlined in those specs.  As a
-# result, the parsing rules here are less strict.
-#
-
-_LegalCharsPatt  = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
-_CookiePattern = re.compile(r"""
-    (?x)                           # This is a verbose pattern
-    (?P<key>                       # Start of group 'key'
-    """ + _LegalCharsPatt + r"""+?   # Any word of at least one letter
-    )                              # End of group 'key'
-    (                              # Optional group: there may not be a value.
-    \s*=\s*                          # Equal Sign
-    (?P<val>                         # Start of group 'val'
-    "(?:[^\\"]|\\.)*"                  # Any doublequoted string
-    |                                  # or
-    \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT  # Special case for "expires" attr
-    |                                  # or
-    """ + _LegalCharsPatt + r"""*      # Any word or empty string
-    )                                # End of group 'val'
-    )?                             # End of optional value group
-    \s*                            # Any number of spaces.
-    (\s+|;|$)                      # Ending either at space, semicolon, or EOS.
-    """, re.ASCII)                 # May be removed if safe.
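
The two-way quoting scheme implemented by _quote and _unquote above is easiest
to see end to end. A minimal sketch, run against the stdlib http.cookies module
that this vendored copy mirrors; the cookie name and value are illustrative:

    from http.cookies import SimpleCookie

    c = SimpleCookie()
    c["flavor"] = "choc;chip"      # ';' is not in _LegalChars, so it is quoted
    print(c.output())
    # Set-Cookie: flavor="choc\073chip"   -- ';' becomes the octal escape \073

    parsed = SimpleCookie()
    parsed.load('flavor="choc\\073chip"')
    print(parsed["flavor"].value)  # 'choc;chip' -- _unquote reverses _quote
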
- - -# At long last, here is the cookie class. Using this class is almost just like -# using a dictionary. See this module's docstring for example usage. -# -class BaseCookie(dict): - """A container class for a set of Morsels.""" - - def value_decode(self, val): - """real_value, coded_value = value_decode(STRING) - Called prior to setting a cookie's value from the network - representation. The VALUE is the value read from HTTP - header. - Override this function to modify the behavior of cookies. - """ - return val, val - - def value_encode(self, val): - """real_value, coded_value = value_encode(VALUE) - Called prior to setting a cookie's value from the dictionary - representation. The VALUE is the value being assigned. - Override this function to modify the behavior of cookies. - """ - strval = str(val) - return strval, strval - - def __init__(self, input=None): - if input: - self.load(input) - - def __set(self, key, real_value, coded_value): - """Private method for setting a cookie's value""" - M = self.get(key, Morsel()) - M.set(key, real_value, coded_value) - dict.__setitem__(self, key, M) - - def __setitem__(self, key, value): - """Dictionary style assignment.""" - rval, cval = self.value_encode(value) - self.__set(key, rval, cval) - - def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): - """Return a string suitable for HTTP.""" - result = [] - items = sorted(self.items()) - for key, value in items: - result.append(value.output(attrs, header)) - return sep.join(result) - - __str__ = output - - @as_native_str() - def __repr__(self): - l = [] - items = sorted(self.items()) - for key, value in items: - if PY2 and isinstance(value.value, unicode): - val = str(value.value) # make it a newstr to remove the u prefix - else: - val = value.value - l.append('%s=%s' % (str(key), repr(val))) - return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l)) - - def js_output(self, attrs=None): - """Return a string suitable for JavaScript.""" - result = [] - items = sorted(self.items()) - for key, value in items: - result.append(value.js_output(attrs)) - return _nulljoin(result) - - def load(self, rawdata): - """Load cookies from a string (presumably HTTP_COOKIE) or - from a dictionary. Loading cookies from a dictionary 'd' - is equivalent to calling: - map(Cookie.__setitem__, d.keys(), d.values()) - """ - if isinstance(rawdata, str): - self.__parse_string(rawdata) - else: - # self.update() wouldn't call our custom __setitem__ - for key, value in rawdata.items(): - self[key] = value - return - - def __parse_string(self, mystr, patt=_CookiePattern): - i = 0 # Our starting point - n = len(mystr) # Length of string - M = None # current morsel - - while 0 <= i < n: - # Start looking for a cookie - match = patt.search(mystr, i) - if not match: - # No more cookies - break - - key, value = match.group("key"), match.group("val") - - i = match.end(0) - - # Parse the key, value in case it's metainfo - if key[0] == "$": - # We ignore attributes which pertain to the cookie - # mechanism as a whole. See RFC 2109. - # (Does anyone care?) - if M: - M[key[1:]] = value - elif key.lower() in Morsel._reserved: - if M: - if value is None: - if key.lower() in Morsel._flags: - M[key] = True - else: - M[key] = _unquote(value) - elif value is not None: - rval, cval = self.value_decode(value) - self.__set(key, rval, cval) - M = self[key] - - -class SimpleCookie(BaseCookie): - """ - SimpleCookie supports strings as cookie values. 
When setting - the value using the dictionary assignment notation, SimpleCookie - calls the builtin str() to convert the value to a string. Values - received from HTTP are kept as strings. - """ - def value_decode(self, val): - return _unquote(val), val - - def value_encode(self, val): - strval = str(val) - return strval, _quote(strval) diff --git a/pype/vendor/future/backports/http/server.py b/pype/vendor/future/backports/http/server.py deleted file mode 100644 index b1c11e0c73..0000000000 --- a/pype/vendor/future/backports/http/server.py +++ /dev/null @@ -1,1226 +0,0 @@ -"""HTTP server classes. - -From Python 3.3 - -Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see -SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, -and CGIHTTPRequestHandler for CGI scripts. - -It does, however, optionally implement HTTP/1.1 persistent connections, -as of version 0.3. - -Notes on CGIHTTPRequestHandler ------------------------------- - -This class implements GET and POST requests to cgi-bin scripts. - -If the os.fork() function is not present (e.g. on Windows), -subprocess.Popen() is used as a fallback, with slightly altered semantics. - -In all cases, the implementation is intentionally naive -- all -requests are executed synchronously. - -SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL --- it may execute arbitrary Python code or external programs. - -Note that status code 200 is sent prior to execution of a CGI script, so -scripts cannot send other status codes such as 302 (redirect). - -XXX To do: - -- log requests even later (to capture byte count) -- log user-agent header and other interesting goodies -- send error log to separate file -""" - -from __future__ import (absolute_import, division, - print_function, unicode_literals) -from future import utils -from future.builtins import * - - -# See also: -# -# HTTP Working Group T. Berners-Lee -# INTERNET-DRAFT R. T. Fielding -# H. Frystyk Nielsen -# Expires September 8, 1995 March 8, 1995 -# -# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt -# -# and -# -# Network Working Group R. Fielding -# Request for Comments: 2616 et al -# Obsoletes: 2068 June 1999 -# Category: Standards Track -# -# URL: http://www.faqs.org/rfcs/rfc2616.html - -# Log files -# --------- -# -# Here's a quote from the NCSA httpd docs about log file format. -# -# | The logfile format is as follows. Each line consists of: -# | -# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb -# | -# | host: Either the DNS name or the IP number of the remote client -# | rfc931: Any information returned by identd for this person, -# | - otherwise. -# | authuser: If user sent a userid for authentication, the user name, -# | - otherwise. -# | DD: Day -# | Mon: Month (calendar name) -# | YYYY: Year -# | hh: hour (24-hour format, the machine's timezone) -# | mm: minutes -# | ss: seconds -# | request: The first line of the HTTP request as sent by the client. -# | ddd: the status code returned by the server, - if not available. -# | bbbb: the total number of bytes sent, -# | *not including the HTTP/1.0 header*, - if not available -# | -# | You can determine the name of the file accessed through request. -# -# (Actually, the latter is only true if you know the server configuration -# at the time the request was made!) 
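
The NCSA log line layout quoted above can be parsed mechanically. A small
sketch under those field definitions; the sample line and group names are
illustrative only:

    import re

    LOG_LINE = re.compile(
        r'(?P<host>\S+) (?P<rfc931>\S+) (?P<authuser>\S+) '
        r'\[(?P<when>[^\]]+)\] "(?P<request>[^"]*)" '
        r'(?P<status>\S+) (?P<bytes>\S+)'
    )

    sample = '203.0.113.7 - - [10/Oct/2019:13:55:36] "GET /index.html HTTP/1.0" 200 2326'
    m = LOG_LINE.match(sample)
    print(m.group("host"), m.group("status"), m.group("bytes"))
    # 203.0.113.7 200 2326
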
-
-__version__ = "0.6"
-
-__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
-
-from future.backports import html
-from future.backports.http import client as http_client
-from future.backports.urllib import parse as urllib_parse
-from future.backports import socketserver
-
-import io
-import mimetypes
-import os
-import posixpath
-import select
-import shutil
-import socket # For gethostbyaddr()
-import sys
-import time
-import copy
-import argparse
-
-
-# Default error message template
-DEFAULT_ERROR_MESSAGE = """\
-<html>
-    <head>
-        <meta http-equiv="Content-Type" content="text/html;charset=utf-8">
-        <title>Error response</title>
-    </head>
-    <body>
-        <h1>Error response</h1>
-        <p>Error code: %(code)d</p>
-        <p>Message: %(message)s.</p>
-        <p>Error code explanation: %(code)s - %(explain)s.</p>
-    </body>
-</html>
-"""
-
-DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8"
-
-def _quote_html(html):
-    return html.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
-
-class HTTPServer(socketserver.TCPServer):
-
-    allow_reuse_address = 1    # Seems to make sense in testing environment
-
-    def server_bind(self):
-        """Override server_bind to store the server name."""
-        socketserver.TCPServer.server_bind(self)
-        host, port = self.socket.getsockname()[:2]
-        self.server_name = socket.getfqdn(host)
-        self.server_port = port
-
-
-class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
-
-    """HTTP request handler base class.
-
-    The following explanation of HTTP serves to guide you through the
-    code as well as to expose any misunderstandings I may have about
-    HTTP (so you don't need to read the code to figure out I'm wrong
-    :-).
-
-    HTTP (HyperText Transfer Protocol) is an extensible protocol on
-    top of a reliable stream transport (e.g. TCP/IP).  The protocol
-    recognizes three parts to a request:
-
-    1. One line identifying the request type and path
-    2. An optional set of RFC-822-style headers
-    3. An optional data part
-
-    The headers and data are separated by a blank line.
-
-    The first line of the request has the form
-
-    <command> <path> <version>
-
-    where <command> is a (case-sensitive) keyword such as GET or POST,
-    <path> is a string containing path information for the request,
-    and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
-    <path> is encoded using the URL encoding scheme (using %xx to signify
-    the ASCII character with hex code xx).
-
-    The specification specifies that lines are separated by CRLF but
-    for compatibility with the widest range of clients recommends
-    servers also handle LF.  Similarly, whitespace in the request line
-    is treated sensibly (allowing multiple spaces between components
-    and allowing trailing whitespace).
-
-    Similarly, for output, lines ought to be separated by CRLF pairs
-    but most clients grok LF characters just fine.
-
-    If the first line of the request has the form
-
-    <command> <path>
-
-    (i.e. <version> is left out) then this is assumed to be an HTTP
-    0.9 request; this form has no optional headers and data part and
-    the reply consists of just the data.
-
-    The reply form of the HTTP 1.x protocol again has three parts:
-
-    1. One line giving the response code
-    2. An optional set of RFC-822-style headers
-    3. The data
-
-    Again, the headers and data are separated by a blank line.
-
-    The response code line has the form
-
-    <version> <responsecode> <responsestring>
-
-    where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
-    <responsecode> is a 3-digit response code indicating success or
-    failure of the request, and <responsestring> is an optional
-    human-readable string explaining what the response code means.
-
-    This server parses the request and the headers, and then calls a
-    function specific to the request type (<command>).  Specifically,
-    a request SPAM will be handled by a method do_SPAM().  If no
-    such method exists the server sends an error response to the
-    client.  If it exists, it is called with no arguments:
-
-    do_SPAM()
-
-    Note that the request name is case sensitive (i.e. SPAM and spam
-    are different requests).
-
-    The various request details are stored in instance variables:
-
-    - client_address is the client IP address in the form (host,
-      port);
-
-    - command, path and version are the broken-down request line;
-
-    - headers is an instance of email.message.Message (or a derived
-      class) containing the header information;
-
-    - rfile is a file object open for reading positioned at the
-      start of the optional input data part;
-
-    - wfile is a file object open for writing.
-
-    IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
-
-    The first thing to be written must be the response line.  Then
-    follow 0 or more header lines, then a blank line, and then the
-    actual data (if any).  The meaning of the header lines depends on
-    the command executed by the server; in most cases, when data is
-    returned, there should be at least one header line of the form
-
-    Content-type: <type>/<subtype>
-
-    where <type> and <subtype> should be registered MIME types,
-    e.g. "text/html" or "text/plain".
-
-    """
-
-    # The Python system version, truncated to its first component.
-    sys_version = "Python/" + sys.version.split()[0]
-
-    # The server software version.  You may want to override this.
-    # The format is multiple whitespace-separated strings,
-    # where each string is of the form name[/version].
-    server_version = "BaseHTTP/" + __version__
-
-    error_message_format = DEFAULT_ERROR_MESSAGE
-    error_content_type = DEFAULT_ERROR_CONTENT_TYPE
-
-    # The default request version.  This only affects responses up until
-    # the point where the request line is parsed, so it mainly decides what
-    # the client gets back when sending a malformed request line.
-    # Most web servers default to HTTP 0.9, i.e. don't send a status line.
-    default_request_version = "HTTP/0.9"
-
-    def parse_request(self):
-        """Parse a request (internal).
-
-        The request should be stored in self.raw_requestline; the results
-        are in self.command, self.path, self.request_version and
-        self.headers.
-
-        Return True for success, False for failure; on failure, an
-        error is sent back.
-
-        """
-        self.command = None  # set in case of error on the first line
-        self.request_version = version = self.default_request_version
-        self.close_connection = 1
-        requestline = str(self.raw_requestline, 'iso-8859-1')
-        requestline = requestline.rstrip('\r\n')
-        self.requestline = requestline
-        words = requestline.split()
-        if len(words) == 3:
-            command, path, version = words
-            if version[:5] != 'HTTP/':
-                self.send_error(400, "Bad request version (%r)" % version)
-                return False
-            try:
-                base_version_number = version.split('/', 1)[1]
-                version_number = base_version_number.split(".")
-                # RFC 2145 section 3.1 says there can be only one "." and
-                #   - major and minor numbers MUST be treated as
-                #      separate integers;
-                #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
-                #      turn is lower than HTTP/12.3;
-                #   - Leading zeros MUST be ignored by recipients.
-                if len(version_number) != 2:
-                    raise ValueError
-                version_number = int(version_number[0]), int(version_number[1])
-            except (ValueError, IndexError):
-                self.send_error(400, "Bad request version (%r)" % version)
-                return False
-            if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
-                self.close_connection = 0
-            if version_number >= (2, 0):
-                self.send_error(505,
-                                "Invalid HTTP Version (%s)" % base_version_number)
-                return False
-        elif len(words) == 2:
-            command, path = words
-            self.close_connection = 1
-            if command != 'GET':
-                self.send_error(400,
-                                "Bad HTTP/0.9 request type (%r)" % command)
-                return False
-        elif not words:
-            return False
-        else:
-            self.send_error(400, "Bad request syntax (%r)" % requestline)
-            return False
-        self.command, self.path, self.request_version = command, path, version
-
-        # Examine the headers and look for a Connection directive.
- try: - self.headers = http_client.parse_headers(self.rfile, - _class=self.MessageClass) - except http_client.LineTooLong: - self.send_error(400, "Line too long") - return False - - conntype = self.headers.get('Connection', "") - if conntype.lower() == 'close': - self.close_connection = 1 - elif (conntype.lower() == 'keep-alive' and - self.protocol_version >= "HTTP/1.1"): - self.close_connection = 0 - # Examine the headers and look for an Expect directive - expect = self.headers.get('Expect', "") - if (expect.lower() == "100-continue" and - self.protocol_version >= "HTTP/1.1" and - self.request_version >= "HTTP/1.1"): - if not self.handle_expect_100(): - return False - return True - - def handle_expect_100(self): - """Decide what to do with an "Expect: 100-continue" header. - - If the client is expecting a 100 Continue response, we must - respond with either a 100 Continue or a final response before - waiting for the request body. The default is to always respond - with a 100 Continue. You can behave differently (for example, - reject unauthorized requests) by overriding this method. - - This method should either return True (possibly after sending - a 100 Continue response) or send an error response and return - False. - - """ - self.send_response_only(100) - self.flush_headers() - return True - - def handle_one_request(self): - """Handle a single HTTP request. - - You normally don't need to override this method; see the class - __doc__ string for information on how to handle specific HTTP - commands such as GET and POST. - - """ - try: - self.raw_requestline = self.rfile.readline(65537) - if len(self.raw_requestline) > 65536: - self.requestline = '' - self.request_version = '' - self.command = '' - self.send_error(414) - return - if not self.raw_requestline: - self.close_connection = 1 - return - if not self.parse_request(): - # An error code has been sent, just exit - return - mname = 'do_' + self.command - if not hasattr(self, mname): - self.send_error(501, "Unsupported method (%r)" % self.command) - return - method = getattr(self, mname) - method() - self.wfile.flush() #actually send the response if not already done. - except socket.timeout as e: - #a read or a write timed out. Discard this connection - self.log_error("Request timed out: %r", e) - self.close_connection = 1 - return - - def handle(self): - """Handle multiple requests if necessary.""" - self.close_connection = 1 - - self.handle_one_request() - while not self.close_connection: - self.handle_one_request() - - def send_error(self, code, message=None): - """Send and log an error reply. - - Arguments are the error code, and a detailed message. - The detailed message defaults to the short entry matching the - response code. - - This sends an error response (so it must be called before any - output has been generated), logs the error, and finally sends - a piece of HTML explaining the error to the user. - - """ - - try: - shortmsg, longmsg = self.responses[code] - except KeyError: - shortmsg, longmsg = '???', '???' 
- if message is None: - message = shortmsg - explain = longmsg - self.log_error("code %d, message %s", code, message) - # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201) - content = (self.error_message_format % - {'code': code, 'message': _quote_html(message), 'explain': explain}) - self.send_response(code, message) - self.send_header("Content-Type", self.error_content_type) - self.send_header('Connection', 'close') - self.end_headers() - if self.command != 'HEAD' and code >= 200 and code not in (204, 304): - self.wfile.write(content.encode('UTF-8', 'replace')) - - def send_response(self, code, message=None): - """Add the response header to the headers buffer and log the - response code. - - Also send two standard headers with the server software - version and the current date. - - """ - self.log_request(code) - self.send_response_only(code, message) - self.send_header('Server', self.version_string()) - self.send_header('Date', self.date_time_string()) - - def send_response_only(self, code, message=None): - """Send the response header only.""" - if message is None: - if code in self.responses: - message = self.responses[code][0] - else: - message = '' - if self.request_version != 'HTTP/0.9': - if not hasattr(self, '_headers_buffer'): - self._headers_buffer = [] - self._headers_buffer.append(("%s %d %s\r\n" % - (self.protocol_version, code, message)).encode( - 'latin-1', 'strict')) - - def send_header(self, keyword, value): - """Send a MIME header to the headers buffer.""" - if self.request_version != 'HTTP/0.9': - if not hasattr(self, '_headers_buffer'): - self._headers_buffer = [] - self._headers_buffer.append( - ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict')) - - if keyword.lower() == 'connection': - if value.lower() == 'close': - self.close_connection = 1 - elif value.lower() == 'keep-alive': - self.close_connection = 0 - - def end_headers(self): - """Send the blank line ending the MIME headers.""" - if self.request_version != 'HTTP/0.9': - self._headers_buffer.append(b"\r\n") - self.flush_headers() - - def flush_headers(self): - if hasattr(self, '_headers_buffer'): - self.wfile.write(b"".join(self._headers_buffer)) - self._headers_buffer = [] - - def log_request(self, code='-', size='-'): - """Log an accepted request. - - This is called by send_response(). - - """ - - self.log_message('"%s" %s %s', - self.requestline, str(code), str(size)) - - def log_error(self, format, *args): - """Log an error. - - This is called when a request cannot be fulfilled. By - default it passes the message on to log_message(). - - Arguments are the same as for log_message(). - - XXX This should go to the separate error log. - - """ - - self.log_message(format, *args) - - def log_message(self, format, *args): - """Log an arbitrary message. - - This is used by all other logging functions. Override - it if you have specific logging wishes. - - The first argument, FORMAT, is a format string for the - message to be logged. If the format string contains - any % escapes requiring parameters, they should be - specified as subsequent arguments (it's just like - printf!). - - The client ip and current date/time are prefixed to - every message. 
- - """ - - sys.stderr.write("%s - - [%s] %s\n" % - (self.address_string(), - self.log_date_time_string(), - format%args)) - - def version_string(self): - """Return the server software version string.""" - return self.server_version + ' ' + self.sys_version - - def date_time_string(self, timestamp=None): - """Return the current date and time formatted for a message header.""" - if timestamp is None: - timestamp = time.time() - year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp) - s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( - self.weekdayname[wd], - day, self.monthname[month], year, - hh, mm, ss) - return s - - def log_date_time_string(self): - """Return the current time formatted for logging.""" - now = time.time() - year, month, day, hh, mm, ss, x, y, z = time.localtime(now) - s = "%02d/%3s/%04d %02d:%02d:%02d" % ( - day, self.monthname[month], year, hh, mm, ss) - return s - - weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - - monthname = [None, - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - - def address_string(self): - """Return the client address.""" - - return self.client_address[0] - - # Essentially static class variables - - # The version of the HTTP protocol we support. - # Set this to HTTP/1.1 to enable automatic keepalive - protocol_version = "HTTP/1.0" - - # MessageClass used to parse headers - MessageClass = http_client.HTTPMessage - - # Table mapping response codes to messages; entries have the - # form {code: (shortmessage, longmessage)}. - # See RFC 2616 and 6585. - responses = { - 100: ('Continue', 'Request received, please continue'), - 101: ('Switching Protocols', - 'Switching to new protocol; obey Upgrade header'), - - 200: ('OK', 'Request fulfilled, document follows'), - 201: ('Created', 'Document created, URL follows'), - 202: ('Accepted', - 'Request accepted, processing continues off-line'), - 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), - 204: ('No Content', 'Request fulfilled, nothing follows'), - 205: ('Reset Content', 'Clear input form for further input.'), - 206: ('Partial Content', 'Partial content follows.'), - - 300: ('Multiple Choices', - 'Object has several resources -- see URI list'), - 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), - 302: ('Found', 'Object moved temporarily -- see URI list'), - 303: ('See Other', 'Object moved -- see Method and URL list'), - 304: ('Not Modified', - 'Document has not changed since given time'), - 305: ('Use Proxy', - 'You must use proxy specified in Location to access this ' - 'resource.'), - 307: ('Temporary Redirect', - 'Object moved temporarily -- see URI list'), - - 400: ('Bad Request', - 'Bad request syntax or unsupported method'), - 401: ('Unauthorized', - 'No permission -- see authorization schemes'), - 402: ('Payment Required', - 'No payment -- see charging schemes'), - 403: ('Forbidden', - 'Request forbidden -- authorization will not help'), - 404: ('Not Found', 'Nothing matches the given URI'), - 405: ('Method Not Allowed', - 'Specified method is invalid for this resource.'), - 406: ('Not Acceptable', 'URI not available in preferred format.'), - 407: ('Proxy Authentication Required', 'You must authenticate with ' - 'this proxy before proceeding.'), - 408: ('Request Timeout', 'Request timed out; try again later.'), - 409: ('Conflict', 'Request conflict.'), - 410: ('Gone', - 'URI no longer exists and has been permanently removed.'), - 411: ('Length Required', 'Client must specify 
Content-Length.'), - 412: ('Precondition Failed', 'Precondition in headers is false.'), - 413: ('Request Entity Too Large', 'Entity is too large.'), - 414: ('Request-URI Too Long', 'URI is too long.'), - 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), - 416: ('Requested Range Not Satisfiable', - 'Cannot satisfy request range.'), - 417: ('Expectation Failed', - 'Expect condition could not be satisfied.'), - 428: ('Precondition Required', - 'The origin server requires the request to be conditional.'), - 429: ('Too Many Requests', 'The user has sent too many requests ' - 'in a given amount of time ("rate limiting").'), - 431: ('Request Header Fields Too Large', 'The server is unwilling to ' - 'process the request because its header fields are too large.'), - - 500: ('Internal Server Error', 'Server got itself in trouble'), - 501: ('Not Implemented', - 'Server does not support this operation'), - 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), - 503: ('Service Unavailable', - 'The server cannot process the request due to a high load'), - 504: ('Gateway Timeout', - 'The gateway server did not receive a timely response'), - 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), - 511: ('Network Authentication Required', - 'The client needs to authenticate to gain network access.'), - } - - -class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): - - """Simple HTTP request handler with GET and HEAD commands. - - This serves files from the current directory and any of its - subdirectories. The MIME type for files is determined by - calling the .guess_type() method. - - The GET and HEAD requests are identical except that the HEAD - request omits the actual contents of the file. - - """ - - server_version = "SimpleHTTP/" + __version__ - - def do_GET(self): - """Serve a GET request.""" - f = self.send_head() - if f: - self.copyfile(f, self.wfile) - f.close() - - def do_HEAD(self): - """Serve a HEAD request.""" - f = self.send_head() - if f: - f.close() - - def send_head(self): - """Common code for GET and HEAD commands. - - This sends the response code and MIME headers. - - Return value is either a file object (which has to be copied - to the outputfile by the caller unless the command was HEAD, - and must be closed by the caller under all circumstances), or - None, in which case the caller has nothing further to do. - - """ - path = self.translate_path(self.path) - f = None - if os.path.isdir(path): - if not self.path.endswith('/'): - # redirect browser - doing basically what apache does - self.send_response(301) - self.send_header("Location", self.path + "/") - self.end_headers() - return None - for index in "index.html", "index.htm": - index = os.path.join(path, index) - if os.path.exists(index): - path = index - break - else: - return self.list_directory(path) - ctype = self.guess_type(path) - try: - f = open(path, 'rb') - except IOError: - self.send_error(404, "File not found") - return None - self.send_response(200) - self.send_header("Content-type", ctype) - fs = os.fstat(f.fileno()) - self.send_header("Content-Length", str(fs[6])) - self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) - self.end_headers() - return f - - def list_directory(self, path): - """Helper to produce a directory listing (absent index.html). - - Return value is either a file object, or None (indicating an - error). In either case, the headers are sent, making the - interface the same as for send_head(). 
-
-        """
-        try:
-            list = os.listdir(path)
-        except os.error:
-            self.send_error(404, "No permission to list directory")
-            return None
-        list.sort(key=lambda a: a.lower())
-        r = []
-        displaypath = html.escape(urllib_parse.unquote(self.path))
-        enc = sys.getfilesystemencoding()
-        title = 'Directory listing for %s' % displaypath
-        r.append('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">')
-        r.append('<html>\n<head>')
-        r.append('<meta http-equiv="Content-Type" '
-                 'content="text/html; charset=%s">' % enc)
-        r.append('<title>%s</title>\n</head>' % title)
-        r.append('<body>\n<h1>%s</h1>\n' % title)
-        r.append('<hr>\n<ul>\n')
-        for name in list:
-            fullname = os.path.join(path, name)
-            displayname = linkname = name
-            # Append / for directories or @ for symbolic links
-            if os.path.isdir(fullname):
-                displayname = name + "/"
-                linkname = name + "/"
-            if os.path.islink(fullname):
-                displayname = name + "@"
-                # Note: a link to a directory displays with @ and links with /
-            r.append('<li><a href="%s">%s</a>\n'
-                    % (urllib_parse.quote(linkname), html.escape(displayname)))
-            # # Use this instead:
-            # r.append('<li><a href="%s">%s</a>\n'
-            #         % (urllib.quote(linkname), cgi.escape(displayname)))
-        r.append('</ul>\n<hr>\n</body>\n</html>\n')
-        encoded = '\n'.join(r).encode(enc)
-        f = io.BytesIO()
-        f.write(encoded)
-        f.seek(0)
-        self.send_response(200)
-        self.send_header("Content-type", "text/html; charset=%s" % enc)
-        self.send_header("Content-Length", str(len(encoded)))
-        self.end_headers()
-        return f
-
-    def translate_path(self, path):
-        """Translate a /-separated PATH to the local filename syntax.
-
-        Components that mean special things to the local file system
-        (e.g. drive or directory names) are ignored.  (XXX They should
-        probably be diagnosed.)
-
-        """
-        # abandon query parameters
-        path = path.split('?',1)[0]
-        path = path.split('#',1)[0]
-        path = posixpath.normpath(urllib_parse.unquote(path))
-        words = path.split('/')
-        words = filter(None, words)
-        path = os.getcwd()
-        for word in words:
-            drive, word = os.path.splitdrive(word)
-            head, word = os.path.split(word)
-            if word in (os.curdir, os.pardir): continue
-            path = os.path.join(path, word)
-        return path
-
-    def copyfile(self, source, outputfile):
-        """Copy all data between two file objects.
-
-        The SOURCE argument is a file object open for reading
-        (or anything with a read() method) and the DESTINATION
-        argument is a file object open for writing (or
-        anything with a write() method).
-
-        The only reason for overriding this would be to change
-        the block size or perhaps to replace newlines by CRLF
-        -- note however that the default server uses this
-        to copy binary data as well.
-
-        """
-        shutil.copyfileobj(source, outputfile)
-
-    def guess_type(self, path):
-        """Guess the type of a file.
-
-        Argument is a PATH (a filename).
-
-        Return value is a string of the form type/subtype,
-        usable for a MIME Content-type header.
-
-        The default implementation looks the file's extension
-        up in the table self.extensions_map, using application/octet-stream
-        as a default; however it would be permissible (if
-        slow) to look inside the data to make a better guess.
-
-        """
-
-        base, ext = posixpath.splitext(path)
-        if ext in self.extensions_map:
-            return self.extensions_map[ext]
-        ext = ext.lower()
-        if ext in self.extensions_map:
-            return self.extensions_map[ext]
-        else:
-            return self.extensions_map['']
-
-    if not mimetypes.inited:
-        mimetypes.init() # try to read system mime.types
-    extensions_map = mimetypes.types_map.copy()
-    extensions_map.update({
-        '': 'application/octet-stream', # Default
-        '.py': 'text/plain',
-        '.c': 'text/plain',
-        '.h': 'text/plain',
-        })
-
-
-# Utilities for CGIHTTPRequestHandler
-
-def _url_collapse_path(path):
-    """
-    Given a URL path, remove extra '/'s and '.' path elements and collapse
-    any '..' references, returning a collapsed path.
-
-    Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
-    The utility of this function is limited to the is_cgi method; it helps
-    prevent some security attacks.
-
-    Returns: The reconstituted path, which will always start with a '/'.
-
-    Raises: IndexError if too many '..' occur within the path.
-
-    """
-    # Similar to os.path.split(os.path.normpath(path)) but specific to URL
-    # path semantics rather than local operating system semantics.
-    path_parts = path.split('/')
-    head_parts = []
-    for part in path_parts[:-1]:
-        if part == '..':
-            head_parts.pop() # IndexError if more '..'
than prior parts - elif part and part != '.': - head_parts.append( part ) - if path_parts: - tail_part = path_parts.pop() - if tail_part: - if tail_part == '..': - head_parts.pop() - tail_part = '' - elif tail_part == '.': - tail_part = '' - else: - tail_part = '' - - splitpath = ('/' + '/'.join(head_parts), tail_part) - collapsed_path = "/".join(splitpath) - - return collapsed_path - - - -nobody = None - -def nobody_uid(): - """Internal routine to get nobody's uid""" - global nobody - if nobody: - return nobody - try: - import pwd - except ImportError: - return -1 - try: - nobody = pwd.getpwnam('nobody')[2] - except KeyError: - nobody = 1 + max(x[2] for x in pwd.getpwall()) - return nobody - - -def executable(path): - """Test for executable file.""" - return os.access(path, os.X_OK) - - -class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): - - """Complete HTTP server with GET, HEAD and POST commands. - - GET and HEAD also support running CGI scripts. - - The POST command is *only* implemented for CGI scripts. - - """ - - # Determine platform specifics - have_fork = hasattr(os, 'fork') - - # Make rfile unbuffered -- we need to read one line and then pass - # the rest to a subprocess, so we can't use buffered input. - rbufsize = 0 - - def do_POST(self): - """Serve a POST request. - - This is only implemented for CGI scripts. - - """ - - if self.is_cgi(): - self.run_cgi() - else: - self.send_error(501, "Can only POST to CGI scripts") - - def send_head(self): - """Version of send_head that support CGI scripts""" - if self.is_cgi(): - return self.run_cgi() - else: - return SimpleHTTPRequestHandler.send_head(self) - - def is_cgi(self): - """Test whether self.path corresponds to a CGI script. - - Returns True and updates the cgi_info attribute to the tuple - (dir, rest) if self.path requires running a CGI script. - Returns False otherwise. - - If any exception is raised, the caller should assume that - self.path was rejected as invalid and act accordingly. - - The default implementation tests whether the normalized url - path begins with one of the strings in self.cgi_directories - (and the next character is a '/' or the end of the string). - - """ - collapsed_path = _url_collapse_path(self.path) - dir_sep = collapsed_path.find('/', 1) - head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] - if head in self.cgi_directories: - self.cgi_info = head, tail - return True - return False - - - cgi_directories = ['/cgi-bin', '/htbin'] - - def is_executable(self, path): - """Test whether argument path is an executable file.""" - return executable(path) - - def is_python(self, path): - """Test whether argument path is a Python script.""" - head, tail = os.path.splitext(path) - return tail.lower() in (".py", ".pyw") - - def run_cgi(self): - """Execute a CGI script.""" - path = self.path - dir, rest = self.cgi_info - - i = path.find('/', len(dir) + 1) - while i >= 0: - nextdir = path[:i] - nextrest = path[i+1:] - - scriptdir = self.translate_path(nextdir) - if os.path.isdir(scriptdir): - dir, rest = nextdir, nextrest - i = path.find('/', len(dir) + 1) - else: - break - - # find an explicit query string, if present. - i = rest.rfind('?') - if i >= 0: - rest, query = rest[:i], rest[i+1:] - else: - query = '' - - # dissect the part after the directory name into a script name & - # a possible additional path, to be stored in PATH_INFO. 
- i = rest.find('/') - if i >= 0: - script, rest = rest[:i], rest[i:] - else: - script, rest = rest, '' - - scriptname = dir + '/' + script - scriptfile = self.translate_path(scriptname) - if not os.path.exists(scriptfile): - self.send_error(404, "No such CGI script (%r)" % scriptname) - return - if not os.path.isfile(scriptfile): - self.send_error(403, "CGI script is not a plain file (%r)" % - scriptname) - return - ispy = self.is_python(scriptname) - if self.have_fork or not ispy: - if not self.is_executable(scriptfile): - self.send_error(403, "CGI script is not executable (%r)" % - scriptname) - return - - # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html - # XXX Much of the following could be prepared ahead of time! - env = copy.deepcopy(os.environ) - env['SERVER_SOFTWARE'] = self.version_string() - env['SERVER_NAME'] = self.server.server_name - env['GATEWAY_INTERFACE'] = 'CGI/1.1' - env['SERVER_PROTOCOL'] = self.protocol_version - env['SERVER_PORT'] = str(self.server.server_port) - env['REQUEST_METHOD'] = self.command - uqrest = urllib_parse.unquote(rest) - env['PATH_INFO'] = uqrest - env['PATH_TRANSLATED'] = self.translate_path(uqrest) - env['SCRIPT_NAME'] = scriptname - if query: - env['QUERY_STRING'] = query - env['REMOTE_ADDR'] = self.client_address[0] - authorization = self.headers.get("authorization") - if authorization: - authorization = authorization.split() - if len(authorization) == 2: - import base64, binascii - env['AUTH_TYPE'] = authorization[0] - if authorization[0].lower() == "basic": - try: - authorization = authorization[1].encode('ascii') - if utils.PY3: - # In Py3.3, was: - authorization = base64.decodebytes(authorization).\ - decode('ascii') - else: - # Backport to Py2.7: - authorization = base64.decodestring(authorization).\ - decode('ascii') - except (binascii.Error, UnicodeError): - pass - else: - authorization = authorization.split(':') - if len(authorization) == 2: - env['REMOTE_USER'] = authorization[0] - # XXX REMOTE_IDENT - if self.headers.get('content-type') is None: - env['CONTENT_TYPE'] = self.headers.get_content_type() - else: - env['CONTENT_TYPE'] = self.headers['content-type'] - length = self.headers.get('content-length') - if length: - env['CONTENT_LENGTH'] = length - referer = self.headers.get('referer') - if referer: - env['HTTP_REFERER'] = referer - accept = [] - for line in self.headers.getallmatchingheaders('accept'): - if line[:1] in "\t\n\r ": - accept.append(line.strip()) - else: - accept = accept + line[7:].split(',') - env['HTTP_ACCEPT'] = ','.join(accept) - ua = self.headers.get('user-agent') - if ua: - env['HTTP_USER_AGENT'] = ua - co = filter(None, self.headers.get_all('cookie', [])) - cookie_str = ', '.join(co) - if cookie_str: - env['HTTP_COOKIE'] = cookie_str - # XXX Other HTTP_* headers - # Since we're setting the env in the parent, provide empty - # values to override previously set values - for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', - 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): - env.setdefault(k, "") - - self.send_response(200, "Script output follows") - self.flush_headers() - - decoded_query = query.replace('+', ' ') - - if self.have_fork: - # Unix -- fork as we should - args = [script] - if '=' not in decoded_query: - args.append(decoded_query) - nobody = nobody_uid() - self.wfile.flush() # Always flush before forking - pid = os.fork() - if pid != 0: - # Parent - pid, sts = os.waitpid(pid, 0) - # throw away additional data [see bug #427345] - while select.select([self.rfile], [], [], 0)[0]: - if not 
self.rfile.read(1): - break - if sts: - self.log_error("CGI script exit status %#x", sts) - return - # Child - try: - try: - os.setuid(nobody) - except os.error: - pass - os.dup2(self.rfile.fileno(), 0) - os.dup2(self.wfile.fileno(), 1) - os.execve(scriptfile, args, env) - except: - self.server.handle_error(self.request, self.client_address) - os._exit(127) - - else: - # Non-Unix -- use subprocess - import subprocess - cmdline = [scriptfile] - if self.is_python(scriptfile): - interp = sys.executable - if interp.lower().endswith("w.exe"): - # On Windows, use python.exe, not pythonw.exe - interp = interp[:-5] + interp[-4:] - cmdline = [interp, '-u'] + cmdline - if '=' not in query: - cmdline.append(query) - self.log_message("command: %s", subprocess.list2cmdline(cmdline)) - try: - nbytes = int(length) - except (TypeError, ValueError): - nbytes = 0 - p = subprocess.Popen(cmdline, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env = env - ) - if self.command.lower() == "post" and nbytes > 0: - data = self.rfile.read(nbytes) - else: - data = None - # throw away additional data [see bug #427345] - while select.select([self.rfile._sock], [], [], 0)[0]: - if not self.rfile._sock.recv(1): - break - stdout, stderr = p.communicate(data) - self.wfile.write(stdout) - if stderr: - self.log_error('%s', stderr) - p.stderr.close() - p.stdout.close() - status = p.returncode - if status: - self.log_error("CGI script exit status %#x", status) - else: - self.log_message("CGI script exited OK") - - -def test(HandlerClass = BaseHTTPRequestHandler, - ServerClass = HTTPServer, protocol="HTTP/1.0", port=8000): - """Test the HTTP request handler class. - - This runs an HTTP server on port 8000 (or the first command line - argument). - - """ - server_address = ('', port) - - HandlerClass.protocol_version = protocol - httpd = ServerClass(server_address, HandlerClass) - - sa = httpd.socket.getsockname() - print("Serving HTTP on", sa[0], "port", sa[1], "...") - try: - httpd.serve_forever() - except KeyboardInterrupt: - print("\nKeyboard interrupt received, exiting.") - httpd.server_close() - sys.exit(0) - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--cgi', action='store_true', - help='Run as CGI Server') - parser.add_argument('port', action='store', - default=8000, type=int, - nargs='?', - help='Specify alternate port [default: 8000]') - args = parser.parse_args() - if args.cgi: - test(HandlerClass=CGIHTTPRequestHandler, port=args.port) - else: - test(HandlerClass=SimpleHTTPRequestHandler, port=args.port) diff --git a/pype/vendor/future/backports/misc.py b/pype/vendor/future/backports/misc.py deleted file mode 100644 index 31e713ae38..0000000000 --- a/pype/vendor/future/backports/misc.py +++ /dev/null @@ -1,940 +0,0 @@ -""" -Miscellaneous function (re)definitions from the Py3.4+ standard library -for Python 2.6/2.7. 
- -- math.ceil (for Python 2.7) -- collections.OrderedDict (for Python 2.6) -- collections.Counter (for Python 2.6) -- collections.ChainMap (for all versions prior to Python 3.3) -- itertools.count (for Python 2.6, with step parameter) -- subprocess.check_output (for Python 2.6) -- reprlib.recursive_repr (for Python 2.6+) -- functools.cmp_to_key (for Python 2.6) -""" - -from __future__ import absolute_import - -import subprocess -from math import ceil as oldceil -from collections import Mapping, MutableMapping - -from operator import itemgetter as _itemgetter, eq as _eq -import sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from socket import getaddrinfo, SOCK_STREAM, error, socket - -from future.utils import iteritems, itervalues, PY26, PY3 - - -def ceil(x): - """ - Return the ceiling of x as an int. - This is the smallest integral value >= x. - """ - return int(oldceil(x)) - - -######################################################################## -### reprlib.recursive_repr decorator from Py3.4 -######################################################################## - -from itertools import islice - -if PY3: - try: - from _thread import get_ident - except ImportError: - from _dummy_thread import get_ident -else: - try: - from thread import get_ident - except ImportError: - from dummy_thread import get_ident - - -def recursive_repr(fillvalue='...'): - 'Decorator to make a repr function return fillvalue for a recursive call' - - def decorating_function(user_function): - repr_running = set() - - def wrapper(self): - key = id(self), get_ident() - if key in repr_running: - return fillvalue - repr_running.add(key) - try: - result = user_function(self) - finally: - repr_running.discard(key) - return result - - # Can't use functools.wraps() here because of bootstrap issues - wrapper.__module__ = getattr(user_function, '__module__') - wrapper.__doc__ = getattr(user_function, '__doc__') - wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) - return wrapper - - return decorating_function - - -################################################################################ -### OrderedDict -################################################################################ - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. 
- - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
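The doubly linked list maintained by `__setitem__`/`__delitem__` above is what makes `popitem()` and `move_to_end()` O(1) from either end. A quick sketch against the Py3 stdlib `OrderedDict`, which this backport tracks:

```python
from collections import OrderedDict  # stdlib version the backport mirrors

od = OrderedDict([("a", 1), ("b", 2), ("c", 3)])
od.move_to_end("a")            # relink "a" to the tail of the list
print(list(od))                # ['b', 'c', 'a']
od.move_to_end("c", last=False)
print(list(od))                # ['c', 'b', 'a']
print(od.popitem())            # ('a', 1): LIFO end by default
print(od.popitem(last=False))  # ('c', 3): FIFO end
```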
- - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - last.next = root.prev = link - else: - first = root.next - link.prev = root - link.next = first - root.next = first.prev = link - - def __sizeof__(self): - sizeof = sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - keys = MutableMapping.keys - values = MutableMapping.values - items = MutableMapping.items - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -# {{{ http://code.activestate.com/recipes/576611/ (r11) - -try: - from operator import itemgetter - from heapq import nlargest -except ImportError: - pass - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. 
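The `__eq__` deleted above is deliberately asymmetric in behavior: comparing two `OrderedDict`s checks order as well as contents, while comparing against a plain mapping does not. A short illustration with the stdlib class:

```python
from collections import OrderedDict

x = OrderedDict([("a", 1), ("b", 2)])
y = OrderedDict([("b", 2), ("a", 1)])
print(x == y)                  # False: OD-to-OD comparison is order-sensitive
print(x == {"b": 2, "a": 1})   # True: comparison to a regular dict ignores order
```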
- - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. 
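`Counter.__missing__` above is the hook that makes bare `c[elem] += 1` tallying work: absent keys read as zero instead of raising `KeyError`, so `_count_elements` and user loops never need a membership check. A minimal sketch with the Py3 stdlib `Counter`:

```python
from collections import Counter

c = Counter()
for word in "to be or not to be".split():
    c[word] += 1             # safe on absent keys: __missing__ returns 0
print(c.most_common(2))      # [('to', 2), ('be', 2)]
```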
- return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts - # being mixed-in with all of the other counts for a mismash that - # doesn't have a straight-forward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if not args: - raise TypeError("descriptor 'update' of 'Counter' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - if isinstance(iterable, Mapping): - if self: - self_get = self.get - for elem, count in iterable.items(): - self[elem] = count + self_get(elem, 0) - else: - super(Counter, self).update(iterable) # fast path when counter is empty - else: - _count_elements(self, iterable) - if kwds: - self.update(kwds) - - def subtract(*args, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' 
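As the deleted `update()` and `subtract()` docstrings explain, both operations accumulate counts rather than replacing them, and `subtract()` is allowed to push counts to zero or below. A quick sketch:

```python
from collections import Counter

c = Counter("which")
c.update("witch")        # adds counts instead of replacing them
c.subtract("watch")      # may drive counts to zero or below
print(c["h"])            # 2 + 1 - 1 = 2
print(c["a"])            # 0 + 0 - 1 = -1: negatives are permitted
```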
- if elem in self: - super(Counter, self).__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. - - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - return self + Counter() - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - return Counter() - self - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. 
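The multiset operators deleted above (`+`, `-`, `|`, `&`, unary `+`/`-`) all guarantee that their output contains only positive counts, which is why `c += Counter()` is the idiom for stripping zeros and negatives. A compact demonstration:

```python
from collections import Counter

a = Counter(a=3, b=1)
b = Counter(a=1, b=2)
print(a + b)   # Counter({'a': 4, 'b': 3})   per-key addition
print(a - b)   # Counter({'a': 2})           only positive results kept
print(a | b)   # Counter({'a': 3, 'b': 2})   union: per-key maximum
print(a & b)   # Counter({'a': 1, 'b': 1})   intersection: per-key minimum
print(+Counter(a=-2, b=1))  # Counter({'b': 1}): unary + strips nonpositives
```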
- - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. - - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - ''' - for elem, count in self.items(): - other_count = other[elem] - if other_count < count: - self[elem] = other_count - return self._keep_positive() - - -def check_output(*popenargs, **kwargs): - """ - For Python 2.6 compatibility: see - http://stackoverflow.com/questions/4814970/ - """ - - if 'stdout' in kwargs: - raise ValueError('stdout argument not allowed, it will be overridden.') - process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) - output, unused_err = process.communicate() - retcode = process.poll() - if retcode: - cmd = kwargs.get("args") - if cmd is None: - cmd = popenargs[0] - raise subprocess.CalledProcessError(retcode, cmd) - return output - - -def count(start=0, step=1): - """ - ``itertools.count`` in Py 2.6 doesn't accept a step - parameter. This is an enhanced version of ``itertools.count`` - for Py2.6 equivalent to ``itertools.count`` in Python 2.7+. - """ - while True: - yield start - start += step - - -######################################################################## -### ChainMap (helper for configparser and string.Template) -### From the Py3.4 source code. See also: -### https://github.com/kkxue/Py2ChainMap/blob/master/py2chainmap.py -######################################################################## - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - accessed or updated using the *maps* attribute. There is no other state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. 
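The `check_output` backport above follows the same contract as the stdlib function it replaces on Python 2.6: capture stdout, and raise `CalledProcessError` on a nonzero exit. A usage sketch; `echo` and `false` are Unix commands used purely for illustration:

```python
import subprocess

out = subprocess.check_output(["echo", "hello"])
print(out)                                 # b'hello\n'

try:
    subprocess.check_output(["false"])     # exits nonzero
except subprocess.CalledProcessError as e:
    print("exit status:", e.returncode)    # exit status: 1
```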
- - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - # Py2 compatibility: - __nonzero__ = __bool__ - - @recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self, m=None): # like Django's Context.push() - ''' - New ChainMap with a new map followed by all previous maps. If no - map is provided, an empty dict is used. - ''' - if m is None: - m = {} - return self.__class__(m, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' - try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - - -# Re-use the same sentinel as in the Python stdlib socket module: -from socket import _GLOBAL_DEFAULT_TIMEOUT -# Was: _GLOBAL_DEFAULT_TIMEOUT = object() - - -def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, - source_address=None): - """Backport of 3-argument create_connection() for Py2.6. - - Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - An host of '' or port 0 tells the OS to use the default. 
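The `ChainMap` semantics spelled out above (lookups cascade through `maps` left to right; writes, updates, and deletions touch only `maps[0]`) look like this in practice, using the Py3.3+ stdlib class the backport was copied from:

```python
from collections import ChainMap

defaults = {"color": "red", "user": "guest"}
overrides = {"user": "admin"}
cm = ChainMap(overrides, defaults)
print(cm["user"])         # 'admin': found in the first mapping
print(cm["color"])        # 'red':  falls through to the second
cm["color"] = "blue"      # writes always land in maps[0]
print(defaults["color"])  # 'red': the underlying defaults are untouched
child = cm.new_child()    # push a fresh scope, like Django's Context.push()
```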
- """ - - host, port = address - err = None - for res in getaddrinfo(host, port, 0, SOCK_STREAM): - af, socktype, proto, canonname, sa = res - sock = None - try: - sock = socket(af, socktype, proto) - if timeout is not _GLOBAL_DEFAULT_TIMEOUT: - sock.settimeout(timeout) - if source_address: - sock.bind(source_address) - sock.connect(sa) - return sock - - except error as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: - raise err - else: - raise error("getaddrinfo returns an empty list") - -# Backport from Py2.7 for Py2.6: -def cmp_to_key(mycmp): - """Convert a cmp= function into a key= function""" - class K(object): - __slots__ = ['obj'] - def __init__(self, obj, *args): - self.obj = obj - def __lt__(self, other): - return mycmp(self.obj, other.obj) < 0 - def __gt__(self, other): - return mycmp(self.obj, other.obj) > 0 - def __eq__(self, other): - return mycmp(self.obj, other.obj) == 0 - def __le__(self, other): - return mycmp(self.obj, other.obj) <= 0 - def __ge__(self, other): - return mycmp(self.obj, other.obj) >= 0 - def __ne__(self, other): - return mycmp(self.obj, other.obj) != 0 - def __hash__(self): - raise TypeError('hash not implemented') - return K - -# Back up our definitions above in case they're useful -_OrderedDict = OrderedDict -_Counter = Counter -_check_output = check_output -_count = count -_ceil = ceil -__count_elements = _count_elements -_recursive_repr = recursive_repr -_ChainMap = ChainMap -_create_connection = create_connection -_cmp_to_key = cmp_to_key - -# Overwrite the definitions above with the usual ones -# from the standard library: -if sys.version_info >= (2, 7): - from collections import OrderedDict, Counter - from itertools import count - from functools import cmp_to_key - try: - from subprocess import check_output - except ImportError: - # Not available. This happens with Google App Engine: see issue #231 - pass - from socket import create_connection - -if sys.version_info >= (3, 0): - from math import ceil - from collections import _count_elements - -if sys.version_info >= (3, 3): - from reprlib import recursive_repr - from collections import ChainMap diff --git a/pype/vendor/future/backports/socket.py b/pype/vendor/future/backports/socket.py deleted file mode 100644 index 930e1dae63..0000000000 --- a/pype/vendor/future/backports/socket.py +++ /dev/null @@ -1,454 +0,0 @@ -# Wrapper module for _socket, providing some additional facilities -# implemented in Python. - -"""\ -This module provides socket operations and some related functions. -On Unix, it supports IP (Internet Protocol) and Unix domain sockets. -On other systems, it only supports IP. Functions specific for a -socket are available as methods of the socket object. - -Functions: - -socket() -- create a new socket object -socketpair() -- create a pair of new socket objects [*] -fromfd() -- create a socket object from an open file descriptor [*] -fromshare() -- create a socket object from data received from socket.share() [*] -gethostname() -- return the current hostname -gethostbyname() -- map a hostname to its IP number -gethostbyaddr() -- map an IP number or hostname to DNS info -getservbyname() -- map a service name and a protocol name to a port number -getprotobyname() -- map a protocol name (e.g. 
'tcp') to a number -ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order -htons(), htonl() -- convert 16, 32 bit int from host to network byte order -inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format -inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) -socket.getdefaulttimeout() -- get the default timeout value -socket.setdefaulttimeout() -- set the default timeout value -create_connection() -- connects to an address, with an optional timeout and - optional source address. - - [*] not available on all platforms! - -Special objects: - -SocketType -- type object for socket objects -error -- exception raised for I/O errors -has_ipv6 -- boolean value indicating if IPv6 is supported - -Integer constants: - -AF_INET, AF_UNIX -- socket domains (first argument to socket() call) -SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) - -Many other constants may be defined; these may be used in calls to -the setsockopt() and getsockopt() methods. -""" - -from __future__ import unicode_literals -from __future__ import print_function -from __future__ import division -from __future__ import absolute_import -from future.builtins import super - -import _socket -from _socket import * - -import os, sys, io - -try: - import errno -except ImportError: - errno = None -EBADF = getattr(errno, 'EBADF', 9) -EAGAIN = getattr(errno, 'EAGAIN', 11) -EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11) - -__all__ = ["getfqdn", "create_connection"] -__all__.extend(os._get_exports_list(_socket)) - - -_realsocket = socket - -# WSA error codes -if sys.platform.lower().startswith("win"): - errorTab = {} - errorTab[10004] = "The operation was interrupted." - errorTab[10009] = "A bad file handle was passed." - errorTab[10013] = "Permission denied." - errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT - errorTab[10022] = "An invalid operation was attempted." - errorTab[10035] = "The socket operation would block" - errorTab[10036] = "A blocking operation is already in progress." - errorTab[10048] = "The network address is in use." - errorTab[10054] = "The connection has been reset." - errorTab[10058] = "The network has been shut down." - errorTab[10060] = "The operation timed out." - errorTab[10061] = "Connection refused." - errorTab[10063] = "The name is too long." - errorTab[10064] = "The host is down." - errorTab[10065] = "The host is unreachable." - __all__.append("errorTab") - - -class socket(_socket.socket): - - """A subclass of _socket.socket adding the makefile() method.""" - - __slots__ = ["__weakref__", "_io_refs", "_closed"] - - def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None): - if fileno is None: - _socket.socket.__init__(self, family, type, proto) - else: - _socket.socket.__init__(self, family, type, proto, fileno) - self._io_refs = 0 - self._closed = False - - def __enter__(self): - return self - - def __exit__(self, *args): - if not self._closed: - self.close() - - def __repr__(self): - """Wrap __repr__() to reveal the real class name.""" - s = _socket.socket.__repr__(self) - if s.startswith(" socket object - - Return a new socket object connected to the same system resource. - """ - fd = dup(self.fileno()) - sock = self.__class__(self.family, self.type, self.proto, fileno=fd) - sock.settimeout(self.gettimeout()) - return sock - - def accept(self): - """accept() -> (socket object, address info) - - Wait for an incoming connection. 
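The `__enter__`/`__exit__` pair deleted above is what makes the with-statement close a socket automatically. A minimal sketch using the Py3 stdlib `socket` module this file backports; the host and port are placeholders:

```python
import socket

with socket.create_connection(("example.com", 80), timeout=5) as sock:
    sock.sendall(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
    print(sock.recv(200))
# the socket is closed here, even if recv() raised
```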
Return a new socket - representing the connection, and the address of the client. - For IP sockets, the address info is a pair (hostaddr, port). - """ - fd, addr = self._accept() - sock = socket(self.family, self.type, self.proto, fileno=fd) - # Issue #7995: if no default timeout is set and the listening - # socket had a (non-zero) timeout, force the new socket in blocking - # mode to override platform-specific socket flags inheritance. - if getdefaulttimeout() is None and self.gettimeout(): - sock.setblocking(True) - return sock, addr - - def makefile(self, mode="r", buffering=None, **_3to2kwargs): - """makefile(...) -> an I/O stream connected to the socket - - The arguments are as for io.open() after the filename, - except the only mode characters supported are 'r', 'w' and 'b'. - The semantics are similar too. (XXX refactor to share code?) - """ - if 'newline' in _3to2kwargs: newline = _3to2kwargs['newline']; del _3to2kwargs['newline'] - else: newline = None - if 'errors' in _3to2kwargs: errors = _3to2kwargs['errors']; del _3to2kwargs['errors'] - else: errors = None - if 'encoding' in _3to2kwargs: encoding = _3to2kwargs['encoding']; del _3to2kwargs['encoding'] - else: encoding = None - for c in mode: - if c not in ("r", "w", "b"): - raise ValueError("invalid mode %r (only r, w, b allowed)") - writing = "w" in mode - reading = "r" in mode or not writing - assert reading or writing - binary = "b" in mode - rawmode = "" - if reading: - rawmode += "r" - if writing: - rawmode += "w" - raw = SocketIO(self, rawmode) - self._io_refs += 1 - if buffering is None: - buffering = -1 - if buffering < 0: - buffering = io.DEFAULT_BUFFER_SIZE - if buffering == 0: - if not binary: - raise ValueError("unbuffered streams must be binary") - return raw - if reading and writing: - buffer = io.BufferedRWPair(raw, raw, buffering) - elif reading: - buffer = io.BufferedReader(raw, buffering) - else: - assert writing - buffer = io.BufferedWriter(raw, buffering) - if binary: - return buffer - text = io.TextIOWrapper(buffer, encoding, errors, newline) - text.mode = mode - return text - - def _decref_socketios(self): - if self._io_refs > 0: - self._io_refs -= 1 - if self._closed: - self.close() - - def _real_close(self, _ss=_socket.socket): - # This function should not reference any globals. See issue #808164. - _ss.close(self) - - def close(self): - # This function should not reference any globals. See issue #808164. - self._closed = True - if self._io_refs <= 0: - self._real_close() - - def detach(self): - """detach() -> file descriptor - - Close the socket object without closing the underlying file descriptor. - The object cannot be used after this call, but the file descriptor - can be reused for other purposes. The file descriptor is returned. - """ - self._closed = True - return super().detach() - -def fromfd(fd, family, type, proto=0): - """ fromfd(fd, family, type[, proto]) -> socket object - - Create a socket object from a duplicate of the given file - descriptor. The remaining arguments are the same as for socket(). - """ - nfd = dup(fd) - return socket(family, type, proto, nfd) - -if hasattr(_socket.socket, "share"): - def fromshare(info): - """ fromshare(info) -> socket object - - Create a socket object from a the bytes object returned by - socket.share(pid). 
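The `makefile()` implementation above stacks the io machinery (SocketIO, then a `Buffered*` wrapper, then `TextIOWrapper` for text modes) on top of the raw socket, so line-oriented protocols read like ordinary file I/O. A sketch with the stdlib equivalent; host, port, and encoding are illustrative:

```python
import socket

with socket.create_connection(("example.com", 80)) as sock:
    sock.sendall(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
    f = sock.makefile("r", encoding="latin-1")  # text stream over the socket
    print(f.readline().strip())                 # e.g. 'HTTP/1.0 200 OK'
    f.close()                                   # decrefs, does not close the socket
```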
- """ - return socket(0, 0, 0, info) - -if hasattr(_socket, "socketpair"): - - def socketpair(family=None, type=SOCK_STREAM, proto=0): - """socketpair([family[, type[, proto]]]) -> (socket object, socket object) - - Create a pair of socket objects from the sockets returned by the platform - socketpair() function. - The arguments are the same as for socket() except the default family is - AF_UNIX if defined on the platform; otherwise, the default is AF_INET. - """ - if family is None: - try: - family = AF_UNIX - except NameError: - family = AF_INET - a, b = _socket.socketpair(family, type, proto) - a = socket(family, type, proto, a.detach()) - b = socket(family, type, proto, b.detach()) - return a, b - - -_blocking_errnos = set([EAGAIN, EWOULDBLOCK]) - -class SocketIO(io.RawIOBase): - - """Raw I/O implementation for stream sockets. - - This class supports the makefile() method on sockets. It provides - the raw I/O interface on top of a socket object. - """ - - # One might wonder why not let FileIO do the job instead. There are two - # main reasons why FileIO is not adapted: - # - it wouldn't work under Windows (where you can't used read() and - # write() on a socket handle) - # - it wouldn't work with socket timeouts (FileIO would ignore the - # timeout and consider the socket non-blocking) - - # XXX More docs - - def __init__(self, sock, mode): - if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): - raise ValueError("invalid mode: %r" % mode) - io.RawIOBase.__init__(self) - self._sock = sock - if "b" not in mode: - mode += "b" - self._mode = mode - self._reading = "r" in mode - self._writing = "w" in mode - self._timeout_occurred = False - - def readinto(self, b): - """Read up to len(b) bytes into the writable buffer *b* and return - the number of bytes read. If the socket is non-blocking and no bytes - are available, None is returned. - - If *b* is non-empty, a 0 return value indicates that the connection - was shutdown at the other end. - """ - self._checkClosed() - self._checkReadable() - if self._timeout_occurred: - raise IOError("cannot read from timed out object") - while True: - try: - return self._sock.recv_into(b) - except timeout: - self._timeout_occurred = True - raise - # except InterruptedError: - # continue - except error as e: - if e.args[0] in _blocking_errnos: - return None - raise - - def write(self, b): - """Write the given bytes or bytearray object *b* to the socket - and return the number of bytes written. This can be less than - len(b) if not all data could be written. If the socket is - non-blocking and no bytes could be written None is returned. - """ - self._checkClosed() - self._checkWritable() - try: - return self._sock.send(b) - except error as e: - # XXX what about EINTR? - if e.args[0] in _blocking_errnos: - return None - raise - - def readable(self): - """True if the SocketIO is open for reading. - """ - if self.closed: - raise ValueError("I/O operation on closed socket.") - return self._reading - - def writable(self): - """True if the SocketIO is open for writing. - """ - if self.closed: - raise ValueError("I/O operation on closed socket.") - return self._writing - - def seekable(self): - """True if the SocketIO is open for seeking. - """ - if self.closed: - raise ValueError("I/O operation on closed socket.") - return super().seekable() - - def fileno(self): - """Return the file descriptor of the underlying socket. 
- """ - self._checkClosed() - return self._sock.fileno() - - @property - def name(self): - if not self.closed: - return self.fileno() - else: - return -1 - - @property - def mode(self): - return self._mode - - def close(self): - """Close the SocketIO object. This doesn't close the underlying - socket, except if all references to it have disappeared. - """ - if self.closed: - return - io.RawIOBase.close(self) - self._sock._decref_socketios() - self._sock = None - - -def getfqdn(name=''): - """Get fully qualified domain name from name. - - An empty argument is interpreted as meaning the local host. - - First the hostname returned by gethostbyaddr() is checked, then - possibly existing aliases. In case no FQDN is available, hostname - from gethostname() is returned. - """ - name = name.strip() - if not name or name == '0.0.0.0': - name = gethostname() - try: - hostname, aliases, ipaddrs = gethostbyaddr(name) - except error: - pass - else: - aliases.insert(0, hostname) - for name in aliases: - if '.' in name: - break - else: - name = hostname - return name - - -# Re-use the same sentinel as in the Python stdlib socket module: -from socket import _GLOBAL_DEFAULT_TIMEOUT -# Was: _GLOBAL_DEFAULT_TIMEOUT = object() - - -def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, - source_address=None): - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - An host of '' or port 0 tells the OS to use the default. - """ - - host, port = address - err = None - for res in getaddrinfo(host, port, 0, SOCK_STREAM): - af, socktype, proto, canonname, sa = res - sock = None - try: - sock = socket(af, socktype, proto) - if timeout is not _GLOBAL_DEFAULT_TIMEOUT: - sock.settimeout(timeout) - if source_address: - sock.bind(source_address) - sock.connect(sa) - return sock - - except error as _: - err = _ - if sock is not None: - sock.close() - - if err is not None: - raise err - else: - raise error("getaddrinfo returns an empty list") diff --git a/pype/vendor/future/backports/socketserver.py b/pype/vendor/future/backports/socketserver.py deleted file mode 100644 index d1e24a6dd0..0000000000 --- a/pype/vendor/future/backports/socketserver.py +++ /dev/null @@ -1,747 +0,0 @@ -"""Generic socket server classes. - -This module tries to capture the various aspects of defining a server: - -For socket-based servers: - -- address family: - - AF_INET{,6}: IP (Internet Protocol) sockets (default) - - AF_UNIX: Unix domain sockets - - others, e.g. AF_DECNET are conceivable (see -- socket type: - - SOCK_STREAM (reliable stream, e.g. TCP) - - SOCK_DGRAM (datagrams, e.g. UDP) - -For request-based servers (including socket-based): - -- client address verification before further looking at the request - (This is actually a hook for any processing that needs to look - at the request before anything else, e.g. 
logging) -- how to handle multiple requests: - - synchronous (one request is handled at a time) - - forking (each request is handled by a new process) - - threading (each request is handled by a new thread) - -The classes in this module favor the server type that is simplest to -write: a synchronous TCP/IP server. This is bad class design, but -save some typing. (There's also the issue that a deep class hierarchy -slows down method lookups.) - -There are five classes in an inheritance diagram, four of which represent -synchronous servers of four types: - - +------------+ - | BaseServer | - +------------+ - | - v - +-----------+ +------------------+ - | TCPServer |------->| UnixStreamServer | - +-----------+ +------------------+ - | - v - +-----------+ +--------------------+ - | UDPServer |------->| UnixDatagramServer | - +-----------+ +--------------------+ - -Note that UnixDatagramServer derives from UDPServer, not from -UnixStreamServer -- the only difference between an IP and a Unix -stream server is the address family, which is simply repeated in both -unix server classes. - -Forking and threading versions of each type of server can be created -using the ForkingMixIn and ThreadingMixIn mix-in classes. For -instance, a threading UDP server class is created as follows: - - class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass - -The Mix-in class must come first, since it overrides a method defined -in UDPServer! Setting the various member variables also changes -the behavior of the underlying server mechanism. - -To implement a service, you must derive a class from -BaseRequestHandler and redefine its handle() method. You can then run -various versions of the service by combining one of the server classes -with your request handler class. - -The request handler class must be different for datagram or stream -services. This can be hidden by using the request handler -subclasses StreamRequestHandler or DatagramRequestHandler. - -Of course, you still have to use your head! - -For instance, it makes no sense to use a forking server if the service -contains state in memory that can be modified by requests (since the -modifications in the child process would never reach the initial state -kept in the parent process and passed to each child). In this case, -you can use a threading server, but you will probably have to use -locks to avoid two requests that come in nearly simultaneous to apply -conflicting changes to the server state. - -On the other hand, if you are building e.g. an HTTP server, where all -data is stored externally (e.g. in the file system), a synchronous -class will essentially render the service "deaf" while one request is -being handled -- which may be for a very long time if a client is slow -to read all the data it has requested. Here a threading or forking -server is appropriate. - -In some cases, it may be appropriate to process part of a request -synchronously, but to finish processing in a forked child depending on -the request data. This can be implemented by using a synchronous -server and doing an explicit fork in the request handler class -handle() method. - -Another approach to handling multiple simultaneous requests in an -environment that supports neither threads nor fork (or where these are -too expensive or inappropriate for the service) is to maintain an -explicit table of partially finished requests and to use select() to -decide which request to work on next (or whether to handle a new -incoming request). 
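The mix-in composition the module docstring above describes (`ThreadingMixIn` listed first so its `process_request()` overrides the one from `TCPServer`) looks like this as a tiny threaded echo server, written against the Py3 stdlib `socketserver` this file backports; port 0 asks the OS for a free port so the sketch stays self-contained:

```python
import socketserver

class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self):
        data = self.request.recv(1024)   # self.request is the TCP socket
        self.request.sendall(data)

class ThreadedEchoServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    pass                                 # mix-in must come first

server = ThreadedEchoServer(("127.0.0.1", 0), EchoHandler)
print("listening on", server.server_address)
```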
This is particularly important for stream services -where each client can potentially be connected for a long time (if -threads or subprocesses cannot be used). - -Future work: -- Standard classes for Sun RPC (which uses either UDP or TCP) -- Standard mix-in classes to implement various authentication - and encryption schemes -- Standard framework for select-based multiplexing - -XXX Open problems: -- What to do with out-of-band data? - -BaseServer: -- split generic "request" functionality out into BaseServer class. - Copyright (C) 2000 Luke Kenneth Casson Leighton - - example: read entries from a SQL database (requires overriding - get_request() to return a table entry from the database). - entry is processed by a RequestHandlerClass. - -""" - -# Author of the BaseServer patch: Luke Kenneth Casson Leighton - -# XXX Warning! -# There is a test suite for this module, but it cannot be run by the -# standard regression test. -# To run it manually, run Lib/test/test_socketserver.py. - -from __future__ import (absolute_import, print_function) - -__version__ = "0.4" - - -import socket -import select -import sys -import os -import errno -try: - import threading -except ImportError: - import dummy_threading as threading - -__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", - "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", - "StreamRequestHandler","DatagramRequestHandler", - "ThreadingMixIn", "ForkingMixIn"] -if hasattr(socket, "AF_UNIX"): - __all__.extend(["UnixStreamServer","UnixDatagramServer", - "ThreadingUnixStreamServer", - "ThreadingUnixDatagramServer"]) - -def _eintr_retry(func, *args): - """restart a system call interrupted by EINTR""" - while True: - try: - return func(*args) - except OSError as e: - if e.errno != errno.EINTR: - raise - -class BaseServer(object): - - """Base class for server classes. - - Methods for the caller: - - - __init__(server_address, RequestHandlerClass) - - serve_forever(poll_interval=0.5) - - shutdown() - - handle_request() # if you do not use serve_forever() - - fileno() -> int # for select() - - Methods that may be overridden: - - - server_bind() - - server_activate() - - get_request() -> request, client_address - - handle_timeout() - - verify_request(request, client_address) - - server_close() - - process_request(request, client_address) - - shutdown_request(request) - - close_request(request) - - service_actions() - - handle_error() - - Methods for derived classes: - - - finish_request(request, client_address) - - Class variables that may be overridden by derived classes or - instances: - - - timeout - - address_family - - socket_type - - allow_reuse_address - - Instance variables: - - - RequestHandlerClass - - socket - - """ - - timeout = None - - def __init__(self, server_address, RequestHandlerClass): - """Constructor. May be extended, do not override.""" - self.server_address = server_address - self.RequestHandlerClass = RequestHandlerClass - self.__is_shut_down = threading.Event() - self.__shutdown_request = False - - def server_activate(self): - """Called by constructor to activate the server. - - May be overridden. - - """ - pass - - def serve_forever(self, poll_interval=0.5): - """Handle one request at a time until shutdown. - - Polls for shutdown every poll_interval seconds. Ignores - self.timeout. If you need to do periodic tasks, do them in - another thread. 
- """ - self.__is_shut_down.clear() - try: - while not self.__shutdown_request: - # XXX: Consider using another file descriptor or - # connecting to the socket to wake this up instead of - # polling. Polling reduces our responsiveness to a - # shutdown request and wastes cpu at all other times. - r, w, e = _eintr_retry(select.select, [self], [], [], - poll_interval) - if self in r: - self._handle_request_noblock() - - self.service_actions() - finally: - self.__shutdown_request = False - self.__is_shut_down.set() - - def shutdown(self): - """Stops the serve_forever loop. - - Blocks until the loop has finished. This must be called while - serve_forever() is running in another thread, or it will - deadlock. - """ - self.__shutdown_request = True - self.__is_shut_down.wait() - - def service_actions(self): - """Called by the serve_forever() loop. - - May be overridden by a subclass / Mixin to implement any code that - needs to be run during the loop. - """ - pass - - # The distinction between handling, getting, processing and - # finishing a request is fairly arbitrary. Remember: - # - # - handle_request() is the top-level call. It calls - # select, get_request(), verify_request() and process_request() - # - get_request() is different for stream or datagram sockets - # - process_request() is the place that may fork a new process - # or create a new thread to finish the request - # - finish_request() instantiates the request handler class; - # this constructor will handle the request all by itself - - def handle_request(self): - """Handle one request, possibly blocking. - - Respects self.timeout. - """ - # Support people who used socket.settimeout() to escape - # handle_request before self.timeout was available. - timeout = self.socket.gettimeout() - if timeout is None: - timeout = self.timeout - elif self.timeout is not None: - timeout = min(timeout, self.timeout) - fd_sets = _eintr_retry(select.select, [self], [], [], timeout) - if not fd_sets[0]: - self.handle_timeout() - return - self._handle_request_noblock() - - def _handle_request_noblock(self): - """Handle one request, without blocking. - - I assume that select.select has returned that the socket is - readable before this function was called, so there should be - no risk of blocking in get_request(). - """ - try: - request, client_address = self.get_request() - except socket.error: - return - if self.verify_request(request, client_address): - try: - self.process_request(request, client_address) - except: - self.handle_error(request, client_address) - self.shutdown_request(request) - - def handle_timeout(self): - """Called if no new request arrives within self.timeout. - - Overridden by ForkingMixIn. - """ - pass - - def verify_request(self, request, client_address): - """Verify the request. May be overridden. - - Return True if we should proceed with this request. - - """ - return True - - def process_request(self, request, client_address): - """Call finish_request. - - Overridden by ForkingMixIn and ThreadingMixIn. - - """ - self.finish_request(request, client_address) - self.shutdown_request(request) - - def server_close(self): - """Called to clean-up the server. - - May be overridden. 
- - """ - pass - - def finish_request(self, request, client_address): - """Finish one request by instantiating RequestHandlerClass.""" - self.RequestHandlerClass(request, client_address, self) - - def shutdown_request(self, request): - """Called to shutdown and close an individual request.""" - self.close_request(request) - - def close_request(self, request): - """Called to clean up an individual request.""" - pass - - def handle_error(self, request, client_address): - """Handle an error gracefully. May be overridden. - - The default is to print a traceback and continue. - - """ - print('-'*40) - print('Exception happened during processing of request from', end=' ') - print(client_address) - import traceback - traceback.print_exc() # XXX But this goes to stderr! - print('-'*40) - - -class TCPServer(BaseServer): - - """Base class for various socket-based server classes. - - Defaults to synchronous IP stream (i.e., TCP). - - Methods for the caller: - - - __init__(server_address, RequestHandlerClass, bind_and_activate=True) - - serve_forever(poll_interval=0.5) - - shutdown() - - handle_request() # if you don't use serve_forever() - - fileno() -> int # for select() - - Methods that may be overridden: - - - server_bind() - - server_activate() - - get_request() -> request, client_address - - handle_timeout() - - verify_request(request, client_address) - - process_request(request, client_address) - - shutdown_request(request) - - close_request(request) - - handle_error() - - Methods for derived classes: - - - finish_request(request, client_address) - - Class variables that may be overridden by derived classes or - instances: - - - timeout - - address_family - - socket_type - - request_queue_size (only for stream sockets) - - allow_reuse_address - - Instance variables: - - - server_address - - RequestHandlerClass - - socket - - """ - - address_family = socket.AF_INET - - socket_type = socket.SOCK_STREAM - - request_queue_size = 5 - - allow_reuse_address = False - - def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): - """Constructor. May be extended, do not override.""" - BaseServer.__init__(self, server_address, RequestHandlerClass) - self.socket = socket.socket(self.address_family, - self.socket_type) - if bind_and_activate: - self.server_bind() - self.server_activate() - - def server_bind(self): - """Called by constructor to bind the socket. - - May be overridden. - - """ - if self.allow_reuse_address: - self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self.socket.bind(self.server_address) - self.server_address = self.socket.getsockname() - - def server_activate(self): - """Called by constructor to activate the server. - - May be overridden. - - """ - self.socket.listen(self.request_queue_size) - - def server_close(self): - """Called to clean-up the server. - - May be overridden. - - """ - self.socket.close() - - def fileno(self): - """Return socket file number. - - Interface required by select(). - - """ - return self.socket.fileno() - - def get_request(self): - """Get the request and client address from the socket. - - May be overridden. - - """ - return self.socket.accept() - - def shutdown_request(self, request): - """Called to shutdown and close an individual request.""" - try: - #explicitly shutdown. socket.close() merely releases - #the socket and waits for GC to perform the actual close. 
- request.shutdown(socket.SHUT_WR) - except socket.error: - pass #some platforms may raise ENOTCONN here - self.close_request(request) - - def close_request(self, request): - """Called to clean up an individual request.""" - request.close() - - -class UDPServer(TCPServer): - - """UDP server class.""" - - allow_reuse_address = False - - socket_type = socket.SOCK_DGRAM - - max_packet_size = 8192 - - def get_request(self): - data, client_addr = self.socket.recvfrom(self.max_packet_size) - return (data, self.socket), client_addr - - def server_activate(self): - # No need to call listen() for UDP. - pass - - def shutdown_request(self, request): - # No need to shutdown anything. - self.close_request(request) - - def close_request(self, request): - # No need to close anything. - pass - -class ForkingMixIn(object): - - """Mix-in class to handle each request in a new process.""" - - timeout = 300 - active_children = None - max_children = 40 - - def collect_children(self): - """Internal routine to wait for children that have exited.""" - if self.active_children is None: return - while len(self.active_children) >= self.max_children: - # XXX: This will wait for any child process, not just ones - # spawned by this library. This could confuse other - # libraries that expect to be able to wait for their own - # children. - try: - pid, status = os.waitpid(0, 0) - except os.error: - pid = None - if pid not in self.active_children: continue - self.active_children.remove(pid) - - # XXX: This loop runs more system calls than it ought - # to. There should be a way to put the active_children into a - # process group and then use os.waitpid(-pgid) to wait for any - # of that set, but I couldn't find a way to allocate pgids - # that couldn't collide. - for child in self.active_children: - try: - pid, status = os.waitpid(child, os.WNOHANG) - except os.error: - pid = None - if not pid: continue - try: - self.active_children.remove(pid) - except ValueError as e: - raise ValueError('%s. x=%d and list=%r' % (e.message, pid, - self.active_children)) - - def handle_timeout(self): - """Wait for zombies after self.timeout seconds of inactivity. - - May be extended, do not override. - """ - self.collect_children() - - def service_actions(self): - """Collect the zombie child processes regularly in the ForkingMixIn. - - service_actions is called in the BaseServer's serve_forver loop. - """ - self.collect_children() - - def process_request(self, request, client_address): - """Fork a new subprocess to process the request.""" - pid = os.fork() - if pid: - # Parent process - if self.active_children is None: - self.active_children = [] - self.active_children.append(pid) - self.close_request(request) - return - else: - # Child process. - # This must never return, hence os._exit()! - try: - self.finish_request(request, client_address) - self.shutdown_request(request) - os._exit(0) - except: - try: - self.handle_error(request, client_address) - self.shutdown_request(request) - finally: - os._exit(1) - - -class ThreadingMixIn(object): - """Mix-in class to handle each request in a new thread.""" - - # Decides how threads will act upon termination of the - # main process - daemon_threads = False - - def process_request_thread(self, request, client_address): - """Same as in BaseServer but as a thread. - - In addition, exception handling is done here. 
- - """ - try: - self.finish_request(request, client_address) - self.shutdown_request(request) - except: - self.handle_error(request, client_address) - self.shutdown_request(request) - - def process_request(self, request, client_address): - """Start a new thread to process the request.""" - t = threading.Thread(target = self.process_request_thread, - args = (request, client_address)) - t.daemon = self.daemon_threads - t.start() - - -class ForkingUDPServer(ForkingMixIn, UDPServer): pass -class ForkingTCPServer(ForkingMixIn, TCPServer): pass - -class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass -class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass - -if hasattr(socket, 'AF_UNIX'): - - class UnixStreamServer(TCPServer): - address_family = socket.AF_UNIX - - class UnixDatagramServer(UDPServer): - address_family = socket.AF_UNIX - - class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass - - class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass - -class BaseRequestHandler(object): - - """Base class for request handler classes. - - This class is instantiated for each request to be handled. The - constructor sets the instance variables request, client_address - and server, and then calls the handle() method. To implement a - specific service, all you need to do is to derive a class which - defines a handle() method. - - The handle() method can find the request as self.request, the - client address as self.client_address, and the server (in case it - needs access to per-server information) as self.server. Since a - separate instance is created for each request, the handle() method - can define arbitrary other instance variariables. - - """ - - def __init__(self, request, client_address, server): - self.request = request - self.client_address = client_address - self.server = server - self.setup() - try: - self.handle() - finally: - self.finish() - - def setup(self): - pass - - def handle(self): - pass - - def finish(self): - pass - - -# The following two classes make it possible to use the same service -# class for stream or datagram servers. -# Each class sets up these instance variables: -# - rfile: a file object from which receives the request is read -# - wfile: a file object to which the reply is written -# When the handle() method returns, wfile is flushed properly - - -class StreamRequestHandler(BaseRequestHandler): - - """Define self.rfile and self.wfile for stream sockets.""" - - # Default buffer sizes for rfile, wfile. - # We default rfile to buffered because otherwise it could be - # really slow for large data (a getc() call per byte); we make - # wfile unbuffered because (a) often after a write() we want to - # read and we need to flush the line; (b) big writes to unbuffered - # files are typically optimized by stdio even when big reads - # aren't. - rbufsize = -1 - wbufsize = 0 - - # A timeout to apply to the request socket, if not None. - timeout = None - - # Disable nagle algorithm for this socket, if True. - # Use only when wbufsize != 0, to avoid small packets. 
- disable_nagle_algorithm = False - - def setup(self): - self.connection = self.request - if self.timeout is not None: - self.connection.settimeout(self.timeout) - if self.disable_nagle_algorithm: - self.connection.setsockopt(socket.IPPROTO_TCP, - socket.TCP_NODELAY, True) - self.rfile = self.connection.makefile('rb', self.rbufsize) - self.wfile = self.connection.makefile('wb', self.wbufsize) - - def finish(self): - if not self.wfile.closed: - try: - self.wfile.flush() - except socket.error: - # An final socket error may have occurred here, such as - # the local error ECONNABORTED. - pass - self.wfile.close() - self.rfile.close() - - -class DatagramRequestHandler(BaseRequestHandler): - - # XXX Regrettably, I cannot get this working on Linux; - # s.recvfrom() doesn't return a meaningful client address. - - """Define self.rfile and self.wfile for datagram sockets.""" - - def setup(self): - from io import BytesIO - self.packet, self.socket = self.request - self.rfile = BytesIO(self.packet) - self.wfile = BytesIO() - - def finish(self): - self.socket.sendto(self.wfile.getvalue(), self.client_address) diff --git a/pype/vendor/future/backports/test/__init__.py b/pype/vendor/future/backports/test/__init__.py deleted file mode 100644 index 0bba5e69a6..0000000000 --- a/pype/vendor/future/backports/test/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -test package backported for python-future. - -Its primary purpose is to allow use of "import test.support" for running -the Python standard library unit tests using the new Python 3 stdlib -import location. - -Python 3 renamed test.test_support to test.support. -""" diff --git a/pype/vendor/future/backports/test/badcert.pem b/pype/vendor/future/backports/test/badcert.pem deleted file mode 100644 index c4191460f9..0000000000 --- a/pype/vendor/future/backports/test/badcert.pem +++ /dev/null @@ -1,36 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L -opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH -fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB -AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU -D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA -IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM -oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 -ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ -loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j -oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA -z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq -ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV -q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= ------END RSA PRIVATE KEY----- ------BEGIN CERTIFICATE----- -Just bad cert data ------END CERTIFICATE----- ------BEGIN RSA PRIVATE KEY----- -MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L -opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH -fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB -AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU -D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA -IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM -oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 -ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ -loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j -oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA 
-z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq -ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV -q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= ------END RSA PRIVATE KEY----- ------BEGIN CERTIFICATE----- -Just bad cert data ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/badkey.pem b/pype/vendor/future/backports/test/badkey.pem deleted file mode 100644 index 1c8a955719..0000000000 --- a/pype/vendor/future/backports/test/badkey.pem +++ /dev/null @@ -1,40 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Bad Key, though the cert should be OK ------END RSA PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD -VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x -IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT -U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 -NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl -bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m -dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj -aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh -m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 -M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn -fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC -AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb -08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx -CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ -iHkC6gGdBJhogs4= ------END CERTIFICATE----- ------BEGIN RSA PRIVATE KEY----- -Bad Key, though the cert should be OK ------END RSA PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD -VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x -IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT -U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 -NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl -bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m -dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj -aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh -m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 -M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn -fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC -AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb -08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx -CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ -iHkC6gGdBJhogs4= ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/dh512.pem b/pype/vendor/future/backports/test/dh512.pem deleted file mode 100644 index 200d16cd89..0000000000 --- a/pype/vendor/future/backports/test/dh512.pem +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN DH PARAMETERS----- -MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak -XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC ------END DH PARAMETERS----- - -These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols" -(http://www.skip-vpn.org/spec/numbers.html). -See there for how they were generated. -Note that g is not a generator, but this is not a problem since p is a safe prime. 
diff --git a/pype/vendor/future/backports/test/https_svn_python_org_root.pem b/pype/vendor/future/backports/test/https_svn_python_org_root.pem deleted file mode 100644 index e7dfc82947..0000000000 --- a/pype/vendor/future/backports/test/https_svn_python_org_root.pem +++ /dev/null @@ -1,41 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 -IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB -IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA -Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO -BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi -MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ -ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ -8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 -zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y -fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 -w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc -G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k -epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q -laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ -QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU -fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 -YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w -ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY -gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe -MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 -IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy -dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw -czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 -dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl -aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC -AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg -b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB -ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc -nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg -18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c -gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl -Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY -sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T -SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF -CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum -GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk -zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW -omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/keycert.passwd.pem b/pype/vendor/future/backports/test/keycert.passwd.pem deleted file mode 100644 index e90574881d..0000000000 --- a/pype/vendor/future/backports/test/keycert.passwd.pem +++ /dev/null @@ -1,33 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A - -kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c -u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA -AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr -Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ 
-YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P -6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ -noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 -94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l -7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo -cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO -zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt -L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo -2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== ------END RSA PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV -BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u -IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw -MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH -Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k -YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw -gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 -6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt -pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw -FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd -BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G -lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 -CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/keycert.pem b/pype/vendor/future/backports/test/keycert.pem deleted file mode 100644 index 64318aa2e0..0000000000 --- a/pype/vendor/future/backports/test/keycert.pem +++ /dev/null @@ -1,31 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm -LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 -ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP -USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt -CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq -SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK -UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y -BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ -ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 -oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik -eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F -0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS -x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ -SPIXQuT8RMPDVNQ= ------END PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV -BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u -IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw -MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH -Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k -YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw -gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 -6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt -pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw -FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd -BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G -lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 -CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX ------END 
CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/keycert2.pem b/pype/vendor/future/backports/test/keycert2.pem deleted file mode 100644 index e8a9e082b3..0000000000 --- a/pype/vendor/future/backports/test/keycert2.pem +++ /dev/null @@ -1,31 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAJnsJZVrppL+W5I9 -zGQrrawWwE5QJpBK9nWw17mXrZ03R1cD9BamLGivVISbPlRlAVnZBEyh1ATpsB7d -CUQ+WHEvALquvx4+Yw5l+fXeiYRjrLRBYZuVy8yNtXzU3iWcGObcYRkUdiXdOyP7 -sLF2YZHRvQZpzgDBKkrraeQ81w21AgMBAAECgYBEm7n07FMHWlE+0kT0sXNsLYfy -YE+QKZnJw9WkaDN+zFEEPELkhZVt5BjsMraJr6v2fIEqF0gGGJPkbenffVq2B5dC -lWUOxvJHufMK4sM3Cp6s/gOp3LP+QkzVnvJSfAyZU6l+4PGX5pLdUsXYjPxgzjzL -S36tF7/2Uv1WePyLUQJBAMsPhYzUXOPRgmbhcJiqi9A9c3GO8kvSDYTCKt3VMnqz -HBn6MQ4VQasCD1F+7jWTI0FU/3vdw8non/Fj8hhYqZcCQQDCDRdvmZqDiZnpMqDq -L6ZSrLTVtMvZXZbgwForaAD9uHj51TME7+eYT7EG2YCgJTXJ4YvRJEnPNyskwdKt -vTSTAkEAtaaN/vyemEJ82BIGStwONNw0ILsSr5cZ9tBHzqiA/tipY+e36HRFiXhP -QcU9zXlxyWkDH8iz9DSAmE2jbfoqwwJANlMJ65E543cjIlitGcKLMnvtCCLcKpb7 -xSG0XJB6Lo11OKPJ66jp0gcFTSCY1Lx2CXVd+gfJrfwI1Pp562+bhwJBAJ9IfDPU -R8OpO9v1SGd8x33Owm7uXOpB9d63/T70AD1QOXjKUC4eXYbt0WWfWuny/RNPRuyh -w7DXSfUF+kPKolU= ------END PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIICXTCCAcagAwIBAgIJAIO3upAG445fMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV -BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u -IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTAeFw0x -MDEwMDkxNTAxMDBaFw0yMDEwMDYxNTAxMDBaMGIxCzAJBgNVBAYTAlhZMRcwFQYD -VQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZv -dW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF -AAOBjQAwgYkCgYEAmewllWumkv5bkj3MZCutrBbATlAmkEr2dbDXuZetnTdHVwP0 -FqYsaK9UhJs+VGUBWdkETKHUBOmwHt0JRD5YcS8Auq6/Hj5jDmX59d6JhGOstEFh -m5XLzI21fNTeJZwY5txhGRR2Jd07I/uwsXZhkdG9BmnOAMEqSutp5DzXDbUCAwEA -AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB -AH+iMClLLGSaKWgwXsmdVo4FhTZZHo8Uprrtg3N9FxEeE50btpDVQysgRt5ias3K -m+bME9zbKwvbVWD5zZdjus4pDgzwF/iHyccL8JyYhxOvS/9zmvAtFXj/APIIbZFp -IT75d9f88ScIGEtknZQejnrdhB64tYki/EqluiuKBqKD ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/nokia.pem b/pype/vendor/future/backports/test/nokia.pem deleted file mode 100644 index 0d044df436..0000000000 --- a/pype/vendor/future/backports/test/nokia.pem +++ /dev/null @@ -1,31 +0,0 @@ -# Certificate for projects.developer.nokia.com:443 (see issue 13034) ------BEGIN CERTIFICATE----- -MIIFLDCCBBSgAwIBAgIQLubqdkCgdc7lAF9NfHlUmjANBgkqhkiG9w0BAQUFADCB -vDELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTswOQYDVQQLEzJUZXJtcyBvZiB1c2Ug -YXQgaHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYSAoYykxMDE2MDQGA1UEAxMt -VmVyaVNpZ24gQ2xhc3MgMyBJbnRlcm5hdGlvbmFsIFNlcnZlciBDQSAtIEczMB4X -DTExMDkyMTAwMDAwMFoXDTEyMDkyMDIzNTk1OVowcTELMAkGA1UEBhMCRkkxDjAM -BgNVBAgTBUVzcG9vMQ4wDAYDVQQHFAVFc3BvbzEOMAwGA1UEChQFTm9raWExCzAJ -BgNVBAsUAkJJMSUwIwYDVQQDFBxwcm9qZWN0cy5kZXZlbG9wZXIubm9raWEuY29t -MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCr92w1bpHYSYxUEx8N/8Iddda2 -lYi+aXNtQfV/l2Fw9Ykv3Ipw4nLeGTj18FFlAZgMdPRlgrzF/NNXGw/9l3/qKdow -CypkQf8lLaxb9Ze1E/KKmkRJa48QTOqvo6GqKuTI6HCeGlG1RxDb8YSKcQWLiytn -yj3Wp4MgRQO266xmMQIDAQABo4IB9jCCAfIwQQYDVR0RBDowOIIccHJvamVjdHMu -ZGV2ZWxvcGVyLm5va2lhLmNvbYIYcHJvamVjdHMuZm9ydW0ubm9raWEuY29tMAkG -A1UdEwQCMAAwCwYDVR0PBAQDAgWgMEEGA1UdHwQ6MDgwNqA0oDKGMGh0dHA6Ly9T -VlJJbnRsLUczLWNybC52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNybDBEBgNVHSAE -PTA7MDkGC2CGSAGG+EUBBxcDMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LnZl -cmlzaWduLmNvbS9ycGEwKAYDVR0lBCEwHwYJYIZIAYb4QgQBBggrBgEFBQcDAQYI 
-KwYBBQUHAwIwcgYIKwYBBQUHAQEEZjBkMCQGCCsGAQUFBzABhhhodHRwOi8vb2Nz -cC52ZXJpc2lnbi5jb20wPAYIKwYBBQUHMAKGMGh0dHA6Ly9TVlJJbnRsLUczLWFp -YS52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNlcjBuBggrBgEFBQcBDARiMGChXqBc -MFowWDBWFglpbWFnZS9naWYwITAfMAcGBSsOAwIaBBRLa7kolgYMu9BSOJsprEsH -iyEFGDAmFiRodHRwOi8vbG9nby52ZXJpc2lnbi5jb20vdnNsb2dvMS5naWYwDQYJ -KoZIhvcNAQEFBQADggEBACQuPyIJqXwUyFRWw9x5yDXgMW4zYFopQYOw/ItRY522 -O5BsySTh56BWS6mQB07XVfxmYUGAvRQDA5QHpmY8jIlNwSmN3s8RKo+fAtiNRlcL -x/mWSfuMs3D/S6ev3D6+dpEMZtjrhOdctsarMKp8n/hPbwhAbg5hVjpkW5n8vz2y -0KxvvkA1AxpLwpVv7OlK17ttzIHw8bp9HTlHBU5s8bKz4a565V/a5HI0CSEv/+0y -ko4/ghTnZc1CkmUngKKeFMSah/mT/xAh8XnE2l1AazFa8UKuYki1e+ArHaGZc4ix -UYOtiRphwfuYQhRZ7qX9q2MMkCMI65XNK/SaFrAbbG0= ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/nullbytecert.pem b/pype/vendor/future/backports/test/nullbytecert.pem deleted file mode 100644 index 447186c950..0000000000 --- a/pype/vendor/future/backports/test/nullbytecert.pem +++ /dev/null @@ -1,90 +0,0 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 0 (0x0) - Signature Algorithm: sha1WithRSAEncryption - Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org - Validity - Not Before: Aug 7 13:11:52 2013 GMT - Not After : Aug 7 13:12:52 2013 GMT - Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - Public-Key: (2048 bit) - Modulus: - 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: - 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: - 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: - 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: - 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: - 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: - a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: - 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: - ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: - 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: - 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: - 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: - f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: - f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: - ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: - d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: - 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: - 2f:85 - Exponent: 65537 (0x10001) - X509v3 extensions: - X509v3 Basic Constraints: critical - CA:FALSE - X509v3 Subject Key Identifier: - 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C - X509v3 Key Usage: - Digital Signature, Non Repudiation, Key Encipherment - X509v3 Subject Alternative Name: - ************************************************************* - WARNING: The values for DNS, email and URI are WRONG. OpenSSL - doesn't print the text after a NULL byte. 
- ************************************************************* - DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 - Signature Algorithm: sha1WithRSAEncryption - ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: - a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: - 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: - 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: - 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: - de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: - 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: - 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: - d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: - 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: - 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: - 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: - 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: - 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: - c1:ca:a9:94 ------BEGIN CERTIFICATE----- -MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx -DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ -eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg -RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y -ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw -NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI -DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv -ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt -ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq -hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j -pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P -vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv -KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA -oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL -08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV -HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E -BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu -Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 -bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA -AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 -i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j -HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk -kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx -VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW -RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/nullcert.pem b/pype/vendor/future/backports/test/nullcert.pem deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/future/backports/test/pystone.py b/pype/vendor/future/backports/test/pystone.py deleted file mode 100644 index 7652027b48..0000000000 --- a/pype/vendor/future/backports/test/pystone.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python3 - -""" -"PYSTONE" Benchmark Program - -Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes) - -Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013. - - Translated from ADA to C by Rick Richardson. - Every method to preserve ADA-likeness has been used, - at the expense of C-ness. 
- - Translated from C to Python by Guido van Rossum. - -Version History: - - Version 1.1 corrects two bugs in version 1.0: - - First, it leaked memory: in Proc1(), NextRecord ends - up having a pointer to itself. I have corrected this - by zapping NextRecord.PtrComp at the end of Proc1(). - - Second, Proc3() used the operator != to compare a - record to None. This is rather inefficient and not - true to the intention of the original benchmark (where - a pointer comparison to None is intended; the != - operator attempts to find a method __cmp__ to do value - comparison of the record). Version 1.1 runs 5-10 - percent faster than version 1.0, so benchmark figures - of different versions can't be compared directly. - -""" - -from __future__ import print_function - -from time import clock - -LOOPS = 50000 - -__version__ = "1.1" - -[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6) - -class Record(object): - - def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0, - IntComp = 0, StringComp = 0): - self.PtrComp = PtrComp - self.Discr = Discr - self.EnumComp = EnumComp - self.IntComp = IntComp - self.StringComp = StringComp - - def copy(self): - return Record(self.PtrComp, self.Discr, self.EnumComp, - self.IntComp, self.StringComp) - -TRUE = 1 -FALSE = 0 - -def main(loops=LOOPS): - benchtime, stones = pystones(loops) - print("Pystone(%s) time for %d passes = %g" % \ - (__version__, loops, benchtime)) - print("This machine benchmarks at %g pystones/second" % stones) - - -def pystones(loops=LOOPS): - return Proc0(loops) - -IntGlob = 0 -BoolGlob = FALSE -Char1Glob = '\0' -Char2Glob = '\0' -Array1Glob = [0]*51 -Array2Glob = [x[:] for x in [Array1Glob]*51] -PtrGlb = None -PtrGlbNext = None - -def Proc0(loops=LOOPS): - global IntGlob - global BoolGlob - global Char1Glob - global Char2Glob - global Array1Glob - global Array2Glob - global PtrGlb - global PtrGlbNext - - starttime = clock() - for i in range(loops): - pass - nulltime = clock() - starttime - - PtrGlbNext = Record() - PtrGlb = Record() - PtrGlb.PtrComp = PtrGlbNext - PtrGlb.Discr = Ident1 - PtrGlb.EnumComp = Ident3 - PtrGlb.IntComp = 40 - PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING" - String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING" - Array2Glob[8][7] = 10 - - starttime = clock() - - for i in range(loops): - Proc5() - Proc4() - IntLoc1 = 2 - IntLoc2 = 3 - String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING" - EnumLoc = Ident2 - BoolGlob = not Func2(String1Loc, String2Loc) - while IntLoc1 < IntLoc2: - IntLoc3 = 5 * IntLoc1 - IntLoc2 - IntLoc3 = Proc7(IntLoc1, IntLoc2) - IntLoc1 = IntLoc1 + 1 - Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3) - PtrGlb = Proc1(PtrGlb) - CharIndex = 'A' - while CharIndex <= Char2Glob: - if EnumLoc == Func1(CharIndex, 'C'): - EnumLoc = Proc6(Ident1) - CharIndex = chr(ord(CharIndex)+1) - IntLoc3 = IntLoc2 * IntLoc1 - IntLoc2 = IntLoc3 / IntLoc1 - IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1 - IntLoc1 = Proc2(IntLoc1) - - benchtime = clock() - starttime - nulltime - if benchtime == 0.0: - loopsPerBenchtime = 0.0 - else: - loopsPerBenchtime = (loops / benchtime) - return benchtime, loopsPerBenchtime - -def Proc1(PtrParIn): - PtrParIn.PtrComp = NextRecord = PtrGlb.copy() - PtrParIn.IntComp = 5 - NextRecord.IntComp = PtrParIn.IntComp - NextRecord.PtrComp = PtrParIn.PtrComp - NextRecord.PtrComp = Proc3(NextRecord.PtrComp) - if NextRecord.Discr == Ident1: - NextRecord.IntComp = 6 - NextRecord.EnumComp = Proc6(PtrParIn.EnumComp) - NextRecord.PtrComp = PtrGlb.PtrComp - NextRecord.IntComp = Proc7(NextRecord.IntComp, 10) 
- else: - PtrParIn = NextRecord.copy() - NextRecord.PtrComp = None - return PtrParIn - -def Proc2(IntParIO): - IntLoc = IntParIO + 10 - while 1: - if Char1Glob == 'A': - IntLoc = IntLoc - 1 - IntParIO = IntLoc - IntGlob - EnumLoc = Ident1 - if EnumLoc == Ident1: - break - return IntParIO - -def Proc3(PtrParOut): - global IntGlob - - if PtrGlb is not None: - PtrParOut = PtrGlb.PtrComp - else: - IntGlob = 100 - PtrGlb.IntComp = Proc7(10, IntGlob) - return PtrParOut - -def Proc4(): - global Char2Glob - - BoolLoc = Char1Glob == 'A' - BoolLoc = BoolLoc or BoolGlob - Char2Glob = 'B' - -def Proc5(): - global Char1Glob - global BoolGlob - - Char1Glob = 'A' - BoolGlob = FALSE - -def Proc6(EnumParIn): - EnumParOut = EnumParIn - if not Func3(EnumParIn): - EnumParOut = Ident4 - if EnumParIn == Ident1: - EnumParOut = Ident1 - elif EnumParIn == Ident2: - if IntGlob > 100: - EnumParOut = Ident1 - else: - EnumParOut = Ident4 - elif EnumParIn == Ident3: - EnumParOut = Ident2 - elif EnumParIn == Ident4: - pass - elif EnumParIn == Ident5: - EnumParOut = Ident3 - return EnumParOut - -def Proc7(IntParI1, IntParI2): - IntLoc = IntParI1 + 2 - IntParOut = IntParI2 + IntLoc - return IntParOut - -def Proc8(Array1Par, Array2Par, IntParI1, IntParI2): - global IntGlob - - IntLoc = IntParI1 + 5 - Array1Par[IntLoc] = IntParI2 - Array1Par[IntLoc+1] = Array1Par[IntLoc] - Array1Par[IntLoc+30] = IntLoc - for IntIndex in range(IntLoc, IntLoc+2): - Array2Par[IntLoc][IntIndex] = IntLoc - Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1 - Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc] - IntGlob = 5 - -def Func1(CharPar1, CharPar2): - CharLoc1 = CharPar1 - CharLoc2 = CharLoc1 - if CharLoc2 != CharPar2: - return Ident1 - else: - return Ident2 - -def Func2(StrParI1, StrParI2): - IntLoc = 1 - while IntLoc <= 1: - if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1: - CharLoc = 'A' - IntLoc = IntLoc + 1 - if CharLoc >= 'W' and CharLoc <= 'Z': - IntLoc = 7 - if CharLoc == 'X': - return TRUE - else: - if StrParI1 > StrParI2: - IntLoc = IntLoc + 7 - return TRUE - else: - return FALSE - -def Func3(EnumParIn): - EnumLoc = EnumParIn - if EnumLoc == Ident3: return TRUE - return FALSE - -if __name__ == '__main__': - import sys - def error(msg): - print(msg, end=' ', file=sys.stderr) - print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr) - sys.exit(100) - nargs = len(sys.argv) - 1 - if nargs > 1: - error("%d arguments are too many;" % nargs) - elif nargs == 1: - try: loops = int(sys.argv[1]) - except ValueError: - error("Invalid argument %r;" % sys.argv[1]) - else: - loops = LOOPS - main(loops) diff --git a/pype/vendor/future/backports/test/sha256.pem b/pype/vendor/future/backports/test/sha256.pem deleted file mode 100644 index d3db4b85c0..0000000000 --- a/pype/vendor/future/backports/test/sha256.pem +++ /dev/null @@ -1,128 +0,0 @@ -# Certificate chain for https://sha256.tbs-internet.com - 0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=sha-256 production/CN=sha256.tbs-internet.com - i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC ------BEGIN CERTIFICATE----- -MIIGXDCCBUSgAwIBAgIRAKpVmHgg9nfCodAVwcP4siwwDQYJKoZIhvcNAQELBQAw -gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl -bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u -ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv 
-cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg -Q0EgU0dDMB4XDTEyMDEwNDAwMDAwMFoXDTE0MDIxNzIzNTk1OVowgcsxCzAJBgNV -BAYTAkZSMQ4wDAYDVQQREwUxNDAwMDERMA8GA1UECBMIQ2FsdmFkb3MxDTALBgNV -BAcTBENBRU4xGzAZBgNVBAkTEjIyIHJ1ZSBkZSBCcmV0YWduZTEVMBMGA1UEChMM -VEJTIElOVEVSTkVUMRcwFQYDVQQLEw4wMDAyIDQ0MDQ0MzgxMDEbMBkGA1UECxMS -c2hhLTI1NiBwcm9kdWN0aW9uMSAwHgYDVQQDExdzaGEyNTYudGJzLWludGVybmV0 -LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQIX/zdJcyxty0m -PM1XQSoSSifueS3AVcgqMsaIKS/u+rYzsv4hQ/qA6vLn5m5/ewUcZDj7zdi6rBVf -PaVNXJ6YinLX0tkaW8TEjeVuZG5yksGZlhCt1CJ1Ho9XLiLaP4uJ7MCoNUntpJ+E -LfrOdgsIj91kPmwjDJeztVcQCvKzhjVJA/KxdInc0JvOATn7rpaSmQI5bvIjufgo -qVsTPwVFzuUYULXBk7KxRT7MiEqnd5HvviNh0285QC478zl3v0I0Fb5El4yD3p49 -IthcRnxzMKc0UhU5ogi0SbONyBfm/mzONVfSxpM+MlyvZmJqrbuuLoEDzJD+t8PU -xSuzgbcCAwEAAaOCAj4wggI6MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf -2YIfMB0GA1UdDgQWBBT/qTGYdaj+f61c2IRFL/B1eEsM8DAOBgNVHQ8BAf8EBAMC -BaAwDAYDVR0TAQH/BAIwADA0BgNVHSUELTArBggrBgEFBQcDAQYIKwYBBQUHAwIG -CisGAQQBgjcKAwMGCWCGSAGG+EIEATBLBgNVHSAERDBCMEAGCisGAQQB5TcCBAEw -MjAwBggrBgEFBQcCARYkaHR0cHM6Ly93d3cudGJzLWludGVybmV0LmNvbS9DQS9D -UFM0MG0GA1UdHwRmMGQwMqAwoC6GLGh0dHA6Ly9jcmwudGJzLWludGVybmV0LmNv -bS9UQlNYNTA5Q0FTR0MuY3JsMC6gLKAqhihodHRwOi8vY3JsLnRicy14NTA5LmNv -bS9UQlNYNTA5Q0FTR0MuY3JsMIGmBggrBgEFBQcBAQSBmTCBljA4BggrBgEFBQcw -AoYsaHR0cDovL2NydC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQVNHQy5jcnQw -NAYIKwYBBQUHMAKGKGh0dHA6Ly9jcnQudGJzLXg1MDkuY29tL1RCU1g1MDlDQVNH -Qy5jcnQwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLnRicy14NTA5LmNvbTA/BgNV -HREEODA2ghdzaGEyNTYudGJzLWludGVybmV0LmNvbYIbd3d3LnNoYTI1Ni50YnMt -aW50ZXJuZXQuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQA0pOuL8QvAa5yksTbGShzX -ABApagunUGoEydv4YJT1MXy9tTp7DrWaozZSlsqBxrYAXP1d9r2fuKbEniYHxaQ0 -UYaf1VSIlDo1yuC8wE7wxbHDIpQ/E5KAyxiaJ8obtDhFstWAPAH+UoGXq0kj2teN -21sFQ5dXgA95nldvVFsFhrRUNB6xXAcaj0VZFhttI0ZfQZmQwEI/P+N9Jr40OGun -aa+Dn0TMeUH4U20YntfLbu2nDcJcYfyurm+8/0Tr4HznLnedXu9pCPYj0TaddrgT -XO0oFiyy7qGaY6+qKh71yD64Y3ycCJ/HR9Wm39mjZYc9ezYwT4noP6r7Lk8YO7/q ------END CERTIFICATE----- - 1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC - i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root ------BEGIN CERTIFICATE----- -MIIFVjCCBD6gAwIBAgIQXpDZ0ETJMV02WTx3GTnhhTANBgkqhkiG9w0BAQUFADBv -MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFk -ZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBF -eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDYyNDE5MDYzMFow -gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl -bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u -ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv -cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg -Q0EgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsgOkO3f7wzN6 -rOjg45tR5vjBfzK7qmV9IBxb/QW9EEXxG+E7FNhZqQLtwGBKoSsHTnQqV75wWMk0 -9tinWvftBkSpj5sTi/8cbzJfUvTSVYh3Qxv6AVVjMMH/ruLjE6y+4PoaPs8WoYAQ -ts5R4Z1g8c/WnTepLst2x0/Wv7GmuoQi+gXvHU6YrBiu7XkeYhzc95QdviWSJRDk -owhb5K43qhcvjRmBfO/paGlCliDGZp8mHwrI21mwobWpVjTxZRwYO3bd4+TGcI4G -Ie5wmHwE8F7SK1tgSqbBacKjDa93j7txKkfz/Yd2n7TGqOXiHPsJpG655vrKtnXk -9vs1zoDeJQIDAQABo4IBljCCAZIwHQYDVR0OBBYEFAdEdoWTKLx/bXjSCuv6TEvf -2YIfMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMCAGA1UdJQQZ -MBcGCisGAQQBgjcKAwMGCWCGSAGG+EIEATAYBgNVHSAEETAPMA0GCysGAQQBgOU3 -AgQBMHsGA1UdHwR0MHIwOKA2oDSGMmh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL0Fk -ZFRydXN0RXh0ZXJuYWxDQVJvb3QuY3JsMDagNKAyhjBodHRwOi8vY3JsLmNvbW9k 
-by5uZXQvQWRkVHJ1c3RFeHRlcm5hbENBUm9vdC5jcmwwgYAGCCsGAQUFBwEBBHQw -cjA4BggrBgEFBQcwAoYsaHR0cDovL2NydC5jb21vZG9jYS5jb20vQWRkVHJ1c3RV -VE5TR0NDQS5jcnQwNgYIKwYBBQUHMAKGKmh0dHA6Ly9jcnQuY29tb2RvLm5ldC9B -ZGRUcnVzdFVUTlNHQ0NBLmNydDARBglghkgBhvhCAQEEBAMCAgQwDQYJKoZIhvcN -AQEFBQADggEBAK2zEzs+jcIrVK9oDkdDZNvhuBYTdCfpxfFs+OAujW0bIfJAy232 -euVsnJm6u/+OrqKudD2tad2BbejLLXhMZViaCmK7D9nrXHx4te5EP8rL19SUVqLY -1pTnv5dhNgEgvA7n5lIzDSYs7yRLsr7HJsYPr6SeYSuZizyX1SNz7ooJ32/F3X98 -RB0Mlc/E0OyOrkQ9/y5IrnpnaSora8CnUrV5XNOg+kyCz9edCyx4D5wXYcwZPVWz -8aDqquESrezPyjtfi4WRO4s/VD3HLZvOxzMrWAVYCDG9FxaOhF0QGuuG1F7F3GKV -v6prNyCl016kRl2j1UT+a7gLd8fA25A4C9E= ------END CERTIFICATE----- - 2 s:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root - i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC ------BEGIN CERTIFICATE----- -MIIEZjCCA06gAwIBAgIQUSYKkxzif5zDpV954HKugjANBgkqhkiG9w0BAQUFADCB -kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw -IFNHQzAeFw0wNTA2MDcwODA5MTBaFw0xOTA2MjQxOTA2MzBaMG8xCzAJBgNVBAYT -AlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0 -ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB -IFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC39xoz5vIABC05 -4E5b7R+8bA/Ntfojts7emxEzl6QpTH2Tn71KvJPtAxrjj8/lbVBa1pcplFqAsEl6 -2y6V/bjKvzc4LR4+kUGtcFbH8E8/6DKedMrIkFTpxl8PeJ2aQDwOrGGqXhSPnoeh -alDc15pOrwWzpnGUnHGzUGAKxxOdOAeGAqjpqGkmGJCrTLBPI6s6T4TY386f4Wlv -u9dC12tE5Met7m1BX3JacQg3s3llpFmglDf3AC8NwpJy2tA4ctsUqEXEXSp9t7TW -xO6szRNEt8kr3UMAJfphuWlqWCMRt6czj1Z1WfXNKddGtworZbbTQm8Vsrh7++/p -XVPVNFonAgMBAAGjgdgwgdUwHwYDVR0jBBgwFoAUUzLRs89/+uDxoF2FTpLSnkUd -tE8wHQYDVR0OBBYEFK29mHo0tCb3+sQmVO8DveAky1QaMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MBEGCWCGSAGG+EIBAQQEAwIBAjAgBgNVHSUEGTAX -BgorBgEEAYI3CgMDBglghkgBhvhCBAEwPQYDVR0fBDYwNDAyoDCgLoYsaHR0cDov -L2NybC51c2VydHJ1c3QuY29tL1VUTi1EQVRBQ29ycFNHQy5jcmwwDQYJKoZIhvcN -AQEFBQADggEBAMbuUxdoFLJRIh6QWA2U/b3xcOWGLcM2MY9USEbnLQg3vGwKYOEO -rVE04BKT6b64q7gmtOmWPSiPrmQH/uAB7MXjkesYoPF1ftsK5p+R26+udd8jkWjd -FwBaS/9kbHDrARrQkNnHptZt9hPk/7XJ0h4qy7ElQyZ42TCbTg0evmnv3+r+LbPM -+bDdtRTKkdSytaX7ARmjR3mfnYyVhzT4HziS2jamEfpr62vp3EV4FTkG101B5CHI -3C+H0be/SGB1pWLLJN47YaApIKa+xWycxOkKaSLvkTr6Jq/RW0GnOuL4OAdCq8Fb -+M5tug8EPzI0rNwEKNdwMBQmBsTkm5jVz3g= ------END CERTIFICATE----- - 3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC - i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC ------BEGIN CERTIFICATE----- -MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB -kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw -IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG -EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD -VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu -dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 -E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ -D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK -4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq 
-lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW -bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB -o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT -MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js -LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr -BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB -AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft -Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj -j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH -KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv -2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 -mfnGV/TJVTl4uix5yaaIK/QI ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/ssl_cert.pem b/pype/vendor/future/backports/test/ssl_cert.pem deleted file mode 100644 index 47a7d7e37e..0000000000 --- a/pype/vendor/future/backports/test/ssl_cert.pem +++ /dev/null @@ -1,15 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV -BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u -IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw -MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH -Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k -YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw -gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 -6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt -pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw -FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd -BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G -lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 -CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX ------END CERTIFICATE----- diff --git a/pype/vendor/future/backports/test/ssl_key.passwd.pem b/pype/vendor/future/backports/test/ssl_key.passwd.pem deleted file mode 100644 index 2524672e70..0000000000 --- a/pype/vendor/future/backports/test/ssl_key.passwd.pem +++ /dev/null @@ -1,18 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A - -kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c -u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA -AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr -Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ -YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P -6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ -noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 -94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l -7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo -cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO -zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt -L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo -2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== ------END RSA PRIVATE KEY----- diff --git a/pype/vendor/future/backports/test/ssl_key.pem b/pype/vendor/future/backports/test/ssl_key.pem deleted file mode 100644 index 3fd3bbd54a..0000000000 --- a/pype/vendor/future/backports/test/ssl_key.pem +++ /dev/null @@ -1,16 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm 
-LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 -ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP -USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt -CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq -SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK -UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y -BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ -ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 -oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik -eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F -0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS -x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ -SPIXQuT8RMPDVNQ= ------END PRIVATE KEY----- diff --git a/pype/vendor/future/backports/test/ssl_servers.py b/pype/vendor/future/backports/test/ssl_servers.py deleted file mode 100644 index 87a3fb8557..0000000000 --- a/pype/vendor/future/backports/test/ssl_servers.py +++ /dev/null @@ -1,207 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals -from future.builtins import filter, str -from future import utils -import os -import sys -import ssl -import pprint -import socket -from future.backports.urllib import parse as urllib_parse -from future.backports.http.server import (HTTPServer as _HTTPServer, - SimpleHTTPRequestHandler, BaseHTTPRequestHandler) -from future.backports.test import support -threading = support.import_module("threading") - -here = os.path.dirname(__file__) - -HOST = support.HOST -CERTFILE = os.path.join(here, 'keycert.pem') - -# This one's based on HTTPServer, which is based on SocketServer - -class HTTPSServer(_HTTPServer): - - def __init__(self, server_address, handler_class, context): - _HTTPServer.__init__(self, server_address, handler_class) - self.context = context - - def __str__(self): - return ('<%s %s:%s>' % - (self.__class__.__name__, - self.server_name, - self.server_port)) - - def get_request(self): - # override this to wrap socket with SSL - try: - sock, addr = self.socket.accept() - sslconn = self.context.wrap_socket(sock, server_side=True) - except socket.error as e: - # socket errors are silenced by the caller, print them here - if support.verbose: - sys.stderr.write("Got an error:\n%s\n" % e) - raise - return sslconn, addr - -class RootedHTTPRequestHandler(SimpleHTTPRequestHandler): - # need to override translate_path to get a known root, - # instead of using os.curdir, since the test could be - # run from anywhere - - server_version = "TestHTTPS/1.0" - root = here - # Avoid hanging when a request gets interrupted by the client - timeout = 5 - - def translate_path(self, path): - """Translate a /-separated PATH to the local filename syntax. - - Components that mean special things to the local file system - (e.g. drive or directory names) are ignored. (XXX They should - probably be diagnosed.) 
- - """ - # abandon query parameters - path = urllib.parse.urlparse(path)[2] - path = os.path.normpath(urllib.parse.unquote(path)) - words = path.split('/') - words = filter(None, words) - path = self.root - for word in words: - drive, word = os.path.splitdrive(word) - head, word = os.path.split(word) - path = os.path.join(path, word) - return path - - def log_message(self, format, *args): - # we override this to suppress logging unless "verbose" - if support.verbose: - sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" % - (self.server.server_address, - self.server.server_port, - self.request.cipher(), - self.log_date_time_string(), - format%args)) - - -class StatsRequestHandler(BaseHTTPRequestHandler): - """Example HTTP request handler which returns SSL statistics on GET - requests. - """ - - server_version = "StatsHTTPS/1.0" - - def do_GET(self, send_body=True): - """Serve a GET request.""" - sock = self.rfile.raw._sock - context = sock.context - stats = { - 'session_cache': context.session_stats(), - 'cipher': sock.cipher(), - 'compression': sock.compression(), - } - body = pprint.pformat(stats) - body = body.encode('utf-8') - self.send_response(200) - self.send_header("Content-type", "text/plain; charset=utf-8") - self.send_header("Content-Length", str(len(body))) - self.end_headers() - if send_body: - self.wfile.write(body) - - def do_HEAD(self): - """Serve a HEAD request.""" - self.do_GET(send_body=False) - - def log_request(self, format, *args): - if support.verbose: - BaseHTTPRequestHandler.log_request(self, format, *args) - - -class HTTPSServerThread(threading.Thread): - - def __init__(self, context, host=HOST, handler_class=None): - self.flag = None - self.server = HTTPSServer((host, 0), - handler_class or RootedHTTPRequestHandler, - context) - self.port = self.server.server_port - threading.Thread.__init__(self) - self.daemon = True - - def __str__(self): - return "<%s %s>" % (self.__class__.__name__, self.server) - - def start(self, flag=None): - self.flag = flag - threading.Thread.start(self) - - def run(self): - if self.flag: - self.flag.set() - try: - self.server.serve_forever(0.05) - finally: - self.server.server_close() - - def stop(self): - self.server.shutdown() - - -def make_https_server(case, certfile=CERTFILE, host=HOST, handler_class=None): - # we assume the certfile contains both private key and certificate - context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - context.load_cert_chain(certfile) - server = HTTPSServerThread(context, host, handler_class) - flag = threading.Event() - server.start(flag) - flag.wait() - def cleanup(): - if support.verbose: - sys.stdout.write('stopping HTTPS server\n') - server.stop() - if support.verbose: - sys.stdout.write('joining HTTPS thread\n') - server.join() - case.addCleanup(cleanup) - return server - - -if __name__ == "__main__": - import argparse - parser = argparse.ArgumentParser( - description='Run a test HTTPS server. 
' - 'By default, the current directory is served.') - parser.add_argument('-p', '--port', type=int, default=4433, - help='port to listen on (default: %(default)s)') - parser.add_argument('-q', '--quiet', dest='verbose', default=True, - action='store_false', help='be less verbose') - parser.add_argument('-s', '--stats', dest='use_stats_handler', default=False, - action='store_true', help='always return stats page') - parser.add_argument('--curve-name', dest='curve_name', type=str, - action='store', - help='curve name for EC-based Diffie-Hellman') - parser.add_argument('--dh', dest='dh_file', type=str, action='store', - help='PEM file containing DH parameters') - args = parser.parse_args() - - support.verbose = args.verbose - if args.use_stats_handler: - handler_class = StatsRequestHandler - else: - handler_class = RootedHTTPRequestHandler - if utils.PY2: - handler_class.root = os.getcwdu() - else: - handler_class.root = os.getcwd() - context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) - context.load_cert_chain(CERTFILE) - if args.curve_name: - context.set_ecdh_curve(args.curve_name) - if args.dh_file: - context.load_dh_params(args.dh_file) - - server = HTTPSServer(("", args.port), handler_class, context) - if args.verbose: - print("Listening on https://localhost:{0.port}".format(args)) - server.serve_forever(0.1) diff --git a/pype/vendor/future/backports/test/support.py b/pype/vendor/future/backports/test/support.py deleted file mode 100644 index b59c4ff73b..0000000000 --- a/pype/vendor/future/backports/test/support.py +++ /dev/null @@ -1,2048 +0,0 @@ -# -*- coding: utf-8 -*- -"""Supporting definitions for the Python regression tests. - -Backported for python-future from Python 3.3 test/support.py. -""" - -from __future__ import (absolute_import, division, - print_function, unicode_literals) -from future import utils -from future.builtins import str, range, open, int, map, list - -import contextlib -import errno -import functools -import gc -import socket -import sys -import os -import platform -import shutil -import warnings -import unittest -# For Python 2.6 compatibility: -if not hasattr(unittest, 'skip'): - import unittest2 as unittest - -import importlib -# import collections.abc # not present on Py2.7 -import re -import subprocess -import imp -import time -try: - import sysconfig -except ImportError: - # sysconfig is not available on Python 2.6. 
Try using distutils.sysconfig instead: - from distutils import sysconfig -import fnmatch -import logging.handlers -import struct -import tempfile - -try: - if utils.PY3: - import _thread, threading - else: - import thread as _thread, threading -except ImportError: - _thread = None - threading = None -try: - import multiprocessing.process -except ImportError: - multiprocessing = None - -try: - import zlib -except ImportError: - zlib = None - -try: - import gzip -except ImportError: - gzip = None - -try: - import bz2 -except ImportError: - bz2 = None - -try: - import lzma -except ImportError: - lzma = None - -__all__ = [ - "Error", "TestFailed", "ResourceDenied", "import_module", "verbose", - "use_resources", "max_memuse", "record_original_stdout", - "get_original_stdout", "unload", "unlink", "rmtree", "forget", - "is_resource_enabled", "requires", "requires_freebsd_version", - "requires_linux_version", "requires_mac_ver", "find_unused_port", - "bind_port", "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", - "temp_cwd", "findfile", "create_empty_file", "sortdict", - "check_syntax_error", "open_urlresource", "check_warnings", "CleanImport", - "EnvironmentVarGuard", "TransientResource", "captured_stdout", - "captured_stdin", "captured_stderr", "time_out", "socket_peer_reset", - "ioerror_peer_reset", "run_with_locale", 'temp_umask', - "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest", - "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup", - "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail", - "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754", - "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink", - "skip_unless_xattr", "import_fresh_module", "requires_zlib", - "PIPE_MAX_SIZE", "failfast", "anticipate_failure", "run_with_tz", - "requires_gzip", "requires_bz2", "requires_lzma", "suppress_crash_popup", - ] - -class Error(Exception): - """Base class for regression test exceptions.""" - -class TestFailed(Error): - """Test failed.""" - -class ResourceDenied(unittest.SkipTest): - """Test skipped because it requested a disallowed resource. - - This is raised when a test calls requires() for a resource that - has not be enabled. It is used to distinguish between expected - and unexpected skips. - """ - -@contextlib.contextmanager -def _ignore_deprecated_imports(ignore=True): - """Context manager to suppress package and module deprecation - warnings when importing them. - - If ignore is False, this context manager has no effect.""" - if ignore: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", ".+ (module|package)", - DeprecationWarning) - yield - else: - yield - - -def import_module(name, deprecated=False): - """Import and return the module to be tested, raising SkipTest if - it is not available. - - If deprecated is True, any module or package deprecation messages - will be suppressed.""" - with _ignore_deprecated_imports(deprecated): - try: - return importlib.import_module(name) - except ImportError as msg: - raise unittest.SkipTest(str(msg)) - - -def _save_and_remove_module(name, orig_modules): - """Helper function to save and remove a module from sys.modules - - Raise ImportError if the module can't be imported. 
- """ - # try to import the module and raise an error if it can't be imported - if name not in sys.modules: - __import__(name) - del sys.modules[name] - for modname in list(sys.modules): - if modname == name or modname.startswith(name + '.'): - orig_modules[modname] = sys.modules[modname] - del sys.modules[modname] - -def _save_and_block_module(name, orig_modules): - """Helper function to save and block a module in sys.modules - - Return True if the module was in sys.modules, False otherwise. - """ - saved = True - try: - orig_modules[name] = sys.modules[name] - except KeyError: - saved = False - sys.modules[name] = None - return saved - - -def anticipate_failure(condition): - """Decorator to mark a test that is known to be broken in some cases - - Any use of this decorator should have a comment identifying the - associated tracker issue. - """ - if condition: - return unittest.expectedFailure - return lambda f: f - - -def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): - """Import and return a module, deliberately bypassing sys.modules. - This function imports and returns a fresh copy of the named Python module - by removing the named module from sys.modules before doing the import. - Note that unlike reload, the original module is not affected by - this operation. - - *fresh* is an iterable of additional module names that are also removed - from the sys.modules cache before doing the import. - - *blocked* is an iterable of module names that are replaced with None - in the module cache during the import to ensure that attempts to import - them raise ImportError. - - The named module and any modules named in the *fresh* and *blocked* - parameters are saved before starting the import and then reinserted into - sys.modules when the fresh import is complete. - - Module and package deprecation messages are suppressed during this import - if *deprecated* is True. - - This function will raise ImportError if the named module cannot be - imported. - - If deprecated is True, any module or package deprecation messages - will be suppressed. - """ - # NOTE: test_heapq, test_json and test_warnings include extra sanity checks - # to make sure that this utility function is working as expected - with _ignore_deprecated_imports(deprecated): - # Keep track of modules saved for later restoration as well - # as those which just need a blocking entry removed - orig_modules = {} - names_to_remove = [] - _save_and_remove_module(name, orig_modules) - try: - for fresh_name in fresh: - _save_and_remove_module(fresh_name, orig_modules) - for blocked_name in blocked: - if not _save_and_block_module(blocked_name, orig_modules): - names_to_remove.append(blocked_name) - fresh_module = importlib.import_module(name) - except ImportError: - fresh_module = None - finally: - for orig_name, module in orig_modules.items(): - sys.modules[orig_name] = module - for name_to_remove in names_to_remove: - del sys.modules[name_to_remove] - return fresh_module - - -def get_attribute(obj, name): - """Get an attribute, raising SkipTest if AttributeError is raised.""" - try: - attribute = getattr(obj, name) - except AttributeError: - raise unittest.SkipTest("object %r has no attribute %r" % (obj, name)) - else: - return attribute - -verbose = 1 # Flag set to 0 by regrtest.py -use_resources = None # Flag set to [] by regrtest.py -max_memuse = 0 # Disable bigmem tests (they will still be run with - # small sizes, to make sure they work.) 
-real_max_memuse = 0 -failfast = False -match_tests = None - -# _original_stdout is meant to hold stdout at the time regrtest began. -# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. -# The point is to have some flavor of stdout the user can actually see. -_original_stdout = None -def record_original_stdout(stdout): - global _original_stdout - _original_stdout = stdout - -def get_original_stdout(): - return _original_stdout or sys.stdout - -def unload(name): - try: - del sys.modules[name] - except KeyError: - pass - -if sys.platform.startswith("win"): - def _waitfor(func, pathname, waitall=False): - # Perform the operation - func(pathname) - # Now setup the wait loop - if waitall: - dirname = pathname - else: - dirname, name = os.path.split(pathname) - dirname = dirname or '.' - # Check for `pathname` to be removed from the filesystem. - # The exponential backoff of the timeout amounts to a total - # of ~1 second after which the deletion is probably an error - # anyway. - # Testing on a i7@4.3GHz shows that usually only 1 iteration is - # required when contention occurs. - timeout = 0.001 - while timeout < 1.0: - # Note we are only testing for the existence of the file(s) in - # the contents of the directory regardless of any security or - # access rights. If we have made it this far, we have sufficient - # permissions to do that much using Python's equivalent of the - # Windows API FindFirstFile. - # Other Windows APIs can fail or give incorrect results when - # dealing with files that are pending deletion. - L = os.listdir(dirname) - if not (L if waitall else name in L): - return - # Increase the timeout and try again - time.sleep(timeout) - timeout *= 2 - warnings.warn('tests may fail, delete still pending for ' + pathname, - RuntimeWarning, stacklevel=4) - - def _unlink(filename): - _waitfor(os.unlink, filename) - - def _rmdir(dirname): - _waitfor(os.rmdir, dirname) - - def _rmtree(path): - def _rmtree_inner(path): - for name in os.listdir(path): - fullname = os.path.join(path, name) - if os.path.isdir(fullname): - _waitfor(_rmtree_inner, fullname, waitall=True) - os.rmdir(fullname) - else: - os.unlink(fullname) - _waitfor(_rmtree_inner, path, waitall=True) - _waitfor(os.rmdir, path) -else: - _unlink = os.unlink - _rmdir = os.rmdir - _rmtree = shutil.rmtree - -def unlink(filename): - try: - _unlink(filename) - except OSError as error: - # The filename need not exist. - if error.errno not in (errno.ENOENT, errno.ENOTDIR): - raise - -def rmdir(dirname): - try: - _rmdir(dirname) - except OSError as error: - # The directory need not exist. - if error.errno != errno.ENOENT: - raise - -def rmtree(path): - try: - _rmtree(path) - except OSError as error: - if error.errno != errno.ENOENT: - raise - -def make_legacy_pyc(source): - """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location. - - The choice of .pyc or .pyo extension is done based on the __debug__ flag - value. - - :param source: The file system path to the source file. The source file - does not need to exist, however the PEP 3147 pyc file must exist. - :return: The file system path to the legacy pyc file. - """ - pyc_file = imp.cache_from_source(source) - up_one = os.path.dirname(os.path.abspath(source)) - legacy_pyc = os.path.join(up_one, source + ('c' if __debug__ else 'o')) - os.rename(pyc_file, legacy_pyc) - return legacy_pyc - -def forget(modname): - """'Forget' a module was ever imported. - - This removes the module from sys.modules and deletes any PEP 3147 or - legacy .pyc and .pyo files. 
- """ - unload(modname) - for dirname in sys.path: - source = os.path.join(dirname, modname + '.py') - # It doesn't matter if they exist or not, unlink all possible - # combinations of PEP 3147 and legacy pyc and pyo files. - unlink(source + 'c') - unlink(source + 'o') - unlink(imp.cache_from_source(source, debug_override=True)) - unlink(imp.cache_from_source(source, debug_override=False)) - -# On some platforms, should not run gui test even if it is allowed -# in `use_resources'. -if sys.platform.startswith('win'): - import ctypes - import ctypes.wintypes - def _is_gui_available(): - UOI_FLAGS = 1 - WSF_VISIBLE = 0x0001 - class USEROBJECTFLAGS(ctypes.Structure): - _fields_ = [("fInherit", ctypes.wintypes.BOOL), - ("fReserved", ctypes.wintypes.BOOL), - ("dwFlags", ctypes.wintypes.DWORD)] - dll = ctypes.windll.user32 - h = dll.GetProcessWindowStation() - if not h: - raise ctypes.WinError() - uof = USEROBJECTFLAGS() - needed = ctypes.wintypes.DWORD() - res = dll.GetUserObjectInformationW(h, - UOI_FLAGS, - ctypes.byref(uof), - ctypes.sizeof(uof), - ctypes.byref(needed)) - if not res: - raise ctypes.WinError() - return bool(uof.dwFlags & WSF_VISIBLE) -else: - def _is_gui_available(): - return True - -def is_resource_enabled(resource): - """Test whether a resource is enabled. Known resources are set by - regrtest.py.""" - return use_resources is not None and resource in use_resources - -def requires(resource, msg=None): - """Raise ResourceDenied if the specified resource is not available. - - If the caller's module is __main__ then automatically return True. The - possibility of False being returned occurs when regrtest.py is - executing. - """ - if resource == 'gui' and not _is_gui_available(): - raise unittest.SkipTest("Cannot use the 'gui' resource") - # see if the caller's module is __main__ - if so, treat as if - # the resource was set - if sys._getframe(1).f_globals.get("__name__") == "__main__": - return - if not is_resource_enabled(resource): - if msg is None: - msg = "Use of the %r resource not enabled" % resource - raise ResourceDenied(msg) - -def _requires_unix_version(sysname, min_version): - """Decorator raising SkipTest if the OS is `sysname` and the version is less - than `min_version`. - - For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if - the FreeBSD version is less than 7.2. - """ - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kw): - if platform.system() == sysname: - version_txt = platform.release().split('-', 1)[0] - try: - version = tuple(map(int, version_txt.split('.'))) - except ValueError: - pass - else: - if version < min_version: - min_version_txt = '.'.join(map(str, min_version)) - raise unittest.SkipTest( - "%s version %s or higher required, not %s" - % (sysname, min_version_txt, version_txt)) - return func(*args, **kw) - wrapper.min_version = min_version - return wrapper - return decorator - -def requires_freebsd_version(*min_version): - """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is - less than `min_version`. - - For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD - version is less than 7.2. - """ - return _requires_unix_version('FreeBSD', min_version) - -def requires_linux_version(*min_version): - """Decorator raising SkipTest if the OS is Linux and the Linux version is - less than `min_version`. - - For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux - version is less than 2.6.32. 
- """ - return _requires_unix_version('Linux', min_version) - -def requires_mac_ver(*min_version): - """Decorator raising SkipTest if the OS is Mac OS X and the OS X - version if less than min_version. - - For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version - is lesser than 10.5. - """ - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kw): - if sys.platform == 'darwin': - version_txt = platform.mac_ver()[0] - try: - version = tuple(map(int, version_txt.split('.'))) - except ValueError: - pass - else: - if version < min_version: - min_version_txt = '.'.join(map(str, min_version)) - raise unittest.SkipTest( - "Mac OS X %s or higher required, not %s" - % (min_version_txt, version_txt)) - return func(*args, **kw) - wrapper.min_version = min_version - return wrapper - return decorator - -# Don't use "localhost", since resolving it uses the DNS under recent -# Windows versions (see issue #18792). -HOST = "127.0.0.1" -HOSTv6 = "::1" - - -def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): - """Returns an unused port that should be suitable for binding. This is - achieved by creating a temporary socket with the same family and type as - the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to - the specified host address (defaults to 0.0.0.0) with the port set to 0, - eliciting an unused ephemeral port from the OS. The temporary socket is - then closed and deleted, and the ephemeral port is returned. - - Either this method or bind_port() should be used for any tests where a - server socket needs to be bound to a particular port for the duration of - the test. Which one to use depends on whether the calling code is creating - a python socket, or if an unused port needs to be provided in a constructor - or passed to an external program (i.e. the -accept argument to openssl's - s_server mode). Always prefer bind_port() over find_unused_port() where - possible. Hard coded ports should *NEVER* be used. As soon as a server - socket is bound to a hard coded port, the ability to run multiple instances - of the test simultaneously on the same host is compromised, which makes the - test a ticking time bomb in a buildbot environment. On Unix buildbots, this - may simply manifest as a failed test, which can be recovered from without - intervention in most cases, but on Windows, the entire python process can - completely and utterly wedge, requiring someone to log in to the buildbot - and manually kill the affected process. - - (This is easy to reproduce on Windows, unfortunately, and can be traced to - the SO_REUSEADDR socket option having different semantics on Windows versus - Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, - listen and then accept connections on identical host/ports. An EADDRINUSE - socket.error will be raised at some point (depending on the platform and - the order bind and listen were called on each socket). - - However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE - will ever be raised when attempting to bind two identical host/ports. When - accept() is called on each socket, the second caller's process will steal - the port from the first caller, leaving them both in an awkwardly wedged - state where they'll no longer respond to any signals or graceful kills, and - must be forcibly killed via OpenProcess()/TerminateProcess(). 
- - The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option - instead of SO_REUSEADDR, which effectively affords the same semantics as - SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open - Source world compared to Windows ones, this is a common mistake. A quick - look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when - openssl.exe is called with the 's_server' option, for example. See - http://bugs.python.org/issue2550 for more info. The following site also - has a very thorough description about the implications of both REUSEADDR - and EXCLUSIVEADDRUSE on Windows: - http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) - - XXX: although this approach is a vast improvement on previous attempts to - elicit unused ports, it rests heavily on the assumption that the ephemeral - port returned to us by the OS won't immediately be dished back out to some - other process when we close and delete our temporary socket but before our - calling code has a chance to bind the returned port. We can deal with this - issue if/when we come across it. - """ - - tempsock = socket.socket(family, socktype) - port = bind_port(tempsock) - tempsock.close() - del tempsock - return port - -def bind_port(sock, host=HOST): - """Bind the socket to a free port and return the port number. Relies on - ephemeral ports in order to ensure we are using an unbound port. This is - important as many tests may be running simultaneously, especially in a - buildbot environment. This method raises an exception if the sock.family - is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR - or SO_REUSEPORT set on it. Tests should *never* set these socket options - for TCP/IP sockets. The only case for setting these options is testing - multicasting via multiple UDP sockets. - - Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. - on Windows), it will be set on the socket. This will prevent anyone else - from bind()'ing to our host/port for the duration of the test. - """ - - if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: - if hasattr(socket, 'SO_REUSEADDR'): - if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: - raise TestFailed("tests should never set the SO_REUSEADDR " \ - "socket option on TCP/IP sockets!") - if hasattr(socket, 'SO_REUSEPORT'): - try: - if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: - raise TestFailed("tests should never set the SO_REUSEPORT " \ - "socket option on TCP/IP sockets!") - except socket.error: - # Python's socket module was compiled using modern headers - # thus defining SO_REUSEPORT but this process is running - # under an older kernel that does not support SO_REUSEPORT. - pass - if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): - sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) - - sock.bind((host, 0)) - port = sock.getsockname()[1] - return port - -def _is_ipv6_enabled(): - """Check whether IPv6 is enabled on this host.""" - if socket.has_ipv6: - sock = None - try: - sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) - sock.bind(('::1', 0)) - return True - except (socket.error, socket.gaierror): - pass - finally: - if sock: - sock.close() - return False - -IPV6_ENABLED = _is_ipv6_enabled() - - -# A constant likely larger than the underlying OS pipe buffer size, to -# make writes blocking. 
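
For illustration, a sketch of the bind_port()/find_unused_port() pattern the
docstrings above describe; bind_port() is the preferred form because the port
stays bound to our socket for the duration of the test:

    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        port = bind_port(sock)   # binds to HOST ("127.0.0.1") with port 0
        sock.listen(5)           # the OS-chosen ephemeral port is now ours
        print("serving on %s:%d" % (HOST, port))
    finally:
        sock.close()
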
-# Windows limit seems to be around 512 B, and many Unix kernels have a -# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure. -# (see issue #17835 for a discussion of this number). -PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1 - -# A constant likely larger than the underlying OS socket buffer size, to make -# writes blocking. -# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl -# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643 -# for a discussion of this number). -SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1 - -# # decorator for skipping tests on non-IEEE 754 platforms -# requires_IEEE_754 = unittest.skipUnless( -# float.__getformat__("double").startswith("IEEE"), -# "test requires IEEE 754 doubles") - -requires_zlib = unittest.skipUnless(zlib, 'requires zlib') - -requires_bz2 = unittest.skipUnless(bz2, 'requires bz2') - -requires_lzma = unittest.skipUnless(lzma, 'requires lzma') - -is_jython = sys.platform.startswith('java') - -# Filename used for testing -if os.name == 'java': - # Jython disallows @ in module names - TESTFN = '$test' -else: - TESTFN = '@test' - -# Disambiguate TESTFN for parallel testing, while letting it remain a valid -# module name. -TESTFN = "{0}_{1}_tmp".format(TESTFN, os.getpid()) - -# # FS_NONASCII: non-ASCII character encodable by os.fsencode(), -# # or None if there is no such character. -# FS_NONASCII = None -# for character in ( -# # First try printable and common characters to have a readable filename. -# # For each character, the encoding list are just example of encodings able -# # to encode the character (the list is not exhaustive). -# -# # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 -# '\u00E6', -# # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 -# '\u0130', -# # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257 -# '\u0141', -# # U+03C6 (Greek Small Letter Phi): cp1253 -# '\u03C6', -# # U+041A (Cyrillic Capital Letter Ka): cp1251 -# '\u041A', -# # U+05D0 (Hebrew Letter Alef): Encodable to cp424 -# '\u05D0', -# # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic -# '\u060C', -# # U+062A (Arabic Letter Teh): cp720 -# '\u062A', -# # U+0E01 (Thai Character Ko Kai): cp874 -# '\u0E01', -# -# # Then try more "special" characters. "special" because they may be -# # interpreted or displayed differently depending on the exact locale -# # encoding and the font. -# -# # U+00A0 (No-Break Space) -# '\u00A0', -# # U+20AC (Euro Sign) -# '\u20AC', -# ): -# try: -# os.fsdecode(os.fsencode(character)) -# except UnicodeError: -# pass -# else: -# FS_NONASCII = character -# break -# -# # TESTFN_UNICODE is a non-ascii filename -# TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" -# if sys.platform == 'darwin': -# # In Mac OS X's VFS API file names are, by definition, canonically -# # decomposed Unicode, encoded using UTF-8. See QA1173: -# # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html -# import unicodedata -# TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) -# TESTFN_ENCODING = sys.getfilesystemencoding() -# -# # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be -# # encoded by the filesystem encoding (in strict mode). It can be None if we -# # cannot generate such filename. 
-# TESTFN_UNENCODABLE = None -# if os.name in ('nt', 'ce'): -# # skip win32s (0) or Windows 9x/ME (1) -# if sys.getwindowsversion().platform >= 2: -# # Different kinds of characters from various languages to minimize the -# # probability that the whole name is encodable to MBCS (issue #9819) -# TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" -# try: -# TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) -# except UnicodeEncodeError: -# pass -# else: -# print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' -# 'Unicode filename tests may not be effective' -# % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) -# TESTFN_UNENCODABLE = None -# # Mac OS X denies unencodable filenames (invalid utf-8) -# elif sys.platform != 'darwin': -# try: -# # ascii and utf-8 cannot encode the byte 0xff -# b'\xff'.decode(TESTFN_ENCODING) -# except UnicodeDecodeError: -# # 0xff will be encoded using the surrogate character u+DCFF -# TESTFN_UNENCODABLE = TESTFN \ -# + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') -# else: -# # File system encoding (eg. ISO-8859-* encodings) can encode -# # the byte 0xff. Skip some unicode filename tests. -# pass -# -# # TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be -# # decoded from the filesystem encoding (in strict mode). It can be None if we -# # cannot generate such filename (ex: the latin1 encoding can decode any byte -# # sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks -# # to the surrogateescape error handler (PEP 383), but not from the filesystem -# # encoding in strict mode. -# TESTFN_UNDECODABLE = None -# for name in ( -# # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows -# # accepts it to create a file or a directory, or don't accept to enter to -# # such directory (when the bytes name is used). So test b'\xe7' first: it is -# # not decodable from cp932. -# b'\xe7w\xf0', -# # undecodable from ASCII, UTF-8 -# b'\xff', -# # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856 -# # and cp857 -# b'\xae\xd5' -# # undecodable from UTF-8 (UNIX and Mac OS X) -# b'\xed\xb2\x80', b'\xed\xb4\x80', -# # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252, -# # cp1253, cp1254, cp1255, cp1257, cp1258 -# b'\x81\x98', -# ): -# try: -# name.decode(TESTFN_ENCODING) -# except UnicodeDecodeError: -# TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name -# break -# -# if FS_NONASCII: -# TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII -# else: -# TESTFN_NONASCII = None - -# Save the initial cwd -SAVEDCWD = os.getcwd() - -@contextlib.contextmanager -def temp_cwd(name='tempcwd', quiet=False, path=None): - """ - Context manager that temporarily changes the CWD. - - An existing path may be provided as *path*, in which case this - function makes no changes to the file system. - - Otherwise, the new CWD is created in the current directory and it's - named *name*. If *quiet* is False (default) and it's not possible to - create or change the CWD, an error is raised. If it's True, only a - warning is raised and the original CWD is used. 
- """ - saved_dir = os.getcwd() - is_temporary = False - if path is None: - path = name - try: - os.mkdir(name) - is_temporary = True - except OSError: - if not quiet: - raise - warnings.warn('tests may fail, unable to create temp CWD ' + name, - RuntimeWarning, stacklevel=3) - try: - os.chdir(path) - except OSError: - if not quiet: - raise - warnings.warn('tests may fail, unable to change the CWD to ' + path, - RuntimeWarning, stacklevel=3) - try: - yield os.getcwd() - finally: - os.chdir(saved_dir) - if is_temporary: - rmtree(name) - - -if hasattr(os, "umask"): - @contextlib.contextmanager - def temp_umask(umask): - """Context manager that temporarily sets the process umask.""" - oldmask = os.umask(umask) - try: - yield - finally: - os.umask(oldmask) - - -def findfile(file, here=__file__, subdir=None): - """Try to find a file on sys.path and the working directory. If it is not - found the argument passed to the function is returned (this does not - necessarily signal failure; could still be the legitimate path).""" - if os.path.isabs(file): - return file - if subdir is not None: - file = os.path.join(subdir, file) - path = sys.path - path = [os.path.dirname(here)] + path - for dn in path: - fn = os.path.join(dn, file) - if os.path.exists(fn): return fn - return file - -def create_empty_file(filename): - """Create an empty file. If the file already exists, truncate it.""" - fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) - os.close(fd) - -def sortdict(dict): - "Like repr(dict), but in sorted order." - items = sorted(dict.items()) - reprpairs = ["%r: %r" % pair for pair in items] - withcommas = ", ".join(reprpairs) - return "{%s}" % withcommas - -def make_bad_fd(): - """ - Create an invalid file descriptor by opening and closing a file and return - its fd. - """ - file = open(TESTFN, "wb") - try: - return file.fileno() - finally: - file.close() - unlink(TESTFN) - -def check_syntax_error(testcase, statement): - testcase.assertRaises(SyntaxError, compile, statement, - '', 'exec') - -def open_urlresource(url, *args, **kw): - from future.backports.urllib import (request as urllib_request, - parse as urllib_parse) - - check = kw.pop('check', None) - - filename = urllib_parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! - - fn = os.path.join(os.path.dirname(__file__), "data", filename) - - def check_valid_file(fn): - f = open(fn, *args, **kw) - if check is None: - return f - elif check(f): - f.seek(0) - return f - f.close() - - if os.path.exists(fn): - f = check_valid_file(fn) - if f is not None: - return f - unlink(fn) - - # Verify the requirement before downloading the file - requires('urlfetch') - - print('\tfetching %s ...' % url, file=get_original_stdout()) - f = urllib_request.urlopen(url, timeout=15) - try: - with open(fn, "wb") as out: - s = f.read() - while s: - out.write(s) - s = f.read() - finally: - f.close() - - f = check_valid_file(fn) - if f is not None: - return f - raise TestFailed('invalid resource %r' % fn) - - -class WarningsRecorder(object): - """Convenience wrapper for the warnings list returned on - entry to the warnings.catch_warnings() context manager. 
- """ - def __init__(self, warnings_list): - self._warnings = warnings_list - self._last = 0 - - def __getattr__(self, attr): - if len(self._warnings) > self._last: - return getattr(self._warnings[-1], attr) - elif attr in warnings.WarningMessage._WARNING_DETAILS: - return None - raise AttributeError("%r has no attribute %r" % (self, attr)) - - @property - def warnings(self): - return self._warnings[self._last:] - - def reset(self): - self._last = len(self._warnings) - - -def _filterwarnings(filters, quiet=False): - """Catch the warnings, then check if all the expected - warnings have been raised and re-raise unexpected warnings. - If 'quiet' is True, only re-raise the unexpected warnings. - """ - # Clear the warning registry of the calling module - # in order to re-raise the warnings. - frame = sys._getframe(2) - registry = frame.f_globals.get('__warningregistry__') - if registry: - if utils.PY3: - registry.clear() - else: - # Py2-compatible: - for i in range(len(registry)): - registry.pop() - with warnings.catch_warnings(record=True) as w: - # Set filter "always" to record all warnings. Because - # test_warnings swap the module, we need to look up in - # the sys.modules dictionary. - sys.modules['warnings'].simplefilter("always") - yield WarningsRecorder(w) - # Filter the recorded warnings - reraise = list(w) - missing = [] - for msg, cat in filters: - seen = False - for w in reraise[:]: - warning = w.message - # Filter out the matching messages - if (re.match(msg, str(warning), re.I) and - issubclass(warning.__class__, cat)): - seen = True - reraise.remove(w) - if not seen and not quiet: - # This filter caught nothing - missing.append((msg, cat.__name__)) - if reraise: - raise AssertionError("unhandled warning %s" % reraise[0]) - if missing: - raise AssertionError("filter (%r, %s) did not catch any warning" % - missing[0]) - - -@contextlib.contextmanager -def check_warnings(*filters, **kwargs): - """Context manager to silence warnings. - - Accept 2-tuples as positional arguments: - ("message regexp", WarningCategory) - - Optional argument: - - if 'quiet' is True, it does not fail if a filter catches nothing - (default True without argument, - default False if some filters are defined) - - Without argument, it defaults to: - check_warnings(("", Warning), quiet=True) - """ - quiet = kwargs.get('quiet') - if not filters: - filters = (("", Warning),) - # Preserve backward compatibility - if quiet is None: - quiet = True - return _filterwarnings(filters, quiet) - - -class CleanImport(object): - """Context manager to force import to return a new module reference. - - This is useful for testing module-level behaviours, such as - the emission of a DeprecationWarning on import. - - Use like this: - - with CleanImport("foo"): - importlib.import_module("foo") # new reference - """ - - def __init__(self, *module_names): - self.original_modules = sys.modules.copy() - for module_name in module_names: - if module_name in sys.modules: - module = sys.modules[module_name] - # It is possible that module_name is just an alias for - # another module (e.g. stub for modules renamed in 3.x). - # In that case, we also need delete the real module to clear - # the import cache. 
- if module.__name__ != module_name: - del sys.modules[module.__name__] - del sys.modules[module_name] - - def __enter__(self): - return self - - def __exit__(self, *ignore_exc): - sys.modules.update(self.original_modules) - -### Added for python-future: -if utils.PY3: - import collections.abc - mybase = collections.abc.MutableMapping -else: - import UserDict - mybase = UserDict.DictMixin -### - -class EnvironmentVarGuard(mybase): - - """Class to help protect the environment variable properly. Can be used as - a context manager.""" - - def __init__(self): - self._environ = os.environ - self._changed = {} - - def __getitem__(self, envvar): - return self._environ[envvar] - - def __setitem__(self, envvar, value): - # Remember the initial value on the first access - if envvar not in self._changed: - self._changed[envvar] = self._environ.get(envvar) - self._environ[envvar] = value - - def __delitem__(self, envvar): - # Remember the initial value on the first access - if envvar not in self._changed: - self._changed[envvar] = self._environ.get(envvar) - if envvar in self._environ: - del self._environ[envvar] - - def keys(self): - return self._environ.keys() - - def __iter__(self): - return iter(self._environ) - - def __len__(self): - return len(self._environ) - - def set(self, envvar, value): - self[envvar] = value - - def unset(self, envvar): - del self[envvar] - - def __enter__(self): - return self - - def __exit__(self, *ignore_exc): - for (k, v) in self._changed.items(): - if v is None: - if k in self._environ: - del self._environ[k] - else: - self._environ[k] = v - os.environ = self._environ - - -class DirsOnSysPath(object): - """Context manager to temporarily add directories to sys.path. - - This makes a copy of sys.path, appends any directories given - as positional arguments, then reverts sys.path to the copied - settings when the context ends. - - Note that *all* sys.path modifications in the body of the - context manager, including replacement of the object, - will be reverted at the end of the block. - """ - - def __init__(self, *paths): - self.original_value = sys.path[:] - self.original_object = sys.path - sys.path.extend(paths) - - def __enter__(self): - return self - - def __exit__(self, *ignore_exc): - sys.path = self.original_object - sys.path[:] = self.original_value - - -class TransientResource(object): - - """Raise ResourceDenied if an exception is raised while the context manager - is in effect that matches the specified exception and attributes.""" - - def __init__(self, exc, **kwargs): - self.exc = exc - self.attrs = kwargs - - def __enter__(self): - return self - - def __exit__(self, type_=None, value=None, traceback=None): - """If type_ is a subclass of self.exc and value has attributes matching - self.attrs, raise ResourceDenied. Otherwise let the exception - propagate (if any).""" - if type_ is not None and issubclass(self.exc, type_): - for attr, attr_value in self.attrs.items(): - if not hasattr(value, attr): - break - if getattr(value, attr) != attr_value: - break - else: - raise ResourceDenied("an optional resource is not available") - -# Context managers that raise ResourceDenied when various issues -# with the Internet connection manifest themselves as exceptions. 
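
For illustration, a short sketch of EnvironmentVarGuard above (the variable
names are invented for the example): it records the first observed value of
every variable it touches and restores os.environ when the block exits:

    import os

    with EnvironmentVarGuard() as env:
        env.set("PYPE_DEBUG", "1")   # hypothetical variable, set temporarily
        env.unset("TMPDIR")          # removed only inside the block
        assert os.environ["PYPE_DEBUG"] == "1"
    # both variables are back to their original state here
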
-# XXX deprecate these and use transient_internet() instead -time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) -socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) -ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) - - -@contextlib.contextmanager -def transient_internet(resource_name, timeout=30.0, errnos=()): - """Return a context manager that raises ResourceDenied when various issues - with the Internet connection manifest themselves as exceptions.""" - default_errnos = [ - ('ECONNREFUSED', 111), - ('ECONNRESET', 104), - ('EHOSTUNREACH', 113), - ('ENETUNREACH', 101), - ('ETIMEDOUT', 110), - ] - default_gai_errnos = [ - ('EAI_AGAIN', -3), - ('EAI_FAIL', -4), - ('EAI_NONAME', -2), - ('EAI_NODATA', -5), - # Encountered when trying to resolve IPv6-only hostnames - ('WSANO_DATA', 11004), - ] - - denied = ResourceDenied("Resource %r is not available" % resource_name) - captured_errnos = errnos - gai_errnos = [] - if not captured_errnos: - captured_errnos = [getattr(errno, name, num) - for (name, num) in default_errnos] - gai_errnos = [getattr(socket, name, num) - for (name, num) in default_gai_errnos] - - def filter_error(err): - n = getattr(err, 'errno', None) - if (isinstance(err, socket.timeout) or - (isinstance(err, socket.gaierror) and n in gai_errnos) or - n in captured_errnos): - if not verbose: - sys.stderr.write(denied.args[0] + "\n") - # Was: raise denied from err - # For Python-Future: - exc = denied - exc.__cause__ = err - raise exc - - old_timeout = socket.getdefaulttimeout() - try: - if timeout is not None: - socket.setdefaulttimeout(timeout) - yield - except IOError as err: - # urllib can wrap original socket errors multiple times (!), we must - # unwrap to get at the original error. - while True: - a = err.args - if len(a) >= 1 and isinstance(a[0], IOError): - err = a[0] - # The error can also be wrapped as args[1]: - # except socket.error as msg: - # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) - elif len(a) >= 2 and isinstance(a[1], IOError): - err = a[1] - else: - break - filter_error(err) - raise - # XXX should we catch generic exceptions and look for their - # __cause__ or __context__? - finally: - socket.setdefaulttimeout(old_timeout) - - -@contextlib.contextmanager -def captured_output(stream_name): - """Return a context manager used by captured_stdout/stdin/stderr - that temporarily replaces the sys stream *stream_name* with a StringIO.""" - import io - orig_stdout = getattr(sys, stream_name) - setattr(sys, stream_name, io.StringIO()) - try: - yield getattr(sys, stream_name) - finally: - setattr(sys, stream_name, orig_stdout) - -def captured_stdout(): - """Capture the output of sys.stdout: - - with captured_stdout() as s: - print("hello") - self.assertEqual(s.getvalue(), "hello") - """ - return captured_output("stdout") - -def captured_stderr(): - return captured_output("stderr") - -def captured_stdin(): - return captured_output("stdin") - - -def gc_collect(): - """Force as many objects as possible to be collected. - - In non-CPython implementations of Python, this is needed because timely - deallocation is not guaranteed by the garbage collector. (Even in CPython - this can be the case in case of reference cycles.) This means that __del__ - methods may be called later than expected and weakrefs may remain alive for - longer than expected. This function tries its best to force all garbage - objects to disappear. 
- """ - gc.collect() - if is_jython: - time.sleep(0.1) - gc.collect() - gc.collect() - -@contextlib.contextmanager -def disable_gc(): - have_gc = gc.isenabled() - gc.disable() - try: - yield - finally: - if have_gc: - gc.enable() - - -def python_is_optimized(): - """Find if Python was built with optimizations.""" - # We don't have sysconfig on Py2.6: - import sysconfig - cflags = sysconfig.get_config_var('PY_CFLAGS') or '' - final_opt = "" - for opt in cflags.split(): - if opt.startswith('-O'): - final_opt = opt - return final_opt != '' and final_opt != '-O0' - - -_header = 'nP' -_align = '0n' -if hasattr(sys, "gettotalrefcount"): - _header = '2P' + _header - _align = '0P' -_vheader = _header + 'n' - -def calcobjsize(fmt): - return struct.calcsize(_header + fmt + _align) - -def calcvobjsize(fmt): - return struct.calcsize(_vheader + fmt + _align) - - -_TPFLAGS_HAVE_GC = 1<<14 -_TPFLAGS_HEAPTYPE = 1<<9 - -def check_sizeof(test, o, size): - result = sys.getsizeof(o) - # add GC header size - if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\ - ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))): - size += _testcapi.SIZEOF_PYGC_HEAD - msg = 'wrong size for %s: got %d, expected %d' \ - % (type(o), result, size) - test.assertEqual(result, size, msg) - -#======================================================================= -# Decorator for running a function in a different locale, correctly resetting -# it afterwards. - -def run_with_locale(catstr, *locales): - def decorator(func): - def inner(*args, **kwds): - try: - import locale - category = getattr(locale, catstr) - orig_locale = locale.setlocale(category) - except AttributeError: - # if the test author gives us an invalid category string - raise - except: - # cannot retrieve original locale, so do nothing - locale = orig_locale = None - else: - for loc in locales: - try: - locale.setlocale(category, loc) - break - except: - pass - - # now run the function, resetting the locale on exceptions - try: - return func(*args, **kwds) - finally: - if locale and orig_locale: - locale.setlocale(category, orig_locale) - inner.__name__ = func.__name__ - inner.__doc__ = func.__doc__ - return inner - return decorator - -#======================================================================= -# Decorator for running a function in a specific timezone, correctly -# resetting it afterwards. - -def run_with_tz(tz): - def decorator(func): - def inner(*args, **kwds): - try: - tzset = time.tzset - except AttributeError: - raise unittest.SkipTest("tzset required") - if 'TZ' in os.environ: - orig_tz = os.environ['TZ'] - else: - orig_tz = None - os.environ['TZ'] = tz - tzset() - - # now run the function, resetting the tz on exceptions - try: - return func(*args, **kwds) - finally: - if orig_tz is None: - del os.environ['TZ'] - else: - os.environ['TZ'] = orig_tz - time.tzset() - - inner.__name__ = func.__name__ - inner.__doc__ = func.__doc__ - return inner - return decorator - -#======================================================================= -# Big-memory-test support. Separate from 'resources' because memory use -# should be configurable. - -# Some handy shorthands. Note that these are used for byte-limits as well -# as size-limits, in the various bigmem tests -_1M = 1024*1024 -_1G = 1024 * _1M -_2G = 2 * _1G -_4G = 4 * _1G - -MAX_Py_ssize_t = sys.maxsize - -def set_memlimit(limit): - global max_memuse - global real_max_memuse - sizes = { - 'k': 1024, - 'm': _1M, - 'g': _1G, - 't': 1024*_1G, - } - m = re.match(r'(\d+(\.\d+)?) 
(K|M|G|T)b?$', limit, - re.IGNORECASE | re.VERBOSE) - if m is None: - raise ValueError('Invalid memory limit %r' % (limit,)) - memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) - real_max_memuse = memlimit - if memlimit > MAX_Py_ssize_t: - memlimit = MAX_Py_ssize_t - if memlimit < _2G - 1: - raise ValueError('Memory limit %r too low to be useful' % (limit,)) - max_memuse = memlimit - -class _MemoryWatchdog(object): - """An object which periodically watches the process' memory consumption - and prints it out. - """ - - def __init__(self): - self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) - self.started = False - - def start(self): - try: - f = open(self.procfile, 'r') - except OSError as e: - warnings.warn('/proc not available for stats: {0}'.format(e), - RuntimeWarning) - sys.stderr.flush() - return - - watchdog_script = findfile("memory_watchdog.py") - self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script], - stdin=f, stderr=subprocess.DEVNULL) - f.close() - self.started = True - - def stop(self): - if self.started: - self.mem_watchdog.terminate() - self.mem_watchdog.wait() - - -def bigmemtest(size, memuse, dry_run=True): - """Decorator for bigmem tests. - - 'minsize' is the minimum useful size for the test (in arbitrary, - test-interpreted units.) 'memuse' is the number of 'bytes per size' for - the test, or a good estimate of it. - - if 'dry_run' is False, it means the test doesn't support dummy runs - when -M is not specified. - """ - def decorator(f): - def wrapper(self): - size = wrapper.size - memuse = wrapper.memuse - if not real_max_memuse: - maxsize = 5147 - else: - maxsize = size - - if ((real_max_memuse or not dry_run) - and real_max_memuse < maxsize * memuse): - raise unittest.SkipTest( - "not enough memory: %.1fG minimum needed" - % (size * memuse / (1024 ** 3))) - - if real_max_memuse and verbose: - print() - print(" ... expected peak memory use: {peak:.1f}G" - .format(peak=size * memuse / (1024 ** 3))) - watchdog = _MemoryWatchdog() - watchdog.start() - else: - watchdog = None - - try: - return f(self, maxsize) - finally: - if watchdog: - watchdog.stop() - - wrapper.size = size - wrapper.memuse = memuse - return wrapper - return decorator - -def bigaddrspacetest(f): - """Decorator for tests that fill the address space.""" - def wrapper(self): - if max_memuse < MAX_Py_ssize_t: - if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: - raise unittest.SkipTest( - "not enough memory: try a 32-bit build instead") - else: - raise unittest.SkipTest( - "not enough memory: %.1fG minimum needed" - % (MAX_Py_ssize_t / (1024 ** 3))) - else: - return f(self) - return wrapper - -#======================================================================= -# unittest integration. - -class BasicTestRunner(object): - def run(self, test): - result = unittest.TestResult() - test(result) - return result - -def _id(obj): - return obj - -def requires_resource(resource): - if resource == 'gui' and not _is_gui_available(): - return unittest.skip("resource 'gui' is not available") - if is_resource_enabled(resource): - return _id - else: - return unittest.skip("resource {0!r} is not enabled".format(resource)) - -def cpython_only(test): - """ - Decorator for tests only applicable on CPython. 
- """ - return impl_detail(cpython=True)(test) - -def impl_detail(msg=None, **guards): - if check_impl_detail(**guards): - return _id - if msg is None: - guardnames, default = _parse_guards(guards) - if default: - msg = "implementation detail not available on {0}" - else: - msg = "implementation detail specific to {0}" - guardnames = sorted(guardnames.keys()) - msg = msg.format(' or '.join(guardnames)) - return unittest.skip(msg) - -def _parse_guards(guards): - # Returns a tuple ({platform_name: run_me}, default_value) - if not guards: - return ({'cpython': True}, False) - is_true = list(guards.values())[0] - assert list(guards.values()) == [is_true] * len(guards) # all True or all False - return (guards, not is_true) - -# Use the following check to guard CPython's implementation-specific tests -- -# or to run them only on the implementation(s) guarded by the arguments. -def check_impl_detail(**guards): - """This function returns True or False depending on the host platform. - Examples: - if check_impl_detail(): # only on CPython (default) - if check_impl_detail(jython=True): # only on Jython - if check_impl_detail(cpython=False): # everywhere except on CPython - """ - guards, default = _parse_guards(guards) - return guards.get(platform.python_implementation().lower(), default) - - -def no_tracing(func): - """Decorator to temporarily turn off tracing for the duration of a test.""" - if not hasattr(sys, 'gettrace'): - return func - else: - @functools.wraps(func) - def wrapper(*args, **kwargs): - original_trace = sys.gettrace() - try: - sys.settrace(None) - return func(*args, **kwargs) - finally: - sys.settrace(original_trace) - return wrapper - - -def refcount_test(test): - """Decorator for tests which involve reference counting. - - To start, the decorator does not run the test if is not run by CPython. - After that, any trace function is unset during the test to prevent - unexpected refcounts caused by the trace function. 
-
-    """
-    return no_tracing(cpython_only(test))
-
-
-def _filter_suite(suite, pred):
-    """Recursively filter test cases in a suite based on a predicate."""
-    newtests = []
-    for test in suite._tests:
-        if isinstance(test, unittest.TestSuite):
-            _filter_suite(test, pred)
-            newtests.append(test)
-        else:
-            if pred(test):
-                newtests.append(test)
-    suite._tests = newtests
-
-def _run_suite(suite):
-    """Run tests from a unittest.TestSuite-derived class."""
-    if verbose:
-        runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
-                                         failfast=failfast)
-    else:
-        runner = BasicTestRunner()
-
-    result = runner.run(suite)
-    if not result.wasSuccessful():
-        if len(result.errors) == 1 and not result.failures:
-            err = result.errors[0][1]
-        elif len(result.failures) == 1 and not result.errors:
-            err = result.failures[0][1]
-        else:
-            err = "multiple errors occurred"
-            if not verbose: err += "; run in verbose mode for details"
-        raise TestFailed(err)
-
-
-def run_unittest(*classes):
-    """Run tests from unittest.TestCase-derived classes."""
-    valid_types = (unittest.TestSuite, unittest.TestCase)
-    suite = unittest.TestSuite()
-    for cls in classes:
-        if isinstance(cls, str):
-            if cls in sys.modules:
-                suite.addTest(unittest.findTestCases(sys.modules[cls]))
-            else:
-                raise ValueError("str arguments must be keys in sys.modules")
-        elif isinstance(cls, valid_types):
-            suite.addTest(cls)
-        else:
-            suite.addTest(unittest.makeSuite(cls))
-    def case_pred(test):
-        if match_tests is None:
-            return True
-        for name in test.id().split("."):
-            if fnmatch.fnmatchcase(name, match_tests):
-                return True
-        return False
-    _filter_suite(suite, case_pred)
-    _run_suite(suite)
-
-# We don't have sysconfig on Py2.6:
-# #=======================================================================
-# # Check for the presence of docstrings.
-#
-# HAVE_DOCSTRINGS = (check_impl_detail(cpython=False) or
-#                    sys.platform == 'win32' or
-#                    sysconfig.get_config_var('WITH_DOC_STRINGS'))
-#
-# requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS,
-#                                           "test requires docstrings")
-#
-#
-# #=======================================================================
-# doctest driver.
-
-def run_doctest(module, verbosity=None, optionflags=0):
-    """Run doctest on the given module.  Return (#failures, #tests).
-
-    If optional argument verbosity is not specified (or is None), pass
-    support's belief about verbosity on to doctest.  Else doctest's
-    usual behavior is used (it searches sys.argv for -v).
-    """
-
-    import doctest
-
-    if verbosity is None:
-        verbosity = verbose
-    else:
-        verbosity = None
-
-    f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags)
-    if f:
-        raise TestFailed("%d of %d doctests failed" % (f, t))
-    if verbose:
-        print('doctest (%s) ... %d tests with zero failures' %
-              (module.__name__, t))
-    return f, t
-
-
-#=======================================================================
-# Support for saving and restoring the imported modules.
-
-def modules_setup():
-    return sys.modules.copy(),
-
-def modules_cleanup(oldmodules):
-    # Encoders/decoders are registered permanently within the internal
-    # codec cache. If we destroy the corresponding modules their
-    # globals will be set to None which will trip up the cached functions.
-    encodings = [(k, v) for k, v in sys.modules.items()
-                 if k.startswith('encodings.')]
-    # Was:
-    # sys.modules.clear()
-    # Py2-compatible (dict.pop() requires a key, so pop from a key snapshot):
-    for modname in list(sys.modules):
-        sys.modules.pop(modname)
-
-    sys.modules.update(encodings)
-    # XXX: This kind of problem can affect more than just encodings.
In particular - # extension modules (such as _ssl) don't cope with reloading properly. - # Really, test modules should be cleaning out the test specific modules they - # know they added (ala test_runpy) rather than relying on this function (as - # test_importhooks and test_pkg do currently). - # Implicitly imported *real* modules should be left alone (see issue 10556). - sys.modules.update(oldmodules) - -#======================================================================= -# Backported versions of threading_setup() and threading_cleanup() which don't refer -# to threading._dangling (not available on Py2.7). - -# Threading support to prevent reporting refleaks when running regrtest.py -R - -# NOTE: we use thread._count() rather than threading.enumerate() (or the -# moral equivalent thereof) because a threading.Thread object is still alive -# until its __bootstrap() method has returned, even after it has been -# unregistered from the threading module. -# thread._count(), on the other hand, only gets decremented *after* the -# __bootstrap() method has returned, which gives us reliable reference counts -# at the end of a test run. - -def threading_setup(): - if _thread: - return _thread._count(), - else: - return 1, - -def threading_cleanup(nb_threads): - if not _thread: - return - - _MAX_COUNT = 10 - for count in range(_MAX_COUNT): - n = _thread._count() - if n == nb_threads: - break - time.sleep(0.1) - # XXX print a warning in case of failure? - -def reap_threads(func): - """Use this function when threads are being used. This will - ensure that the threads are cleaned up even when the test fails. - If threading is unavailable this function does nothing. - """ - if not _thread: - return func - - @functools.wraps(func) - def decorator(*args): - key = threading_setup() - try: - return func(*args) - finally: - threading_cleanup(*key) - return decorator - -def reap_children(): - """Use this function at the end of test_main() whenever sub-processes - are started. This will help ensure that no extra children (zombies) - stick around to hog resources and create problems when looking - for refleaks. - """ - - # Reap all our dead child processes so we don't leave zombies around. - # These hog resources and might be causing some of the buildbots to die. - if hasattr(os, 'waitpid'): - any_process = -1 - while True: - try: - # This will raise an exception on Windows. That's ok. - pid, status = os.waitpid(any_process, os.WNOHANG) - if pid == 0: - break - except: - break - -@contextlib.contextmanager -def swap_attr(obj, attr, new_val): - """Temporary swap out an attribute with a new object. - - Usage: - with swap_attr(obj, "attr", 5): - ... - - This will set obj.attr to 5 for the duration of the with: block, - restoring the old value at the end of the block. If `attr` doesn't - exist on `obj`, it will be created and then deleted at the end of the - block. - """ - if hasattr(obj, attr): - real_val = getattr(obj, attr) - setattr(obj, attr, new_val) - try: - yield - finally: - setattr(obj, attr, real_val) - else: - setattr(obj, attr, new_val) - try: - yield - finally: - delattr(obj, attr) - -@contextlib.contextmanager -def swap_item(obj, item, new_val): - """Temporary swap out an item with a new object. - - Usage: - with swap_item(obj, "item", 5): - ... - - This will set obj["item"] to 5 for the duration of the with: block, - restoring the old value at the end of the block. If `item` doesn't - exist on `obj`, it will be created and then deleted at the end of the - block. 
- """ - if item in obj: - real_val = obj[item] - obj[item] = new_val - try: - yield - finally: - obj[item] = real_val - else: - obj[item] = new_val - try: - yield - finally: - del obj[item] - -def strip_python_stderr(stderr): - """Strip the stderr of a Python process from potential debug output - emitted by the interpreter. - - This will typically be run on the result of the communicate() method - of a subprocess.Popen object. - """ - stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() - return stderr - -def args_from_interpreter_flags(): - """Return a list of command-line arguments reproducing the current - settings in sys.flags and sys.warnoptions.""" - return subprocess._args_from_interpreter_flags() - -#============================================================ -# Support for assertions about logging. -#============================================================ - -class TestHandler(logging.handlers.BufferingHandler): - def __init__(self, matcher): - # BufferingHandler takes a "capacity" argument - # so as to know when to flush. As we're overriding - # shouldFlush anyway, we can set a capacity of zero. - # You can call flush() manually to clear out the - # buffer. - logging.handlers.BufferingHandler.__init__(self, 0) - self.matcher = matcher - - def shouldFlush(self): - return False - - def emit(self, record): - self.format(record) - self.buffer.append(record.__dict__) - - def matches(self, **kwargs): - """ - Look for a saved dict whose keys/values match the supplied arguments. - """ - result = False - for d in self.buffer: - if self.matcher.matches(d, **kwargs): - result = True - break - return result - -class Matcher(object): - - _partial_matches = ('msg', 'message') - - def matches(self, d, **kwargs): - """ - Try to match a single dict with the supplied arguments. - - Keys whose values are strings and which are in self._partial_matches - will be checked for partial (i.e. substring) matches. You can extend - this scheme to (for example) do regular expression matching, etc. - """ - result = True - for k in kwargs: - v = kwargs[k] - dv = d.get(k) - if not self.match_value(k, dv, v): - result = False - break - return result - - def match_value(self, k, dv, v): - """ - Try to match a single stored value (dv) with a supplied value (v). - """ - if type(v) != type(dv): - result = False - elif type(dv) is not str or k not in self._partial_matches: - result = (v == dv) - else: - result = dv.find(v) >= 0 - return result - - -_can_symlink = None -def can_symlink(): - global _can_symlink - if _can_symlink is not None: - return _can_symlink - symlink_path = TESTFN + "can_symlink" - try: - os.symlink(TESTFN, symlink_path) - can = True - except (OSError, NotImplementedError, AttributeError): - can = False - else: - os.remove(symlink_path) - _can_symlink = can - return can - -def skip_unless_symlink(test): - """Skip decorator for tests that require functional symlink""" - ok = can_symlink() - msg = "Requires functional symlink implementation" - return test if ok else unittest.skip(msg)(test) - -_can_xattr = None -def can_xattr(): - global _can_xattr - if _can_xattr is not None: - return _can_xattr - if not hasattr(os, "setxattr"): - can = False - else: - tmp_fp, tmp_name = tempfile.mkstemp() - try: - with open(TESTFN, "wb") as fp: - try: - # TESTFN & tempfile may use different file systems with - # different capabilities - os.setxattr(tmp_fp, b"user.test", b"") - os.setxattr(fp.fileno(), b"user.test", b"") - # Kernels < 2.6.39 don't respect setxattr flags. 
- kernel_version = platform.release() - m = re.match("2.6.(\d{1,2})", kernel_version) - can = m is None or int(m.group(1)) >= 39 - except OSError: - can = False - finally: - unlink(TESTFN) - unlink(tmp_name) - _can_xattr = can - return can - -def skip_unless_xattr(test): - """Skip decorator for tests that require functional extended attributes""" - ok = can_xattr() - msg = "no non-broken extended attribute support" - return test if ok else unittest.skip(msg)(test) - - -if sys.platform.startswith('win'): - @contextlib.contextmanager - def suppress_crash_popup(): - """Disable Windows Error Reporting dialogs using SetErrorMode.""" - # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621%28v=vs.85%29.aspx - # GetErrorMode is not available on Windows XP and Windows Server 2003, - # but SetErrorMode returns the previous value, so we can use that - import ctypes - k32 = ctypes.windll.kernel32 - SEM_NOGPFAULTERRORBOX = 0x02 - old_error_mode = k32.SetErrorMode(SEM_NOGPFAULTERRORBOX) - k32.SetErrorMode(old_error_mode | SEM_NOGPFAULTERRORBOX) - try: - yield - finally: - k32.SetErrorMode(old_error_mode) -else: - # this is a no-op for other platforms - @contextlib.contextmanager - def suppress_crash_popup(): - yield - - -def patch(test_instance, object_to_patch, attr_name, new_value): - """Override 'object_to_patch'.'attr_name' with 'new_value'. - - Also, add a cleanup procedure to 'test_instance' to restore - 'object_to_patch' value for 'attr_name'. - The 'attr_name' should be a valid attribute for 'object_to_patch'. - - """ - # check that 'attr_name' is a real attribute for 'object_to_patch' - # will raise AttributeError if it does not exist - getattr(object_to_patch, attr_name) - - # keep a copy of the old value - attr_is_local = False - try: - old_value = object_to_patch.__dict__[attr_name] - except (AttributeError, KeyError): - old_value = getattr(object_to_patch, attr_name, None) - else: - attr_is_local = True - - # restore the value when the test is done - def cleanup(): - if attr_is_local: - setattr(object_to_patch, attr_name, old_value) - else: - delattr(object_to_patch, attr_name) - - test_instance.addCleanup(cleanup) - - # actually override the attribute - setattr(object_to_patch, attr_name, new_value) diff --git a/pype/vendor/future/backports/total_ordering.py b/pype/vendor/future/backports/total_ordering.py deleted file mode 100644 index 760f06d6c3..0000000000 --- a/pype/vendor/future/backports/total_ordering.py +++ /dev/null @@ -1,38 +0,0 @@ -""" -For Python < 2.7.2. total_ordering in versions prior to 2.7.2 is buggy. -See http://bugs.python.org/issue10042 for details. For these versions use -code borrowed from Python 2.7.3. - -From django.utils. 
-""" - -import sys -if sys.version_info >= (2, 7, 2): - from functools import total_ordering -else: - def total_ordering(cls): - """Class decorator that fills in missing ordering methods""" - convert = { - '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)), - ('__le__', lambda self, other: self < other or self == other), - ('__ge__', lambda self, other: not self < other)], - '__le__': [('__ge__', lambda self, other: not self <= other or self == other), - ('__lt__', lambda self, other: self <= other and not self == other), - ('__gt__', lambda self, other: not self <= other)], - '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)), - ('__ge__', lambda self, other: self > other or self == other), - ('__le__', lambda self, other: not self > other)], - '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other), - ('__gt__', lambda self, other: self >= other and not self == other), - ('__lt__', lambda self, other: not self >= other)] - } - roots = set(dir(cls)) & set(convert) - if not roots: - raise ValueError('must define at least one ordering operation: < > <= >=') - root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__ - for opname, opfunc in convert[root]: - if opname not in roots: - opfunc.__name__ = opname - opfunc.__doc__ = getattr(int, opname).__doc__ - setattr(cls, opname, opfunc) - return cls diff --git a/pype/vendor/future/backports/urllib/__init__.py b/pype/vendor/future/backports/urllib/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/future/backports/urllib/error.py b/pype/vendor/future/backports/urllib/error.py deleted file mode 100644 index a473e4453d..0000000000 --- a/pype/vendor/future/backports/urllib/error.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Exception classes raised by urllib. - -The base exception class is URLError, which inherits from IOError. It -doesn't define any behavior of its own, but is the base class for all -exceptions defined in this package. - -HTTPError is an exception class that is also a valid HTTP response -instance. It behaves this way because HTTP protocol errors are valid -responses, with a status code, headers, and a body. In some contexts, -an application may want to handle an exception like a regular -response. -""" -from __future__ import absolute_import, division, unicode_literals -from future import standard_library - -from future.backports.urllib import response as urllib_response - - -__all__ = ['URLError', 'HTTPError', 'ContentTooShortError'] - - -# do these error classes make sense? -# make sure all of the IOError stuff is overridden. we just want to be -# subtypes. - -class URLError(IOError): - # URLError is a sub-type of IOError, but it doesn't share any of - # the implementation. need to override __init__ and __str__. - # It sets self.args for compatibility with other EnvironmentError - # subclasses, but args doesn't have the typical format with errno in - # slot 0 and strerror in slot 1. This may be better than nothing. 
-    def __init__(self, reason, filename=None):
-        self.args = reason,
-        self.reason = reason
-        if filename is not None:
-            self.filename = filename
-
-    def __str__(self):
-        return '<urlopen error %s>' % self.reason
-
-class HTTPError(URLError, urllib_response.addinfourl):
-    """Raised when HTTP error occurs, but also acts like non-error return"""
-    __super_init = urllib_response.addinfourl.__init__
-
-    def __init__(self, url, code, msg, hdrs, fp):
-        self.code = code
-        self.msg = msg
-        self.hdrs = hdrs
-        self.fp = fp
-        self.filename = url
-        # The addinfourl classes depend on fp being a valid file
-        # object.  In some cases, the HTTPError may not have a valid
-        # file object.  If this happens, the simplest workaround is to
-        # not initialize the base classes.
-        if fp is not None:
-            self.__super_init(fp, hdrs, url, code)
-
-    def __str__(self):
-        return 'HTTP Error %s: %s' % (self.code, self.msg)
-
-    # since URLError specifies a .reason attribute, HTTPError should also
-    # provide this attribute. See issue13211 for discussion.
-    @property
-    def reason(self):
-        return self.msg
-
-    def info(self):
-        return self.hdrs
-
-
-# exception raised when downloaded size does not match content-length
-class ContentTooShortError(URLError):
-    def __init__(self, message, content):
-        URLError.__init__(self, message)
-        self.content = content
diff --git a/pype/vendor/future/backports/urllib/parse.py b/pype/vendor/future/backports/urllib/parse.py
deleted file mode 100644
index ada2f8bb4c..0000000000
--- a/pype/vendor/future/backports/urllib/parse.py
+++ /dev/null
@@ -1,991 +0,0 @@
-"""
-Ported using Python-Future from the Python 3.3 standard library.
-
-Parse (absolute and relative) URLs.
-
-urlparse module is based upon the following RFC specifications.
-
-RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
-and L. Masinter, January 2005.
-
-RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R. Hinden, B. Carpenter
-and L. Masinter, December 1999.
-
-RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
-Berners-Lee, R. Fielding, and L. Masinter, August 1998.
-
-RFC 2368: "The mailto URL scheme", by P. Hoffman, L. Masinter, J. Zawinski, July 1998.
-
-RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
-1995.
-
-RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
-McCahill, December 1994
-
-RFC 3986 is considered the current standard and any future changes to
-urlparse module should conform with it.  The urlparse module is
-currently not entirely compliant with this RFC due to defacto
-scenarios for parsing, and for backward compatibility purposes, some
-parsing quirks from older RFCs are retained.  The testcases in
-test_urlparse.py provide a good indicator of parsing behavior.
-""" -from __future__ import absolute_import, division, unicode_literals -from future.builtins import bytes, chr, dict, int, range, str -from future.utils import raise_with_traceback - -import re -import sys -import collections - -__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag", - "urlsplit", "urlunsplit", "urlencode", "parse_qs", - "parse_qsl", "quote", "quote_plus", "quote_from_bytes", - "unquote", "unquote_plus", "unquote_to_bytes"] - -# A classification of schemes ('' means apply by default) -uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', - 'wais', 'file', 'https', 'shttp', 'mms', - 'prospero', 'rtsp', 'rtspu', '', 'sftp', - 'svn', 'svn+ssh'] -uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', - 'imap', 'wais', 'file', 'mms', 'https', 'shttp', - 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', - 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh'] -uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap', - 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips', - 'mms', '', 'sftp', 'tel'] - -# These are not actually used anymore, but should stay for backwards -# compatibility. (They are undocumented, but have a public-looking name.) -non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', - 'telnet', 'wais', 'imap', 'snews', 'sip', 'sips'] -uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms', - 'gopher', 'rtsp', 'rtspu', 'sip', 'sips', ''] -uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news', - 'nntp', 'wais', 'https', 'shttp', 'snews', - 'file', 'prospero', ''] - -# Characters valid in scheme names -scheme_chars = ('abcdefghijklmnopqrstuvwxyz' - 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' - '0123456789' - '+-.') - -# XXX: Consider replacing with functools.lru_cache -MAX_CACHE_SIZE = 20 -_parse_cache = {} - -def clear_cache(): - """Clear the parse cache and the quoters cache.""" - _parse_cache.clear() - _safe_quoters.clear() - - -# Helpers for bytes handling -# For 3.2, we deliberately require applications that -# handle improperly quoted URLs to do their own -# decoding and encoding. 
If valid use cases are -# presented, we may relax this by using latin-1 -# decoding internally for 3.3 -_implicit_encoding = 'ascii' -_implicit_errors = 'strict' - -def _noop(obj): - return obj - -def _encode_result(obj, encoding=_implicit_encoding, - errors=_implicit_errors): - return obj.encode(encoding, errors) - -def _decode_args(args, encoding=_implicit_encoding, - errors=_implicit_errors): - return tuple(x.decode(encoding, errors) if x else '' for x in args) - -def _coerce_args(*args): - # Invokes decode if necessary to create str args - # and returns the coerced inputs along with - # an appropriate result coercion function - # - noop for str inputs - # - encoding function otherwise - str_input = isinstance(args[0], str) - for arg in args[1:]: - # We special-case the empty string to support the - # "scheme=''" default argument to some functions - if arg and isinstance(arg, str) != str_input: - raise TypeError("Cannot mix str and non-str arguments") - if str_input: - return args + (_noop,) - return _decode_args(args) + (_encode_result,) - -# Result objects are more helpful than simple tuples -class _ResultMixinStr(object): - """Standard approach to encoding parsed results from str to bytes""" - __slots__ = () - - def encode(self, encoding='ascii', errors='strict'): - return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self)) - - -class _ResultMixinBytes(object): - """Standard approach to decoding parsed results from bytes to str""" - __slots__ = () - - def decode(self, encoding='ascii', errors='strict'): - return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self)) - - -class _NetlocResultMixinBase(object): - """Shared methods for the parsed result objects containing a netloc element""" - __slots__ = () - - @property - def username(self): - return self._userinfo[0] - - @property - def password(self): - return self._userinfo[1] - - @property - def hostname(self): - hostname = self._hostinfo[0] - if not hostname: - hostname = None - elif hostname is not None: - hostname = hostname.lower() - return hostname - - @property - def port(self): - port = self._hostinfo[1] - if port is not None: - port = int(port, 10) - # Return None on an illegal port - if not ( 0 <= port <= 65535): - return None - return port - - -class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr): - __slots__ = () - - @property - def _userinfo(self): - netloc = self.netloc - userinfo, have_info, hostinfo = netloc.rpartition('@') - if have_info: - username, have_password, password = userinfo.partition(':') - if not have_password: - password = None - else: - username = password = None - return username, password - - @property - def _hostinfo(self): - netloc = self.netloc - _, _, hostinfo = netloc.rpartition('@') - _, have_open_br, bracketed = hostinfo.partition('[') - if have_open_br: - hostname, _, port = bracketed.partition(']') - _, have_port, port = port.partition(':') - else: - hostname, have_port, port = hostinfo.partition(':') - if not have_port: - port = None - return hostname, port - - -class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes): - __slots__ = () - - @property - def _userinfo(self): - netloc = self.netloc - userinfo, have_info, hostinfo = netloc.rpartition(b'@') - if have_info: - username, have_password, password = userinfo.partition(b':') - if not have_password: - password = None - else: - username = password = None - return username, password - - @property - def _hostinfo(self): - netloc = self.netloc - _, _, hostinfo = 
netloc.rpartition(b'@')
-        _, have_open_br, bracketed = hostinfo.partition(b'[')
-        if have_open_br:
-            hostname, _, port = bracketed.partition(b']')
-            _, have_port, port = port.partition(b':')
-        else:
-            hostname, have_port, port = hostinfo.partition(b':')
-        if not have_port:
-            port = None
-        return hostname, port
-
-
-from collections import namedtuple
-
-_DefragResultBase = namedtuple('DefragResult', 'url fragment')
-_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment')
-_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment')
-
-# For backwards compatibility, alias _NetlocResultMixinStr
-# ResultBase is no longer part of the documented API, but it is
-# retained since deprecating it isn't worth the hassle
-ResultBase = _NetlocResultMixinStr
-
-# Structured result objects for string data
-class DefragResult(_DefragResultBase, _ResultMixinStr):
-    __slots__ = ()
-    def geturl(self):
-        if self.fragment:
-            return self.url + '#' + self.fragment
-        else:
-            return self.url
-
-class SplitResult(_SplitResultBase, _NetlocResultMixinStr):
-    __slots__ = ()
-    def geturl(self):
-        return urlunsplit(self)
-
-class ParseResult(_ParseResultBase, _NetlocResultMixinStr):
-    __slots__ = ()
-    def geturl(self):
-        return urlunparse(self)
-
-# Structured result objects for bytes data
-class DefragResultBytes(_DefragResultBase, _ResultMixinBytes):
-    __slots__ = ()
-    def geturl(self):
-        if self.fragment:
-            return self.url + b'#' + self.fragment
-        else:
-            return self.url
-
-class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes):
-    __slots__ = ()
-    def geturl(self):
-        return urlunsplit(self)
-
-class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes):
-    __slots__ = ()
-    def geturl(self):
-        return urlunparse(self)
-
-# Set up the encode/decode result pairs
-def _fix_result_transcoding():
-    _result_pairs = (
-        (DefragResult, DefragResultBytes),
-        (SplitResult, SplitResultBytes),
-        (ParseResult, ParseResultBytes),
-    )
-    for _decoded, _encoded in _result_pairs:
-        _decoded._encoded_counterpart = _encoded
-        _encoded._decoded_counterpart = _decoded
-
-_fix_result_transcoding()
-del _fix_result_transcoding
-
-def urlparse(url, scheme='', allow_fragments=True):
-    """Parse a URL into 6 components:
-    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
-    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
-    Note that we don't break the components up in smaller bits
-    (e.g. netloc is a single string) and we don't expand % escapes."""
-    url, scheme, _coerce_result = _coerce_args(url, scheme)
-    splitresult = urlsplit(url, scheme, allow_fragments)
-    scheme, netloc, url, query, fragment = splitresult
-    if scheme in uses_params and ';' in url:
-        url, params = _splitparams(url)
-    else:
-        params = ''
-    result = ParseResult(scheme, netloc, url, params, query, fragment)
-    return _coerce_result(result)
-
-def _splitparams(url):
-    if '/' in url:
-        i = url.find(';', url.rfind('/'))
-        if i < 0:
-            return url, ''
-    else:
-        i = url.find(';')
-    return url[:i], url[i+1:]
-
-def _splitnetloc(url, start=0):
-    delim = len(url)   # position of end of domain part of url, default is end
-    for c in '/?#':    # look for delimiters; the order is NOT important
-        wdelim = url.find(c, start)    # find first of this delim
-        if wdelim >= 0:                # if found
-            delim = min(delim, wdelim) # use earliest delim position
-    return url[start:delim], url[delim:]   # return (domain, rest)
-
-def urlsplit(url, scheme='', allow_fragments=True):
-    """Parse a URL into 5 components:
-    <scheme>://<netloc>/<path>?<query>#<fragment>
-    Return a 5-tuple: (scheme, netloc, path, query, fragment).
-    Note that we don't break the components up in smaller bits
-    (e.g. netloc is a single string) and we don't expand % escapes."""
-    url, scheme, _coerce_result = _coerce_args(url, scheme)
-    allow_fragments = bool(allow_fragments)
-    key = url, scheme, allow_fragments, type(url), type(scheme)
-    cached = _parse_cache.get(key, None)
-    if cached:
-        return _coerce_result(cached)
-    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
-        clear_cache()
-    netloc = query = fragment = ''
-    i = url.find(':')
-    if i > 0:
-        if url[:i] == 'http': # optimize the common case
-            scheme = url[:i].lower()
-            url = url[i+1:]
-            if url[:2] == '//':
-                netloc, url = _splitnetloc(url, 2)
-                if (('[' in netloc and ']' not in netloc) or
-                        (']' in netloc and '[' not in netloc)):
-                    raise ValueError("Invalid IPv6 URL")
-            if allow_fragments and '#' in url:
-                url, fragment = url.split('#', 1)
-            if '?' in url:
-                url, query = url.split('?', 1)
-            v = SplitResult(scheme, netloc, url, query, fragment)
-            _parse_cache[key] = v
-            return _coerce_result(v)
-        for c in url[:i]:
-            if c not in scheme_chars:
-                break
-        else:
-            # make sure "url" is not actually a port number (in which case
-            # "scheme" is really part of the path)
-            rest = url[i+1:]
-            if not rest or any(c not in '0123456789' for c in rest):
-                # not a port number
-                scheme, url = url[:i].lower(), rest
-
-    if url[:2] == '//':
-        netloc, url = _splitnetloc(url, 2)
-        if (('[' in netloc and ']' not in netloc) or
-                (']' in netloc and '[' not in netloc)):
-            raise ValueError("Invalid IPv6 URL")
-    if allow_fragments and '#' in url:
-        url, fragment = url.split('#', 1)
-    if '?' in url:
-        url, query = url.split('?', 1)
-    v = SplitResult(scheme, netloc, url, query, fragment)
-    _parse_cache[key] = v
-    return _coerce_result(v)
-
-def urlunparse(components):
-    """Put a parsed URL back together again.  This may result in a
-    slightly different, but equivalent URL, if the URL that was parsed
-    originally had redundant delimiters, e.g. a ? with an empty query
-    (the draft states that these are equivalent)."""
-    scheme, netloc, url, params, query, fragment, _coerce_result = (
-        _coerce_args(*components))
-    if params:
-        url = "%s;%s" % (url, params)
-    return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment)))
-
-def urlunsplit(components):
-    """Combine the elements of a tuple as returned by urlsplit() into a
-    complete URL as a string.
The data argument can be any five-item iterable. - This may result in a slightly different, but equivalent URL, if the URL that - was parsed originally had unnecessary delimiters (for example, a ? with an - empty query; the RFC states that these are equivalent).""" - scheme, netloc, url, query, fragment, _coerce_result = ( - _coerce_args(*components)) - if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'): - if url and url[:1] != '/': url = '/' + url - url = '//' + (netloc or '') + url - if scheme: - url = scheme + ':' + url - if query: - url = url + '?' + query - if fragment: - url = url + '#' + fragment - return _coerce_result(url) - -def urljoin(base, url, allow_fragments=True): - """Join a base URL and a possibly relative URL to form an absolute - interpretation of the latter.""" - if not base: - return url - if not url: - return base - base, url, _coerce_result = _coerce_args(base, url) - bscheme, bnetloc, bpath, bparams, bquery, bfragment = \ - urlparse(base, '', allow_fragments) - scheme, netloc, path, params, query, fragment = \ - urlparse(url, bscheme, allow_fragments) - if scheme != bscheme or scheme not in uses_relative: - return _coerce_result(url) - if scheme in uses_netloc: - if netloc: - return _coerce_result(urlunparse((scheme, netloc, path, - params, query, fragment))) - netloc = bnetloc - if path[:1] == '/': - return _coerce_result(urlunparse((scheme, netloc, path, - params, query, fragment))) - if not path and not params: - path = bpath - params = bparams - if not query: - query = bquery - return _coerce_result(urlunparse((scheme, netloc, path, - params, query, fragment))) - segments = bpath.split('/')[:-1] + path.split('/') - # XXX The stuff below is bogus in various ways... - if segments[-1] == '.': - segments[-1] = '' - while '.' in segments: - segments.remove('.') - while 1: - i = 1 - n = len(segments) - 1 - while i < n: - if (segments[i] == '..' - and segments[i-1] not in ('', '..')): - del segments[i-1:i+1] - break - i = i+1 - else: - break - if segments == ['', '..']: - segments[-1] = '' - elif len(segments) >= 2 and segments[-1] == '..': - segments[-2:] = [''] - return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments), - params, query, fragment))) - -def urldefrag(url): - """Removes any existing fragment from URL. - - Returns a tuple of the defragmented URL and the fragment. If - the URL contained no fragments, the second element is the - empty string. - """ - url, _coerce_result = _coerce_args(url) - if '#' in url: - s, n, p, a, q, frag = urlparse(url) - defrag = urlunparse((s, n, p, a, q, '')) - else: - frag = '' - defrag = url - return _coerce_result(DefragResult(defrag, frag)) - -_hexdig = '0123456789ABCDEFabcdef' -_hextobyte = dict(((a + b).encode(), bytes([int(a + b, 16)])) - for a in _hexdig for b in _hexdig) - -def unquote_to_bytes(string): - """unquote_to_bytes('abc%20def') -> b'abc def'.""" - # Note: strings are encoded as UTF-8. This is only an issue if it contains - # unescaped non-ASCII characters, which URIs should not. - if not string: - # Is it a string-like object? 
- string.split - return bytes(b'') - if isinstance(string, str): - string = string.encode('utf-8') - ### For Python-Future: - # It is already a byte-string object, but force it to be newbytes here on - # Py2: - string = bytes(string) - ### - bits = string.split(b'%') - if len(bits) == 1: - return string - res = [bits[0]] - append = res.append - for item in bits[1:]: - try: - append(_hextobyte[item[:2]]) - append(item[2:]) - except KeyError: - append(b'%') - append(item) - return bytes(b'').join(res) - -_asciire = re.compile('([\x00-\x7f]+)') - -def unquote(string, encoding='utf-8', errors='replace'): - """Replace %xx escapes by their single-character equivalent. The optional - encoding and errors parameters specify how to decode percent-encoded - sequences into Unicode characters, as accepted by the bytes.decode() - method. - By default, percent-encoded sequences are decoded with UTF-8, and invalid - sequences are replaced by a placeholder character. - - unquote('abc%20def') -> 'abc def'. - """ - if '%' not in string: - string.split - return string - if encoding is None: - encoding = 'utf-8' - if errors is None: - errors = 'replace' - bits = _asciire.split(string) - res = [bits[0]] - append = res.append - for i in range(1, len(bits), 2): - append(unquote_to_bytes(bits[i]).decode(encoding, errors)) - append(bits[i + 1]) - return ''.join(res) - -def parse_qs(qs, keep_blank_values=False, strict_parsing=False, - encoding='utf-8', errors='replace'): - """Parse a query given as a string argument. - - Arguments: - - qs: percent-encoded query string to be parsed - - keep_blank_values: flag indicating whether blank values in - percent-encoded queries should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. - - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. - - encoding and errors: specify how to decode percent-encoded sequences - into Unicode characters, as accepted by the bytes.decode() method. - """ - parsed_result = {} - pairs = parse_qsl(qs, keep_blank_values, strict_parsing, - encoding=encoding, errors=errors) - for name, value in pairs: - if name in parsed_result: - parsed_result[name].append(value) - else: - parsed_result[name] = [value] - return parsed_result - -def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, - encoding='utf-8', errors='replace'): - """Parse a query given as a string argument. - - Arguments: - - qs: percent-encoded query string to be parsed - - keep_blank_values: flag indicating whether blank values in - percent-encoded queries should be treated as blank strings. A - true value indicates that blanks should be retained as blank - strings. The default false value indicates that blank values - are to be ignored and treated as if they were not included. - - strict_parsing: flag indicating what to do with parsing errors. If - false (the default), errors are silently ignored. If true, - errors raise a ValueError exception. - - encoding and errors: specify how to decode percent-encoded sequences - into Unicode characters, as accepted by the bytes.decode() method. - - Returns a list, as G-d intended. 
- """ - qs, _coerce_result = _coerce_args(qs) - pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')] - r = [] - for name_value in pairs: - if not name_value and not strict_parsing: - continue - nv = name_value.split('=', 1) - if len(nv) != 2: - if strict_parsing: - raise ValueError("bad query field: %r" % (name_value,)) - # Handle case of a control-name with no equal sign - if keep_blank_values: - nv.append('') - else: - continue - if len(nv[1]) or keep_blank_values: - name = nv[0].replace('+', ' ') - name = unquote(name, encoding=encoding, errors=errors) - name = _coerce_result(name) - value = nv[1].replace('+', ' ') - value = unquote(value, encoding=encoding, errors=errors) - value = _coerce_result(value) - r.append((name, value)) - return r - -def unquote_plus(string, encoding='utf-8', errors='replace'): - """Like unquote(), but also replace plus signs by spaces, as required for - unquoting HTML form values. - - unquote_plus('%7e/abc+def') -> '~/abc def' - """ - string = string.replace('+', ' ') - return unquote(string, encoding, errors) - -_ALWAYS_SAFE = frozenset(bytes(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' - b'abcdefghijklmnopqrstuvwxyz' - b'0123456789' - b'_.-')) -_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE) -_safe_quoters = {} - -class Quoter(collections.defaultdict): - """A mapping from bytes (in range(0,256)) to strings. - - String values are percent-encoded byte values, unless the key < 128, and - in the "safe" set (either the specified safe set, or default set). - """ - # Keeps a cache internally, using defaultdict, for efficiency (lookups - # of cached keys don't call Python code at all). - def __init__(self, safe): - """safe: bytes object.""" - self.safe = _ALWAYS_SAFE.union(bytes(safe)) - - def __repr__(self): - # Without this, will just display as a defaultdict - return "" % dict(self) - - def __missing__(self, b): - # Handle a cache miss. Store quoted string in cache and return. - res = chr(b) if b in self.safe else '%{0:02X}'.format(b) - self[b] = res - return res - -def quote(string, safe='/', encoding=None, errors=None): - """quote('abc def') -> 'abc%20def' - - Each part of a URL, e.g. the path info, the query, etc., has a - different set of reserved characters that must be quoted. - - RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists - the following reserved characters. - - reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | - "$" | "," - - Each of these characters is reserved in some component of a URL, - but not necessarily in all of them. - - By default, the quote function is intended for quoting the path - section of a URL. Thus, it will not encode '/'. This character - is reserved, but in typical usage the quote function is being - called on a path where the existing slash characters are used as - reserved characters. - - string and safe may be either str or bytes objects. encoding must - not be specified if string is a str. - - The optional encoding and errors parameters specify how to deal with - non-ASCII characters, as accepted by the str.encode method. - By default, encoding='utf-8' (characters are encoded with UTF-8), and - errors='strict' (unsupported characters raise a UnicodeEncodeError). 
- """ - if isinstance(string, str): - if not string: - return string - if encoding is None: - encoding = 'utf-8' - if errors is None: - errors = 'strict' - string = string.encode(encoding, errors) - else: - if encoding is not None: - raise TypeError("quote() doesn't support 'encoding' for bytes") - if errors is not None: - raise TypeError("quote() doesn't support 'errors' for bytes") - return quote_from_bytes(string, safe) - -def quote_plus(string, safe='', encoding=None, errors=None): - """Like quote(), but also replace ' ' with '+', as required for quoting - HTML form values. Plus signs in the original string are escaped unless - they are included in safe. It also does not have safe default to '/'. - """ - # Check if ' ' in string, where string may either be a str or bytes. If - # there are no spaces, the regular quote will produce the right answer. - if ((isinstance(string, str) and ' ' not in string) or - (isinstance(string, bytes) and b' ' not in string)): - return quote(string, safe, encoding, errors) - if isinstance(safe, str): - space = str(' ') - else: - space = bytes(b' ') - string = quote(string, safe + space, encoding, errors) - return string.replace(' ', '+') - -def quote_from_bytes(bs, safe='/'): - """Like quote(), but accepts a bytes object rather than a str, and does - not perform string-to-bytes encoding. It always returns an ASCII string. - quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3f' - """ - if not isinstance(bs, (bytes, bytearray)): - raise TypeError("quote_from_bytes() expected bytes") - if not bs: - return str('') - ### For Python-Future: - bs = bytes(bs) - ### - if isinstance(safe, str): - # Normalize 'safe' by converting to bytes and removing non-ASCII chars - safe = str(safe).encode('ascii', 'ignore') - else: - ### For Python-Future: - safe = bytes(safe) - ### - safe = bytes([c for c in safe if c < 128]) - if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe): - return bs.decode() - try: - quoter = _safe_quoters[safe] - except KeyError: - _safe_quoters[safe] = quoter = Quoter(safe).__getitem__ - return str('').join([quoter(char) for char in bs]) - -def urlencode(query, doseq=False, safe='', encoding=None, errors=None): - """Encode a sequence of two-element tuples or dictionary into a URL query string. - - If any values in the query arg are sequences and doseq is true, each - sequence element is converted to a separate parameter. - - If the query arg is a sequence of two-element tuples, the order of the - parameters in the output will match the order of parameters in the - input. - - The query arg may be either a string or a bytes type. When query arg is a - string, the safe, encoding and error parameters are sent the quote_plus for - encoding. - """ - - if hasattr(query, "items"): - query = query.items() - else: - # It's a bother at times that strings and string-like objects are - # sequences. - try: - # non-sequence items should not work with len() - # non-empty strings will fail this - if len(query) and not isinstance(query[0], tuple): - raise TypeError - # Zero-length sequences of all types will get here and succeed, - # but that's a minor nit. 
Since the original implementation
-            # allowed empty dicts that type of behavior probably should be
-            # preserved for consistency
-        except TypeError:
-            ty, va, tb = sys.exc_info()
-            raise_with_traceback(TypeError("not a valid non-string sequence "
-                                           "or mapping object"), tb)
-
-    l = []
-    if not doseq:
-        for k, v in query:
-            if isinstance(k, bytes):
-                k = quote_plus(k, safe)
-            else:
-                k = quote_plus(str(k), safe, encoding, errors)
-
-            if isinstance(v, bytes):
-                v = quote_plus(v, safe)
-            else:
-                v = quote_plus(str(v), safe, encoding, errors)
-            l.append(k + '=' + v)
-    else:
-        for k, v in query:
-            if isinstance(k, bytes):
-                k = quote_plus(k, safe)
-            else:
-                k = quote_plus(str(k), safe, encoding, errors)
-
-            if isinstance(v, bytes):
-                v = quote_plus(v, safe)
-                l.append(k + '=' + v)
-            elif isinstance(v, str):
-                v = quote_plus(v, safe, encoding, errors)
-                l.append(k + '=' + v)
-            else:
-                try:
-                    # Is this a sufficient test for sequence-ness?
-                    x = len(v)
-                except TypeError:
-                    # not a sequence
-                    v = quote_plus(str(v), safe, encoding, errors)
-                    l.append(k + '=' + v)
-                else:
-                    # loop over the sequence
-                    for elt in v:
-                        if isinstance(elt, bytes):
-                            elt = quote_plus(elt, safe)
-                        else:
-                            elt = quote_plus(str(elt), safe, encoding, errors)
-                        l.append(k + '=' + elt)
-    return str('&').join(l)
-
-# Utilities to parse URLs (most of these return None for missing parts):
-# unwrap('<URL:type://host/path>') --> 'type://host/path'
-# splittype('type:opaquestring') --> 'type', 'opaquestring'
-# splithost('//host[:port]/path') --> 'host[:port]', '/path'
-# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
-# splitpasswd('user:passwd') -> 'user', 'passwd'
-# splitport('host:port') --> 'host', 'port'
-# splitquery('/path?query') --> '/path', 'query'
-# splittag('/path#tag') --> '/path', 'tag'
-# splitattr('/path;attr1=value1;attr2=value2;...') ->
-#   '/path', ['attr1=value1', 'attr2=value2', ...]
-# splitvalue('attr=value') --> 'attr', 'value'
-# urllib.parse.unquote('abc%20def') -> 'abc def'
-# quote('abc def') -> 'abc%20def')
-
-def to_bytes(url):
-    """to_bytes(u"URL") --> 'URL'."""
-    # Most URL schemes require ASCII. If that changes, the conversion
-    # can be relaxed.
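# Consolidated usage sketch (editorial aside) for the parsing, quoting and
# encoding helpers deleted above: urlsplit, urljoin, quote/quote_plus,
# unquote_plus, parse_qsl and urlencode. Assumes the Python 3 stdlib
# equivalent of this backport; all URLs below are illustrative.
from urllib.parse import (
    urlsplit, urljoin, quote, quote_plus, unquote_plus, parse_qsl, urlencode)

parts = urlsplit('http://user:pw@example.com:8042/over/there?name=ferret#nose')
assert parts.scheme == 'http'
assert parts.username == 'user'        # from the netloc mixin's _userinfo
assert parts.hostname == 'example.com' # lowercased by the netloc mixin
assert parts.port == 8042              # None when missing or out of range

# Relative reference resolution follows the uses_relative/uses_netloc tables.
assert urljoin('http://example.com/a/b/c', '../d') == 'http://example.com/a/d'

# quote() keeps '/' by default; quote_plus() also encodes it and uses '+'.
assert quote('a b/c') == 'a%20b/c'
assert quote_plus('a b/c') == 'a+b%2Fc'
assert unquote_plus('a+b%2Fc') == 'a b/c'

# Query strings round-trip through parse_qsl()/urlencode().
pairs = parse_qsl('x=1&y=a+b')
assert pairs == [('x', '1'), ('y', 'a b')]
assert urlencode(pairs) == 'x=1&y=a+b'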
-    # XXX get rid of to_bytes()
-    if isinstance(url, str):
-        try:
-            url = url.encode("ASCII").decode()
-        except UnicodeError:
-            raise UnicodeError("URL " + repr(url) +
-                               " contains non-ASCII characters")
-    return url
-
-def unwrap(url):
-    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
-    url = str(url).strip()
-    if url[:1] == '<' and url[-1:] == '>':
-        url = url[1:-1].strip()
-    if url[:4] == 'URL:': url = url[4:].strip()
-    return url
-
-_typeprog = None
-def splittype(url):
-    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
-    global _typeprog
-    if _typeprog is None:
-        import re
-        _typeprog = re.compile('^([^/:]+):')
-
-    match = _typeprog.match(url)
-    if match:
-        scheme = match.group(1)
-        return scheme.lower(), url[len(scheme) + 1:]
-    return None, url
-
-_hostprog = None
-def splithost(url):
-    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
-    global _hostprog
-    if _hostprog is None:
-        import re
-        _hostprog = re.compile('^//([^/?]*)(.*)$')
-
-    match = _hostprog.match(url)
-    if match:
-        host_port = match.group(1)
-        path = match.group(2)
-        if path and not path.startswith('/'):
-            path = '/' + path
-        return host_port, path
-    return None, url
-
-_userprog = None
-def splituser(host):
-    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
-    global _userprog
-    if _userprog is None:
-        import re
-        _userprog = re.compile('^(.*)@(.*)$')
-
-    match = _userprog.match(host)
-    if match: return match.group(1, 2)
-    return None, host
-
-_passwdprog = None
-def splitpasswd(user):
-    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
-    global _passwdprog
-    if _passwdprog is None:
-        import re
-        _passwdprog = re.compile('^([^:]*):(.*)$',re.S)
-
-    match = _passwdprog.match(user)
-    if match: return match.group(1, 2)
-    return user, None
-
-# splittag('/path#tag') --> '/path', 'tag'
-_portprog = None
-def splitport(host):
-    """splitport('host:port') --> 'host', 'port'."""
-    global _portprog
-    if _portprog is None:
-        import re
-        _portprog = re.compile('^(.*):([0-9]+)$')
-
-    match = _portprog.match(host)
-    if match: return match.group(1, 2)
-    return host, None
-
-_nportprog = None
-def splitnport(host, defport=-1):
-    """Split host and port, returning numeric port.
-    Return given default port if no ':' found; defaults to -1.
-    Return numerical port if a valid number are found after ':'.
- Return None if ':' but not a valid number.""" - global _nportprog - if _nportprog is None: - import re - _nportprog = re.compile('^(.*):(.*)$') - - match = _nportprog.match(host) - if match: - host, port = match.group(1, 2) - try: - if not port: raise ValueError("no digits") - nport = int(port) - except ValueError: - nport = None - return host, nport - return host, defport - -_queryprog = None -def splitquery(url): - """splitquery('/path?query') --> '/path', 'query'.""" - global _queryprog - if _queryprog is None: - import re - _queryprog = re.compile('^(.*)\?([^?]*)$') - - match = _queryprog.match(url) - if match: return match.group(1, 2) - return url, None - -_tagprog = None -def splittag(url): - """splittag('/path#tag') --> '/path', 'tag'.""" - global _tagprog - if _tagprog is None: - import re - _tagprog = re.compile('^(.*)#([^#]*)$') - - match = _tagprog.match(url) - if match: return match.group(1, 2) - return url, None - -def splitattr(url): - """splitattr('/path;attr1=value1;attr2=value2;...') -> - '/path', ['attr1=value1', 'attr2=value2', ...].""" - words = url.split(';') - return words[0], words[1:] - -_valueprog = None -def splitvalue(attr): - """splitvalue('attr=value') --> 'attr', 'value'.""" - global _valueprog - if _valueprog is None: - import re - _valueprog = re.compile('^([^=]*)=(.*)$') - - match = _valueprog.match(attr) - if match: return match.group(1, 2) - return attr, None diff --git a/pype/vendor/future/backports/urllib/request.py b/pype/vendor/future/backports/urllib/request.py deleted file mode 100644 index b1545ca0f5..0000000000 --- a/pype/vendor/future/backports/urllib/request.py +++ /dev/null @@ -1,2641 +0,0 @@ -""" -Ported using Python-Future from the Python 3.3 standard library. - -An extensible library for opening URLs using a variety of protocols - -The simplest way to use this module is to call the urlopen function, -which accepts a string containing a URL or a Request object (described -below). It opens the URL and returns the results as file-like -object; the returned object has some extra methods described below. - -The OpenerDirector manages a collection of Handler objects that do -all the actual work. Each Handler implements a particular protocol or -option. The OpenerDirector is a composite object that invokes the -Handlers needed to open the requested URL. For example, the -HTTPHandler performs HTTP GET and POST requests and deals with -non-error returns. The HTTPRedirectHandler automatically deals with -HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler -deals with digest authentication. - -urlopen(url, data=None) -- Basic usage is the same as original -urllib. pass the url and optionally data to post to an HTTP URL, and -get a file-like object back. One difference is that you can also pass -a Request instance instead of URL. Raises a URLError (subclass of -IOError); for HTTP errors, raises an HTTPError, which can also be -treated as a valid response. - -build_opener -- Function that creates a new OpenerDirector instance. -Will install the default handlers. Accepts one or more Handlers as -arguments, either instances or Handler classes that it will -instantiate. If one of the argument is a subclass of the default -handler, the argument will be installed instead of the default. - -install_opener -- Installs a new opener as the default opener. - -objects of interest: - -OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages -the Handler classes, while dealing with requests and responses. 
- -Request -- An object that encapsulates the state of a request. The -state can be as simple as the URL. It can also include extra HTTP -headers, e.g. a User-Agent. - -BaseHandler -- - -internals: -BaseHandler and parent -_call_chain conventions - -Example usage: - -import urllib.request - -# set up authentication info -authinfo = urllib.request.HTTPBasicAuthHandler() -authinfo.add_password(realm='PDQ Application', - uri='https://mahler:8092/site-updates.py', - user='klem', - passwd='geheim$parole') - -proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) - -# build a new opener that adds authentication and caching FTP handlers -opener = urllib.request.build_opener(proxy_support, authinfo, - urllib.request.CacheFTPHandler) - -# install it -urllib.request.install_opener(opener) - -f = urllib.request.urlopen('http://www.python.org/') -""" - -# XXX issues: -# If an authentication error handler that tries to perform -# authentication for some reason but fails, how should the error be -# signalled? The client needs to know the HTTP error code. But if -# the handler knows that the problem was, e.g., that it didn't know -# that hash algo that requested in the challenge, it would be good to -# pass that information along to the client, too. -# ftp errors aren't handled cleanly -# check digest against correct (i.e. non-apache) implementation - -# Possible extensions: -# complex proxies XXX not sure what exactly was meant by this -# abstract factory for opener - -from __future__ import absolute_import, division, print_function, unicode_literals -from future.builtins import bytes, dict, filter, input, int, map, open, str -from future.utils import PY2, PY3, raise_with_traceback - -import base64 -import bisect -import hashlib -import array - -from future.backports import email -from future.backports.http import client as http_client -from .error import URLError, HTTPError, ContentTooShortError -from .parse import ( - urlparse, urlsplit, urljoin, unwrap, quote, unquote, - splittype, splithost, splitport, splituser, splitpasswd, - splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) -from .response import addinfourl, addclosehook - -import io -import os -import posixpath -import re -import socket -import sys -import time -import collections -import tempfile -import contextlib -import warnings - -# check for SSL -try: - import ssl - # Not available in the SSL module in Py2: - from ssl import SSLContext -except ImportError: - _have_ssl = False -else: - _have_ssl = True - -__all__ = [ - # Classes - 'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler', - 'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler', - 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', - 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', - 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', - 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', - 'UnknownHandler', 'HTTPErrorProcessor', - # Functions - 'urlopen', 'install_opener', 'build_opener', - 'pathname2url', 'url2pathname', 'getproxies', - # Legacy interface - 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', -] - -# used in User-Agent header sent -__version__ = sys.version[:3] - -_opener = None -def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs): - if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault'] - else: cadefault = False - if 'capath' in _3to2kwargs: capath 
= _3to2kwargs['capath']; del _3to2kwargs['capath'] - else: capath = None - if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile'] - else: cafile = None - global _opener - if cafile or capath or cadefault: - if not _have_ssl: - raise ValueError('SSL support not available') - context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - context.options |= ssl.OP_NO_SSLv2 - context.verify_mode = ssl.CERT_REQUIRED - if cafile or capath: - context.load_verify_locations(cafile, capath) - else: - context.set_default_verify_paths() - https_handler = HTTPSHandler(context=context, check_hostname=True) - opener = build_opener(https_handler) - elif _opener is None: - _opener = opener = build_opener() - else: - opener = _opener - return opener.open(url, data, timeout) - -def install_opener(opener): - global _opener - _opener = opener - -_url_tempfiles = [] -def urlretrieve(url, filename=None, reporthook=None, data=None): - """ - Retrieve a URL into a temporary location on disk. - - Requires a URL argument. If a filename is passed, it is used as - the temporary file location. The reporthook argument should be - a callable that accepts a block number, a read size, and the - total file size of the URL target. The data argument should be - valid URL encoded data. - - If a filename is passed and the URL points to a local resource, - the result is a copy from local file to new file. - - Returns a tuple containing the path to the newly created - data file as well as the resulting HTTPMessage object. - """ - url_type, path = splittype(url) - - with contextlib.closing(urlopen(url, data)) as fp: - headers = fp.info() - - # Just return the local path and the "headers" for file:// - # URLs. No sense in performing a copy unless requested. - if url_type == "file" and not filename: - return os.path.normpath(path), headers - - # Handle temporary file setup. - if filename: - tfp = open(filename, 'wb') - else: - tfp = tempfile.NamedTemporaryFile(delete=False) - filename = tfp.name - _url_tempfiles.append(filename) - - with tfp: - result = filename, headers - bs = 1024*8 - size = -1 - read = 0 - blocknum = 0 - if "content-length" in headers: - size = int(headers["Content-Length"]) - - if reporthook: - reporthook(blocknum, bs, size) - - while True: - block = fp.read(bs) - if not block: - break - read += len(block) - tfp.write(block) - blocknum += 1 - if reporthook: - reporthook(blocknum, bs, size) - - if size >= 0 and read < size: - raise ContentTooShortError( - "retrieval incomplete: got only %i out of %i bytes" - % (read, size), result) - - return result - -def urlcleanup(): - for temp_file in _url_tempfiles: - try: - os.unlink(temp_file) - except EnvironmentError: - pass - - del _url_tempfiles[:] - global _opener - if _opener: - _opener = None - -if PY3: - _cut_port_re = re.compile(r":\d+$", re.ASCII) -else: - _cut_port_re = re.compile(r":\d+$") - -def request_host(request): - - """Return request-host, as defined by RFC 2965. - - Variation from RFC: returned value is lowercased, for convenient - comparison. 
- - """ - url = request.full_url - host = urlparse(url)[1] - if host == "": - host = request.get_header("Host", "") - - # remove port, if present - host = _cut_port_re.sub("", host, 1) - return host.lower() - -class Request(object): - - def __init__(self, url, data=None, headers={}, - origin_req_host=None, unverifiable=False, - method=None): - # unwrap('') --> 'type://host/path' - self.full_url = unwrap(url) - self.full_url, self.fragment = splittag(self.full_url) - self.data = data - self.headers = {} - self._tunnel_host = None - for key, value in headers.items(): - self.add_header(key, value) - self.unredirected_hdrs = {} - if origin_req_host is None: - origin_req_host = request_host(self) - self.origin_req_host = origin_req_host - self.unverifiable = unverifiable - self.method = method - self._parse() - - def _parse(self): - self.type, rest = splittype(self.full_url) - if self.type is None: - raise ValueError("unknown url type: %r" % self.full_url) - self.host, self.selector = splithost(rest) - if self.host: - self.host = unquote(self.host) - - def get_method(self): - """Return a string indicating the HTTP request method.""" - if self.method is not None: - return self.method - elif self.data is not None: - return "POST" - else: - return "GET" - - def get_full_url(self): - if self.fragment: - return '%s#%s' % (self.full_url, self.fragment) - else: - return self.full_url - - # Begin deprecated methods - - def add_data(self, data): - msg = "Request.add_data method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - self.data = data - - def has_data(self): - msg = "Request.has_data method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.data is not None - - def get_data(self): - msg = "Request.get_data method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.data - - def get_type(self): - msg = "Request.get_type method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.type - - def get_host(self): - msg = "Request.get_host method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.host - - def get_selector(self): - msg = "Request.get_selector method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.selector - - def is_unverifiable(self): - msg = "Request.is_unverifiable method is deprecated." - warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.unverifiable - - def get_origin_req_host(self): - msg = "Request.get_origin_req_host method is deprecated." 
- warnings.warn(msg, DeprecationWarning, stacklevel=1) - return self.origin_req_host - - # End deprecated methods - - def set_proxy(self, host, type): - if self.type == 'https' and not self._tunnel_host: - self._tunnel_host = self.host - else: - self.type= type - self.selector = self.full_url - self.host = host - - def has_proxy(self): - return self.selector == self.full_url - - def add_header(self, key, val): - # useful for something like authentication - self.headers[key.capitalize()] = val - - def add_unredirected_header(self, key, val): - # will not be added to a redirected request - self.unredirected_hdrs[key.capitalize()] = val - - def has_header(self, header_name): - return (header_name in self.headers or - header_name in self.unredirected_hdrs) - - def get_header(self, header_name, default=None): - return self.headers.get( - header_name, - self.unredirected_hdrs.get(header_name, default)) - - def header_items(self): - hdrs = self.unredirected_hdrs.copy() - hdrs.update(self.headers) - return list(hdrs.items()) - -class OpenerDirector(object): - def __init__(self): - client_version = "Python-urllib/%s" % __version__ - self.addheaders = [('User-agent', client_version)] - # self.handlers is retained only for backward compatibility - self.handlers = [] - # manage the individual handlers - self.handle_open = {} - self.handle_error = {} - self.process_response = {} - self.process_request = {} - - def add_handler(self, handler): - if not hasattr(handler, "add_parent"): - raise TypeError("expected BaseHandler instance, got %r" % - type(handler)) - - added = False - for meth in dir(handler): - if meth in ["redirect_request", "do_open", "proxy_open"]: - # oops, coincidental match - continue - - i = meth.find("_") - protocol = meth[:i] - condition = meth[i+1:] - - if condition.startswith("error"): - j = condition.find("_") + i + 1 - kind = meth[j+1:] - try: - kind = int(kind) - except ValueError: - pass - lookup = self.handle_error.get(protocol, {}) - self.handle_error[protocol] = lookup - elif condition == "open": - kind = protocol - lookup = self.handle_open - elif condition == "response": - kind = protocol - lookup = self.process_response - elif condition == "request": - kind = protocol - lookup = self.process_request - else: - continue - - handlers = lookup.setdefault(kind, []) - if handlers: - bisect.insort(handlers, handler) - else: - handlers.append(handler) - added = True - - if added: - bisect.insort(self.handlers, handler) - handler.add_parent(self) - - def close(self): - # Only exists for backwards compatibility. - pass - - def _call_chain(self, chain, kind, meth_name, *args): - # Handlers raise an exception if no one else should try to handle - # the request, or return None if they can't but another handler - # could. Otherwise, they return the response. - handlers = chain.get(kind, ()) - for handler in handlers: - func = getattr(handler, meth_name) - result = func(*args) - if result is not None: - return result - - def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): - """ - Accept a URL or a Request object - - Python-Future: if the URL is passed as a byte-string, decode it first. 
- """ - if isinstance(fullurl, bytes): - fullurl = fullurl.decode() - if isinstance(fullurl, str): - req = Request(fullurl, data) - else: - req = fullurl - if data is not None: - req.data = data - - req.timeout = timeout - protocol = req.type - - # pre-process request - meth_name = protocol+"_request" - for processor in self.process_request.get(protocol, []): - meth = getattr(processor, meth_name) - req = meth(req) - - response = self._open(req, data) - - # post-process response - meth_name = protocol+"_response" - for processor in self.process_response.get(protocol, []): - meth = getattr(processor, meth_name) - response = meth(req, response) - - return response - - def _open(self, req, data=None): - result = self._call_chain(self.handle_open, 'default', - 'default_open', req) - if result: - return result - - protocol = req.type - result = self._call_chain(self.handle_open, protocol, protocol + - '_open', req) - if result: - return result - - return self._call_chain(self.handle_open, 'unknown', - 'unknown_open', req) - - def error(self, proto, *args): - if proto in ('http', 'https'): - # XXX http[s] protocols are special-cased - dict = self.handle_error['http'] # https is not different than http - proto = args[2] # YUCK! - meth_name = 'http_error_%s' % proto - http_err = 1 - orig_args = args - else: - dict = self.handle_error - meth_name = proto + '_error' - http_err = 0 - args = (dict, proto, meth_name) + args - result = self._call_chain(*args) - if result: - return result - - if http_err: - args = (dict, 'default', 'http_error_default') + orig_args - return self._call_chain(*args) - -# XXX probably also want an abstract factory that knows when it makes -# sense to skip a superclass in favor of a subclass and when it might -# make sense to include both - -def build_opener(*handlers): - """Create an opener object from a list of handlers. - - The opener will use several default handlers, including support - for HTTP, FTP and when applicable HTTPS. - - If any of the handlers passed as arguments are subclasses of the - default handlers, the default handlers will not be used. - """ - def isclass(obj): - return isinstance(obj, type) or hasattr(obj, "__bases__") - - opener = OpenerDirector() - default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, - HTTPDefaultErrorHandler, HTTPRedirectHandler, - FTPHandler, FileHandler, HTTPErrorProcessor] - if hasattr(http_client, "HTTPSConnection"): - default_classes.append(HTTPSHandler) - skip = set() - for klass in default_classes: - for check in handlers: - if isclass(check): - if issubclass(check, klass): - skip.add(klass) - elif isinstance(check, klass): - skip.add(klass) - for klass in skip: - default_classes.remove(klass) - - for klass in default_classes: - opener.add_handler(klass()) - - for h in handlers: - if isclass(h): - h = h() - opener.add_handler(h) - return opener - -class BaseHandler(object): - handler_order = 500 - - def add_parent(self, parent): - self.parent = parent - - def close(self): - # Only exists for backwards compatibility - pass - - def __lt__(self, other): - if not hasattr(other, "handler_order"): - # Try to preserve the old behavior of having custom classes - # inserted after default ones (works only for custom user - # classes which are not aware of handler_order). 
- return True - return self.handler_order < other.handler_order - - -class HTTPErrorProcessor(BaseHandler): - """Process HTTP error responses.""" - handler_order = 1000 # after all other processing - - def http_response(self, request, response): - code, msg, hdrs = response.code, response.msg, response.info() - - # According to RFC 2616, "2xx" code indicates that the client's - # request was successfully received, understood, and accepted. - if not (200 <= code < 300): - response = self.parent.error( - 'http', request, response, code, msg, hdrs) - - return response - - https_response = http_response - -class HTTPDefaultErrorHandler(BaseHandler): - def http_error_default(self, req, fp, code, msg, hdrs): - raise HTTPError(req.full_url, code, msg, hdrs, fp) - -class HTTPRedirectHandler(BaseHandler): - # maximum number of redirections to any single URL - # this is needed because of the state that cookies introduce - max_repeats = 4 - # maximum total number of redirections (regardless of URL) before - # assuming we're in a loop - max_redirections = 10 - - def redirect_request(self, req, fp, code, msg, headers, newurl): - """Return a Request or None in response to a redirect. - - This is called by the http_error_30x methods when a - redirection response is received. If a redirection should - take place, return a new Request to allow http_error_30x to - perform the redirect. Otherwise, raise HTTPError if no-one - else should try to handle this url. Return None if you can't - but another Handler might. - """ - m = req.get_method() - if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD") - or code in (301, 302, 303) and m == "POST")): - raise HTTPError(req.full_url, code, msg, headers, fp) - - # Strictly (according to RFC 2616), 301 or 302 in response to - # a POST MUST NOT cause a redirection without confirmation - # from the user (of urllib.request, in this case). In practice, - # essentially all clients do redirect in this case, so we do - # the same. - # be conciliant with URIs containing a space - newurl = newurl.replace(' ', '%20') - CONTENT_HEADERS = ("content-length", "content-type") - newheaders = dict((k, v) for k, v in req.headers.items() - if k.lower() not in CONTENT_HEADERS) - return Request(newurl, - headers=newheaders, - origin_req_host=req.origin_req_host, - unverifiable=True) - - # Implementation note: To avoid the server sending us into an - # infinite loop, the request object needs to track what URLs we - # have already seen. Do this by adding a handler-specific - # attribute to the Request object. - def http_error_302(self, req, fp, code, msg, headers): - # Some servers (incorrectly) return multiple Location headers - # (so probably same goes for URI). Use first header. - if "location" in headers: - newurl = headers["location"] - elif "uri" in headers: - newurl = headers["uri"] - else: - return - - # fix a possible malformed URL - urlparts = urlparse(newurl) - - # For security reasons we don't allow redirection to anything other - # than http, https or ftp. 
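# Usage sketch (editorial aside): customizing the redirect policy described
# above by overriding redirect_request(); returning None lets another handler
# try, while raising HTTPError stops the chain. A minimal sketch against the
# Python 3 stdlib equivalent; the print logging is illustrative only.
import urllib.request

class LoggingRedirectHandler(urllib.request.HTTPRedirectHandler):
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        # Log every 30x hop before delegating to the default policy.
        print("redirect %s: %s -> %s" % (code, req.full_url, newurl))
        return super().redirect_request(req, fp, code, msg, headers, newurl)

# build_opener() merges this handler with the defaults; install_opener()
# makes it the process-wide opener used by urlopen().
opener = urllib.request.build_opener(LoggingRedirectHandler)
urllib.request.install_opener(opener)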
- - if urlparts.scheme not in ('http', 'https', 'ftp', ''): - raise HTTPError( - newurl, code, - "%s - Redirection to url '%s' is not allowed" % (msg, newurl), - headers, fp) - - if not urlparts.path: - urlparts = list(urlparts) - urlparts[2] = "/" - newurl = urlunparse(urlparts) - - newurl = urljoin(req.full_url, newurl) - - # XXX Probably want to forget about the state of the current - # request, although that might interact poorly with other - # handlers that also use handler-specific request attributes - new = self.redirect_request(req, fp, code, msg, headers, newurl) - if new is None: - return - - # loop detection - # .redirect_dict has a key url if url was previously visited. - if hasattr(req, 'redirect_dict'): - visited = new.redirect_dict = req.redirect_dict - if (visited.get(newurl, 0) >= self.max_repeats or - len(visited) >= self.max_redirections): - raise HTTPError(req.full_url, code, - self.inf_msg + msg, headers, fp) - else: - visited = new.redirect_dict = req.redirect_dict = {} - visited[newurl] = visited.get(newurl, 0) + 1 - - # Don't close the fp until we are sure that we won't use it - # with HTTPError. - fp.read() - fp.close() - - return self.parent.open(new, timeout=req.timeout) - - http_error_301 = http_error_303 = http_error_307 = http_error_302 - - inf_msg = "The HTTP server returned a redirect error that would " \ - "lead to an infinite loop.\n" \ - "The last 30x error message was:\n" - - -def _parse_proxy(proxy): - """Return (scheme, user, password, host/port) given a URL or an authority. - - If a URL is supplied, it must have an authority (host:port) component. - According to RFC 3986, having an authority component means the URL must - have two slashes after the scheme: - - >>> _parse_proxy('file:/ftp.example.com/') - Traceback (most recent call last): - ValueError: proxy URL with no authority: 'file:/ftp.example.com/' - - The first three items of the returned tuple may be None. 
- - Examples of authority parsing: - - >>> _parse_proxy('proxy.example.com') - (None, None, None, 'proxy.example.com') - >>> _parse_proxy('proxy.example.com:3128') - (None, None, None, 'proxy.example.com:3128') - - The authority component may optionally include userinfo (assumed to be - username:password): - - >>> _parse_proxy('joe:password@proxy.example.com') - (None, 'joe', 'password', 'proxy.example.com') - >>> _parse_proxy('joe:password@proxy.example.com:3128') - (None, 'joe', 'password', 'proxy.example.com:3128') - - Same examples, but with URLs instead: - - >>> _parse_proxy('http://proxy.example.com/') - ('http', None, None, 'proxy.example.com') - >>> _parse_proxy('http://proxy.example.com:3128/') - ('http', None, None, 'proxy.example.com:3128') - >>> _parse_proxy('http://joe:password@proxy.example.com/') - ('http', 'joe', 'password', 'proxy.example.com') - >>> _parse_proxy('http://joe:password@proxy.example.com:3128') - ('http', 'joe', 'password', 'proxy.example.com:3128') - - Everything after the authority is ignored: - - >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128') - ('ftp', 'joe', 'password', 'proxy.example.com') - - Test for no trailing '/' case: - - >>> _parse_proxy('http://joe:password@proxy.example.com') - ('http', 'joe', 'password', 'proxy.example.com') - - """ - scheme, r_scheme = splittype(proxy) - if not r_scheme.startswith("/"): - # authority - scheme = None - authority = proxy - else: - # URL - if not r_scheme.startswith("//"): - raise ValueError("proxy URL with no authority: %r" % proxy) - # We have an authority, so for RFC 3986-compliant URLs (by ss 3. - # and 3.3.), path is empty or starts with '/' - end = r_scheme.find("/", 2) - if end == -1: - end = None - authority = r_scheme[2:end] - userinfo, hostport = splituser(authority) - if userinfo is not None: - user, password = splitpasswd(userinfo) - else: - user = password = None - return scheme, user, password, hostport - -class ProxyHandler(BaseHandler): - # Proxies must be in front - handler_order = 100 - - def __init__(self, proxies=None): - if proxies is None: - proxies = getproxies() - assert hasattr(proxies, 'keys'), "proxies must be a mapping" - self.proxies = proxies - for type, url in proxies.items(): - setattr(self, '%s_open' % type, - lambda r, proxy=url, type=type, meth=self.proxy_open: - meth(r, proxy, type)) - - def proxy_open(self, req, proxy, type): - orig_type = req.type - proxy_type, user, password, hostport = _parse_proxy(proxy) - if proxy_type is None: - proxy_type = orig_type - - if req.host and proxy_bypass(req.host): - return None - - if user and password: - user_pass = '%s:%s' % (unquote(user), - unquote(password)) - creds = base64.b64encode(user_pass.encode()).decode("ascii") - req.add_header('Proxy-authorization', 'Basic ' + creds) - hostport = unquote(hostport) - req.set_proxy(hostport, proxy_type) - if orig_type == proxy_type or orig_type == 'https': - # let other handlers take care of it - return None - else: - # need to start over, because the other handlers don't - # grok the proxy's URL type - # e.g. 
if we have a constructor arg proxies like so: - # {'http': 'ftp://proxy.example.com'}, we may end up turning - # a request for http://acme.example.com/a into one for - # ftp://proxy.example.com/a - return self.parent.open(req, timeout=req.timeout) - -class HTTPPasswordMgr(object): - - def __init__(self): - self.passwd = {} - - def add_password(self, realm, uri, user, passwd): - # uri could be a single URI or a sequence - if isinstance(uri, str): - uri = [uri] - if realm not in self.passwd: - self.passwd[realm] = {} - for default_port in True, False: - reduced_uri = tuple( - [self.reduce_uri(u, default_port) for u in uri]) - self.passwd[realm][reduced_uri] = (user, passwd) - - def find_user_password(self, realm, authuri): - domains = self.passwd.get(realm, {}) - for default_port in True, False: - reduced_authuri = self.reduce_uri(authuri, default_port) - for uris, authinfo in domains.items(): - for uri in uris: - if self.is_suburi(uri, reduced_authuri): - return authinfo - return None, None - - def reduce_uri(self, uri, default_port=True): - """Accept authority or URI and extract only the authority and path.""" - # note HTTP URLs do not have a userinfo component - parts = urlsplit(uri) - if parts[1]: - # URI - scheme = parts[0] - authority = parts[1] - path = parts[2] or '/' - else: - # host or host:port - scheme = None - authority = uri - path = '/' - host, port = splitport(authority) - if default_port and port is None and scheme is not None: - dport = {"http": 80, - "https": 443, - }.get(scheme) - if dport is not None: - authority = "%s:%d" % (host, dport) - return authority, path - - def is_suburi(self, base, test): - """Check if test is below base in a URI tree - - Both args must be URIs in reduced form. - """ - if base == test: - return True - if base[0] != test[0]: - return False - common = posixpath.commonprefix((base[1], test[1])) - if len(common) == len(base[1]): - return True - return False - - -class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): - - def find_user_password(self, realm, authuri): - user, password = HTTPPasswordMgr.find_user_password(self, realm, - authuri) - if user is not None: - return user, password - return HTTPPasswordMgr.find_user_password(self, None, authuri) - - -class AbstractBasicAuthHandler(object): - - # XXX this allows for multiple auth-schemes, but will stupidly pick - # the last one with a realm specified. - - # allow for double- and single-quoted realm values - # (single quotes are a violation of the RFC, but appear in the wild) - rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+' - 'realm=(["\']?)([^"\']*)\\2', re.I) - - # XXX could pre-emptively send auth info already accepted (RFC 2617, - # end of section 2, and section 1.2 immediately after "credentials" - # production). - - def __init__(self, password_mgr=None): - if password_mgr is None: - password_mgr = HTTPPasswordMgr() - self.passwd = password_mgr - self.add_password = self.passwd.add_password - self.retried = 0 - - def reset_retry_count(self): - self.retried = 0 - - def http_error_auth_reqed(self, authreq, host, req, headers): - # host may be an authority (without userinfo) or a URL with an - # authority - # XXX could be multiple headers - authreq = headers.get(authreq, None) - - if self.retried > 5: - # retry sending the username:password 5 times before failing. 
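# Usage sketch (editorial aside) tying HTTPPasswordMgr to the auth handlers
# around this point: realm/URI pairs are reduced with reduce_uri() and matched
# with is_suburi() when a 401/407 challenge arrives. Python 3 stdlib names;
# the host, realm and credentials below are placeholders.
import urllib.request

passwd = urllib.request.HTTPPasswordMgrWithDefaultRealm()
# A realm of None registers a default entry, used when no specific realm matches.
passwd.add_password(None, 'https://api.example.com/', 'alice', 's3cret')

opener = urllib.request.build_opener(
    urllib.request.HTTPBasicAuthHandler(passwd),
    urllib.request.ProxyHandler({'http': 'http://proxy.example.com:3128'}),
)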
-
-
-class AbstractBasicAuthHandler(object):
-
-    # XXX this allows for multiple auth-schemes, but will stupidly pick
-    # the last one with a realm specified.
-
-    # allow for double- and single-quoted realm values
-    # (single quotes are a violation of the RFC, but appear in the wild)
-    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
-                    'realm=(["\']?)([^"\']*)\\2', re.I)
-
-    # XXX could pre-emptively send auth info already accepted (RFC 2617,
-    # end of section 2, and section 1.2 immediately after "credentials"
-    # production).
-
-    def __init__(self, password_mgr=None):
-        if password_mgr is None:
-            password_mgr = HTTPPasswordMgr()
-        self.passwd = password_mgr
-        self.add_password = self.passwd.add_password
-        self.retried = 0
-
-    def reset_retry_count(self):
-        self.retried = 0
-
-    def http_error_auth_reqed(self, authreq, host, req, headers):
-        # host may be an authority (without userinfo) or a URL with an
-        # authority
-        # XXX could be multiple headers
-        authreq = headers.get(authreq, None)
-
-        if self.retried > 5:
-            # retry sending the username:password 5 times before failing.
-            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
-                            headers, None)
-        else:
-            self.retried += 1
-
-        if authreq:
-            scheme = authreq.split()[0]
-            if scheme.lower() != 'basic':
-                raise ValueError("AbstractBasicAuthHandler does not"
-                                 " support the following scheme: '%s'" %
-                                 scheme)
-            else:
-                mo = AbstractBasicAuthHandler.rx.search(authreq)
-                if mo:
-                    scheme, quote, realm = mo.groups()
-                    if quote not in ['"',"'"]:
-                        warnings.warn("Basic Auth Realm was unquoted",
-                                      UserWarning, 2)
-                    if scheme.lower() == 'basic':
-                        response = self.retry_http_basic_auth(host, req, realm)
-                        if response and response.code != 401:
-                            self.retried = 0
-                        return response
-
-    def retry_http_basic_auth(self, host, req, realm):
-        user, pw = self.passwd.find_user_password(realm, host)
-        if pw is not None:
-            raw = "%s:%s" % (user, pw)
-            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
-            if req.headers.get(self.auth_header, None) == auth:
-                return None
-            req.add_unredirected_header(self.auth_header, auth)
-            return self.parent.open(req, timeout=req.timeout)
-        else:
-            return None
-
-
-class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
-
-    auth_header = 'Authorization'
-
-    def http_error_401(self, req, fp, code, msg, headers):
-        url = req.full_url
-        response = self.http_error_auth_reqed('www-authenticate',
-                                              url, req, headers)
-        self.reset_retry_count()
-        return response
-
-
-class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
-
-    auth_header = 'Proxy-authorization'
-
-    def http_error_407(self, req, fp, code, msg, headers):
-        # http_error_auth_reqed requires that there is no userinfo component in
-        # authority. Assume there isn't one, since urllib.request does not (and
-        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
-        # userinfo.
-        authority = req.host
-        response = self.http_error_auth_reqed('proxy-authenticate',
-                                              authority, req, headers)
-        self.reset_retry_count()
-        return response
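The two Basic handlers above are normally combined with a password manager through build_opener; a minimal sketch with the stdlib names (URL and credentials are invented):

    import urllib.request

    mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    mgr.add_password(None, "http://example.com/", "joe", "secret")
    opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(mgr))
    # On a 401 carrying "WWW-Authenticate: Basic realm=...", http_error_401
    # looks the credentials up and retries with an Authorization header:
    # opener.open("http://example.com/protected")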
-
-
-# Return n random bytes.
-_randombytes = os.urandom
-
-
-class AbstractDigestAuthHandler(object):
-    # Digest authentication is specified in RFC 2617.
-
-    # XXX The client does not inspect the Authentication-Info header
-    # in a successful response.
-
-    # XXX It should be possible to test this implementation against
-    # a mock server that just generates a static set of challenges.
-
-    # XXX qop="auth-int" supports is shaky
-
-    def __init__(self, passwd=None):
-        if passwd is None:
-            passwd = HTTPPasswordMgr()
-        self.passwd = passwd
-        self.add_password = self.passwd.add_password
-        self.retried = 0
-        self.nonce_count = 0
-        self.last_nonce = None
-
-    def reset_retry_count(self):
-        self.retried = 0
-
-    def http_error_auth_reqed(self, auth_header, host, req, headers):
-        authreq = headers.get(auth_header, None)
-        if self.retried > 5:
-            # Don't fail endlessly - if we failed once, we'll probably
-            # fail a second time. Hm. Unless the Password Manager is
-            # prompting for the information. Crap. This isn't great
-            # but it's better than the current 'repeat until recursion
-            # depth exceeded' approach
-            raise HTTPError(req.full_url, 401, "digest auth failed",
-                            headers, None)
-        else:
-            self.retried += 1
-        if authreq:
-            scheme = authreq.split()[0]
-            if scheme.lower() == 'digest':
-                return self.retry_http_digest_auth(req, authreq)
-            elif scheme.lower() != 'basic':
-                raise ValueError("AbstractDigestAuthHandler does not support"
-                                 " the following scheme: '%s'" % scheme)
-
-    def retry_http_digest_auth(self, req, auth):
-        token, challenge = auth.split(' ', 1)
-        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
-        auth = self.get_authorization(req, chal)
-        if auth:
-            auth_val = 'Digest %s' % auth
-            if req.headers.get(self.auth_header, None) == auth_val:
-                return None
-            req.add_unredirected_header(self.auth_header, auth_val)
-            resp = self.parent.open(req, timeout=req.timeout)
-            return resp
-
-    def get_cnonce(self, nonce):
-        # The cnonce-value is an opaque
-        # quoted string value provided by the client and used by both client
-        # and server to avoid chosen plaintext attacks, to provide mutual
-        # authentication, and to provide some message integrity protection.
-        # This isn't a fabulous effort, but it's probably Good Enough.
-        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
-        b = s.encode("ascii") + _randombytes(8)
-        dig = hashlib.sha1(b).hexdigest()
-        return dig[:16]
-
-    def get_authorization(self, req, chal):
-        try:
-            realm = chal['realm']
-            nonce = chal['nonce']
-            qop = chal.get('qop')
-            algorithm = chal.get('algorithm', 'MD5')
-            # mod_digest doesn't send an opaque, even though it isn't
-            # supposed to be optional
-            opaque = chal.get('opaque', None)
-        except KeyError:
-            return None
-
-        H, KD = self.get_algorithm_impls(algorithm)
-        if H is None:
-            return None
-
-        user, pw = self.passwd.find_user_password(realm, req.full_url)
-        if user is None:
-            return None
-
-        # XXX not implemented yet
-        if req.data is not None:
-            entdig = self.get_entity_digest(req.data, chal)
-        else:
-            entdig = None
-
-        A1 = "%s:%s:%s" % (user, realm, pw)
-        A2 = "%s:%s" % (req.get_method(),
-                        # XXX selector: what about proxies and full urls
-                        req.selector)
-        if qop == 'auth':
-            if nonce == self.last_nonce:
-                self.nonce_count += 1
-            else:
-                self.nonce_count = 1
-                self.last_nonce = nonce
-            ncvalue = '%08x' % self.nonce_count
-            cnonce = self.get_cnonce(nonce)
-            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
-            respdig = KD(H(A1), noncebit)
-        elif qop is None:
-            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
-        else:
-            # XXX handle auth-int.
-            raise URLError("qop '%s' is not supported." % qop)
-
-        # XXX should the partial digests be encoded too?
-
-        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
-               'response="%s"' % (user, realm, nonce, req.selector,
-                                  respdig)
-        if opaque:
-            base += ', opaque="%s"' % opaque
-        if entdig:
-            base += ', digest="%s"' % entdig
-        base += ', algorithm="%s"' % algorithm
-        if qop:
-            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
-        return base
-
-    def get_algorithm_impls(self, algorithm):
-        # lambdas assume digest modules are imported at the top level
-        if algorithm == 'MD5':
-            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
-        elif algorithm == 'SHA':
-            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
-        # XXX MD5-sess
-        KD = lambda s, d: H("%s:%s" % (s, d))
-        return H, KD
-
-    def get_entity_digest(self, data, chal):
-        # XXX not implemented yet
-        return None
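get_authorization() above assembles the RFC 2617 response digest; the same arithmetic spelled out as a standalone sketch (the challenge values and credentials here are invented):

    import hashlib

    def H(x):
        return hashlib.md5(x.encode("ascii")).hexdigest()

    A1 = "joe:testrealm:secret"   # user:realm:password
    A2 = "GET:/protected"         # method:selector
    # For qop="auth" the response is KD(H(A1), nonce:nc:cnonce:qop:H(A2)),
    # where KD(s, d) = H(s + ":" + d):
    respdig = H(H(A1) + ":" + "dcd98b7102dd2f0e:00000001:0a4f113b:auth:" + H(A2))
    print(respdig)  # value sent back in the response="..." field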
-
-
-class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
-    """An authentication protocol defined by RFC 2069
-
-    Digest authentication improves on basic authentication because it
-    does not transmit passwords in the clear.
-    """
-
-    auth_header = 'Authorization'
-    handler_order = 490 # before Basic auth
-
-    def http_error_401(self, req, fp, code, msg, headers):
-        host = urlparse(req.full_url)[1]
-        retry = self.http_error_auth_reqed('www-authenticate',
-                                           host, req, headers)
-        self.reset_retry_count()
-        return retry
-
-
-class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
-
-    auth_header = 'Proxy-Authorization'
-    handler_order = 490 # before Basic auth
-
-    def http_error_407(self, req, fp, code, msg, headers):
-        host = req.host
-        retry = self.http_error_auth_reqed('proxy-authenticate',
-                                           host, req, headers)
-        self.reset_retry_count()
-        return retry
-
-class AbstractHTTPHandler(BaseHandler):
-
-    def __init__(self, debuglevel=0):
-        self._debuglevel = debuglevel
-
-    def set_http_debuglevel(self, level):
-        self._debuglevel = level
-
-    def do_request_(self, request):
-        host = request.host
-        if not host:
-            raise URLError('no host given')
-
-        if request.data is not None:  # POST
-            data = request.data
-            if isinstance(data, str):
-                msg = "POST data should be bytes or an iterable of bytes. " \
-                      "It cannot be of type str."
-                raise TypeError(msg)
-            if not request.has_header('Content-type'):
-                request.add_unredirected_header(
-                    'Content-type',
-                    'application/x-www-form-urlencoded')
-            if not request.has_header('Content-length'):
-                size = None
-                try:
-                    ### For Python-Future:
-                    if PY2 and isinstance(data, array.array):
-                        # memoryviews of arrays aren't supported
-                        # in Py2.7. (e.g. memoryview(array.array('I',
-                        # [1, 2, 3, 4])) raises a TypeError.)
-                        # So we calculate the size manually instead:
-                        size = len(data) * data.itemsize
-                    ###
-                    else:
-                        mv = memoryview(data)
-                        size = len(mv) * mv.itemsize
-                except TypeError:
-                    if isinstance(data, collections.Iterable):
-                        raise ValueError("Content-Length should be specified "
-                                         "for iterable data of type %r %r" % (type(data),
-                                         data))
-                else:
-                    request.add_unredirected_header(
-                        'Content-length', '%d' % size)
-
-        sel_host = host
-        if request.has_proxy():
-            scheme, sel = splittype(request.selector)
-            sel_host, sel_path = splithost(sel)
-        if not request.has_header('Host'):
-            request.add_unredirected_header('Host', sel_host)
-        for name, value in self.parent.addheaders:
-            name = name.capitalize()
-            if not request.has_header(name):
-                request.add_unredirected_header(name, value)
-
-        return request
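do_request_ above sizes the request body with a memoryview so that any bytes-like object (bytes, bytearray, array) yields a correct Content-length; a tiny illustration (payload invented):

    data = bytearray(b"key=value&x=1")
    mv = memoryview(data)
    print(len(mv) * mv.itemsize)  # 13, sent as the Content-length header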
-
-    def do_open(self, http_class, req, **http_conn_args):
-        """Return an HTTPResponse object for the request, using http_class.
-
-        http_class must implement the HTTPConnection API from http.client.
-        """
-        host = req.host
-        if not host:
-            raise URLError('no host given')
-
-        # will parse host:port
-        h = http_class(host, timeout=req.timeout, **http_conn_args)
-
-        headers = dict(req.unredirected_hdrs)
-        headers.update(dict((k, v) for k, v in req.headers.items()
-                            if k not in headers))
-
-        # TODO(jhylton): Should this be redesigned to handle
-        # persistent connections?
-
-        # We want to make an HTTP/1.1 request, but the addinfourl
-        # class isn't prepared to deal with a persistent connection.
-        # It will try to read all remaining data from the socket,
-        # which will block while the server waits for the next request.
-        # So make sure the connection gets closed after the (only)
-        # request.
-        headers["Connection"] = "close"
-        headers = dict((name.title(), val) for name, val in headers.items())
-
-        if req._tunnel_host:
-            tunnel_headers = {}
-            proxy_auth_hdr = "Proxy-Authorization"
-            if proxy_auth_hdr in headers:
-                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
-                # Proxy-Authorization should not be sent to origin
-                # server.
-                del headers[proxy_auth_hdr]
-            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
-
-        try:
-            h.request(req.get_method(), req.selector, req.data, headers)
-        except socket.error as err: # timeout error
-            h.close()
-            raise URLError(err)
-        else:
-            r = h.getresponse()
-            # If the server does not send us a 'Connection: close' header,
-            # HTTPConnection assumes the socket should be left open. Manually
-            # mark the socket to be closed when this response object goes away.
-            if h.sock:
-                h.sock.close()
-                h.sock = None
-
-
-        r.url = req.get_full_url()
-        # This line replaces the .msg attribute of the HTTPResponse
-        # with .headers, because urllib clients expect the response to
-        # have the reason in .msg. It would be good to mark this
-        # attribute is deprecated and get then to use info() or
-        # .headers.
-        r.msg = r.reason
-        return r
-
-
-class HTTPHandler(AbstractHTTPHandler):
-
-    def http_open(self, req):
-        return self.do_open(http_client.HTTPConnection, req)
-
-    http_request = AbstractHTTPHandler.do_request_
-
-if hasattr(http_client, 'HTTPSConnection'):
-
-    class HTTPSHandler(AbstractHTTPHandler):
-
-        def __init__(self, debuglevel=0, context=None, check_hostname=None):
-            AbstractHTTPHandler.__init__(self, debuglevel)
-            self._context = context
-            self._check_hostname = check_hostname
-
-        def https_open(self, req):
-            return self.do_open(http_client.HTTPSConnection, req,
-                context=self._context, check_hostname=self._check_hostname)
-
-        https_request = AbstractHTTPHandler.do_request_
-
-    __all__.append('HTTPSHandler')
-
-class HTTPCookieProcessor(BaseHandler):
-    def __init__(self, cookiejar=None):
-        import future.backports.http.cookiejar as http_cookiejar
-        if cookiejar is None:
-            cookiejar = http_cookiejar.CookieJar()
-        self.cookiejar = cookiejar
-
-    def http_request(self, request):
-        self.cookiejar.add_cookie_header(request)
-        return request
-
-    def http_response(self, request, response):
-        self.cookiejar.extract_cookies(response, request)
-        return response
-
-    https_request = http_request
-    https_response = http_response
-
-class UnknownHandler(BaseHandler):
-    def unknown_open(self, req):
-        type = req.type
-        raise URLError('unknown url type: %s' % type)
-
-def parse_keqv_list(l):
-    """Parse list of key=value strings where keys are not duplicated."""
-    parsed = {}
-    for elt in l:
-        k, v = elt.split('=', 1)
-        if v[0] == '"' and v[-1] == '"':
-            v = v[1:-1]
-        parsed[k] = v
-    return parsed
-
-def parse_http_list(s):
-    """Parse lists as described by RFC 2068 Section 2.
-
-    In particular, parse comma-separated lists where the elements of
-    the list may include quoted-strings. A quoted-string could
-    contain a comma. A non-quoted string could have quotes in the
-    middle. Neither commas nor quotes count if they are escaped.
-    Only double-quotes count, not single-quotes.
-    """
-    res = []
-    part = ''
-
-    escape = quote = False
-    for cur in s:
-        if escape:
-            part += cur
-            escape = False
-            continue
-        if quote:
-            if cur == '\\':
-                escape = True
-                continue
-            elif cur == '"':
-                quote = False
-            part += cur
-            continue
-
-        if cur == ',':
-            res.append(part)
-            part = ''
-            continue
-
-        if cur == '"':
-            quote = True
-
-        part += cur
-
-    # append last part
-    if part:
-        res.append(part)
-
-    return [part.strip() for part in res]
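The two parsers above are used together when digest challenges are unpacked (see retry_http_digest_auth earlier); a quick illustration of their behaviour, assuming the identically-named helpers in the stdlib urllib.request (inputs invented):

    from urllib.request import parse_http_list, parse_keqv_list

    print(parse_http_list('a, "b, c", d'))
    # ['a', '"b, c"', 'd'] -- commas inside quoted-strings survive
    print(parse_keqv_list(['realm="x"', 'qop=auth']))
    # {'realm': 'x', 'qop': 'auth'}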
-
-class FileHandler(BaseHandler):
-    # Use local file or FTP depending on form of URL
-    def file_open(self, req):
-        url = req.selector
-        if url[:2] == '//' and url[2:3] != '/' and (req.host and
-                req.host != 'localhost'):
-            if not req.host is self.get_names():
-                raise URLError("file:// scheme is supported only on localhost")
-        else:
-            return self.open_local_file(req)
-
-    # names for the localhost
-    names = None
-    def get_names(self):
-        if FileHandler.names is None:
-            try:
-                FileHandler.names = tuple(
-                    socket.gethostbyname_ex('localhost')[2] +
-                    socket.gethostbyname_ex(socket.gethostname())[2])
-            except socket.gaierror:
-                FileHandler.names = (socket.gethostbyname('localhost'),)
-        return FileHandler.names
-
-    # not entirely sure what the rules are here
-    def open_local_file(self, req):
-        import future.backports.email.utils as email_utils
-        import mimetypes
-        host = req.host
-        filename = req.selector
-        localfile = url2pathname(filename)
-        try:
-            stats = os.stat(localfile)
-            size = stats.st_size
-            modified = email_utils.formatdate(stats.st_mtime, usegmt=True)
-            mtype = mimetypes.guess_type(filename)[0]
-            headers = email.message_from_string(
-                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
-                (mtype or 'text/plain', size, modified))
-            if host:
-                host, port = splitport(host)
-            if not host or \
-                (not port and _safe_gethostbyname(host) in self.get_names()):
-                if host:
-                    origurl = 'file://' + host + filename
-                else:
-                    origurl = 'file://' + filename
-                return addinfourl(open(localfile, 'rb'), headers, origurl)
-        except OSError as exp:
-            # users shouldn't expect OSErrors coming from urlopen()
-            raise URLError(exp)
-        raise URLError('file not on local host')
-
-def _safe_gethostbyname(host):
-    try:
-        return socket.gethostbyname(host)
-    except socket.gaierror:
-        return None
-
-class FTPHandler(BaseHandler):
-    def ftp_open(self, req):
-        import ftplib
-        import mimetypes
-        host = req.host
-        if not host:
-            raise URLError('ftp error: no host given')
-        host, port = splitport(host)
-        if port is None:
-            port = ftplib.FTP_PORT
-        else:
-            port = int(port)
-
-        # username/password handling
-        user, host = splituser(host)
-        if user:
-            user, passwd = splitpasswd(user)
-        else:
-            passwd = None
-        host = unquote(host)
-        user = user or ''
-        passwd = passwd or ''
-
-        try:
-            host = socket.gethostbyname(host)
-        except socket.error as msg:
-            raise URLError(msg)
-        path, attrs = splitattr(req.selector)
-        dirs = path.split('/')
-        dirs = list(map(unquote, dirs))
-        dirs, file = dirs[:-1], dirs[-1]
-        if dirs and not dirs[0]:
-            dirs = dirs[1:]
-        try:
-            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
-            type = file and 'I' or 'D'
-            for attr in attrs:
-                attr, value = splitvalue(attr)
-                if attr.lower() == 'type' and \
-                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
-                    type = value.upper()
-            fp, retrlen = fw.retrfile(file, type)
-            headers = ""
-            mtype = mimetypes.guess_type(req.full_url)[0]
-            if mtype:
-                headers += "Content-type: %s\n" % mtype
-            if retrlen is not None and retrlen >= 0:
-                headers += "Content-length: %d\n" % retrlen
-            headers = email.message_from_string(headers)
-            return addinfourl(fp, headers, req.full_url)
-        except ftplib.all_errors as exp:
-            exc = URLError('ftp error: %r' % exp)
-            raise_with_traceback(exc)
-
-    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
-        return ftpwrapper(user, passwd, host, port, dirs, timeout,
-                          persistent=False)
-
-class CacheFTPHandler(FTPHandler):
-    # XXX would be nice to have pluggable cache strategies
-    # XXX this stuff is definitely not thread safe
-    def __init__(self):
-        self.cache = {}
-        self.timeout = {}
-        self.soonest = 0
-        self.delay = 60
-        self.max_conns = 16
-
-    def setTimeout(self, t):
-        self.delay = t
-
-    def setMaxConns(self, m):
-        self.max_conns = m
-
-    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
-        key = user, host, port, '/'.join(dirs), timeout
-        if key in self.cache:
-            self.timeout[key] = time.time() + self.delay
-        else:
-            self.cache[key] = ftpwrapper(user, passwd, host, port,
-                                         dirs, timeout)
-            self.timeout[key] = time.time() + self.delay
-        self.check_cache()
-        return self.cache[key]
-
-    def check_cache(self):
-        # first check for old ones
-        t = time.time()
-        if self.soonest <= t:
-            for k, v in list(self.timeout.items()):
-                if v < t:
-                    self.cache[k].close()
-                    del self.cache[k]
-                    del self.timeout[k]
-        self.soonest = min(list(self.timeout.values()))
-
-        # then check the size
-        if len(self.cache) == self.max_conns:
-            for k, v in list(self.timeout.items()):
-                if v == self.soonest:
-                    del self.cache[k]
-                    del self.timeout[k]
-                    break
-            self.soonest = min(list(self.timeout.values()))
-
-    def clear_cache(self):
-        for conn in self.cache.values():
-            conn.close()
-        self.cache.clear()
-        self.timeout.clear()
-
-
-# Code move from the old urllib module
-
-MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
-
-# Helper for non-unix systems
-if os.name == 'nt':
-    from nturl2path import url2pathname, pathname2url
-else:
-    def url2pathname(pathname):
-        """OS-specific conversion from a relative URL of the 'file' scheme
-        to a file system path; not recommended for general use."""
-        return unquote(pathname)
-
-    def pathname2url(pathname):
-        """OS-specific conversion from a file system path to a relative URL
-        of the 'file' scheme; not recommended for general use."""
-        return quote(pathname)
-
-# This really consists of two pieces:
-# (1) a class which handles opening of all sorts of URLs
-#     (plus assorted utilities etc.)
-# (2) a set of functions for parsing URLs
-# XXX Should these be separated out into different modules?
-
-
-ftpcache = {}
-class URLopener(object):
-    """Class to open URLs.
-    This is a class rather than just a subroutine because we may need
-    more than one set of global protocol-specific options.
-    Note -- this is a base class for those who don't want the
-    automatic handling of errors type 302 (relocated) and 401
-    (authorization needed)."""
-
-    __tempfiles = None
-
-    version = "Python-urllib/%s" % __version__
-
-    # Constructor
-    def __init__(self, proxies=None, **x509):
-        msg = "%(class)s style of invoking requests is deprecated. " \
-              "Use newer urlopen functions/methods" % {'class': self.__class__.__name__}
-        warnings.warn(msg, DeprecationWarning, stacklevel=3)
-        if proxies is None:
-            proxies = getproxies()
-        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
-        self.proxies = proxies
-        self.key_file = x509.get('key_file')
-        self.cert_file = x509.get('cert_file')
-        self.addheaders = [('User-Agent', self.version)]
-        self.__tempfiles = []
-        self.__unlink = os.unlink # See cleanup()
-        self.tempcache = None
-        # Undocumented feature: if you assign {} to tempcache,
-        # it is used to cache files retrieved with
-        # self.retrieve(). This is not enabled by default
-        # since it does not work for changing documents (and I
-        # haven't got the logic to check expiration headers
-        # yet).
-        self.ftpcache = ftpcache
-        # Undocumented feature: you can use a different
-        # ftp cache by assigning to the .ftpcache member;
-        # in case you want logically independent URL openers
-        # XXX This is not threadsafe. Bah.
-
-    def __del__(self):
-        self.close()
-
-    def close(self):
-        self.cleanup()
-
-    def cleanup(self):
-        # This code sometimes runs when the rest of this module
-        # has already been deleted, so it can't use any globals
-        # or import anything.
-        if self.__tempfiles:
-            for file in self.__tempfiles:
-                try:
-                    self.__unlink(file)
-                except OSError:
-                    pass
-            del self.__tempfiles[:]
-        if self.tempcache:
-            self.tempcache.clear()
-
-    def addheader(self, *args):
-        """Add a header to be used by the HTTP interface only
-        e.g. u.addheader('Accept', 'sound/basic')"""
-        self.addheaders.append(args)
-
-    # External interface
-    def open(self, fullurl, data=None):
-        """Use URLopener().open(file) instead of open(file, 'r')."""
-        fullurl = unwrap(to_bytes(fullurl))
-        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
-        if self.tempcache and fullurl in self.tempcache:
-            filename, headers = self.tempcache[fullurl]
-            fp = open(filename, 'rb')
-            return addinfourl(fp, headers, fullurl)
-        urltype, url = splittype(fullurl)
-        if not urltype:
-            urltype = 'file'
-        if urltype in self.proxies:
-            proxy = self.proxies[urltype]
-            urltype, proxyhost = splittype(proxy)
-            host, selector = splithost(proxyhost)
-            url = (host, fullurl) # Signal special case to open_*()
-        else:
-            proxy = None
-        name = 'open_' + urltype
-        self.type = urltype
-        name = name.replace('-', '_')
-        if not hasattr(self, name):
-            if proxy:
-                return self.open_unknown_proxy(proxy, fullurl, data)
-            else:
-                return self.open_unknown(fullurl, data)
-        try:
-            if data is None:
-                return getattr(self, name)(url)
-            else:
-                return getattr(self, name)(url, data)
-        except HTTPError:
-            raise
-        except socket.error as msg:
-            raise_with_traceback(IOError('socket error', msg))
-
-    def open_unknown(self, fullurl, data=None):
-        """Overridable interface to open unknown URL type."""
-        type, url = splittype(fullurl)
-        raise IOError('url error', 'unknown url type', type)
-
-    def open_unknown_proxy(self, proxy, fullurl, data=None):
-        """Overridable interface to open unknown URL type."""
-        type, url = splittype(fullurl)
-        raise IOError('url error', 'invalid proxy for %s' % type, proxy)
-
-    # External interface
-    def retrieve(self, url, filename=None, reporthook=None, data=None):
-        """retrieve(url) returns (filename, headers) for a local object
-        or (tempfilename, headers) for a remote object."""
-        url = unwrap(to_bytes(url))
-        if self.tempcache and url in self.tempcache:
-            return self.tempcache[url]
-        type, url1 = splittype(url)
-        if filename is None and (not type or type == 'file'):
-            try:
-                fp = self.open_local_file(url1)
-                hdrs = fp.info()
-                fp.close()
-                return url2pathname(splithost(url1)[1]), hdrs
-            except IOError as msg:
-                pass
-        fp = self.open(url, data)
-        try:
-            headers = fp.info()
-            if filename:
-                tfp = open(filename, 'wb')
-            else:
-                import tempfile
-                garbage, path = splittype(url)
-                garbage, path = splithost(path or "")
-                path, garbage = splitquery(path or "")
-                path, garbage = splitattr(path or "")
-                suffix = os.path.splitext(path)[1]
-                (fd, filename) = tempfile.mkstemp(suffix)
-                self.__tempfiles.append(filename)
-                tfp = os.fdopen(fd, 'wb')
-            try:
-                result = filename, headers
-                if self.tempcache is not None:
-                    self.tempcache[url] = result
-                bs = 1024*8
-                size = -1
-                read = 0
-                blocknum = 0
-                if "content-length" in headers:
-                    size = int(headers["Content-Length"])
-                if reporthook:
-                    reporthook(blocknum, bs, size)
-                while 1:
-                    block = fp.read(bs)
-                    if not block:
-                        break
-                    read += len(block)
-                    tfp.write(block)
-                    blocknum += 1
-                    if reporthook:
-                        reporthook(blocknum, bs, size)
-            finally:
-                tfp.close()
-        finally:
-            fp.close()
-
-        # raise exception if actual size does not match content-length header
-        if size >= 0 and read < size:
-            raise ContentTooShortError(
-                "retrieval incomplete: got only %i out of %i bytes"
-                % (read, size), result)
-
-        return result
-
-    # Each method named open_<type> knows how to open that type of URL
-
-    def _open_generic_http(self, connection_factory, url, data):
-        """Make an HTTP connection using connection_class.
-
-        This is an internal method that should be called from
-        open_http() or open_https().
-
-        Arguments:
-        - connection_factory should take a host name and return an
-          HTTPConnection instance.
-        - url is the url to retrieval or a host, relative-path pair.
-        - data is payload for a POST request or None.
-        """
-
-        user_passwd = None
-        proxy_passwd= None
-        if isinstance(url, str):
-            host, selector = splithost(url)
-            if host:
-                user_passwd, host = splituser(host)
-                host = unquote(host)
-            realhost = host
-        else:
-            host, selector = url
-            # check whether the proxy contains authorization information
-            proxy_passwd, host = splituser(host)
-            # now we proceed with the url we want to obtain
-            urltype, rest = splittype(selector)
-            url = rest
-            user_passwd = None
-            if urltype.lower() != 'http':
-                realhost = None
-            else:
-                realhost, rest = splithost(rest)
-                if realhost:
-                    user_passwd, realhost = splituser(realhost)
-                if user_passwd:
-                    selector = "%s://%s%s" % (urltype, realhost, rest)
-                if proxy_bypass(realhost):
-                    host = realhost
-
-        if not host: raise IOError('http error', 'no host given')
-
-        if proxy_passwd:
-            proxy_passwd = unquote(proxy_passwd)
-            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
-        else:
-            proxy_auth = None
-
-        if user_passwd:
-            user_passwd = unquote(user_passwd)
-            auth = base64.b64encode(user_passwd.encode()).decode('ascii')
-        else:
-            auth = None
-        http_conn = connection_factory(host)
-        headers = {}
-        if proxy_auth:
-            headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
-        if auth:
-            headers["Authorization"] = "Basic %s" % auth
-        if realhost:
-            headers["Host"] = realhost
-
-        # Add Connection:close as we don't support persistent connections yet.
-        # This helps in closing the socket and avoiding ResourceWarning
-
-        headers["Connection"] = "close"
-
-        for header, value in self.addheaders:
-            headers[header] = value
-
-        if data is not None:
-            headers["Content-Type"] = "application/x-www-form-urlencoded"
-            http_conn.request("POST", selector, data, headers)
-        else:
-            http_conn.request("GET", selector, headers=headers)
-
-        try:
-            response = http_conn.getresponse()
-        except http_client.BadStatusLine:
-            # something went wrong with the HTTP status line
-            raise URLError("http protocol error: bad status line")
-
-        # According to RFC 2616, "2xx" code indicates that the client's
-        # request was successfully received, understood, and accepted.
-        if 200 <= response.status < 300:
-            return addinfourl(response, response.msg, "http:" + url,
-                              response.status)
-        else:
-            return self.http_error(
-                url, response.fp,
-                response.status, response.reason, response.msg, data)
-
-    def open_http(self, url, data=None):
-        """Use HTTP protocol."""
-        return self._open_generic_http(http_client.HTTPConnection, url, data)
-
-    def http_error(self, url, fp, errcode, errmsg, headers, data=None):
-        """Handle http errors.
-
-        Derived class can override this, or provide specific handlers
-        named http_error_DDD where DDD is the 3-digit error code."""
-        # First check if there's a specific handler for this error
-        name = 'http_error_%d' % errcode
-        if hasattr(self, name):
-            method = getattr(self, name)
-            if data is None:
-                result = method(url, fp, errcode, errmsg, headers)
-            else:
-                result = method(url, fp, errcode, errmsg, headers, data)
-            if result: return result
-        return self.http_error_default(url, fp, errcode, errmsg, headers)
-
-    def http_error_default(self, url, fp, errcode, errmsg, headers):
-        """Default error handler: close the connection and raise IOError."""
-        fp.close()
-        raise HTTPError(url, errcode, errmsg, headers, None)
-
-    if _have_ssl:
-        def _https_connection(self, host):
-            return http_client.HTTPSConnection(host,
-                                               key_file=self.key_file,
-                                               cert_file=self.cert_file)
-
-        def open_https(self, url, data=None):
-            """Use HTTPS protocol."""
-            return self._open_generic_http(self._https_connection, url, data)
-
-    def open_file(self, url):
-        """Use local file or FTP depending on form of URL."""
-        if not isinstance(url, str):
-            raise URLError('file error: proxy support for file protocol currently not implemented')
-        if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
-            raise ValueError("file:// scheme is supported only on localhost")
-        else:
-            return self.open_local_file(url)
-
-    def open_local_file(self, url):
-        """Use local file."""
-        import future.backports.email.utils as email_utils
-        import mimetypes
-        host, file = splithost(url)
-        localname = url2pathname(file)
-        try:
-            stats = os.stat(localname)
-        except OSError as e:
-            raise URLError(e.strerror, e.filename)
-        size = stats.st_size
-        modified = email_utils.formatdate(stats.st_mtime, usegmt=True)
-        mtype = mimetypes.guess_type(url)[0]
-        headers = email.message_from_string(
-            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
-            (mtype or 'text/plain', size, modified))
-        if not host:
-            urlfile = file
-            if file[:1] == '/':
-                urlfile = 'file://' + file
-            return addinfourl(open(localname, 'rb'), headers, urlfile)
-        host, port = splitport(host)
-        if (not port
-                and socket.gethostbyname(host) in ((localhost(),) + thishost())):
-            urlfile = file
-            if file[:1] == '/':
-                urlfile = 'file://' + file
-            elif file[:2] == './':
-                raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
-            return addinfourl(open(localname, 'rb'), headers, urlfile)
-        raise URLError('local file error: not on local host')
-
-    def open_ftp(self, url):
-        """Use FTP protocol."""
-        if not isinstance(url, str):
-            raise URLError('ftp error: proxy support for ftp protocol currently not implemented')
-        import mimetypes
-        host, path = splithost(url)
-        if not host: raise URLError('ftp error: no host given')
-        host, port = splitport(host)
-        user, host = splituser(host)
-        if user: user, passwd = splitpasswd(user)
-        else: passwd = None
-        host = unquote(host)
-        user = unquote(user or '')
-        passwd = unquote(passwd or '')
-        host = socket.gethostbyname(host)
-        if not port:
-            import ftplib
-            port = ftplib.FTP_PORT
-        else:
-            port = int(port)
-        path, attrs = splitattr(path)
-        path = unquote(path)
-        dirs = path.split('/')
-        dirs, file = dirs[:-1], dirs[-1]
-        if dirs and not dirs[0]: dirs = dirs[1:]
-        if dirs and not dirs[0]: dirs[0] = '/'
-        key = user, host, port, '/'.join(dirs)
-        # XXX thread unsafe!
-        if len(self.ftpcache) > MAXFTPCACHE:
-            # Prune the cache, rather arbitrarily
-            for k in self.ftpcache.keys():
-                if k != key:
-                    v = self.ftpcache[k]
-                    del self.ftpcache[k]
-                    v.close()
-        try:
-            if key not in self.ftpcache:
-                self.ftpcache[key] = \
-                    ftpwrapper(user, passwd, host, port, dirs)
-            if not file: type = 'D'
-            else: type = 'I'
-            for attr in attrs:
-                attr, value = splitvalue(attr)
-                if attr.lower() == 'type' and \
-                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
-                    type = value.upper()
-            (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
-            mtype = mimetypes.guess_type("ftp:" + url)[0]
-            headers = ""
-            if mtype:
-                headers += "Content-Type: %s\n" % mtype
-            if retrlen is not None and retrlen >= 0:
-                headers += "Content-Length: %d\n" % retrlen
-            headers = email.message_from_string(headers)
-            return addinfourl(fp, headers, "ftp:" + url)
-        except ftperrors() as exp:
-            raise_with_traceback(URLError('ftp error %r' % exp))
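open_data below follows the data: URL grammar quoted in its comments; the decode step in isolation, as a standalone sketch (the example URL is invented):

    import base64

    url = "data:text/plain;base64,SGVsbG8sIHdvcmxkIQ=="
    mediatype, payload = url[len("data:"):].split(",", 1)
    if mediatype.endswith(";base64"):
        body = base64.decodebytes(payload.encode("ascii"))
    else:
        body = payload.encode("ascii")
    print(body)  # b'Hello, world!'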
-
-    def open_data(self, url, data=None):
-        """Use "data" URL."""
-        if not isinstance(url, str):
-            raise URLError('data error: proxy support for data protocol currently not implemented')
-        # ignore POSTed data
-        #
-        # syntax of data URLs:
-        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
-        # mediatype := [ type "/" subtype ] *( ";" parameter )
-        # data      := *urlchar
-        # parameter := attribute "=" value
-        try:
-            [type, data] = url.split(',', 1)
-        except ValueError:
-            raise IOError('data error', 'bad data URL')
-        if not type:
-            type = 'text/plain;charset=US-ASCII'
-        semi = type.rfind(';')
-        if semi >= 0 and '=' not in type[semi:]:
-            encoding = type[semi+1:]
-            type = type[:semi]
-        else:
-            encoding = ''
-        msg = []
-        msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
-                                            time.gmtime(time.time())))
-        msg.append('Content-type: %s' % type)
-        if encoding == 'base64':
-            # XXX is this encoding/decoding ok?
-            data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
-        else:
-            data = unquote(data)
-        msg.append('Content-Length: %d' % len(data))
-        msg.append('')
-        msg.append(data)
-        msg = '\n'.join(msg)
-        headers = email.message_from_string(msg)
-        f = io.StringIO(msg)
-        #f.fileno = None     # needed for addinfourl
-        return addinfourl(f, headers, url)
-
-
-class FancyURLopener(URLopener):
-    """Derived class with handlers for errors we can handle (perhaps)."""
-
-    def __init__(self, *args, **kwargs):
-        URLopener.__init__(self, *args, **kwargs)
-        self.auth_cache = {}
-        self.tries = 0
-        self.maxtries = 10
-
-    def http_error_default(self, url, fp, errcode, errmsg, headers):
-        """Default error handling -- don't raise an exception."""
-        return addinfourl(fp, headers, "http:" + url, errcode)
-
-    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
-        """Error 302 -- relocated (temporarily)."""
-        self.tries += 1
-        if self.maxtries and self.tries >= self.maxtries:
-            if hasattr(self, "http_error_500"):
-                meth = self.http_error_500
-            else:
-                meth = self.http_error_default
-            self.tries = 0
-            return meth(url, fp, 500,
-                        "Internal Server Error: Redirect Recursion", headers)
-        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
-                                        data)
-        self.tries = 0
-        return result
-
-    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
-        if 'location' in headers:
-            newurl = headers['location']
-        elif 'uri' in headers:
-            newurl = headers['uri']
-        else:
-            return
-        fp.close()
-
-        # In case the server sent a relative URL, join with original:
-        newurl = urljoin(self.type + ":" + url, newurl)
-
-        urlparts = urlparse(newurl)
-
-        # For security reasons, we don't allow redirection to anything other
-        # than http, https and ftp.
-
-        # We are using newer HTTPError with older redirect_internal method
-        # This older method will get deprecated in 3.3
-
-        if urlparts.scheme not in ('http', 'https', 'ftp', ''):
-            raise HTTPError(newurl, errcode,
-                            errmsg +
-                            " Redirection to url '%s' is not allowed." % newurl,
-                            headers, fp)
-
-        return self.open(newurl)
-
-    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
-        """Error 301 -- also relocated (permanently)."""
-        return self.http_error_302(url, fp, errcode, errmsg, headers, data)
-
-    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
-        """Error 303 -- also relocated (essentially identical to 302)."""
-        return self.http_error_302(url, fp, errcode, errmsg, headers, data)
-
-    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
-        """Error 307 -- relocated, but turn POST into error."""
-        if data is None:
-            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
-        else:
-            return self.http_error_default(url, fp, errcode, errmsg, headers)
-
-    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
-            retry=False):
-        """Error 401 -- authentication required.
-        This function supports Basic authentication only."""
-        if 'www-authenticate' not in headers:
-            URLopener.http_error_default(self, url, fp,
-                                         errcode, errmsg, headers)
-        stuff = headers['www-authenticate']
-        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
-        if not match:
-            URLopener.http_error_default(self, url, fp,
-                                         errcode, errmsg, headers)
-        scheme, realm = match.groups()
-        if scheme.lower() != 'basic':
-            URLopener.http_error_default(self, url, fp,
-                                         errcode, errmsg, headers)
-        if not retry:
-            URLopener.http_error_default(self, url, fp, errcode, errmsg,
-                                         headers)
-        name = 'retry_' + self.type + '_basic_auth'
-        if data is None:
-            return getattr(self,name)(url, realm)
-        else:
-            return getattr(self,name)(url, realm, data)
-
-    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
-            retry=False):
-        """Error 407 -- proxy authentication required.
-        This function supports Basic authentication only."""
-        if 'proxy-authenticate' not in headers:
-            URLopener.http_error_default(self, url, fp,
-                                         errcode, errmsg, headers)
-        stuff = headers['proxy-authenticate']
-        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
-        if not match:
-            URLopener.http_error_default(self, url, fp,
-                                         errcode, errmsg, headers)
-        scheme, realm = match.groups()
-        if scheme.lower() != 'basic':
-            URLopener.http_error_default(self, url, fp,
-                                         errcode, errmsg, headers)
-        if not retry:
-            URLopener.http_error_default(self, url, fp, errcode, errmsg,
-                                         headers)
-        name = 'retry_proxy_' + self.type + '_basic_auth'
-        if data is None:
-            return getattr(self,name)(url, realm)
-        else:
-            return getattr(self,name)(url, realm, data)
-
-    def retry_proxy_http_basic_auth(self, url, realm, data=None):
-        host, selector = splithost(url)
-        newurl = 'http://' + host + selector
-        proxy = self.proxies['http']
-        urltype, proxyhost = splittype(proxy)
-        proxyhost, proxyselector = splithost(proxyhost)
-        i = proxyhost.find('@') + 1
-        proxyhost = proxyhost[i:]
-        user, passwd = self.get_user_passwd(proxyhost, realm, i)
-        if not (user or passwd): return None
-        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
-                                  quote(passwd, safe=''), proxyhost)
-        self.proxies['http'] = 'http://' + proxyhost + proxyselector
-        if data is None:
-            return self.open(newurl)
-        else:
-            return self.open(newurl, data)
-
-    def retry_proxy_https_basic_auth(self, url, realm, data=None):
-        host, selector = splithost(url)
-        newurl = 'https://' + host + selector
-        proxy = self.proxies['https']
-        urltype, proxyhost = splittype(proxy)
-        proxyhost, proxyselector = splithost(proxyhost)
-        i = proxyhost.find('@') + 1
-        proxyhost = proxyhost[i:]
-        user, passwd = self.get_user_passwd(proxyhost, realm, i)
-        if not (user or passwd): return None
-        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
-                                  quote(passwd, safe=''), proxyhost)
-        self.proxies['https'] = 'https://' + proxyhost + proxyselector
-        if data is None:
-            return self.open(newurl)
-        else:
-            return self.open(newurl, data)
-
-    def retry_http_basic_auth(self, url, realm, data=None):
-        host, selector = splithost(url)
-        i = host.find('@') + 1
-        host = host[i:]
-        user, passwd = self.get_user_passwd(host, realm, i)
-        if not (user or passwd): return None
-        host = "%s:%s@%s" % (quote(user, safe=''),
-                             quote(passwd, safe=''), host)
-        newurl = 'http://' + host + selector
-        if data is None:
-            return self.open(newurl)
-        else:
-            return self.open(newurl, data)
-
-    def retry_https_basic_auth(self, url, realm, data=None):
-        host, selector = splithost(url)
-        i = host.find('@') + 1
-        host = host[i:]
-        user, passwd = self.get_user_passwd(host, realm, i)
-        if not (user or passwd): return None
-        host = "%s:%s@%s" % (quote(user, safe=''),
-                             quote(passwd, safe=''), host)
-        newurl = 'https://' + host + selector
-        if data is None:
-            return self.open(newurl)
-        else:
-            return self.open(newurl, data)
-
-    def get_user_passwd(self, host, realm, clear_cache=0):
-        key = realm + '@' + host.lower()
-        if key in self.auth_cache:
-            if clear_cache:
-                del self.auth_cache[key]
-            else:
-                return self.auth_cache[key]
-        user, passwd = self.prompt_user_passwd(host, realm)
-        if user or passwd: self.auth_cache[key] = (user, passwd)
-        return user, passwd
-
-    def prompt_user_passwd(self, host, realm):
-        """Override this in a GUI environment!"""
-        import getpass
-        try:
-            user = input("Enter username for %s at %s: " % (realm, host))
-            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
-                (user, realm, host))
-            return user, passwd
-        except KeyboardInterrupt:
-            print()
-            return None, None
-
-
-# Utility functions
-
-_localhost = None
-def localhost():
-    """Return the IP address of the magic hostname 'localhost'."""
-    global _localhost
-    if _localhost is None:
-        _localhost = socket.gethostbyname('localhost')
-    return _localhost
-
-_thishost = None
-def thishost():
-    """Return the IP addresses of the current host."""
-    global _thishost
-    if _thishost is None:
-        try:
-            _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
-        except socket.gaierror:
-            _thishost = tuple(socket.gethostbyname_ex('localhost')[2])
-    return _thishost
-
-_ftperrors = None
-def ftperrors():
-    """Return the set of errors raised by the FTP class."""
-    global _ftperrors
-    if _ftperrors is None:
-        import ftplib
-        _ftperrors = ftplib.all_errors
-    return _ftperrors
-
-_noheaders = None
-def noheaders():
-    """Return an empty email Message object."""
-    global _noheaders
-    if _noheaders is None:
-        _noheaders = email.message_from_string("")
-    return _noheaders
-
-
-# Utility classes
-
-class ftpwrapper(object):
-    """Class used by open_ftp() for cache of open FTP connections."""
-
-    def __init__(self, user, passwd, host, port, dirs, timeout=None,
-                 persistent=True):
-        self.user = user
-        self.passwd = passwd
-        self.host = host
-        self.port = port
-        self.dirs = dirs
-        self.timeout = timeout
-        self.refcount = 0
-        self.keepalive = persistent
-        self.init()
-
-    def init(self):
-        import ftplib
-        self.busy = 0
-        self.ftp = ftplib.FTP()
-        self.ftp.connect(self.host, self.port, self.timeout)
-        self.ftp.login(self.user, self.passwd)
-        _target = '/'.join(self.dirs)
-        self.ftp.cwd(_target)
-
-    def retrfile(self, file, type):
-        import ftplib
-        self.endtransfer()
-        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
-        else: cmd = 'TYPE ' + type; isdir = 0
-        try:
-            self.ftp.voidcmd(cmd)
-        except ftplib.all_errors:
-            self.init()
-            self.ftp.voidcmd(cmd)
-        conn = None
-        if file and not isdir:
-            # Try to retrieve as a file
-            try:
-                cmd = 'RETR ' + file
-                conn, retrlen = self.ftp.ntransfercmd(cmd)
-            except ftplib.error_perm as reason:
-                if str(reason)[:3] != '550':
-                    raise_with_traceback(URLError('ftp error: %r' % reason))
-        if not conn:
-            # Set transfer mode to ASCII!
-            self.ftp.voidcmd('TYPE A')
-            # Try a directory listing. Verify that directory exists.
-            if file:
-                pwd = self.ftp.pwd()
-                try:
-                    try:
-                        self.ftp.cwd(file)
-                    except ftplib.error_perm as reason:
-                        ### Was:
-                        # raise URLError('ftp error: %r' % reason) from reason
-                        exc = URLError('ftp error: %r' % reason)
-                        exc.__cause__ = reason
-                        raise exc
-                finally:
-                    self.ftp.cwd(pwd)
-                cmd = 'LIST ' + file
-            else:
-                cmd = 'LIST'
-            conn, retrlen = self.ftp.ntransfercmd(cmd)
-        self.busy = 1
-
-        ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
-        self.refcount += 1
-        conn.close()
-        # Pass back both a suitably decorated object and a retrieval length
-        return (ftpobj, retrlen)
-
-    def endtransfer(self):
-        self.busy = 0
-
-    def close(self):
-        self.keepalive = False
-        if self.refcount <= 0:
-            self.real_close()
-
-    def file_close(self):
-        self.endtransfer()
-        self.refcount -= 1
-        if self.refcount <= 0 and not self.keepalive:
-            self.real_close()
-
-    def real_close(self):
-        self.endtransfer()
-        try:
-            self.ftp.close()
-        except ftperrors():
-            pass
-
-# Proxy handling
-def getproxies_environment():
-    """Return a dictionary of scheme -> proxy server URL mappings.
-
-    Scan the environment for variables named <scheme>_proxy;
-    this seems to be the standard convention. If you need a
-    different way, you can pass a proxies dictionary to the
-    [Fancy]URLopener constructor.
-
-    """
-    proxies = {}
-    for name, value in os.environ.items():
-        name = name.lower()
-        if value and name[-6:] == '_proxy':
-            proxies[name[:-6]] = value
-    return proxies
-
-def proxy_bypass_environment(host):
-    """Test if proxies should not be used for a particular host.
-
-    Checks the environment for a variable named no_proxy, which should
-    be a list of DNS suffixes separated by commas, or '*' for all hosts.
-    """
-    no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
-    # '*' is special case for always bypass
-    if no_proxy == '*':
-        return 1
-    # strip port off host
-    hostonly, port = splitport(host)
-    # check if the host ends with any of the DNS suffixes
-    no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')]
-    for name in no_proxy_list:
-        if name and (hostonly.endswith(name) or host.endswith(name)):
-            return 1
-    # otherwise, don't bypass
-    return 0
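The no_proxy convention checked above can be exercised through the function of the same name in the stdlib urllib.request (hostnames are invented):

    import os
    import urllib.request

    os.environ["no_proxy"] = "localhost,.internal.example.com"
    print(urllib.request.proxy_bypass_environment("db.internal.example.com"))
    # truthy: suffix matches, so the proxy is bypassed
    print(urllib.request.proxy_bypass_environment("www.python.org"))
    # falsy: no suffix matches, so the proxy is used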
-
-
-# This code tests an OSX specific data structure but is testable on all
-# platforms
-def _proxy_bypass_macosx_sysconf(host, proxy_settings):
-    """
-    Return True iff this host shouldn't be accessed using a proxy
-
-    This function uses the MacOSX framework SystemConfiguration
-    to fetch the proxy information.
-
-    proxy_settings come from _scproxy._get_proxy_settings or get mocked ie:
-    { 'exclude_simple': bool,
-      'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
-    }
-    """
-    from fnmatch import fnmatch
-
-    hostonly, port = splitport(host)
-
-    def ip2num(ipAddr):
-        parts = ipAddr.split('.')
-        parts = list(map(int, parts))
-        if len(parts) != 4:
-            parts = (parts + [0, 0, 0, 0])[:4]
-        return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
-
-    # Check for simple host names:
-    if '.' not in host:
-        if proxy_settings['exclude_simple']:
-            return True
-
-    hostIP = None
-
-    for value in proxy_settings.get('exceptions', ()):
-        # Items in the list are strings like these: *.local, 169.254/16
-        if not value: continue
-
-        m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
-        if m is not None:
-            if hostIP is None:
-                try:
-                    hostIP = socket.gethostbyname(hostonly)
-                    hostIP = ip2num(hostIP)
-                except socket.error:
-                    continue
-
-            base = ip2num(m.group(1))
-            mask = m.group(2)
-            if mask is None:
-                mask = 8 * (m.group(1).count('.') + 1)
-            else:
-                mask = int(mask[1:])
-            mask = 32 - mask
-
-            if (hostIP >> mask) == (base >> mask):
-                return True
-
-        elif fnmatch(host, value):
-            return True
-
-    return False
-
-
-if sys.platform == 'darwin':
-    from _scproxy import _get_proxy_settings, _get_proxies
-
-    def proxy_bypass_macosx_sysconf(host):
-        proxy_settings = _get_proxy_settings()
-        return _proxy_bypass_macosx_sysconf(host, proxy_settings)
-
-    def getproxies_macosx_sysconf():
-        """Return a dictionary of scheme -> proxy server URL mappings.
-
-        This function uses the MacOSX framework SystemConfiguration
-        to fetch the proxy information.
-        """
-        return _get_proxies()
-
-
-
-    def proxy_bypass(host):
-        if getproxies_environment():
-            return proxy_bypass_environment(host)
-        else:
-            return proxy_bypass_macosx_sysconf(host)
-
-    def getproxies():
-        return getproxies_environment() or getproxies_macosx_sysconf()
-
-
-elif os.name == 'nt':
-    def getproxies_registry():
-        """Return a dictionary of scheme -> proxy server URL mappings.
-
-        Win32 uses the registry to store proxies.
-
-        """
-        proxies = {}
-        try:
-            import winreg
-        except ImportError:
-            # Std module, so should be around - but you never know!
-            return proxies
-        try:
-            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
-                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
-            proxyEnable = winreg.QueryValueEx(internetSettings,
-                                              'ProxyEnable')[0]
-            if proxyEnable:
-                # Returned as Unicode but problems if not converted to ASCII
-                proxyServer = str(winreg.QueryValueEx(internetSettings,
-                                                      'ProxyServer')[0])
-                if '=' in proxyServer:
-                    # Per-protocol settings
-                    for p in proxyServer.split(';'):
-                        protocol, address = p.split('=', 1)
-                        # See if address has a type:// prefix
-                        if not re.match('^([^/:]+)://', address):
-                            address = '%s://%s' % (protocol, address)
-                        proxies[protocol] = address
-                else:
-                    # Use one setting for all protocols
-                    if proxyServer[:5] == 'http:':
-                        proxies['http'] = proxyServer
-                    else:
-                        proxies['http'] = 'http://%s' % proxyServer
-                        proxies['https'] = 'https://%s' % proxyServer
-                        proxies['ftp'] = 'ftp://%s' % proxyServer
-            internetSettings.Close()
-        except (WindowsError, ValueError, TypeError):
-            # Either registry key not found etc, or the value in an
-            # unexpected format.
-            # proxies already set up to be empty so nothing to do
-            pass
-        return proxies
-
-    def getproxies():
-        """Return a dictionary of scheme -> proxy server URL mappings.
-
-        Returns settings gathered from the environment, if specified,
-        or the registry.
-
-        """
-        return getproxies_environment() or getproxies_registry()
-
-    def proxy_bypass_registry(host):
-        try:
-            import winreg
-        except ImportError:
-            # Std modules, so should be around - but you never know!
-            return 0
-        try:
-            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
-                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
-            proxyEnable = winreg.QueryValueEx(internetSettings,
-                                              'ProxyEnable')[0]
-            proxyOverride = str(winreg.QueryValueEx(internetSettings,
-                                                    'ProxyOverride')[0])
-            # ^^^^ Returned as Unicode but problems if not converted to ASCII
-        except WindowsError:
-            return 0
-        if not proxyEnable or not proxyOverride:
-            return 0
-        # try to make a host list from name and IP address.
-        rawHost, port = splitport(host)
-        host = [rawHost]
-        try:
-            addr = socket.gethostbyname(rawHost)
-            if addr != rawHost:
-                host.append(addr)
-        except socket.error:
-            pass
-        try:
-            fqdn = socket.getfqdn(rawHost)
-            if fqdn != rawHost:
-                host.append(fqdn)
-        except socket.error:
-            pass
-        # make a check value list from the registry entry: replace the
-        # '<local>' string by the localhost entry and the corresponding
-        # canonical entry.
-        proxyOverride = proxyOverride.split(';')
-        # now check if we match one of the registry values.
-        for test in proxyOverride:
-            if test == '<local>':
-                if '.' not in rawHost:
-                    return 1
-            test = test.replace(".", r"\.")     # mask dots
-            test = test.replace("*", r".*")     # change glob sequence
-            test = test.replace("?", r".")      # change glob char
-            for val in host:
-                if re.match(test, val, re.I):
-                    return 1
-        return 0
-
-    def proxy_bypass(host):
-        """Return a dictionary of scheme -> proxy server URL mappings.
-
-        Returns settings gathered from the environment, if specified,
-        or the registry.
-
-        """
-        if getproxies_environment():
-            return proxy_bypass_environment(host)
-        else:
-            return proxy_bypass_registry(host)
-
-else:
-    # By default use environment variables
-    getproxies = getproxies_environment
-    proxy_bypass = proxy_bypass_environment
diff --git a/pype/vendor/future/backports/urllib/response.py b/pype/vendor/future/backports/urllib/response.py
deleted file mode 100644
index adbf6e5ae3..0000000000
--- a/pype/vendor/future/backports/urllib/response.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""Response classes used by urllib.
-
-The base class, addbase, defines a minimal file-like interface,
-including read() and readline(). The typical response object is an
-addinfourl instance, which defines an info() method that returns
-headers and a geturl() method that returns the url.
-"""
-from __future__ import absolute_import, division, unicode_literals
-from future.builtins import object
-
-class addbase(object):
-    """Base class for addinfo and addclosehook."""
-
-    # XXX Add a method to expose the timeout on the underlying socket?
-
-    def __init__(self, fp):
-        # TODO(jhylton): Is there a better way to delegate using io?
-        self.fp = fp
-        self.read = self.fp.read
-        self.readline = self.fp.readline
-        # TODO(jhylton): Make sure an object with readlines() is also iterable
-        if hasattr(self.fp, "readlines"):
-            self.readlines = self.fp.readlines
-        if hasattr(self.fp, "fileno"):
-            self.fileno = self.fp.fileno
-        else:
-            self.fileno = lambda: None
-
-    def __iter__(self):
-        # Assigning `__iter__` to the instance doesn't work as intended
-        # because the iter builtin does something like `cls.__iter__(obj)`
-        # and thus fails to find the _bound_ method `obj.__iter__`.
-        # Returning just `self.fp` works for built-in file objects but
-        # might not work for general file-like objects.
-        return iter(self.fp)
-
-    def __repr__(self):
-        return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
-                                             id(self), self.fp)
-
-    def close(self):
-        if self.fp:
-            self.fp.close()
-        self.fp = None
-        self.read = None
-        self.readline = None
-        self.readlines = None
-        self.fileno = None
-        self.__iter__ = None
-        self.__next__ = None
-
-    def __enter__(self):
-        if self.fp is None:
-            raise ValueError("I/O operation on closed file")
-        return self
-
-    def __exit__(self, type, value, traceback):
-        self.close()
-
-class addclosehook(addbase):
-    """Class to add a close hook to an open file."""
-
-    def __init__(self, fp, closehook, *hookargs):
-        addbase.__init__(self, fp)
-        self.closehook = closehook
-        self.hookargs = hookargs
-
-    def close(self):
-        if self.closehook:
-            self.closehook(*self.hookargs)
-            self.closehook = None
-            self.hookargs = None
-        addbase.close(self)
-
-class addinfo(addbase):
-    """class to add an info() method to an open file."""
-
-    def __init__(self, fp, headers):
-        addbase.__init__(self, fp)
-        self.headers = headers
-
-    def info(self):
-        return self.headers
-
-class addinfourl(addbase):
-    """class to add info() and geturl() methods to an open file."""
-
-    def __init__(self, fp, headers, url, code=None):
-        addbase.__init__(self, fp)
-        self.headers = headers
-        self.url = url
-        self.code = code
-
-    def info(self):
-        return self.headers
-
-    def getcode(self):
-        return self.code
-
-    def geturl(self):
-        return self.url
-
-del absolute_import, division, unicode_literals, object
diff --git a/pype/vendor/future/backports/urllib/robotparser.py b/pype/vendor/future/backports/urllib/robotparser.py
deleted file mode 100644
index a0f36511b4..0000000000
--- a/pype/vendor/future/backports/urllib/robotparser.py
+++ /dev/null
@@ -1,211 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-from future.builtins import str
-""" robotparser.py
-
-    Copyright (C) 2000 Bastian Kleineidam
-
-    You can choose between two licenses when using this package:
-    1) GNU GPLv2
-    2) PSF license for Python 2.2
-
-    The robots.txt Exclusion Protocol is implemented as specified in
-    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
-"""
-
-# Was: import urllib.parse, urllib.request
-from future.backports import urllib
-from future.backports.urllib import parse as _parse, request as _request
-urllib.parse = _parse
-urllib.request = _request
-
-
-__all__ = ["RobotFileParser"]
-
-class RobotFileParser(object):
-    """ This class provides a set of methods to read, parse and answer
-    questions about a single robots.txt file.
-
-    """
-
-    def __init__(self, url=''):
-        self.entries = []
-        self.default_entry = None
-        self.disallow_all = False
-        self.allow_all = False
-        self.set_url(url)
-        self.last_checked = 0
-
-    def mtime(self):
-        """Returns the time the robots.txt file was last fetched.
-
-        This is useful for long-running web spiders that need to
-        check for new robots.txt files periodically.
-
-        """
-        return self.last_checked
-
-    def modified(self):
-        """Sets the time the robots.txt file was last fetched to the
-        current time.
-
-        """
-        import time
-        self.last_checked = time.time()
-
-    def set_url(self, url):
-        """Sets the URL referring to a robots.txt file."""
-        self.url = url
-        self.host, self.path = urllib.parse.urlparse(url)[1:3]
-
-    def read(self):
-        """Reads the robots.txt URL and feeds it to the parser."""
-        try:
-            f = urllib.request.urlopen(self.url)
-        except urllib.error.HTTPError as err:
-            if err.code in (401, 403):
-                self.disallow_all = True
-            elif err.code >= 400:
-                self.allow_all = True
-        else:
-            raw = f.read()
-            self.parse(raw.decode("utf-8").splitlines())
-
-    def _add_entry(self, entry):
-        if "*" in entry.useragents:
-            # the default entry is considered last
-            if self.default_entry is None:
-                # the first default entry wins
-                self.default_entry = entry
-        else:
-            self.entries.append(entry)
-
-    def parse(self, lines):
-        """Parse the input lines from a robots.txt file.
-
-        We allow that a user-agent: line is not preceded by
-        one or more blank lines.
-        """
-        # states:
-        #   0: start state
-        #   1: saw user-agent line
-        #   2: saw an allow or disallow line
-        state = 0
-        entry = Entry()
-
-        for line in lines:
-            if not line:
-                if state == 1:
-                    entry = Entry()
-                    state = 0
-                elif state == 2:
-                    self._add_entry(entry)
-                    entry = Entry()
-                    state = 0
-            # remove optional comment and strip line
-            i = line.find('#')
-            if i >= 0:
-                line = line[:i]
-            line = line.strip()
-            if not line:
-                continue
-            line = line.split(':', 1)
-            if len(line) == 2:
-                line[0] = line[0].strip().lower()
-                line[1] = urllib.parse.unquote(line[1].strip())
-                if line[0] == "user-agent":
-                    if state == 2:
-                        self._add_entry(entry)
-                        entry = Entry()
-                    entry.useragents.append(line[1])
-                    state = 1
-                elif line[0] == "disallow":
-                    if state != 0:
-                        entry.rulelines.append(RuleLine(line[1], False))
-                        state = 2
-                elif line[0] == "allow":
-                    if state != 0:
-                        entry.rulelines.append(RuleLine(line[1], True))
-                        state = 2
-        if state == 2:
-            self._add_entry(entry)
-
-
-    def can_fetch(self, useragent, url):
-        """using the parsed robots.txt decide if useragent can fetch url"""
-        if self.disallow_all:
-            return False
-        if self.allow_all:
-            return True
-        # search for given user agent matches
-        # the first match counts
-        parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
-        url = urllib.parse.urlunparse(('','',parsed_url.path,
-            parsed_url.params,parsed_url.query, parsed_url.fragment))
-        url = urllib.parse.quote(url)
-        if not url:
-            url = "/"
-        for entry in self.entries:
-            if entry.applies_to(useragent):
-                return entry.allowance(url)
-        # try the default entry last
-        if self.default_entry:
-            return self.default_entry.allowance(url)
-        # agent not found ==> access granted
-        return True
-
-    def __str__(self):
-        return ''.join([str(entry) + "\n" for entry in self.entries])
-
-
-class RuleLine(object):
-    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
-       (allowance==False) followed by a path."""
-    def __init__(self, path, allowance):
-        if path == '' and not allowance:
-            # an empty value means allow all
-            allowance = True
-        self.path = urllib.parse.quote(path)
-        self.allowance = allowance
-
-    def applies_to(self, filename):
-        return self.path == "*" or filename.startswith(self.path)
-
-    def __str__(self):
-        return (self.allowance and "Allow" or "Disallow") + ": " + self.path
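RobotFileParser.parse() and can_fetch() above are exercised like this; a minimal sketch against the stdlib urllib.robotparser equivalent (the robots.txt content is invented):

    import urllib.robotparser

    rp = urllib.robotparser.RobotFileParser()
    rp.parse([
        "User-agent: *",
        "Disallow: /private/",
        "Allow: /",
    ])
    print(rp.can_fetch("MyBot/1.0", "http://example.com/private/x"))   # False
    print(rp.can_fetch("MyBot/1.0", "http://example.com/index.html"))  # True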
in self.rulelines: - ret.extend([str(line), "\n"]) - return ''.join(ret) - - def applies_to(self, useragent): - """check if this entry applies to the specified agent""" - # split the name token and make it lower case - useragent = useragent.split("/")[0].lower() - for agent in self.useragents: - if agent == '*': - # we have the catch-all agent - return True - agent = agent.lower() - if agent in useragent: - return True - return False - - def allowance(self, filename): - """Preconditions: - - our agent applies to this entry - - filename is URL decoded""" - for line in self.rulelines: - if line.applies_to(filename): - return line.allowance - return True diff --git a/pype/vendor/future/backports/xmlrpc/__init__.py b/pype/vendor/future/backports/xmlrpc/__init__.py deleted file mode 100644 index 196d378857..0000000000 --- a/pype/vendor/future/backports/xmlrpc/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# This directory is a Python package. diff --git a/pype/vendor/future/backports/xmlrpc/client.py b/pype/vendor/future/backports/xmlrpc/client.py deleted file mode 100644 index b78e5bad64..0000000000 --- a/pype/vendor/future/backports/xmlrpc/client.py +++ /dev/null @@ -1,1496 +0,0 @@ -# -# XML-RPC CLIENT LIBRARY -# $Id$ -# -# an XML-RPC client interface for Python. -# -# the marshalling and response parser code can also be used to -# implement XML-RPC servers. -# -# Notes: -# this version is designed to work with Python 2.1 or newer. -# -# History: -# 1999-01-14 fl Created -# 1999-01-15 fl Changed dateTime to use localtime -# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service -# 1999-01-19 fl Fixed array data element (from Skip Montanaro) -# 1999-01-21 fl Fixed dateTime constructor, etc. -# 1999-02-02 fl Added fault handling, handle empty sequences, etc. 
-# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro) -# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8) -# 2000-11-28 fl Changed boolean to check the truth value of its argument -# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches -# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1) -# 2001-03-28 fl Make sure response tuple is a singleton -# 2001-03-29 fl Don't require empty params element (from Nicholas Riley) -# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2) -# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod) -# 2001-09-03 fl Allow Transport subclass to override getparser -# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup) -# 2001-10-01 fl Remove containers from memo cache when done with them -# 2001-10-01 fl Use faster escape method (80% dumps speedup) -# 2001-10-02 fl More dumps microtuning -# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum) -# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow -# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems) -# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix) -# 2002-03-17 fl Avoid buffered read when possible (from James Rucker) -# 2002-04-07 fl Added pythondoc comments -# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers -# 2002-05-15 fl Added error constants (from Andrew Kuchling) -# 2002-06-27 fl Merged with Python CVS version -# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby) -# 2003-01-22 sm Add support for the bool type -# 2003-02-27 gvr Remove apply calls -# 2003-04-24 sm Use cStringIO if available -# 2003-04-25 ak Add support for nil -# 2003-06-15 gn Add support for time.struct_time -# 2003-07-12 gp Correct marshalling of Faults -# 2003-10-31 mvl Add multicall support -# 2004-08-20 mvl Bump minimum supported Python version to 2.1 -# -# Copyright (c) 1999-2002 by Secret Labs AB. -# Copyright (c) 1999-2002 by Fredrik Lundh. -# -# info@pythonware.com -# http://www.pythonware.com -# -# -------------------------------------------------------------------- -# The XML-RPC client interface is -# -# Copyright (c) 1999-2002 by Secret Labs AB -# Copyright (c) 1999-2002 by Fredrik Lundh -# -# By obtaining, using, and/or copying this software and/or its -# associated documentation, you agree that you have read, understood, -# and will comply with the following terms and conditions: -# -# Permission to use, copy, modify, and distribute this software and -# its associated documentation for any purpose and without fee is -# hereby granted, provided that the above copyright notice appears in -# all copies, and that both that copyright notice and this permission -# notice appear in supporting documentation, and that the name of -# Secret Labs AB or the author not be used in advertising or publicity -# pertaining to distribution of the software without specific, written -# prior permission. -# -# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD -# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- -# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR -# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY -# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS -# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE -# OF THIS SOFTWARE. 
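Note: this hunk deletes the vendored python-future backport of xmlrpc.client, in line with the PR's broader move away from pype.vendor imports. On Python 3 the same client interface ships in the standard library; the sketch below is a minimal equivalent, with the endpoint URL and the availability of introspection assumed for illustration only.

    # Minimal stdlib equivalent of the removed backport's client side.
    # The URL is hypothetical; introspection works only if the server
    # registered it.
    from xmlrpc.client import ServerProxy, Fault

    proxy = ServerProxy("http://localhost:8000")
    try:
        print(proxy.system.listMethods())
    except Fault as err:
        print("XML-RPC fault:", err.faultCode, err.faultString)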
-# -------------------------------------------------------------------- - -""" -Ported using Python-Future from the Python 3.3 standard library. - -An XML-RPC client interface for Python. - -The marshalling and response parser code can also be used to -implement XML-RPC servers. - -Exported exceptions: - - Error Base class for client errors - ProtocolError Indicates an HTTP protocol error - ResponseError Indicates a broken response package - Fault Indicates an XML-RPC fault package - -Exported classes: - - ServerProxy Represents a logical connection to an XML-RPC server - - MultiCall Executor of boxcared xmlrpc requests - DateTime dateTime wrapper for an ISO 8601 string or time tuple or - localtime integer value to generate a "dateTime.iso8601" - XML-RPC value - Binary binary data wrapper - - Marshaller Generate an XML-RPC params chunk from a Python data structure - Unmarshaller Unmarshal an XML-RPC response from incoming XML event message - Transport Handles an HTTP transaction to an XML-RPC server - SafeTransport Handles an HTTPS transaction to an XML-RPC server - -Exported constants: - - (none) - -Exported functions: - - getparser Create instance of the fastest available parser & attach - to an unmarshalling object - dumps Convert an argument tuple or a Fault instance to an XML-RPC - request (or response, if the methodresponse option is used). - loads Convert an XML-RPC packet to unmarshalled data plus a method - name (None if not present). -""" - -from __future__ import (absolute_import, division, print_function, - unicode_literals) -from future.builtins import bytes, dict, int, range, str - -import base64 -# Py2.7 compatibility hack -base64.encodebytes = base64.encodestring -base64.decodebytes = base64.decodestring -import sys -import time -from datetime import datetime -from future.backports.http import client as http_client -from future.backports.urllib import parse as urllib_parse -from future.utils import ensure_new_type -from xml.parsers import expat -import socket -import errno -from io import BytesIO -try: - import gzip -except ImportError: - gzip = None #python can be built without zlib/gzip support - -# -------------------------------------------------------------------- -# Internal stuff - -def escape(s): - s = s.replace("&", "&") - s = s.replace("<", "<") - return s.replace(">", ">",) - -# used in User-Agent header sent -__version__ = sys.version[:3] - -# xmlrpc integer limits -MAXINT = 2**31-1 -MININT = -2**31 - -# -------------------------------------------------------------------- -# Error constants (from Dan Libby's specification at -# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php) - -# Ranges of errors -PARSE_ERROR = -32700 -SERVER_ERROR = -32600 -APPLICATION_ERROR = -32500 -SYSTEM_ERROR = -32400 -TRANSPORT_ERROR = -32300 - -# Specific errors -NOT_WELLFORMED_ERROR = -32700 -UNSUPPORTED_ENCODING = -32701 -INVALID_ENCODING_CHAR = -32702 -INVALID_XMLRPC = -32600 -METHOD_NOT_FOUND = -32601 -INVALID_METHOD_PARAMS = -32602 -INTERNAL_ERROR = -32603 - -# -------------------------------------------------------------------- -# Exceptions - -## -# Base class for all kinds of client-side errors. - -class Error(Exception): - """Base class for client errors.""" - def __str__(self): - return repr(self) - -## -# Indicates an HTTP-level protocol error. This is raised by the HTTP -# transport layer, if the server returns an error code other than 200 -# (OK). -# -# @param url The target URL. -# @param errcode The HTTP error code. -# @param errmsg The HTTP error message. 
-# @param headers The HTTP header dictionary. - -class ProtocolError(Error): - """Indicates an HTTP protocol error.""" - def __init__(self, url, errcode, errmsg, headers): - Error.__init__(self) - self.url = url - self.errcode = errcode - self.errmsg = errmsg - self.headers = headers - def __repr__(self): - return ( - "" % - (self.url, self.errcode, self.errmsg) - ) - -## -# Indicates a broken XML-RPC response package. This exception is -# raised by the unmarshalling layer, if the XML-RPC response is -# malformed. - -class ResponseError(Error): - """Indicates a broken response package.""" - pass - -## -# Indicates an XML-RPC fault response package. This exception is -# raised by the unmarshalling layer, if the XML-RPC response contains -# a fault string. This exception can also be used as a class, to -# generate a fault XML-RPC message. -# -# @param faultCode The XML-RPC fault code. -# @param faultString The XML-RPC fault string. - -class Fault(Error): - """Indicates an XML-RPC fault package.""" - def __init__(self, faultCode, faultString, **extra): - Error.__init__(self) - self.faultCode = faultCode - self.faultString = faultString - def __repr__(self): - return "" % (ensure_new_type(self.faultCode), - ensure_new_type(self.faultString)) - -# -------------------------------------------------------------------- -# Special values - -## -# Backwards compatibility - -boolean = Boolean = bool - -## -# Wrapper for XML-RPC DateTime values. This converts a time value to -# the format used by XML-RPC. -#
-# The value can be given as a datetime object, as a string in the -# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by -# time.localtime()), or an integer value (as returned by time.time()). -# The wrapper uses time.localtime() to convert an integer to a time -# tuple. -# -# @param value The time, given as a datetime object, an ISO 8601 string, -# a time tuple, or an integer time value. - - -### For Python-Future: -def _iso8601_format(value): - return "%04d%02d%02dT%02d:%02d:%02d" % ( - value.year, value.month, value.day, - value.hour, value.minute, value.second) -### -# Issue #13305: different format codes across platforms -# _day0 = datetime(1, 1, 1) -# if _day0.strftime('%Y') == '0001': # Mac OS X -# def _iso8601_format(value): -# return value.strftime("%Y%m%dT%H:%M:%S") -# elif _day0.strftime('%4Y') == '0001': # Linux -# def _iso8601_format(value): -# return value.strftime("%4Y%m%dT%H:%M:%S") -# else: -# def _iso8601_format(value): -# return value.strftime("%Y%m%dT%H:%M:%S").zfill(17) -# del _day0 - - -def _strftime(value): - if isinstance(value, datetime): - return _iso8601_format(value) - - if not isinstance(value, (tuple, time.struct_time)): - if value == 0: - value = time.time() - value = time.localtime(value) - - return "%04d%02d%02dT%02d:%02d:%02d" % value[:6] - -class DateTime(object): - """DateTime wrapper for an ISO 8601 string or time tuple or - localtime integer value to generate 'dateTime.iso8601' XML-RPC - value. - """ - - def __init__(self, value=0): - if isinstance(value, str): - self.value = value - else: - self.value = _strftime(value) - - def make_comparable(self, other): - if isinstance(other, DateTime): - s = self.value - o = other.value - elif isinstance(other, datetime): - s = self.value - o = _iso8601_format(other) - elif isinstance(other, str): - s = self.value - o = other - elif hasattr(other, "timetuple"): - s = self.timetuple() - o = other.timetuple() - else: - otype = (hasattr(other, "__class__") - and other.__class__.__name__ - or type(other)) - raise TypeError("Can't compare %s and %s" % - (self.__class__.__name__, otype)) - return s, o - - def __lt__(self, other): - s, o = self.make_comparable(other) - return s < o - - def __le__(self, other): - s, o = self.make_comparable(other) - return s <= o - - def __gt__(self, other): - s, o = self.make_comparable(other) - return s > o - - def __ge__(self, other): - s, o = self.make_comparable(other) - return s >= o - - def __eq__(self, other): - s, o = self.make_comparable(other) - return s == o - - def __ne__(self, other): - s, o = self.make_comparable(other) - return s != o - - def timetuple(self): - return time.strptime(self.value, "%Y%m%dT%H:%M:%S") - - ## - # Get date/time value. - # - # @return Date/time value, as an ISO 8601 string. - - def __str__(self): - return self.value - - def __repr__(self): - return "" % (ensure_new_type(self.value), id(self)) - - def decode(self, data): - self.value = str(data).strip() - - def encode(self, out): - out.write("") - out.write(self.value) - out.write("\n") - -def _datetime(data): - # decode xml element contents into a DateTime structure. - value = DateTime() - value.decode(data) - return value - -def _datetime_type(data): - return datetime.strptime(data, "%Y%m%dT%H:%M:%S") - -## -# Wrapper for binary data. This can be used to transport any kind -# of binary data over XML-RPC, using BASE64 encoding. -# -# @param data An 8-bit string containing arbitrary data. 
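The DateTime and Binary wrappers above cover the two XML-RPC types that have no direct Python primitive. A small round-trip through dumps()/loads() shows what the marshalling layer does with them; this sketch uses the Python 3 stdlib names and an invented method name.

    import datetime
    from xmlrpc.client import Binary, DateTime, dumps, loads

    payload = (DateTime(datetime.datetime(2019, 5, 1, 12, 0)), Binary(b"\x00\x01"))
    packet = dumps(payload, methodname="demo.store")  # hypothetical method
    params, method = loads(packet)
    print(method)          # demo.store
    print(str(params[0]))  # 20190501T12:00:00
    print(params[1].data)  # b'\x00\x01'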
- -class Binary(object): - """Wrapper for binary data.""" - - def __init__(self, data=None): - if data is None: - data = b"" - else: - if not isinstance(data, (bytes, bytearray)): - raise TypeError("expected bytes or bytearray, not %s" % - data.__class__.__name__) - data = bytes(data) # Make a copy of the bytes! - self.data = data - - ## - # Get buffer contents. - # - # @return Buffer contents, as an 8-bit string. - - def __str__(self): - return str(self.data, "latin-1") # XXX encoding?! - - def __eq__(self, other): - if isinstance(other, Binary): - other = other.data - return self.data == other - - def __ne__(self, other): - if isinstance(other, Binary): - other = other.data - return self.data != other - - def decode(self, data): - self.data = base64.decodebytes(data) - - def encode(self, out): - out.write("\n") - encoded = base64.encodebytes(self.data) - out.write(encoded.decode('ascii')) - out.write("\n") - -def _binary(data): - # decode xml element contents into a Binary structure - value = Binary() - value.decode(data) - return value - -WRAPPERS = (DateTime, Binary) - -# -------------------------------------------------------------------- -# XML parsers - -class ExpatParser(object): - # fast expat parser for Python 2.0 and later. - def __init__(self, target): - self._parser = parser = expat.ParserCreate(None, None) - self._target = target - parser.StartElementHandler = target.start - parser.EndElementHandler = target.end - parser.CharacterDataHandler = target.data - encoding = None - target.xml(encoding, None) - - def feed(self, data): - self._parser.Parse(data, 0) - - def close(self): - self._parser.Parse("", 1) # end of data - del self._target, self._parser # get rid of circular references - -# -------------------------------------------------------------------- -# XML-RPC marshalling and unmarshalling code - -## -# XML-RPC marshaller. -# -# @param encoding Default encoding for 8-bit strings. The default -# value is None (interpreted as UTF-8). -# @see dumps - -class Marshaller(object): - """Generate an XML-RPC params chunk from a Python data structure. - - Create a Marshaller instance for each set of parameters, and use - the "dumps" method to convert your data (represented as a tuple) - to an XML-RPC params chunk. To write a fault response, pass a - Fault instance instead. You may prefer to use the "dumps" module - function for this purpose. - """ - - # by the way, if you don't understand what's going on in here, - # that's perfectly ok. - - def __init__(self, encoding=None, allow_none=False): - self.memo = {} - self.data = None - self.encoding = encoding - self.allow_none = allow_none - - dispatch = {} - - def dumps(self, values): - out = [] - write = out.append - dump = self.__dump - if isinstance(values, Fault): - # fault instance - write("\n") - dump({'faultCode': values.faultCode, - 'faultString': values.faultString}, - write) - write("\n") - else: - # parameter block - # FIXME: the xml-rpc specification allows us to leave out - # the entire block if there are no parameters. - # however, changing this may break older code (including - # old versions of xmlrpclib.py), so this is better left as - # is for now. See @XMLRPC3 for more information. 
/F - write("\n") - for v in values: - write("\n") - dump(v, write) - write("\n") - write("\n") - result = "".join(out) - return str(result) - - def __dump(self, value, write): - try: - f = self.dispatch[type(ensure_new_type(value))] - except KeyError: - # check if this object can be marshalled as a structure - if not hasattr(value, '__dict__'): - raise TypeError("cannot marshal %s objects" % type(value)) - # check if this class is a sub-class of a basic type, - # because we don't know how to marshal these types - # (e.g. a string sub-class) - for type_ in type(value).__mro__: - if type_ in self.dispatch.keys(): - raise TypeError("cannot marshal %s objects" % type(value)) - # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix - # for the p3yk merge, this should probably be fixed more neatly. - f = self.dispatch["_arbitrary_instance"] - f(self, value, write) - - def dump_nil (self, value, write): - if not self.allow_none: - raise TypeError("cannot marshal None unless allow_none is enabled") - write("") - dispatch[type(None)] = dump_nil - - def dump_bool(self, value, write): - write("") - write(value and "1" or "0") - write("\n") - dispatch[bool] = dump_bool - - def dump_long(self, value, write): - if value > MAXINT or value < MININT: - raise OverflowError("long int exceeds XML-RPC limits") - write("") - write(str(int(value))) - write("\n") - dispatch[int] = dump_long - - # backward compatible - dump_int = dump_long - - def dump_double(self, value, write): - write("") - write(repr(ensure_new_type(value))) - write("\n") - dispatch[float] = dump_double - - def dump_unicode(self, value, write, escape=escape): - write("") - write(escape(value)) - write("\n") - dispatch[str] = dump_unicode - - def dump_bytes(self, value, write): - write("\n") - encoded = base64.encodebytes(value) - write(encoded.decode('ascii')) - write("\n") - dispatch[bytes] = dump_bytes - dispatch[bytearray] = dump_bytes - - def dump_array(self, value, write): - i = id(value) - if i in self.memo: - raise TypeError("cannot marshal recursive sequences") - self.memo[i] = None - dump = self.__dump - write("\n") - for v in value: - dump(v, write) - write("\n") - del self.memo[i] - dispatch[tuple] = dump_array - dispatch[list] = dump_array - - def dump_struct(self, value, write, escape=escape): - i = id(value) - if i in self.memo: - raise TypeError("cannot marshal recursive dictionaries") - self.memo[i] = None - dump = self.__dump - write("\n") - for k, v in value.items(): - write("\n") - if not isinstance(k, str): - raise TypeError("dictionary key must be string") - write("%s\n" % escape(k)) - dump(v, write) - write("\n") - write("\n") - del self.memo[i] - dispatch[dict] = dump_struct - - def dump_datetime(self, value, write): - write("") - write(_strftime(value)) - write("\n") - dispatch[datetime] = dump_datetime - - def dump_instance(self, value, write): - # check for special wrappers - if value.__class__ in WRAPPERS: - self.write = write - value.encode(self) - del self.write - else: - # store instance attributes as a struct (really?) - self.dump_struct(value.__dict__, write) - dispatch[DateTime] = dump_instance - dispatch[Binary] = dump_instance - # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix - # for the p3yk merge, this should probably be fixed more neatly. - dispatch["_arbitrary_instance"] = dump_instance - -## -# XML-RPC unmarshaller. -# -# @see loads - -class Unmarshaller(object): - """Unmarshal an XML-RPC response, based on incoming XML event - messages (start, data, end). 
Call close() to get the resulting - data structure. - - Note that this reader is fairly tolerant, and gladly accepts bogus - XML-RPC data without complaining (but not bogus XML). - """ - - # and again, if you don't understand what's going on in here, - # that's perfectly ok. - - def __init__(self, use_datetime=False, use_builtin_types=False): - self._type = None - self._stack = [] - self._marks = [] - self._data = [] - self._methodname = None - self._encoding = "utf-8" - self.append = self._stack.append - self._use_datetime = use_builtin_types or use_datetime - self._use_bytes = use_builtin_types - - def close(self): - # return response tuple and target method - if self._type is None or self._marks: - raise ResponseError() - if self._type == "fault": - raise Fault(**self._stack[0]) - return tuple(self._stack) - - def getmethodname(self): - return self._methodname - - # - # event handlers - - def xml(self, encoding, standalone): - self._encoding = encoding - # FIXME: assert standalone == 1 ??? - - def start(self, tag, attrs): - # prepare to handle this element - if tag == "array" or tag == "struct": - self._marks.append(len(self._stack)) - self._data = [] - self._value = (tag == "value") - - def data(self, text): - self._data.append(text) - - def end(self, tag): - # call the appropriate end tag handler - try: - f = self.dispatch[tag] - except KeyError: - pass # unknown tag ? - else: - return f(self, "".join(self._data)) - - # - # accelerator support - - def end_dispatch(self, tag, data): - # dispatch data - try: - f = self.dispatch[tag] - except KeyError: - pass # unknown tag ? - else: - return f(self, data) - - # - # element decoders - - dispatch = {} - - def end_nil (self, data): - self.append(None) - self._value = 0 - dispatch["nil"] = end_nil - - def end_boolean(self, data): - if data == "0": - self.append(False) - elif data == "1": - self.append(True) - else: - raise TypeError("bad boolean value") - self._value = 0 - dispatch["boolean"] = end_boolean - - def end_int(self, data): - self.append(int(data)) - self._value = 0 - dispatch["i4"] = end_int - dispatch["i8"] = end_int - dispatch["int"] = end_int - - def end_double(self, data): - self.append(float(data)) - self._value = 0 - dispatch["double"] = end_double - - def end_string(self, data): - if self._encoding: - data = data.decode(self._encoding) - self.append(data) - self._value = 0 - dispatch["string"] = end_string - dispatch["name"] = end_string # struct keys are always strings - - def end_array(self, data): - mark = self._marks.pop() - # map arrays to Python lists - self._stack[mark:] = [self._stack[mark:]] - self._value = 0 - dispatch["array"] = end_array - - def end_struct(self, data): - mark = self._marks.pop() - # map structs to Python dictionaries - dict = {} - items = self._stack[mark:] - for i in range(0, len(items), 2): - dict[items[i]] = items[i+1] - self._stack[mark:] = [dict] - self._value = 0 - dispatch["struct"] = end_struct - - def end_base64(self, data): - value = Binary() - value.decode(data.encode("ascii")) - if self._use_bytes: - value = value.data - self.append(value) - self._value = 0 - dispatch["base64"] = end_base64 - - def end_dateTime(self, data): - value = DateTime() - value.decode(data) - if self._use_datetime: - value = _datetime_type(data) - self.append(value) - dispatch["dateTime.iso8601"] = end_dateTime - - def end_value(self, data): - # if we stumble upon a value element with no internal - # elements, treat it as a string element - if self._value: - self.end_string(data) - dispatch["value"] = 
end_value - - def end_params(self, data): - self._type = "params" - dispatch["params"] = end_params - - def end_fault(self, data): - self._type = "fault" - dispatch["fault"] = end_fault - - def end_methodName(self, data): - if self._encoding: - data = data.decode(self._encoding) - self._methodname = data - self._type = "methodName" # no params - dispatch["methodName"] = end_methodName - -## Multicall support -# - -class _MultiCallMethod(object): - # some lesser magic to store calls made to a MultiCall object - # for batch execution - def __init__(self, call_list, name): - self.__call_list = call_list - self.__name = name - def __getattr__(self, name): - return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name)) - def __call__(self, *args): - self.__call_list.append((self.__name, args)) - -class MultiCallIterator(object): - """Iterates over the results of a multicall. Exceptions are - raised in response to xmlrpc faults.""" - - def __init__(self, results): - self.results = results - - def __getitem__(self, i): - item = self.results[i] - if isinstance(type(item), dict): - raise Fault(item['faultCode'], item['faultString']) - elif type(item) == type([]): - return item[0] - else: - raise ValueError("unexpected type in multicall result") - -class MultiCall(object): - """server -> a object used to boxcar method calls - - server should be a ServerProxy object. - - Methods can be added to the MultiCall using normal - method call syntax e.g.: - - multicall = MultiCall(server_proxy) - multicall.add(2,3) - multicall.get_address("Guido") - - To execute the multicall, call the MultiCall object e.g.: - - add_result, address = multicall() - """ - - def __init__(self, server): - self.__server = server - self.__call_list = [] - - def __repr__(self): - return "" % id(self) - - __str__ = __repr__ - - def __getattr__(self, name): - return _MultiCallMethod(self.__call_list, name) - - def __call__(self): - marshalled_list = [] - for name, args in self.__call_list: - marshalled_list.append({'methodName' : name, 'params' : args}) - - return MultiCallIterator(self.__server.system.multicall(marshalled_list)) - -# -------------------------------------------------------------------- -# convenience functions - -FastMarshaller = FastParser = FastUnmarshaller = None - -## -# Create a parser object, and connect it to an unmarshalling instance. -# This function picks the fastest available XML parser. -# -# return A (parser, unmarshaller) tuple. - -def getparser(use_datetime=False, use_builtin_types=False): - """getparser() -> parser, unmarshaller - - Create an instance of the fastest available parser, and attach it - to an unmarshalling object. Return both objects. - """ - if FastParser and FastUnmarshaller: - if use_builtin_types: - mkdatetime = _datetime_type - mkbytes = base64.decodebytes - elif use_datetime: - mkdatetime = _datetime_type - mkbytes = _binary - else: - mkdatetime = _datetime - mkbytes = _binary - target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault) - parser = FastParser(target) - else: - target = Unmarshaller(use_datetime=use_datetime, use_builtin_types=use_builtin_types) - if FastParser: - parser = FastParser(target) - else: - parser = ExpatParser(target) - return parser, target - -## -# Convert a Python tuple or a Fault instance to an XML-RPC packet. -# -# @def dumps(params, **options) -# @param params A tuple or Fault instance. -# @keyparam methodname If given, create a methodCall request for -# this method name. 
-# @keyparam methodresponse If given, create a methodResponse packet. -# If used with a tuple, the tuple must be a singleton (that is, -# it must contain exactly one element). -# @keyparam encoding The packet encoding. -# @return A string containing marshalled data. - -def dumps(params, methodname=None, methodresponse=None, encoding=None, - allow_none=False): - """data [,options] -> marshalled data - - Convert an argument tuple or a Fault instance to an XML-RPC - request (or response, if the methodresponse option is used). - - In addition to the data object, the following options can be given - as keyword arguments: - - methodname: the method name for a methodCall packet - - methodresponse: true to create a methodResponse packet. - If this option is used with a tuple, the tuple must be - a singleton (i.e. it can contain only one element). - - encoding: the packet encoding (default is UTF-8) - - All byte strings in the data structure are assumed to use the - packet encoding. Unicode strings are automatically converted, - where necessary. - """ - - assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance" - if isinstance(params, Fault): - methodresponse = 1 - elif methodresponse and isinstance(params, tuple): - assert len(params) == 1, "response tuple must be a singleton" - - if not encoding: - encoding = "utf-8" - - if FastMarshaller: - m = FastMarshaller(encoding) - else: - m = Marshaller(encoding, allow_none) - - data = m.dumps(params) - - if encoding != "utf-8": - xmlheader = "\n" % str(encoding) - else: - xmlheader = "\n" # utf-8 is default - - # standard XML-RPC wrappings - if methodname: - # a method call - if not isinstance(methodname, str): - methodname = methodname.encode(encoding) - data = ( - xmlheader, - "\n" - "", methodname, "\n", - data, - "\n" - ) - elif methodresponse: - # a method response, or a fault structure - data = ( - xmlheader, - "\n", - data, - "\n" - ) - else: - return data # return as is - return str("").join(data) - -## -# Convert an XML-RPC packet to a Python object. If the XML-RPC packet -# represents a fault condition, this function raises a Fault exception. -# -# @param data An XML-RPC packet, given as an 8-bit string. -# @return A tuple containing the unpacked data, and the method name -# (None if not present). -# @see Fault - -def loads(data, use_datetime=False, use_builtin_types=False): - """data -> unmarshalled data, method name - - Convert an XML-RPC packet to unmarshalled data plus a method - name (None if not present). - - If the XML-RPC packet represents a fault condition, this function - raises a Fault exception. 
- """ - p, u = getparser(use_datetime=use_datetime, use_builtin_types=use_builtin_types) - p.feed(data) - p.close() - return u.close(), u.getmethodname() - -## -# Encode a string using the gzip content encoding such as specified by the -# Content-Encoding: gzip -# in the HTTP header, as described in RFC 1952 -# -# @param data the unencoded data -# @return the encoded data - -def gzip_encode(data): - """data -> gzip encoded data - - Encode data using the gzip content encoding as described in RFC 1952 - """ - if not gzip: - raise NotImplementedError - f = BytesIO() - gzf = gzip.GzipFile(mode="wb", fileobj=f, compresslevel=1) - gzf.write(data) - gzf.close() - encoded = f.getvalue() - f.close() - return encoded - -## -# Decode a string using the gzip content encoding such as specified by the -# Content-Encoding: gzip -# in the HTTP header, as described in RFC 1952 -# -# @param data The encoded data -# @return the unencoded data -# @raises ValueError if data is not correctly coded. - -def gzip_decode(data): - """gzip encoded data -> unencoded data - - Decode data using the gzip content encoding as described in RFC 1952 - """ - if not gzip: - raise NotImplementedError - f = BytesIO(data) - gzf = gzip.GzipFile(mode="rb", fileobj=f) - try: - decoded = gzf.read() - except IOError: - raise ValueError("invalid data") - f.close() - gzf.close() - return decoded - -## -# Return a decoded file-like object for the gzip encoding -# as described in RFC 1952. -# -# @param response A stream supporting a read() method -# @return a file-like object that the decoded data can be read() from - -class GzipDecodedResponse(gzip.GzipFile if gzip else object): - """a file-like object to decode a response encoded with the gzip - method, as described in RFC 1952. - """ - def __init__(self, response): - #response doesn't support tell() and read(), required by - #GzipFile - if not gzip: - raise NotImplementedError - self.io = BytesIO(response.read()) - gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io) - - def close(self): - gzip.GzipFile.close(self) - self.io.close() - - -# -------------------------------------------------------------------- -# request dispatcher - -class _Method(object): - # some magic to bind an XML-RPC method to an RPC server. - # supports "nested" methods (e.g. examples.getStateName) - def __init__(self, send, name): - self.__send = send - self.__name = name - def __getattr__(self, name): - return _Method(self.__send, "%s.%s" % (self.__name, name)) - def __call__(self, *args): - return self.__send(self.__name, args) - -## -# Standard transport class for XML-RPC over HTTP. -#
-# You can create custom transports by subclassing this method, and -# overriding selected methods. - -class Transport(object): - """Handles an HTTP transaction to an XML-RPC server.""" - - # client identifier (may be overridden) - user_agent = "Python-xmlrpc/%s" % __version__ - - #if true, we'll request gzip encoding - accept_gzip_encoding = True - - # if positive, encode request using gzip if it exceeds this threshold - # note that many server will get confused, so only use it if you know - # that they can decode such a request - encode_threshold = None #None = don't encode - - def __init__(self, use_datetime=False, use_builtin_types=False): - self._use_datetime = use_datetime - self._use_builtin_types = use_builtin_types - self._connection = (None, None) - self._extra_headers = [] - - ## - # Send a complete request, and parse the response. - # Retry request if a cached connection has disconnected. - # - # @param host Target host. - # @param handler Target PRC handler. - # @param request_body XML-RPC request body. - # @param verbose Debugging flag. - # @return Parsed response. - - def request(self, host, handler, request_body, verbose=False): - #retry request once if cached connection has gone cold - for i in (0, 1): - try: - return self.single_request(host, handler, request_body, verbose) - except socket.error as e: - if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE): - raise - except http_client.BadStatusLine: #close after we sent request - if i: - raise - - def single_request(self, host, handler, request_body, verbose=False): - # issue XML-RPC request - try: - http_conn = self.send_request(host, handler, request_body, verbose) - resp = http_conn.getresponse() - if resp.status == 200: - self.verbose = verbose - return self.parse_response(resp) - - except Fault: - raise - except Exception: - #All unexpected errors leave connection in - # a strange state, so we clear it. - self.close() - raise - - #We got an error response. - #Discard any response data and raise exception - if resp.getheader("content-length", ""): - resp.read() - raise ProtocolError( - host + handler, - resp.status, resp.reason, - dict(resp.getheaders()) - ) - - - ## - # Create parser. - # - # @return A 2-tuple containing a parser and a unmarshaller. - - def getparser(self): - # get parser and unmarshaller - return getparser(use_datetime=self._use_datetime, - use_builtin_types=self._use_builtin_types) - - ## - # Get authorization info from host parameter - # Host may be a string, or a (host, x509-dict) tuple; if a string, - # it is checked for a "user:pw@host" format, and a "Basic - # Authentication" header is added if appropriate. - # - # @param host Host descriptor (URL or (URL, x509 info) tuple). - # @return A 3-tuple containing (actual host, extra headers, - # x509 info). The header and x509 fields may be None. - - def get_host_info(self, host): - - x509 = {} - if isinstance(host, tuple): - host, x509 = host - - auth, host = urllib_parse.splituser(host) - - if auth: - auth = urllib_parse.unquote_to_bytes(auth) - auth = base64.encodebytes(auth).decode("utf-8") - auth = "".join(auth.split()) # get rid of whitespace - extra_headers = [ - ("Authorization", "Basic " + auth) - ] - else: - extra_headers = [] - - return host, extra_headers, x509 - - ## - # Connect to server. - # - # @param host Target host. - # @return An HTTPConnection object - - def make_connection(self, host): - #return an existing connection if possible. This allows - #HTTP/1.1 keep-alive. 
- if self._connection and host == self._connection[0]: - return self._connection[1] - # create a HTTP connection object from a host descriptor - chost, self._extra_headers, x509 = self.get_host_info(host) - self._connection = host, http_client.HTTPConnection(chost) - return self._connection[1] - - ## - # Clear any cached connection object. - # Used in the event of socket errors. - # - def close(self): - if self._connection[1]: - self._connection[1].close() - self._connection = (None, None) - - ## - # Send HTTP request. - # - # @param host Host descriptor (URL or (URL, x509 info) tuple). - # @param handler Targer RPC handler (a path relative to host) - # @param request_body The XML-RPC request body - # @param debug Enable debugging if debug is true. - # @return An HTTPConnection. - - def send_request(self, host, handler, request_body, debug): - connection = self.make_connection(host) - headers = self._extra_headers[:] - if debug: - connection.set_debuglevel(1) - if self.accept_gzip_encoding and gzip: - connection.putrequest("POST", handler, skip_accept_encoding=True) - headers.append(("Accept-Encoding", "gzip")) - else: - connection.putrequest("POST", handler) - headers.append(("Content-Type", "text/xml")) - headers.append(("User-Agent", self.user_agent)) - self.send_headers(connection, headers) - self.send_content(connection, request_body) - return connection - - ## - # Send request headers. - # This function provides a useful hook for subclassing - # - # @param connection httpConnection. - # @param headers list of key,value pairs for HTTP headers - - def send_headers(self, connection, headers): - for key, val in headers: - connection.putheader(key, val) - - ## - # Send request body. - # This function provides a useful hook for subclassing - # - # @param connection httpConnection. - # @param request_body XML-RPC request body. - - def send_content(self, connection, request_body): - #optionally encode the request - if (self.encode_threshold is not None and - self.encode_threshold < len(request_body) and - gzip): - connection.putheader("Content-Encoding", "gzip") - request_body = gzip_encode(request_body) - - connection.putheader("Content-Length", str(len(request_body))) - connection.endheaders(request_body) - - ## - # Parse response. - # - # @param file Stream. - # @return Response tuple and target method. - - def parse_response(self, response): - # read response data from httpresponse, and parse it - # Check for new http response object, otherwise it is a file object. - if hasattr(response, 'getheader'): - if response.getheader("Content-Encoding", "") == "gzip": - stream = GzipDecodedResponse(response) - else: - stream = response - else: - stream = response - - p, u = self.getparser() - - while 1: - data = stream.read(1024) - if not data: - break - if self.verbose: - print("body:", repr(data)) - p.feed(data) - - if stream is not response: - stream.close() - p.close() - - return u.close() - -## -# Standard transport class for XML-RPC over HTTPS. 
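The Transport above keeps one cached connection per host and funnels every call through send_request()/send_headers()/send_content(), which are the intended subclassing hooks. A hedged sketch of a custom transport against the Python 3 stdlib version of the same class; the class name and user-agent string are invented.

    from xmlrpc.client import ServerProxy, Transport

    class VerboseTransport(Transport):
        user_agent = "pype-xmlrpc/0.1"  # assumed identifier

        def send_content(self, connection, request_body):
            # log the outgoing body size, then defer to stock behaviour
            print("POST body: %d bytes" % len(request_body))
            super().send_content(connection, request_body)

    proxy = ServerProxy("http://localhost:8000", transport=VerboseTransport())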
- -class SafeTransport(Transport): - """Handles an HTTPS transaction to an XML-RPC server.""" - - # FIXME: mostly untested - - def make_connection(self, host): - if self._connection and host == self._connection[0]: - return self._connection[1] - - if not hasattr(http_client, "HTTPSConnection"): - raise NotImplementedError( - "your version of http.client doesn't support HTTPS") - # create a HTTPS connection object from a host descriptor - # host may be a string, or a (host, x509-dict) tuple - chost, self._extra_headers, x509 = self.get_host_info(host) - self._connection = host, http_client.HTTPSConnection(chost, - None, **(x509 or {})) - return self._connection[1] - -## -# Standard server proxy. This class establishes a virtual connection -# to an XML-RPC server. -#
-# This class is available as ServerProxy and Server. New code should -# use ServerProxy, to avoid confusion. -# -# @def ServerProxy(uri, **options) -# @param uri The connection point on the server. -# @keyparam transport A transport factory, compatible with the -# standard transport class. -# @keyparam encoding The default encoding used for 8-bit strings -# (default is UTF-8). -# @keyparam verbose Use a true value to enable debugging output. -# (printed to standard output). -# @see Transport - -class ServerProxy(object): - """uri [,options] -> a logical connection to an XML-RPC server - - uri is the connection point on the server, given as - scheme://host/target. - - The standard implementation always supports the "http" scheme. If - SSL socket support is available (Python 2.0), it also supports - "https". - - If the target part and the slash preceding it are both omitted, - "/RPC2" is assumed. - - The following options can be given as keyword arguments: - - transport: a transport factory - encoding: the request encoding (default is UTF-8) - - All 8-bit strings passed to the server proxy are assumed to use - the given encoding. - """ - - def __init__(self, uri, transport=None, encoding=None, verbose=False, - allow_none=False, use_datetime=False, use_builtin_types=False): - # establish a "logical" server connection - - # get the url - type, uri = urllib_parse.splittype(uri) - if type not in ("http", "https"): - raise IOError("unsupported XML-RPC protocol") - self.__host, self.__handler = urllib_parse.splithost(uri) - if not self.__handler: - self.__handler = "/RPC2" - - if transport is None: - if type == "https": - handler = SafeTransport - else: - handler = Transport - transport = handler(use_datetime=use_datetime, - use_builtin_types=use_builtin_types) - self.__transport = transport - - self.__encoding = encoding or 'utf-8' - self.__verbose = verbose - self.__allow_none = allow_none - - def __close(self): - self.__transport.close() - - def __request(self, methodname, params): - # call a method on the remote server - - request = dumps(params, methodname, encoding=self.__encoding, - allow_none=self.__allow_none).encode(self.__encoding) - - response = self.__transport.request( - self.__host, - self.__handler, - request, - verbose=self.__verbose - ) - - if len(response) == 1: - response = response[0] - - return response - - def __repr__(self): - return ( - "" % - (self.__host, self.__handler) - ) - - __str__ = __repr__ - - def __getattr__(self, name): - # magic method dispatcher - return _Method(self.__request, name) - - # note: to call a remote object with an non-standard name, use - # result getattr(server, "strange-python-name")(args) - - def __call__(self, attr): - """A workaround to get special attributes on the ServerProxy - without interfering with the magic __getattr__ - """ - if attr == "close": - return self.__close - elif attr == "transport": - return self.__transport - raise AttributeError("Attribute %r not found" % (attr,)) - -# compatibility - -Server = ServerProxy - -# -------------------------------------------------------------------- -# test code - -if __name__ == "__main__": - - # simple test program (from the XML-RPC specification) - - # local server, available from Lib/xmlrpc/server.py - server = ServerProxy("http://localhost:8000") - - try: - print(server.currentTime.getCurrentTime()) - except Error as v: - print("ERROR", v) - - multi = MultiCall(server) - multi.getData() - multi.pow(2,9) - multi.add(1,2) - try: - for response in multi(): - print(response) - except 
Error as v: - print("ERROR", v) diff --git a/pype/vendor/future/backports/xmlrpc/server.py b/pype/vendor/future/backports/xmlrpc/server.py deleted file mode 100644 index 28072bfecd..0000000000 --- a/pype/vendor/future/backports/xmlrpc/server.py +++ /dev/null @@ -1,999 +0,0 @@ -r""" -Ported using Python-Future from the Python 3.3 standard library. - -XML-RPC Servers. - -This module can be used to create simple XML-RPC servers -by creating a server and either installing functions, a -class instance, or by extending the SimpleXMLRPCServer -class. - -It can also be used to handle XML-RPC requests in a CGI -environment using CGIXMLRPCRequestHandler. - -The Doc* classes can be used to create XML-RPC servers that -serve pydoc-style documentation in response to HTTP -GET requests. This documentation is dynamically generated -based on the functions and methods registered with the -server. - -A list of possible usage patterns follows: - -1. Install functions: - -server = SimpleXMLRPCServer(("localhost", 8000)) -server.register_function(pow) -server.register_function(lambda x,y: x+y, 'add') -server.serve_forever() - -2. Install an instance: - -class MyFuncs: - def __init__(self): - # make all of the sys functions available through sys.func_name - import sys - self.sys = sys - def _listMethods(self): - # implement this method so that system.listMethods - # knows to advertise the sys methods - return list_public_methods(self) + \ - ['sys.' + method for method in list_public_methods(self.sys)] - def pow(self, x, y): return pow(x, y) - def add(self, x, y) : return x + y - -server = SimpleXMLRPCServer(("localhost", 8000)) -server.register_introspection_functions() -server.register_instance(MyFuncs()) -server.serve_forever() - -3. Install an instance with custom dispatch method: - -class Math: - def _listMethods(self): - # this method must be present for system.listMethods - # to work - return ['add', 'pow'] - def _methodHelp(self, method): - # this method must be present for system.methodHelp - # to work - if method == 'add': - return "add(2,3) => 5" - elif method == 'pow': - return "pow(x, y[, z]) => number" - else: - # By convention, return empty - # string if no help is available - return "" - def _dispatch(self, method, params): - if method == 'pow': - return pow(*params) - elif method == 'add': - return params[0] + params[1] - else: - raise ValueError('bad method') - -server = SimpleXMLRPCServer(("localhost", 8000)) -server.register_introspection_functions() -server.register_instance(Math()) -server.serve_forever() - -4. Subclass SimpleXMLRPCServer: - -class MathServer(SimpleXMLRPCServer): - def _dispatch(self, method, params): - try: - # We are forcing the 'export_' prefix on methods that are - # callable through XML-RPC to prevent potential security - # problems - func = getattr(self, 'export_' + method) - except AttributeError: - raise Exception('method "%s" is not supported' % method) - else: - return func(*params) - - def export_add(self, x, y): - return x + y - -server = MathServer(("localhost", 8000)) -server.serve_forever() - -5. CGI script: - -server = CGIXMLRPCRequestHandler() -server.register_function(pow) -server.handle_request() -""" - -from __future__ import absolute_import, division, print_function, unicode_literals -from future.builtins import int, str - -# Written by Brian Quinlan (brian@sweetapp.com). -# Based on code written by Fredrik Lundh. 
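The module docstring above shows only the server side of each usage pattern. For completeness, a client talking to the pattern-1 server would look roughly like this, with host and port assumed to match that example:

    from xmlrpc.client import ServerProxy

    s = ServerProxy("http://localhost:8000")
    print(s.pow(2, 3))   # 8 -- pow() was registered directly
    print(s.add(2, 3))   # 5 -- the lambda registered under the name 'add'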
- -from future.backports.xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode -from future.backports.http.server import BaseHTTPRequestHandler -import future.backports.http.server as http_server -from future.backports import socketserver -import sys -import os -import re -import pydoc -import inspect -import traceback -try: - import fcntl -except ImportError: - fcntl = None - -def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): - """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d - - Resolves a dotted attribute name to an object. Raises - an AttributeError if any attribute in the chain starts with a '_'. - - If the optional allow_dotted_names argument is false, dots are not - supported and this function operates similar to getattr(obj, attr). - """ - - if allow_dotted_names: - attrs = attr.split('.') - else: - attrs = [attr] - - for i in attrs: - if i.startswith('_'): - raise AttributeError( - 'attempt to access private attribute "%s"' % i - ) - else: - obj = getattr(obj,i) - return obj - -def list_public_methods(obj): - """Returns a list of attribute strings, found in the specified - object, which represent callable attributes""" - - return [member for member in dir(obj) - if not member.startswith('_') and - callable(getattr(obj, member))] - -class SimpleXMLRPCDispatcher(object): - """Mix-in class that dispatches XML-RPC requests. - - This class is used to register XML-RPC method handlers - and then to dispatch them. This class doesn't need to be - instanced directly when used by SimpleXMLRPCServer but it - can be instanced when used by the MultiPathXMLRPCServer - """ - - def __init__(self, allow_none=False, encoding=None, - use_builtin_types=False): - self.funcs = {} - self.instance = None - self.allow_none = allow_none - self.encoding = encoding or 'utf-8' - self.use_builtin_types = use_builtin_types - - def register_instance(self, instance, allow_dotted_names=False): - """Registers an instance to respond to XML-RPC requests. - - Only one instance can be installed at a time. - - If the registered instance has a _dispatch method then that - method will be called with the name of the XML-RPC method and - its parameters as a tuple - e.g. instance._dispatch('add',(2,3)) - - If the registered instance does not have a _dispatch method - then the instance will be searched to find a matching method - and, if found, will be called. Methods beginning with an '_' - are considered private and will not be called by - SimpleXMLRPCServer. - - If a registered function matches a XML-RPC request, then it - will be called instead of the registered instance. - - If the optional allow_dotted_names argument is true and the - instance does not have a _dispatch method, method names - containing dots are supported and resolved, as long as none of - the name segments start with an '_'. - - *** SECURITY WARNING: *** - - Enabling the allow_dotted_names options allows intruders - to access your module's global variables and may allow - intruders to execute arbitrary code on your machine. Only - use this option on a secure, closed network. - - """ - - self.instance = instance - self.allow_dotted_names = allow_dotted_names - - def register_function(self, function, name=None): - """Registers a function to respond to XML-RPC requests. - - The optional name argument can be used to set a Unicode name - for the function. 
- """ - - if name is None: - name = function.__name__ - self.funcs[name] = function - - def register_introspection_functions(self): - """Registers the XML-RPC introspection methods in the system - namespace. - - see http://xmlrpc.usefulinc.com/doc/reserved.html - """ - - self.funcs.update({'system.listMethods' : self.system_listMethods, - 'system.methodSignature' : self.system_methodSignature, - 'system.methodHelp' : self.system_methodHelp}) - - def register_multicall_functions(self): - """Registers the XML-RPC multicall method in the system - namespace. - - see http://www.xmlrpc.com/discuss/msgReader$1208""" - - self.funcs.update({'system.multicall' : self.system_multicall}) - - def _marshaled_dispatch(self, data, dispatch_method = None, path = None): - """Dispatches an XML-RPC method from marshalled (XML) data. - - XML-RPC methods are dispatched from the marshalled (XML) data - using the _dispatch method and the result is returned as - marshalled data. For backwards compatibility, a dispatch - function can be provided as an argument (see comment in - SimpleXMLRPCRequestHandler.do_POST) but overriding the - existing method through subclassing is the preferred means - of changing method dispatch behavior. - """ - - try: - params, method = loads(data, use_builtin_types=self.use_builtin_types) - - # generate response - if dispatch_method is not None: - response = dispatch_method(method, params) - else: - response = self._dispatch(method, params) - # wrap response in a singleton tuple - response = (response,) - response = dumps(response, methodresponse=1, - allow_none=self.allow_none, encoding=self.encoding) - except Fault as fault: - response = dumps(fault, allow_none=self.allow_none, - encoding=self.encoding) - except: - # report exception back to server - exc_type, exc_value, exc_tb = sys.exc_info() - response = dumps( - Fault(1, "%s:%s" % (exc_type, exc_value)), - encoding=self.encoding, allow_none=self.allow_none, - ) - - return response.encode(self.encoding) - - def system_listMethods(self): - """system.listMethods() => ['add', 'subtract', 'multiple'] - - Returns a list of the methods supported by the server.""" - - methods = set(self.funcs.keys()) - if self.instance is not None: - # Instance can implement _listMethod to return a list of - # methods - if hasattr(self.instance, '_listMethods'): - methods |= set(self.instance._listMethods()) - # if the instance has a _dispatch method then we - # don't have enough information to provide a list - # of methods - elif not hasattr(self.instance, '_dispatch'): - methods |= set(list_public_methods(self.instance)) - return sorted(methods) - - def system_methodSignature(self, method_name): - """system.methodSignature('add') => [double, int, int] - - Returns a list describing the signature of the method. In the - above example, the add method takes two integers as arguments - and returns a double result. 
- - This server does NOT support system.methodSignature.""" - - # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html - - return 'signatures not supported' - - def system_methodHelp(self, method_name): - """system.methodHelp('add') => "Adds two integers together" - - Returns a string containing documentation for the specified method.""" - - method = None - if method_name in self.funcs: - method = self.funcs[method_name] - elif self.instance is not None: - # Instance can implement _methodHelp to return help for a method - if hasattr(self.instance, '_methodHelp'): - return self.instance._methodHelp(method_name) - # if the instance has a _dispatch method then we - # don't have enough information to provide help - elif not hasattr(self.instance, '_dispatch'): - try: - method = resolve_dotted_attribute( - self.instance, - method_name, - self.allow_dotted_names - ) - except AttributeError: - pass - - # Note that we aren't checking that the method actually - # be a callable object of some kind - if method is None: - return "" - else: - return pydoc.getdoc(method) - - def system_multicall(self, call_list): - """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \ -[[4], ...] - - Allows the caller to package multiple XML-RPC calls into a single - request. - - See http://www.xmlrpc.com/discuss/msgReader$1208 - """ - - results = [] - for call in call_list: - method_name = call['methodName'] - params = call['params'] - - try: - # XXX A marshalling error in any response will fail the entire - # multicall. If someone cares they should fix this. - results.append([self._dispatch(method_name, params)]) - except Fault as fault: - results.append( - {'faultCode' : fault.faultCode, - 'faultString' : fault.faultString} - ) - except: - exc_type, exc_value, exc_tb = sys.exc_info() - results.append( - {'faultCode' : 1, - 'faultString' : "%s:%s" % (exc_type, exc_value)} - ) - return results - - def _dispatch(self, method, params): - """Dispatches the XML-RPC method. - - XML-RPC calls are forwarded to a registered function that - matches the called XML-RPC method name. If no such function - exists then the call is forwarded to the registered instance, - if available. - - If the registered instance has a _dispatch method then that - method will be called with the name of the XML-RPC method and - its parameters as a tuple - e.g. instance._dispatch('add',(2,3)) - - If the registered instance does not have a _dispatch method - then the instance will be searched to find a matching method - and, if found, will be called. - - Methods beginning with an '_' are considered private and will - not be called. - """ - - func = None - try: - # check to see if a matching function has been registered - func = self.funcs[method] - except KeyError: - if self.instance is not None: - # check for a _dispatch method - if hasattr(self.instance, '_dispatch'): - return self.instance._dispatch(method, params) - else: - # call instance method directly - try: - func = resolve_dotted_attribute( - self.instance, - method, - self.allow_dotted_names - ) - except AttributeError: - pass - - if func is not None: - return func(*params) - else: - raise Exception('method "%s" is not supported' % method) - -class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler): - """Simple XML-RPC request handler class. - - Handles all HTTP POST requests and attempts to decode them as - XML-RPC requests. - """ - - # Class attribute listing the accessible path components; - # paths not on this list will result in a 404 error. 
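system_multicall() above is the server half of the MultiCall helper defined in the deleted client module: the client boxcars {methodName, params} dicts into one request, and this handler returns a one-element list per success or a fault dict per failure. A sketch of the pairing from the client side, assuming a server address and a server that registered pow/add and called register_multicall_functions():

    from xmlrpc.client import MultiCall, ServerProxy

    proxy = ServerProxy("http://localhost:8000")
    multi = MultiCall(proxy)
    multi.pow(2, 9)          # queued locally, not sent yet
    multi.add(1, 2)
    for result in multi():   # one system.multicall round trip
        print(result)        # 512, then 3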
rpc_paths = ('/', '/RPC2')
-
-    #if not None, encode responses larger than this, if possible
-    encode_threshold = 1400 #a common MTU
-
-    #Override from StreamRequestHandler: full buffering of output
-    #and no Nagle.
-    wbufsize = -1
-    disable_nagle_algorithm = True
-
-    # a re to match a gzip Accept-Encoding
-    aepattern = re.compile(r"""
-                            \s* ([^\s;]+) \s*            #content-coding
-                            (;\s* q \s*=\s* ([0-9\.]+))? #q
-                            """, re.VERBOSE | re.IGNORECASE)
-
-    def accept_encodings(self):
-        r = {}
-        ae = self.headers.get("Accept-Encoding", "")
-        for e in ae.split(","):
-            match = self.aepattern.match(e)
-            if match:
-                v = match.group(3)
-                v = float(v) if v else 1.0
-                r[match.group(1)] = v
-        return r
-
-    def is_rpc_path_valid(self):
-        if self.rpc_paths:
-            return self.path in self.rpc_paths
-        else:
-            # If .rpc_paths is empty, just assume all paths are legal
-            return True
-
-    def do_POST(self):
-        """Handles the HTTP POST request.
-
-        Attempts to interpret all HTTP POST requests as XML-RPC calls,
-        which are forwarded to the server's _dispatch method for handling.
-        """
-
-        # Check that the path is legal
-        if not self.is_rpc_path_valid():
-            self.report_404()
-            return
-
-        try:
-            # Get arguments by reading body of request.
-            # We read this in chunks to avoid straining
-            # socket.read(); around the 10 or 15Mb mark, some platforms
-            # begin to have problems (bug #792570).
-            max_chunk_size = 10*1024*1024
-            size_remaining = int(self.headers["content-length"])
-            L = []
-            while size_remaining:
-                chunk_size = min(size_remaining, max_chunk_size)
-                chunk = self.rfile.read(chunk_size)
-                if not chunk:
-                    break
-                L.append(chunk)
-                size_remaining -= len(L[-1])
-            data = b''.join(L)
-
-            data = self.decode_request_content(data)
-            if data is None:
-                return #response has been sent
-
-            # In previous versions of SimpleXMLRPCServer, _dispatch
-            # could be overridden in this class, instead of in
-            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
-            # check to see if a subclass implements _dispatch and dispatch
-            # using that method if present.
- response = self.server._marshaled_dispatch( - data, getattr(self, '_dispatch', None), self.path - ) - except Exception as e: # This should only happen if the module is buggy - # internal error, report as HTTP server error - self.send_response(500) - - # Send information about the exception if requested - if hasattr(self.server, '_send_traceback_header') and \ - self.server._send_traceback_header: - self.send_header("X-exception", str(e)) - trace = traceback.format_exc() - trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII') - self.send_header("X-traceback", trace) - - self.send_header("Content-length", "0") - self.end_headers() - else: - self.send_response(200) - self.send_header("Content-type", "text/xml") - if self.encode_threshold is not None: - if len(response) > self.encode_threshold: - q = self.accept_encodings().get("gzip", 0) - if q: - try: - response = gzip_encode(response) - self.send_header("Content-Encoding", "gzip") - except NotImplementedError: - pass - self.send_header("Content-length", str(len(response))) - self.end_headers() - self.wfile.write(response) - - def decode_request_content(self, data): - #support gzip encoding of request - encoding = self.headers.get("content-encoding", "identity").lower() - if encoding == "identity": - return data - if encoding == "gzip": - try: - return gzip_decode(data) - except NotImplementedError: - self.send_response(501, "encoding %r not supported" % encoding) - except ValueError: - self.send_response(400, "error decoding gzip content") - else: - self.send_response(501, "encoding %r not supported" % encoding) - self.send_header("Content-length", "0") - self.end_headers() - - def report_404 (self): - # Report a 404 error - self.send_response(404) - response = b'No such page' - self.send_header("Content-type", "text/plain") - self.send_header("Content-length", str(len(response))) - self.end_headers() - self.wfile.write(response) - - def log_request(self, code='-', size='-'): - """Selectively log an accepted request.""" - - if self.server.logRequests: - BaseHTTPRequestHandler.log_request(self, code, size) - -class SimpleXMLRPCServer(socketserver.TCPServer, - SimpleXMLRPCDispatcher): - """Simple XML-RPC server. - - Simple XML-RPC server that allows functions and a single instance - to be installed to handle requests. The default implementation - attempts to dispatch XML-RPC calls to the functions or instance - installed in the server. Override the _dispatch method inherited - from SimpleXMLRPCDispatcher to change this behavior. - """ - - allow_reuse_address = True - - # Warning: this is for debugging purposes only! Never set this to True in - # production code, as will be sending out sensitive information (exception - # and stack trace details) when exceptions are raised inside - # SimpleXMLRPCRequestHandler.do_POST - _send_traceback_header = False - - def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, - logRequests=True, allow_none=False, encoding=None, - bind_and_activate=True, use_builtin_types=False): - self.logRequests = logRequests - - SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) - socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate) - - # [Bug #1222790] If possible, set close-on-exec flag; if a - # method spawns a subprocess, the subprocess shouldn't have - # the listening socket open. 
- if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): - flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) - flags |= fcntl.FD_CLOEXEC - fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) - -class MultiPathXMLRPCServer(SimpleXMLRPCServer): - """Multipath XML-RPC Server - This specialization of SimpleXMLRPCServer allows the user to create - multiple Dispatcher instances and assign them to different - HTTP request paths. This makes it possible to run two or more - 'virtual XML-RPC servers' at the same port. - Make sure that the requestHandler accepts the paths in question. - """ - def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, - logRequests=True, allow_none=False, encoding=None, - bind_and_activate=True, use_builtin_types=False): - - SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none, - encoding, bind_and_activate, use_builtin_types) - self.dispatchers = {} - self.allow_none = allow_none - self.encoding = encoding or 'utf-8' - - def add_dispatcher(self, path, dispatcher): - self.dispatchers[path] = dispatcher - return dispatcher - - def get_dispatcher(self, path): - return self.dispatchers[path] - - def _marshaled_dispatch(self, data, dispatch_method = None, path = None): - try: - response = self.dispatchers[path]._marshaled_dispatch( - data, dispatch_method, path) - except: - # report low level exception back to server - # (each dispatcher should have handled their own - # exceptions) - exc_type, exc_value = sys.exc_info()[:2] - response = dumps( - Fault(1, "%s:%s" % (exc_type, exc_value)), - encoding=self.encoding, allow_none=self.allow_none) - response = response.encode(self.encoding) - return response - -class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): - """Simple handler for XML-RPC data passed through CGI.""" - - def __init__(self, allow_none=False, encoding=None, use_builtin_types=False): - SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) - - def handle_xmlrpc(self, request_text): - """Handle a single XML-RPC request""" - - response = self._marshaled_dispatch(request_text) - - print('Content-Type: text/xml') - print('Content-Length: %d' % len(response)) - print() - sys.stdout.flush() - sys.stdout.buffer.write(response) - sys.stdout.buffer.flush() - - def handle_get(self): - """Handle a single HTTP GET request. - - Default implementation indicates an error because - XML-RPC uses the POST method. - """ - - code = 400 - message, explain = BaseHTTPRequestHandler.responses[code] - - response = http_server.DEFAULT_ERROR_MESSAGE % \ - { - 'code' : code, - 'message' : message, - 'explain' : explain - } - response = response.encode('utf-8') - print('Status: %d %s' % (code, message)) - print('Content-Type: %s' % http_server.DEFAULT_ERROR_CONTENT_TYPE) - print('Content-Length: %d' % len(response)) - print() - sys.stdout.flush() - sys.stdout.buffer.write(response) - sys.stdout.buffer.flush() - - def handle_request(self, request_text=None): - """Handle a single XML-RPC request passed through a CGI post method. - - If no XML data is given then it is read from stdin. The resulting - XML-RPC response is printed to stdout along with the correct HTTP - headers. 
- """ - - if request_text is None and \ - os.environ.get('REQUEST_METHOD', None) == 'GET': - self.handle_get() - else: - # POST data is normally available through stdin - try: - length = int(os.environ.get('CONTENT_LENGTH', None)) - except (ValueError, TypeError): - length = -1 - if request_text is None: - request_text = sys.stdin.read(length) - - self.handle_xmlrpc(request_text) - - -# ----------------------------------------------------------------------------- -# Self documenting XML-RPC Server. - -class ServerHTMLDoc(pydoc.HTMLDoc): - """Class used to generate pydoc HTML document for a server""" - - def markup(self, text, escape=None, funcs={}, classes={}, methods={}): - """Mark up some plain text, given a context of symbols to look for. - Each context dictionary maps object names to anchor names.""" - escape = escape or self.escape - results = [] - here = 0 - - # XXX Note that this regular expression does not allow for the - # hyperlinking of arbitrary strings being used as method - # names. Only methods with names consisting of word characters - # and '.'s are hyperlinked. - pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|' - r'RFC[- ]?(\d+)|' - r'PEP[- ]?(\d+)|' - r'(self\.)?((?:\w|\.)+))\b') - while 1: - match = pattern.search(text, here) - if not match: break - start, end = match.span() - results.append(escape(text[here:start])) - - all, scheme, rfc, pep, selfdot, name = match.groups() - if scheme: - url = escape(all).replace('"', '"') - results.append('%s' % (url, url)) - elif rfc: - url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc) - results.append('%s' % (url, escape(all))) - elif pep: - url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep) - results.append('%s' % (url, escape(all))) - elif text[end:end+1] == '(': - results.append(self.namelink(name, methods, funcs, classes)) - elif selfdot: - results.append('self.%s' % name) - else: - results.append(self.namelink(name, classes)) - here = end - results.append(escape(text[here:])) - return ''.join(results) - - def docroutine(self, object, name, mod=None, - funcs={}, classes={}, methods={}, cl=None): - """Produce HTML documentation for a function or method object.""" - - anchor = (cl and cl.__name__ or '') + '-' + name - note = '' - - title = '%s' % ( - self.escape(anchor), self.escape(name)) - - if inspect.ismethod(object): - args = inspect.getfullargspec(object) - # exclude the argument bound to the instance, it will be - # confusing to the non-Python user - argspec = inspect.formatargspec ( - args.args[1:], - args.varargs, - args.varkw, - args.defaults, - annotations=args.annotations, - formatvalue=self.formatvalue - ) - elif inspect.isfunction(object): - args = inspect.getfullargspec(object) - argspec = inspect.formatargspec( - args.args, args.varargs, args.varkw, args.defaults, - annotations=args.annotations, - formatvalue=self.formatvalue) - else: - argspec = '(...)' - - if isinstance(object, tuple): - argspec = object[0] or argspec - docstring = object[1] or "" - else: - docstring = pydoc.getdoc(object) - - decl = title + argspec + (note and self.grey( - '%s' % note)) - - doc = self.markup( - docstring, self.preformat, funcs, classes, methods) - doc = doc and '

%s
' % doc - return '
%s
%s
\n' % (decl, doc) - - def docserver(self, server_name, package_documentation, methods): - """Produce HTML documentation for an XML-RPC server.""" - - fdict = {} - for key, value in methods.items(): - fdict[key] = '#-' + key - fdict[value] = fdict[key] - - server_name = self.escape(server_name) - head = '%s' % server_name - result = self.heading(head, '#ffffff', '#7799ee') - - doc = self.markup(package_documentation, self.preformat, fdict) - doc = doc and '%s' % doc - result = result + '

%s

\n' % doc - - contents = [] - method_items = sorted(methods.items()) - for key, value in method_items: - contents.append(self.docroutine(value, key, funcs=fdict)) - result = result + self.bigsection( - 'Methods', '#ffffff', '#eeaa77', ''.join(contents)) - - return result - -class XMLRPCDocGenerator(object): - """Generates documentation for an XML-RPC server. - - This class is designed as mix-in and should not - be constructed directly. - """ - - def __init__(self): - # setup variables used for HTML documentation - self.server_name = 'XML-RPC Server Documentation' - self.server_documentation = \ - "This server exports the following methods through the XML-RPC "\ - "protocol." - self.server_title = 'XML-RPC Server Documentation' - - def set_server_title(self, server_title): - """Set the HTML title of the generated server documentation""" - - self.server_title = server_title - - def set_server_name(self, server_name): - """Set the name of the generated HTML server documentation""" - - self.server_name = server_name - - def set_server_documentation(self, server_documentation): - """Set the documentation string for the entire server.""" - - self.server_documentation = server_documentation - - def generate_html_documentation(self): - """generate_html_documentation() => html documentation for the server - - Generates HTML documentation for the server using introspection for - installed functions and instances that do not implement the - _dispatch method. Alternatively, instances can choose to implement - the _get_method_argstring(method_name) method to provide the - argument string used in the documentation and the - _methodHelp(method_name) method to provide the help text used - in the documentation.""" - - methods = {} - - for method_name in self.system_listMethods(): - if method_name in self.funcs: - method = self.funcs[method_name] - elif self.instance is not None: - method_info = [None, None] # argspec, documentation - if hasattr(self.instance, '_get_method_argstring'): - method_info[0] = self.instance._get_method_argstring(method_name) - if hasattr(self.instance, '_methodHelp'): - method_info[1] = self.instance._methodHelp(method_name) - - method_info = tuple(method_info) - if method_info != (None, None): - method = method_info - elif not hasattr(self.instance, '_dispatch'): - try: - method = resolve_dotted_attribute( - self.instance, - method_name - ) - except AttributeError: - method = method_info - else: - method = method_info - else: - assert 0, "Could not find method in self.functions and no "\ - "instance installed" - - methods[method_name] = method - - documenter = ServerHTMLDoc() - documentation = documenter.docserver( - self.server_name, - self.server_documentation, - methods - ) - - return documenter.page(self.server_title, documentation) - -class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): - """XML-RPC and documentation request handler class. - - Handles all HTTP POST requests and attempts to decode them as - XML-RPC requests. - - Handles all HTTP GET requests and interprets them as requests - for documentation. - """ - - def do_GET(self): - """Handles the HTTP GET request. - - Interpret all HTTP GET requests as requests for server - documentation. 
- """ - # Check that the path is legal - if not self.is_rpc_path_valid(): - self.report_404() - return - - response = self.server.generate_html_documentation().encode('utf-8') - self.send_response(200) - self.send_header("Content-type", "text/html") - self.send_header("Content-length", str(len(response))) - self.end_headers() - self.wfile.write(response) - -class DocXMLRPCServer( SimpleXMLRPCServer, - XMLRPCDocGenerator): - """XML-RPC and HTML documentation server. - - Adds the ability to serve server documentation to the capabilities - of SimpleXMLRPCServer. - """ - - def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler, - logRequests=True, allow_none=False, encoding=None, - bind_and_activate=True, use_builtin_types=False): - SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, - allow_none, encoding, bind_and_activate, - use_builtin_types) - XMLRPCDocGenerator.__init__(self) - -class DocCGIXMLRPCRequestHandler( CGIXMLRPCRequestHandler, - XMLRPCDocGenerator): - """Handler for XML-RPC data and documentation requests passed through - CGI""" - - def handle_get(self): - """Handles the HTTP GET request. - - Interpret all HTTP GET requests as requests for server - documentation. - """ - - response = self.generate_html_documentation().encode('utf-8') - - print('Content-Type: text/html') - print('Content-Length: %d' % len(response)) - print() - sys.stdout.flush() - sys.stdout.buffer.write(response) - sys.stdout.buffer.flush() - - def __init__(self): - CGIXMLRPCRequestHandler.__init__(self) - XMLRPCDocGenerator.__init__(self) - - -if __name__ == '__main__': - import datetime - - class ExampleService: - def getData(self): - return '42' - - class currentTime: - @staticmethod - def getCurrentTime(): - return datetime.datetime.now() - - server = SimpleXMLRPCServer(("localhost", 8000)) - server.register_function(pow) - server.register_function(lambda x,y: x+y, 'add') - server.register_instance(ExampleService(), allow_dotted_names=True) - server.register_multicall_functions() - print('Serving XML-RPC on localhost port 8000') - print('It is advisable to run this example server within a secure, closed network.') - try: - server.serve_forever() - except KeyboardInterrupt: - print("\nKeyboard interrupt received, exiting.") - server.server_close() - sys.exit(0) diff --git a/pype/vendor/future/builtins/__init__.py b/pype/vendor/future/builtins/__init__.py deleted file mode 100644 index 94011f9795..0000000000 --- a/pype/vendor/future/builtins/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -""" -A module that brings in equivalents of the new and modified Python 3 -builtins into Py2. Has no effect on Py3. - -See the docs `here `_ -(``docs/what-else.rst``) for more information. - -""" - -from future.builtins.iterators import (filter, map, zip) -# The isinstance import is no longer needed. We provide it only for -# backward-compatibility with future v0.8.2. It will be removed in future v1.0. 
-from future.builtins.misc import (ascii, chr, hex, input, isinstance, next, - oct, open, pow, round, super) -from future.utils import PY3 - -if PY3: - import builtins - bytes = builtins.bytes - dict = builtins.dict - int = builtins.int - list = builtins.list - object = builtins.object - range = builtins.range - str = builtins.str - __all__ = [] -else: - from future.types import (newbytes as bytes, - newdict as dict, - newint as int, - newlist as list, - newobject as object, - newrange as range, - newstr as str) -from future import utils - - -if not utils.PY3: - # We only import names that shadow the builtins on Py2. No other namespace - # pollution on Py2. - - # Only shadow builtins on Py2; no new names - __all__ = ['filter', 'map', 'zip', - 'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow', - 'round', 'super', - 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', - ] - -else: - # No namespace pollution on Py3 - __all__ = [] diff --git a/pype/vendor/future/builtins/disabled.py b/pype/vendor/future/builtins/disabled.py deleted file mode 100644 index f6d6ea9b80..0000000000 --- a/pype/vendor/future/builtins/disabled.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -This disables builtin functions (and one exception class) which are -removed from Python 3.3. - -This module is designed to be used like this:: - - from future.builtins.disabled import * - -This disables the following obsolete Py2 builtin functions:: - - apply, cmp, coerce, execfile, file, input, long, - raw_input, reduce, reload, unicode, xrange - -We don't hack __builtin__, which is very fragile because it contaminates -imported modules too. Instead, we just create new functions with -the same names as the obsolete builtins from Python 2 which raise -NameError exceptions when called. - -Note that both ``input()`` and ``raw_input()`` are among the disabled -functions (in this module). Although ``input()`` exists as a builtin in -Python 3, the Python 2 ``input()`` builtin is unsafe to use because it -can lead to shell injection. Therefore we shadow it by default upon ``from -future.builtins.disabled import *``, in case someone forgets to import our -replacement ``input()`` somehow and expects Python 3 semantics. - -See the ``future.builtins.misc`` module for a working version of -``input`` with Python 3 semantics. - -(Note that callable() is not among the functions disabled; this was -reintroduced into Python 3.2.) - -This exception class is also disabled: - - StandardError - -""" - -from __future__ import division, absolute_import, print_function - -from future import utils - - -OBSOLETE_BUILTINS = ['apply', 'chr', 'cmp', 'coerce', 'execfile', 'file', - 'input', 'long', 'raw_input', 'reduce', 'reload', - 'unicode', 'xrange', 'StandardError'] - - -def disabled_function(name): - ''' - Returns a function that cannot be called - ''' - def disabled(*args, **kwargs): - ''' - A function disabled by the ``future`` module. This function is - no longer a builtin in Python 3. 
-        '''
-        raise NameError('obsolete Python 2 builtin {0} is disabled'.format(name))
-    return disabled
-
-
-if not utils.PY3:
-    for fname in OBSOLETE_BUILTINS:
-        locals()[fname] = disabled_function(fname)
-    __all__ = OBSOLETE_BUILTINS
-else:
-    __all__ = []
diff --git a/pype/vendor/future/builtins/iterators.py b/pype/vendor/future/builtins/iterators.py
deleted file mode 100644
index b82f29f2a0..0000000000
--- a/pype/vendor/future/builtins/iterators.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-This module is designed to be used as follows::
-
-    from future.builtins.iterators import *
-
-And then, for example::
-
-    for i in range(10**15):
-        pass
-
-    for (a, b) in zip(range(10**15), range(-10**15, 0)):
-        pass
-
-Note that this is standard Python 3 code, plus some imports that do
-nothing on Python 3.
-
-The iterators this brings in are::
-
-- ``range``
-- ``filter``
-- ``map``
-- ``zip``
-
-On Python 2, ``range`` is a pure-Python backport of Python 3's ``range``
-iterator with slicing support. The other iterators (``filter``, ``map``,
-``zip``) are from the ``itertools`` module on Python 2. On Python 3 these
-are available in the module namespace but not exported for * imports via
-__all__ (zero namespace pollution).
-
-Note that these are also available in the standard library
-``future_builtins`` module on Python 2 -- but not Python 3, so using
-the standard library version is not portable, nor anywhere near complete.
-"""
-
-from __future__ import division, absolute_import, print_function
-
-import itertools
-from future import utils
-
-if not utils.PY3:
-    filter = itertools.ifilter
-    map = itertools.imap
-    from future.types import newrange as range
-    zip = itertools.izip
-    __all__ = ['filter', 'map', 'range', 'zip']
-else:
-    import builtins
-    filter = builtins.filter
-    map = builtins.map
-    range = builtins.range
-    zip = builtins.zip
-    __all__ = []
-
diff --git a/pype/vendor/future/builtins/misc.py b/pype/vendor/future/builtins/misc.py
deleted file mode 100644
index 90dc384ad5..0000000000
--- a/pype/vendor/future/builtins/misc.py
+++ /dev/null
@@ -1,124 +0,0 @@
-"""
-A module that brings in equivalents of various modified Python 3 builtins
-into Py2. Has no effect on Py3.
-
-The builtin functions are:
-
-- ``ascii`` (from Py2's future_builtins module)
-- ``hex`` (from Py2's future_builtins module)
-- ``oct`` (from Py2's future_builtins module)
-- ``chr`` (equivalent to ``unichr`` on Py2)
-- ``input`` (equivalent to ``raw_input`` on Py2)
-- ``next`` (calls ``__next__`` if it exists, else ``next`` method)
-- ``open`` (equivalent to io.open on Py2)
-- ``super`` (backport of Py3's magic zero-argument super() function)
-- ``round`` (new "Banker's Rounding" behaviour from Py3)
-
-``isinstance`` is also currently exported for backwards compatibility
-with v0.8.2, although this has been deprecated since v0.9.
-
-
-input()
--------
-Like the new ``input()`` function from Python 3 (without eval()), except
-that it returns bytes. Equivalent to Python 2's ``raw_input()``.
-
-Warning: By default, importing this module *removes* the old Python 2
-input() function entirely from ``__builtin__`` for safety. This is
-because forgetting to import the new ``input`` from ``future`` might
-otherwise lead to a security vulnerability (shell injection) on Python 2.
-
-To restore it, you can retrieve it yourself from
-``__builtin__._old_input``.
-
-Fortunately, ``input()`` seems to be seldom used in the wild in Python
-2...
- -""" - -from future import utils - - -if utils.PY2: - from io import open - from future_builtins import ascii, oct, hex - from __builtin__ import unichr as chr, pow as _builtin_pow - import __builtin__ - - # Only for backward compatibility with future v0.8.2: - isinstance = __builtin__.isinstance - - # Warning: Python 2's input() is unsafe and MUST not be able to be used - # accidentally by someone who expects Python 3 semantics but forgets - # to import it on Python 2. Versions of ``future`` prior to 0.11 - # deleted it from __builtin__. Now we keep in __builtin__ but shadow - # the name like all others. Just be sure to import ``input``. - - input = raw_input - - from future.builtins.newnext import newnext as next - from future.builtins.newround import newround as round - from future.builtins.newsuper import newsuper as super - from future.types.newint import newint - - _SENTINEL = object() - - def pow(x, y, z=_SENTINEL): - """ - pow(x, y[, z]) -> number - - With two arguments, equivalent to x**y. With three arguments, - equivalent to (x**y) % z, but may be more efficient (e.g. for ints). - """ - # Handle newints - if isinstance(x, newint): - x = long(x) - if isinstance(y, newint): - y = long(y) - if isinstance(z, newint): - z = long(z) - - try: - if z == _SENTINEL: - return _builtin_pow(x, y) - else: - return _builtin_pow(x, y, z) - except ValueError: - if z == _SENTINEL: - return _builtin_pow(x+0j, y) - else: - return _builtin_pow(x+0j, y, z) - - # ``future`` doesn't support Py3.0/3.1. If we ever did, we'd add this: - # callable = __builtin__.callable - - __all__ = ['ascii', 'chr', 'hex', 'input', 'isinstance', 'next', 'oct', - 'open', 'pow', 'round', 'super'] - -else: - import builtins - ascii = builtins.ascii - chr = builtins.chr - hex = builtins.hex - input = builtins.input - next = builtins.next - # Only for backward compatibility with future v0.8.2: - isinstance = builtins.isinstance - oct = builtins.oct - open = builtins.open - pow = builtins.pow - round = builtins.round - super = builtins.super - - __all__ = [] - - # The callable() function was removed from Py3.0 and 3.1 and - # reintroduced into Py3.2+. ``future`` doesn't support Py3.0/3.1. If we ever - # did, we'd add this: - # try: - # callable = builtins.callable - # except AttributeError: - # # Definition from Pandas - # def callable(obj): - # return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - # __all__.append('callable') diff --git a/pype/vendor/future/builtins/newnext.py b/pype/vendor/future/builtins/newnext.py deleted file mode 100644 index 9364023aa2..0000000000 --- a/pype/vendor/future/builtins/newnext.py +++ /dev/null @@ -1,71 +0,0 @@ -''' -This module provides a newnext() function in Python 2 that mimics the -behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for -compatibility if this fails. - -``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this -doesn't exist, it falls back to calling a ``next()`` method. - -For example: - - >>> class Odds(object): - ... def __init__(self, start=1): - ... self.value = start - 2 - ... def __next__(self): # note the Py3 interface - ... self.value += 2 - ... return self.value - ... def __iter__(self): - ... return self - ... 
- >>> iterator = Odds() - >>> next(iterator) - 1 - >>> next(iterator) - 3 - -If you are defining your own custom iterator class as above, it is preferable -to explicitly decorate the class with the @implements_iterator decorator from -``future.utils`` as follows: - - >>> @implements_iterator - ... class Odds(object): - ... # etc - ... pass - -This next() function is primarily for consuming iterators defined in Python 3 -code elsewhere that we would like to run on Python 2 or 3. -''' - -_builtin_next = next - -_SENTINEL = object() - -def newnext(iterator, default=_SENTINEL): - """ - next(iterator[, default]) - - Return the next item from the iterator. If default is given and the iterator - is exhausted, it is returned instead of raising StopIteration. - """ - - # args = [] - # if default is not _SENTINEL: - # args.append(default) - try: - try: - return iterator.__next__() - except AttributeError: - try: - return iterator.next() - except AttributeError: - raise TypeError("'{0}' object is not an iterator".format( - iterator.__class__.__name__)) - except StopIteration as e: - if default is _SENTINEL: - raise e - else: - return default - - -__all__ = ['newnext'] - diff --git a/pype/vendor/future/builtins/newround.py b/pype/vendor/future/builtins/newround.py deleted file mode 100644 index f59b35b325..0000000000 --- a/pype/vendor/future/builtins/newround.py +++ /dev/null @@ -1,99 +0,0 @@ -""" -``python-future``: pure Python implementation of Python 3 round(). -""" - -from future.utils import PYPY, PY26, bind_method - -# Use the decimal module for simplicity of implementation (and -# hopefully correctness). -from decimal import Decimal, ROUND_HALF_EVEN - - -def newround(number, ndigits=None): - """ - See Python 3 documentation: uses Banker's Rounding. - - Delegates to the __round__ method if for some reason this exists. - - If not, rounds a number to a given precision in decimal digits (default - 0 digits). This returns an int when called with one argument, - otherwise the same type as the number. ndigits may be negative. - - See the test_round method in future/tests/test_builtins.py for - examples. - """ - return_int = False - if ndigits is None: - return_int = True - ndigits = 0 - if hasattr(number, '__round__'): - return number.__round__(ndigits) - - if ndigits < 0: - raise NotImplementedError('negative ndigits not supported yet') - exponent = Decimal('10') ** (-ndigits) - - if PYPY: - # Work around issue #24: round() breaks on PyPy with NumPy's types - if 'numpy' in repr(type(number)): - number = float(number) - - if not PY26: - d = Decimal.from_float(number).quantize(exponent, - rounding=ROUND_HALF_EVEN) - else: - d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN) - - if return_int: - return int(d) - else: - return float(d) - - -### From Python 2.7's decimal.py. Only needed to support Py2.6: - -def from_float_26(f): - """Converts a float to a decimal number, exactly. - - Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). - Since 0.1 is not exactly representable in binary floating point, the - value is stored as the nearest representable value which is - 0x1.999999999999ap-4. The exact equivalent of the value in decimal - is 0.1000000000000000055511151231257827021181583404541015625. 
- - >>> Decimal.from_float(0.1) - Decimal('0.1000000000000000055511151231257827021181583404541015625') - >>> Decimal.from_float(float('nan')) - Decimal('NaN') - >>> Decimal.from_float(float('inf')) - Decimal('Infinity') - >>> Decimal.from_float(-float('inf')) - Decimal('-Infinity') - >>> Decimal.from_float(-0.0) - Decimal('-0') - - """ - import math as _math - from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3) - - if isinstance(f, (int, long)): # handle integer inputs - return Decimal(f) - if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float - return Decimal(repr(f)) - if _math.copysign(1.0, f) == 1.0: - sign = 0 - else: - sign = 1 - n, d = abs(f).as_integer_ratio() - # int.bit_length() method doesn't exist on Py2.6: - def bit_length(d): - if d != 0: - return len(bin(abs(d))) - 2 - else: - return 0 - k = bit_length(d) - 1 - result = _dec_from_triple(sign, str(n*5**k), -k) - return result - - -__all__ = ['newround'] diff --git a/pype/vendor/future/builtins/newsuper.py b/pype/vendor/future/builtins/newsuper.py deleted file mode 100644 index a787f4bb9c..0000000000 --- a/pype/vendor/future/builtins/newsuper.py +++ /dev/null @@ -1,115 +0,0 @@ -''' -This module provides a newsuper() function in Python 2 that mimics the -behaviour of super() in Python 3. It is designed to be used as follows: - - from __future__ import division, absolute_import, print_function - from future.builtins import super - -And then, for example: - - class VerboseList(list): - def append(self, item): - print('Adding an item') - super().append(item) # new simpler super() function - -Importing this module on Python 3 has no effect. - -This is based on (i.e. almost identical to) Ryan Kelly's magicsuper -module here: - - https://github.com/rfk/magicsuper.git - -Excerpts from Ryan's docstring: - - "Of course, you can still explicitly pass in the arguments if you want - to do something strange. Sometimes you really do want that, e.g. to - skip over some classes in the method resolution order. - - "How does it work? By inspecting the calling frame to determine the - function object being executed and the object on which it's being - called, and then walking the object's __mro__ chain to find out where - that function was defined. Yuck, but it seems to work..." -''' - -from __future__ import absolute_import -import sys -from types import FunctionType - -from future.utils import PY3, PY26 - - -_builtin_super = super - -_SENTINEL = object() - -def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): - '''Like builtin super(), but capable of magic. - - This acts just like the builtin super() function, but if called - without any arguments it attempts to infer them at runtime. - ''' - # Infer the correct call if used without arguments. - if typ is _SENTINEL: - # We'll need to do some frame hacking. - f = sys._getframe(framedepth) - - try: - # Get the function's first positional argument. - type_or_obj = f.f_locals[f.f_code.co_varnames[0]] - except (IndexError, KeyError,): - raise RuntimeError('super() used in a function with no args') - - try: - # Get the MRO so we can crawl it. - mro = type_or_obj.__mro__ - except (AttributeError, RuntimeError): # see issue #160 - try: - mro = type_or_obj.__class__.__mro__ - except AttributeError: - raise RuntimeError('super() used with a non-newstyle class') - - # A ``for...else`` block? Yes! It's odd, but useful. 
- # If unfamiliar with for...else, see: - # - # http://psung.blogspot.com/2007/12/for-else-in-python.html - for typ in mro: - # Find the class that owns the currently-executing method. - for meth in typ.__dict__.values(): - # Drill down through any wrappers to the underlying func. - # This handles e.g. classmethod() and staticmethod(). - try: - while not isinstance(meth,FunctionType): - if isinstance(meth, property): - # Calling __get__ on the property will invoke - # user code which might throw exceptions or have - # side effects - meth = meth.fget - else: - try: - meth = meth.__func__ - except AttributeError: - meth = meth.__get__(type_or_obj) - except (AttributeError, TypeError): - continue - if meth.func_code is f.f_code: - break # Aha! Found you. - else: - continue # Not found! Move onto the next class in MRO. - break # Found! Break out of the search loop. - else: - raise RuntimeError('super() called outside a method') - - # Dispatch to builtin super(). - if type_or_obj is not _SENTINEL: - return _builtin_super(typ, type_or_obj) - return _builtin_super(typ) - - -def superm(*args, **kwds): - f = sys._getframe(1) - nm = f.f_code.co_name - return getattr(newsuper(framedepth=2),nm)(*args, **kwds) - - -__all__ = ['newsuper'] - diff --git a/pype/vendor/future/moves/__init__.py b/pype/vendor/future/moves/__init__.py deleted file mode 100644 index 040fdcf01a..0000000000 --- a/pype/vendor/future/moves/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# future.moves package -from __future__ import absolute_import -import sys -__future_module__ = True -from future.standard_library import import_top_level_modules - -if sys.version_info[0] == 3: - import_top_level_modules() diff --git a/pype/vendor/future/moves/_dummy_thread.py b/pype/vendor/future/moves/_dummy_thread.py deleted file mode 100644 index 688d249bbe..0000000000 --- a/pype/vendor/future/moves/_dummy_thread.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from _dummy_thread import * -else: - __future_module__ = True - from dummy_thread import * diff --git a/pype/vendor/future/moves/_markupbase.py b/pype/vendor/future/moves/_markupbase.py deleted file mode 100644 index f9fb4bbf28..0000000000 --- a/pype/vendor/future/moves/_markupbase.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from _markupbase import * -else: - __future_module__ = True - from markupbase import * diff --git a/pype/vendor/future/moves/_thread.py b/pype/vendor/future/moves/_thread.py deleted file mode 100644 index c68018bb11..0000000000 --- a/pype/vendor/future/moves/_thread.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from _thread import * -else: - __future_module__ = True - from thread import * diff --git a/pype/vendor/future/moves/builtins.py b/pype/vendor/future/moves/builtins.py deleted file mode 100644 index e4b6221d59..0000000000 --- a/pype/vendor/future/moves/builtins.py +++ /dev/null @@ -1,10 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from builtins import * -else: - __future_module__ = True - from __builtin__ import * - # Overwrite any old definitions with the equivalent future.builtins ones: - from future.builtins import * diff --git a/pype/vendor/future/moves/collections.py b/pype/vendor/future/moves/collections.py deleted file mode 100644 index 664ee6a3d0..0000000000 --- a/pype/vendor/future/moves/collections.py +++ 
/dev/null @@ -1,18 +0,0 @@ -from __future__ import absolute_import -import sys - -from future.utils import PY2, PY26 -__future_module__ = True - -from collections import * - -if PY2: - from UserDict import UserDict - from UserList import UserList - from UserString import UserString - -if PY26: - from future.backports.misc import OrderedDict, Counter - -if sys.version_info < (3, 3): - from future.backports.misc import ChainMap, _count_elements diff --git a/pype/vendor/future/moves/configparser.py b/pype/vendor/future/moves/configparser.py deleted file mode 100644 index 33d9cf9533..0000000000 --- a/pype/vendor/future/moves/configparser.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY2 - -if PY2: - from ConfigParser import * -else: - from configparser import * diff --git a/pype/vendor/future/moves/copyreg.py b/pype/vendor/future/moves/copyreg.py deleted file mode 100644 index 21c7a42f21..0000000000 --- a/pype/vendor/future/moves/copyreg.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from copyreg import * -else: - __future_module__ = True - from copy_reg import * diff --git a/pype/vendor/future/moves/dbm/__init__.py b/pype/vendor/future/moves/dbm/__init__.py deleted file mode 100644 index 626b406f7f..0000000000 --- a/pype/vendor/future/moves/dbm/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from dbm import * -else: - __future_module__ = True - from whichdb import * - from anydbm import * - -# Py3.3's dbm/__init__.py imports ndbm but doesn't expose it via __all__. -# In case some (badly written) code depends on dbm.ndbm after import dbm, -# we simulate this: -if PY3: - from dbm import ndbm -else: - try: - from future.moves.dbm import ndbm - except ImportError: - ndbm = None diff --git a/pype/vendor/future/moves/dbm/dumb.py b/pype/vendor/future/moves/dbm/dumb.py deleted file mode 100644 index 528383f6d8..0000000000 --- a/pype/vendor/future/moves/dbm/dumb.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from dbm.dumb import * -else: - __future_module__ = True - from dumbdbm import * diff --git a/pype/vendor/future/moves/dbm/gnu.py b/pype/vendor/future/moves/dbm/gnu.py deleted file mode 100644 index 68ccf67b9a..0000000000 --- a/pype/vendor/future/moves/dbm/gnu.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from dbm.gnu import * -else: - __future_module__ = True - from gdbm import * diff --git a/pype/vendor/future/moves/dbm/ndbm.py b/pype/vendor/future/moves/dbm/ndbm.py deleted file mode 100644 index 8c6fff8ab7..0000000000 --- a/pype/vendor/future/moves/dbm/ndbm.py +++ /dev/null @@ -1,9 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from dbm.ndbm import * -else: - __future_module__ = True - from dbm import * diff --git a/pype/vendor/future/moves/html/__init__.py b/pype/vendor/future/moves/html/__init__.py deleted file mode 100644 index 22ed6e7d2c..0000000000 --- a/pype/vendor/future/moves/html/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 -__future_module__ = True - -if PY3: - from html import * -else: - # cgi.escape isn't good enough for the single Py3.3 html test to pass. - # Define it inline here instead. From the Py3.4 stdlib. 
Note that the
-    # html.escape() function from the Py3.3 stdlib is not suitable for use on
-    # Py2.x.
-    """
-    General functions for HTML manipulation.
-    """
-
-    def escape(s, quote=True):
-        """
-        Replace special characters "&", "<" and ">" to HTML-safe sequences.
-        If the optional flag quote is true (the default), the quotation mark
-        characters, both double quote (") and single quote (') characters are also
-        translated.
-        """
-        s = s.replace("&", "&amp;") # Must be done first!
-        s = s.replace("<", "&lt;")
-        s = s.replace(">", "&gt;")
-        if quote:
-            s = s.replace('"', "&quot;")
-            s = s.replace('\'', "&#x27;")
-        return s
-
-    __all__ = ['escape']
diff --git a/pype/vendor/future/moves/html/entities.py b/pype/vendor/future/moves/html/entities.py
deleted file mode 100644
index 56a8860911..0000000000
--- a/pype/vendor/future/moves/html/entities.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import absolute_import
-from future.utils import PY3
-
-if PY3:
-    from html.entities import *
-else:
-    __future_module__ = True
-    from htmlentitydefs import *
diff --git a/pype/vendor/future/moves/html/parser.py b/pype/vendor/future/moves/html/parser.py
deleted file mode 100644
index a6115b59f0..0000000000
--- a/pype/vendor/future/moves/html/parser.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import absolute_import
-from future.utils import PY3
-__future_module__ = True
-
-if PY3:
-    from html.parser import *
-else:
-    from HTMLParser import *
diff --git a/pype/vendor/future/moves/http/__init__.py b/pype/vendor/future/moves/http/__init__.py
deleted file mode 100644
index 917b3d71ac..0000000000
--- a/pype/vendor/future/moves/http/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from future.utils import PY3
-
-if not PY3:
-    __future_module__ = True
diff --git a/pype/vendor/future/moves/http/client.py b/pype/vendor/future/moves/http/client.py
deleted file mode 100644
index 55f9c9c1ae..0000000000
--- a/pype/vendor/future/moves/http/client.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from future.utils import PY3
-
-if PY3:
-    from http.client import *
-else:
-    from httplib import *
-    from httplib import HTTPMessage
-    __future_module__ = True
diff --git a/pype/vendor/future/moves/http/cookiejar.py b/pype/vendor/future/moves/http/cookiejar.py
deleted file mode 100644
index ea00df7720..0000000000
--- a/pype/vendor/future/moves/http/cookiejar.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import absolute_import
-from future.utils import PY3
-
-if PY3:
-    from http.cookiejar import *
-else:
-    __future_module__ = True
-    from cookielib import *
diff --git a/pype/vendor/future/moves/http/cookies.py b/pype/vendor/future/moves/http/cookies.py
deleted file mode 100644
index 1b74fe2dd7..0000000000
--- a/pype/vendor/future/moves/http/cookies.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from __future__ import absolute_import
-from future.utils import PY3
-
-if PY3:
-    from http.cookies import *
-else:
-    __future_module__ = True
-    from Cookie import *
-    from Cookie import Morsel # left out of __all__ on Py2.7!
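# The moves modules above (and below) all wrap the same conditional import:
# the Python 3 module name when available, the legacy Python 2 name
# otherwise. For a single name the classic try/except idiom achieves the
# same thing without any vendored machinery; a minimal standalone sketch:
try:
    from http.cookies import SimpleCookie  # Python 3 location
except ImportError:
    from Cookie import SimpleCookie        # Python 2 location

cookie = SimpleCookie()
cookie["session"] = "abc123"
print(cookie.output())  # Set-Cookie: session=abc123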
diff --git a/pype/vendor/future/moves/http/server.py b/pype/vendor/future/moves/http/server.py deleted file mode 100644 index 4e75cc1dec..0000000000 --- a/pype/vendor/future/moves/http/server.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from http.server import * -else: - __future_module__ = True - from BaseHTTPServer import * - from CGIHTTPServer import * - from SimpleHTTPServer import * - try: - from CGIHTTPServer import _url_collapse_path # needed for a test - except ImportError: - try: - # Python 2.7.0 to 2.7.3 - from CGIHTTPServer import ( - _url_collapse_path_split as _url_collapse_path) - except ImportError: - # Doesn't exist on Python 2.6.x. Ignore it. - pass diff --git a/pype/vendor/future/moves/itertools.py b/pype/vendor/future/moves/itertools.py deleted file mode 100644 index e5eb20d5d5..0000000000 --- a/pype/vendor/future/moves/itertools.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import - -from itertools import * -try: - zip_longest = izip_longest - filterfalse = ifilterfalse -except NameError: - pass diff --git a/pype/vendor/future/moves/pickle.py b/pype/vendor/future/moves/pickle.py deleted file mode 100644 index c53d693925..0000000000 --- a/pype/vendor/future/moves/pickle.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from pickle import * -else: - __future_module__ = True - try: - from cPickle import * - except ImportError: - from pickle import * diff --git a/pype/vendor/future/moves/queue.py b/pype/vendor/future/moves/queue.py deleted file mode 100644 index 1cb1437d74..0000000000 --- a/pype/vendor/future/moves/queue.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from queue import * -else: - __future_module__ = True - from Queue import * diff --git a/pype/vendor/future/moves/reprlib.py b/pype/vendor/future/moves/reprlib.py deleted file mode 100644 index a313a13a49..0000000000 --- a/pype/vendor/future/moves/reprlib.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from reprlib import * -else: - __future_module__ = True - from repr import * diff --git a/pype/vendor/future/moves/socketserver.py b/pype/vendor/future/moves/socketserver.py deleted file mode 100644 index 062e0848de..0000000000 --- a/pype/vendor/future/moves/socketserver.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from socketserver import * -else: - __future_module__ = True - from SocketServer import * diff --git a/pype/vendor/future/moves/subprocess.py b/pype/vendor/future/moves/subprocess.py deleted file mode 100644 index 43ffd2ac23..0000000000 --- a/pype/vendor/future/moves/subprocess.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY2, PY26 - -from subprocess import * - -if PY2: - __future_module__ = True - from commands import getoutput, getstatusoutput - -if PY26: - from future.backports.misc import check_output diff --git a/pype/vendor/future/moves/sys.py b/pype/vendor/future/moves/sys.py deleted file mode 100644 index 1293bcb070..0000000000 --- a/pype/vendor/future/moves/sys.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY2 - -from sys import * - -if PY2: - from __builtin__ import intern diff --git 
a/pype/vendor/future/moves/test/__init__.py b/pype/vendor/future/moves/test/__init__.py deleted file mode 100644 index 5cf428b6ec..0000000000 --- a/pype/vendor/future/moves/test/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if not PY3: - __future_module__ = True diff --git a/pype/vendor/future/moves/test/support.py b/pype/vendor/future/moves/test/support.py deleted file mode 100644 index ab189f4060..0000000000 --- a/pype/vendor/future/moves/test/support.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import absolute_import -from future.standard_library import suspend_hooks -from future.utils import PY3 - -if PY3: - from test.support import * -else: - __future_module__ = True - with suspend_hooks(): - from test.test_support import * - diff --git a/pype/vendor/future/moves/tkinter/__init__.py b/pype/vendor/future/moves/tkinter/__init__.py deleted file mode 100644 index bc50b4c602..0000000000 --- a/pype/vendor/future/moves/tkinter/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 -__future_module__ = True - -if not PY3: - from Tkinter import * - from Tkinter import (_cnfmerge, _default_root, _flatten, _join, _setit, - _splitdict, _stringify, _support_default_root, _test, - _tkinter) -else: - from tkinter import * diff --git a/pype/vendor/future/moves/tkinter/colorchooser.py b/pype/vendor/future/moves/tkinter/colorchooser.py deleted file mode 100644 index 5e7c97f444..0000000000 --- a/pype/vendor/future/moves/tkinter/colorchooser.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.colorchooser import * -else: - try: - from tkColorChooser import * - except ImportError: - raise ImportError('The tkColorChooser module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/commondialog.py b/pype/vendor/future/moves/tkinter/commondialog.py deleted file mode 100644 index 7747a0ba0b..0000000000 --- a/pype/vendor/future/moves/tkinter/commondialog.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.commondialog import * -else: - try: - from tkCommonDialog import * - except ImportError: - raise ImportError('The tkCommonDialog module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/constants.py b/pype/vendor/future/moves/tkinter/constants.py deleted file mode 100644 index 99216f33d0..0000000000 --- a/pype/vendor/future/moves/tkinter/constants.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.constants import * -else: - try: - from Tkconstants import * - except ImportError: - raise ImportError('The Tkconstants module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/dialog.py b/pype/vendor/future/moves/tkinter/dialog.py deleted file mode 100644 index a5b777815a..0000000000 --- a/pype/vendor/future/moves/tkinter/dialog.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.dialog import * -else: - try: - from Dialog import * - except ImportError: - raise ImportError('The Dialog module is missing. 
Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/dnd.py b/pype/vendor/future/moves/tkinter/dnd.py deleted file mode 100644 index 748b111a2b..0000000000 --- a/pype/vendor/future/moves/tkinter/dnd.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.dnd import * -else: - try: - from Tkdnd import * - except ImportError: - raise ImportError('The Tkdnd module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/filedialog.py b/pype/vendor/future/moves/tkinter/filedialog.py deleted file mode 100644 index 35e21ac0eb..0000000000 --- a/pype/vendor/future/moves/tkinter/filedialog.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.filedialog import * -else: - try: - from FileDialog import * - except ImportError: - raise ImportError('The FileDialog module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/font.py b/pype/vendor/future/moves/tkinter/font.py deleted file mode 100644 index 63d86dc73c..0000000000 --- a/pype/vendor/future/moves/tkinter/font.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.font import * -else: - try: - from tkFont import * - except ImportError: - raise ImportError('The tkFont module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/messagebox.py b/pype/vendor/future/moves/tkinter/messagebox.py deleted file mode 100644 index 3ed52e1fec..0000000000 --- a/pype/vendor/future/moves/tkinter/messagebox.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.messagebox import * -else: - try: - from tkMessageBox import * - except ImportError: - raise ImportError('The tkMessageBox module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/scrolledtext.py b/pype/vendor/future/moves/tkinter/scrolledtext.py deleted file mode 100644 index 13bd660d96..0000000000 --- a/pype/vendor/future/moves/tkinter/scrolledtext.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.scrolledtext import * -else: - try: - from ScrolledText import * - except ImportError: - raise ImportError('The ScrolledText module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/simpledialog.py b/pype/vendor/future/moves/tkinter/simpledialog.py deleted file mode 100644 index e952fa994e..0000000000 --- a/pype/vendor/future/moves/tkinter/simpledialog.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.simpledialog import * -else: - try: - from SimpleDialog import * - except ImportError: - raise ImportError('The SimpleDialog module is missing. 
Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/tix.py b/pype/vendor/future/moves/tkinter/tix.py deleted file mode 100644 index 019df6f73f..0000000000 --- a/pype/vendor/future/moves/tkinter/tix.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.tix import * -else: - try: - from Tix import * - except ImportError: - raise ImportError('The Tix module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/tkinter/ttk.py b/pype/vendor/future/moves/tkinter/ttk.py deleted file mode 100644 index 22ac9774c4..0000000000 --- a/pype/vendor/future/moves/tkinter/ttk.py +++ /dev/null @@ -1,13 +0,0 @@ -from __future__ import absolute_import - -from future.utils import PY3 - -if PY3: - from tkinter.ttk import * -else: - try: - from ttk import * - except ImportError: - raise ImportError('The ttk module is missing. Does your Py2 ' - 'installation include tkinter?') - diff --git a/pype/vendor/future/moves/urllib/__init__.py b/pype/vendor/future/moves/urllib/__init__.py deleted file mode 100644 index 8d1298c9f7..0000000000 --- a/pype/vendor/future/moves/urllib/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if not PY3: - __future_module__ = True - diff --git a/pype/vendor/future/moves/urllib/error.py b/pype/vendor/future/moves/urllib/error.py deleted file mode 100644 index ae49255f08..0000000000 --- a/pype/vendor/future/moves/urllib/error.py +++ /dev/null @@ -1,16 +0,0 @@ -from __future__ import absolute_import -from future.standard_library import suspend_hooks - -from future.utils import PY3 - -if PY3: - from urllib.error import * -else: - __future_module__ = True - - # We use this method to get at the original Py2 urllib before any renaming magic - # ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError - - with suspend_hooks(): - from urllib import ContentTooShortError - from urllib2 import URLError, HTTPError diff --git a/pype/vendor/future/moves/urllib/parse.py b/pype/vendor/future/moves/urllib/parse.py deleted file mode 100644 index 832dfb5164..0000000000 --- a/pype/vendor/future/moves/urllib/parse.py +++ /dev/null @@ -1,28 +0,0 @@ -from __future__ import absolute_import -from future.standard_library import suspend_hooks - -from future.utils import PY3 - -if PY3: - from urllib.parse import * -else: - __future_module__ = True - from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl, - urldefrag, urljoin, urlparse, urlsplit, - urlunparse, urlunsplit) - - # we use this method to get at the original py2 urllib before any renaming - # quote = sys.py2_modules['urllib'].quote - # quote_plus = sys.py2_modules['urllib'].quote_plus - # unquote = sys.py2_modules['urllib'].unquote - # unquote_plus = sys.py2_modules['urllib'].unquote_plus - # urlencode = sys.py2_modules['urllib'].urlencode - # splitquery = sys.py2_modules['urllib'].splitquery - - with suspend_hooks(): - from urllib import (quote, - quote_plus, - unquote, - unquote_plus, - urlencode, - splitquery) diff --git a/pype/vendor/future/moves/urllib/request.py b/pype/vendor/future/moves/urllib/request.py deleted file mode 100644 index 375dc29f85..0000000000 --- a/pype/vendor/future/moves/urllib/request.py +++ /dev/null @@ -1,93 +0,0 @@ -from __future__ import absolute_import - -from future.standard_library import suspend_hooks -from future.utils import PY3 - -if PY3: - from urllib.request 
import * - # This aren't in __all__: - from urllib.request import (getproxies, - pathname2url, - proxy_bypass, - quote, - request_host, - splitattr, - splithost, - splitpasswd, - splitport, - splitquery, - splittag, - splittype, - splituser, - splitvalue, - thishost, - to_bytes, - unquote, - unwrap, - url2pathname, - urlcleanup, - urljoin, - urlopen, - urlparse, - urlretrieve, - urlsplit, - urlunparse) -else: - __future_module__ = True - with suspend_hooks(): - from urllib import * - from urllib2 import * - from urlparse import * - - # Rename: - from urllib import toBytes # missing from __all__ on Py2.6 - to_bytes = toBytes - - # from urllib import (pathname2url, - # url2pathname, - # getproxies, - # urlretrieve, - # urlcleanup, - # URLopener, - # FancyURLopener, - # proxy_bypass) - - # from urllib2 import ( - # AbstractBasicAuthHandler, - # AbstractDigestAuthHandler, - # BaseHandler, - # CacheFTPHandler, - # FileHandler, - # FTPHandler, - # HTTPBasicAuthHandler, - # HTTPCookieProcessor, - # HTTPDefaultErrorHandler, - # HTTPDigestAuthHandler, - # HTTPErrorProcessor, - # HTTPHandler, - # HTTPPasswordMgr, - # HTTPPasswordMgrWithDefaultRealm, - # HTTPRedirectHandler, - # HTTPSHandler, - # URLError, - # build_opener, - # install_opener, - # OpenerDirector, - # ProxyBasicAuthHandler, - # ProxyDigestAuthHandler, - # ProxyHandler, - # Request, - # UnknownHandler, - # urlopen, - # ) - - # from urlparse import ( - # urldefrag - # urljoin, - # urlparse, - # urlunparse, - # urlsplit, - # urlunsplit, - # parse_qs, - # parse_q" - # ) diff --git a/pype/vendor/future/moves/urllib/response.py b/pype/vendor/future/moves/urllib/response.py deleted file mode 100644 index 120ea13e7e..0000000000 --- a/pype/vendor/future/moves/urllib/response.py +++ /dev/null @@ -1,13 +0,0 @@ -from future import standard_library -from future.utils import PY3 - -if PY3: - from urllib.response import * -else: - __future_module__ = True - with standard_library.suspend_hooks(): - from urllib import (addbase, - addclosehook, - addinfo, - addinfourl) - diff --git a/pype/vendor/future/moves/urllib/robotparser.py b/pype/vendor/future/moves/urllib/robotparser.py deleted file mode 100644 index 0dc8f5715c..0000000000 --- a/pype/vendor/future/moves/urllib/robotparser.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from urllib.robotparser import * -else: - __future_module__ = True - from robotparser import * diff --git a/pype/vendor/future/moves/winreg.py b/pype/vendor/future/moves/winreg.py deleted file mode 100644 index c8b147568c..0000000000 --- a/pype/vendor/future/moves/winreg.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from winreg import * -else: - __future_module__ = True - from _winreg import * diff --git a/pype/vendor/future/moves/xmlrpc/__init__.py b/pype/vendor/future/moves/xmlrpc/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/future/moves/xmlrpc/client.py b/pype/vendor/future/moves/xmlrpc/client.py deleted file mode 100644 index 4708cf8992..0000000000 --- a/pype/vendor/future/moves/xmlrpc/client.py +++ /dev/null @@ -1,7 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from xmlrpc.client import * -else: - from xmlrpclib import * diff --git a/pype/vendor/future/moves/xmlrpc/server.py b/pype/vendor/future/moves/xmlrpc/server.py deleted file mode 100644 index 1a8af3454b..0000000000 --- 
a/pype/vendor/future/moves/xmlrpc/server.py +++ /dev/null @@ -1,7 +0,0 @@ -from __future__ import absolute_import -from future.utils import PY3 - -if PY3: - from xmlrpc.server import * -else: - from xmlrpclib import * diff --git a/pype/vendor/future/standard_library/__init__.py b/pype/vendor/future/standard_library/__init__.py deleted file mode 100644 index aca58c3e5c..0000000000 --- a/pype/vendor/future/standard_library/__init__.py +++ /dev/null @@ -1,813 +0,0 @@ -""" -Python 3 reorganized the standard library (PEP 3108). This module exposes -several standard library modules to Python 2 under their new Python 3 -names. - -It is designed to be used as follows:: - - from future import standard_library - standard_library.install_aliases() - -And then these normal Py3 imports work on both Py3 and Py2:: - - import builtins - import copyreg - import queue - import reprlib - import socketserver - import winreg # on Windows only - import test.support - import html, html.parser, html.entites - import http, http.client, http.server - import http.cookies, http.cookiejar - import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser - import xmlrpc.client, xmlrpc.server - - import _thread - import _dummy_thread - import _markupbase - - from itertools import filterfalse, zip_longest - from sys import intern - from collections import UserDict, UserList, UserString - from collections import OrderedDict, Counter # even on Py2.6 - from subprocess import getoutput, getstatusoutput - from subprocess import check_output # even on Py2.6 - -(The renamed modules and functions are still available under their old -names on Python 2.) - -This is a cleaner alternative to this idiom (see -http://docs.pythonsprints.com/python3_porting/py-porting.html):: - - try: - import queue - except ImportError: - import Queue as queue - - -Limitations ------------ -We don't currently support these modules, but would like to:: - - import dbm - import dbm.dumb - import dbm.gnu - import collections.abc # on Py33 - import pickle # should (optionally) bring in cPickle on Python 2 - -""" - -from __future__ import absolute_import, division, print_function - -import sys -import logging -import imp -import contextlib -import types -import copy -import os - -# Make a dedicated logger; leave the root logger to be configured -# by the application. -flog = logging.getLogger('future_stdlib') -_formatter = logging.Formatter(logging.BASIC_FORMAT) -_handler = logging.StreamHandler() -_handler.setFormatter(_formatter) -flog.addHandler(_handler) -flog.setLevel(logging.WARN) - -from future.utils import PY2, PY3 - -# The modules that are defined under the same names on Py3 but with -# different contents in a significant way (e.g. 
submodules) are: -# pickle (fast one) -# dbm -# urllib -# test -# email - -REPLACED_MODULES = set(['test', 'urllib', 'pickle', 'dbm']) # add email and dbm when we support it - -# The following module names are not present in Python 2.x, so they cause no -# potential clashes between the old and new names: -# http -# html -# tkinter -# xmlrpc -# Keys: Py2 / real module names -# Values: Py3 / simulated module names -RENAMES = { - # 'cStringIO': 'io', # there's a new io module in Python 2.6 - # that provides StringIO and BytesIO - # 'StringIO': 'io', # ditto - # 'cPickle': 'pickle', - '__builtin__': 'builtins', - 'copy_reg': 'copyreg', - 'Queue': 'queue', - 'future.moves.socketserver': 'socketserver', - 'ConfigParser': 'configparser', - 'repr': 'reprlib', - # 'FileDialog': 'tkinter.filedialog', - # 'tkFileDialog': 'tkinter.filedialog', - # 'SimpleDialog': 'tkinter.simpledialog', - # 'tkSimpleDialog': 'tkinter.simpledialog', - # 'tkColorChooser': 'tkinter.colorchooser', - # 'tkCommonDialog': 'tkinter.commondialog', - # 'Dialog': 'tkinter.dialog', - # 'Tkdnd': 'tkinter.dnd', - # 'tkFont': 'tkinter.font', - # 'tkMessageBox': 'tkinter.messagebox', - # 'ScrolledText': 'tkinter.scrolledtext', - # 'Tkconstants': 'tkinter.constants', - # 'Tix': 'tkinter.tix', - # 'ttk': 'tkinter.ttk', - # 'Tkinter': 'tkinter', - '_winreg': 'winreg', - 'thread': '_thread', - 'dummy_thread': '_dummy_thread', - # 'anydbm': 'dbm', # causes infinite import loop - # 'whichdb': 'dbm', # causes infinite import loop - # anydbm and whichdb are handled by fix_imports2 - # 'dbhash': 'dbm.bsd', - # 'dumbdbm': 'dbm.dumb', - # 'dbm': 'dbm.ndbm', - # 'gdbm': 'dbm.gnu', - 'future.moves.xmlrpc': 'xmlrpc', - # 'future.backports.email': 'email', # for use by urllib - # 'DocXMLRPCServer': 'xmlrpc.server', - # 'SimpleXMLRPCServer': 'xmlrpc.server', - # 'httplib': 'http.client', - # 'htmlentitydefs' : 'html.entities', - # 'HTMLParser' : 'html.parser', - # 'Cookie': 'http.cookies', - # 'cookielib': 'http.cookiejar', - # 'BaseHTTPServer': 'http.server', - # 'SimpleHTTPServer': 'http.server', - # 'CGIHTTPServer': 'http.server', - # 'future.backports.test': 'test', # primarily for renaming test_support to support - # 'commands': 'subprocess', - # 'urlparse' : 'urllib.parse', - # 'robotparser' : 'urllib.robotparser', - # 'abc': 'collections.abc', # for Py33 - # 'future.utils.six.moves.html': 'html', - # 'future.utils.six.moves.http': 'http', - 'future.moves.html': 'html', - 'future.moves.http': 'http', - # 'future.backports.urllib': 'urllib', - # 'future.utils.six.moves.urllib': 'urllib', - 'future.moves._markupbase': '_markupbase', - } - - -# It is complicated and apparently brittle to mess around with the -# ``sys.modules`` cache in order to support "import urllib" meaning two -# different things (Py2.7 urllib and backported Py3.3-like urllib) in different -# contexts. So we require explicit imports for these modules. -assert len(set(RENAMES.values()) & set(REPLACED_MODULES)) == 0 - - -# Harmless renames that we can insert. 
-# These modules need names from elsewhere being added to them: -# subprocess: should provide getoutput and other fns from commands -# module but these fns are missing: getstatus, mk2arg, -# mkarg -# re: needs an ASCII constant that works compatibly with Py3 - -# etc: see lib2to3/fixes/fix_imports.py - -# (New module name, new object name, old module name, old object name) -MOVES = [('collections', 'UserList', 'UserList', 'UserList'), - ('collections', 'UserDict', 'UserDict', 'UserDict'), - ('collections', 'UserString','UserString', 'UserString'), - ('itertools', 'filterfalse','itertools', 'ifilterfalse'), - ('itertools', 'zip_longest','itertools', 'izip_longest'), - ('sys', 'intern','__builtin__', 'intern'), - # The re module has no ASCII flag in Py2, but this is the default. - # Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one - # (and it exists on Py2.6+). - ('re', 'ASCII','stat', 'ST_MODE'), - ('base64', 'encodebytes','base64', 'encodestring'), - ('base64', 'decodebytes','base64', 'decodestring'), - ('subprocess', 'getoutput', 'commands', 'getoutput'), - ('subprocess', 'getstatusoutput', 'commands', 'getstatusoutput'), - ('subprocess', 'check_output', 'future.backports.misc', 'check_output'), - ('math', 'ceil', 'future.backports.misc', 'ceil'), - ('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'), - ('collections', 'Counter', 'future.backports.misc', 'Counter'), - ('itertools', 'count', 'future.backports.misc', 'count'), - ('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'), - ('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'), - -# This is no use, since "import urllib.request" etc. still fails: -# ('urllib', 'error', 'future.moves.urllib', 'error'), -# ('urllib', 'parse', 'future.moves.urllib', 'parse'), -# ('urllib', 'request', 'future.moves.urllib', 'request'), -# ('urllib', 'response', 'future.moves.urllib', 'response'), -# ('urllib', 'robotparser', 'future.moves.urllib', 'robotparser'), - ] - - -# A minimal example of an import hook: -# class WarnOnImport(object): -# def __init__(self, *args): -# self.module_names = args -# -# def find_module(self, fullname, path=None): -# if fullname in self.module_names: -# self.path = path -# return self -# return None -# -# def load_module(self, name): -# if name in sys.modules: -# return sys.modules[name] -# module_info = imp.find_module(name, self.path) -# module = imp.load_module(name, *module_info) -# sys.modules[name] = module -# flog.warning("Imported deprecated module %s", name) -# return module - - -class RenameImport(object): - """ - A class for import hooks mapping Py3 module names etc. to the Py2 equivalents. - """ - # Different RenameImport classes are created when importing this module from - # different source files. This causes isinstance(hook, RenameImport) checks - # to produce inconsistent results. We add this RENAMER attribute here so - # remove_hooks() and install_hooks() can find instances of these classes - # easily: - RENAMER = True - - def __init__(self, old_to_new): - ''' - Pass in a dictionary-like object mapping from old names to new - names. E.g. 
{'ConfigParser': 'configparser', 'cPickle': 'pickle'} - ''' - self.old_to_new = old_to_new - both = set(old_to_new.keys()) & set(old_to_new.values()) - assert (len(both) == 0 and - len(set(old_to_new.values())) == len(old_to_new.values())), \ - 'Ambiguity in renaming (handler not implemented)' - self.new_to_old = dict((new, old) for (old, new) in old_to_new.items()) - - def find_module(self, fullname, path=None): - # Handles hierarchical importing: package.module.module2 - new_base_names = set([s.split('.')[0] for s in self.new_to_old]) - # Before v0.12: Was: if fullname in set(self.old_to_new) | new_base_names: - if fullname in new_base_names: - return self - return None - - def load_module(self, name): - path = None - if name in sys.modules: - return sys.modules[name] - elif name in self.new_to_old: - # New name. Look up the corresponding old (Py2) name: - oldname = self.new_to_old[name] - module = self._find_and_load_module(oldname) - # module.__future_module__ = True - else: - module = self._find_and_load_module(name) - # In any case, make it available under the requested (Py3) name - sys.modules[name] = module - return module - - def _find_and_load_module(self, name, path=None): - """ - Finds and loads it. But if there's a . in the name, handles it - properly. - """ - bits = name.split('.') - while len(bits) > 1: - # Treat the first bit as a package - packagename = bits.pop(0) - package = self._find_and_load_module(packagename, path) - try: - path = package.__path__ - except AttributeError: - # This could be e.g. moves. - flog.debug('Package {0} has no __path__.'.format(package)) - if name in sys.modules: - return sys.modules[name] - flog.debug('What to do here?') - - name = bits[0] - module_info = imp.find_module(name, path) - return imp.load_module(name, *module_info) - - -class hooks(object): - """ - Acts as a context manager. Saves the state of sys.modules and restores it - after the 'with' block. - - Use like this: - - >>> from future import standard_library - >>> with standard_library.hooks(): - ... import http.client - >>> import requests - - For this to work, http.client will be scrubbed from sys.modules after the - 'with' block. That way the modules imported in the 'with' block will - continue to be accessible in the current namespace but not from any - imported modules (like requests). - """ - def __enter__(self): - # flog.debug('Entering hooks context manager') - self.old_sys_modules = copy.copy(sys.modules) - self.hooks_were_installed = detect_hooks() - # self.scrubbed = scrub_py2_sys_modules() - install_hooks() - return self - - def __exit__(self, *args): - # flog.debug('Exiting hooks context manager') - # restore_sys_modules(self.scrubbed) - if not self.hooks_were_installed: - remove_hooks() - # scrub_future_sys_modules() - -# Sanity check for is_py2_stdlib_module(): We aren't replacing any -# builtin modules names: -if PY2: - assert len(set(RENAMES.values()) & set(sys.builtin_module_names)) == 0 - - -def is_py2_stdlib_module(m): - """ - Tries to infer whether the module m is from the Python 2 standard library. - This may not be reliable on all systems. - """ - if PY3: - return False - if not 'stdlib_path' in is_py2_stdlib_module.__dict__: - stdlib_files = [contextlib.__file__, os.__file__, copy.__file__] - stdlib_paths = [os.path.split(f)[0] for f in stdlib_files] - if not len(set(stdlib_paths)) == 1: - # This seems to happen on travis-ci.org. Very strange. We'll try to - # ignore it. 
- flog.warn('Multiple locations found for the Python standard ' - 'library: %s' % stdlib_paths) - # Choose the first one arbitrarily - is_py2_stdlib_module.stdlib_path = stdlib_paths[0] - - if m.__name__ in sys.builtin_module_names: - return True - - if hasattr(m, '__file__'): - modpath = os.path.split(m.__file__) - if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and - 'site-packages' not in modpath[0]): - return True - - return False - - -def scrub_py2_sys_modules(): - """ - Removes any Python 2 standard library modules from ``sys.modules`` that - would interfere with Py3-style imports using import hooks. Examples are - modules with the same names (like urllib or email). - - (Note that currently import hooks are disabled for modules like these - with ambiguous names anyway ...) - """ - if PY3: - return {} - scrubbed = {} - for modulename in REPLACED_MODULES & set(RENAMES.keys()): - if not modulename in sys.modules: - continue - - module = sys.modules[modulename] - - if is_py2_stdlib_module(module): - flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename)) - scrubbed[modulename] = sys.modules[modulename] - del sys.modules[modulename] - return scrubbed - - -def scrub_future_sys_modules(): - """ - Deprecated. - """ - return {} - -class suspend_hooks(object): - """ - Acts as a context manager. Use like this: - - >>> from future import standard_library - >>> standard_library.install_hooks() - >>> import http.client - >>> # ... - >>> with standard_library.suspend_hooks(): - >>> import requests # incompatible with ``future``'s standard library hooks - - If the hooks were disabled before the context, they are not installed when - the context is left. - """ - def __enter__(self): - self.hooks_were_installed = detect_hooks() - remove_hooks() - # self.scrubbed = scrub_future_sys_modules() - return self - - def __exit__(self, *args): - if self.hooks_were_installed: - install_hooks() - # restore_sys_modules(self.scrubbed) - - -def restore_sys_modules(scrubbed): - """ - Add any previously scrubbed modules back to the sys.modules cache, - but only if it's safe to do so. - """ - clash = set(sys.modules) & set(scrubbed) - if len(clash) != 0: - # If several, choose one arbitrarily to raise an exception about - first = list(clash)[0] - raise ImportError('future module {} clashes with Py2 module' - .format(first)) - sys.modules.update(scrubbed) - - -def install_aliases(): - """ - Monkey-patches the standard library in Py2.6/7 to provide - aliases for better Py3 compatibility. 
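A minimal usage sketch of the aliasing documented above (assumes the python-future package is importable; on Py3 install_aliases() is a no-op and the Py3 names resolve natively):

    from future import standard_library
    standard_library.install_aliases()

    # Py3-style names now work on Py2 as well:
    from itertools import filterfalse
    from urllib.parse import urlparse

    print(urlparse('http://example.com/path').netloc)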
- """ - if PY3: - return - # if hasattr(install_aliases, 'run_already'): - # return - for (newmodname, newobjname, oldmodname, oldobjname) in MOVES: - __import__(newmodname) - # We look up the module in sys.modules because __import__ just returns the - # top-level package: - newmod = sys.modules[newmodname] - # newmod.__future_module__ = True - - __import__(oldmodname) - oldmod = sys.modules[oldmodname] - - obj = getattr(oldmod, oldobjname) - setattr(newmod, newobjname, obj) - - # Hack for urllib so it appears to have the same structure on Py2 as on Py3 - import urllib - from future.backports.urllib import request - from future.backports.urllib import response - from future.backports.urllib import parse - from future.backports.urllib import error - from future.backports.urllib import robotparser - urllib.request = request - urllib.response = response - urllib.parse = parse - urllib.error = error - urllib.robotparser = robotparser - sys.modules['urllib.request'] = request - sys.modules['urllib.response'] = response - sys.modules['urllib.parse'] = parse - sys.modules['urllib.error'] = error - sys.modules['urllib.robotparser'] = robotparser - - # Patch the test module so it appears to have the same structure on Py2 as on Py3 - try: - import test - except ImportError: - pass - try: - from future.moves.test import support - except ImportError: - pass - else: - test.support = support - sys.modules['test.support'] = support - - # Patch the dbm module so it appears to have the same structure on Py2 as on Py3 - try: - import dbm - except ImportError: - pass - else: - from future.moves.dbm import dumb - dbm.dumb = dumb - sys.modules['dbm.dumb'] = dumb - try: - from future.moves.dbm import gnu - except ImportError: - pass - else: - dbm.gnu = gnu - sys.modules['dbm.gnu'] = gnu - try: - from future.moves.dbm import ndbm - except ImportError: - pass - else: - dbm.ndbm = ndbm - sys.modules['dbm.ndbm'] = ndbm - - # install_aliases.run_already = True - - -def install_hooks(): - """ - This function installs the future.standard_library import hook into - sys.meta_path. - """ - if PY3: - return - - install_aliases() - - flog.debug('sys.meta_path was: {0}'.format(sys.meta_path)) - flog.debug('Installing hooks ...') - - # Add it unless it's there already - newhook = RenameImport(RENAMES) - if not detect_hooks(): - sys.meta_path.append(newhook) - flog.debug('sys.meta_path is now: {0}'.format(sys.meta_path)) - - -def enable_hooks(): - """ - Deprecated. Use install_hooks() instead. This will be removed by - ``future`` v1.0. - """ - install_hooks() - - -def remove_hooks(scrub_sys_modules=False): - """ - This function removes the import hook from sys.meta_path. - """ - if PY3: - return - flog.debug('Uninstalling hooks ...') - # Loop backwards, so deleting items keeps the ordering: - for i, hook in list(enumerate(sys.meta_path))[::-1]: - if hasattr(hook, 'RENAMER'): - del sys.meta_path[i] - - # Explicit is better than implicit. In the future the interface should - # probably change so that scrubbing the import hooks requires a separate - # function call. Left as is for now for backward compatibility with - # v0.11.x. - if scrub_sys_modules: - scrub_future_sys_modules() - - -def disable_hooks(): - """ - Deprecated. Use remove_hooks() instead. This will be removed by - ``future`` v1.0. - """ - remove_hooks() - - -def detect_hooks(): - """ - Returns True if the import hooks are installed, False if not. 
- """ - flog.debug('Detecting hooks ...') - present = any([hasattr(hook, 'RENAMER') for hook in sys.meta_path]) - if present: - flog.debug('Detected.') - else: - flog.debug('Not detected.') - return present - - -# As of v0.12, this no longer happens implicitly: -# if not PY3: -# install_hooks() - - -if not hasattr(sys, 'py2_modules'): - sys.py2_modules = {} - -def cache_py2_modules(): - """ - Currently this function is unneeded, as we are not attempting to provide import hooks - for modules with ambiguous names: email, urllib, pickle. - """ - if len(sys.py2_modules) != 0: - return - assert not detect_hooks() - import urllib - sys.py2_modules['urllib'] = urllib - - import email - sys.py2_modules['email'] = email - - import pickle - sys.py2_modules['pickle'] = pickle - - # Not all Python installations have test module. (Anaconda doesn't, for example.) - # try: - # import test - # except ImportError: - # sys.py2_modules['test'] = None - # sys.py2_modules['test'] = test - - # import dbm - # sys.py2_modules['dbm'] = dbm - - -def import_(module_name, backport=False): - """ - Pass a (potentially dotted) module name of a Python 3 standard library - module. This function imports the module compatibly on Py2 and Py3 and - returns the top-level module. - - Example use: - >>> http = import_('http.client') - >>> http = import_('http.server') - >>> urllib = import_('urllib.request') - - Then: - >>> conn = http.client.HTTPConnection(...) - >>> response = urllib.request.urlopen('http://mywebsite.com') - >>> # etc. - - Use as follows: - >>> package_name = import_(module_name) - - On Py3, equivalent to this: - - >>> import module_name - - On Py2, equivalent to this if backport=False: - - >>> from future.moves import module_name - - or to this if backport=True: - - >>> from future.backports import module_name - - except that it also handles dotted module names such as ``http.client`` - The effect then is like this: - - >>> from future.backports import module - >>> from future.backports.module import submodule - >>> module.submodule = submodule - - Note that this would be a SyntaxError in Python: - - >>> from future.backports import http.client - - """ - # Python 2.6 doesn't have importlib in the stdlib, so it requires - # the backported ``importlib`` package from PyPI as a dependency to use - # this function: - import importlib - - if PY3: - return __import__(module_name) - else: - # client.blah = blah - # Then http.client = client - # etc. - if backport: - prefix = 'future.backports' - else: - prefix = 'future.moves' - parts = prefix.split('.') + module_name.split('.') - - modules = [] - for i, part in enumerate(parts): - sofar = '.'.join(parts[:i+1]) - modules.append(importlib.import_module(sofar)) - for i, part in reversed(list(enumerate(parts))): - if i == 0: - break - setattr(modules[i-1], part, modules[i]) - - # Return the next-most top-level module after future.backports / future.moves: - return modules[2] - - -def from_import(module_name, *symbol_names, **kwargs): - """ - Example use: - >>> HTTPConnection = from_import('http.client', 'HTTPConnection') - >>> HTTPServer = from_import('http.server', 'HTTPServer') - >>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse') - - Equivalent to this on Py3: - - >>> from module_name import symbol_names[0], symbol_names[1], ... - - and this on Py2: - - >>> from future.moves.module_name import symbol_names[0], ... - - or: - - >>> from future.backports.module_name import symbol_names[0], ... 
- - except that it also handles dotted module names such as ``http.client``. - """ - - if PY3: - return __import__(module_name) - else: - if 'backport' in kwargs and bool(kwargs['backport']): - prefix = 'future.backports' - else: - prefix = 'future.moves' - parts = prefix.split('.') + module_name.split('.') - module = importlib.import_module(prefix + '.' + module_name) - output = [getattr(module, name) for name in symbol_names] - if len(output) == 1: - return output[0] - else: - return output - - -class exclude_local_folder_imports(object): - """ - A context-manager that prevents standard library modules like configparser - from being imported from the local python-future source folder on Py3. - - (This was need prior to v0.16.0 because the presence of a configparser - folder would otherwise have prevented setuptools from running on Py3. Maybe - it's not needed any more?) - """ - def __init__(self, *args): - assert len(args) > 0 - self.module_names = args - # Disallow dotted module names like http.client: - if any(['.' in m for m in self.module_names]): - raise NotImplementedError('Dotted module names are not supported') - - def __enter__(self): - self.old_sys_path = copy.copy(sys.path) - self.old_sys_modules = copy.copy(sys.modules) - if sys.version_info[0] < 3: - return - # The presence of all these indicates we've found our source folder, - # because `builtins` won't have been installed in site-packages by setup.py: - FUTURE_SOURCE_SUBFOLDERS = ['future', 'past', 'libfuturize', 'libpasteurize', 'builtins'] - - # Look for the future source folder: - for folder in self.old_sys_path: - if all([os.path.exists(os.path.join(folder, subfolder)) - for subfolder in FUTURE_SOURCE_SUBFOLDERS]): - # Found it. Remove it. - sys.path.remove(folder) - - # Ensure we import the system module: - for m in self.module_names: - # Delete the module and any submodules from sys.modules: - # for key in list(sys.modules): - # if key == m or key.startswith(m + '.'): - # try: - # del sys.modules[key] - # except KeyError: - # pass - try: - module = __import__(m, level=0) - except ImportError: - # There's a problem importing the system module. E.g. the - # winreg module is not available except on Windows. - pass - - def __exit__(self, *args): - # Restore sys.path and sys.modules: - sys.path = self.old_sys_path - for m in set(self.old_sys_modules.keys()) - set(sys.modules.keys()): - sys.modules[m] = self.old_sys_modules[m] - -TOP_LEVEL_MODULES = ['builtins', - 'copyreg', - 'html', - 'http', - 'queue', - 'reprlib', - 'socketserver', - 'test', - 'tkinter', - 'winreg', - 'xmlrpc', - '_dummy_thread', - '_markupbase', - '_thread', - ] - -def import_top_level_modules(): - with exclude_local_folder_imports(*TOP_LEVEL_MODULES): - for m in TOP_LEVEL_MODULES: - try: - __import__(m) - except ImportError: # e.g. 
winreg - pass diff --git a/pype/vendor/future/tests/__init__.py b/pype/vendor/future/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/future/tests/base.py b/pype/vendor/future/tests/base.py deleted file mode 100644 index 546c779b09..0000000000 --- a/pype/vendor/future/tests/base.py +++ /dev/null @@ -1,531 +0,0 @@ -from __future__ import print_function, absolute_import -import os -import tempfile -import unittest -import sys -import re -import warnings -import io -from textwrap import dedent - -from future.utils import bind_method, PY26, PY3, PY2, PY27 -from future.moves.subprocess import check_output, STDOUT, CalledProcessError - -if PY26: - import unittest2 as unittest - - -def reformat_code(code): - """ - Removes any leading \n and dedents. - """ - if code.startswith('\n'): - code = code[1:] - return dedent(code) - - -def order_future_lines(code): - """ - Returns the code block with any ``__future__`` import lines sorted, and - then any ``future`` import lines sorted, then any ``builtins`` import lines - sorted. - - This only sorts the lines within the expected blocks. - - See test_order_future_lines() for an example. - """ - - # We need .splitlines(keepends=True), which doesn't exist on Py2, - # so we use this instead: - lines = code.split('\n') - - uufuture_line_numbers = [i for i, line in enumerate(lines) - if line.startswith('from __future__ import ')] - - future_line_numbers = [i for i, line in enumerate(lines) - if line.startswith('from future') - or line.startswith('from past')] - - builtins_line_numbers = [i for i, line in enumerate(lines) - if line.startswith('from builtins')] - - assert code.lstrip() == code, ('internal usage error: ' - 'dedent the code before calling order_future_lines()') - - def mymax(numbers): - return max(numbers) if len(numbers) > 0 else 0 - - def mymin(numbers): - return min(numbers) if len(numbers) > 0 else float('inf') - - assert mymax(uufuture_line_numbers) <= mymin(future_line_numbers), \ - 'the __future__ and future imports are out of order' - - # assert mymax(future_line_numbers) <= mymin(builtins_line_numbers), \ - # 'the future and builtins imports are out of order' - - uul = sorted([lines[i] for i in uufuture_line_numbers]) - sorted_uufuture_lines = dict(zip(uufuture_line_numbers, uul)) - - fl = sorted([lines[i] for i in future_line_numbers]) - sorted_future_lines = dict(zip(future_line_numbers, fl)) - - bl = sorted([lines[i] for i in builtins_line_numbers]) - sorted_builtins_lines = dict(zip(builtins_line_numbers, bl)) - - # Replace the old unsorted "from __future__ import ..." lines with the - # new sorted ones: - new_lines = [] - for i in range(len(lines)): - if i in uufuture_line_numbers: - new_lines.append(sorted_uufuture_lines[i]) - elif i in future_line_numbers: - new_lines.append(sorted_future_lines[i]) - elif i in builtins_line_numbers: - new_lines.append(sorted_builtins_lines[i]) - else: - new_lines.append(lines[i]) - return '\n'.join(new_lines) - - -class VerboseCalledProcessError(CalledProcessError): - """ - Like CalledProcessError, but it displays more information (message and - script output) for diagnosing test failures etc. 
- """ - def __init__(self, msg, returncode, cmd, output=None): - self.msg = msg - self.returncode = returncode - self.cmd = cmd - self.output = output - - def __str__(self): - return ("Command '%s' failed with exit status %d\nMessage: %s\nOutput: %s" - % (self.cmd, self.returncode, self.msg, self.output)) - -class FuturizeError(VerboseCalledProcessError): - pass - -class PasteurizeError(VerboseCalledProcessError): - pass - - -class CodeHandler(unittest.TestCase): - """ - Handy mixin for test classes for writing / reading / futurizing / - running .py files in the test suite. - """ - def setUp(self): - """ - The outputs from the various futurize stages should have the - following headers: - """ - # After stage1: - # TODO: use this form after implementing a fixer to consolidate - # __future__ imports into a single line: - # self.headers1 = """ - # from __future__ import absolute_import, division, print_function - # """ - self.headers1 = reformat_code(""" - from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - """) - - # After stage2 --all-imports: - # TODO: use this form after implementing a fixer to consolidate - # __future__ imports into a single line: - # self.headers2 = """ - # from __future__ import (absolute_import, division, - # print_function, unicode_literals) - # from future import standard_library - # from future.builtins import * - # """ - self.headers2 = reformat_code(""" - from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - from __future__ import unicode_literals - from future import standard_library - standard_library.install_aliases() - from builtins import * - """) - self.interpreters = [sys.executable] - self.tempdir = tempfile.mkdtemp() + os.path.sep - pypath = os.getenv('PYTHONPATH') - if pypath: - self.env = {'PYTHONPATH': os.getcwd() + os.pathsep + pypath} - else: - self.env = {'PYTHONPATH': os.getcwd()} - - def convert(self, code, stages=(1, 2), all_imports=False, from3=False, - reformat=True, run=True, conservative=False): - """ - Converts the code block using ``futurize`` and returns the - resulting code. - - Passing stages=[1] or stages=[2] passes the flag ``--stage1`` or - ``stage2`` to ``futurize``. Passing both stages runs ``futurize`` - with both stages by default. - - If from3 is False, runs ``futurize``, converting from Python 2 to - both 2 and 3. If from3 is True, runs ``pasteurize`` to convert - from Python 3 to both 2 and 3. - - Optionally reformats the code block first using the reformat() function. - - If run is True, runs the resulting code under all Python - interpreters in self.interpreters. - """ - if reformat: - code = reformat_code(code) - self._write_test_script(code) - self._futurize_test_script(stages=stages, all_imports=all_imports, - from3=from3, conservative=conservative) - output = self._read_test_script() - if run: - for interpreter in self.interpreters: - _ = self._run_test_script(interpreter=interpreter) - return output - - def compare(self, output, expected, ignore_imports=True): - """ - Compares whether the code blocks are equal. If not, raises an - exception so the test fails. Ignores any trailing whitespace like - blank lines. - - If ignore_imports is True, passes the code blocks into the - strip_future_imports method. - - If one code block is a unicode string and the other a - byte-string, it assumes the byte-string is encoded as utf-8. 
- """ - if ignore_imports: - output = self.strip_future_imports(output) - expected = self.strip_future_imports(expected) - if isinstance(output, bytes) and not isinstance(expected, bytes): - output = output.decode('utf-8') - if isinstance(expected, bytes) and not isinstance(output, bytes): - expected = expected.decode('utf-8') - self.assertEqual(order_future_lines(output.rstrip()), - expected.rstrip()) - - def strip_future_imports(self, code): - """ - Strips any of these import lines: - - from __future__ import - from future - from future. - from builtins - - or any line containing: - install_hooks() - or: - install_aliases() - - Limitation: doesn't handle imports split across multiple lines like - this: - - from __future__ import (absolute_import, division, print_function, - unicode_literals) - """ - output = [] - # We need .splitlines(keepends=True), which doesn't exist on Py2, - # so we use this instead: - for line in code.split('\n'): - if not (line.startswith('from __future__ import ') - or line.startswith('from future ') - or line.startswith('from builtins ') - or 'install_hooks()' in line - or 'install_aliases()' in line - # but don't match "from future_builtins" :) - or line.startswith('from future.')): - output.append(line) - return '\n'.join(output) - - def convert_check(self, before, expected, stages=(1, 2), all_imports=False, - ignore_imports=True, from3=False, run=True, - conservative=False): - """ - Convenience method that calls convert() and compare(). - - Reformats the code blocks automatically using the reformat_code() - function. - - If all_imports is passed, we add the appropriate import headers - for the stage(s) selected to the ``expected`` code-block, so they - needn't appear repeatedly in the test code. - - If ignore_imports is True, ignores the presence of any lines - beginning: - - from __future__ import ... - from future import ... - - for the purpose of the comparison. - """ - output = self.convert(before, stages=stages, all_imports=all_imports, - from3=from3, run=run, conservative=conservative) - if all_imports: - headers = self.headers2 if 2 in stages else self.headers1 - else: - headers = '' - - self.compare(output, headers + reformat_code(expected), - ignore_imports=ignore_imports) - - def unchanged(self, code, **kwargs): - """ - Convenience method to ensure the code is unchanged by the - futurize process. - """ - self.convert_check(code, code, **kwargs) - - def _write_test_script(self, code, filename='mytestscript.py'): - """ - Dedents the given code (a multiline string) and writes it out to - a file in a temporary folder like /tmp/tmpUDCn7x/mytestscript.py. 
- """ - if isinstance(code, bytes): - code = code.decode('utf-8') - # Be explicit about encoding the temp file as UTF-8 (issue #63): - with io.open(self.tempdir + filename, 'wt', encoding='utf-8') as f: - f.write(dedent(code)) - - def _read_test_script(self, filename='mytestscript.py'): - with io.open(self.tempdir + filename, 'rt', encoding='utf-8') as f: - newsource = f.read() - return newsource - - def _futurize_test_script(self, filename='mytestscript.py', stages=(1, 2), - all_imports=False, from3=False, - conservative=False): - params = [] - stages = list(stages) - if all_imports: - params.append('--all-imports') - if from3: - script = 'pasteurize.py' - else: - script = 'futurize.py' - if stages == [1]: - params.append('--stage1') - elif stages == [2]: - params.append('--stage2') - else: - assert stages == [1, 2] - if conservative: - params.append('--conservative') - # No extra params needed - - # Absolute file path: - fn = self.tempdir + filename - call_args = [sys.executable, script] + params + ['-w', fn] - try: - output = check_output(call_args, stderr=STDOUT, env=self.env) - except CalledProcessError as e: - with open(fn) as f: - msg = ( - 'Error running the command %s\n' - '%s\n' - 'Contents of file %s:\n' - '\n' - '%s') % ( - ' '.join(call_args), - 'env=%s' % self.env, - fn, - '----\n%s\n----' % f.read(), - ) - ErrorClass = (FuturizeError if 'futurize' in script else PasteurizeError) - raise ErrorClass(msg, e.returncode, e.cmd, output=e.output) - return output - - def _run_test_script(self, filename='mytestscript.py', - interpreter=sys.executable): - # Absolute file path: - fn = self.tempdir + filename - try: - output = check_output([interpreter, fn], - env=self.env, stderr=STDOUT) - except CalledProcessError as e: - with open(fn) as f: - msg = ( - 'Error running the command %s\n' - '%s\n' - 'Contents of file %s:\n' - '\n' - '%s') % ( - ' '.join([interpreter, fn]), - 'env=%s' % self.env, - fn, - '----\n%s\n----' % f.read(), - ) - if not hasattr(e, 'output'): - # The attribute CalledProcessError.output doesn't exist on Py2.6 - e.output = None - raise VerboseCalledProcessError(msg, e.returncode, e.cmd, output=e.output) - return output - - -# Decorator to skip some tests on Python 2.6 ... -skip26 = unittest.skipIf(PY26, "this test is known to fail on Py2.6") - - -def expectedFailurePY3(func): - if not PY3: - return func - return unittest.expectedFailure(func) - -def expectedFailurePY26(func): - if not PY26: - return func - return unittest.expectedFailure(func) - - -def expectedFailurePY27(func): - if not PY27: - return func - return unittest.expectedFailure(func) - - -def expectedFailurePY2(func): - if not PY2: - return func - return unittest.expectedFailure(func) - - -# Renamed in Py3.3: -if not hasattr(unittest.TestCase, 'assertRaisesRegex'): - unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp - -# From Py3.3: -def assertRegex(self, text, expected_regex, msg=None): - """Fail the test unless the text matches the regular expression.""" - if isinstance(expected_regex, (str, unicode)): - assert expected_regex, "expected_regex must not be empty." 
- expected_regex = re.compile(expected_regex) - if not expected_regex.search(text): - msg = msg or "Regex didn't match" - msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text) - raise self.failureException(msg) - -if not hasattr(unittest.TestCase, 'assertRegex'): - bind_method(unittest.TestCase, 'assertRegex', assertRegex) - -class _AssertRaisesBaseContext(object): - - def __init__(self, expected, test_case, callable_obj=None, - expected_regex=None): - self.expected = expected - self.test_case = test_case - if callable_obj is not None: - try: - self.obj_name = callable_obj.__name__ - except AttributeError: - self.obj_name = str(callable_obj) - else: - self.obj_name = None - if isinstance(expected_regex, (bytes, str)): - expected_regex = re.compile(expected_regex) - self.expected_regex = expected_regex - self.msg = None - - def _raiseFailure(self, standardMsg): - msg = self.test_case._formatMessage(self.msg, standardMsg) - raise self.test_case.failureException(msg) - - def handle(self, name, callable_obj, args, kwargs): - """ - If callable_obj is None, assertRaises/Warns is being used as a - context manager, so check for a 'msg' kwarg and return self. - If callable_obj is not None, call it passing args and kwargs. - """ - if callable_obj is None: - self.msg = kwargs.pop('msg', None) - return self - with self: - callable_obj(*args, **kwargs) - -class _AssertWarnsContext(_AssertRaisesBaseContext): - """A context manager used to implement TestCase.assertWarns* methods.""" - - def __enter__(self): - # The __warningregistry__'s need to be in a pristine state for tests - # to work properly. - for v in sys.modules.values(): - if getattr(v, '__warningregistry__', None): - v.__warningregistry__ = {} - self.warnings_manager = warnings.catch_warnings(record=True) - self.warnings = self.warnings_manager.__enter__() - warnings.simplefilter("always", self.expected) - return self - - def __exit__(self, exc_type, exc_value, tb): - self.warnings_manager.__exit__(exc_type, exc_value, tb) - if exc_type is not None: - # let unexpected exceptions pass through - return - try: - exc_name = self.expected.__name__ - except AttributeError: - exc_name = str(self.expected) - first_matching = None - for m in self.warnings: - w = m.message - if not isinstance(w, self.expected): - continue - if first_matching is None: - first_matching = w - if (self.expected_regex is not None and - not self.expected_regex.search(str(w))): - continue - # store warning for later retrieval - self.warning = w - self.filename = m.filename - self.lineno = m.lineno - return - # Now we simply try to choose a helpful failure message - if first_matching is not None: - self._raiseFailure('"{}" does not match "{}"'.format( - self.expected_regex.pattern, str(first_matching))) - if self.obj_name: - self._raiseFailure("{} not triggered by {}".format(exc_name, - self.obj_name)) - else: - self._raiseFailure("{} not triggered".format(exc_name)) - - -def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs): - """Fail unless a warning of class warnClass is triggered - by callable_obj when invoked with arguments args and keyword - arguments kwargs. If a different type of warning is - triggered, it will not be handled: depending on the other - warning filtering rules in effect, it might be silenced, printed - out, or raised as an exception. 
- - If called with callable_obj omitted or None, will return a - context object used like this:: - - with self.assertWarns(SomeWarning): - do_something() - - An optional keyword argument 'msg' can be provided when assertWarns - is used as a context object. - - The context manager keeps a reference to the first matching - warning as the 'warning' attribute; similarly, the 'filename' - and 'lineno' attributes give you information about the line - of Python code from which the warning was triggered. - This allows you to inspect the warning after the assertion:: - - with self.assertWarns(SomeWarning) as cm: - do_something() - the_warning = cm.warning - self.assertEqual(the_warning.some_attribute, 147) - """ - context = _AssertWarnsContext(expected_warning, self, callable_obj) - return context.handle('assertWarns', callable_obj, args, kwargs) - -if not hasattr(unittest.TestCase, 'assertWarns'): - bind_method(unittest.TestCase, 'assertWarns', assertWarns) diff --git a/pype/vendor/future/types/__init__.py b/pype/vendor/future/types/__init__.py deleted file mode 100644 index 71279bbb49..0000000000 --- a/pype/vendor/future/types/__init__.py +++ /dev/null @@ -1,258 +0,0 @@ -""" -This module contains backports the data types that were significantly changed -in the transition from Python 2 to Python 3. - -- an implementation of Python 3's bytes object (pure Python subclass of - Python 2's builtin 8-bit str type) -- an implementation of Python 3's str object (pure Python subclass of - Python 2's builtin unicode type) -- a backport of the range iterator from Py3 with slicing support - -It is used as follows:: - - from __future__ import division, absolute_import, print_function - from builtins import bytes, dict, int, range, str - -to bring in the new semantics for these functions from Python 3. And -then, for example:: - - b = bytes(b'ABCD') - assert list(b) == [65, 66, 67, 68] - assert repr(b) == "b'ABCD'" - assert [65, 66] in b - - # These raise TypeErrors: - # b + u'EFGH' - # b.split(u'B') - # bytes(b',').join([u'Fred', u'Bill']) - - - s = str(u'ABCD') - - # These raise TypeErrors: - # s.join([b'Fred', b'Bill']) - # s.startswith(b'A') - # b'B' in s - # s.find(b'A') - # s.replace(u'A', b'a') - - # This raises an AttributeError: - # s.decode('utf-8') - - assert repr(s) == 'ABCD' # consistent repr with Py3 (no u prefix) - - - for i in range(10**11)[:10]: - pass - -and:: - - class VerboseList(list): - def append(self, item): - print('Adding an item') - super().append(item) # new simpler super() function - -For more information: ---------------------- - -- future.types.newbytes -- future.types.newdict -- future.types.newint -- future.types.newobject -- future.types.newrange -- future.types.newstr - - -Notes -===== - -range() -------- -``range`` is a custom class that backports the slicing behaviour from -Python 3 (based on the ``xrange`` module by Dan Crosta). See the -``newrange`` module docstring for more details. - - -super() -------- -``super()`` is based on Ryan Kelly's ``magicsuper`` module. See the -``newsuper`` module docstring for more details. - - -round() -------- -Python 3 modifies the behaviour of ``round()`` to use "Banker's Rounding". -See http://stackoverflow.com/a/10825998. See the ``newround`` module -docstring for more details. 
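The documented behaviour, condensed into a runnable sketch (on Py3 these names are simply the builtins; on Py2 the example assumes python-future is installed):

    from builtins import bytes, range

    b = bytes(b'ABCD')
    assert list(b) == [65, 66, 67, 68]
    assert repr(b) == "b'ABCD'"

    # The backported range supports Py3-style lazy slicing:
    for i in range(10 ** 11)[:10]:
        pass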
- -""" - -from __future__ import absolute_import, division, print_function - -import functools -from numbers import Integral - -from future import utils - - -# Some utility functions to enforce strict type-separation of unicode str and -# bytes: -def disallow_types(argnums, disallowed_types): - """ - A decorator that raises a TypeError if any of the given numbered - arguments is of the corresponding given type (e.g. bytes or unicode - string). - - For example: - - @disallow_types([0, 1], [unicode, bytes]) - def f(a, b): - pass - - raises a TypeError when f is called if a unicode object is passed as - `a` or a bytes object is passed as `b`. - - This also skips over keyword arguments, so - - @disallow_types([0, 1], [unicode, bytes]) - def g(a, b=None): - pass - - doesn't raise an exception if g is called with only one argument a, - e.g.: - - g(b'Byte string') - - Example use: - - >>> class newbytes(object): - ... @disallow_types([1], [unicode]) - ... def __add__(self, other): - ... pass - - >>> newbytes('1234') + u'1234' #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - TypeError: can't concat 'bytes' to (unicode) str - """ - - def decorator(function): - - @functools.wraps(function) - def wrapper(*args, **kwargs): - # These imports are just for this decorator, and are defined here - # to prevent circular imports: - from .newbytes import newbytes - from .newint import newint - from .newstr import newstr - - errmsg = "argument can't be {0}" - for (argnum, mytype) in zip(argnums, disallowed_types): - # Handle the case where the type is passed as a string like 'newbytes'. - if isinstance(mytype, str) or isinstance(mytype, bytes): - mytype = locals()[mytype] - - # Only restrict kw args only if they are passed: - if len(args) <= argnum: - break - - # Here we use type() rather than isinstance() because - # __instancecheck__ is being overridden. E.g. - # isinstance(b'abc', newbytes) is True on Py2. - if type(args[argnum]) == mytype: - raise TypeError(errmsg.format(mytype)) - - return function(*args, **kwargs) - return wrapper - return decorator - - -def no(mytype, argnums=(1,)): - """ - A shortcut for the disallow_types decorator that disallows only one type - (in any position in argnums). - - Example use: - - >>> class newstr(object): - ... @no('bytes') - ... def __add__(self, other): - ... pass - - >>> newstr(u'1234') + b'1234' #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - TypeError: argument can't be bytes - - The object can also be passed directly, but passing the string helps - to prevent circular import problems. 
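A simplified, standalone sketch of this type-guard pattern (illustrative only: the real decorator above also resolves type names passed as strings and checks several argument positions against several types):

    import functools

    def no(disallowed, argnums=(1,)):
        '''Raise TypeError if a positional argument has the disallowed type.'''
        def decorator(function):
            @functools.wraps(function)
            def wrapper(*args, **kwargs):
                for argnum in argnums:
                    if len(args) > argnum and type(args[argnum]) == disallowed:
                        raise TypeError(
                            "argument can't be %s" % disallowed.__name__)
                return function(*args, **kwargs)
            return wrapper
        return decorator

    class MyBytes(bytes):
        @no(str)  # on Py2 the disallowed type would be unicode
        def __add__(self, other):
            return MyBytes(bytes(self) + other)

    MyBytes(b'12') + b'34'    # fine
    # MyBytes(b'12') + u'34'  # raises TypeError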
- """ - if isinstance(argnums, Integral): - argnums = (argnums,) - disallowed_types = [mytype] * len(argnums) - return disallow_types(argnums, disallowed_types) - - -def issubset(list1, list2): - """ - Examples: - - >>> issubset([], [65, 66, 67]) - True - >>> issubset([65], [65, 66, 67]) - True - >>> issubset([65, 66], [65, 66, 67]) - True - >>> issubset([65, 67], [65, 66, 67]) - False - """ - n = len(list1) - for startpos in range(len(list2) - n + 1): - if list2[startpos:startpos+n] == list1: - return True - return False - - -if utils.PY3: - import builtins - bytes = builtins.bytes - dict = builtins.dict - int = builtins.int - list = builtins.list - object = builtins.object - range = builtins.range - str = builtins.str - - # The identity mapping - newtypes = {bytes: bytes, - dict: dict, - int: int, - list: list, - object: object, - range: range, - str: str} - - __all__ = ['newtypes'] - -else: - - from .newbytes import newbytes - from .newdict import newdict - from .newint import newint - from .newlist import newlist - from .newrange import newrange - from .newobject import newobject - from .newstr import newstr - - newtypes = {bytes: newbytes, - dict: newdict, - int: newint, - long: newint, - list: newlist, - object: newobject, - range: newrange, - str: newbytes, - unicode: newstr} - - __all__ = ['newbytes', 'newdict', 'newint', 'newlist', 'newrange', 'newstr', 'newtypes'] - diff --git a/pype/vendor/future/types/newbytes.py b/pype/vendor/future/types/newbytes.py deleted file mode 100644 index 85e6501cba..0000000000 --- a/pype/vendor/future/types/newbytes.py +++ /dev/null @@ -1,431 +0,0 @@ -""" -Pure-Python implementation of a Python 3-like bytes object for Python 2. - -Why do this? Without it, the Python 2 bytes object is a very, very -different beast to the Python 3 bytes object. -""" - -from collections import Iterable -from numbers import Integral -import string -import copy - -from future.utils import istext, isbytes, PY3, with_metaclass -from future.types import no, issubset -from future.types.newobject import newobject - - -_builtin_bytes = bytes - -if PY3: - # We'll probably never use newstr on Py3 anyway... - unicode = str - - -class BaseNewBytes(type): - def __instancecheck__(cls, instance): - if cls == newbytes: - return isinstance(instance, _builtin_bytes) - else: - return issubclass(instance.__class__, cls) - - -def _newchr(x): - if isinstance(x, str): # this happens on pypy - return x.encode('ascii') - else: - return chr(x) - - -class newbytes(with_metaclass(BaseNewBytes, _builtin_bytes)): - """ - A backport of the Python 3 bytes object to Py2 - """ - def __new__(cls, *args, **kwargs): - """ - From the Py3 bytes docstring: - - bytes(iterable_of_ints) -> bytes - bytes(string, encoding[, errors]) -> bytes - bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer - bytes(int) -> bytes object of size given by the parameter initialized with null bytes - bytes() -> empty bytes object - - Construct an immutable array of bytes from: - - an iterable yielding integers in range(256) - - a text string encoded using the specified encoding - - any object implementing the buffer API. - - an integer - """ - - encoding = None - errors = None - - if len(args) == 0: - return super(newbytes, cls).__new__(cls) - elif len(args) >= 2: - args = list(args) - if len(args) == 3: - errors = args.pop() - encoding=args.pop() - # Was: elif isinstance(args[0], newbytes): - # We use type() instead of the above because we're redefining - # this to be True for all unicode string subclasses. 
Warning: - # This may render newstr un-subclassable. - if type(args[0]) == newbytes: - # Special-case: for consistency with Py3.3, we return the same object - # (with the same id) if a newbytes object is passed into the - # newbytes constructor. - return args[0] - elif isinstance(args[0], _builtin_bytes): - value = args[0] - elif isinstance(args[0], unicode): - try: - if 'encoding' in kwargs: - assert encoding is None - encoding = kwargs['encoding'] - if 'errors' in kwargs: - assert errors is None - errors = kwargs['errors'] - except AssertionError: - raise TypeError('Argument given by name and position') - if encoding is None: - raise TypeError('unicode string argument without an encoding') - ### - # Was: value = args[0].encode(**kwargs) - # Python 2.6 string encode() method doesn't take kwargs: - # Use this instead: - newargs = [encoding] - if errors is not None: - newargs.append(errors) - value = args[0].encode(*newargs) - ### - elif isinstance(args[0], Iterable): - if len(args[0]) == 0: - # This could be an empty list or tuple. Return b'' as on Py3. - value = b'' - else: - # Was: elif len(args[0])>0 and isinstance(args[0][0], Integral): - # # It's a list of integers - # But then we can't index into e.g. frozensets. Try to proceed - # anyway. - try: - value = bytearray([_newchr(x) for x in args[0]]) - except: - raise ValueError('bytes must be in range(0, 256)') - elif isinstance(args[0], Integral): - if args[0] < 0: - raise ValueError('negative count') - value = b'\x00' * args[0] - else: - value = args[0] - if type(value) == newbytes: - # Above we use type(...) rather than isinstance(...) because the - # newbytes metaclass overrides __instancecheck__. - # oldbytes(value) gives the wrong thing on Py2: the same - # result as str(value) on Py3, e.g. "b'abc'". (Issue #193). 
- # So we handle this case separately: - return copy.copy(value) - else: - return super(newbytes, cls).__new__(cls, value) - - def __repr__(self): - return 'b' + super(newbytes, self).__repr__() - - def __str__(self): - return 'b' + "'{0}'".format(super(newbytes, self).__str__()) - - def __getitem__(self, y): - value = super(newbytes, self).__getitem__(y) - if isinstance(y, Integral): - return ord(value) - else: - return newbytes(value) - - def __getslice__(self, *args): - return self.__getitem__(slice(*args)) - - def __contains__(self, key): - if isinstance(key, int): - newbyteskey = newbytes([key]) - # Don't use isinstance() here because we only want to catch - # newbytes, not Python 2 str: - elif type(key) == newbytes: - newbyteskey = key - else: - newbyteskey = newbytes(key) - return issubset(list(newbyteskey), list(self)) - - @no(unicode) - def __add__(self, other): - return newbytes(super(newbytes, self).__add__(other)) - - @no(unicode) - def __radd__(self, left): - return newbytes(left) + self - - @no(unicode) - def __mul__(self, other): - return newbytes(super(newbytes, self).__mul__(other)) - - @no(unicode) - def __rmul__(self, other): - return newbytes(super(newbytes, self).__rmul__(other)) - - def join(self, iterable_of_bytes): - errmsg = 'sequence item {0}: expected bytes, {1} found' - if isbytes(iterable_of_bytes) or istext(iterable_of_bytes): - raise TypeError(errmsg.format(0, type(iterable_of_bytes))) - for i, item in enumerate(iterable_of_bytes): - if istext(item): - raise TypeError(errmsg.format(i, type(item))) - return newbytes(super(newbytes, self).join(iterable_of_bytes)) - - @classmethod - def fromhex(cls, string): - # Only on Py2: - return cls(string.replace(' ', '').decode('hex')) - - @no(unicode) - def find(self, sub, *args): - return super(newbytes, self).find(sub, *args) - - @no(unicode) - def rfind(self, sub, *args): - return super(newbytes, self).rfind(sub, *args) - - @no(unicode, (1, 2)) - def replace(self, old, new, *args): - return newbytes(super(newbytes, self).replace(old, new, *args)) - - def encode(self, *args): - raise AttributeError("encode method has been disabled in newbytes") - - def decode(self, encoding='utf-8', errors='strict'): - """ - Returns a newstr (i.e. unicode subclass) - - Decode B using the codec registered for encoding. Default encoding - is 'utf-8'. errors may be given to set a different error - handling scheme. Default is 'strict' meaning that encoding errors raise - a UnicodeDecodeError. Other possible values are 'ignore' and 'replace' - as well as any other name registered with codecs.register_error that is - able to handle UnicodeDecodeErrors. - """ - # Py2 str.encode() takes encoding and errors as optional parameter, - # not keyword arguments as in Python 3 str. - - from future.types.newstr import newstr - - if errors == 'surrogateescape': - from future.utils.surrogateescape import register_surrogateescape - register_surrogateescape() - - return newstr(super(newbytes, self).decode(encoding, errors)) - - # This is currently broken: - # # We implement surrogateescape error handling here in addition rather - # # than relying on the custom error handler from - # # future.utils.surrogateescape to be registered globally, even though - # # that is fine in the case of decoding. (But not encoding: see the - # # comments in newstr.encode()``.) 
-        #
-        # if errors == 'surrogateescape':
-        #     # Decode char by char
-        #     mybytes = []
-        #     for code in self:
-        #         # Code is an int
-        #         if 0x80 <= code <= 0xFF:
-        #             b = 0xDC00 + code
-        #         elif code <= 0x7F:
-        #             b = _unichr(c).decode(encoding=encoding)
-        #         else:
-        #             # # It may be a bad byte
-        #             # FIXME: What to do in this case? See the Py3 docs / tests.
-        #             # # Try swallowing it.
-        #             # continue
-        #             # print("RAISE!")
-        #             raise NotASurrogateError
-        #         mybytes.append(b)
-        #     return newbytes(mybytes)
-        # return newbytes(super(newstr, self).decode(encoding, errors))
-
-    @no(unicode)
-    def startswith(self, prefix, *args):
-        return super(newbytes, self).startswith(prefix, *args)
-
-    @no(unicode)
-    def endswith(self, prefix, *args):
-        return super(newbytes, self).endswith(prefix, *args)
-
-    @no(unicode)
-    def split(self, sep=None, maxsplit=-1):
-        # Py2 str.split() takes maxsplit as an optional parameter, not as a
-        # keyword argument as in Python 3 bytes.
-        parts = super(newbytes, self).split(sep, maxsplit)
-        return [newbytes(part) for part in parts]
-
-    def splitlines(self, keepends=False):
-        """
-        B.splitlines([keepends]) -> list of lines
-
-        Return a list of the lines in B, breaking at line boundaries.
-        Line breaks are not included in the resulting list unless keepends
-        is given and true.
-        """
-        # Py2 str.splitlines() takes keepends as an optional parameter,
-        # not as a keyword argument as in Python 3 bytes.
-        parts = super(newbytes, self).splitlines(keepends)
-        return [newbytes(part) for part in parts]
-
-    @no(unicode)
-    def rsplit(self, sep=None, maxsplit=-1):
-        # Py2 str.rsplit() takes maxsplit as an optional parameter, not as a
-        # keyword argument as in Python 3 bytes.
-        parts = super(newbytes, self).rsplit(sep, maxsplit)
-        return [newbytes(part) for part in parts]
-
-    @no(unicode)
-    def partition(self, sep):
-        parts = super(newbytes, self).partition(sep)
-        return tuple(newbytes(part) for part in parts)
-
-    @no(unicode)
-    def rpartition(self, sep):
-        parts = super(newbytes, self).rpartition(sep)
-        return tuple(newbytes(part) for part in parts)
-
-    @no(unicode, (1,))
-    def rindex(self, sub, *args):
-        '''
-        S.rindex(sub [,start [,end]]) -> int
-
-        Like S.rfind() but raise ValueError when the substring is not found.
-        '''
-        pos = self.rfind(sub, *args)
-        if pos == -1:
-            raise ValueError('substring not found')
-        return pos
-
-    @no(unicode)
-    def index(self, sub, *args):
-        '''
-        Returns the index of sub in bytes.
-        Raises ValueError if sub is not in bytes, and TypeError if it can't
-        be converted to bytes or its length is not 1.
-        '''
-        if isinstance(sub, int):
-            if len(args) == 0:
-                start, end = 0, len(self)
-            elif len(args) == 1:
-                start, end = args[0], len(self)  # end defaults to the full length
-            elif len(args) == 2:
-                start, end = args
-            else:
-                raise TypeError('takes at most 3 arguments')
-            return list(self)[start:end].index(sub)
-        if not isinstance(sub, bytes):
-            try:
-                sub = self.__class__(sub)
-            except (TypeError, ValueError):
-                raise TypeError("can't convert sub to bytes")
-        try:
-            return super(newbytes, self).index(sub, *args)
-        except ValueError:
-            raise ValueError('substring not found')
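As on Py3, ``index``/``rindex`` raise ``ValueError`` where ``find``/``rfind`` return -1, and an integer argument is matched as a single byte value. A quick sketch, again under the vendored ``builtins`` assumption:

    from builtins import bytes

    data = bytes(b'abcabc')
    assert data.find(b'z') == -1   # find() signals absence with -1
    assert data.index(98) == 1     # 98 == ord('b'); ints match single bytes
    try:
        data.index(b'z')
    except ValueError:
        pass                       # index() raises instead of returning -1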
-
-    def __eq__(self, other):
-        if isinstance(other, (_builtin_bytes, bytearray)):
-            return super(newbytes, self).__eq__(other)
-        else:
-            return False
-
-    def __ne__(self, other):
-        if isinstance(other, _builtin_bytes):
-            return super(newbytes, self).__ne__(other)
-        else:
-            return True
-
-    unorderable_err = 'unorderable types: bytes() and {0}'
-
-    def __lt__(self, other):
-        if not isbytes(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newbytes, self).__lt__(other)
-
-    def __le__(self, other):
-        if not isbytes(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newbytes, self).__le__(other)
-
-    def __gt__(self, other):
-        if not isbytes(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newbytes, self).__gt__(other)
-
-    def __ge__(self, other):
-        if not isbytes(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newbytes, self).__ge__(other)
-
-    def __native__(self):
-        # We can't just feed a newbytes object into str(), because
-        # newbytes.__str__() returns e.g. "b'blah'", consistent with Py3 bytes.
-        return super(newbytes, self).__str__()
-
-    def __getattribute__(self, name):
-        """
-        A trick to cause the ``hasattr`` builtin-fn to return False for
-        the 'encode' method on Py2.
-        """
-        if name in ['encode', u'encode']:
-            raise AttributeError("encode method has been disabled in newbytes")
-        return super(newbytes, self).__getattribute__(name)
-
-    @no(unicode)
-    def rstrip(self, bytes_to_strip=None):
-        """
-        Strip trailing bytes contained in the argument.
-        If the argument is omitted, strip trailing ASCII whitespace.
-        """
-        return newbytes(super(newbytes, self).rstrip(bytes_to_strip))
-
-    @no(unicode)
-    def strip(self, bytes_to_strip=None):
-        """
-        Strip leading and trailing bytes contained in the argument.
-        If the argument is omitted, strip leading and trailing ASCII whitespace.
-        """
-        return newbytes(super(newbytes, self).strip(bytes_to_strip))
-
-    def lower(self):
-        """
-        b.lower() -> copy of b
-
-        Return a copy of b with all ASCII characters converted to lowercase.
-        """
-        return newbytes(super(newbytes, self).lower())
-
-    @no(unicode)
-    def upper(self):
-        """
-        b.upper() -> copy of b
-
-        Return a copy of b with all ASCII characters converted to uppercase.
-        """
-        return newbytes(super(newbytes, self).upper())
-
-    @classmethod
-    @no(unicode)
-    def maketrans(cls, frm, to):
-        """
-        B.maketrans(frm, to) -> translation table
-
-        Return a translation table (a bytes object of length 256) suitable
-        for use in the bytes or bytearray translate method where each byte
-        in frm is mapped to the byte at the same position in to.
-        The bytes objects frm and to must be of the same length.
- """ - return newbytes(string.maketrans(frm, to)) - - -__all__ = ['newbytes'] diff --git a/pype/vendor/future/types/newdict.py b/pype/vendor/future/types/newdict.py deleted file mode 100644 index 5dbcc4b7df..0000000000 --- a/pype/vendor/future/types/newdict.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -A dict subclass for Python 2 that behaves like Python 3's dict - -Example use: - ->>> from builtins import dict ->>> d1 = dict() # instead of {} for an empty dict ->>> d2 = dict(key1='value1', key2='value2') - -The keys, values and items methods now return iterators on Python 2.x -(with set-like behaviour on Python 2.7). - ->>> for d in (d1, d2): -... assert not isinstance(d.keys(), list) -... assert not isinstance(d.values(), list) -... assert not isinstance(d.items(), list) -""" - -import sys - -from future.utils import with_metaclass -from future.types.newobject import newobject - - -_builtin_dict = dict -ver = sys.version_info[:2] - - -class BaseNewDict(type): - def __instancecheck__(cls, instance): - if cls == newdict: - return isinstance(instance, _builtin_dict) - else: - return issubclass(instance.__class__, cls) - - -class newdict(with_metaclass(BaseNewDict, _builtin_dict)): - """ - A backport of the Python 3 dict object to Py2 - """ - def items(self): - """ - On Python 2.7+: - D.items() -> a set-like object providing a view on D's items - On Python 2.6: - D.items() -> an iterator over D's items - """ - if ver == (2, 7): - return self.viewitems() - elif ver == (2, 6): - return self.iteritems() - elif ver >= (3, 0): - return self.items() - - def keys(self): - """ - On Python 2.7+: - D.keys() -> a set-like object providing a view on D's keys - On Python 2.6: - D.keys() -> an iterator over D's keys - """ - if ver == (2, 7): - return self.viewkeys() - elif ver == (2, 6): - return self.iterkeys() - elif ver >= (3, 0): - return self.keys() - - def values(self): - """ - On Python 2.7+: - D.values() -> a set-like object providing a view on D's values - On Python 2.6: - D.values() -> an iterator over D's values - """ - if ver == (2, 7): - return self.viewvalues() - elif ver == (2, 6): - return self.itervalues() - elif ver >= (3, 0): - return self.values() - - def __new__(cls, *args, **kwargs): - """ - dict() -> new empty dictionary - dict(mapping) -> new dictionary initialized from a mapping object's - (key, value) pairs - dict(iterable) -> new dictionary initialized as if via: - d = {} - for k, v in iterable: - d[k] = v - dict(**kwargs) -> new dictionary initialized with the name=value pairs - in the keyword argument list. For example: dict(one=1, two=2) - """ - - if len(args) == 0: - return super(newdict, cls).__new__(cls) - elif type(args[0]) == newdict: - value = args[0] - else: - value = args[0] - return super(newdict, cls).__new__(cls, value) - - def __native__(self): - """ - Hook for the future.utils.native() function - """ - return dict(self) - - -__all__ = ['newdict'] diff --git a/pype/vendor/future/types/newint.py b/pype/vendor/future/types/newint.py deleted file mode 100644 index 705b8fa95b..0000000000 --- a/pype/vendor/future/types/newint.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -Backport of Python 3's int, based on Py2's long. - -They are very similar. 
The most notable difference is: - -- representation: trailing L in Python 2 removed in Python 3 -""" -from __future__ import division - -import struct -import collections - -from future.types.newbytes import newbytes -from future.types.newobject import newobject -from future.utils import PY3, isint, istext, isbytes, with_metaclass, native - - -if PY3: - long = int - - -class BaseNewInt(type): - def __instancecheck__(cls, instance): - if cls == newint: - # Special case for Py2 short or long int - return isinstance(instance, (int, long)) - else: - return issubclass(instance.__class__, cls) - - -class newint(with_metaclass(BaseNewInt, long)): - """ - A backport of the Python 3 int object to Py2 - """ - def __new__(cls, x=0, base=10): - """ - From the Py3 int docstring: - - | int(x=0) -> integer - | int(x, base=10) -> integer - | - | Convert a number or string to an integer, or return 0 if no - | arguments are given. If x is a number, return x.__int__(). For - | floating point numbers, this truncates towards zero. - | - | If x is not a number or if base is given, then x must be a string, - | bytes, or bytearray instance representing an integer literal in the - | given base. The literal can be preceded by '+' or '-' and be - | surrounded by whitespace. The base defaults to 10. Valid bases are - | 0 and 2-36. Base 0 means to interpret the base from the string as an - | integer literal. - | >>> int('0b100', base=0) - | 4 - - """ - try: - val = x.__int__() - except AttributeError: - val = x - else: - if not isint(val): - raise TypeError('__int__ returned non-int ({0})'.format( - type(val))) - - if base != 10: - # Explicit base - if not (istext(val) or isbytes(val) or isinstance(val, bytearray)): - raise TypeError( - "int() can't convert non-string with explicit base") - try: - return super(newint, cls).__new__(cls, val, base) - except TypeError: - return super(newint, cls).__new__(cls, newbytes(val), base) - # After here, base is 10 - try: - return super(newint, cls).__new__(cls, val) - except TypeError: - # Py2 long doesn't handle bytearray input with an explicit base, so - # handle this here. 
-            # Py3: int(bytearray(b'10'), 2) == 2
-            # Py2: int(bytearray(b'10'), 2) == 2 raises TypeError
-            # Py2: long(bytearray(b'10'), 2) == 2 raises TypeError
-            try:
-                return super(newint, cls).__new__(cls, newbytes(val))
-            except:
-                raise TypeError("newint argument must be a string or a number, "
                                "not '{0}'".format(type(val)))
-
-    def __repr__(self):
-        """
-        Without the L suffix
-        """
-        value = super(newint, self).__repr__()
-        assert value[-1] == 'L'
-        return value[:-1]
-
-    def __add__(self, other):
-        value = super(newint, self).__add__(other)
-        if value is NotImplemented:
-            return long(self) + other
-        return newint(value)
-
-    def __radd__(self, other):
-        value = super(newint, self).__radd__(other)
-        if value is NotImplemented:
-            return other + long(self)
-        return newint(value)
-
-    def __sub__(self, other):
-        value = super(newint, self).__sub__(other)
-        if value is NotImplemented:
-            return long(self) - other
-        return newint(value)
-
-    def __rsub__(self, other):
-        value = super(newint, self).__rsub__(other)
-        if value is NotImplemented:
-            return other - long(self)
-        return newint(value)
-
-    def __mul__(self, other):
-        value = super(newint, self).__mul__(other)
-        if isint(value):
-            return newint(value)
-        elif value is NotImplemented:
-            return long(self) * other
-        return value
-
-    def __rmul__(self, other):
-        value = super(newint, self).__rmul__(other)
-        if isint(value):
-            return newint(value)
-        elif value is NotImplemented:
-            return other * long(self)
-        return value
-
-    def __div__(self, other):
-        # We override this rather than e.g. relying on object.__div__ or
-        # long.__div__ because we want to wrap the value in a newint()
-        # call if other is another int
-        value = long(self) / other
-        if isinstance(other, (int, long)):
-            return newint(value)
-        else:
-            return value
-
-    def __rdiv__(self, other):
-        value = other / long(self)
-        if isinstance(other, (int, long)):
-            return newint(value)
-        else:
-            return value
-
-    def __idiv__(self, other):
-        # long has no __idiv__ method.
Use __itruediv__ and cast back to - # newint: - value = self.__itruediv__(other) - if isinstance(other, (int, long)): - return newint(value) - else: - return value - - def __truediv__(self, other): - value = super(newint, self).__truediv__(other) - if value is NotImplemented: - value = long(self) / other - return value - - def __rtruediv__(self, other): - return super(newint, self).__rtruediv__(other) - - def __itruediv__(self, other): - # long has no __itruediv__ method - mylong = long(self) - mylong /= other - return mylong - - def __floordiv__(self, other): - return newint(super(newint, self).__floordiv__(other)) - - def __rfloordiv__(self, other): - return newint(super(newint, self).__rfloordiv__(other)) - - def __ifloordiv__(self, other): - # long has no __ifloordiv__ method - mylong = long(self) - mylong //= other - return newint(mylong) - - def __mod__(self, other): - value = super(newint, self).__mod__(other) - if value is NotImplemented: - return long(self) % other - return newint(value) - - def __rmod__(self, other): - value = super(newint, self).__rmod__(other) - if value is NotImplemented: - return other % long(self) - return newint(value) - - def __divmod__(self, other): - value = super(newint, self).__divmod__(other) - if value is NotImplemented: - mylong = long(self) - return (mylong // other, mylong % other) - return (newint(value[0]), newint(value[1])) - - def __rdivmod__(self, other): - value = super(newint, self).__rdivmod__(other) - if value is NotImplemented: - mylong = long(self) - return (other // mylong, other % mylong) - return (newint(value[0]), newint(value[1])) - - def __pow__(self, other): - value = super(newint, self).__pow__(other) - if value is NotImplemented: - return long(self) ** other - return newint(value) - - def __rpow__(self, other): - value = super(newint, self).__rpow__(other) - if value is NotImplemented: - return other ** long(self) - return newint(value) - - def __lshift__(self, other): - if not isint(other): - raise TypeError( - "unsupported operand type(s) for <<: '%s' and '%s'" % - (type(self).__name__, type(other).__name__)) - return newint(super(newint, self).__lshift__(other)) - - def __rshift__(self, other): - if not isint(other): - raise TypeError( - "unsupported operand type(s) for >>: '%s' and '%s'" % - (type(self).__name__, type(other).__name__)) - return newint(super(newint, self).__rshift__(other)) - - def __and__(self, other): - if not isint(other): - raise TypeError( - "unsupported operand type(s) for &: '%s' and '%s'" % - (type(self).__name__, type(other).__name__)) - return newint(super(newint, self).__and__(other)) - - def __or__(self, other): - if not isint(other): - raise TypeError( - "unsupported operand type(s) for |: '%s' and '%s'" % - (type(self).__name__, type(other).__name__)) - return newint(super(newint, self).__or__(other)) - - def __xor__(self, other): - if not isint(other): - raise TypeError( - "unsupported operand type(s) for ^: '%s' and '%s'" % - (type(self).__name__, type(other).__name__)) - return newint(super(newint, self).__xor__(other)) - - def __neg__(self): - return newint(super(newint, self).__neg__()) - - def __pos__(self): - return newint(super(newint, self).__pos__()) - - def __abs__(self): - return newint(super(newint, self).__abs__()) - - def __invert__(self): - return newint(super(newint, self).__invert__()) - - def __int__(self): - return self - - def __nonzero__(self): - return self.__bool__() - - def __bool__(self): - """ - So subclasses can override this, Py3-style - """ - return super(newint, 
self).__nonzero__()
-
-    def __native__(self):
-        return long(self)
-
-    def to_bytes(self, length, byteorder='big', signed=False):
-        """
-        Return an array of bytes representing an integer.
-
-        The integer is represented using length bytes.  An OverflowError is
-        raised if the integer is not representable with the given number of
-        bytes.
-
-        The byteorder argument determines the byte order used to represent the
-        integer.  If byteorder is 'big', the most significant byte is at the
-        beginning of the byte array.  If byteorder is 'little', the most
-        significant byte is at the end of the byte array.  To request the native
-        byte order of the host system, use `sys.byteorder' as the byte order value.
-
-        The signed keyword-only argument determines whether two's complement is
-        used to represent the integer.  If signed is False and a negative integer
-        is given, an OverflowError is raised.
-        """
-        if length < 0:
-            raise ValueError("length argument must be non-negative")
-        if length == 0 and self == 0:
-            return newbytes()
-        if signed and self < 0:
-            bits = length * 8
-            num = (2**bits) + self
-            if num <= 0:
-                raise OverflowError("int too small to convert")
-        else:
-            if self < 0:
-                raise OverflowError("can't convert negative int to unsigned")
-            num = self
-        if byteorder not in ('little', 'big'):
-            raise ValueError("byteorder must be either 'little' or 'big'")
-        h = b'%x' % num
-        s = newbytes((b'0'*(len(h) % 2) + h).zfill(length*2).decode('hex'))
-        if signed:
-            high_set = s[0] & 0x80
-            if self > 0 and high_set:
-                raise OverflowError("int too big to convert")
-            if self < 0 and not high_set:
-                raise OverflowError("int too small to convert")
-        if len(s) > length:
-            raise OverflowError("int too big to convert")
-        return s if byteorder == 'big' else s[::-1]
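Together with ``from_bytes``, defined next, this provides the Py3 int-to-bytes conversions on Py2; a round-trip sketch (assuming the vendored ``builtins`` module):

    from builtins import int

    buf = int(1024).to_bytes(2, byteorder='big')
    assert buf == b'\x04\x00'
    assert int.from_bytes(buf, byteorder='big') == 1024
    assert int(-1).to_bytes(1, byteorder='big', signed=True) == b'\xff'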
-    @classmethod
-    def from_bytes(cls, mybytes, byteorder='big', signed=False):
-        """
-        Return the integer represented by the given array of bytes.
-
-        The mybytes argument must either support the buffer protocol or be an
-        iterable object producing bytes.  Bytes and bytearray are examples of
-        built-in objects that support the buffer protocol.
-
-        The byteorder argument determines the byte order used to represent the
-        integer.  If byteorder is 'big', the most significant byte is at the
-        beginning of the byte array.  If byteorder is 'little', the most
-        significant byte is at the end of the byte array.  To request the native
-        byte order of the host system, use `sys.byteorder' as the byte order value.
-
-        The signed keyword-only argument indicates whether two's complement is
-        used to represent the integer.
-        """
-        if byteorder not in ('little', 'big'):
-            raise ValueError("byteorder must be either 'little' or 'big'")
-        if isinstance(mybytes, unicode):
-            raise TypeError("cannot convert unicode objects to bytes")
-        # mybytes can also be passed as a sequence of integers on Py3.
-        # Test for this:
-        elif isinstance(mybytes, collections.Iterable):
-            mybytes = newbytes(mybytes)
-        b = mybytes if byteorder == 'big' else mybytes[::-1]
-        if len(b) == 0:
-            b = b'\x00'
-        # The encode() method has been disabled by newbytes, but Py2's
-        # str has it:
-        num = int(native(b).encode('hex'), 16)
-        if signed and (b[0] & 0x80):
-            num = num - (2 ** (len(b)*8))
-        return cls(num)
-
-
-# def _twos_comp(val, bits):
-#     """compute the 2's complement of int value val"""
-#     if( (val&(1<<(bits-1))) != 0 ):
-#         val = val - (1<<bits)
-#     return val
-
-
-__all__ = ['newint']
diff --git a/pype/vendor/future/types/newlist.py b/pype/vendor/future/types/newlist.py
deleted file mode 100644
--- a/pype/vendor/future/types/newlist.py
+++ /dev/null
-"""
-A list subclass for Python 2 that behaves like Python 3's list.
-
-The primary difference is that lists have a .copy() method in Py3.
-
-Example use:
-
->>> from builtins import list
->>> l1 = list()    # instead of [] for an empty list
->>> l1.append('hello')
->>> l2 = l1.copy()
-
-"""
-
-import sys
-import copy
-
-from future.utils import with_metaclass
-from future.types.newobject import newobject
-
-
-_builtin_list = list
-ver = sys.version_info[:2]
-
-
-class BaseNewList(type):
-    def __instancecheck__(cls, instance):
-        if cls == newlist:
-            return isinstance(instance, _builtin_list)
-        else:
-            return issubclass(instance.__class__, cls)
-
-
-class newlist(with_metaclass(BaseNewList, _builtin_list)):
-    """
-    A backport of the Python 3 list object to Py2
-    """
-    def copy(self):
-        """
-        L.copy() -> list -- a shallow copy of L
-        """
-        return copy.copy(self)
-
-    def clear(self):
-        """L.clear() -> None -- remove all items from L"""
-        for i in range(len(self)):
-            self.pop()
-
-    def __new__(cls, *args, **kwargs):
-        """
-        list() -> new empty list
-        list(iterable) -> new list initialized from iterable's items
-        """
-
-        if len(args) == 0:
-            return super(newlist, cls).__new__(cls)
-        elif type(args[0]) == newlist:
-            value = args[0]
-        else:
-            value = args[0]
-        return super(newlist, cls).__new__(cls, value)
-
-    def __add__(self, value):
-        return newlist(super(newlist, self).__add__(value))
-
-    def __radd__(self, left):
-        " left + self "
-        try:
-            return newlist(left) + self
-        except:
-            return NotImplemented
-
-    def __getitem__(self, y):
-        """
-        x.__getitem__(y) <==> x[y]
-
-        Warning: a bug in Python 2.x prevents indexing via a slice from
-        returning a newlist object.
-        """
-        if isinstance(y, slice):
-            return newlist(super(newlist, self).__getitem__(y))
-        else:
-            return super(newlist, self).__getitem__(y)
-
-    def __native__(self):
-        """
-        Hook for the future.utils.native() function
-        """
-        return list(self)
-
-    def __nonzero__(self):
-        return len(self) > 0
-
-
-__all__ = ['newlist']
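The point of ``newlist`` is the two Py3-only methods, ``copy`` and ``clear``; everything else defers to the builtin list:

    from builtins import list

    l1 = list([3, 1, 2])
    l2 = l1.copy()   # Py3 method, available on Py2 through the backport
    l2.clear()       # likewise
    assert l1 == [3, 1, 2] and l2 == []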
- """ - def __init__(self, obj): - return obj - - -__all__ = ['newmemoryview'] diff --git a/pype/vendor/future/types/newobject.py b/pype/vendor/future/types/newobject.py deleted file mode 100644 index 1ec09ff5b4..0000000000 --- a/pype/vendor/future/types/newobject.py +++ /dev/null @@ -1,134 +0,0 @@ -""" -An object subclass for Python 2 that gives new-style classes written in the -style of Python 3 (with ``__next__`` and unicode-returning ``__str__`` methods) -the appropriate Python 2-style ``next`` and ``__unicode__`` methods for compatible. - -Example use:: - - from builtins import object - - my_unicode_str = u'Unicode string: \u5b54\u5b50' - - class A(object): - def __str__(self): - return my_unicode_str - - a = A() - print(str(a)) - - # On Python 2, these relations hold: - assert unicode(a) == my_unicode_string - assert str(a) == my_unicode_string.encode('utf-8') - - -Another example:: - - from builtins import object - - class Upper(object): - def __init__(self, iterable): - self._iter = iter(iterable) - def __next__(self): # note the Py3 interface - return next(self._iter).upper() - def __iter__(self): - return self - - assert list(Upper('hello')) == list('HELLO') - -""" - -import sys - -from future.utils import with_metaclass - - -_builtin_object = object -ver = sys.version_info[:2] - - -# We no longer define a metaclass for newobject because this breaks multiple -# inheritance and custom metaclass use with this exception: - -# TypeError: Error when calling the metaclass bases -# metaclass conflict: the metaclass of a derived class must be a -# (non-strict) subclass of the metaclasses of all its bases - -# See issues #91 and #96. - - -class newobject(object): - """ - A magical object class that provides Python 2 compatibility methods:: - next - __unicode__ - __nonzero__ - - Subclasses of this class can merely define the Python 3 methods (__next__, - __str__, and __bool__). - """ - def next(self): - if hasattr(self, '__next__'): - return type(self).__next__(self) - raise TypeError('newobject is not an iterator') - - def __unicode__(self): - # All subclasses of the builtin object should have __str__ defined. - # Note that old-style classes do not have __str__ defined. - if hasattr(self, '__str__'): - s = type(self).__str__(self) - else: - s = str(self) - if isinstance(s, unicode): - return s - else: - return s.decode('utf-8') - - def __nonzero__(self): - if hasattr(self, '__bool__'): - return type(self).__bool__(self) - if hasattr(self, '__len__'): - return type(self).__len__(self) - # object has no __nonzero__ method - return True - - # Are these ever needed? - # def __div__(self): - # return self.__truediv__() - - # def __idiv__(self, other): - # return self.__itruediv__(other) - - def __long__(self): - if not hasattr(self, '__int__'): - return NotImplemented - return self.__int__() # not type(self).__int__(self) - - # def __new__(cls, *args, **kwargs): - # """ - # dict() -> new empty dictionary - # dict(mapping) -> new dictionary initialized from a mapping object's - # (key, value) pairs - # dict(iterable) -> new dictionary initialized as if via: - # d = {} - # for k, v in iterable: - # d[k] = v - # dict(**kwargs) -> new dictionary initialized with the name=value pairs - # in the keyword argument list. 
For example: dict(one=1, two=2) - # """ - - # if len(args) == 0: - # return super(newdict, cls).__new__(cls) - # elif type(args[0]) == newdict: - # return args[0] - # else: - # value = args[0] - # return super(newdict, cls).__new__(cls, value) - - def __native__(self): - """ - Hook for the future.utils.native() function - """ - return object(self) - - -__all__ = ['newobject'] diff --git a/pype/vendor/future/types/newopen.py b/pype/vendor/future/types/newopen.py deleted file mode 100644 index 8da064274f..0000000000 --- a/pype/vendor/future/types/newopen.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -A substitute for the Python 3 open() function. - -Note that io.open() is more complete but maybe slower. Even so, the -completeness may be a better default. TODO: compare these -""" - -_builtin_open = open - -class newopen(object): - """Wrapper providing key part of Python 3 open() interface. - - From IPython's py3compat.py module. License: BSD. - """ - def __init__(self, fname, mode="r", encoding="utf-8"): - self.f = _builtin_open(fname, mode) - self.enc = encoding - - def write(self, s): - return self.f.write(s.encode(self.enc)) - - def read(self, size=-1): - return self.f.read(size).decode(self.enc) - - def close(self): - return self.f.close() - - def __enter__(self): - return self - - def __exit__(self, etype, value, traceback): - self.f.close() - diff --git a/pype/vendor/future/types/newrange.py b/pype/vendor/future/types/newrange.py deleted file mode 100644 index c190ba9e3d..0000000000 --- a/pype/vendor/future/types/newrange.py +++ /dev/null @@ -1,159 +0,0 @@ -""" -Nearly identical to xrange.py, by Dan Crosta, from - - https://github.com/dcrosta/xrange.git - -This is included here in the ``future`` package rather than pointed to as -a dependency because there is no package for ``xrange`` on PyPI. It is -also tweaked to appear like a regular Python 3 ``range`` object rather -than a Python 2 xrange. - -From Dan Crosta's README: - - "A pure-Python implementation of Python 2.7's xrange built-in, with - some features backported from the Python 3.x range built-in (which - replaced xrange) in that version." - - Read more at - https://late.am/post/2012/06/18/what-the-heck-is-an-xrange -""" -from __future__ import absolute_import - -from collections import Sequence, Iterator -from itertools import islice - -from future.backports.misc import count # with step parameter on Py2.6 -# For backward compatibility with python-future versions < 0.14.4: -_count = count - - -class newrange(Sequence): - """ - Pure-Python backport of Python 3's range object. 
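A sketch of how the ``newopen`` wrapper above was meant to be used on Python 2 (``notes.txt`` is a hypothetical path; the wrapper encodes on write and decodes on read):

    from future.types.newopen import newopen

    f = newopen('notes.txt', mode='w', encoding='utf-8')
    f.write(u'caf\xe9')
    f.close()
    with newopen('notes.txt', encoding='utf-8') as f:
        assert f.read() == u'caf\xe9'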
See `the CPython
-    documentation for details:
-    <http://docs.python.org/py3k/library/functions.html#range>`_
-    """
-
-    def __init__(self, *args):
-        if len(args) == 1:
-            start, stop, step = 0, args[0], 1
-        elif len(args) == 2:
-            start, stop, step = args[0], args[1], 1
-        elif len(args) == 3:
-            start, stop, step = args
-        else:
-            raise TypeError('range() requires 1-3 int arguments')
-
-        try:
-            start, stop, step = int(start), int(stop), int(step)
-        except ValueError:
-            raise TypeError('an integer is required')
-
-        if step == 0:
-            raise ValueError('range() arg 3 must not be zero')
-        elif step < 0:
-            stop = min(stop, start)
-        else:
-            stop = max(stop, start)
-
-        self._start = start
-        self._stop = stop
-        self._step = step
-        self._len = (stop - start) // step + bool((stop - start) % step)
-
-    @property
-    def start(self):
-        return self._start
-
-    @property
-    def stop(self):
-        return self._stop
-
-    @property
-    def step(self):
-        return self._step
-
-    def __repr__(self):
-        if self._step == 1:
-            return 'range(%d, %d)' % (self._start, self._stop)
-        return 'range(%d, %d, %d)' % (self._start, self._stop, self._step)
-
-    def __eq__(self, other):
-        return (isinstance(other, newrange) and
-                (self._len == 0 == other._len or
-                 (self._start, self._step, self._len) ==
-                 (other._start, other._step, other._len)))
-
-    def __len__(self):
-        return self._len
-
-    def index(self, value):
-        """Return the 0-based position of integer `value` in
-        the sequence this range represents."""
-        diff = value - self._start
-        quotient, remainder = divmod(diff, self._step)
-        if remainder == 0 and 0 <= quotient < self._len:
-            return abs(quotient)
-        raise ValueError('%r is not in range' % value)
-
-    def count(self, value):
-        """Return the number of occurrences of integer `value`
-        in the sequence this range represents."""
-        # a value can occur exactly zero or one times
-        return int(value in self)
-
-    def __contains__(self, value):
-        """Return ``True`` if the integer `value` occurs in
-        the sequence this range represents."""
-        try:
-            self.index(value)
-            return True
-        except ValueError:
-            return False
-
-    def __reversed__(self):
-        return iter(self[::-1])
-
-    def __getitem__(self, index):
-        """Return the element at position ``index`` in the sequence
-        this range represents, or raise :class:`IndexError` if the
-        position is out of range."""
-        if isinstance(index, slice):
-            return self.__getitem_slice(index)
-        if index < 0:
-            # negative indexes access from the end
-            index = self._len + index
-        if index < 0 or index >= self._len:
-            raise IndexError('range object index out of range')
-        return self._start + index * self._step
-
-    def __getitem_slice(self, slce):
-        """Return a range which represents the requested slce
-        of the sequence represented by this range.
-        """
-        scaled_indices = (self._step * n for n in slce.indices(self._len))
-        start_offset, stop_offset, new_step = scaled_indices
-        return newrange(self._start + start_offset,
-                        self._start + stop_offset,
-                        new_step)
-
-    def __iter__(self):
-        """Return an iterator which enumerates the elements of the
-        sequence this range represents."""
-        return range_iterator(self)
-
-
-class range_iterator(Iterator):
-    """An iterator for a :class:`range`.
- """ - def __init__(self, range_): - self._stepper = islice(count(range_.start, range_.step), len(range_)) - - def __iter__(self): - return self - - def next(self): - return next(self._stepper) - - -__all__ = ['newrange'] diff --git a/pype/vendor/future/types/newstr.py b/pype/vendor/future/types/newstr.py deleted file mode 100644 index fd8615aff1..0000000000 --- a/pype/vendor/future/types/newstr.py +++ /dev/null @@ -1,412 +0,0 @@ -""" -This module redefines ``str`` on Python 2.x to be a subclass of the Py2 -``unicode`` type that behaves like the Python 3.x ``str``. - -The main differences between ``newstr`` and Python 2.x's ``unicode`` type are -the stricter type-checking and absence of a `u''` prefix in the representation. - -It is designed to be used together with the ``unicode_literals`` import -as follows: - - >>> from __future__ import unicode_literals - >>> from builtins import str, isinstance - -On Python 3.x and normally on Python 2.x, these expressions hold - - >>> str('blah') is 'blah' - True - >>> isinstance('blah', str) - True - -However, on Python 2.x, with this import: - - >>> from __future__ import unicode_literals - -the same expressions are False: - - >>> str('blah') is 'blah' - False - >>> isinstance('blah', str) - False - -This module is designed to be imported together with ``unicode_literals`` on -Python 2 to bring the meaning of ``str`` back into alignment with unprefixed -string literals (i.e. ``unicode`` subclasses). - -Note that ``str()`` (and ``print()``) would then normally call the -``__unicode__`` method on objects in Python 2. To define string -representations of your objects portably across Py3 and Py2, use the -:func:`python_2_unicode_compatible` decorator in :mod:`future.utils`. - -""" - -from collections import Iterable -from numbers import Number - -from future.utils import PY3, istext, with_metaclass, isnewbytes -from future.types import no, issubset -from future.types.newobject import newobject - - -if PY3: - # We'll probably never use newstr on Py3 anyway... - unicode = str - - -class BaseNewStr(type): - def __instancecheck__(cls, instance): - if cls == newstr: - return isinstance(instance, unicode) - else: - return issubclass(instance.__class__, cls) - - -class newstr(with_metaclass(BaseNewStr, unicode)): - """ - A backport of the Python 3 str object to Py2 - """ - no_convert_msg = "Can't convert '{0}' object to str implicitly" - - def __new__(cls, *args, **kwargs): - """ - From the Py3 str docstring: - - str(object='') -> str - str(bytes_or_buffer[, encoding[, errors]]) -> str - - Create a new string object from the given object. If encoding or - errors is specified, then the object must expose a data buffer - that will be decoded using the given encoding and error handler. - Otherwise, returns the result of object.__str__() (if defined) - or repr(object). - encoding defaults to sys.getdefaultencoding(). - errors defaults to 'strict'. - - """ - if len(args) == 0: - return super(newstr, cls).__new__(cls) - # Special case: If someone requests str(str(u'abc')), return the same - # object (same id) for consistency with Py3.3. This is not true for - # other objects like list or dict. - elif type(args[0]) == newstr and cls == newstr: - return args[0] - elif isinstance(args[0], unicode): - value = args[0] - elif isinstance(args[0], bytes): # i.e. 
Py2 bytes or newbytes - if 'encoding' in kwargs or len(args) > 1: - value = args[0].decode(*args[1:], **kwargs) - else: - value = args[0].__str__() - else: - value = args[0] - return super(newstr, cls).__new__(cls, value) - - def __repr__(self): - """ - Without the u prefix - """ - value = super(newstr, self).__repr__() - # assert value[0] == u'u' - return value[1:] - - def __getitem__(self, y): - """ - Warning: Python <= 2.7.6 has a bug that causes this method never to be called - when y is a slice object. Therefore the type of newstr()[:2] is wrong - (unicode instead of newstr). - """ - return newstr(super(newstr, self).__getitem__(y)) - - def __contains__(self, key): - errmsg = "'in ' requires string as left operand, not {0}" - # Don't use isinstance() here because we only want to catch - # newstr, not Python 2 unicode: - if type(key) == newstr: - newkey = key - elif isinstance(key, unicode) or isinstance(key, bytes) and not isnewbytes(key): - newkey = newstr(key) - else: - raise TypeError(errmsg.format(type(key))) - return issubset(list(newkey), list(self)) - - @no('newbytes') - def __add__(self, other): - return newstr(super(newstr, self).__add__(other)) - - @no('newbytes') - def __radd__(self, left): - " left + self " - try: - return newstr(left) + self - except: - return NotImplemented - - def __mul__(self, other): - return newstr(super(newstr, self).__mul__(other)) - - def __rmul__(self, other): - return newstr(super(newstr, self).__rmul__(other)) - - def join(self, iterable): - errmsg = 'sequence item {0}: expected unicode string, found bytes' - for i, item in enumerate(iterable): - # Here we use type() rather than isinstance() because - # __instancecheck__ is being overridden. E.g. - # isinstance(b'abc', newbytes) is True on Py2. - if isnewbytes(item): - raise TypeError(errmsg.format(i)) - # Support use as a staticmethod: str.join('-', ['a', 'b']) - if type(self) == newstr: - return newstr(super(newstr, self).join(iterable)) - else: - return newstr(super(newstr, newstr(self)).join(iterable)) - - @no('newbytes') - def find(self, sub, *args): - return super(newstr, self).find(sub, *args) - - @no('newbytes') - def rfind(self, sub, *args): - return super(newstr, self).rfind(sub, *args) - - @no('newbytes', (1, 2)) - def replace(self, old, new, *args): - return newstr(super(newstr, self).replace(old, new, *args)) - - def decode(self, *args): - raise AttributeError("decode method has been disabled in newstr") - - def encode(self, encoding='utf-8', errors='strict'): - """ - Returns bytes - - Encode S using the codec registered for encoding. Default encoding - is 'utf-8'. errors may be given to set a different error - handling scheme. Default is 'strict' meaning that encoding errors raise - a UnicodeEncodeError. Other possible values are 'ignore', 'replace' and - 'xmlcharrefreplace' as well as any other name registered with - codecs.register_error that can handle UnicodeEncodeErrors. - """ - from future.types.newbytes import newbytes - # Py2 unicode.encode() takes encoding and errors as optional parameter, - # not keyword arguments as in Python 3 str. - - # For the surrogateescape error handling mechanism, the - # codecs.register_error() function seems to be inadequate for an - # implementation of it when encoding. (Decoding seems fine, however.) 
- # For example, in the case of - # u'\udcc3'.encode('ascii', 'surrogateescape_handler') - # after registering the ``surrogateescape_handler`` function in - # future.utils.surrogateescape, both Python 2.x and 3.x raise an - # exception anyway after the function is called because the unicode - # string it has to return isn't encodable strictly as ASCII. - - if errors == 'surrogateescape': - if encoding == 'utf-16': - # Known to fail here. See test_encoding_works_normally() - raise NotImplementedError('FIXME: surrogateescape handling is ' - 'not yet implemented properly') - # Encode char by char, building up list of byte-strings - mybytes = [] - for c in self: - code = ord(c) - if 0xD800 <= code <= 0xDCFF: - mybytes.append(newbytes([code - 0xDC00])) - else: - mybytes.append(c.encode(encoding=encoding)) - return newbytes(b'').join(mybytes) - return newbytes(super(newstr, self).encode(encoding, errors)) - - @no('newbytes', 1) - def startswith(self, prefix, *args): - if isinstance(prefix, Iterable): - for thing in prefix: - if isnewbytes(thing): - raise TypeError(self.no_convert_msg.format(type(thing))) - return super(newstr, self).startswith(prefix, *args) - - @no('newbytes', 1) - def endswith(self, prefix, *args): - # Note we need the decorator above as well as the isnewbytes() - # check because prefix can be either a bytes object or e.g. a - # tuple of possible prefixes. (If it's a bytes object, each item - # in it is an int.) - if isinstance(prefix, Iterable): - for thing in prefix: - if isnewbytes(thing): - raise TypeError(self.no_convert_msg.format(type(thing))) - return super(newstr, self).endswith(prefix, *args) - - @no('newbytes', 1) - def split(self, sep=None, maxsplit=-1): - # Py2 unicode.split() takes maxsplit as an optional parameter, - # not as a keyword argument as in Python 3 str. - parts = super(newstr, self).split(sep, maxsplit) - return [newstr(part) for part in parts] - - @no('newbytes', 1) - def rsplit(self, sep=None, maxsplit=-1): - # Py2 unicode.rsplit() takes maxsplit as an optional parameter, - # not as a keyword argument as in Python 3 str. - parts = super(newstr, self).rsplit(sep, maxsplit) - return [newstr(part) for part in parts] - - @no('newbytes', 1) - def partition(self, sep): - parts = super(newstr, self).partition(sep) - return tuple(newstr(part) for part in parts) - - @no('newbytes', 1) - def rpartition(self, sep): - parts = super(newstr, self).rpartition(sep) - return tuple(newstr(part) for part in parts) - - @no('newbytes', 1) - def index(self, sub, *args): - """ - Like newstr.find() but raise ValueError when the substring is not - found. - """ - pos = self.find(sub, *args) - if pos == -1: - raise ValueError('substring not found') - return pos - - def splitlines(self, keepends=False): - """ - S.splitlines(keepends=False) -> list of strings - - Return a list of the lines in S, breaking at line boundaries. - Line breaks are not included in the resulting list unless keepends - is given and true. - """ - # Py2 unicode.splitlines() takes keepends as an optional parameter, - # not as a keyword argument as in Python 3 str. 
-        parts = super(newstr, self).splitlines(keepends)
-        return [newstr(part) for part in parts]
-
-    def __eq__(self, other):
-        if (isinstance(other, unicode) or
-                isinstance(other, bytes) and not isnewbytes(other)):
-            return super(newstr, self).__eq__(other)
-        else:
-            return False
-
-    def __ne__(self, other):
-        if (isinstance(other, unicode) or
-                isinstance(other, bytes) and not isnewbytes(other)):
-            return super(newstr, self).__ne__(other)
-        else:
-            return True
-
-    unorderable_err = 'unorderable types: str() and {0}'
-
-    def __lt__(self, other):
-        if not istext(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newstr, self).__lt__(other)
-
-    def __le__(self, other):
-        if not istext(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newstr, self).__le__(other)
-
-    def __gt__(self, other):
-        if not istext(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newstr, self).__gt__(other)
-
-    def __ge__(self, other):
-        if not istext(other):
-            raise TypeError(self.unorderable_err.format(type(other)))
-        return super(newstr, self).__ge__(other)
-
-    def __getattribute__(self, name):
-        """
-        A trick to cause the ``hasattr`` builtin-fn to return False for
-        the 'decode' method on Py2.
-        """
-        if name in ['decode', u'decode']:
-            raise AttributeError("decode method has been disabled in newstr")
-        return super(newstr, self).__getattribute__(name)
-
-    def __native__(self):
-        """
-        A hook for the future.utils.native() function.
-        """
-        return unicode(self)
-
-    @staticmethod
-    def maketrans(x, y=None, z=None):
-        """
-        Return a translation table usable for str.translate().
-
-        If there is only one argument, it must be a dictionary mapping Unicode
-        ordinals (integers) or characters to Unicode ordinals, strings or None.
-        Character keys will then be converted to ordinals.
-        If there are two arguments, they must be strings of equal length, and
-        in the resulting dictionary, each character in x will be mapped to the
-        character at the same position in y. If there is a third argument, it
-        must be a string, whose characters will be mapped to None in the result.
-        """
-
-        if y is None:
-            assert z is None
-            if not isinstance(x, dict):
-                raise TypeError('if you give only one argument to maketrans it must be a dict')
-            result = {}
-            for (key, value) in x.items():
-                if len(key) > 1:
-                    raise ValueError('keys in translate table must be strings or integers')
-                result[ord(key)] = value
-        else:
-            if not (isinstance(x, unicode) and isinstance(y, unicode)):
-                raise TypeError('x and y must be unicode strings')
-            if not len(x) == len(y):
-                raise ValueError('the first two maketrans arguments must have equal length')
-            result = {}
-            for (xi, yi) in zip(x, y):
-                if len(xi) > 1:
-                    raise ValueError('keys in translate table must be strings or integers')
-                result[ord(xi)] = ord(yi)
-
-        if z is not None:
-            for char in z:
-                result[ord(char)] = None
-        return result
-
-    def translate(self, table):
-        """
-        S.translate(table) -> str
-
-        Return a copy of the string S, where all characters have been mapped
-        through the given translation table, which must be a mapping of
-        Unicode ordinals to Unicode ordinals, strings, or None.
-        Unmapped characters are left untouched. Characters mapped to None
-        are deleted.
-        """
-        l = []
-        for c in self:
-            if ord(c) in table:
-                val = table[ord(c)]
-                if val is None:
-                    continue
-                elif isinstance(val, unicode):
-                    l.append(val)
-                else:
-                    l.append(chr(val))
-            else:
-                l.append(c)
-        return ''.join(l)
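``maketrans`` and ``translate`` together give the Py3-style table-based mapping; a short sketch (assuming the vendored ``future`` package supplies ``builtins``):

    from builtins import str

    table = str.maketrans(u'ab', u'xy')
    assert str(u'abc').translate(table) == u'xyc'
    # characters mapped to None are removed
    assert str(u'abc').translate(str.maketrans({u'a': None})) == u'bc'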
- """ - l = [] - for c in self: - if ord(c) in table: - val = table[ord(c)] - if val is None: - continue - elif isinstance(val, unicode): - l.append(val) - else: - l.append(chr(val)) - else: - l.append(c) - return ''.join(l) - - def isprintable(self): - raise NotImplementedError('fixme') - - def isidentifier(self): - raise NotImplementedError('fixme') - - def format_map(self): - raise NotImplementedError('fixme') - - -__all__ = ['newstr'] diff --git a/pype/vendor/future/utils/__init__.py b/pype/vendor/future/utils/__init__.py deleted file mode 100644 index cb4ade35df..0000000000 --- a/pype/vendor/future/utils/__init__.py +++ /dev/null @@ -1,739 +0,0 @@ -""" -A selection of cross-compatible functions for Python 2 and 3. - -This module exports useful functions for 2/3 compatible code: - - * bind_method: binds functions to classes - * ``native_str_to_bytes`` and ``bytes_to_native_str`` - * ``native_str``: always equal to the native platform string object (because - this may be shadowed by imports from future.builtins) - * lists: lrange(), lmap(), lzip(), lfilter() - * iterable method compatibility: - - iteritems, iterkeys, itervalues - - viewitems, viewkeys, viewvalues - - These use the original method if available, otherwise they use items, - keys, values. - - * types: - - * text_type: unicode in Python 2, str in Python 3 - * binary_type: str in Python 2, bythes in Python 3 - * string_types: basestring in Python 2, str in Python 3 - - * bchr(c): - Take an integer and make a 1-character byte string - * bord(c) - Take the result of indexing on a byte string and make an integer - * tobytes(s) - Take a text string, a byte string, or a sequence of characters taken - from a byte string, and make a byte string. - - * raise_from() - * raise_with_traceback() - -This module also defines these decorators: - - * ``python_2_unicode_compatible`` - * ``with_metaclass`` - * ``implements_iterator`` - -Some of the functions in this module come from the following sources: - - * Jinja2 (BSD licensed: see - https://github.com/mitsuhiko/jinja2/blob/master/LICENSE) - * Pandas compatibility module pandas.compat - * six.py by Benjamin Peterson - * Django -""" - -import types -import sys -import numbers -import functools -import copy -import inspect - - -PY3 = sys.version_info[0] == 3 -PY2 = sys.version_info[0] == 2 -PY26 = sys.version_info[0:2] == (2, 6) -PY27 = sys.version_info[0:2] == (2, 7) -PYPY = hasattr(sys, 'pypy_translation_info') - - -def python_2_unicode_compatible(cls): - """ - A decorator that defines __unicode__ and __str__ methods under Python - 2. Under Python 3, this decorator is a no-op. - - To support Python 2 and 3 with a single code base, define a __str__ - method returning unicode text and apply this decorator to the class, like - this:: - - >>> from future.utils import python_2_unicode_compatible - - >>> @python_2_unicode_compatible - ... class MyClass(object): - ... def __str__(self): - ... return u'Unicode string: \u5b54\u5b50' - - >>> a = MyClass() - - Then, after this import: - - >>> from future.builtins import str - - the following is ``True`` on both Python 3 and 2:: - - >>> str(a) == a.encode('utf-8').decode('utf-8') - True - - and, on a Unicode-enabled terminal with the right fonts, these both print the - Chinese characters for Confucius:: - - >>> print(a) - >>> print(str(a)) - - The implementation comes from django.utils.encoding. 
- """ - if not PY3: - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda self: self.__unicode__().encode('utf-8') - return cls - - -def with_metaclass(meta, *bases): - """ - Function from jinja2/_compat.py. License: BSD. - - Use it like this:: - - class BaseForm(object): - pass - - class FormType(type): - pass - - class Form(with_metaclass(FormType, BaseForm)): - pass - - This requires a bit of explanation: the basic idea is to make a - dummy metaclass for one level of class instantiation that replaces - itself with the actual metaclass. Because of internal type checks - we also need to make sure that we downgrade the custom metaclass - for one level to something closer to type (that's why __call__ and - __init__ comes back from type etc.). - - This has the advantage over six.with_metaclass of not introducing - dummy classes into the final MRO. - """ - class metaclass(meta): - __call__ = type.__call__ - __init__ = type.__init__ - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - return metaclass('temporary_class', None, {}) - - -# Definitions from pandas.compat and six.py follow: -if PY3: - def bchr(s): - return bytes([s]) - def bstr(s): - if isinstance(s, str): - return bytes(s, 'latin-1') - else: - return bytes(s) - def bord(s): - return s - - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - -else: - # Python 2 - def bchr(s): - return chr(s) - def bstr(s): - return str(s) - def bord(s): - return ord(s) - - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - -### - -if PY3: - def tobytes(s): - if isinstance(s, bytes): - return s - else: - if isinstance(s, str): - return s.encode('latin-1') - else: - return bytes(s) -else: - # Python 2 - def tobytes(s): - if isinstance(s, unicode): - return s.encode('latin-1') - else: - return ''.join(s) - -tobytes.__doc__ = """ - Encodes to latin-1 (where the first 256 chars are the same as - ASCII.) - """ - -if PY3: - def native_str_to_bytes(s, encoding='utf-8'): - return s.encode(encoding) - - def bytes_to_native_str(b, encoding='utf-8'): - return b.decode(encoding) - - def text_to_native_str(t, encoding=None): - return t -else: - # Python 2 - def native_str_to_bytes(s, encoding=None): - from future.types import newbytes # to avoid a circular import - return newbytes(s) - - def bytes_to_native_str(b, encoding=None): - return native(b) - - def text_to_native_str(t, encoding='ascii'): - """ - Use this to create a Py2 native string when "from __future__ import - unicode_literals" is in effect. - """ - return unicode(t).encode(encoding) - -native_str_to_bytes.__doc__ = """ - On Py3, returns an encoded string. - On Py2, returns a newbytes type, ignoring the ``encoding`` argument. 
- """ - -if PY3: - # list-producing versions of the major Python iterating functions - def lrange(*args, **kwargs): - return list(range(*args, **kwargs)) - - def lzip(*args, **kwargs): - return list(zip(*args, **kwargs)) - - def lmap(*args, **kwargs): - return list(map(*args, **kwargs)) - - def lfilter(*args, **kwargs): - return list(filter(*args, **kwargs)) -else: - import __builtin__ - # Python 2-builtin ranges produce lists - lrange = __builtin__.range - lzip = __builtin__.zip - lmap = __builtin__.map - lfilter = __builtin__.filter - - -def isidentifier(s, dotted=False): - ''' - A function equivalent to the str.isidentifier method on Py3 - ''' - if dotted: - return all(isidentifier(a) for a in s.split('.')) - if PY3: - return s.isidentifier() - else: - import re - _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") - return bool(_name_re.match(s)) - - -def viewitems(obj, **kwargs): - """ - Function for iterating over dictionary items with the same set-like - behaviour on Py2.7 as on Py3. - - Passes kwargs to method.""" - func = getattr(obj, "viewitems", None) - if not func: - func = obj.items - return func(**kwargs) - - -def viewkeys(obj, **kwargs): - """ - Function for iterating over dictionary keys with the same set-like - behaviour on Py2.7 as on Py3. - - Passes kwargs to method.""" - func = getattr(obj, "viewkeys", None) - if not func: - func = obj.keys - return func(**kwargs) - - -def viewvalues(obj, **kwargs): - """ - Function for iterating over dictionary values with the same set-like - behaviour on Py2.7 as on Py3. - - Passes kwargs to method.""" - func = getattr(obj, "viewvalues", None) - if not func: - func = obj.values - return func(**kwargs) - - -def iteritems(obj, **kwargs): - """Use this only if compatibility with Python versions before 2.7 is - required. Otherwise, prefer viewitems(). - """ - func = getattr(obj, "iteritems", None) - if not func: - func = obj.items - return func(**kwargs) - - -def iterkeys(obj, **kwargs): - """Use this only if compatibility with Python versions before 2.7 is - required. Otherwise, prefer viewkeys(). - """ - func = getattr(obj, "iterkeys", None) - if not func: - func = obj.keys - return func(**kwargs) - - -def itervalues(obj, **kwargs): - """Use this only if compatibility with Python versions before 2.7 is - required. Otherwise, prefer viewvalues(). - """ - func = getattr(obj, "itervalues", None) - if not func: - func = obj.values - return func(**kwargs) - - -def bind_method(cls, name, func): - """Bind a method to class, python 2 and python 3 compatible. - - Parameters - ---------- - - cls : type - class to receive bound method - name : basestring - name of method on class instance - func : function - function to be bound as method - - Returns - ------- - None - """ - # only python 2 has an issue with bound/unbound methods - if not PY3: - setattr(cls, name, types.MethodType(func, None, cls)) - else: - setattr(cls, name, func) - - -def getexception(): - return sys.exc_info()[1] - - -def _get_caller_globals_and_locals(): - """ - Returns the globals and locals of the calling frame. - - Is there an alternative to frame hacking here? - """ - caller_frame = inspect.stack()[2] - myglobals = caller_frame[0].f_globals - mylocals = caller_frame[0].f_locals - return myglobals, mylocals - - -def _repr_strip(mystring): - """ - Returns the string without any initial or final quotes. 
- """ - r = repr(mystring) - if r.startswith("'") and r.endswith("'"): - return r[1:-1] - else: - return r - - -if PY3: - def raise_from(exc, cause): - """ - Equivalent to: - - raise EXCEPTION from CAUSE - - on Python 3. (See PEP 3134). - """ - myglobals, mylocals = _get_caller_globals_and_locals() - - # We pass the exception and cause along with other globals - # when we exec(): - myglobals = myglobals.copy() - myglobals['__python_future_raise_from_exc'] = exc - myglobals['__python_future_raise_from_cause'] = cause - execstr = "raise __python_future_raise_from_exc from __python_future_raise_from_cause" - exec(execstr, myglobals, mylocals) - - def raise_(tp, value=None, tb=None): - """ - A function that matches the Python 2.x ``raise`` statement. This - allows re-raising exceptions with the cls value and traceback on - Python 2 and 3. - """ - if value is not None and isinstance(tp, Exception): - raise TypeError("instance exception may not have a separate value") - if value is not None: - exc = tp(value) - else: - exc = tp - if exc.__traceback__ is not tb: - raise exc.with_traceback(tb) - raise exc - - def raise_with_traceback(exc, traceback=Ellipsis): - if traceback == Ellipsis: - _, _, traceback = sys.exc_info() - raise exc.with_traceback(traceback) - -else: - def raise_from(exc, cause): - """ - Equivalent to: - - raise EXCEPTION from CAUSE - - on Python 3. (See PEP 3134). - """ - # Is either arg an exception class (e.g. IndexError) rather than - # instance (e.g. IndexError('my message here')? If so, pass the - # name of the class undisturbed through to "raise ... from ...". - if isinstance(exc, type) and issubclass(exc, Exception): - e = exc() - # exc = exc.__name__ - # execstr = "e = " + _repr_strip(exc) + "()" - # myglobals, mylocals = _get_caller_globals_and_locals() - # exec(execstr, myglobals, mylocals) - else: - e = exc - e.__suppress_context__ = False - if isinstance(cause, type) and issubclass(cause, Exception): - e.__cause__ = cause() - e.__suppress_context__ = True - elif cause is None: - e.__cause__ = None - e.__suppress_context__ = True - elif isinstance(cause, BaseException): - e.__cause__ = cause - e.__suppress_context__ = True - else: - raise TypeError("exception causes must derive from BaseException") - e.__context__ = sys.exc_info()[1] - raise e - - exec(''' -def raise_(tp, value=None, tb=None): - raise tp, value, tb - -def raise_with_traceback(exc, traceback=Ellipsis): - if traceback == Ellipsis: - _, _, traceback = sys.exc_info() - raise exc, None, traceback -'''.strip()) - - -raise_with_traceback.__doc__ = ( -"""Raise exception with existing traceback. -If traceback is not passed, uses sys.exc_info() to get traceback.""" -) - - -# Deprecated alias for backward compatibility with ``future`` versions < 0.11: -reraise = raise_ - - -def implements_iterator(cls): - ''' - From jinja2/_compat.py. License: BSD. 
- - Use as a decorator like this:: - - @implements_iterator - class UppercasingIterator(object): - def __init__(self, iterable): - self._iter = iter(iterable) - def __iter__(self): - return self - def __next__(self): - return next(self._iter).upper() - - ''' - if PY3: - return cls - else: - cls.next = cls.__next__ - del cls.__next__ - return cls - -if PY3: - get_next = lambda x: x.next -else: - get_next = lambda x: x.__next__ - - -def encode_filename(filename): - if PY3: - return filename - else: - if isinstance(filename, unicode): - return filename.encode('utf-8') - return filename - - -def is_new_style(cls): - """ - Python 2.7 has both new-style and old-style classes. Old-style classes can - be pesky in some circumstances, such as when using inheritance. Use this - function to test for whether a class is new-style. (Python 3 only has - new-style classes.) - """ - return hasattr(cls, '__class__') and ('__dict__' in dir(cls) - or hasattr(cls, '__slots__')) - -# The native platform string and bytes types. Useful because ``str`` and -# ``bytes`` are redefined on Py2 by ``from future.builtins import *``. -native_str = str -native_bytes = bytes - - -def istext(obj): - """ - Deprecated. Use:: - >>> isinstance(obj, str) - after this import: - >>> from future.builtins import str - """ - return isinstance(obj, type(u'')) - - -def isbytes(obj): - """ - Deprecated. Use:: - >>> isinstance(obj, bytes) - after this import: - >>> from future.builtins import bytes - """ - return isinstance(obj, type(b'')) - - -def isnewbytes(obj): - """ - Equivalent to the result of ``isinstance(obj, newbytes)`` were - ``__instancecheck__`` not overridden on the newbytes subclass. In - other words, it is REALLY a newbytes instance, not a Py2 native str - object? - """ - # TODO: generalize this so that it works with subclasses of newbytes - # Import is here to avoid circular imports: - from future.types.newbytes import newbytes - return type(obj) == newbytes - - -def isint(obj): - """ - Deprecated. Tests whether an object is a Py3 ``int`` or either a Py2 ``int`` or - ``long``. 
- - Instead of using this function, you can use: - - >>> from future.builtins import int - >>> isinstance(obj, int) - - The following idiom is equivalent: - - >>> from numbers import Integral - >>> isinstance(obj, Integral) - """ - - return isinstance(obj, numbers.Integral) - - -def native(obj): - """ - On Py3, this is a no-op: native(obj) -> obj - - On Py2, returns the corresponding native Py2 types that are - superclasses for backported objects from Py3: - - >>> from builtins import str, bytes, int - - >>> native(str(u'ABC')) - u'ABC' - >>> type(native(str(u'ABC'))) - unicode - - >>> native(bytes(b'ABC')) - b'ABC' - >>> type(native(bytes(b'ABC'))) - bytes - - >>> native(int(10**20)) - 100000000000000000000L - >>> type(native(int(10**20))) - long - - Existing native types on Py2 will be returned unchanged: - - >>> type(native(u'ABC')) - unicode - """ - if hasattr(obj, '__native__'): - return obj.__native__() - else: - return obj - - -# Implementation of exec_ is from ``six``: -if PY3: - import builtins - exec_ = getattr(builtins, "exec") -else: - def exec_(code, globs=None, locs=None): - """Execute code in a namespace.""" - if globs is None: - frame = sys._getframe(1) - globs = frame.f_globals - if locs is None: - locs = frame.f_locals - del frame - elif locs is None: - locs = globs - exec("""exec code in globs, locs""") - - -# Defined here for backward compatibility: -def old_div(a, b): - """ - DEPRECATED: import ``old_div`` from ``past.utils`` instead. - - Equivalent to ``a / b`` on Python 2 without ``from __future__ import - division``. - - TODO: generalize this to other objects (like arrays etc.) - """ - if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): - return a // b - else: - return a / b - - -def as_native_str(encoding='utf-8'): - ''' - A decorator to turn a function or method call that returns text, i.e. - unicode, into one that returns a native platform str. - - Use it as a decorator like this:: - - from __future__ import unicode_literals - - class MyClass(object): - @as_native_str(encoding='ascii') - def __repr__(self): - return next(self._iter).upper() - ''' - if PY3: - return lambda f: f - else: - def encoder(f): - @functools.wraps(f) - def wrapper(*args, **kwargs): - return f(*args, **kwargs).encode(encoding=encoding) - return wrapper - return encoder - -# listvalues and listitems definitions from Nick Coghlan's (withdrawn) -# PEP 496: -try: - dict.iteritems -except AttributeError: - # Python 3 - def listvalues(d): - return list(d.values()) - def listitems(d): - return list(d.items()) -else: - # Python 2 - def listvalues(d): - return d.values() - def listitems(d): - return d.items() - -if PY3: - def ensure_new_type(obj): - return obj -else: - def ensure_new_type(obj): - from future.types.newbytes import newbytes - from future.types.newstr import newstr - from future.types.newint import newint - from future.types.newdict import newdict - - native_type = type(native(obj)) - - # Upcast only if the type is already a native (non-future) type - if issubclass(native_type, type(obj)): - # Upcast - if native_type == str: # i.e. 
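`old_div()` above freezes Python 2 division semantics for code that has not yet been audited for true division; its own docstring points callers at `past.utils` instead. Its behaviour, reproduced standalone so it runs without the vendored package:

```python
import numbers

def old_div(a, b):
    # Same logic as the deleted helper: integral operands floor-divide,
    # anything else uses true division.
    if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral):
        return a // b
    return a / b

print(old_div(7, 2))     # 3    (Py2-style integer division)
print(old_div(7.0, 2))   # 3.5  (a float operand keeps true division)
print(old_div(-7, 2))    # -4   (floors toward negative infinity, like Py2)
```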
Py2 8-bit str - return newbytes(obj) - elif native_type == unicode: - return newstr(obj) - elif native_type == int: - return newint(obj) - elif native_type == long: - return newint(obj) - elif native_type == dict: - return newdict(obj) - else: - return obj - else: - # Already a new type - assert type(obj) in [newbytes, newstr] - return obj - - -__all__ = ['PY2', 'PY26', 'PY3', 'PYPY', - 'as_native_str', 'bind_method', 'bord', 'bstr', - 'bytes_to_native_str', 'encode_filename', 'ensure_new_type', - 'exec_', 'get_next', 'getexception', 'implements_iterator', - 'is_new_style', 'isbytes', 'isidentifier', 'isint', - 'isnewbytes', 'istext', 'iteritems', 'iterkeys', 'itervalues', - 'lfilter', 'listitems', 'listvalues', 'lmap', 'lrange', - 'lzip', 'native', 'native_bytes', 'native_str', - 'native_str_to_bytes', 'old_div', - 'python_2_unicode_compatible', 'raise_', - 'raise_with_traceback', 'reraise', 'text_to_native_str', - 'tobytes', 'viewitems', 'viewkeys', 'viewvalues', - 'with_metaclass' - ] diff --git a/pype/vendor/future/utils/surrogateescape.py b/pype/vendor/future/utils/surrogateescape.py deleted file mode 100644 index 398c3531b6..0000000000 --- a/pype/vendor/future/utils/surrogateescape.py +++ /dev/null @@ -1,200 +0,0 @@ -""" -This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error -handler of Python 3. - -Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc -""" - -# This code is released under the Python license and the BSD 2-clause license - -import codecs -import sys - -from future import utils - - -FS_ERRORS = 'surrogateescape' - -# # -- Python 2/3 compatibility ------------------------------------- -# FS_ERRORS = 'my_surrogateescape' - -def u(text): - if utils.PY3: - return text - else: - return text.decode('unicode_escape') - -def b(data): - if utils.PY3: - return data.encode('latin1') - else: - return data - -if utils.PY3: - _unichr = chr - bytes_chr = lambda code: bytes((code,)) -else: - _unichr = unichr - bytes_chr = chr - -def surrogateescape_handler(exc): - """ - Pure Python implementation of the PEP 383: the "surrogateescape" error - handler of Python 3. Undecodable bytes will be replaced by a Unicode - character U+DCxx on decoding, and these are translated into the - original bytes on encoding. - """ - mystring = exc.object[exc.start:exc.end] - - try: - if isinstance(exc, UnicodeDecodeError): - # mystring is a byte-string in this case - decoded = replace_surrogate_decode(mystring) - elif isinstance(exc, UnicodeEncodeError): - # In the case of u'\udcc3'.encode('ascii', - # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an - # exception anyway after this function is called, even though I think - # it's doing what it should. It seems that the strict encoder is called - # to encode the unicode string that this function returns ... - decoded = replace_surrogate_encode(mystring) - else: - raise exc - except NotASurrogateError: - raise exc - return (decoded, exc.end) - - -class NotASurrogateError(Exception): - pass - - -def replace_surrogate_encode(mystring): - """ - Returns a (unicode) string, not the more logical bytes, because the codecs - register_error functionality expects this. - """ - decoded = [] - for ch in mystring: - # if utils.PY3: - # code = ch - # else: - code = ord(ch) - - # The following magic comes from Py3.3's Python/codecs.c file: - if not 0xD800 <= code <= 0xDCFF: - # Not a surrogate. Fail with the original exception. 
- raise exc - # mybytes = [0xe0 | (code >> 12), - # 0x80 | ((code >> 6) & 0x3f), - # 0x80 | (code & 0x3f)] - # Is this a good idea? - if 0xDC00 <= code <= 0xDC7F: - decoded.append(_unichr(code - 0xDC00)) - elif code <= 0xDCFF: - decoded.append(_unichr(code - 0xDC00)) - else: - raise NotASurrogateError - return str().join(decoded) - - -def replace_surrogate_decode(mybytes): - """ - Returns a (unicode) string - """ - decoded = [] - for ch in mybytes: - # We may be parsing newbytes (in which case ch is an int) or a native - # str on Py2 - if isinstance(ch, int): - code = ch - else: - code = ord(ch) - if 0x80 <= code <= 0xFF: - decoded.append(_unichr(0xDC00 + code)) - elif code <= 0x7F: - decoded.append(_unichr(code)) - else: - # # It may be a bad byte - # # Try swallowing it. - # continue - # print("RAISE!") - raise NotASurrogateError - return str().join(decoded) - - -def encodefilename(fn): - if FS_ENCODING == 'ascii': - # ASCII encoder of Python 2 expects that the error handler returns a - # Unicode string encodable to ASCII, whereas our surrogateescape error - # handler has to return bytes in 0x80-0xFF range. - encoded = [] - for index, ch in enumerate(fn): - code = ord(ch) - if code < 128: - ch = bytes_chr(code) - elif 0xDC80 <= code <= 0xDCFF: - ch = bytes_chr(code - 0xDC00) - else: - raise UnicodeEncodeError(FS_ENCODING, - fn, index, index+1, - 'ordinal not in range(128)') - encoded.append(ch) - return bytes().join(encoded) - elif FS_ENCODING == 'utf-8': - # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF - # doesn't go through our error handler - encoded = [] - for index, ch in enumerate(fn): - code = ord(ch) - if 0xD800 <= code <= 0xDFFF: - if 0xDC80 <= code <= 0xDCFF: - ch = bytes_chr(code - 0xDC00) - encoded.append(ch) - else: - raise UnicodeEncodeError( - FS_ENCODING, - fn, index, index+1, 'surrogates not allowed') - else: - ch_utf8 = ch.encode('utf-8') - encoded.append(ch_utf8) - return bytes().join(encoded) - else: - return fn.encode(FS_ENCODING, FS_ERRORS) - -def decodefilename(fn): - return fn.decode(FS_ENCODING, FS_ERRORS) - -FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') -# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') -# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') - - -# normalize the filesystem encoding name. -# For example, we expect "utf-8", not "UTF8". -FS_ENCODING = codecs.lookup(FS_ENCODING).name - - -def register_surrogateescape(): - """ - Registers the surrogateescape error handler on Python 2 (only) - """ - if utils.PY3: - return - try: - codecs.lookup_error(FS_ERRORS) - except LookupError: - codecs.register_error(FS_ERRORS, surrogateescape_handler) - - -if __name__ == '__main__': - pass - # # Tests: - # register_surrogateescape() - - # b = decodefilename(fn) - # assert b == encoded, "%r != %r" % (b, encoded) - # c = encodefilename(b) - # assert c == fn, '%r != %r' % (c, fn) - # # print("ok") - - diff --git a/pype/vendor/libfuturize/__init__.py b/pype/vendor/libfuturize/__init__.py deleted file mode 100644 index 4cb1cbcd63..0000000000 --- a/pype/vendor/libfuturize/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty to make this a package diff --git a/pype/vendor/libfuturize/fixer_util.py b/pype/vendor/libfuturize/fixer_util.py deleted file mode 100644 index ce1e9753b6..0000000000 --- a/pype/vendor/libfuturize/fixer_util.py +++ /dev/null @@ -1,518 +0,0 @@ -""" -Utility functions from 2to3, 3to2 and python-modernize (and some home-grown -ones). 
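The `surrogateescape.py` module removed above backports an error handler that Python 3 ships natively: each undecodable byte `0xXX` becomes the lone surrogate `U+DC00 + 0xXX` on decoding and is restored on encoding. The round-trip it guarantees, shown with the built-in Py3 handler:

```python
raw = b"caf\xe9"                                     # not valid UTF-8
text = raw.decode("utf-8", errors="surrogateescape")
print(ascii(text))                                   # 'caf\udce9'
assert text.encode("utf-8", errors="surrogateescape") == raw
```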
- -Licences: -2to3: PSF License v2 -3to2: Apache Software License (from 3to2/setup.py) -python-modernize licence: BSD (from python-modernize/LICENSE) -""" - -from lib2to3.fixer_util import (FromImport, Newline, is_import, - find_root, does_tree_import, Comma) -from lib2to3.pytree import Leaf, Node -from lib2to3.pygram import python_symbols as syms, python_grammar -from lib2to3.pygram import token -from lib2to3.fixer_util import (Node, Call, Name, syms, Comma, Number) -import re - - -def canonical_fix_name(fix, avail_fixes): - """ - Examples: - >>> canonical_fix_name('fix_wrap_text_literals') - 'libfuturize.fixes.fix_wrap_text_literals' - >>> canonical_fix_name('wrap_text_literals') - 'libfuturize.fixes.fix_wrap_text_literals' - >>> canonical_fix_name('wrap_te') - ValueError("unknown fixer name") - >>> canonical_fix_name('wrap') - ValueError("ambiguous fixer name") - """ - if ".fix_" in fix: - return fix - else: - if fix.startswith('fix_'): - fix = fix[4:] - # Infer the full module name for the fixer. - # First ensure that no names clash (e.g. - # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): - found = [f for f in avail_fixes - if f.endswith('fix_{0}'.format(fix))] - if len(found) > 1: - raise ValueError("Ambiguous fixer name. Choose a fully qualified " - "module name instead from these:\n" + - "\n".join(" " + myf for myf in found)) - elif len(found) == 0: - raise ValueError("Unknown fixer. Use --list-fixes or -l for a list.") - return found[0] - - - -## These functions are from 3to2 by Joe Amenta: - -def Star(prefix=None): - return Leaf(token.STAR, u'*', prefix=prefix) - -def DoubleStar(prefix=None): - return Leaf(token.DOUBLESTAR, u'**', prefix=prefix) - -def Minus(prefix=None): - return Leaf(token.MINUS, u'-', prefix=prefix) - -def commatize(leafs): - """ - Accepts/turns: (Name, Name, ..., Name, Name) - Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name) - """ - new_leafs = [] - for leaf in leafs: - new_leafs.append(leaf) - new_leafs.append(Comma()) - del new_leafs[-1] - return new_leafs - -def indentation(node): - """ - Returns the indentation for this node - Iff a node is in a suite, then it has indentation. - """ - while node.parent is not None and node.parent.type != syms.suite: - node = node.parent - if node.parent is None: - return u"" - # The first three children of a suite are NEWLINE, INDENT, (some other node) - # INDENT.value contains the indentation for this suite - # anything after (some other node) has the indentation as its prefix. - if node.type == token.INDENT: - return node.value - elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT: - return node.prev_sibling.value - elif node.prev_sibling is None: - return u"" - else: - return node.prefix - -def indentation_step(node): - """ - Dirty little trick to get the difference between each indentation level - Implemented by finding the shortest indentation string - (technically, the "least" of all of the indentation strings, but - tabs and spaces mixed won't get this far, so those are synonymous.) - """ - r = find_root(node) - # Collect all indentations into one set. 
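The fixer utilities above and below operate on lib2to3 concrete syntax trees. A quick sketch of building such a tree and collecting `INDENT` leaves, which is essentially the strategy `indentation_step()` uses (note that lib2to3 is deprecated in recent Python releases):

```python
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver, token

src = "def f():\n    if True:\n        return 1\n"
drv = driver.Driver(pygram.python_grammar_no_print_statement,
                    convert=pytree.convert)
tree = drv.parse_string(src)

indents = {leaf.value for leaf in tree.pre_order()
           if leaf.type == token.INDENT}
print(sorted(indents))   # ['    ', '        ']
print(min(indents))      # '    ' -- the smallest indent wins
```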
- all_indents = set(i.value for i in r.pre_order() if i.type == token.INDENT) - if not all_indents: - # nothing is indented anywhere, so we get to pick what we want - return u" " # four spaces is a popular convention - else: - return min(all_indents) - -def suitify(parent): - """ - Turn the stuff after the first colon in parent's children - into a suite, if it wasn't already - """ - for node in parent.children: - if node.type == syms.suite: - # already in the prefered format, do nothing - return - - # One-liners have no suite node, we have to fake one up - for i, node in enumerate(parent.children): - if node.type == token.COLON: - break - else: - raise ValueError(u"No class suite and no ':'!") - # Move everything into a suite node - suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))]) - one_node = parent.children[i+1] - one_node.remove() - one_node.prefix = u'' - suite.append_child(one_node) - parent.append_child(suite) - -def NameImport(package, as_name=None, prefix=None): - """ - Accepts a package (Name node), name to import it as (string), and - optional prefix and returns a node: - import [as ] - """ - if prefix is None: - prefix = u"" - children = [Name(u"import", prefix=prefix), package] - if as_name is not None: - children.extend([Name(u"as", prefix=u" "), - Name(as_name, prefix=u" ")]) - return Node(syms.import_name, children) - -_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt) -_import_stmts = (syms.import_name, syms.import_from) - -def import_binding_scope(node): - """ - Generator yields all nodes for which a node (an import_stmt) has scope - The purpose of this is for a call to _find() on each of them - """ - # import_name / import_from are small_stmts - assert node.type in _import_stmts - test = node.next_sibling - # A small_stmt can only be followed by a SEMI or a NEWLINE. - while test.type == token.SEMI: - nxt = test.next_sibling - # A SEMI can only be followed by a small_stmt or a NEWLINE - if nxt.type == token.NEWLINE: - break - else: - yield nxt - # A small_stmt can only be followed by either a SEMI or a NEWLINE - test = nxt.next_sibling - # Covered all subsequent small_stmts after the import_stmt - # Now to cover all subsequent stmts after the parent simple_stmt - parent = node.parent - assert parent.type == syms.simple_stmt - test = parent.next_sibling - while test is not None: - # Yes, this will yield NEWLINE and DEDENT. Deal with it. - yield test - test = test.next_sibling - - context = parent.parent - # Recursively yield nodes following imports inside of a if/while/for/try/with statement - if context.type in _compound_stmts: - # import is in a one-liner - c = context - while c.next_sibling is not None: - yield c.next_sibling - c = c.next_sibling - context = context.parent - - # Can't chain one-liners on one line, so that takes care of that. 
- - p = context.parent - if p is None: - return - - # in a multi-line suite - - while p.type in _compound_stmts: - - if context.type == syms.suite: - yield context - - context = context.next_sibling - - if context is None: - context = p.parent - p = context.parent - if p is None: - break - -def ImportAsName(name, as_name, prefix=None): - new_name = Name(name) - new_as = Name(u"as", prefix=u" ") - new_as_name = Name(as_name, prefix=u" ") - new_node = Node(syms.import_as_name, [new_name, new_as, new_as_name]) - if prefix is not None: - new_node.prefix = prefix - return new_node - - -def is_docstring(node): - """ - Returns True if the node appears to be a docstring - """ - return (node.type == syms.simple_stmt and - len(node.children) > 0 and node.children[0].type == token.STRING) - - -def future_import(feature, node): - """ - This seems to work - """ - root = find_root(node) - - if does_tree_import(u"__future__", feature, node): - return - - # Look for a shebang or encoding line - shebang_encoding_idx = None - - for idx, node in enumerate(root.children): - # Is it a shebang or encoding line? - if is_shebang_comment(node) or is_encoding_comment(node): - shebang_encoding_idx = idx - if is_docstring(node): - # skip over docstring - continue - names = check_future_import(node) - if not names: - # not a future statement; need to insert before this - break - if feature in names: - # already imported - return - - import_ = FromImport(u'__future__', [Leaf(token.NAME, feature, prefix=" ")]) - if shebang_encoding_idx == 0 and idx == 0: - # If this __future__ import would go on the first line, - # detach the shebang / encoding prefix from the current first line. - # and attach it to our new __future__ import node. - import_.prefix = root.children[0].prefix - root.children[0].prefix = u'' - # End the __future__ import line with a newline and add a blank line - # afterwards: - children = [import_ , Newline()] - root.insert_child(idx, Node(syms.simple_stmt, children)) - - -def future_import2(feature, node): - """ - An alternative to future_import() which might not work ... - """ - root = find_root(node) - - if does_tree_import(u"__future__", feature, node): - return - - insert_pos = 0 - for idx, node in enumerate(root.children): - if node.type == syms.simple_stmt and node.children and \ - node.children[0].type == token.STRING: - insert_pos = idx + 1 - break - - for thing_after in root.children[insert_pos:]: - if thing_after.type == token.NEWLINE: - insert_pos += 1 - continue - - prefix = thing_after.prefix - thing_after.prefix = u"" - break - else: - prefix = u"" - - import_ = FromImport(u"__future__", [Leaf(token.NAME, feature, prefix=u" ")]) - - children = [import_, Newline()] - root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix)) - -def parse_args(arglist, scheme): - u""" - Parse a list of arguments into a dict - """ - arglist = [i for i in arglist if i.type != token.COMMA] - - ret_mapping = dict([(k, None) for k in scheme]) - - for i, arg in enumerate(arglist): - if arg.type == syms.argument and arg.children[1].type == token.EQUAL: - # argument < NAME '=' any > - slot = arg.children[0].value - ret_mapping[slot] = arg.children[2] - else: - slot = scheme[i] - ret_mapping[slot] = arg - - return ret_mapping - - -# def is_import_from(node): -# """Returns true if the node is a statement "from ... import ..." 
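Both `future_import()` variants above guard on lib2to3's `does_tree_import()` so the feature line is never inserted twice. The guard in isolation:

```python
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver
from lib2to3.fixer_util import does_tree_import

drv = driver.Driver(pygram.python_grammar_no_print_statement,
                    convert=pytree.convert)
tree = drv.parse_string("from __future__ import division\nx = 1\n")
print(does_tree_import("__future__", "division", tree))        # True
print(does_tree_import("__future__", "print_function", tree))  # False
```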
-# """ -# return node.type == syms.import_from - - -def is_import_stmt(node): - return (node.type == syms.simple_stmt and node.children and - is_import(node.children[0])) - - -def touch_import_top(package, name_to_import, node): - """Works like `does_tree_import` but adds an import statement at the - top if it was not imported (but below any __future__ imports) and below any - comments such as shebang lines). - - Based on lib2to3.fixer_util.touch_import() - - Calling this multiple times adds the imports in reverse order. - - Also adds "standard_library.install_aliases()" after "from future import - standard_library". This should probably be factored into another function. - """ - - root = find_root(node) - - if does_tree_import(package, name_to_import, root): - return - - # Ideally, we would look for whether futurize --all-imports has been run, - # as indicated by the presence of ``from builtins import (ascii, ..., - # zip)`` -- and, if it has, we wouldn't import the name again. - - # Look for __future__ imports and insert below them - found = False - for name in ['absolute_import', 'division', 'print_function', - 'unicode_literals']: - if does_tree_import('__future__', name, root): - found = True - break - if found: - # At least one __future__ import. We want to loop until we've seen them - # all. - start, end = None, None - for idx, node in enumerate(root.children): - if check_future_import(node): - start = idx - # Start looping - idx2 = start - while node: - node = node.next_sibling - idx2 += 1 - if not check_future_import(node): - end = idx2 - break - break - assert start is not None - assert end is not None - insert_pos = end - else: - # No __future__ imports. - # We look for a docstring and insert the new node below that. If no docstring - # exists, just insert the node at the top. - for idx, node in enumerate(root.children): - if node.type != syms.simple_stmt: - break - if not is_docstring(node): - # This is the usual case. - break - insert_pos = idx - - if package is None: - import_ = Node(syms.import_name, [ - Leaf(token.NAME, u"import"), - Leaf(token.NAME, name_to_import, prefix=u" ") - ]) - else: - import_ = FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) - if name_to_import == u'standard_library': - # Add: - # standard_library.install_aliases() - # after: - # from future import standard_library - install_hooks = Node(syms.simple_stmt, - [Node(syms.power, - [Leaf(token.NAME, u'standard_library'), - Node(syms.trailer, [Leaf(token.DOT, u'.'), - Leaf(token.NAME, u'install_aliases')]), - Node(syms.trailer, [Leaf(token.LPAR, u'('), - Leaf(token.RPAR, u')')]) - ]) - ] - ) - children_hooks = [install_hooks, Newline()] - else: - children_hooks = [] - - # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) - - children_import = [import_, Newline()] - old_prefix = root.children[insert_pos].prefix - root.children[insert_pos].prefix = u'' - root.insert_child(insert_pos, Node(syms.simple_stmt, children_import, prefix=old_prefix)) - if len(children_hooks) > 0: - root.insert_child(insert_pos + 1, Node(syms.simple_stmt, children_hooks)) - - -## The following functions are from python-modernize by Armin Ronacher: -# (a little edited). 
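`touch_import_top()` above inserts the requested import below any `__future__` block and docstring, and for `standard_library` also appends the `install_aliases()` call. A rough sketch of driving it directly; this assumes the upstream `future` package is installed so `libfuturize` is importable outside this vendored copy:

```python
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver
from libfuturize.fixer_util import touch_import_top  # upstream 'future' package

drv = driver.Driver(pygram.python_grammar_no_print_statement,
                    convert=pytree.convert)
tree = drv.parse_string("import os\n")
touch_import_top(u"future", u"standard_library", tree)
print(str(tree))
# Expected output, approximately:
#   from future import standard_library
#   standard_library.install_aliases()
#   import os
```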
- -def check_future_import(node): - """If this is a future import, return set of symbols that are imported, - else return None.""" - # node should be the import statement here - savenode = node - if not (node.type == syms.simple_stmt and node.children): - return set() - node = node.children[0] - # now node is the import_from node - if not (node.type == syms.import_from and - # node.type == token.NAME and # seems to break it - hasattr(node.children[1], 'value') and - node.children[1].value == u'__future__'): - return set() - node = node.children[3] - # now node is the import_as_name[s] - # print(python_grammar.number2symbol[node.type]) # breaks sometimes - if node.type == syms.import_as_names: - result = set() - for n in node.children: - if n.type == token.NAME: - result.add(n.value) - elif n.type == syms.import_as_name: - n = n.children[0] - assert n.type == token.NAME - result.add(n.value) - return result - elif node.type == syms.import_as_name: - node = node.children[0] - assert node.type == token.NAME - return set([node.value]) - elif node.type == token.NAME: - return set([node.value]) - else: - # TODO: handle brackets like this: - # from __future__ import (absolute_import, division) - assert False, "strange import: %s" % savenode - - -SHEBANG_REGEX = r'^#!.*python' -ENCODING_REGEX = r"^#.*coding[:=]\s*([-\w.]+)" - - -def is_shebang_comment(node): - """ - Comments are prefixes for Leaf nodes. Returns whether the given node has a - prefix that looks like a shebang line or an encoding line: - - #!/usr/bin/env python - #!/usr/bin/python3 - """ - return bool(re.match(SHEBANG_REGEX, node.prefix)) - - -def is_encoding_comment(node): - """ - Comments are prefixes for Leaf nodes. Returns whether the given node has a - prefix that looks like an encoding line: - - # coding: utf-8 - # encoding: utf-8 - # -*- coding: -*- - # vim: set fileencoding= : - """ - return bool(re.match(ENCODING_REGEX, node.prefix)) - - -def wrap_in_fn_call(fn_name, args, prefix=None): - """ - Example: - >>> wrap_in_fn_call("oldstr", (arg,)) - oldstr(arg) - - >>> wrap_in_fn_call("olddiv", (arg1, arg2)) - olddiv(arg1, arg2) - """ - assert len(args) > 0 - if len(args) == 1: - newargs = args - elif len(args) == 2: - expr1, expr2 = args - newargs = [expr1, Comma(), expr2] - else: - assert NotImplementedError('write me') - return Call(Name(fn_name), newargs, prefix=prefix) - - diff --git a/pype/vendor/libfuturize/fixes/__init__.py b/pype/vendor/libfuturize/fixes/__init__.py deleted file mode 100644 index a059c949dd..0000000000 --- a/pype/vendor/libfuturize/fixes/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ -import sys -from lib2to3 import refactor - -# The following fixers are "safe": they convert Python 2 code to more -# modern Python 2 code. They should be uncontroversial to apply to most -# projects that are happy to drop support for Py2.5 and below. Applying -# them first will reduce the size of the patch set for the real porting. -lib2to3_fix_names_stage1 = set([ - 'lib2to3.fixes.fix_apply', - 'lib2to3.fixes.fix_except', - 'lib2to3.fixes.fix_exec', - 'lib2to3.fixes.fix_exitfunc', - 'lib2to3.fixes.fix_funcattrs', - 'lib2to3.fixes.fix_has_key', - 'lib2to3.fixes.fix_idioms', - # 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import) - 'lib2to3.fixes.fix_intern', - 'lib2to3.fixes.fix_isinstance', - 'lib2to3.fixes.fix_methodattrs', - 'lib2to3.fixes.fix_ne', - # 'lib2to3.fixes.fix_next', # would replace ``next`` method names - # with ``__next__``. 
- 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 - 'lib2to3.fixes.fix_paren', - # 'lib2to3.fixes.fix_print', # see the libfuturize fixer that also - # adds ``from __future__ import print_function`` - # 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions - 'lib2to3.fixes.fix_reduce', # reduce is available in functools on Py2.6/Py2.7 - 'lib2to3.fixes.fix_renames', # sys.maxint -> sys.maxsize - # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support - 'lib2to3.fixes.fix_repr', - 'lib2to3.fixes.fix_standarderror', - 'lib2to3.fixes.fix_sys_exc', - 'lib2to3.fixes.fix_throw', - 'lib2to3.fixes.fix_tuple_params', - 'lib2to3.fixes.fix_types', - 'lib2to3.fixes.fix_ws_comma', # can perhaps decrease readability: see issue #58 - 'lib2to3.fixes.fix_xreadlines', -]) - -# The following fixers add a dependency on the ``future`` package on order to -# support Python 2: -lib2to3_fix_names_stage2 = set([ - # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. - # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ - 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2 - # 'lib2to3.fixes.fix_execfile', # some problems: see issue #37. - # We use a custom fixer instead (see below) - # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports - 'lib2to3.fixes.fix_getcwdu', - # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library - # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) - 'lib2to3.fixes.fix_input', - 'lib2to3.fixes.fix_itertools', - 'lib2to3.fixes.fix_itertools_imports', - 'lib2to3.fixes.fix_filter', - 'lib2to3.fixes.fix_long', - 'lib2to3.fixes.fix_map', - # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead - 'lib2to3.fixes.fix_next', - 'lib2to3.fixes.fix_nonzero', # TODO: cause this to import ``object`` and/or add a decorator for mapping __bool__ to __nonzero__ - 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3 - 'lib2to3.fixes.fix_raw_input', - # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings - # 'lib2to3.fixes.fix_urllib', # included in libfuturize.fix_future_standard_library_urllib - # 'lib2to3.fixes.fix_xrange', # custom one because of a bug with Py3.3's lib2to3 - 'lib2to3.fixes.fix_zip', -]) - -libfuturize_fix_names_stage1 = set([ - 'libfuturize.fixes.fix_absolute_import', - 'libfuturize.fixes.fix_next_call', # obj.next() -> next(obj). Unlike - # lib2to3.fixes.fix_next, doesn't change - # the ``next`` method to ``__next__``. 
- 'libfuturize.fixes.fix_print_with_import', - 'libfuturize.fixes.fix_raise', - # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing -]) - -libfuturize_fix_names_stage2 = set([ - 'libfuturize.fixes.fix_basestring', - # 'libfuturize.fixes.fix_add__future__imports_except_unicode_literals', # just in case - 'libfuturize.fixes.fix_cmp', - 'libfuturize.fixes.fix_division_safe', - 'libfuturize.fixes.fix_execfile', - 'libfuturize.fixes.fix_future_builtins', - 'libfuturize.fixes.fix_future_standard_library', - 'libfuturize.fixes.fix_future_standard_library_urllib', - 'libfuturize.fixes.fix_metaclass', - 'libpasteurize.fixes.fix_newstyle', - 'libfuturize.fixes.fix_object', - # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing - 'libfuturize.fixes.fix_unicode_keep_u', - # 'libfuturize.fixes.fix_unicode_literals_import', - 'libfuturize.fixes.fix_xrange_with_import', # custom one because of a bug with Py3.3's lib2to3 -]) - diff --git a/pype/vendor/libfuturize/fixes/fix_UserDict.py b/pype/vendor/libfuturize/fixes/fix_UserDict.py deleted file mode 100644 index 73b1cfb88e..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_UserDict.py +++ /dev/null @@ -1,103 +0,0 @@ -"""Fix UserDict. - -Incomplete! - -TODO: base this on fix_urllib perhaps? -""" - - -# Local imports -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name, attr_chain -from lib2to3.fixes.fix_imports import alternates, build_pattern, FixImports - -MAPPING = {'UserDict': 'collections', -} - -# def alternates(members): -# return "(" + "|".join(map(repr, members)) + ")" -# -# -# def build_pattern(mapping=MAPPING): -# mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) -# bare_names = alternates(mapping.keys()) -# -# yield """name_import=import_name< 'import' ((%s) | -# multiple_imports=dotted_as_names< any* (%s) any* >) > -# """ % (mod_list, mod_list) -# yield """import_from< 'from' (%s) 'import' ['('] -# ( any | import_as_name< any 'as' any > | -# import_as_names< any* >) [')'] > -# """ % mod_list -# yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | -# multiple_imports=dotted_as_names< -# any* dotted_as_name< (%s) 'as' any > any* >) > -# """ % (mod_list, mod_list) -# -# # Find usages of module members in code e.g. thread.foo(bar) -# yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names - - -# class FixUserDict(fixer_base.BaseFix): -class FixUserdict(FixImports): - - BM_compatible = True - keep_line_order = True - # This is overridden in fix_imports2. - mapping = MAPPING - - # We want to run this fixer late, so fix_import doesn't try to make stdlib - # renames into relative imports. - run_order = 6 - - def build_pattern(self): - return "|".join(build_pattern(self.mapping)) - - def compile_pattern(self): - # We override this, so MAPPING can be pragmatically altered and the - # changes will be reflected in PATTERN. - self.PATTERN = self.build_pattern() - super(FixImports, self).compile_pattern() - - # Don't match the node if it's within another match. - def match(self, node): - match = super(FixImports, self).match - results = match(node) - if results: - # Module usage could be in the trailer of an attribute lookup, so we - # might have nested matches when "bare_with_attr" is present. 
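The four name sets above are what the `futurize` script hands to lib2to3's refactoring machinery: stage 1 is the "safe" modernisation pass, stage 2 adds the runtime dependency on `future`. Driving stage 1 by hand looks roughly like this, again assuming the upstream package is installed:

```python
from lib2to3.refactor import RefactoringTool
from libfuturize.fixes import (lib2to3_fix_names_stage1,
                               libfuturize_fix_names_stage1)

tool = RefactoringTool(sorted(lib2to3_fix_names_stage1 |
                              libfuturize_fix_names_stage1))
src = "print 'hello'\nd = {}\nprint d.has_key('k')\n"
print(tool.refactor_string(src, "<example>"))
# Roughly:
#   from __future__ import print_function
#   print('hello')
#   d = {}
#   print('k' in d)
```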
- if "bare_with_attr" not in results and \ - any(match(obj) for obj in attr_chain(node, "parent")): - return False - return results - return False - - def start_tree(self, tree, filename): - super(FixImports, self).start_tree(tree, filename) - self.replace = {} - - def transform(self, node, results): - import_mod = results.get("module_name") - if import_mod: - mod_name = import_mod.value - new_name = unicode(self.mapping[mod_name]) - import_mod.replace(Name(new_name, prefix=import_mod.prefix)) - if "name_import" in results: - # If it's not a "from x import x, y" or "import x as y" import, - # marked its usage to be replaced. - self.replace[mod_name] = new_name - if "multiple_imports" in results: - # This is a nasty hack to fix multiple imports on a line (e.g., - # "import StringIO, urlparse"). The problem is that I can't - # figure out an easy way to make a pattern recognize the keys of - # MAPPING randomly sprinkled in an import statement. - results = self.match(node) - if results: - self.transform(node, results) - else: - # Replace usage of the module. - bare_name = results["bare_with_attr"][0] - new_name = self.replace.get(bare_name.value) - if new_name: - bare_name.replace(Name(new_name, prefix=bare_name.prefix)) - diff --git a/pype/vendor/libfuturize/fixes/fix_absolute_import.py b/pype/vendor/libfuturize/fixes/fix_absolute_import.py deleted file mode 100644 index ab6a7647cb..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_absolute_import.py +++ /dev/null @@ -1,92 +0,0 @@ -""" -Fixer for import statements, with a __future__ import line. - -Based on lib2to3/fixes/fix_import.py, but extended slightly so it also -supports Cython modules. - -If spam is being imported from the local directory, this import: - from spam import eggs -becomes: - from __future__ import absolute_import - from .spam import eggs - -and this import: - import spam -becomes: - from __future__ import absolute_import - from . import spam -""" - -from os.path import dirname, join, exists, sep -from lib2to3.fixes.fix_import import FixImport -from lib2to3.fixer_util import FromImport, syms -from lib2to3.fixes.fix_import import traverse_imports - -from libfuturize.fixer_util import future_import - - -class FixAbsoluteImport(FixImport): - run_order = 9 - - def transform(self, node, results): - """ - Copied from FixImport.transform(), but with this line added in - any modules that had implicit relative imports changed: - - from __future__ import absolute_import" - """ - if self.skip: - return - imp = results['imp'] - - if node.type == syms.import_from: - # Some imps are top-level (eg: 'import ham') - # some are first level (eg: 'import ham.eggs') - # some are third level (eg: 'import ham.eggs as spam') - # Hence, the loop - while not hasattr(imp, 'value'): - imp = imp.children[0] - if self.probably_a_local_import(imp.value): - imp.value = u"." + imp.value - imp.changed() - future_import(u"absolute_import", node) - else: - have_local = False - have_absolute = False - for mod_name in traverse_imports(imp): - if self.probably_a_local_import(mod_name): - have_local = True - else: - have_absolute = True - if have_absolute: - if have_local: - # We won't handle both sibling and absolute imports in the - # same statement at the moment. 
- self.warning(node, "absolute and local imports together") - return - - new = FromImport(u".", [imp]) - new.prefix = node.prefix - future_import(u"absolute_import", node) - return new - - def probably_a_local_import(self, imp_name): - """ - Like the corresponding method in the base class, but this also - supports Cython modules. - """ - if imp_name.startswith(u"."): - # Relative imports are certainly not local imports. - return False - imp_name = imp_name.split(u".", 1)[0] - base_path = dirname(self.filename) - base_path = join(base_path, imp_name) - # If there is no __init__.py next to the file its not in a package - # so can't be a relative import. - if not exists(join(dirname(base_path), "__init__.py")): - return False - for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]: - if exists(base_path + ext): - return True - return False - diff --git a/pype/vendor/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py b/pype/vendor/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py deleted file mode 100644 index 1904d37b4d..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Fixer for adding: - - from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - -This is "stage 1": hopefully uncontroversial changes. - -Stage 2 adds ``unicode_literals``. -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import future_import - -class FixAddFutureImportsExceptUnicodeLiterals(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - - run_order = 9 - - def transform(self, node, results): - # Reverse order: - future_import(u"print_function", node) - future_import(u"division", node) - future_import(u"absolute_import", node) - diff --git a/pype/vendor/libfuturize/fixes/fix_basestring.py b/pype/vendor/libfuturize/fixes/fix_basestring.py deleted file mode 100644 index 8c6ec6ced3..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_basestring.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -Fixer that adds ``from past.builtins import basestring`` if there is a -reference to ``basestring`` -""" - -from lib2to3 import fixer_base - -from libfuturize.fixer_util import touch_import_top - - -class FixBasestring(fixer_base.BaseFix): - BM_compatible = True - - PATTERN = "'basestring'" - - def transform(self, node, results): - touch_import_top(u'past.builtins', 'basestring', node) - diff --git a/pype/vendor/libfuturize/fixes/fix_bytes.py b/pype/vendor/libfuturize/fixes/fix_bytes.py deleted file mode 100644 index 42021223ab..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_bytes.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Optional fixer that changes all unprefixed string literals "..." to b"...". - -br'abcd' is a SyntaxError on Python 2 but valid on Python 3. -ur'abcd' is a SyntaxError on Python 3 but valid on Python 2. 
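`FixBasestring` above only injects `from past.builtins import basestring`; the backported name then makes Py2-era `isinstance` checks meaningful on Python 3 as well. What that import provides, per the upstream package (assumed installed):

```python
from past.builtins import basestring  # upstream 'future' package

print(isinstance(u"text", basestring))   # True
print(isinstance(b"bytes", basestring))  # True -- mirrors Py2 semantics
print(isinstance(42, basestring))        # False
```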
- -""" -from __future__ import unicode_literals - -import re -from lib2to3.pgen2 import token -from lib2to3 import fixer_base - -_literal_re = re.compile(r"[^bBuUrR]?[\'\"]") - -class FixBytes(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "STRING" - - def transform(self, node, results): - if node.type == token.STRING: - if _literal_re.match(node.value): - new = node.clone() - new.value = u'b' + new.value - return new diff --git a/pype/vendor/libfuturize/fixes/fix_cmp.py b/pype/vendor/libfuturize/fixes/fix_cmp.py deleted file mode 100644 index be56507eb5..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_cmp.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding: utf-8 -""" -Fixer for the cmp() function on Py2, which was removed in Py3. - -Adds this import line:: - - from past.builtins import cmp - -if cmp() is called in the code. -""" - -from __future__ import unicode_literals -from lib2to3 import fixer_base - -from libfuturize.fixer_util import touch_import_top - - -expression = "name='cmp'" - - -class FixCmp(fixer_base.BaseFix): - BM_compatible = True - run_order = 9 - - PATTERN = """ - power< - ({0}) trailer< '(' args=[any] ')' > - rest=any* > - """.format(expression) - - def transform(self, node, results): - name = results["name"] - touch_import_top(u'past.builtins', name.value, node) - diff --git a/pype/vendor/libfuturize/fixes/fix_division.py b/pype/vendor/libfuturize/fixes/fix_division.py deleted file mode 100644 index 4874550442..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_division.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -UNFINISHED -For the ``future`` package. - -Adds this import line: - - from __future__ import division - -at the top so the code runs identically on Py3 and Py2.6/2.7 -""" - -from libpasteurize.fixes.fix_division import FixDivision - diff --git a/pype/vendor/libfuturize/fixes/fix_division_safe.py b/pype/vendor/libfuturize/fixes/fix_division_safe.py deleted file mode 100644 index 5e16b0a514..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_division_safe.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -For the ``future`` package. - -Adds this import line: - - from __future__ import division - -at the top and changes any old-style divisions to be calls to -past.utils.old_div so the code runs as before on Py2.6/2.7 and has the same -behaviour on Py3. - -If "from __future__ import division" is already in effect, this fixer does -nothing. -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import syms, does_tree_import -from libfuturize.fixer_util import (token, future_import, touch_import_top, - wrap_in_fn_call) - - -def match_division(node): - u""" - __future__.division redefines the meaning of a single slash for division, - so we match that and only that. - """ - slash = token.SLASH - return node.type == slash and not node.next_sibling.type == slash and \ - not node.prev_sibling.type == slash - - -class FixDivisionSafe(fixer_base.BaseFix): - # BM_compatible = True - run_order = 4 # this seems to be ignored? - - _accept_type = token.SLASH - - PATTERN = """ - term<(not('/') any)+ '/' ((not('/') any))> - """ - - def start_tree(self, tree, name): - """ - Skip this fixer if "__future__.division" is already imported. - """ - super(FixDivisionSafe, self).start_tree(tree, name) - self.skip = "division" in tree.future_features - - def match(self, node): - u""" - Since the tree needs to be fixed once and only once if and only if it - matches, we can start discarding matches after the first. 
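`FixCmp` above works the same way, pulling in the `cmp()` backport rather than rewriting call sites. The backport's contract, assuming the upstream package:

```python
from past.builtins import cmp  # upstream 'future' package

print(cmp(1, 2))      # -1  (left is smaller)
print(cmp(2, 2))      #  0  (equal)
print(cmp("b", "a"))  #  1  (left is larger)
```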
- """ - if (node.type == self.syms.term and - len(node.children) == 3 and - match_division(node.children[1])): - expr1, expr2 = node.children[0], node.children[2] - return expr1, expr2 - else: - return False - - def transform(self, node, results): - if self.skip: - return - future_import(u"division", node) - - touch_import_top(u'past.utils', u'old_div', node) - expr1, expr2 = results[0].clone(), results[1].clone() - # Strip any leading space for the first number: - expr1.prefix = u'' - return wrap_in_fn_call("old_div", (expr1, expr2), prefix=node.prefix) - diff --git a/pype/vendor/libfuturize/fixes/fix_execfile.py b/pype/vendor/libfuturize/fixes/fix_execfile.py deleted file mode 100644 index 2b794c882f..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_execfile.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding: utf-8 -""" -Fixer for the execfile() function on Py2, which was removed in Py3. - -The Lib/lib2to3/fixes/fix_execfile.py module has some problems: see -python-future issue #37. This fixer merely imports execfile() from -past.builtins and leaves the code alone. - -Adds this import line:: - - from past.builtins import execfile - -for the function execfile() that was removed from Py3. -""" - -from __future__ import unicode_literals -from lib2to3 import fixer_base - -from libfuturize.fixer_util import touch_import_top - - -expression = "name='execfile'" - - -class FixExecfile(fixer_base.BaseFix): - BM_compatible = True - run_order = 9 - - PATTERN = """ - power< - ({0}) trailer< '(' args=[any] ')' > - rest=any* > - """.format(expression) - - def transform(self, node, results): - name = results["name"] - touch_import_top(u'past.builtins', name.value, node) - diff --git a/pype/vendor/libfuturize/fixes/fix_future_builtins.py b/pype/vendor/libfuturize/fixes/fix_future_builtins.py deleted file mode 100644 index bf3aba40e1..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_future_builtins.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -For the ``future`` package. - -Adds this import line:: - - from builtins import XYZ - -for each of the functions XYZ that is used in the module. - -Adds these imports after any other imports (in an initial block of them). -""" - -from __future__ import unicode_literals - -from lib2to3 import fixer_base -from lib2to3.pygram import python_symbols as syms -from lib2to3.fixer_util import Name, Call, in_special_context - -from libfuturize.fixer_util import touch_import_top - -# All builtins are: -# from future.builtins.iterators import (filter, map, zip) -# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) -# from future.types import (bytes, dict, int, range, str) -# We don't need isinstance any more. - -replaced_builtin_fns = '''filter map zip - ascii chr hex input next oct - bytes range str raw_input'''.split() - # This includes raw_input as a workaround for the - # lib2to3 fixer for raw_input on Py3 (only), allowing - # the correct import to be included. (Py3 seems to run - # the fixers the wrong way around, perhaps ignoring the - # run_order class attribute below ...) - -expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtin_fns]) - - -class FixFutureBuiltins(fixer_base.BaseFix): - BM_compatible = True - run_order = 7 - - # Currently we only match uses as a function. This doesn't match e.g.: - # if isinstance(s, str): - # ... 
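`FixExecfile` above takes the same minimal approach: it only adds `from past.builtins import execfile`, whose backport runs a file in a given namespace the way the Py2 builtin did. A hypothetical usage (the scratch-file name is illustrative):

```python
from past.builtins import execfile  # upstream 'future' package

with open("snippet_tmp.py", "w") as f:   # hypothetical scratch file
    f.write("answer = 6 * 7\n")

namespace = {}
execfile("snippet_tmp.py", namespace)
print(namespace["answer"])   # 42
```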
- PATTERN = """ - power< - ({0}) trailer< '(' [arglist=any] ')' > - rest=any* > - | - power< - 'map' trailer< '(' [arglist=any] ')' > - > - """.format(expression) - - def transform(self, node, results): - name = results["name"] - touch_import_top(u'builtins', name.value, node) - # name.replace(Name(u"input", prefix=name.prefix)) - diff --git a/pype/vendor/libfuturize/fixes/fix_future_standard_library.py b/pype/vendor/libfuturize/fixes/fix_future_standard_library.py deleted file mode 100644 index 501c2a940e..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_future_standard_library.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -For the ``future`` package. - -Changes any imports needed to reflect the standard library reorganization. Also -Also adds these import lines: - - from future import standard_library - standard_library.install_aliases() - -after any __future__ imports but before any other imports. -""" - -from lib2to3.fixes.fix_imports import FixImports -from libfuturize.fixer_util import touch_import_top - - -class FixFutureStandardLibrary(FixImports): - run_order = 8 - - def transform(self, node, results): - result = super(FixFutureStandardLibrary, self).transform(node, results) - # TODO: add a blank line between any __future__ imports and this? - touch_import_top(u'future', u'standard_library', node) - return result - - diff --git a/pype/vendor/libfuturize/fixes/fix_future_standard_library_urllib.py b/pype/vendor/libfuturize/fixes/fix_future_standard_library_urllib.py deleted file mode 100644 index 3d62959fd2..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_future_standard_library_urllib.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -For the ``future`` package. - -A special fixer that ensures that these lines have been added:: - - from future import standard_library - standard_library.install_hooks() - -even if the only module imported was ``urllib``, in which case the regular fixer -wouldn't have added these lines. - -""" - -from lib2to3.fixes.fix_urllib import FixUrllib -from libfuturize.fixer_util import touch_import_top, find_root - - -class FixFutureStandardLibraryUrllib(FixUrllib): # not a subclass of FixImports - run_order = 8 - - def transform(self, node, results): - # transform_member() in lib2to3/fixes/fix_urllib.py breaks node so find_root(node) - # no longer works after the super() call below. So we find the root first: - root = find_root(node) - result = super(FixFutureStandardLibraryUrllib, self).transform(node, results) - # TODO: add a blank line between any __future__ imports and this? - touch_import_top(u'future', u'standard_library', root) - return result - - diff --git a/pype/vendor/libfuturize/fixes/fix_metaclass.py b/pype/vendor/libfuturize/fixes/fix_metaclass.py deleted file mode 100644 index 2ac41c9728..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_metaclass.py +++ /dev/null @@ -1,262 +0,0 @@ -# coding: utf-8 -"""Fixer for __metaclass__ = X -> (future.utils.with_metaclass(X)) methods. - - The various forms of classef (inherits nothing, inherits once, inherints - many) don't parse the same in the CST so we look at ALL classes for - a __metaclass__ and if we find one normalize the inherits to all be - an arglist. - - For one-liner classes ('class X: pass') there is no indent/dedent so - we normalize those into having a suite. - - Moving the __metaclass__ into the classdef can also cause the class - body to be empty so there is some special casing for that as well. - - This fixer also tries very hard to keep original indenting and spacing - in all those corner cases. 
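The two standard-library fixers above exist to guarantee this pair of lines ends up near the top of the module; once `install_aliases()` has run, Python 3 module paths resolve on Python 2 too (on Python 3 the call is effectively a no-op):

```python
from future import standard_library  # upstream 'future' package
standard_library.install_aliases()

# These Py3-style imports now work unchanged on Py2 as well:
from urllib.parse import urlparse
import queue

print(urlparse("https://example.com/a?b=1").query)   # b=1
print(queue.Queue)                                   # <class 'queue.Queue'>
```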
-""" -# This is a derived work of Lib/lib2to3/fixes/fix_metaclass.py under the -# copyright of the Python Software Foundation, licensed under the Python -# Software Foundation License 2. -# -# Copyright notice: -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013 Python Software Foundation. All rights reserved. -# -# Full license text: http://docs.python.org/3.4/license.html - -# Author: Jack Diederich, Daniel Neuhäuser - -# Local imports -from lib2to3 import fixer_base -from lib2to3.pygram import token -from lib2to3.fixer_util import Name, syms, Node, Leaf, touch_import, Call, \ - String, Comma, parenthesize - - -def has_metaclass(parent): - """ we have to check the cls_node without changing it. - There are two possiblities: - 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') - 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') - """ - for node in parent.children: - if node.type == syms.suite: - return has_metaclass(node) - elif node.type == syms.simple_stmt and node.children: - expr_node = node.children[0] - if expr_node.type == syms.expr_stmt and expr_node.children: - left_side = expr_node.children[0] - if isinstance(left_side, Leaf) and \ - left_side.value == '__metaclass__': - return True - return False - - -def fixup_parse_tree(cls_node): - """ one-line classes don't get a suite in the parse tree so we add - one to normalize the tree - """ - for node in cls_node.children: - if node.type == syms.suite: - # already in the preferred format, do nothing - return - - # !%@#! oneliners have no suite node, we have to fake one up - for i, node in enumerate(cls_node.children): - if node.type == token.COLON: - break - else: - raise ValueError("No class suite and no ':'!") - - # move everything into a suite node - suite = Node(syms.suite, []) - while cls_node.children[i+1:]: - move_node = cls_node.children[i+1] - suite.append_child(move_node.clone()) - move_node.remove() - cls_node.append_child(suite) - node = suite - - -def fixup_simple_stmt(parent, i, stmt_node): - """ if there is a semi-colon all the parts count as part of the same - simple_stmt. We just want the __metaclass__ part so we move - everything efter the semi-colon into its own simple_stmt node - """ - for semi_ind, node in enumerate(stmt_node.children): - if node.type == token.SEMI: # *sigh* - break - else: - return - - node.remove() # kill the semicolon - new_expr = Node(syms.expr_stmt, []) - new_stmt = Node(syms.simple_stmt, [new_expr]) - while stmt_node.children[semi_ind:]: - move_node = stmt_node.children[semi_ind] - new_expr.append_child(move_node.clone()) - move_node.remove() - parent.insert_child(i, new_stmt) - new_leaf1 = new_stmt.children[0].children[0] - old_leaf1 = stmt_node.children[0].children[0] - new_leaf1.prefix = old_leaf1.prefix - - -def remove_trailing_newline(node): - if node.children and node.children[-1].type == token.NEWLINE: - node.children[-1].remove() - - -def find_metas(cls_node): - # find the suite node (Mmm, sweet nodes) - for node in cls_node.children: - if node.type == syms.suite: - break - else: - raise ValueError("No class suite!") - - # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] - for i, simple_node in list(enumerate(node.children)): - if simple_node.type == syms.simple_stmt and simple_node.children: - expr_node = simple_node.children[0] - if expr_node.type == syms.expr_stmt and expr_node.children: - # Check if the expr_node is a simple assignment. 
- left_node = expr_node.children[0] - if isinstance(left_node, Leaf) and \ - left_node.value == u'__metaclass__': - # We found a assignment to __metaclass__. - fixup_simple_stmt(node, i, simple_node) - remove_trailing_newline(simple_node) - yield (node, i, simple_node) - - -def fixup_indent(suite): - """ If an INDENT is followed by a thing with a prefix then nuke the prefix - Otherwise we get in trouble when removing __metaclass__ at suite start - """ - kids = suite.children[::-1] - # find the first indent - while kids: - node = kids.pop() - if node.type == token.INDENT: - break - - # find the first Leaf - while kids: - node = kids.pop() - if isinstance(node, Leaf) and node.type != token.DEDENT: - if node.prefix: - node.prefix = u'' - return - else: - kids.extend(node.children[::-1]) - - -class FixMetaclass(fixer_base.BaseFix): - BM_compatible = True - - PATTERN = """ - classdef - """ - - def transform(self, node, results): - if not has_metaclass(node): - return - - fixup_parse_tree(node) - - # find metaclasses, keep the last one - last_metaclass = None - for suite, i, stmt in find_metas(node): - last_metaclass = stmt - stmt.remove() - - text_type = node.children[0].type # always Leaf(nnn, 'class') - - # figure out what kind of classdef we have - if len(node.children) == 7: - # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) - # 0 1 2 3 4 5 6 - if node.children[3].type == syms.arglist: - arglist = node.children[3] - # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) - else: - parent = node.children[3].clone() - arglist = Node(syms.arglist, [parent]) - node.set_child(3, arglist) - elif len(node.children) == 6: - # Node(classdef, ['class', 'name', '(', ')', ':', suite]) - # 0 1 2 3 4 5 - arglist = Node(syms.arglist, []) - node.insert_child(3, arglist) - elif len(node.children) == 4: - # Node(classdef, ['class', 'name', ':', suite]) - # 0 1 2 3 - arglist = Node(syms.arglist, []) - node.insert_child(2, Leaf(token.RPAR, u')')) - node.insert_child(2, arglist) - node.insert_child(2, Leaf(token.LPAR, u'(')) - else: - raise ValueError("Unexpected class definition") - - # now stick the metaclass in the arglist - meta_txt = last_metaclass.children[0].children[0] - meta_txt.value = 'metaclass' - orig_meta_prefix = meta_txt.prefix - - # Was: touch_import(None, u'future.utils', node) - touch_import(u'future.utils', u'with_metaclass', node) - - metaclass = last_metaclass.children[0].children[2].clone() - metaclass.prefix = u'' - - arguments = [metaclass] - - if arglist.children: - if len(arglist.children) == 1: - base = arglist.children[0].clone() - base.prefix = u' ' - else: - # Unfortunately six.with_metaclass() only allows one base - # class, so we have to dynamically generate a base class if - # there is more than one. 
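The target form `FixMetaclass` produces is the `with_metaclass()` helper, which yields one class statement that parses on both major versions. Its use, assuming the upstream `future.utils` implementation:

```python
from future.utils import with_metaclass  # upstream 'future' package

class Meta(type):
    def __new__(mcs, name, bases, ns):
        ns.setdefault("tagged", True)
        return super(Meta, mcs).__new__(mcs, name, bases, ns)

class Base(object):
    pass

# Py2 spelling: __metaclass__ = Meta; Py3 spelling: metaclass=Meta.
# with_metaclass() replaces both:
class Foo(with_metaclass(Meta, Base)):
    pass

print(Foo.tagged)          # True
print(type(Foo) is Meta)   # True
```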
- bases = parenthesize(arglist.clone()) - bases.prefix = u' ' - base = Call(Name('type'), [ - String("'NewBase'"), - Comma(), - bases, - Comma(), - Node( - syms.atom, - [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')], - prefix=u' ' - ) - ], prefix=u' ') - arguments.extend([Comma(), base]) - - arglist.replace(Call( - Name(u'with_metaclass', prefix=arglist.prefix), - arguments - )) - - fixup_indent(suite) - - # check for empty suite - if not suite.children: - # one-liner that was just __metaclass_ - suite.remove() - pass_leaf = Leaf(text_type, u'pass') - pass_leaf.prefix = orig_meta_prefix - node.append_child(pass_leaf) - node.append_child(Leaf(token.NEWLINE, u'\n')) - - elif len(suite.children) > 1 and \ - (suite.children[-2].type == token.INDENT and - suite.children[-1].type == token.DEDENT): - # there was only one line in the class body and it was __metaclass__ - pass_leaf = Leaf(text_type, u'pass') - suite.insert_child(-1, pass_leaf) - suite.insert_child(-1, Leaf(token.NEWLINE, u'\n')) diff --git a/pype/vendor/libfuturize/fixes/fix_next_call.py b/pype/vendor/libfuturize/fixes/fix_next_call.py deleted file mode 100644 index 282f185226..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_next_call.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -Based on fix_next.py by Collin Winter. - -Replaces it.next() -> next(it), per PEP 3114. - -Unlike fix_next.py, this fixer doesn't replace the name of a next method with __next__, -which would break Python 2 compatibility without further help from fixers in -stage 2. -""" - -# Local imports -from lib2to3.pgen2 import token -from lib2to3.pygram import python_symbols as syms -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name, Call, find_binding - -bind_warning = "Calls to builtin next() possibly shadowed by global binding" - - -class FixNextCall(fixer_base.BaseFix): - BM_compatible = True - PATTERN = """ - power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > - | - power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > - | - global=global_stmt< 'global' any* 'next' any* > - """ - - order = "pre" # Pre-order tree traversal - - def start_tree(self, tree, filename): - super(FixNextCall, self).start_tree(tree, filename) - - n = find_binding('next', tree) - if n: - self.warning(n, bind_warning) - self.shadowed_next = True - else: - self.shadowed_next = False - - def transform(self, node, results): - assert results - - base = results.get("base") - attr = results.get("attr") - name = results.get("name") - - if base: - if self.shadowed_next: - # Omit this: - # attr.replace(Name("__next__", prefix=attr.prefix)) - pass - else: - base = [n.clone() for n in base] - base[0].prefix = "" - node.replace(Call(Name("next", prefix=node.prefix), base)) - elif name: - # Omit this: - # n = Name("__next__", prefix=name.prefix) - # name.replace(n) - pass - elif attr: - # We don't do this transformation if we're assigning to "x.next". - # Unfortunately, it doesn't seem possible to do this in PATTERN, - # so it's being done here. - if is_assign_target(node): - head = results["head"] - if "".join([str(n) for n in head]).strip() == '__builtin__': - self.warning(node, bind_warning) - return - # Omit this: - # attr.replace(Name("__next__")) - elif "global" in results: - self.warning(node, bind_warning) - self.shadowed_next = True - - -### The following functions help test if node is part of an assignment -### target. 
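# A hypothetical example of the assignment-target case these helpers
# detect (it and some_callable are made-up names): in
#
#     it.next = some_callable
#
# the trailing '.next' is being assigned to, so rewriting the expression
# to next(it) would change its meaning; is_assign_target() returns True
# for such nodes and the transformation is skipped.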
- -def is_assign_target(node): - assign = find_assign(node) - if assign is None: - return False - - for child in assign.children: - if child.type == token.EQUAL: - return False - elif is_subtree(child, node): - return True - return False - -def find_assign(node): - if node.type == syms.expr_stmt: - return node - if node.type == syms.simple_stmt or node.parent is None: - return None - return find_assign(node.parent) - -def is_subtree(root, node): - if root == node: - return True - return any(is_subtree(c, node) for c in root.children) diff --git a/pype/vendor/libfuturize/fixes/fix_object.py b/pype/vendor/libfuturize/fixes/fix_object.py deleted file mode 100644 index accf2c52e8..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_object.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Fixer that adds ``from builtins import object`` if there is a line -like this: - class Foo(object): -""" - -from lib2to3 import fixer_base - -from libfuturize.fixer_util import touch_import_top - - -class FixObject(fixer_base.BaseFix): - - PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >" - - def transform(self, node, results): - touch_import_top(u'builtins', 'object', node) diff --git a/pype/vendor/libfuturize/fixes/fix_oldstr_wrap.py b/pype/vendor/libfuturize/fixes/fix_oldstr_wrap.py deleted file mode 100644 index ad58771d5f..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_oldstr_wrap.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -For the ``future`` package. - -Adds this import line: - - from past.builtins import str as oldstr - -at the top and wraps any unadorned string literals 'abc' or explicit byte-string -literals b'abc' in oldstr() calls so the code has the same behaviour on Py3 as -on Py2.6/2.7. -""" - -from __future__ import unicode_literals -import re -from lib2to3 import fixer_base -from lib2to3.pgen2 import token -from lib2to3.fixer_util import syms -from libfuturize.fixer_util import (future_import, touch_import_top, - wrap_in_fn_call) - - -_literal_re = re.compile(r"[^uUrR]?[\'\"]") - - -class FixOldstrWrap(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "STRING" - - def transform(self, node, results): - if node.type == token.STRING: - touch_import_top(u'past.types', u'oldstr', node) - if _literal_re.match(node.value): - new = node.clone() - # Strip any leading space or comments: - # TODO: check: do we really want to do this? - new.prefix = u'' - new.value = u'b' + new.value - wrapped = wrap_in_fn_call("oldstr", [new], prefix=node.prefix) - return wrapped diff --git a/pype/vendor/libfuturize/fixes/fix_order___future__imports.py b/pype/vendor/libfuturize/fixes/fix_order___future__imports.py deleted file mode 100644 index 143126394d..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_order___future__imports.py +++ /dev/null @@ -1,37 +0,0 @@ -""" -UNFINISHED - -Fixer for turning multiple lines like these: - - from __future__ import division - from __future__ import absolute_import - from __future__ import print_function - -into a single line like this: - - from __future__ import (absolute_import, division, print_function) - -This helps with testing of ``futurize``. 
-""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import future_import - -class FixOrderFutureImports(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - - run_order = 10 - - # def match(self, node): - # """ - # Match only once per file - # """ - # if hasattr(node, 'type') and node.type == syms.file_input: - # return True - # return False - - def transform(self, node, results): - # TODO # write me - pass - diff --git a/pype/vendor/libfuturize/fixes/fix_print.py b/pype/vendor/libfuturize/fixes/fix_print.py deleted file mode 100644 index 247b91b849..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_print.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2006 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Fixer for print. - -Change: - "print" into "print()" - "print ..." into "print(...)" - "print(...)" not changed - "print ... ," into "print(..., end=' ')" - "print >>x, ..." into "print(..., file=x)" - -No changes are applied if print_function is imported from __future__ - -""" - -# Local imports -from lib2to3 import patcomp, pytree, fixer_base -from lib2to3.pgen2 import token -from lib2to3.fixer_util import Name, Call, Comma, String -# from libmodernize import add_future - -parend_expr = patcomp.compile_pattern( - """atom< '(' [arith_expr|atom|power|term|STRING|NAME] ')' >""" - ) - - -class FixPrint(fixer_base.BaseFix): - - BM_compatible = True - - PATTERN = """ - simple_stmt< any* bare='print' any* > | print_stmt - """ - - def transform(self, node, results): - assert results - - bare_print = results.get("bare") - - if bare_print: - # Special-case print all by itself. - bare_print.replace(Call(Name(u"print"), [], - prefix=bare_print.prefix)) - # The "from __future__ import print_function"" declaration is added - # by the fix_print_with_import fixer, so we skip it here. - # add_future(node, u'print_function') - return - assert node.children[0] == Name(u"print") - args = node.children[1:] - if len(args) == 1 and parend_expr.match(args[0]): - # We don't want to keep sticking parens around an - # already-parenthesised expression. - return - - sep = end = file = None - if args and args[-1] == Comma(): - args = args[:-1] - end = " " - if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"): - assert len(args) >= 2 - file = args[1].clone() - args = args[3:] # Strip a possible comma after the file expression - # Now synthesize a print(args, sep=..., end=..., file=...) node. - l_args = [arg.clone() for arg in args] - if l_args: - l_args[0].prefix = u"" - if sep is not None or end is not None or file is not None: - if sep is not None: - self.add_kwarg(l_args, u"sep", String(repr(sep))) - if end is not None: - self.add_kwarg(l_args, u"end", String(repr(end))) - if file is not None: - self.add_kwarg(l_args, u"file", file) - n_stmt = Call(Name(u"print"), l_args) - n_stmt.prefix = node.prefix - - # Note that there are corner cases where adding this future-import is - # incorrect, for example when the file also has a 'print ()' statement - # that was intended to print "()". 
- # add_future(node, u'print_function') - return n_stmt - - def add_kwarg(self, l_nodes, s_kwd, n_expr): - # XXX All this prefix-setting may lose comments (though rarely) - n_expr.prefix = u"" - n_argument = pytree.Node(self.syms.argument, - (Name(s_kwd), - pytree.Leaf(token.EQUAL, u"="), - n_expr)) - if l_nodes: - l_nodes.append(Comma()) - n_argument.prefix = u" " - l_nodes.append(n_argument) diff --git a/pype/vendor/libfuturize/fixes/fix_print_with_import.py b/pype/vendor/libfuturize/fixes/fix_print_with_import.py deleted file mode 100644 index 5308d9252a..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_print_with_import.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -For the ``future`` package. - -Turns any print statements into functions and adds this import line: - - from __future__ import print_function - -at the top to retain compatibility with Python 2.6+. -""" - -from libfuturize.fixes.fix_print import FixPrint -from libfuturize.fixer_util import future_import - -class FixPrintWithImport(FixPrint): - run_order = 7 - def transform(self, node, results): - # Add the __future__ import first. (Otherwise any shebang or encoding - # comment line attached as a prefix to the print statement will be - # copied twice and appear twice.) - future_import(u'print_function', node) - n_stmt = super(FixPrintWithImport, self).transform(node, results) - return n_stmt - diff --git a/pype/vendor/libfuturize/fixes/fix_raise.py b/pype/vendor/libfuturize/fixes/fix_raise.py deleted file mode 100644 index 3e8323de2c..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_raise.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Fixer for 'raise E, V' - -From Armin Ronacher's ``python-modernize``. - -raise -> raise -raise E -> raise E -raise E, V -> raise E(V) - -raise (((E, E'), E''), E'''), V -> raise E(V) - - -CAVEATS: -1) "raise E, V" will be incorrectly translated if V is an exception - instance. The correct Python 3 idiom is - - raise E from V - - but since we can't detect instance-hood by syntax alone and since - any client code would have to be changed as well, we don't automate - this. -""" -# Author: Collin Winter, Armin Ronacher - -# Local imports -from lib2to3 import pytree, fixer_base -from lib2to3.pgen2 import token -from lib2to3.fixer_util import Name, Call, is_tuple - -class FixRaise(fixer_base.BaseFix): - - BM_compatible = True - PATTERN = """ - raise_stmt< 'raise' exc=any [',' val=any] > - """ - - def transform(self, node, results): - syms = self.syms - - exc = results["exc"].clone() - if exc.type == token.STRING: - msg = "Python 3 does not support string exceptions" - self.cannot_convert(node, msg) - return - - # Python 2 supports - # raise ((((E1, E2), E3), E4), E5), V - # as a synonym for - # raise E1, V - # Since Python 3 will not support this, we recurse down any tuple - # literals, always taking the first element. 
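# A hypothetical example of the recursion below (E1..E4 and V are made-up
# names): the Python 2 statement
#
#     raise (((E1, E2), E3), E4), V
#
# unwraps to the first element at each level and is rewritten as
# raise E1(V), exactly as the module docstring describes.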
- if is_tuple(exc): - while is_tuple(exc): - # exc.children[1:-1] is the unparenthesized tuple - # exc.children[1].children[0] is the first element of the tuple - exc = exc.children[1].children[0].clone() - exc.prefix = u" " - - if "val" not in results: - # One-argument raise - new = pytree.Node(syms.raise_stmt, [Name(u"raise"), exc]) - new.prefix = node.prefix - return new - - val = results["val"].clone() - if is_tuple(val): - args = [c.clone() for c in val.children[1:-1]] - else: - val.prefix = u"" - args = [val] - - return pytree.Node(syms.raise_stmt, - [Name(u"raise"), Call(exc, args)], - prefix=node.prefix) diff --git a/pype/vendor/libfuturize/fixes/fix_remove_old__future__imports.py b/pype/vendor/libfuturize/fixes/fix_remove_old__future__imports.py deleted file mode 100644 index 060eb00417..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_remove_old__future__imports.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Fixer for removing any of these lines: - - from __future__ import with_statement - from __future__ import nested_scopes - from __future__ import generators - -The reason is that __future__ imports like these are required to be the first -line of code (after docstrings) on Python 2.6+, which can get in the way. - -These imports are always enabled in Python 2.6+, which is the minimum sane -version to target for Py2/3 compatibility. -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import remove_future_import - -class FixRemoveOldFutureImports(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - run_order = 1 - - def transform(self, node, results): - remove_future_import(u"with_statement", node) - remove_future_import(u"nested_scopes", node) - remove_future_import(u"generators", node) - diff --git a/pype/vendor/libfuturize/fixes/fix_unicode_keep_u.py b/pype/vendor/libfuturize/fixes/fix_unicode_keep_u.py deleted file mode 100644 index a6f70f092a..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_unicode_keep_u.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Fixer that changes unicode to str and unichr to chr, but -- unlike the -lib2to3 fix_unicode.py fixer, does not change u"..." into "...". - -The reason is that Py3.3+ supports the u"..." string prefix, and, if -present, the prefix may provide useful information for disambiguating -between byte strings and unicode strings, which is often the hardest part -of the porting task. 
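# An illustrative pair of rewrites this fixer performs, based on the
# _mapping below: unicode(x) becomes str(x) and unichr(i) becomes chr(i),
# while a u"..." literal is deliberately left untouched.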
- -""" - -from lib2to3.pgen2 import token -from lib2to3 import fixer_base - -_mapping = {u"unichr" : u"chr", u"unicode" : u"str"} - -class FixUnicodeKeepU(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "'unicode' | 'unichr'" - - def transform(self, node, results): - if node.type == token.NAME: - new = node.clone() - new.value = _mapping[node.value] - return new - diff --git a/pype/vendor/libfuturize/fixes/fix_unicode_literals_import.py b/pype/vendor/libfuturize/fixes/fix_unicode_literals_import.py deleted file mode 100644 index 9f21d7c69f..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_unicode_literals_import.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Adds this import: - - from __future__ import unicode_literals - -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import future_import - -class FixUnicodeLiteralsImport(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - - run_order = 9 - - def transform(self, node, results): - future_import(u"unicode_literals", node) - diff --git a/pype/vendor/libfuturize/fixes/fix_xrange_with_import.py b/pype/vendor/libfuturize/fixes/fix_xrange_with_import.py deleted file mode 100644 index c910f8165b..0000000000 --- a/pype/vendor/libfuturize/fixes/fix_xrange_with_import.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -For the ``future`` package. - -Turns any xrange calls into range calls and adds this import line: - - from builtins import range - -at the top. -""" - -from lib2to3.fixes.fix_xrange import FixXrange - -from libfuturize.fixer_util import touch_import_top - - -class FixXrangeWithImport(FixXrange): - def transform(self, node, results): - result = super(FixXrangeWithImport, self).transform(node, results) - touch_import_top('builtins', 'range', node) - return result diff --git a/pype/vendor/libfuturize/main.py b/pype/vendor/libfuturize/main.py deleted file mode 100644 index 18f33ec0a4..0000000000 --- a/pype/vendor/libfuturize/main.py +++ /dev/null @@ -1,302 +0,0 @@ -""" -futurize: automatic conversion to clean 2/3 code using ``python-future`` -====================================================================== - -Like Armin Ronacher's modernize.py, ``futurize`` attempts to produce clean -standard Python 3 code that runs on both Py2 and Py3. - -One pass --------- - -Use it like this on Python 2 code: - - $ futurize --verbose mypython2script.py - -This will attempt to port the code to standard Py3 code that also -provides Py2 compatibility with the help of the right imports from -``future``. - -To write changes to the files, use the -w flag. - -Two stages ----------- - -The ``futurize`` script can also be called in two separate stages. First: - - $ futurize --stage1 mypython2script.py - -This produces more modern Python 2 code that is not yet compatible with Python -3. The tests should still run and the diff should be uncontroversial to apply to -most Python projects that are willing to drop support for Python 2.5 and lower. - -After this, the recommended approach is to explicitly mark all strings that must -be byte-strings with a b'' prefix and all text (unicode) strings with a u'' -prefix, and then invoke the second stage of Python 2 to 2/3 conversion with:: - - $ futurize --stage2 mypython2script.py - -Stage 2 adds a dependency on ``future``. It converts most remaining Python -2-specific code to Python 3 code and adds appropriate imports from ``future`` -to restore Py2 support. - -The command above leaves all unadorned string literals as native strings -(byte-strings on Py2, unicode strings on Py3). 
If instead you would like all -unadorned string literals to be promoted to unicode, you can also pass this -flag: - - $ futurize --stage2 --unicode-literals mypython2script.py - -This adds the declaration ``from __future__ import unicode_literals`` to the -top of each file, which implicitly declares all unadorned string literals to be -unicode strings (``unicode`` on Py2). - -All imports ------------ - -The --all-imports option forces adding all ``__future__`` imports, -``builtins`` imports, and standard library aliases, even if they don't -seem necessary for the current state of each module. (This can simplify -testing, and can reduce the need to think about Py2 compatibility when editing -the code further.) - -""" - -from __future__ import (absolute_import, print_function, unicode_literals) -import future.utils -from future import __version__ - -import sys -import logging -import optparse -import os - -from lib2to3.main import main, warn, StdoutRefactoringTool -from lib2to3 import refactor - -from libfuturize.fixes import (lib2to3_fix_names_stage1, - lib2to3_fix_names_stage2, - libfuturize_fix_names_stage1, - libfuturize_fix_names_stage2) - -fixer_pkg = 'libfuturize.fixes' - - -def main(args=None): - """Main program. - - Args: - fixer_pkg: the name of a package where the fixers are located. - args: optional; a list of command line arguments. If omitted, - sys.argv[1:] is used. - - Returns a suggested exit status (0, 1, 2). - """ - - # Set up option parser - parser = optparse.OptionParser(usage="futurize [options] file|dir ...") - parser.add_option("-V", "--version", action="store_true", - help="Report the version number of futurize") - parser.add_option("-a", "--all-imports", action="store_true", - help="Add all __future__ and future imports to each module") - parser.add_option("-1", "--stage1", action="store_true", - help="Modernize Python 2 code only; no compatibility with Python 3 (or dependency on ``future``)") - parser.add_option("-2", "--stage2", action="store_true", - help="Take modernized (stage1) code and add a dependency on ``future`` to provide Py3 compatibility.") - parser.add_option("-0", "--both-stages", action="store_true", - help="Apply both stages 1 and 2") - parser.add_option("-u", "--unicode-literals", action="store_true", - help="Add ``from __future__ import unicode_literals`` to implicitly convert all unadorned string literals '' into unicode strings") - parser.add_option("-f", "--fix", action="append", default=[], - help="Each FIX specifies a transformation; default: all.\nEither use '-f division -f metaclass' etc. 
or use the fully-qualified module name: '-f lib2to3.fixes.fix_types -f libfuturize.fixes.fix_unicode_keep_u'") - parser.add_option("-j", "--processes", action="store", default=1, - type="int", help="Run 2to3 concurrently") - parser.add_option("-x", "--nofix", action="append", default=[], - help="Prevent a fixer from being run.") - parser.add_option("-l", "--list-fixes", action="store_true", - help="List available transformations") - parser.add_option("-p", "--print-function", action="store_true", - help="Modify the grammar so that print() is a function") - parser.add_option("-v", "--verbose", action="store_true", - help="More verbose logging") - parser.add_option("--no-diffs", action="store_true", - help="Don't show diffs of the refactoring") - parser.add_option("-w", "--write", action="store_true", - help="Write back modified files") - parser.add_option("-n", "--nobackups", action="store_true", default=False, - help="Don't write backups for modified files.") - parser.add_option("-o", "--output-dir", action="store", type="str", - default="", help="Put output files in this directory " - "instead of overwriting the input files. Requires -n. " - "For Python >= 2.7 only.") - parser.add_option("-W", "--write-unchanged-files", action="store_true", - help="Also write files even if no changes were required" - " (useful with --output-dir); implies -w.") - parser.add_option("--add-suffix", action="store", type="str", default="", - help="Append this string to all output filenames." - " Requires -n if non-empty. For Python >= 2.7 only." - "ex: --add-suffix='3' will generate .py3 files.") - - # Parse command line arguments - flags = {} - refactor_stdin = False - options, args = parser.parse_args(args) - - if options.write_unchanged_files: - flags["write_unchanged_files"] = True - if not options.write: - warn("--write-unchanged-files/-W implies -w.") - options.write = True - # If we allowed these, the original files would be renamed to backup names - # but not replaced. - if options.output_dir and not options.nobackups: - parser.error("Can't use --output-dir/-o without -n.") - if options.add_suffix and not options.nobackups: - parser.error("Can't use --add-suffix without -n.") - - if not options.write and options.no_diffs: - warn("not writing files and not printing diffs; that's not very useful") - if not options.write and options.nobackups: - parser.error("Can't use -n without -w") - if "-" in args: - refactor_stdin = True - if options.write: - print("Can't write to stdin.", file=sys.stderr) - return 2 - # Is this ever necessary? 
- if options.print_function: - flags["print_function"] = True - - # Set up logging handler - level = logging.DEBUG if options.verbose else logging.INFO - logging.basicConfig(format='%(name)s: %(message)s', level=level) - logger = logging.getLogger('libfuturize.main') - - if options.stage1 or options.stage2: - assert options.both_stages is None - options.both_stages = False - else: - options.both_stages = True - - avail_fixes = set() - - if options.stage1 or options.both_stages: - avail_fixes.update(lib2to3_fix_names_stage1) - avail_fixes.update(libfuturize_fix_names_stage1) - if options.stage2 or options.both_stages: - avail_fixes.update(lib2to3_fix_names_stage2) - avail_fixes.update(libfuturize_fix_names_stage2) - - if options.unicode_literals: - avail_fixes.add('libfuturize.fixes.fix_unicode_literals_import') - - if options.version: - print(__version__) - return 0 - if options.list_fixes: - print("Available transformations for the -f/--fix option:") - # for fixname in sorted(refactor.get_all_fix_names(fixer_pkg)): - for fixname in sorted(avail_fixes): - print(fixname) - if not args: - return 0 - if not args: - print("At least one file or directory argument required.", - file=sys.stderr) - print("Use --help to show usage.", file=sys.stderr) - return 2 - - unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) - - extra_fixes = set() - if options.all_imports: - if options.stage1: - prefix = 'libfuturize.fixes.' - extra_fixes.add(prefix + - 'fix_add__future__imports_except_unicode_literals') - else: - # In case the user hasn't run stage1 for some reason: - prefix = 'libpasteurize.fixes.' - extra_fixes.add(prefix + 'fix_add_all__future__imports') - extra_fixes.add(prefix + 'fix_add_future_standard_library_import') - extra_fixes.add(prefix + 'fix_add_all_future_builtins') - explicit = set() - if options.fix: - all_present = False - for fix in options.fix: - if fix == 'all': - all_present = True - else: - if ".fix_" in fix: - explicit.add(fix) - else: - # Infer the full module name for the fixer. - # First ensure that no names clash (e.g. - # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): - found = [f for f in avail_fixes - if f.endswith('fix_{0}'.format(fix))] - if len(found) > 1: - print("Ambiguous fixer name. Choose a fully qualified " - "module name instead from these:\n" + - "\n".join(" " + myf for myf in found), - file=sys.stderr) - return 2 - elif len(found) == 0: - print("Unknown fixer. Use --list-fixes or -l for a list.", - file=sys.stderr) - return 2 - explicit.add(found[0]) - if len(explicit & unwanted_fixes) > 0: - print("Conflicting usage: the following fixers have been " - "simultaneously requested and disallowed:\n" + - "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), - file=sys.stderr) - return 2 - requested = avail_fixes.union(explicit) if all_present else explicit - else: - requested = avail_fixes.union(explicit) - fixer_names = (requested | extra_fixes) - unwanted_fixes - - input_base_dir = os.path.commonprefix(args) - if (input_base_dir and not input_base_dir.endswith(os.sep) - and not os.path.isdir(input_base_dir)): - # One or more similar names were passed, their directory is the base. - # os.path.commonprefix() is ignorant of path elements, this corrects - # for that weird API. 
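# A hypothetical illustration of that quirk, with made-up paths:
# os.path.commonprefix() compares character by character, so
#
#     os.path.commonprefix(['/src/fileA.py', '/src/fileB.py'])
#
# yields '/src/file', which is not a real directory; the dirname() call
# below trims it back to '/src'.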
- input_base_dir = os.path.dirname(input_base_dir) - if options.output_dir: - input_base_dir = input_base_dir.rstrip(os.sep) - logger.info('Output in %r will mirror the input directory %r layout.', - options.output_dir, input_base_dir) - - # Initialize the refactoring tool - if future.utils.PY26: - extra_kwargs = {} - else: - extra_kwargs = { - 'append_suffix': options.add_suffix, - 'output_dir': options.output_dir, - 'input_base_dir': input_base_dir, - } - - rt = StdoutRefactoringTool( - sorted(fixer_names), flags, sorted(explicit), - options.nobackups, not options.no_diffs, - **extra_kwargs) - - # Refactor all files and directories passed as arguments - if not rt.errors: - if refactor_stdin: - rt.refactor_stdin() - else: - try: - rt.refactor(args, options.write, None, - options.processes) - except refactor.MultiprocessingUnsupported: - assert options.processes > 1 - print("Sorry, -j isn't " \ - "supported on this platform.", file=sys.stderr) - return 1 - rt.summarize() - - # Return error status (0 if rt.errors is zero) - return int(bool(rt.errors)) diff --git a/pype/vendor/libpasteurize/__init__.py b/pype/vendor/libpasteurize/__init__.py deleted file mode 100644 index 4cb1cbcd63..0000000000 --- a/pype/vendor/libpasteurize/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# empty to make this a package diff --git a/pype/vendor/libpasteurize/fixes/__init__.py b/pype/vendor/libpasteurize/fixes/__init__.py deleted file mode 100644 index c362ada23d..0000000000 --- a/pype/vendor/libpasteurize/fixes/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -import sys -from lib2to3 import refactor - -# The original set of these fixes comes from lib3to2 (https://bitbucket.org/amentajo/lib3to2): -fix_names = set([ - 'libpasteurize.fixes.fix_add_all__future__imports', # from __future__ import absolute_import etc. on separate lines - 'libpasteurize.fixes.fix_add_future_standard_library_import', # we force adding this import for now, even if it doesn't seem necessary to the fix_future_standard_library fixer, for ease of testing - # 'libfuturize.fixes.fix_order___future__imports', # consolidates to a single line to simplify testing -- UNFINISHED - 'libpasteurize.fixes.fix_future_builtins', # adds "from future.builtins import *" - 'libfuturize.fixes.fix_future_standard_library', # adds "from future import standard_library" - - 'libpasteurize.fixes.fix_annotations', - # 'libpasteurize.fixes.fix_bitlength', # ints have this in Py2.7 - # 'libpasteurize.fixes.fix_bool', # need a decorator or Mixin - # 'libpasteurize.fixes.fix_bytes', # leave bytes as bytes - # 'libpasteurize.fixes.fix_classdecorator', # available in - # Py2.6+ - # 'libpasteurize.fixes.fix_collections', hmmm ... - # 'libpasteurize.fixes.fix_dctsetcomp', # avail in Py27 - 'libpasteurize.fixes.fix_division', # yes - # 'libpasteurize.fixes.fix_except', # avail in Py2.6+ - # 'libpasteurize.fixes.fix_features', # ? 
- 'libpasteurize.fixes.fix_fullargspec', - # 'libpasteurize.fixes.fix_funcattrs', - 'libpasteurize.fixes.fix_getcwd', - 'libpasteurize.fixes.fix_imports', # adds "from future import standard_library" - 'libpasteurize.fixes.fix_imports2', - # 'libpasteurize.fixes.fix_input', - # 'libpasteurize.fixes.fix_int', - # 'libpasteurize.fixes.fix_intern', - # 'libpasteurize.fixes.fix_itertools', - 'libpasteurize.fixes.fix_kwargs', # yes, we want this - # 'libpasteurize.fixes.fix_memoryview', - # 'libpasteurize.fixes.fix_metaclass', # write a custom handler for - # this - # 'libpasteurize.fixes.fix_methodattrs', # __func__ and __self__ seem to be defined on Py2.7 already - 'libpasteurize.fixes.fix_newstyle', # yes, we want this: explicit inheritance from object. Without new-style classes in Py2, super() will break etc. - # 'libpasteurize.fixes.fix_next', # use a decorator for this - # 'libpasteurize.fixes.fix_numliterals', # prob not - # 'libpasteurize.fixes.fix_open', # huh? - # 'libpasteurize.fixes.fix_print', # no way - 'libpasteurize.fixes.fix_printfunction', # adds __future__ import print_function - # 'libpasteurize.fixes.fix_raise_', # TODO: get this working! - - # 'libpasteurize.fixes.fix_range', # nope - # 'libpasteurize.fixes.fix_reduce', - # 'libpasteurize.fixes.fix_setliteral', - # 'libpasteurize.fixes.fix_str', - # 'libpasteurize.fixes.fix_super', # maybe, if our magic super() isn't robust enough - 'libpasteurize.fixes.fix_throw', # yes, if Py3 supports it - # 'libpasteurize.fixes.fix_unittest', - 'libpasteurize.fixes.fix_unpacking', # yes, this is useful - # 'libpasteurize.fixes.fix_with' # way out of date - ]) - diff --git a/pype/vendor/libpasteurize/fixes/feature_base.py b/pype/vendor/libpasteurize/fixes/feature_base.py deleted file mode 100644 index 8a264964c9..0000000000 --- a/pype/vendor/libpasteurize/fixes/feature_base.py +++ /dev/null @@ -1,57 +0,0 @@ -u""" -Base classes for features that are backwards-incompatible. - -Usage: -features = Features() -features.add(Feature("py3k_feature", "power< 'py3k' any* >", "2.7")) -PATTERN = features.PATTERN -""" - -pattern_unformatted = u"%s=%s" # name=pattern, for dict lookups -message_unformatted = u""" -%s is only supported in Python %s and above.""" - -class Feature(object): - u""" - A feature has a name, a pattern, and a minimum version of Python 2.x - required to use the feature (or 3.x if there is no backwards-compatible - version of 2.x) - """ - def __init__(self, name, PATTERN, version): - self.name = name - self._pattern = PATTERN - self.version = version - - def message_text(self): - u""" - Format the above text with the name and minimum version required. - """ - return message_unformatted % (self.name, self.version) - -class Features(set): - u""" - A set of features that generates a pattern for the features it contains. - This set will act like a mapping in that we map names to patterns. - """ - mapping = {} - - def update_mapping(self): - u""" - Called every time we care about the mapping of names to features. - """ - self.mapping = dict([(f.name, f) for f in iter(self)]) - - @property - def PATTERN(self): - u""" - Uses the mapping of names to features to return a PATTERN suitable - for using the lib2to3 patcomp. - """ - self.update_mapping() - return u" |\n".join([pattern_unformatted % (f.name, f._pattern) for f in iter(self)]) - - def __getitem__(self, key): - u""" - Implement a simple mapping to get patterns from names. 
- """ - return self.mapping[key] diff --git a/pype/vendor/libpasteurize/fixes/fix_add_all__future__imports.py b/pype/vendor/libpasteurize/fixes/fix_add_all__future__imports.py deleted file mode 100644 index 32f89ec18f..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_add_all__future__imports.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Fixer for adding: - - from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - from __future__ import unicode_literals - -This is done when converting from Py3 to both Py3/Py2. -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import future_import - -class FixAddAllFutureImports(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - run_order = 1 - - def transform(self, node, results): - future_import(u"unicode_literals", node) - future_import(u"print_function", node) - future_import(u"division", node) - future_import(u"absolute_import", node) - diff --git a/pype/vendor/libpasteurize/fixes/fix_add_all_future_builtins.py b/pype/vendor/libpasteurize/fixes/fix_add_all_future_builtins.py deleted file mode 100644 index 97204b584b..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_add_all_future_builtins.py +++ /dev/null @@ -1,38 +0,0 @@ -""" -For the ``future`` package. - -Adds this import line:: - - from builtins import (ascii, bytes, chr, dict, filter, hex, input, - int, list, map, next, object, oct, open, pow, - range, round, str, super, zip) - -to a module, irrespective of whether each definition is used. - -Adds these imports after any other imports (in an initial block of them). -""" - -from __future__ import unicode_literals - -from lib2to3 import fixer_base - -from libfuturize.fixer_util import touch_import_top - - -class FixAddAllFutureBuiltins(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - run_order = 1 - - def transform(self, node, results): - # import_str = """(ascii, bytes, chr, dict, filter, hex, input, - # int, list, map, next, object, oct, open, pow, - # range, round, str, super, zip)""" - touch_import_top(u'builtins', '*', node) - - # builtins = """ascii bytes chr dict filter hex input - # int list map next object oct open pow - # range round str super zip""" - # for builtin in sorted(builtins.split(), reverse=True): - # touch_import_top(u'builtins', builtin, node) - diff --git a/pype/vendor/libpasteurize/fixes/fix_add_future_standard_library_import.py b/pype/vendor/libpasteurize/fixes/fix_add_future_standard_library_import.py deleted file mode 100644 index 0778406a88..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_add_future_standard_library_import.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -For the ``future`` package. - -Adds this import line: - - from future import standard_library - -after any __future__ imports but before any other imports. Doesn't actually -change the imports to Py3 style. -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import touch_import_top - -class FixAddFutureStandardLibraryImport(fixer_base.BaseFix): - BM_compatible = True - PATTERN = "file_input" - run_order = 8 - - def transform(self, node, results): - # TODO: add a blank line between any __future__ imports and this? 
- touch_import_top(u'future', u'standard_library', node) - # TODO: also add standard_library.install_hooks() diff --git a/pype/vendor/libpasteurize/fixes/fix_annotations.py b/pype/vendor/libpasteurize/fixes/fix_annotations.py deleted file mode 100644 index 1926288c23..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_annotations.py +++ /dev/null @@ -1,48 +0,0 @@ -u""" -Fixer to remove function annotations -""" - -from lib2to3 import fixer_base -from lib2to3.pgen2 import token -from lib2to3.fixer_util import syms - -warning_text = u"Removing function annotations completely." - -def param_without_annotations(node): - return node.children[0] - -class FixAnnotations(fixer_base.BaseFix): - - warned = False - - def warn_once(self, node, reason): - if not self.warned: - self.warned = True - self.warning(node, reason=reason) - - PATTERN = u""" - funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* > - """ - - def transform(self, node, results): - u""" - This just strips annotations from the funcdef completely. - """ - params = results.get(u"params") - ret = results.get(u"ret") - if ret is not None: - assert ret.prev_sibling.type == token.RARROW, u"Invalid return annotation" - self.warn_once(node, reason=warning_text) - ret.prev_sibling.remove() - ret.remove() - if params is None: return - if params.type == syms.typedargslist: - # more than one param in a typedargslist - for param in params.children: - if param.type == syms.tname: - self.warn_once(node, reason=warning_text) - param.replace(param_without_annotations(param)) - elif params.type == syms.tname: - # one param - self.warn_once(node, reason=warning_text) - params.replace(param_without_annotations(params)) diff --git a/pype/vendor/libpasteurize/fixes/fix_division.py b/pype/vendor/libpasteurize/fixes/fix_division.py deleted file mode 100644 index 6a048710f4..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_division.py +++ /dev/null @@ -1,28 +0,0 @@ -u""" -Fixer for division: from __future__ import division if needed -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import token, future_import - -def match_division(node): - u""" - __future__.division redefines the meaning of a single slash for division, - so we match that and only that. - """ - slash = token.SLASH - return node.type == slash and not node.next_sibling.type == slash and \ - not node.prev_sibling.type == slash - -class FixDivision(fixer_base.BaseFix): - run_order = 4 # this seems to be ignored? - - def match(self, node): - u""" - Since the tree needs to be fixed once and only once if and only if it - matches, then we can start discarding matches after we make the first. 
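# A hypothetical illustration: the single '/' in an expression such as
# a / b is matched here, and transform() below responds by ensuring the
# module gets "from __future__ import division" added at the top.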
- """ - return match_division(node) - - def transform(self, node, results): - future_import(u"division", node) diff --git a/pype/vendor/libpasteurize/fixes/fix_features.py b/pype/vendor/libpasteurize/fixes/fix_features.py deleted file mode 100644 index 7e5f545aff..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_features.py +++ /dev/null @@ -1,86 +0,0 @@ -u""" -Warn about features that are not present in Python 2.5, giving a message that -points to the earliest version of Python 2.x (or 3.x, if none) that supports it -""" - -from .feature_base import Feature, Features -from lib2to3 import fixer_base - -FEATURES = [ - #(FeatureName, - # FeaturePattern, - # FeatureMinVersion, - #), - (u"memoryview", - u"power < 'memoryview' trailer < '(' any* ')' > any* >", - u"2.7", - ), - (u"numbers", - u"""import_from< 'from' 'numbers' 'import' any* > | - import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""", - u"2.6", - ), - (u"abc", - u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > | - import_from< 'from' 'abc' 'import' any* >""", - u"2.6", - ), - (u"io", - u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > | - import_from< 'from' 'io' 'import' any* >""", - u"2.6", - ), - (u"bin", - u"power< 'bin' trailer< '(' any* ')' > any* >", - u"2.6", - ), - (u"formatting", - u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >", - u"2.6", - ), - (u"nonlocal", - u"global_stmt< 'nonlocal' any* >", - u"3.0", - ), - (u"with_traceback", - u"trailer< '.' 'with_traceback' >", - u"3.0", - ), -] - -class FixFeatures(fixer_base.BaseFix): - - run_order = 9 # Wait until all other fixers have run to check for these - - # To avoid spamming, we only want to warn for each feature once. - features_warned = set() - - # Build features from the list above - features = Features([Feature(name, pattern, version) for \ - name, pattern, version in FEATURES]) - - PATTERN = features.PATTERN - - def match(self, node): - to_ret = super(FixFeatures, self).match(node) - # We want the mapping only to tell us the node's specific information. - try: - del to_ret[u'node'] - except Exception: - # We want it to delete the 'node' from the results - # if it's there, so we don't care if it fails for normal reasons. - pass - return to_ret - - def transform(self, node, results): - for feature_name in results: - if feature_name in self.features_warned: - continue - else: - curr_feature = self.features[feature_name] - if curr_feature.version >= u"3": - fail = self.cannot_convert - else: - fail = self.warning - fail(node, reason=curr_feature.message_text()) - self.features_warned.add(feature_name) diff --git a/pype/vendor/libpasteurize/fixes/fix_fullargspec.py b/pype/vendor/libpasteurize/fixes/fix_fullargspec.py deleted file mode 100644 index 489295f7b8..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_fullargspec.py +++ /dev/null @@ -1,16 +0,0 @@ -u""" -Fixer for getfullargspec -> getargspec -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name - -warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent." 
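# A hypothetical sketch of the rewrite performed by this fixer (func is a
# made-up name):
#
#     inspect.getfullargspec(func)  ->  inspect.getargspec(func)
#
# with the warning above attached, since values such as keyword-only
# arguments and annotations have no getargspec() equivalent in Python 2.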
- -class FixFullargspec(fixer_base.BaseFix): - - PATTERN = u"'getfullargspec'" - - def transform(self, node, results): - self.warning(node, warn_msg) - return Name(u"getargspec", prefix=node.prefix) diff --git a/pype/vendor/libpasteurize/fixes/fix_future_builtins.py b/pype/vendor/libpasteurize/fixes/fix_future_builtins.py deleted file mode 100644 index 27339abc3b..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_future_builtins.py +++ /dev/null @@ -1,47 +0,0 @@ -""" -Adds this import line: - - from builtins import XYZ - -for each of the functions XYZ that is used in the module. -""" - -from __future__ import unicode_literals - -from lib2to3 import fixer_base -from lib2to3.pygram import python_symbols as syms -from lib2to3.fixer_util import Name, Call, in_special_context - -from libfuturize.fixer_util import touch_import_top - -# All builtins are: -# from future.builtins.iterators import (filter, map, zip) -# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) -# from future.types import (bytes, dict, int, range, str) -# We don't need isinstance any more. - -replaced_builtins = '''filter map zip - ascii chr hex input next oct open round super - bytes dict int range str'''.split() - -expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtins]) - - -class FixFutureBuiltins(fixer_base.BaseFix): - BM_compatible = True - run_order = 9 - - # Currently we only match uses as a function. This doesn't match e.g.: - # if isinstance(s, str): - # ... - PATTERN = """ - power< - ({0}) trailer< '(' args=[any] ')' > - rest=any* > - """.format(expression) - - def transform(self, node, results): - name = results["name"] - touch_import_top(u'builtins', name.value, node) - # name.replace(Name(u"input", prefix=name.prefix)) - diff --git a/pype/vendor/libpasteurize/fixes/fix_getcwd.py b/pype/vendor/libpasteurize/fixes/fix_getcwd.py deleted file mode 100644 index 9b7f002b3c..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_getcwd.py +++ /dev/null @@ -1,26 +0,0 @@ -u""" -Fixer for os.getcwd() -> os.getcwdu(). -Also warns about "from os import getcwd", suggesting the above form. -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name - -class FixGetcwd(fixer_base.BaseFix): - - PATTERN = u""" - power< 'os' trailer< dot='.' name='getcwd' > any* > - | - import_from< 'from' 'os' 'import' bad='getcwd' > - """ - - def transform(self, node, results): - if u"name" in results: - name = results[u"name"] - name.replace(Name(u"getcwdu", prefix=name.prefix)) - elif u"bad" in results: - # Can't convert to getcwdu and then expect to catch every use. 
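# Hypothetical examples of the two branches handled in this method:
#
#     os.getcwd()             ->  os.getcwdu()    (rewritten via "name")
#     from os import getcwd       flagged via cannot_convert() below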
- self.cannot_convert(node, u"import os, use os.getcwd() instead.") - return - else: - raise ValueError(u"For some reason, the pattern matcher failed.") diff --git a/pype/vendor/libpasteurize/fixes/fix_imports.py b/pype/vendor/libpasteurize/fixes/fix_imports.py deleted file mode 100644 index 4db0d548f5..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_imports.py +++ /dev/null @@ -1,113 +0,0 @@ -u""" -Fixer for standard library imports renamed in Python 3 -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import -from lib2to3.pygram import python_symbols as syms -from lib2to3.pgen2 import token -from lib2to3.pytree import Node, Leaf - -from libfuturize.fixer_util import touch_import_top -# from ..fixer_util import NameImport - -# used in simple_mapping_to_pattern() -MAPPING = {u"reprlib": u"repr", - u"winreg": u"_winreg", - u"configparser": u"ConfigParser", - u"copyreg": u"copy_reg", - u"queue": u"Queue", - u"socketserver": u"SocketServer", - u"_markupbase": u"markupbase", - u"test.support": u"test.test_support", - u"dbm.bsd": u"dbhash", - u"dbm.ndbm": u"dbm", - u"dbm.dumb": u"dumbdbm", - u"dbm.gnu": u"gdbm", - u"html.parser": u"HTMLParser", - u"html.entities": u"htmlentitydefs", - u"http.client": u"httplib", - u"http.cookies": u"Cookie", - u"http.cookiejar": u"cookielib", -# "tkinter": "Tkinter", - u"tkinter.dialog": u"Dialog", - u"tkinter._fix": u"FixTk", - u"tkinter.scrolledtext": u"ScrolledText", - u"tkinter.tix": u"Tix", - u"tkinter.constants": u"Tkconstants", - u"tkinter.dnd": u"Tkdnd", - u"tkinter.__init__": u"Tkinter", - u"tkinter.colorchooser": u"tkColorChooser", - u"tkinter.commondialog": u"tkCommonDialog", - u"tkinter.font": u"tkFont", - u"tkinter.ttk": u"ttk", - u"tkinter.messagebox": u"tkMessageBox", - u"tkinter.turtle": u"turtle", - u"urllib.robotparser": u"robotparser", - u"xmlrpc.client": u"xmlrpclib", - u"builtins": u"__builtin__", -} - -# generic strings to help build patterns -# these variables mean (with http.client.HTTPConnection as an example): -# name = http -# attr = client -# used = HTTPConnection -# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match) - -# helps match 'queue', as in 'from queue import ...' -simple_name_match = u"name='%s'" -# helps match 'client', to be used if client has been imported from http -subname_match = u"attr='%s'" -# helps match 'http.client', as in 'import urllib.request' -dotted_name_match = u"dotted_name=dotted_name< %s '.' %s >" -# helps match 'queue', as in 'queue.Queue(...)' -power_onename_match = u"%s" -# helps match 'http.client', as in 'http.client.HTTPConnection(...)' -power_twoname_match = u"power< %s trailer< '.' 
%s > any* >" -# helps match 'client.HTTPConnection', if 'client' has been imported from http -power_subname_match = u"power< %s any* >" -# helps match 'from http.client import HTTPConnection' -from_import_match = u"from_import=import_from< 'from' %s 'import' imported=any >" -# helps match 'from http import client' -from_import_submod_match = u"from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >" -# helps match 'import urllib.request' -name_import_match = u"name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >" -# helps match 'import http.client, winreg' -multiple_name_import_match = u"name_import=import_name< 'import' dotted_as_names< names=any* > >" - -def all_patterns(name): - u""" - Accepts a string and returns a pattern of possible patterns involving that name - Called by simple_mapping_to_pattern for each name in the mapping it receives. - """ - - # i_ denotes an import-like node - # u_ denotes a node that appears to be a usage of the name - if u'.' in name: - name, attr = name.split(u'.', 1) - simple_name = simple_name_match % (name) - simple_attr = subname_match % (attr) - dotted_name = dotted_name_match % (simple_name, simple_attr) - i_from = from_import_match % (dotted_name) - i_from_submod = from_import_submod_match % (simple_name, simple_attr, simple_attr, simple_attr, simple_attr) - i_name = name_import_match % (dotted_name, dotted_name) - u_name = power_twoname_match % (simple_name, simple_attr) - u_subname = power_subname_match % (simple_attr) - return u' | \n'.join((i_name, i_from, i_from_submod, u_name, u_subname)) - else: - simple_name = simple_name_match % (name) - i_name = name_import_match % (simple_name, simple_name) - i_from = from_import_match % (simple_name) - u_name = power_onename_match % (simple_name) - return u' | \n'.join((i_name, i_from, u_name)) - - -class FixImports(fixer_base.BaseFix): - - PATTERN = u' | \n'.join([all_patterns(name) for name in MAPPING]) - PATTERN = u' | \n'.join((PATTERN, multiple_name_import_match)) - - def transform(self, node, results): - touch_import_top(u'future', u'standard_library', node) - diff --git a/pype/vendor/libpasteurize/fixes/fix_imports2.py b/pype/vendor/libpasteurize/fixes/fix_imports2.py deleted file mode 100644 index 5b30b5f5db..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_imports2.py +++ /dev/null @@ -1,175 +0,0 @@ -u""" -Fixer for complicated imports -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma -from libfuturize.fixer_util import touch_import_top - - -TK_BASE_NAMES = (u'ACTIVE', u'ALL', u'ANCHOR', u'ARC',u'BASELINE', u'BEVEL', u'BOTH', - u'BOTTOM', u'BROWSE', u'BUTT', u'CASCADE', u'CENTER', u'CHAR', - u'CHECKBUTTON', u'CHORD', u'COMMAND', u'CURRENT', u'DISABLED', - u'DOTBOX', u'E', u'END', u'EW', u'EXCEPTION', u'EXTENDED', u'FALSE', - u'FIRST', u'FLAT', u'GROOVE', u'HIDDEN', u'HORIZONTAL', u'INSERT', - u'INSIDE', u'LAST', u'LEFT', u'MITER', u'MOVETO', u'MULTIPLE', u'N', - u'NE', u'NO', u'NONE', u'NORMAL', u'NS', u'NSEW', u'NUMERIC', u'NW', - u'OFF', u'ON', u'OUTSIDE', u'PAGES', u'PIESLICE', u'PROJECTING', - u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT', - u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST', - u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes', - u'TOP', u'TRUE', u'TclVersion', u'TkVersion', 
u'UNDERLINE', - u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES', - u'wantobjects') - -PY2MODULES = { - u'urllib2' : ( - u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler', - u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler', - u'FTPHandler', u'FileHandler', u'HTTPBasicAuthHandler', - u'HTTPCookieProcessor', u'HTTPDefaultErrorHandler', - u'HTTPDigestAuthHandler', u'HTTPError', u'HTTPErrorProcessor', - u'HTTPHandler', u'HTTPPasswordMgr', - u'HTTPPasswordMgrWithDefaultRealm', u'HTTPRedirectHandler', - u'HTTPSHandler', u'OpenerDirector', u'ProxyBasicAuthHandler', - u'ProxyDigestAuthHandler', u'ProxyHandler', u'Request', - u'StringIO', u'URLError', u'UnknownHandler', u'addinfourl', - u'build_opener', u'install_opener', u'parse_http_list', - u'parse_keqv_list', u'randombytes', u'request_host', u'urlopen'), - u'urllib' : ( - u'ContentTooShortError', u'FancyURLopener',u'URLopener', - u'basejoin', u'ftperrors', u'getproxies', - u'getproxies_environment', u'localhost', u'pathname2url', - u'quote', u'quote_plus', u'splitattr', u'splithost', - u'splitnport', u'splitpasswd', u'splitport', u'splitquery', - u'splittag', u'splittype', u'splituser', u'splitvalue', - u'thishost', u'unquote', u'unquote_plus', u'unwrap', - u'url2pathname', u'urlcleanup', u'urlencode', u'urlopen', - u'urlretrieve',), - u'urlparse' : ( - u'parse_qs', u'parse_qsl', u'urldefrag', u'urljoin', - u'urlparse', u'urlsplit', u'urlunparse', u'urlunsplit'), - u'dbm' : ( - u'ndbm', u'gnu', u'dumb'), - u'anydbm' : ( - u'error', u'open'), - u'whichdb' : ( - u'whichdb',), - u'BaseHTTPServer' : ( - u'BaseHTTPRequestHandler', u'HTTPServer'), - u'CGIHTTPServer' : ( - u'CGIHTTPRequestHandler',), - u'SimpleHTTPServer' : ( - u'SimpleHTTPRequestHandler',), - u'FileDialog' : TK_BASE_NAMES + ( - u'FileDialog', u'LoadFileDialog', u'SaveFileDialog', - u'dialogstates', u'test'), - u'tkFileDialog' : ( - u'Directory', u'Open', u'SaveAs', u'_Dialog', u'askdirectory', - u'askopenfile', u'askopenfilename', u'askopenfilenames', - u'askopenfiles', u'asksaveasfile', u'asksaveasfilename'), - u'SimpleDialog' : TK_BASE_NAMES + ( - u'SimpleDialog',), - u'tkSimpleDialog' : TK_BASE_NAMES + ( - u'askfloat', u'askinteger', u'askstring', u'Dialog'), - u'SimpleXMLRPCServer' : ( - u'CGIXMLRPCRequestHandler', u'SimpleXMLRPCDispatcher', - u'SimpleXMLRPCRequestHandler', u'SimpleXMLRPCServer', - u'list_public_methods', u'remove_duplicates', - u'resolve_dotted_attribute'), - u'DocXMLRPCServer' : ( - u'DocCGIXMLRPCRequestHandler', u'DocXMLRPCRequestHandler', - u'DocXMLRPCServer', u'ServerHTMLDoc',u'XMLRPCDocGenerator'), - } - -MAPPING = { u'urllib.request' : - (u'urllib2', u'urllib'), - u'urllib.error' : - (u'urllib2', u'urllib'), - u'urllib.parse' : - (u'urllib2', u'urllib', u'urlparse'), - u'dbm.__init__' : - (u'anydbm', u'whichdb'), - u'http.server' : - (u'CGIHTTPServer', u'SimpleHTTPServer', u'BaseHTTPServer'), - u'tkinter.filedialog' : - (u'tkFileDialog', u'FileDialog'), - u'tkinter.simpledialog' : - (u'tkSimpleDialog', u'SimpleDialog'), - u'xmlrpc.server' : - (u'DocXMLRPCServer', u'SimpleXMLRPCServer'), - } - -# helps match 'http', as in 'from http.server import ...' -simple_name = u"name='%s'" -# helps match 'server', as in 'from http.server import ...' -simple_attr = u"attr='%s'" -# helps match 'HTTPServer', as in 'from http.server import HTTPServer' -simple_using = u"using='%s'" -# helps match 'urllib.request', as in 'import urllib.request' -dotted_name = u"dotted_name=dotted_name< %s '.' 
%s >" -# helps match 'http.server', as in 'http.server.HTTPServer(...)' -power_twoname = u"pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >" -# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)' -power_onename = u"pow=power< %s trailer< '.' using=any > any* >" -# helps match 'from http.server import HTTPServer' -# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler' -# also helps match 'from http.server import *' -from_import = u"from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >" -# helps match 'import urllib.request' -name_import = u"name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >" - -############# -# WON'T FIX # -############# - -# helps match 'import urllib.request as name' -name_import_rename = u"name_import_rename=dotted_as_name< %s 'as' renamed=any >" -# helps match 'from http import server' -from_import_rename = u"from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >" - - -def all_modules_subpattern(): - u""" - Builds a pattern for all toplevel names - (urllib, http, etc) - """ - names_dot_attrs = [mod.split(u".") for mod in MAPPING] - ret = u"( " + u" | ".join([dotted_name % (simple_name % (mod[0]), - simple_attr % (mod[1])) for mod in names_dot_attrs]) - ret += u" | " - ret += u" | ".join([simple_name % (mod[0]) for mod in names_dot_attrs if mod[1] == u"__init__"]) + u" )" - return ret - - -def build_import_pattern(mapping1, mapping2): - u""" - mapping1: A dict mapping py3k modules to all possible py2k replacements - mapping2: A dict mapping py2k modules to the things they do - This builds a HUGE pattern to match all ways that things can be imported - """ - # py3k: urllib.request, py2k: ('urllib2', 'urllib') - yield from_import % (all_modules_subpattern()) - for py3k, py2k in mapping1.items(): - name, attr = py3k.split(u'.') - s_name = simple_name % (name) - s_attr = simple_attr % (attr) - d_name = dotted_name % (s_name, s_attr) - yield name_import % (d_name) - yield power_twoname % (s_name, s_attr) - if attr == u'__init__': - yield name_import % (s_name) - yield power_onename % (s_name) - yield name_import_rename % (d_name) - yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr) - - -class FixImports2(fixer_base.BaseFix): - - run_order = 4 - - PATTERN = u" | \n".join(build_import_pattern(MAPPING, PY2MODULES)) - - def transform(self, node, results): - touch_import_top(u'future', u'standard_library', node) - diff --git a/pype/vendor/libpasteurize/fixes/fix_kwargs.py b/pype/vendor/libpasteurize/fixes/fix_kwargs.py deleted file mode 100644 index 59a3043bef..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_kwargs.py +++ /dev/null @@ -1,148 +0,0 @@ -u""" -Fixer for Python 3 function parameter syntax -This fixer is rather sensitive to incorrect py3k syntax. -""" - -# Note: "relevant" parameters are parameters following the first STAR in the list. 
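# A hypothetical before/after sketch of what this fixer does, using the
# _3to2kwargs name and the templates defined below (f, a and b are
# made-up names):
#
#     def f(a, *, b=1):          # Python 3 keyword-only parameter
#         ...
#
# is rewritten into Python 2 compatible code along the lines of:
#
#     def f(a, **_3to2kwargs):
#         if 'b' in _3to2kwargs: b = _3to2kwargs['b']; del _3to2kwargs['b']
#         else: b = 1
#         ...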
- -from lib2to3 import fixer_base -from lib2to3.fixer_util import token, String, Newline, Comma, Name -from libfuturize.fixer_util import indentation, suitify, DoubleStar - -_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']" -_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s" -_else_template = u"else: %(name)s = %(default)s" -_kwargs_default_name = u"_3to2kwargs" - -def gen_params(raw_params): - u""" - Generator that yields tuples of (name, default_value) for each parameter in the list - If no default is given, then it is default_value is None (not Leaf(token.NAME, 'None')) - """ - assert raw_params[0].type == token.STAR and len(raw_params) > 2 - curr_idx = 2 # the first place a keyword-only parameter name can be is index 2 - max_idx = len(raw_params) - while curr_idx < max_idx: - curr_item = raw_params[curr_idx] - prev_item = curr_item.prev_sibling - if curr_item.type != token.NAME: - curr_idx += 1 - continue - if prev_item is not None and prev_item.type == token.DOUBLESTAR: - break - name = curr_item.value - nxt = curr_item.next_sibling - if nxt is not None and nxt.type == token.EQUAL: - default_value = nxt.next_sibling - curr_idx += 2 - else: - default_value = None - yield (name, default_value) - curr_idx += 1 - -def remove_params(raw_params, kwargs_default=_kwargs_default_name): - u""" - Removes all keyword-only args from the params list and a bare star, if any. - Does not add the kwargs dict if needed. - Returns True if more action is needed, False if not - (more action is needed if no kwargs dict exists) - """ - assert raw_params[0].type == token.STAR - if raw_params[1].type == token.COMMA: - raw_params[0].remove() - raw_params[1].remove() - kw_params = raw_params[2:] - else: - kw_params = raw_params[3:] - for param in kw_params: - if param.type != token.DOUBLESTAR: - param.remove() - else: - return False - else: - return True - -def needs_fixing(raw_params, kwargs_default=_kwargs_default_name): - u""" - Returns string with the name of the kwargs dict if the params after the first star need fixing - Otherwise returns empty string - """ - found_kwargs = False - needs_fix = False - - for t in raw_params[2:]: - if t.type == token.COMMA: - # Commas are irrelevant at this stage. - continue - elif t.type == token.NAME and not found_kwargs: - # Keyword-only argument: definitely need to fix. - needs_fix = True - elif t.type == token.NAME and found_kwargs: - # Return 'foobar' of **foobar, if needed. - return t.value if needs_fix else u'' - elif t.type == token.DOUBLESTAR: - # Found either '*' from **foobar. - found_kwargs = True - else: - # Never found **foobar. Return a synthetic name, if needed. - return kwargs_default if needs_fix else u'' - -class FixKwargs(fixer_base.BaseFix): - - run_order = 7 # Run after function annotations are removed - - PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >" - - def transform(self, node, results): - params_rawlist = results[u"params"] - for i, item in enumerate(params_rawlist): - if item.type == token.STAR: - params_rawlist = params_rawlist[i:] - break - else: - return - # params is guaranteed to be a list starting with *. - # if fixing is needed, there will be at least 3 items in this list: - # [STAR, COMMA, NAME] is the minimum that we need to worry about. - new_kwargs = needs_fixing(params_rawlist) - # new_kwargs is the name of the kwargs dictionary. 
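        # (needs_fixing() returns u'' when nothing after the star needs rewriting)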
- if not new_kwargs: - return - suitify(node) - - # At this point, params_rawlist is guaranteed to be a list - # beginning with a star that includes at least one keyword-only param - # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or - # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME] - - # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite] - # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts] - # We need to insert our new stuff before the first_stmt and change the - # first_stmt's prefix. - - suite = node.children[4] - first_stmt = suite.children[2] - ident = indentation(first_stmt) - - for name, default_value in gen_params(params_rawlist): - if default_value is None: - suite.insert_child(2, Newline()) - suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident)) - else: - suite.insert_child(2, Newline()) - suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident)) - suite.insert_child(2, Newline()) - suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident)) - first_stmt.prefix = ident - suite.children[2].prefix = u"" - - # Now, we need to fix up the list of params. - - must_add_kwargs = remove_params(params_rawlist) - if must_add_kwargs: - arglist = results[u'arglist'] - if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA: - arglist.append_child(Comma()) - arglist.append_child(DoubleStar(prefix=u" ")) - arglist.append_child(Name(new_kwargs)) - diff --git a/pype/vendor/libpasteurize/fixes/fix_memoryview.py b/pype/vendor/libpasteurize/fixes/fix_memoryview.py deleted file mode 100644 index a20f6f3f2d..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_memoryview.py +++ /dev/null @@ -1,21 +0,0 @@ -u""" -Fixer for memoryview(s) -> buffer(s). -Explicit because some memoryview methods are invalid on buffer objects. -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name - - -class FixMemoryview(fixer_base.BaseFix): - - explicit = True # User must specify that they want this. - - PATTERN = u""" - power< name='memoryview' trailer< '(' [any] ')' > - rest=any* > - """ - - def transform(self, node, results): - name = results[u"name"] - name.replace(Name(u"buffer", prefix=name.prefix)) diff --git a/pype/vendor/libpasteurize/fixes/fix_metaclass.py b/pype/vendor/libpasteurize/fixes/fix_metaclass.py deleted file mode 100644 index 5e6e64d812..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_metaclass.py +++ /dev/null @@ -1,78 +0,0 @@ -u""" -Fixer for (metaclass=X) -> __metaclass__ = X -Some semantics (see PEP 3115) may be altered in the translation.""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name, syms, Node, Leaf, Newline, find_root -from lib2to3.pygram import token -from libfuturize.fixer_util import indentation, suitify -# from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify - -def has_metaclass(parent): - results = None - for node in parent.children: - kids = node.children - if node.type == syms.argument: - if kids[0] == Leaf(token.NAME, u"metaclass") and \ - kids[1] == Leaf(token.EQUAL, u"=") and \ - kids[2]: - #Hack to avoid "class X(=):" with this case. - results = [node] + kids - break - elif node.type == syms.arglist: - # Argument list... 
loop through it looking for: - # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)] - for child in node.children: - if results: break - if child.type == token.COMMA: - #Store the last comma, which precedes the metaclass - comma = child - elif type(child) == Node: - meta = equal = name = None - for arg in child.children: - if arg == Leaf(token.NAME, u"metaclass"): - #We have the (metaclass) part - meta = arg - elif meta and arg == Leaf(token.EQUAL, u"="): - #We have the (metaclass=) part - equal = arg - elif meta and equal: - #Here we go, we have (metaclass=X) - name = arg - results = (comma, meta, equal, name) - break - return results - - -class FixMetaclass(fixer_base.BaseFix): - - PATTERN = u""" - classdef - """ - - def transform(self, node, results): - meta_results = has_metaclass(node) - if not meta_results: return - for meta in meta_results: - meta.remove() - target = Leaf(token.NAME, u"__metaclass__") - equal = Leaf(token.EQUAL, u"=", prefix=u" ") - # meta is the last item in what was returned by has_metaclass(): name - name = meta - name.prefix = u" " - stmt_node = Node(syms.atom, [target, equal, name]) - - suitify(node) - for item in node.children: - if item.type == syms.suite: - for stmt in item.children: - if stmt.type == token.INDENT: - # Insert, in reverse order, the statement, a newline, - # and an indent right after the first indented line - loc = item.children.index(stmt) + 1 - # Keep consistent indentation form - ident = Leaf(token.INDENT, stmt.value) - item.insert_child(loc, ident) - item.insert_child(loc, Newline()) - item.insert_child(loc, stmt_node) - break diff --git a/pype/vendor/libpasteurize/fixes/fix_newstyle.py b/pype/vendor/libpasteurize/fixes/fix_newstyle.py deleted file mode 100644 index cc6b3adcb0..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_newstyle.py +++ /dev/null @@ -1,33 +0,0 @@ -u""" -Fixer for "class Foo: ..." -> "class Foo(object): ..." -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import LParen, RParen, Name - -from libfuturize.fixer_util import touch_import_top - - -def insert_object(node, idx): - node.insert_child(idx, RParen()) - node.insert_child(idx, Name(u"object")) - node.insert_child(idx, LParen()) - -class FixNewstyle(fixer_base.BaseFix): - - # Match: - # class Blah: - # and: - # class Blah(): - - PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >" - - def transform(self, node, results): - colon = results[u"colon"] - idx = node.children.index(colon) - if (node.children[idx-2].value == '(' and - node.children[idx-1].value == ')'): - del node.children[idx-2:idx] - idx -= 2 - insert_object(node, idx) - touch_import_top(u'builtins', 'object', node) diff --git a/pype/vendor/libpasteurize/fixes/fix_next.py b/pype/vendor/libpasteurize/fixes/fix_next.py deleted file mode 100644 index 9ecb6c0439..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_next.py +++ /dev/null @@ -1,43 +0,0 @@ -u""" -Fixer for: -it.__next__() -> it.next(). -next(it) -> it.next(). -""" - -from lib2to3.pgen2 import token -from lib2to3.pygram import python_symbols as syms -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name, Call, find_binding, Attr - -bind_warning = u"Calls to builtin next() possibly shadowed by global binding" - - -class FixNext(fixer_base.BaseFix): - - PATTERN = u""" - power< base=any+ trailer< '.' 
attr='__next__' > any* > - | - power< head='next' trailer< '(' arg=any ')' > any* > - | - classdef< 'class' base=any+ ':' - suite< any* - funcdef< 'def' - attr='__next__' - parameters< '(' NAME ')' > any+ > - any* > > - """ - - def transform(self, node, results): - assert results - - base = results.get(u"base") - attr = results.get(u"attr") - head = results.get(u"head") - arg_ = results.get(u"arg") - if arg_: - arg = arg_.clone() - head.replace(Attr(Name(unicode(arg),prefix=head.prefix), - Name(u"next"))) - arg_.remove() - elif base: - attr.replace(Name(u"next", prefix=attr.prefix)) diff --git a/pype/vendor/libpasteurize/fixes/fix_printfunction.py b/pype/vendor/libpasteurize/fixes/fix_printfunction.py deleted file mode 100644 index a2a6e08437..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_printfunction.py +++ /dev/null @@ -1,17 +0,0 @@ -u""" -Fixer for print: from __future__ import print_function. -""" - -from lib2to3 import fixer_base -from libfuturize.fixer_util import future_import - -class FixPrintfunction(fixer_base.BaseFix): - - # explicit = True - - PATTERN = u""" - power< 'print' trailer < '(' any* ')' > any* > - """ - - def transform(self, node, results): - future_import(u"print_function", node) diff --git a/pype/vendor/libpasteurize/fixes/fix_raise.py b/pype/vendor/libpasteurize/fixes/fix_raise.py deleted file mode 100644 index 9c9c192f85..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_raise.py +++ /dev/null @@ -1,25 +0,0 @@ -u"""Fixer for 'raise E(V).with_traceback(T)' -> 'raise E, V, T'""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Comma, Node, Leaf, token, syms - -class FixRaise(fixer_base.BaseFix): - - PATTERN = u""" - raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] - [trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" - - def transform(self, node, results): - name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) - chain = results.get(u"chain") - if chain is not None: - self.warning(node, u"explicit exception chaining is not supported in Python 2") - chain.prev_sibling.remove() - chain.remove() - if trc is not None: - val = val[0] if val else Leaf(token.NAME, u"None") - val.prefix = trc.prefix = u" " - kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), - val.clone(), Comma(), trc.clone()] - raise_stmt = Node(syms.raise_stmt, kids) - node.replace(raise_stmt) diff --git a/pype/vendor/libpasteurize/fixes/fix_raise_.py b/pype/vendor/libpasteurize/fixes/fix_raise_.py deleted file mode 100644 index 0f020c454f..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_raise_.py +++ /dev/null @@ -1,35 +0,0 @@ -u"""Fixer for - raise E(V).with_traceback(T) - to: - from future.utils import raise_ - ... - raise_(E, V, T) - -TODO: FIXME!! - -""" - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Comma, Node, Leaf, token, syms - -class FixRaise(fixer_base.BaseFix): - - PATTERN = u""" - raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] - [trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" - - def transform(self, node, results): - FIXME - name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) - chain = results.get(u"chain") - if chain is not None: - self.warning(node, u"explicit exception chaining is not supported in Python 2") - chain.prev_sibling.remove() - chain.remove() - if trc is not None: - val = val[0] if val else Leaf(token.NAME, u"None") - val.prefix = trc.prefix = u" " - kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), - val.clone(), Comma(), trc.clone()] - raise_stmt = Node(syms.raise_stmt, kids) - node.replace(raise_stmt) diff --git a/pype/vendor/libpasteurize/fixes/fix_throw.py b/pype/vendor/libpasteurize/fixes/fix_throw.py deleted file mode 100644 index c0feed1ead..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_throw.py +++ /dev/null @@ -1,23 +0,0 @@ -u"""Fixer for 'g.throw(E(V).with_traceback(T))' -> 'g.throw(E, V, T)'""" - -from lib2to3 import fixer_base -from lib2to3.pytree import Node, Leaf -from lib2to3.pgen2 import token -from lib2to3.fixer_util import Comma - -class FixThrow(fixer_base.BaseFix): - - PATTERN = u""" - power< any trailer< '.' 'throw' > - trailer< '(' args=power< exc=any trailer< '(' val=any* ')' > - trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' > > ')' > > - """ - - def transform(self, node, results): - syms = self.syms - exc, val, trc = (results[u"exc"], results[u"val"], results[u"trc"]) - val = val[0] if val else Leaf(token.NAME, u"None") - val.prefix = trc.prefix = u" " - kids = [exc.clone(), Comma(), val.clone(), Comma(), trc.clone()] - args = results[u"args"] - args.children = kids diff --git a/pype/vendor/libpasteurize/fixes/fix_unpacking.py b/pype/vendor/libpasteurize/fixes/fix_unpacking.py deleted file mode 100644 index 1e53a9bf77..0000000000 --- a/pype/vendor/libpasteurize/fixes/fix_unpacking.py +++ /dev/null @@ -1,120 +0,0 @@ -u""" -Fixer for: -(a,)* *b (,c)* [,] = s -for (a,)* *b (,c)* [,] in d: ... -""" - -from lib2to3 import fixer_base -from itertools import count -from lib2to3.fixer_util import (Assign, Comma, Call, Newline, Name, - Number, token, syms, Node, Leaf) -from libfuturize.fixer_util import indentation, suitify, commatize -# from libfuturize.fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf - -def assignment_source(num_pre, num_post, LISTNAME, ITERNAME): - u""" - Accepts num_pre and num_post, which are counts of values - before and after the starg (not including the starg) - Returns a source fit for Assign() from fixer_util - """ - children = [] - pre = unicode(num_pre) - post = unicode(num_post) - # This code builds the assignment source from lib2to3 tree primitives. - # It's not very readable, but it seems like the most correct way to do it. 
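    # For example, with num_pre=2 and num_post=2 (as in "a, b, *c, d, e = s"),
    # the expression assembled here is:
    #     _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:]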
- if num_pre > 0: - pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])]) - children.append(pre_part) - children.append(Leaf(token.PLUS, u"+", prefix=u" ")) - main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])]) - children.append(main_part) - if num_post > 0: - children.append(Leaf(token.PLUS, u"+", prefix=u" ")) - post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])]) - children.append(post_part) - source = Node(syms.arith_expr, children) - return source - -class FixUnpacking(fixer_base.BaseFix): - - PATTERN = u""" - expl=expr_stmt< testlist_star_expr< - pre=(any ',')* - star_expr< '*' name=NAME > - post=(',' any)* [','] > '=' source=any > | - impl=for_stmt< 'for' lst=exprlist< - pre=(any ',')* - star_expr< '*' name=NAME > - post=(',' any)* [','] > 'in' it=any ':' suite=any>""" - - def fix_explicit_context(self, node, results): - pre, name, post, source = (results.get(n) for n in (u"pre", u"name", u"post", u"source")) - pre = [n.clone() for n in pre if n.type == token.NAME] - name.prefix = u" " - post = [n.clone() for n in post if n.type == token.NAME] - target = [n.clone() for n in commatize(pre + [name.clone()] + post)] - # to make the special-case fix for "*z, = ..." correct with the least - # amount of modification, make the left-side into a guaranteed tuple - target.append(Comma()) - source.prefix = u"" - setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()])) - power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) - return setup_line, power_line - - def fix_implicit_context(self, node, results): - u""" - Only example of the implicit context is - a for loop, so only fix that. - """ - pre, name, post, it = (results.get(n) for n in (u"pre", u"name", u"post", u"it")) - pre = [n.clone() for n in pre if n.type == token.NAME] - name.prefix = u" " - post = [n.clone() for n in post if n.type == token.NAME] - target = [n.clone() for n in commatize(pre + [name.clone()] + post)] - # to make the special-case fix for "*z, = ..." 
correct with the least - # amount of modification, make the left-side into a guaranteed tuple - target.append(Comma()) - source = it.clone() - source.prefix = u"" - setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [Name(self.ITERNAME)])) - power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) - return setup_line, power_line - - def transform(self, node, results): - u""" - a,b,c,d,e,f,*g,h,i = range(100) changes to - _3to2list = list(range(100)) - a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:] - - and - - for a,b,*c,d,e in iter_of_iters: do_stuff changes to - for _3to2iter in iter_of_iters: - _3to2list = list(_3to2iter) - a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:] - do_stuff - """ - self.LISTNAME = self.new_name(u"_3to2list") - self.ITERNAME = self.new_name(u"_3to2iter") - expl, impl = results.get(u"expl"), results.get(u"impl") - if expl is not None: - setup_line, power_line = self.fix_explicit_context(node, results) - setup_line.prefix = expl.prefix - power_line.prefix = indentation(expl.parent) - setup_line.append_child(Newline()) - parent = node.parent - i = node.remove() - parent.insert_child(i, power_line) - parent.insert_child(i, setup_line) - elif impl is not None: - setup_line, power_line = self.fix_implicit_context(node, results) - suitify(node) - suite = [k for k in node.children if k.type == syms.suite][0] - setup_line.prefix = u"" - power_line.prefix = suite.children[1].value - suite.children[2].prefix = indentation(suite.children[2]) - suite.insert_child(2, Newline()) - suite.insert_child(2, power_line) - suite.insert_child(2, Newline()) - suite.insert_child(2, setup_line) - results.get(u"lst").replace(Name(self.ITERNAME, prefix=u" ")) diff --git a/pype/vendor/libpasteurize/main.py b/pype/vendor/libpasteurize/main.py deleted file mode 100644 index cc0e6ec352..0000000000 --- a/pype/vendor/libpasteurize/main.py +++ /dev/null @@ -1,149 +0,0 @@ -""" -pasteurize: automatic conversion of Python 3 code to clean 2/3 code -=================================================================== - -``pasteurize`` attempts to convert existing Python 3 code into source-compatible -Python 2 and 3 code. - -Use it like this on Python 3 code: - - $ pasteurize --verbose mypython3script.py - -This removes any Py3-only syntax (e.g. new metaclasses) and adds these -import lines: - - from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - from __future__ import unicode_literals - from future import standard_library - standard_library.install_hooks() - from builtins import * - -To write changes to the files, use the -w flag. - -It also adds any other wrappers needed for Py2/3 compatibility. - -Note that separate stages are not available (or needed) when converting from -Python 3 with ``pasteurize`` as they are when converting from Python 2 with -``futurize``. - -The --all-imports option forces adding all ``__future__`` imports, -``builtins`` imports, and standard library aliases, even if they don't -seem necessary for the current state of each module. (This can simplify -testing, and can reduce the need to think about Py2 compatibility when editing -the code further.) 
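For example, combining the flags defined by the option parser below::

    $ pasteurize -w --all-imports mypython3script.py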
- -""" - -from __future__ import (absolute_import, print_function, unicode_literals) - -import sys -import logging -import optparse -from lib2to3.main import main, warn, StdoutRefactoringTool -from lib2to3 import refactor - -from future import __version__ -from libpasteurize.fixes import fix_names - - -def main(args=None): - """Main program. - - Returns a suggested exit status (0, 1, 2). - """ - # Set up option parser - parser = optparse.OptionParser(usage="pasteurize [options] file|dir ...") - parser.add_option("-V", "--version", action="store_true", - help="Report the version number of pasteurize") - parser.add_option("-a", "--all-imports", action="store_true", - help="Adds all __future__ and future imports to each module") - parser.add_option("-f", "--fix", action="append", default=[], - help="Each FIX specifies a transformation; default: all") - parser.add_option("-j", "--processes", action="store", default=1, - type="int", help="Run 2to3 concurrently") - parser.add_option("-x", "--nofix", action="append", default=[], - help="Prevent a fixer from being run.") - parser.add_option("-l", "--list-fixes", action="store_true", - help="List available transformations") - # parser.add_option("-p", "--print-function", action="store_true", - # help="Modify the grammar so that print() is a function") - parser.add_option("-v", "--verbose", action="store_true", - help="More verbose logging") - parser.add_option("--no-diffs", action="store_true", - help="Don't show diffs of the refactoring") - parser.add_option("-w", "--write", action="store_true", - help="Write back modified files") - parser.add_option("-n", "--nobackups", action="store_true", default=False, - help="Don't write backups for modified files.") - - # Parse command line arguments - refactor_stdin = False - flags = {} - options, args = parser.parse_args(args) - fixer_pkg = 'libpasteurize.fixes' - avail_fixes = fix_names - flags["print_function"] = True - - if not options.write and options.no_diffs: - warn("not writing files and not printing diffs; that's not very useful") - if not options.write and options.nobackups: - parser.error("Can't use -n without -w") - if options.version: - print(__version__) - return 0 - if options.list_fixes: - print("Available transformations for the -f/--fix option:") - for fixname in sorted(avail_fixes): - print(fixname) - if not args: - return 0 - if not args: - print("At least one file or directory argument required.", - file=sys.stderr) - print("Use --help to show usage.", file=sys.stderr) - return 2 - if "-" in args: - refactor_stdin = True - if options.write: - print("Can't write to stdin.", file=sys.stderr) - return 2 - - # Set up logging handler - level = logging.DEBUG if options.verbose else logging.INFO - logging.basicConfig(format='%(name)s: %(message)s', level=level) - - # Initialize the refactoring tool - unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) - - extra_fixes = set() - if options.all_imports: - prefix = 'libpasteurize.fixes.' 
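        # Unconditionally add the __future__ / standard_library / builtins
        # import fixers (see the --all-imports option above).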
-        extra_fixes.add(prefix + 'fix_add_all__future__imports')
-        extra_fixes.add(prefix + 'fix_add_future_standard_library_import')
-        extra_fixes.add(prefix + 'fix_add_all_future_builtins')
-
-    fixer_names = avail_fixes | extra_fixes - unwanted_fixes
-
-    rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(),
-                               options.nobackups, not options.no_diffs)
-
-    # Refactor all files and directories passed as arguments
-    if not rt.errors:
-        if refactor_stdin:
-            rt.refactor_stdin()
-        else:
-            try:
-                rt.refactor(args, options.write, None,
-                            options.processes)
-            except refactor.MultiprocessingUnsupported:
-                assert options.processes > 1
-                print("Sorry, -j isn't " \
-                      "supported on this platform.", file=sys.stderr)
-                return 1
-        rt.summarize()
-
-    # Return error status (0 if rt.errors is zero)
-    return int(bool(rt.errors))
-
diff --git a/pype/vendor/past/__init__.py b/pype/vendor/past/__init__.py
deleted file mode 100644
index 08eeb58d6d..0000000000
--- a/pype/vendor/past/__init__.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# coding=utf-8
-"""
-past: compatibility with Python 2 from Python 3
-===============================================
-
-``past`` is a package to aid with Python 2/3 compatibility. Whereas ``future``
-contains backports of Python 3 constructs to Python 2, ``past`` provides
-implementations of some Python 2 constructs in Python 3 and tools to import and
-run Python 2 code in Python 3. It is intended to be used sparingly, as a way of
-running old Python 2 code from Python 3 until the code is ported properly.
-
-Potential uses for libraries:
-
-- as a step in porting a Python 2 codebase to Python 3 (e.g. with the ``futurize`` script)
-- to provide Python 3 support for previously Python 2-only libraries with the
-  same APIs as on Python 2 -- particularly with regard to 8-bit strings (the
-  ``past.builtins.str`` type).
-- to aid in providing minimal-effort Python 3 support for applications using
-  libraries that do not yet wish to upgrade their code properly to Python 3, or
-  wish to upgrade it gradually to Python 3 style.
-
-
-Here are some code examples that run identically on Python 3 and 2::
-
-    >>> from past.builtins import str as oldstr
-
-    >>> philosopher = oldstr(u'\u5b54\u5b50'.encode('utf-8'))
-    >>> # This now behaves like a Py2 byte-string on both Py2 and Py3.
-    >>> # For example, indexing returns a Python 2-like string object, not
-    >>> # an integer:
-    >>> philosopher[0]
-    '\xe5'
-    >>> type(philosopher[0])
-    <past.types.oldstr.oldstr>
-
-    >>> # List-producing versions of range, reduce, map, filter
-    >>> from past.builtins import range, reduce
-    >>> range(10)
-    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-    >>> reduce(lambda x, y: x+y, [1, 2, 3, 4, 5])
-    15
-
-    >>> # Other functions removed in Python 3 are resurrected ...
-    >>> from past.builtins import execfile
-    >>> execfile('myfile.py')
-
-    >>> from past.builtins import raw_input
-    >>> name = raw_input('What is your name? ')
-    What is your name? [cursor]
-
-    >>> from past.builtins import reload
-    >>> reload(mymodule)  # equivalent to imp.reload(mymodule) in Python 3
-
-    >>> from past.builtins import xrange
-    >>> for i in xrange(10):
-    ...     pass
-
-
-It also provides import hooks so you can import and use Python 2 modules like
-this::
-
-    $ python3
-
-    >>> from past import autotranslate
-    >>> autotranslate('mypy2module')
-    >>> import mypy2module
-
-until the authors of the Python 2 modules have upgraded their code. Then, for
-example::
-
-    >>> mypy2module.func_taking_py2_string(oldstr(b'abcd'))
-
-
-Credits
--------
-
-:Author: Ed Schofield
-:Sponsor: Python Charmers Pty Ltd, Australia: http://pythoncharmers.com
-
-
-Licensing
----------
-Copyright 2013-2016 Python Charmers Pty Ltd, Australia.
-The software is distributed under an MIT licence. See LICENSE.txt.
-"""
-
-
-from past.translation import install_hooks as autotranslate
-from future import __version__, __copyright__, __license__
-
-__title__ = 'past'
-__author__ = 'Ed Schofield'
-
diff --git a/pype/vendor/past/builtins/__init__.py b/pype/vendor/past/builtins/__init__.py
deleted file mode 100644
index a967736d03..0000000000
--- a/pype/vendor/past/builtins/__init__.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""
-A resurrection of some old functions from Python 2 for use in Python 3. These
-should be used sparingly, to help with porting efforts, since code using them
-is no longer standard Python 3 code.
-
-This module provides the following:
-
-1. Implementations of these builtin functions which have no equivalent on Py3:
-
-- apply
-- chr
-- cmp
-- execfile
-
-2. Aliases:
-
-- intern <- sys.intern
-- raw_input <- input
-- reduce <- functools.reduce
-- reload <- imp.reload
-- unichr <- chr
-- unicode <- str
-- xrange <- range
-
-3. List-producing versions of the corresponding Python 3 iterator-producing functions:
-
-- filter
-- map
-- range
-- zip
-
-4. Forward-ported Py2 types:
-
-- basestring
-- dict
-- str
-- long
-- unicode
-
-"""
-
-from future.utils import PY3
-from past.builtins.noniterators import (filter, map, range, reduce, zip)
-# from past.builtins.misc import (ascii, hex, input, oct, open)
-if PY3:
-    from past.types import (basestring,
-                            olddict as dict,
-                            oldstr as str,
-                            long,
-                            unicode)
-else:
-    from __builtin__ import (basestring, dict, str, long, unicode)
-
-from past.builtins.misc import (apply, chr, cmp, execfile, intern, oct,
-                                raw_input, reload, unichr, unicode, xrange)
-from past import utils
-
-
-if utils.PY3:
-    # We only import names that shadow the builtins on Py3. No other namespace
-    # pollution on Py3.
-
-    # Only shadow builtins on Py3; no new names
-    __all__ = ['filter', 'map', 'range', 'reduce', 'zip',
-               'basestring', 'dict', 'str', 'long', 'unicode',
-               'apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input',
-               'reload', 'unichr', 'xrange'
-               ]
-
-else:
-    # No namespace pollution on Py2
-    __all__ = []
diff --git a/pype/vendor/past/builtins/misc.py b/pype/vendor/past/builtins/misc.py
deleted file mode 100644
index 55dc63c629..0000000000
--- a/pype/vendor/past/builtins/misc.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from __future__ import unicode_literals
-import sys
-import inspect
-from collections import Mapping
-
-from future.utils import PY3, exec_
-
-
-if PY3:
-    import builtins
-
-    def apply(f, *args, **kw):
-        return f(*args, **kw)
-
-    from past.builtins import str as oldstr
-
-    def chr(i):
-        """
-        Return a byte-string of one character with ordinal i; 0 <= i <= 256
-        """
-        return oldstr(bytes((i,)))
-
-    def cmp(x, y):
-        """
-        cmp(x, y) -> integer
-
-        Return negative if x<y, zero if x==y, positive if x>y.
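        For example, given the implementation below:

            >>> cmp(1, 2), cmp(2, 2), cmp(3, 2)
            (-1, 0, 1)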
- """ - return (x > y) - (x < y) - - from sys import intern - - def oct(number): - """oct(number) -> string - - Return the octal representation of an integer - """ - return '0' + builtins.oct(number)[2:] - - raw_input = input - from imp import reload - unicode = str - unichr = chr - xrange = range -else: - import __builtin__ - apply = __builtin__.apply - chr = __builtin__.chr - cmp = __builtin__.cmp - execfile = __builtin__.execfile - intern = __builtin__.intern - oct = __builtin__.oct - raw_input = __builtin__.raw_input - reload = __builtin__.reload - unicode = __builtin__.unicode - unichr = __builtin__.unichr - xrange = __builtin__.xrange - - -if PY3: - def execfile(filename, myglobals=None, mylocals=None): - """ - Read and execute a Python script from a file in the given namespaces. - The globals and locals are dictionaries, defaulting to the current - globals and locals. If only globals is given, locals defaults to it. - """ - if myglobals is None: - # There seems to be no alternative to frame hacking here. - caller_frame = inspect.stack()[1] - myglobals = caller_frame[0].f_globals - mylocals = caller_frame[0].f_locals - elif mylocals is None: - # Only if myglobals is given do we set mylocals to it. - mylocals = myglobals - if not isinstance(myglobals, Mapping): - raise TypeError('globals must be a mapping') - if not isinstance(mylocals, Mapping): - raise TypeError('locals must be a mapping') - with open(filename, "rbU") as fin: - source = fin.read() - code = compile(source, filename, "exec") - exec_(code, myglobals, mylocals) - - -if PY3: - __all__ = ['apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input', - 'reload', 'unichr', 'unicode', 'xrange'] -else: - __all__ = [] - diff --git a/pype/vendor/past/builtins/noniterators.py b/pype/vendor/past/builtins/noniterators.py deleted file mode 100644 index 66a4a8a575..0000000000 --- a/pype/vendor/past/builtins/noniterators.py +++ /dev/null @@ -1,273 +0,0 @@ -""" -This module is designed to be used as follows:: - - from past.builtins.noniterators import filter, map, range, reduce, zip - -And then, for example:: - - assert isinstance(range(5), list) - -The list-producing functions this brings in are:: - -- ``filter`` -- ``map`` -- ``range`` -- ``reduce`` -- ``zip`` - -""" - -from __future__ import division, absolute_import, print_function - -from itertools import chain, starmap -import itertools # since zip_longest doesn't exist on Py2 -from past.types import basestring -from past.utils import PY3 - - -def flatmap(f, items): - return chain.from_iterable(map(f, items)) - - -if PY3: - import builtins - - # list-producing versions of the major Python iterating functions - def oldfilter(*args): - """ - filter(function or None, sequence) -> list, tuple, or string - - Return those items of sequence for which function(item) is true. - If function is None, return the items that are true. If sequence - is a tuple or string, return the same type, else return a list. - """ - mytype = type(args[1]) - if isinstance(args[1], basestring): - return mytype().join(builtins.filter(*args)) - elif isinstance(args[1], (tuple, list)): - return mytype(builtins.filter(*args)) - else: - # Fall back to list. Is this the right thing to do? - return list(builtins.filter(*args)) - - # This is surprisingly difficult to get right. 
For example, the - # solutions here fail with the test cases in the docstring below: - # http://stackoverflow.com/questions/8072755/ - def oldmap(func, *iterables): - """ - map(function, sequence[, sequence, ...]) -> list - - Return a list of the results of applying the function to the - items of the argument sequence(s). If more than one sequence is - given, the function is called with an argument list consisting of - the corresponding item of each sequence, substituting None for - missing values when not all sequences have the same length. If - the function is None, return a list of the items of the sequence - (or a list of tuples if more than one sequence). - - Test cases: - >>> oldmap(None, 'hello world') - ['h', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd'] - - >>> oldmap(None, range(4)) - [0, 1, 2, 3] - - More test cases are in past.tests.test_builtins. - """ - zipped = itertools.zip_longest(*iterables) - l = list(zipped) - if len(l) == 0: - return [] - if func is None: - result = l - else: - result = list(starmap(func, l)) - - # Inspect to see whether it's a simple sequence of tuples - try: - if max([len(item) for item in result]) == 1: - return list(chain.from_iterable(result)) - # return list(flatmap(func, result)) - except TypeError as e: - # Simple objects like ints have no len() - pass - return result - - ############################ - ### For reference, the source code for Py2.7 map function: - # static PyObject * - # builtin_map(PyObject *self, PyObject *args) - # { - # typedef struct { - # PyObject *it; /* the iterator object */ - # int saw_StopIteration; /* bool: did the iterator end? */ - # } sequence; - # - # PyObject *func, *result; - # sequence *seqs = NULL, *sqp; - # Py_ssize_t n, len; - # register int i, j; - # - # n = PyTuple_Size(args); - # if (n < 2) { - # PyErr_SetString(PyExc_TypeError, - # "map() requires at least two args"); - # return NULL; - # } - # - # func = PyTuple_GetItem(args, 0); - # n--; - # - # if (func == Py_None) { - # if (PyErr_WarnPy3k("map(None, ...) not supported in 3.x; " - # "use list(...)", 1) < 0) - # return NULL; - # if (n == 1) { - # /* map(None, S) is the same as list(S). */ - # return PySequence_List(PyTuple_GetItem(args, 1)); - # } - # } - # - # /* Get space for sequence descriptors. Must NULL out the iterator - # * pointers so that jumping to Fail_2 later doesn't see trash. - # */ - # if ((seqs = PyMem_NEW(sequence, n)) == NULL) { - # PyErr_NoMemory(); - # return NULL; - # } - # for (i = 0; i < n; ++i) { - # seqs[i].it = (PyObject*)NULL; - # seqs[i].saw_StopIteration = 0; - # } - # - # /* Do a first pass to obtain iterators for the arguments, and set len - # * to the largest of their lengths. - # */ - # len = 0; - # for (i = 0, sqp = seqs; i < n; ++i, ++sqp) { - # PyObject *curseq; - # Py_ssize_t curlen; - # - # /* Get iterator. */ - # curseq = PyTuple_GetItem(args, i+1); - # sqp->it = PyObject_GetIter(curseq); - # if (sqp->it == NULL) { - # static char errmsg[] = - # "argument %d to map() must support iteration"; - # char errbuf[sizeof(errmsg) + 25]; - # PyOS_snprintf(errbuf, sizeof(errbuf), errmsg, i+2); - # PyErr_SetString(PyExc_TypeError, errbuf); - # goto Fail_2; - # } - # - # /* Update len. */ - # curlen = _PyObject_LengthHint(curseq, 8); - # if (curlen > len) - # len = curlen; - # } - # - # /* Get space for the result list. */ - # if ((result = (PyObject *) PyList_New(len)) == NULL) - # goto Fail_2; - # - # /* Iterate over the sequences until all have stopped. 
*/ - # for (i = 0; ; ++i) { - # PyObject *alist, *item=NULL, *value; - # int numactive = 0; - # - # if (func == Py_None && n == 1) - # alist = NULL; - # else if ((alist = PyTuple_New(n)) == NULL) - # goto Fail_1; - # - # for (j = 0, sqp = seqs; j < n; ++j, ++sqp) { - # if (sqp->saw_StopIteration) { - # Py_INCREF(Py_None); - # item = Py_None; - # } - # else { - # item = PyIter_Next(sqp->it); - # if (item) - # ++numactive; - # else { - # if (PyErr_Occurred()) { - # Py_XDECREF(alist); - # goto Fail_1; - # } - # Py_INCREF(Py_None); - # item = Py_None; - # sqp->saw_StopIteration = 1; - # } - # } - # if (alist) - # PyTuple_SET_ITEM(alist, j, item); - # else - # break; - # } - # - # if (!alist) - # alist = item; - # - # if (numactive == 0) { - # Py_DECREF(alist); - # break; - # } - # - # if (func == Py_None) - # value = alist; - # else { - # value = PyEval_CallObject(func, alist); - # Py_DECREF(alist); - # if (value == NULL) - # goto Fail_1; - # } - # if (i >= len) { - # int status = PyList_Append(result, value); - # Py_DECREF(value); - # if (status < 0) - # goto Fail_1; - # } - # else if (PyList_SetItem(result, i, value) < 0) - # goto Fail_1; - # } - # - # if (i < len && PyList_SetSlice(result, i, len, NULL) < 0) - # goto Fail_1; - # - # goto Succeed; - # - # Fail_1: - # Py_DECREF(result); - # Fail_2: - # result = NULL; - # Succeed: - # assert(seqs); - # for (i = 0; i < n; ++i) - # Py_XDECREF(seqs[i].it); - # PyMem_DEL(seqs); - # return result; - # } - - def oldrange(*args, **kwargs): - return list(builtins.range(*args, **kwargs)) - - def oldzip(*args, **kwargs): - return list(builtins.zip(*args, **kwargs)) - - filter = oldfilter - map = oldmap - range = oldrange - from functools import reduce - zip = oldzip - __all__ = ['filter', 'map', 'range', 'reduce', 'zip'] - -else: - import __builtin__ - # Python 2-builtin ranges produce lists - filter = __builtin__.filter - map = __builtin__.map - range = __builtin__.range - reduce = __builtin__.reduce - zip = __builtin__.zip - __all__ = [] - diff --git a/pype/vendor/past/tests/__init__.py b/pype/vendor/past/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/past/translation/__init__.py b/pype/vendor/past/translation/__init__.py deleted file mode 100644 index 7b21d9f5f1..0000000000 --- a/pype/vendor/past/translation/__init__.py +++ /dev/null @@ -1,498 +0,0 @@ -# -*- coding: utf-8 -*- -""" -past.translation -================== - -The ``past.translation`` package provides an import hook for Python 3 which -transparently runs ``futurize`` fixers over Python 2 code on import to convert -print statements into functions, etc. - -It is intended to assist users in migrating to Python 3.x even if some -dependencies still only support Python 2.x. - -Usage ------ - -Once your Py2 package is installed in the usual module search path, the import -hook is invoked as follows: - - >>> from past import autotranslate - >>> autotranslate('mypackagename') - -Or: - - >>> autotranslate(['mypackage1', 'mypackage2']) - -You can unregister the hook using:: - - >>> from past.translation import remove_hooks - >>> remove_hooks() - -Author: Ed Schofield. -Inspired by and based on ``uprefix`` by Vinay M. Sajip. 
-""" - -import imp -import logging -import marshal -import os -import sys -import copy -from lib2to3.pgen2.parse import ParseError -from lib2to3.refactor import RefactoringTool - -from libfuturize import fixes - - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - -myfixes = (list(fixes.libfuturize_fix_names_stage1) + - list(fixes.lib2to3_fix_names_stage1) + - list(fixes.libfuturize_fix_names_stage2) + - list(fixes.lib2to3_fix_names_stage2)) - - -# We detect whether the code is Py2 or Py3 by applying certain lib2to3 fixers -# to it. If the diff is empty, it's Python 3 code. - -py2_detect_fixers = [ -# From stage 1: - 'lib2to3.fixes.fix_apply', - # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2 - 'lib2to3.fixes.fix_except', - 'lib2to3.fixes.fix_execfile', - 'lib2to3.fixes.fix_exitfunc', - 'lib2to3.fixes.fix_funcattrs', - 'lib2to3.fixes.fix_filter', - 'lib2to3.fixes.fix_has_key', - 'lib2to3.fixes.fix_idioms', - 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import) - 'lib2to3.fixes.fix_intern', - 'lib2to3.fixes.fix_isinstance', - 'lib2to3.fixes.fix_methodattrs', - 'lib2to3.fixes.fix_ne', - 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 - 'lib2to3.fixes.fix_paren', - 'lib2to3.fixes.fix_print', - 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions - 'lib2to3.fixes.fix_renames', - 'lib2to3.fixes.fix_reduce', - # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support - 'lib2to3.fixes.fix_repr', - 'lib2to3.fixes.fix_standarderror', - 'lib2to3.fixes.fix_sys_exc', - 'lib2to3.fixes.fix_throw', - 'lib2to3.fixes.fix_tuple_params', - 'lib2to3.fixes.fix_types', - 'lib2to3.fixes.fix_ws_comma', - 'lib2to3.fixes.fix_xreadlines', - -# From stage 2: - 'lib2to3.fixes.fix_basestring', - # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. - # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ - # 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. - 'lib2to3.fixes.fix_exec', - # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports - 'lib2to3.fixes.fix_getcwdu', - # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library - # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) - # 'lib2to3.fixes.fix_input', - # 'lib2to3.fixes.fix_itertools', - # 'lib2to3.fixes.fix_itertools_imports', - 'lib2to3.fixes.fix_long', - # 'lib2to3.fixes.fix_map', - # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead - 'lib2to3.fixes.fix_next', - 'lib2to3.fixes.fix_nonzero', # TODO: add a decorator for mapping __bool__ to __nonzero__ - # 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3 - 'lib2to3.fixes.fix_raw_input', - # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings - # 'lib2to3.fixes.fix_urllib', - 'lib2to3.fixes.fix_xrange', - # 'lib2to3.fixes.fix_zip', -] - - -class RTs: - """ - A namespace for the refactoring tools. This avoids creating these at - the module level, which slows down the module import. (See issue #117). - - There are two possible grammars: with or without the print statement. - Hence we have two possible refactoring tool implementations. 
- """ - _rt = None - _rtp = None - _rt_py2_detect = None - _rtp_py2_detect = None - - @staticmethod - def setup(): - """ - Call this before using the refactoring tools to create them on demand - if needed. - """ - if None in [RTs._rt, RTs._rtp]: - RTs._rt = RefactoringTool(myfixes) - RTs._rtp = RefactoringTool(myfixes, {'print_function': True}) - - - @staticmethod - def setup_detect_python2(): - """ - Call this before using the refactoring tools to create them on demand - if needed. - """ - if None in [RTs._rt_py2_detect, RTs._rtp_py2_detect]: - RTs._rt_py2_detect = RefactoringTool(py2_detect_fixers) - RTs._rtp_py2_detect = RefactoringTool(py2_detect_fixers, - {'print_function': True}) - - -# We need to find a prefix for the standard library, as we don't want to -# process any files there (they will already be Python 3). -# -# The following method is used by Sanjay Vinip in uprefix. This fails for -# ``conda`` environments: -# # In a non-pythonv virtualenv, sys.real_prefix points to the installed Python. -# # In a pythonv venv, sys.base_prefix points to the installed Python. -# # Outside a virtual environment, sys.prefix points to the installed Python. - -# if hasattr(sys, 'real_prefix'): -# _syslibprefix = sys.real_prefix -# else: -# _syslibprefix = getattr(sys, 'base_prefix', sys.prefix) - -# Instead, we use the portion of the path common to both the stdlib modules -# ``math`` and ``urllib``. - -def splitall(path): - """ - Split a path into all components. From Python Cookbook. - """ - allparts = [] - while True: - parts = os.path.split(path) - if parts[0] == path: # sentinel for absolute paths - allparts.insert(0, parts[0]) - break - elif parts[1] == path: # sentinel for relative paths - allparts.insert(0, parts[1]) - break - else: - path = parts[0] - allparts.insert(0, parts[1]) - return allparts - - -def common_substring(s1, s2): - """ - Returns the longest common substring to the two strings, starting from the - left. - """ - chunks = [] - path1 = splitall(s1) - path2 = splitall(s2) - for (dir1, dir2) in zip(path1, path2): - if dir1 != dir2: - break - chunks.append(dir1) - return os.path.join(*chunks) - -# _stdlibprefix = common_substring(math.__file__, urllib.__file__) - - -def detect_python2(source, pathname): - """ - Returns a bool indicating whether we think the code is Py2 - """ - RTs.setup_detect_python2() - try: - tree = RTs._rt_py2_detect.refactor_string(source, pathname) - except ParseError as e: - if e.msg != 'bad input' or e.value != '=': - raise - tree = RTs._rtp.refactor_string(source, pathname) - - if source != str(tree)[:-1]: # remove added newline - # The above fixers made changes, so we conclude it's Python 2 code - logger.debug('Detected Python 2 code: {0}'.format(pathname)) - with open('/tmp/original_code.py', 'w') as f: - f.write('### Original code (detected as py2): %s\n%s' % - (pathname, source)) - with open('/tmp/py2_detection_code.py', 'w') as f: - f.write('### Code after running py3 detection (from %s)\n%s' % - (pathname, str(tree)[:-1])) - return True - else: - logger.debug('Detected Python 3 code: {0}'.format(pathname)) - with open('/tmp/original_code.py', 'w') as f: - f.write('### Original code (detected as py3): %s\n%s' % - (pathname, source)) - try: - os.remove('/tmp/futurize_code.py') - except OSError: - pass - return False - - -class Py2Fixer(object): - """ - An import hook class that uses lib2to3 for source-to-source translation of - Py2 code to Py3. - """ - - # See the comments on :class:future.standard_library.RenameImport. 
- # We add this attribute here so remove_hooks() and install_hooks() can - # unambiguously detect whether the import hook is installed: - PY2FIXER = True - - def __init__(self): - self.found = None - self.base_exclude_paths = ['future', 'past'] - self.exclude_paths = copy.copy(self.base_exclude_paths) - self.include_paths = [] - - def include(self, paths): - """ - Pass in a sequence of module names such as 'plotrique.plotting' that, - if present at the leftmost side of the full package name, would - specify the module to be transformed from Py2 to Py3. - """ - self.include_paths += paths - - def exclude(self, paths): - """ - Pass in a sequence of strings such as 'mymodule' that, if - present at the leftmost side of the full package name, would cause - the module not to undergo any source transformation. - """ - self.exclude_paths += paths - - def find_module(self, fullname, path=None): - logger.debug('Running find_module: {0}...'.format(fullname)) - if '.' in fullname: - parent, child = fullname.rsplit('.', 1) - if path is None: - loader = self.find_module(parent, path) - mod = loader.load_module(parent) - path = mod.__path__ - fullname = child - - # Perhaps we should try using the new importlib functionality in Python - # 3.3: something like this? - # thing = importlib.machinery.PathFinder.find_module(fullname, path) - try: - self.found = imp.find_module(fullname, path) - except Exception as e: - logger.debug('Py2Fixer could not find {0}') - logger.debug('Exception was: {0})'.format(fullname, e)) - return None - self.kind = self.found[-1][-1] - if self.kind == imp.PKG_DIRECTORY: - self.pathname = os.path.join(self.found[1], '__init__.py') - elif self.kind == imp.PY_SOURCE: - self.pathname = self.found[1] - return self - - def transform(self, source): - # This implementation uses lib2to3, - # you can override and use something else - # if that's better for you - - # lib2to3 likes a newline at the end - RTs.setup() - source += '\n' - try: - tree = RTs._rt.refactor_string(source, self.pathname) - except ParseError as e: - if e.msg != 'bad input' or e.value != '=': - raise - tree = RTs._rtp.refactor_string(source, self.pathname) - # could optimise a bit for only doing str(tree) if - # getattr(tree, 'was_changed', False) returns True - return str(tree)[:-1] # remove added newline - - def load_module(self, fullname): - logger.debug('Running load_module for {0}...'.format(fullname)) - if fullname in sys.modules: - mod = sys.modules[fullname] - else: - if self.kind in (imp.PY_COMPILED, imp.C_EXTENSION, imp.C_BUILTIN, - imp.PY_FROZEN): - convert = False - # elif (self.pathname.startswith(_stdlibprefix) - # and 'site-packages' not in self.pathname): - # # We assume it's a stdlib package in this case. Is this too brittle? - # # Please file a bug report at https://github.com/PythonCharmers/python-future - # # if so. 
- # convert = False - # in theory, other paths could be configured to be excluded here too - elif any([fullname.startswith(path) for path in self.exclude_paths]): - convert = False - elif any([fullname.startswith(path) for path in self.include_paths]): - convert = True - else: - convert = False - if not convert: - logger.debug('Excluded {0} from translation'.format(fullname)) - mod = imp.load_module(fullname, *self.found) - else: - logger.debug('Autoconverting {0} ...'.format(fullname)) - mod = imp.new_module(fullname) - sys.modules[fullname] = mod - - # required by PEP 302 - mod.__file__ = self.pathname - mod.__name__ = fullname - mod.__loader__ = self - - # This: - # mod.__package__ = '.'.join(fullname.split('.')[:-1]) - # seems to result in "SystemError: Parent module '' not loaded, - # cannot perform relative import" for a package's __init__.py - # file. We use the approach below. Another option to try is the - # minimal load_module pattern from the PEP 302 text instead. - - # Is the test in the next line more or less robust than the - # following one? Presumably less ... - # ispkg = self.pathname.endswith('__init__.py') - - if self.kind == imp.PKG_DIRECTORY: - mod.__path__ = [ os.path.dirname(self.pathname) ] - mod.__package__ = fullname - else: - #else, regular module - mod.__path__ = [] - mod.__package__ = fullname.rpartition('.')[0] - - try: - cachename = imp.cache_from_source(self.pathname) - if not os.path.exists(cachename): - update_cache = True - else: - sourcetime = os.stat(self.pathname).st_mtime - cachetime = os.stat(cachename).st_mtime - update_cache = cachetime < sourcetime - # # Force update_cache to work around a problem with it being treated as Py3 code??? - # update_cache = True - if not update_cache: - with open(cachename, 'rb') as f: - data = f.read() - try: - code = marshal.loads(data) - except Exception: - # pyc could be corrupt. Regenerate it - update_cache = True - if update_cache: - if self.found[0]: - source = self.found[0].read() - elif self.kind == imp.PKG_DIRECTORY: - with open(self.pathname) as f: - source = f.read() - - if detect_python2(source, self.pathname): - source = self.transform(source) - with open('/tmp/futurized_code.py', 'w') as f: - f.write('### Futurized code (from %s)\n%s' % - (self.pathname, source)) - - code = compile(source, self.pathname, 'exec') - - dirname = os.path.dirname(cachename) - if not os.path.exists(dirname): - os.makedirs(dirname) - try: - with open(cachename, 'wb') as f: - data = marshal.dumps(code) - f.write(data) - except Exception: # could be write-protected - pass - exec(code, mod.__dict__) - except Exception as e: - # must remove module from sys.modules - del sys.modules[fullname] - raise # keep it simple - - if self.found[0]: - self.found[0].close() - return mod - -_hook = Py2Fixer() - - -def install_hooks(include_paths=(), exclude_paths=()): - if isinstance(include_paths, str): - include_paths = (include_paths,) - if isinstance(exclude_paths, str): - exclude_paths = (exclude_paths,) - assert len(include_paths) + len(exclude_paths) > 0, 'Pass at least one argument' - _hook.include(include_paths) - _hook.exclude(exclude_paths) - # _hook.debug = debug - enable = sys.version_info[0] >= 3 # enabled for all 3.x - if enable and _hook not in sys.meta_path: - sys.meta_path.insert(0, _hook) # insert at beginning. 
This could be made a parameter - - # We could return the hook when there are ways of configuring it - #return _hook - - -def remove_hooks(): - if _hook in sys.meta_path: - sys.meta_path.remove(_hook) - - -def detect_hooks(): - """ - Returns True if the import hooks are installed, False if not. - """ - return _hook in sys.meta_path - # present = any([hasattr(hook, 'PY2FIXER') for hook in sys.meta_path]) - # return present - - -class hooks(object): - """ - Acts as a context manager. Use like this: - - >>> from past import translation - >>> with translation.hooks(): - ... import mypy2module - >>> import requests # py2/3 compatible anyway - >>> # etc. - """ - def __enter__(self): - self.hooks_were_installed = detect_hooks() - install_hooks() - return self - - def __exit__(self, *args): - if not self.hooks_were_installed: - remove_hooks() - - -class suspend_hooks(object): - """ - Acts as a context manager. Use like this: - - >>> from past import translation - >>> translation.install_hooks() - >>> import http.client - >>> # ... - >>> with translation.suspend_hooks(): - >>> import requests # or others that support Py2/3 - - If the hooks were disabled before the context, they are not installed when - the context is left. - """ - def __enter__(self): - self.hooks_were_installed = detect_hooks() - remove_hooks() - return self - def __exit__(self, *args): - if self.hooks_were_installed: - install_hooks() - diff --git a/pype/vendor/past/types/__init__.py b/pype/vendor/past/types/__init__.py deleted file mode 100644 index a31b2646cf..0000000000 --- a/pype/vendor/past/types/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -Forward-ports of types from Python 2 for use with Python 3: - -- ``basestring``: equivalent to ``(str, bytes)`` in ``isinstance`` checks -- ``dict``: with list-producing .keys() etc. methods -- ``str``: bytes-like, but iterating over them doesn't product integers -- ``long``: alias of Py3 int with ``L`` suffix in the ``repr`` -- ``unicode``: alias of Py3 str with ``u`` prefix in the ``repr`` - -""" - -from past import utils - -if utils.PY2: - import __builtin__ - basestring = __builtin__.basestring - dict = __builtin__.dict - str = __builtin__.str - long = __builtin__.long - unicode = __builtin__.unicode - __all__ = [] -else: - from .basestring import basestring - from .olddict import olddict - from .oldstr import oldstr - long = int - unicode = str - # from .unicode import unicode - __all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode'] - diff --git a/pype/vendor/past/types/basestring.py b/pype/vendor/past/types/basestring.py deleted file mode 100644 index 15437bf7c8..0000000000 --- a/pype/vendor/past/types/basestring.py +++ /dev/null @@ -1,40 +0,0 @@ -""" -An implementation of the basestring type for Python 3 - -Example use: - ->>> s = b'abc' ->>> assert isinstance(s, basestring) ->>> from past.types import str as oldstr ->>> s2 = oldstr(b'abc') ->>> assert isinstance(s2, basestring) - -""" - -import sys - -from past.utils import with_metaclass, PY2 - -if PY2: - str = unicode - -ver = sys.version_info[:2] - - -class BaseBaseString(type): - def __instancecheck__(cls, instance): - return isinstance(instance, (bytes, str)) - - def __subclasshook__(cls, thing): - # TODO: What should go here? 
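        # Note: NotImplemented is a value, not an exception; a conventional
        # hook would return it rather than raise.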
- raise NotImplemented - - -class basestring(with_metaclass(BaseBaseString)): - """ - A minimal backport of the Python 2 basestring type to Py3 - """ - - -__all__ = ['basestring'] - diff --git a/pype/vendor/past/types/olddict.py b/pype/vendor/past/types/olddict.py deleted file mode 100644 index b213e28fa9..0000000000 --- a/pype/vendor/past/types/olddict.py +++ /dev/null @@ -1,97 +0,0 @@ -""" -A dict subclass for Python 3 that behaves like Python 2's dict - -Example use: - ->>> from past.builtins import dict ->>> d1 = dict() # instead of {} for an empty dict ->>> d2 = dict(key1='value1', key2='value2') - -The keys, values and items methods now return lists on Python 3.x and there are -methods for iterkeys, itervalues, iteritems, and viewkeys etc. - ->>> for d in (d1, d2): -... assert isinstance(d.keys(), list) -... assert isinstance(d.values(), list) -... assert isinstance(d.items(), list) -""" - -import sys - -from past.utils import with_metaclass - - -_builtin_dict = dict -ver = sys.version_info[:2] - - -class BaseOldDict(type): - def __instancecheck__(cls, instance): - return isinstance(instance, _builtin_dict) - - -class olddict(with_metaclass(BaseOldDict, _builtin_dict)): - """ - A backport of the Python 3 dict object to Py2 - """ - iterkeys = _builtin_dict.keys - viewkeys = _builtin_dict.keys - - def keys(self): - return list(super(olddict, self).keys()) - - itervalues = _builtin_dict.values - viewvalues = _builtin_dict.values - - def values(self): - return list(super(olddict, self).values()) - - iteritems = _builtin_dict.items - viewitems = _builtin_dict.items - - def items(self): - return list(super(olddict, self).items()) - - def has_key(self, k): - """ - D.has_key(k) -> True if D has a key k, else False - """ - return k in self - - # def __new__(cls, *args, **kwargs): - # """ - # dict() -> new empty dictionary - # dict(mapping) -> new dictionary initialized from a mapping object's - # (key, value) pairs - # dict(iterable) -> new dictionary initialized as if via: - # d = {} - # for k, v in iterable: - # d[k] = v - # dict(**kwargs) -> new dictionary initialized with the name=value pairs - # in the keyword argument list. For example: dict(one=1, two=2) - - # """ - # - # if len(args) == 0: - # return super(olddict, cls).__new__(cls) - # # Was: elif isinstance(args[0], newbytes): - # # We use type() instead of the above because we're redefining - # # this to be True for all unicode string subclasses. Warning: - # # This may render newstr un-subclassable. - # elif type(args[0]) == olddict: - # return args[0] - # # elif isinstance(args[0], _builtin_dict): - # # value = args[0] - # else: - # value = args[0] - # return super(olddict, cls).__new__(cls, value) - - def __native__(self): - """ - Hook for the past.utils.native() function - """ - return super(oldbytes, self) - - -__all__ = ['olddict'] - diff --git a/pype/vendor/past/types/oldstr.py b/pype/vendor/past/types/oldstr.py deleted file mode 100644 index 1b90e3e11e..0000000000 --- a/pype/vendor/past/types/oldstr.py +++ /dev/null @@ -1,132 +0,0 @@ -""" -Pure-Python implementation of a Python 2-like str object for Python 3. -""" - -from collections import Iterable -from numbers import Integral - -from past.utils import PY2, with_metaclass - - -_builtin_bytes = bytes - - -class BaseOldStr(type): - def __instancecheck__(cls, instance): - return isinstance(instance, _builtin_bytes) - - -def unescape(s): - """ - Interprets strings with escape sequences - - Example: - >>> s = unescape(r'abc\\def') # i.e. 
'abc\\\\def' - >>> print(s) - 'abc\def' - >>> s2 = unescape('abc\\ndef') - >>> len(s2) - 8 - >>> print(s2) - abc - def - """ - return s.encode().decode('unicode_escape') - - -class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)): - """ - A forward port of the Python 2 8-bit string object to Py3 - """ - # Python 2 strings have no __iter__ method: - @property - def __iter__(self): - raise AttributeError - - def __dir__(self): - return [thing for thing in dir(_builtin_bytes) if thing != '__iter__'] - - # def __new__(cls, *args, **kwargs): - # """ - # From the Py3 bytes docstring: - - # bytes(iterable_of_ints) -> bytes - # bytes(string, encoding[, errors]) -> bytes - # bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer - # bytes(int) -> bytes object of size given by the parameter initialized with null bytes - # bytes() -> empty bytes object - # - # Construct an immutable array of bytes from: - # - an iterable yielding integers in range(256) - # - a text string encoded using the specified encoding - # - any object implementing the buffer API. - # - an integer - # """ - # - # if len(args) == 0: - # return super(newbytes, cls).__new__(cls) - # # Was: elif isinstance(args[0], newbytes): - # # We use type() instead of the above because we're redefining - # # this to be True for all unicode string subclasses. Warning: - # # This may render newstr un-subclassable. - # elif type(args[0]) == newbytes: - # return args[0] - # elif isinstance(args[0], _builtin_bytes): - # value = args[0] - # elif isinstance(args[0], unicode): - # if 'encoding' not in kwargs: - # raise TypeError('unicode string argument without an encoding') - # ### - # # Was: value = args[0].encode(**kwargs) - # # Python 2.6 string encode() method doesn't take kwargs: - # # Use this instead: - # newargs = [kwargs['encoding']] - # if 'errors' in kwargs: - # newargs.append(kwargs['errors']) - # value = args[0].encode(*newargs) - # ### - # elif isinstance(args[0], Iterable): - # if len(args[0]) == 0: - # # What is this? - # raise ValueError('unknown argument type') - # elif len(args[0]) > 0 and isinstance(args[0][0], Integral): - # # It's a list of integers - # value = b''.join([chr(x) for x in args[0]]) - # else: - # raise ValueError('item cannot be interpreted as an integer') - # elif isinstance(args[0], Integral): - # if args[0] < 0: - # raise ValueError('negative count') - # value = b'\x00' * args[0] - # else: - # value = args[0] - # return super(newbytes, cls).__new__(cls, value) - - def __repr__(self): - s = super(oldstr, self).__repr__() # e.g. b'abc' on Py3, b'abc' on Py3 - return s[1:] - - def __str__(self): - s = super(oldstr, self).__str__() # e.g. "b'abc'" or "b'abc\\ndef' - # TODO: fix this: - assert s[:2] == "b'" and s[-1] == "'" - return unescape(s[2:-1]) # e.g. 'abc' or 'abc\ndef' - - def __getitem__(self, y): - if isinstance(y, Integral): - return super(oldstr, self).__getitem__(slice(y, y+1)) - else: - return super(oldstr, self).__getitem__(y) - - def __getslice__(self, *args): - return self.__getitem__(slice(*args)) - - def __contains__(self, key): - if isinstance(key, int): - return False - - def __native__(self): - return bytes(self) - - -__all__ = ['oldstr'] diff --git a/pype/vendor/past/utils/__init__.py b/pype/vendor/past/utils/__init__.py deleted file mode 100644 index 02f06d5957..0000000000 --- a/pype/vendor/past/utils/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ -""" -Various non-built-in utility functions and definitions for Py2 -compatibility in Py3. 
- -For example: - - >>> # The old_div() function behaves like Python 2's / operator - >>> # without "from __future__ import division" - >>> from past.utils import old_div - >>> old_div(3, 2) # like 3/2 in Py2 - 0 - >>> old_div(3, 2.0) # like 3/2.0 in Py2 - 1.5 -""" - -import sys -import numbers - -PY3 = sys.version_info[0] == 3 -PY2 = sys.version_info[0] == 2 -PYPY = hasattr(sys, 'pypy_translation_info') - - -def with_metaclass(meta, *bases): - """ - Function from jinja2/_compat.py. License: BSD. - - Use it like this:: - - class BaseForm(object): - pass - - class FormType(type): - pass - - class Form(with_metaclass(FormType, BaseForm)): - pass - - This requires a bit of explanation: the basic idea is to make a - dummy metaclass for one level of class instantiation that replaces - itself with the actual metaclass. Because of internal type checks - we also need to make sure that we downgrade the custom metaclass - for one level to something closer to type (that's why __call__ and - __init__ comes back from type etc.). - - This has the advantage over six.with_metaclass of not introducing - dummy classes into the final MRO. - """ - class metaclass(meta): - __call__ = type.__call__ - __init__ = type.__init__ - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - return metaclass('temporary_class', None, {}) - - -def native(obj): - """ - On Py2, this is a no-op: native(obj) -> obj - - On Py3, returns the corresponding native Py3 types that are - superclasses for forward-ported objects from Py2: - - >>> from past.builtins import str, dict - - >>> native(str(b'ABC')) # Output on Py3 follows. On Py2, output is 'ABC' - b'ABC' - >>> type(native(str(b'ABC'))) - bytes - - Existing native types on Py3 will be returned unchanged: - - >>> type(native(b'ABC')) - bytes - """ - if hasattr(obj, '__native__'): - return obj.__native__() - else: - return obj - - -# An alias for future.utils.old_div(): -def old_div(a, b): - """ - Equivalent to ``a / b`` on Python 2 without ``from __future__ import - division``. - - TODO: generalize this to other objects (like arrays etc.) - """ - if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): - return a // b - else: - return a / b - -__all__ = ['PY3', 'PY2', 'PYPY', 'with_metaclass', 'native', 'old_div'] diff --git a/pype/vendor/pather/__init__.py b/pype/vendor/pather/__init__.py deleted file mode 100644 index 91094e45a8..0000000000 --- a/pype/vendor/pather/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -__author__ = 'Roy Nieterau' - - -from .core import * -from .version import * diff --git a/pype/vendor/pather/core.py b/pype/vendor/pather/core.py deleted file mode 100644 index 1203cf9e50..0000000000 --- a/pype/vendor/pather/core.py +++ /dev/null @@ -1,168 +0,0 @@ - -__all__ = ['parse', 'ls', 'ls_iter', 'format'] - -import os -import re -import string -import glob - -from .error import ParseError - -# Regex pattern that matches valid file -# A filename may not contain \/:*?"<>| -RE_FILENAME = r"[^\\/:\"*?<>|]" - - -def format(pattern, data, allow_partial=True): - """Format a pattern with a set of data - - Examples: - - Full formatting - >>> format("{a}/{b}/{c}", {"a": "foo", "b": "bar", "c": "nugget"}) - 'foo/bar/nugget' - - Partial formatting - >>> format("{asset}/{character}", {"asset": "hero"}) - 'hero/{character}' - - Disallow partial formatting - >>> format("{asset}/{character}", {"asset": "hero"}, - ... allow_partial=False) - Traceback (most recent call last): - ... 
- KeyError: 'character' - - Args: - pattern (str): The pattern to format. - data (dict): The key, value pairs used for formatting. - allow_partial (bool): Whether to raise error on partial format. - - Returns: - str: The formatted result - """ - - assert isinstance(data, dict) - - if not all(isinstance(value, basestring) for value in data.values()): - raise TypeError("The values in the data " - "dictionary must be strings") - - if allow_partial: - return _partial_format(pattern, data) - else: - return pattern.format(**data) - - -def parse(pattern, path): - """Parse data from a path based on a pattern - - Example: - >>> pattern = "root/{task}/{version}/data/" - >>> path = "root/modeling/v001/data/" - >>> parse(pattern, path) - {'task': 'modeling', 'version': 'v001'} - - Returns: - dict: The data retrieved from path using pattern. - """ - - pattern = os.path.normpath(pattern) - path = os.path.normpath(path) - - # Force forward slashes - path = path.replace('\\', '/') - pattern = pattern.replace('\\', '/') - - # Escape characters in path that are regex patterns so they are - # excluded by the regex searches. Exclude '{' and '}' in escaping. - pattern = re.escape(pattern) - pattern = pattern.replace('\{', '{').replace('\}', '}') - - keys = re.findall(r'{(%s+)}' % RE_FILENAME, - pattern) - if not keys: - return [] - - # Find the corresponding values - value_pattern = re.sub(r'{(%s+)}' % RE_FILENAME, - r'(%s+)' % RE_FILENAME, - pattern) - match_values = re.match(value_pattern, path) - - if not match_values: - raise ParseError("Path doesn't match with pattern. No values parsed") - - values = match_values.groups() - - return dict(zip(keys, values)) - - -def ls_iter(pattern, include=None, with_matches=False): - """Yield all matches for the given pattern. - - If the pattern starts with a relative path (or a dynamic key) the search - will start from the current working directory, defined by os.path.realpath. - - Arguments: - pattern (str): The pattern to match and search against. - include (dict): A dictionary used to target the search with the pattern - to include only those key-value pairs within the pattern. With this - you can reduce the filesystem query to a specified subset. - - Example: - >>> data = {"root": "foobar", "content": "nugget"} - >>> for path in ls_iter("{root}/{project}/data/{content}/", - ... include=data): - ... print path - - Returns: - (str, tuple): The matched paths (and data if `with_matches` is True) - - The returned value changes whether `with_matches` parameter is True or - False. If True a 2-tuple is yielded for each match as (path, data) else - only the path is returned - """ - - # format rule by data already provided to reduce query - if include is not None: - pattern = format(pattern, include, allow_partial=True) - - pattern = os.path.expandvars(pattern) - pattern = os.path.realpath(pattern) - - glob_pattern = re.sub(r'([/\\]{\w+}[/\\])', '/*/', pattern) # folder - glob_pattern = re.sub(r'({\w+})', '*', glob_pattern) # filename - - for path in glob.iglob(glob_pattern): - path = os.path.realpath(path) - if with_matches: - data = parse(pattern, path) - yield path, data - else: - yield path - - -def ls(pattern, include=None, with_matches=False): - return list(ls_iter(pattern, include, with_matches=with_matches)) - - -def _partial_format(s, data): - """Return string `s` formatted by `data` allowing a partial format - - Arguments: - s (str): The string that will be formatted - data (dict): The dictionary used to format with. 
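
For reference, the deleted pather helpers pair up like this (a minimal sketch based on the docstrings above; the template and paths are hypothetical):

    import pather

    path = pather.format("root/{task}/{version}/data",
                         {"task": "modeling", "version": "v001"})
    # -> 'root/modeling/v001/data'

    data = pather.parse("root/{task}/{version}/data", path)
    # -> {'task': 'modeling', 'version': 'v001'}

    # ls() globs the filesystem for every path matching the remaining keys:
    matches = pather.ls("root/{task}/{version}/data",
                        include={"task": "modeling"})
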
- - Example: - >>> _partial_format("{d} {a} {b} {c} {d}", {'b': "and", 'd': "left"}) - 'left {a} and {c} left' - """ - - class FormatDict(dict): - def __missing__(self, key): - return "{" + key + "}" - - formatter = string.Formatter() - mapping = FormatDict(**data) - return formatter.vformat(s, (), mapping) diff --git a/pype/vendor/pather/error.py b/pype/vendor/pather/error.py deleted file mode 100644 index 92006534d4..0000000000 --- a/pype/vendor/pather/error.py +++ /dev/null @@ -1,5 +0,0 @@ - - -class ParseError(ValueError): - """Error raised when parsing a path with a pattern fails""" - pass diff --git a/pype/vendor/pather/version.py b/pype/vendor/pather/version.py deleted file mode 100644 index 5296c380d7..0000000000 --- a/pype/vendor/pather/version.py +++ /dev/null @@ -1,10 +0,0 @@ - -VERSION_MAJOR = 0 -VERSION_MINOR = 1 -VERSION_PATCH = 1 - -version_info = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH) -version = '%i.%i.%i' % version_info -__version__ = version - -__all__ = ['version', 'version_info', '__version__'] diff --git a/pype/vendor/pico/__init__.py b/pype/vendor/pico/__init__.py deleted file mode 100644 index 77113ba42c..0000000000 --- a/pype/vendor/pico/__init__.py +++ /dev/null @@ -1,324 +0,0 @@ -""" -Pico is a minimalistic HTTP API framework for Python. - -Copyright (c) 2012, Fergal Walsh. -License: BSD -""" - -from __future__ import unicode_literals - -import sys -import traceback -import inspect -import importlib -import logging -import os.path -from io import open -from collections import defaultdict -from functools import partial - -from werkzeug.exceptions import HTTPException, NotFound, BadRequest, InternalServerError -from werkzeug.wrappers import Request, Response - -from . import pragmaticjson as json -from .decorators import base_decorator -from .wrappers import JsonResponse, JsonErrorResponse - -logger = logging.getLogger(__name__) -logger.addHandler(logging.NullHandler()) - - -try: - unicode -except NameError: - unicode = str - -__author__ = 'Fergal Walsh' -__version__ = '2.0.4' - - -registry = defaultdict(dict) - - -def expose(*args, **kwargs): - @base_decorator() - def wrapper(wrapped, args, kwargs, request): - return wrapped(*args, **kwargs) - - def decorator(func): - func = wrapper(func) - registry[func.__module__][func.__name__] = func - return func - return decorator - - -def prehandle(*args, **kwargs): - def decorator(f): - sys.modules[f.__module__]._prehandle = f - return f - return decorator - - -class PicoApp(object): - - def __init__(self, debug=False): - self.debug = debug - self.registry = defaultdict(dict) - self.modules = {} - self.definitions = {} - self.aliases = {} - self.url_map = {} - path = os.path.dirname((inspect.getfile(inspect.currentframe()))) - with open(path + '/pico.min.js') as f: - self._pico_js = f.read() - - def register_module(self, module, alias=None): - if type(module) == str: - module = importlib.import_module(module) - module_name = module.__name__ - alias = alias or module_name - self.aliases[module_name] = alias - self.modules[alias] = module - self.registry[alias] = registry[module_name] - self.definitions[alias] = {} - for func_name, func in self.registry[alias].items(): - self.definitions[alias][func_name] = self.function_definition(func) - self._build_url_map() - - def _get_alias(self, module_name): - return self.aliases.get(module_name, module_name) - - def _build_url_map(self): - self.url_map = {} - self.url_map['/pico.js'] = self.pico_js - self.url_map['/'] = self.app_definition_handler - self.url_map['/picoapp.js'] 
= partial(self.app_definition_handler, 'pico.loadAppDefinition') - for module_name in self.registry: - url = self.module_url(module_name) - # assign definition response handler to function to urls - self.url_map[url] = partial(self.module_definition_handler, module_name) - self.url_map[url + '.js'] = partial(self.module_definition_handler, module_name, 'pico.loadModuleDefinition') - for func_name, func in self.registry[module_name].items(): - url = self.func_url(func) - # assign the handler function to the the url - self.url_map[url] = func - - def module_url(self, module_name, pico_url='/'): - module_path = module_name.replace('.', '/') - url = '{pico_url}{module}'.format(module=module_path, pico_url=pico_url) - return url - - def func_url(self, func, pico_url='/'): - module_path = self._get_alias(func.__module__).replace('.', '/') - url = '{pico_url}{module}/{func_name}'.format(module=module_path, func_name=func.__name__, pico_url=pico_url) - return url - - def app_definition_handler(self, callback=None, _request=None): - app_def = self.app_definition(pico_url=_request.url_root) - response = JsonResponse(app_def) - if callback: - response = response.to_jsonp(callback) - return response - - def module_definition_handler(self, module_name, callback=None, _request=None): - module_def = self.module_definition(module_name, pico_url=_request.url_root) - response = JsonResponse(module_def) - if callback: - response = response.to_jsonp(callback) - return response - - def app_definition(self, pico_url='/'): - d = {} - d['url'] = pico_url - d['modules'] = [] - for module_name in self.registry: - d['modules'].append(self.module_definition(module_name, pico_url)) - return d - - def module_definition(self, module_name, pico_url='/'): - d = {} - d['name'] = module_name - d['doc'] = inspect.getdoc(self.modules[module_name]) - d['url'] = self.module_url(module_name, pico_url) - d['functions'] = [] - for func_name, func in self.registry[module_name].items(): - func_def = dict(self.definitions[module_name][func_name]) - func_def['url'] = self.func_url(func, pico_url) - d['functions'].append(func_def) - return d - - def function_definition(self, func, pico_url='/'): - annotations = dict(func._annotations) - request_args = set(annotations.pop('request_args', [])) - a = inspect.getargspec(func) - args = [] - for i, arg_name in enumerate(a.args): - if arg_name and arg_name != 'self' and arg_name not in request_args: - arg = {'name': arg_name} - di = (len(a.defaults or []) - len(a.args)) + i - if di >= 0: - arg['default'] = a.defaults[di] - args.append(arg) - d = dict( - name=func.__name__, - doc=inspect.getdoc(func), - url=self.func_url(func, pico_url), - args=args, - ) - if a.keywords is not None: - d['accept_extra_args'] = True - d.update(annotations) - return d - - def pico_js(self, **kwargs): - response = Response(self._pico_js, content_type='text/javascript') - return response - - def parse_args(self, request): - # first we take the GET querystring args - args = _multidict_to_dict(request.args) - # update and override args with post form data - args.update(_multidict_to_dict(request.form)) - # try to parse any strings as json - for k in args: - if isinstance(args[k], list): - for i, v in enumerate(args[k]): - args[k][i] = self._try_json_load(v) - else: - args[k] = self._try_json_load(args[k]) - # update args with files - args.update(_multidict_to_dict(request.files)) - # update and override args with json data - if 'application/json' in request.headers.get('content-type', ''): - data = 
request.get_data(as_text=True) - if data: - args.update(self.json_load(data)) - args['_request'] = request - return args - - def json_load(self, value): - return json.loads(value) - - def json_dump(self, value): - return json.dumps(value) - - def _try_json_load(self, value): - try: - return self.json_load(value) - except ValueError: - return value - - def dispatch_request(self, request): - path = request.path - if len(path) > 1 and path[-1] == '/': - path = path[:-1] - request.path = path - try: - handler = self.url_map[path] - except KeyError: - try: - path = request.script_root + path - handler = self.url_map[path] - request.path = path - except KeyError: - return NotFound() - return self.handle_request(request, handler) - - def check_args(self, handler, kwargs): - module_name = self._get_alias(handler.__module__) - func_def = self.definitions[module_name][handler.__name__] - args = {a['name']: a for a in func_def['args']} - missing = [k for k in (set(args.keys()) - set(kwargs.keys())) if 'default' not in args[k]] - extra = [k for k in (set(kwargs.keys()) - set(args.keys())) if k[0] != '_'] - message = '' - if extra and not func_def.get('accept_extra_args', False): - message += 'Unexpected parameters: [%s]. ' % ', '.join(extra) - if missing: - message += 'Missing required parameters: [%s]. ' % ', '.join(missing) - if message: - raise BadRequest(message) - - def prehandle(self, request, kwargs): - if self.debug and kwargs.pop('_debug', None): - request.use_debugger = True - else: - request.use_debugger = False - - try: - request.token = request.headers.get('Authorization', '').split('Token ')[-1] - except Exception: - request.token = None - - def posthandle(self, request, response): - pass - - def handle_exception(self, exception, request, **kwargs): - if isinstance(exception, HTTPException): - return JsonErrorResponse(exception, **kwargs) - else: - logger.exception(exception) - if request.use_debugger: - raise - e = InternalServerError() - if self.debug: - _, _, exc_tb = sys.exc_info() - trace = traceback.extract_tb(exc_tb) - trace = ['%s:%i in %s: %s' % t for t in trace if '/pico/' not in t[0]] - del exc_tb - d = dict( - name=type(exception).__name__, - message=unicode(exception), - stack_trace=trace, - ) - kwargs['__debug__'] = d - return JsonErrorResponse(e, **kwargs) - - def handle_request(self, request, handler): - try: - kwargs = self.parse_args(request) - callback = kwargs.pop('_callback', None) - if hasattr(handler, '__module__') and handler.__module__ in self.aliases: - module = self.modules.get(self._get_alias(handler.__module__)) - if module and self.prehandle: - self.prehandle(request, kwargs) - if hasattr(module, '_prehandle'): - module._prehandle(request, kwargs) - self.check_args(handler, kwargs) - result = handler(**kwargs) - if isinstance(result, Response): - response = result - else: - response = JsonResponse(json_string=self.json_dump(result)) - if callback: - response = response.to_jsonp(callback) - except Exception as e: - response = self.handle_exception(e, request) - finally: - self.posthandle(request, response) - return response - - def wsgi_app(self, environ, start_response): - script_name = environ.get('HTTP_X_SCRIPT_NAME', '') - if script_name: - environ['SCRIPT_NAME'] = script_name - path_info = environ['PATH_INFO'] - if path_info.startswith(script_name): - environ['PATH_INFO'] = path_info[len(script_name):] - request = Request(environ) - request.app = self - response = self.dispatch_request(request) - return response(environ, start_response) - - def 
__call__(self, environ, start_response): - return self.wsgi_app(environ, start_response) - - -def _multidict_to_dict(m): - """ Returns a dict with list values only when a key has multiple values. """ - d = {} - for k, v in m.lists(): - if len(v) == 1: - d[k] = v[0] - else: - d[k] = v - return d diff --git a/pype/vendor/pico/client.py b/pype/vendor/pico/client.py deleted file mode 100644 index 300afbab11..0000000000 --- a/pype/vendor/pico/client.py +++ /dev/null @@ -1,133 +0,0 @@ -""" -Load and call functions from a remote pico module. - -example = pico.client.load('example') -s = example.hello() -# s == "Hello World" - -s = example.hello("Python") -# s == "Hello Python" - -Use help(example.hello) or example.hello? as normal to check function parameters and docstrings. -""" -import os -from . import pragmaticjson as json -import imp -import requests - - -class PicoException(Exception): - pass - - -class PicoClient(object): - def __init__(self, url, headers={}): - self.url = url - if self.url[-1] != '/': - self.url += '/' - self.session = requests.Session() - self.session.timeout = 60.0 - self.session.headers.update(headers) - - def _request(self, url, args={}, timeout=None, headers={}): - if not url.startswith('http'): - url = self.url + url - timeout = timeout or self.session.timeout - if timeout < 0: - timeout = None - request_headers = dict(headers) - request_headers.update({ - 'content-type': 'application/json', - 'accept': 'application/json', - }) - body = json.dumps(args) - r = self.session.post(url, data=body, timeout=timeout, headers=request_headers) - try: - data = r.json() - except ValueError: - r.raise_for_status() - if not r.ok: - raise PicoException(data['message']) - else: - return data - - def _stream(self, url, args={}, timeout=None, headers={}): - if not url.startswith('http'): - url = self.url + url - timeout = timeout or self.session.timeout - if timeout < 0: - timeout = None - r = self.session.get(url, params=args, stream=True, timeout=timeout, headers=headers) - for line in r.iter_lines(chunk_size=1): - line = line.decode() - if 'data:' in line: - s = json.loads(line[6:]) - if s == 'PICO_CLOSE_STREAM': - return - else: - yield s - - def _call_function(self, module, function, args): - url = '{base_url}/{module}/{function}/'.format( - base_url=self.url, module=module.replace('.', '/'), function=function) - return self._request(url, args) - - def load(self, module_name): - """ - Load a remote module - example = client.load("example") - """ - module_dict = self._request(self.url + module_name.replace('.', '/')) - return self.load_from_dict(module_dict) - - def load_from_dict(self, module_def): - """ - Load a module from a definition dictionary - example = client.load_from_dict({...}) - """ - module_name = module_def['name'] - module = imp.new_module(module_name) - module.__doc__ = module_def['doc'] - module._pico_client = self - for function_def in module_def['functions']: - args = [(arg['name'], arg.get('default', None)) - for arg in function_def['args']] + [('_timeout', None), ('_headers', {})] - args_string = ', '.join(["%s=%r" % (a, d) for a, d in args]) - code = 'def {name}({arg_string}):\n' - code += ' """ {docstring} """\n' - code += ' args = locals()\n' - code += ' args.pop("_timeout")\n' - code += ' args.pop("_headers")\n' - if function_def.get('stream'): - code += ' return _pico_client._stream("{url}", args, timeout=_timeout, headers=_headers)' - else: - code += ' return _pico_client._request("{url}", args, timeout=_timeout, headers=_headers)' - code = 
code.format( - name=function_def['name'], - docstring=function_def['doc'], - arg_string=args_string, - url=function_def['url'], - ) - exec(code, module.__dict__) - return module - - def set_auth_token(self, token): - self.session.headers['Authorization'] = 'Token %s' % token - - def set_auth_basic(self, username, password): - self.session.auth = (username, password) - - def clear_auth(self): - self.session.auth = None - self.session.headers.pop('Authorization', None) - - -def load(module_name): - ip = os.getenv("PICO_IP", '127.0.0.1') - if ip.startswith('http'): - ip = ip.replace("http://", "") - port = int(os.getenv("PICO_PORT", 4242)) - client = PicoClient( - "http://{0}:{1}".format(ip, port) - ) - return client.load(module_name) diff --git a/pype/vendor/pico/decorators.py b/pype/vendor/pico/decorators.py deleted file mode 100644 index 87b8670e1e..0000000000 --- a/pype/vendor/pico/decorators.py +++ /dev/null @@ -1,132 +0,0 @@ -import types - -import wrapt - -from werkzeug.exceptions import MethodNotAllowed -from werkzeug.wrappers import Response - -from . import pragmaticjson as json -from .wrappers import JsonResponse - - -def base_decorator(annotations={}, *args, **kwargs): - def _base_decorator(wrapper): - def _wrapper(wrapped, instance, args, kwargs): - request = kwargs.get('_request', None) - if not hasattr(wrapped, '_self_pico_request_decorator'): - kwargs.pop('_request', None) - if request: - return wrapper(wrapped, args, kwargs, request) - else: - return wrapped(*args, **kwargs) - - def new_wrapper(f): - x = wrapt.decorator(_wrapper)(f) - x._annotations = getattr(x, '_annotations', {}) - x._annotations.update(annotations) - x._self_pico_request_decorator = True - return x - return new_wrapper - return _base_decorator - - -def request_args(*args, **kwargs): - @base_decorator(annotations={'request_args': list(kwargs.keys()) + list(args)}) - def wrapper(wrapped, innerargs, innerkwargs, request): - if args: - innerkwargs[args[0]] = request - for k, v in kwargs.items(): - if isinstance(v, types.FunctionType): - innerkwargs[k] = v(request) - else: - innerkwargs[k] = getattr(request, v) - return wrapped(*innerargs, **innerkwargs) - return wrapper - - -def stream(*args, **kwargs): - @base_decorator(annotations={'stream': True}) - def wrapper(wrapped, args, kwargs, request): - result = wrapped(*args, **kwargs) - - def f(stream): - try: - for d in stream: - yield 'data: ' + json.dumps(d) + '\n\n' - yield 'event: close\n' - yield 'data: close\n\n' - except Exception as e: - yield 'event: error\n' - yield 'data: ' + str(e) + '\n\n' - response = Response(f(result), content_type='text/event-stream') - return response - return wrapper - - -def set_cookie(*args, **kwargs): - @base_decorator() - def wrapper(wrapped, innerargs, innerkwargs, request): - result = wrapped(*innerargs, **innerkwargs) - response = JsonResponse(result) - for k, v in result.items(): - response.set_cookie(k, v, **kwargs) - return response - return wrapper - - -def delete_cookie(key, **kwargs): - @base_decorator() - def wrapper(wrapped, innerargs, innerkwargs, request): - result = wrapped(*innerargs, **innerkwargs) - response = JsonResponse(result) - response.delete_cookie(key, **kwargs) - return response - return wrapper - - -def protected(protector, annotations={}): - """ - Decorator for protecting a function. - The protected function will not be called if the protector raises - an exception or returns False. 
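
For reference, the decorators above compose with `pico.expose` like this (a minimal sketch; `token_ok`, `whoami` and the token value are hypothetical):

    import pico
    from pico.decorators import protected, request_args

    def token_ok(request, wrapped, args, kwargs):
        # Protector per the signature described just below: returning False
        # (or raising) prevents the exposed function from being called.
        return request.token == 'secret-token'

    @pico.expose()
    @protected(token_ok)
    @request_args(ip='remote_addr')   # inject request.remote_addr as `ip`
    def whoami(ip):
        return {'ip': ip}
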
- The protector should have the following signature: - def protector(request, wrapped, args, kwargs): - return True - """ - @base_decorator(annotations) - def wrapper(wrapped, args, kwargs, request): - if protector(request, wrapped, args, kwargs) is not False: - return wrapped(*args, **kwargs) - return wrapper - - -def require_method(method): - def p(request, w, args, kwargs): - if not request.method == method: - raise MethodNotAllowed() - return protected(p, annotations={'method': method}) - - -def cookie(key): - def accessor(request): - return request.cookies.get(key) - return accessor - - -def header(name): - def accessor(request): - return request.headers.get(name) - return accessor - - -def basic_auth(name=None): - def accessor(request): - if request.authorization: - auth = request.authorization - if name is None: - return (auth.username, auth.password) - else: - return getattr(auth, name) - else: - return None - return accessor diff --git a/pype/vendor/pico/exceptions.py b/pype/vendor/pico/exceptions.py deleted file mode 100644 index f89e15710f..0000000000 --- a/pype/vendor/pico/exceptions.py +++ /dev/null @@ -1,9 +0,0 @@ -import werkzeug.exceptions -from werkzeug.exceptions import * - - -class Unauthorized(werkzeug.exceptions.Unauthorized): - """ Overridden to return WWW-Authenticate challenge header """ - def get_headers(self, environ): - return [('Content-Type', 'text/html'), - ('WWW-Authenticate', 'Basic realm="Login required"')] diff --git a/pype/vendor/pico/extras/__init__.py b/pype/vendor/pico/extras/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/pico/extras/sentry.py b/pype/vendor/pico/extras/sentry.py deleted file mode 100644 index da3be2dd5a..0000000000 --- a/pype/vendor/pico/extras/sentry.py +++ /dev/null @@ -1,43 +0,0 @@ -from pico.exceptions import HTTPException - - -def set_context(client, request): - client.http_context({ - 'method': request.method, - 'url': request.base_url, - 'query_string': request.query_string, - 'data': request.get_data(), - 'headers': dict(request.headers), - - }) - if request.authorization: - client.user_context({ - 'user': request.authorization.username - }) - - -class SentryMixin(object): - sentry_client = None - sentry_ignore_exceptions = (HTTPException,) - - def prehandle(self, request, kwargs): - if self.sentry_client: - set_context(self.sentry_client, request) - super(SentryMixin, self).prehandle(request, kwargs) - - def handle_exception(self, exception, request, **kwargs): - if self.sentry_ignore_exceptions: - should_ignore = isinstance(exception, tuple(self.sentry_ignore_exceptions)) - else: - should_ignore = False - if not should_ignore and self.sentry_client: - sentry_id = self.sentry_client.captureException() - if sentry_id: - # The sentry_id is passed down to pico's JsonErrorResponse so it is - # included as a value in the response. 
-                kwargs['sentry_id'] = sentry_id
-        return super(SentryMixin, self).handle_exception(exception, request, **kwargs)
-
-    def posthandle(self, request, response):
-        self.sentry_client.context.clear()
-        super(SentryMixin, self).posthandle(request, response)
diff --git a/pype/vendor/pico/pico.min.js b/pype/vendor/pico/pico.min.js
deleted file mode 100644
index f3435a48b5..0000000000
--- a/pype/vendor/pico/pico.min.js
+++ /dev/null
@@ -1,6 +0,0 @@
-[6 lines of minified, generated JavaScript (the bundled pico.js browser client) deleted; bundle contents omitted]
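
For reference, the SentryMixin deleted above is wired in like this (a minimal sketch; it assumes the legacy `raven` client, whose captureException()/context API the mixin calls, and the DSN is a placeholder):

    from raven import Client
    from pico import PicoApp
    from pico.extras.sentry import SentryMixin

    class App(SentryMixin, PicoApp):
        # Overrides the sentry_client = None class attribute on the mixin.
        sentry_client = Client('https://key@sentry.example.com/1')

    app = App()
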
Length must be a multiple of 4")}placeHolders=b64[len-2]==="="?2:b64[len-1]==="="?1:0;arr=new Arr(len*3/4-placeHolders);l=placeHolders>0?len-4:len;var L=0;for(i=0,j=0;i>16&255;arr[L++]=tmp>>8&255;arr[L++]=tmp&255}if(placeHolders===2){tmp=revLookup[b64.charCodeAt(i)]<<2|revLookup[b64.charCodeAt(i+1)]>>4;arr[L++]=tmp&255}else if(placeHolders===1){tmp=revLookup[b64.charCodeAt(i)]<<10|revLookup[b64.charCodeAt(i+1)]<<4|revLookup[b64.charCodeAt(i+2)]>>2;arr[L++]=tmp>>8&255;arr[L++]=tmp&255}return arr}function tripletToBase64(num){return lookup[num>>18&63]+lookup[num>>12&63]+lookup[num>>6&63]+lookup[num&63]}function encodeChunk(uint8,start,end){var tmp;var output=[];for(var i=start;ilen2?len2:i+maxChunkLength))}if(extraBytes===1){tmp=uint8[len-1];output+=lookup[tmp>>2];output+=lookup[tmp<<4&63];output+="=="}else if(extraBytes===2){tmp=(uint8[len-2]<<8)+uint8[len-1];output+=lookup[tmp>>10];output+=lookup[tmp>>4&63];output+=lookup[tmp<<2&63];output+="="}parts.push(output);return parts.join("")}},{}],15:[function(require,module,exports){exports.read=function(buffer,offset,isLE,mLen,nBytes){var e,m;var eLen=nBytes*8-mLen-1;var eMax=(1<>1;var nBits=-7;var i=isLE?nBytes-1:0;var d=isLE?-1:1;var s=buffer[offset+i];i+=d;e=s&(1<<-nBits)-1;s>>=-nBits;nBits+=eLen;for(;nBits>0;e=e*256+buffer[offset+i],i+=d,nBits-=8){}m=e&(1<<-nBits)-1;e>>=-nBits;nBits+=mLen;for(;nBits>0;m=m*256+buffer[offset+i],i+=d,nBits-=8){}if(e===0){e=1-eBias}else if(e===eMax){return m?NaN:(s?-1:1)*Infinity}else{m=m+Math.pow(2,mLen);e=e-eBias}return(s?-1:1)*m*Math.pow(2,e-mLen)};exports.write=function(buffer,value,offset,isLE,mLen,nBytes){var e,m,c;var eLen=nBytes*8-mLen-1;var eMax=(1<>1;var rt=mLen===23?Math.pow(2,-24)-Math.pow(2,-77):0;var i=isLE?0:nBytes-1;var d=isLE?1:-1;var s=value<0||value===0&&1/value<0?1:0;value=Math.abs(value);if(isNaN(value)||value===Infinity){m=isNaN(value)?1:0;e=eMax}else{e=Math.floor(Math.log(value)/Math.LN2);if(value*(c=Math.pow(2,-e))<1){e--;c*=2}if(e+eBias>=1){value+=rt/c}else{value+=rt*Math.pow(2,1-eBias)}if(value*c>=2){e++;c/=2}if(e+eBias>=eMax){m=0;e=eMax}else if(e+eBias>=1){m=(value*c-1)*Math.pow(2,mLen);e=e+eBias}else{m=value*Math.pow(2,eBias-1)*Math.pow(2,mLen);e=0}}for(;mLen>=8;buffer[offset+i]=m&255,i+=d,m/=256,mLen-=8){}e=e<0;buffer[offset+i]=e&255,i+=d,e/=256,eLen-=8){}buffer[offset+i-d]|=s*128}},{}],16:[function(require,module,exports){var toString={}.toString;module.exports=Array.isArray||function(arr){return toString.call(arr)=="[object Array]"}},{}],17:[function(require,module,exports){function EventEmitter(){this._events=this._events||{};this._maxListeners=this._maxListeners||undefined}module.exports=EventEmitter;EventEmitter.EventEmitter=EventEmitter;EventEmitter.prototype._events=undefined;EventEmitter.prototype._maxListeners=undefined;EventEmitter.defaultMaxListeners=10;EventEmitter.prototype.setMaxListeners=function(n){if(!isNumber(n)||n<0||isNaN(n))throw TypeError("n must be a positive number");this._maxListeners=n;return this};EventEmitter.prototype.emit=function(type){var er,handler,len,args,i,listeners;if(!this._events)this._events={};if(type==="error"){if(!this._events.error||isObject(this._events.error)&&!this._events.error.length){er=arguments[1];if(er instanceof Error){throw er}else{var err=new Error('Uncaught, unspecified "error" event. 
('+er+")");err.context=er;throw err}}}handler=this._events[type];if(isUndefined(handler))return false;if(isFunction(handler)){switch(arguments.length){case 1:handler.call(this);break;case 2:handler.call(this,arguments[1]);break;case 3:handler.call(this,arguments[1],arguments[2]);break;default:args=Array.prototype.slice.call(arguments,1);handler.apply(this,args)}}else if(isObject(handler)){args=Array.prototype.slice.call(arguments,1);listeners=handler.slice();len=listeners.length;for(i=0;i0&&this._events[type].length>m){this._events[type].warned=true;console.error("(node) warning: possible EventEmitter memory "+"leak detected. %d listeners added. "+"Use emitter.setMaxListeners() to increase limit.",this._events[type].length);if(typeof console.trace==="function"){console.trace()}}}return this};EventEmitter.prototype.on=EventEmitter.prototype.addListener;EventEmitter.prototype.once=function(type,listener){if(!isFunction(listener))throw TypeError("listener must be a function");var fired=false;function g(){this.removeListener(type,g);if(!fired){fired=true;listener.apply(this,arguments)}}g.listener=listener;this.on(type,g);return this};EventEmitter.prototype.removeListener=function(type,listener){var list,position,length,i;if(!isFunction(listener))throw TypeError("listener must be a function");if(!this._events||!this._events[type])return this;list=this._events[type];length=list.length;position=-1;if(list===listener||isFunction(list.listener)&&list.listener===listener){delete this._events[type];if(this._events.removeListener)this.emit("removeListener",type,listener)}else if(isObject(list)){for(i=length;i-- >0;){if(list[i]===listener||list[i].listener&&list[i].listener===listener){position=i;break}}if(position<0)return this;if(list.length===1){list.length=0;delete this._events[type]}else{list.splice(position,1)}if(this._events.removeListener)this.emit("removeListener",type,listener)}return this};EventEmitter.prototype.removeAllListeners=function(type){var key,listeners;if(!this._events)return this;if(!this._events.removeListener){if(arguments.length===0)this._events={};else if(this._events[type])delete this._events[type];return this}if(arguments.length===0){for(key in this._events){if(key==="removeListener")continue;this.removeAllListeners(key)}this.removeAllListeners("removeListener");this._events={};return this}listeners=this._events[type];if(isFunction(listeners)){this.removeListener(type,listeners)}else if(listeners){while(listeners.length)this.removeListener(type,listeners[listeners.length-1])}delete this._events[type];return this};EventEmitter.prototype.listeners=function(type){var ret;if(!this._events||!this._events[type])ret=[];else if(isFunction(this._events[type]))ret=[this._events[type]];else ret=this._events[type].slice();return ret};EventEmitter.prototype.listenerCount=function(type){if(this._events){var evlistener=this._events[type];if(isFunction(evlistener))return 1;else if(evlistener)return evlistener.length}return 0};EventEmitter.listenerCount=function(emitter,type){return emitter.listenerCount(type)};function isFunction(arg){return typeof arg==="function"}function isNumber(arg){return typeof arg==="number"}function isObject(arg){return typeof arg==="object"&&arg!==null}function isUndefined(arg){return arg===void 0}},{}],18:[function(require,module,exports){var http=require("http");var https=module.exports;for(var key in 
http){if(http.hasOwnProperty(key))https[key]=http[key]}https.request=function(params,cb){if(!params)params={};params.scheme="https";params.protocol="https:";return http.request.call(this,params,cb)}},{http:38}],19:[function(require,module,exports){if(typeof Object.create==="function"){module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;ctor.prototype=Object.create(superCtor.prototype,{constructor:{value:ctor,enumerable:false,writable:true,configurable:true}})}}else{module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;var TempCtor=function(){};TempCtor.prototype=superCtor.prototype;ctor.prototype=new TempCtor;ctor.prototype.constructor=ctor}}},{}],20:[function(require,module,exports){module.exports=function(obj){return obj!=null&&(isBuffer(obj)||isSlowBuffer(obj)||!!obj._isBuffer)};function isBuffer(obj){return!!obj.constructor&&typeof obj.constructor.isBuffer==="function"&&obj.constructor.isBuffer(obj)}function isSlowBuffer(obj){return typeof obj.readFloatLE==="function"&&typeof obj.slice==="function"&&isBuffer(obj.slice(0,0))}},{}],21:[function(require,module,exports){var process=module.exports={};var cachedSetTimeout;var cachedClearTimeout;(function(){try{cachedSetTimeout=setTimeout}catch(e){cachedSetTimeout=function(){throw new Error("setTimeout is not defined")}}try{cachedClearTimeout=clearTimeout}catch(e){cachedClearTimeout=function(){throw new Error("clearTimeout is not defined")}}})();function runTimeout(fun){if(cachedSetTimeout===setTimeout){return setTimeout(fun,0)}try{return cachedSetTimeout(fun,0)}catch(e){try{return cachedSetTimeout.call(null,fun,0)}catch(e){return cachedSetTimeout.call(this,fun,0)}}}function runClearTimeout(marker){if(cachedClearTimeout===clearTimeout){return clearTimeout(marker)}try{return cachedClearTimeout(marker)}catch(e){try{return cachedClearTimeout.call(null,marker)}catch(e){return cachedClearTimeout.call(this,marker)}}}var queue=[];var draining=false;var currentQueue;var queueIndex=-1;function cleanUpNextTick(){if(!draining||!currentQueue){return}draining=false;if(currentQueue.length){queue=currentQueue.concat(queue)}else{queueIndex=-1}if(queue.length){drainQueue()}}function drainQueue(){if(draining){return}var timeout=runTimeout(cleanUpNextTick);draining=true;var len=queue.length;while(len){currentQueue=queue;queue=[];while(++queueIndex1){for(var i=1;i= 0x80 (not a basic code point)","invalid-input":"Invalid input"},baseMinusTMin=base-tMin,floor=Math.floor,stringFromCharCode=String.fromCharCode,key;function error(type){throw new RangeError(errors[type])}function map(array,fn){var length=array.length;var result=[];while(length--){result[length]=fn(array[length])}return result}function mapDomain(string,fn){var parts=string.split("@");var result="";if(parts.length>1){result=parts[0]+"@";string=parts[1]}string=string.replace(regexSeparators,".");var labels=string.split(".");var encoded=map(labels,fn).join(".");return result+encoded}function ucs2decode(string){var output=[],counter=0,length=string.length,value,extra;while(counter=55296&&value<=56319&&counter65535){value-=65536;output+=stringFromCharCode(value>>>10&1023|55296);value=56320|value&1023}output+=stringFromCharCode(value);return output}).join("")}function basicToDigit(codePoint){if(codePoint-48<10){return codePoint-22}if(codePoint-65<26){return codePoint-65}if(codePoint-97<26){return codePoint-97}return base}function digitToBasic(digit,flag){return digit+22+75*(digit<26)-((flag!=0)<<5)}function adapt(delta,numPoints,firstTime){var 
k=0;delta=firstTime?floor(delta/damp):delta>>1;delta+=floor(delta/numPoints);for(;delta>baseMinusTMin*tMax>>1;k+=base){delta=floor(delta/baseMinusTMin)}return floor(k+(baseMinusTMin+1)*delta/(delta+skew))}function decode(input){var output=[],inputLength=input.length,out,i=0,n=initialN,bias=initialBias,basic,j,index,oldi,w,k,digit,t,baseMinusT;basic=input.lastIndexOf(delimiter);if(basic<0){basic=0}for(j=0;j=128){error("not-basic")}output.push(input.charCodeAt(j))}for(index=basic>0?basic+1:0;index=inputLength){error("invalid-input")}digit=basicToDigit(input.charCodeAt(index++));if(digit>=base||digit>floor((maxInt-i)/w)){error("overflow")}i+=digit*w;t=k<=bias?tMin:k>=bias+tMax?tMax:k-bias;if(digitfloor(maxInt/baseMinusT)){error("overflow")}w*=baseMinusT}out=output.length+1;bias=adapt(i-oldi,out,oldi==0);if(floor(i/out)>maxInt-n){error("overflow")}n+=floor(i/out);i%=out;output.splice(i++,0,n)}return ucs2encode(output)}function encode(input){var n,delta,handledCPCount,basicLength,bias,j,m,q,k,t,currentValue,output=[],inputLength,handledCPCountPlusOne,baseMinusT,qMinusT;input=ucs2decode(input);inputLength=input.length;n=initialN;delta=0;bias=initialBias;for(j=0;j=n&¤tValuefloor((maxInt-delta)/handledCPCountPlusOne)){error("overflow")}delta+=(m-n)*handledCPCountPlusOne;n=m;for(j=0;jmaxInt){error("overflow")}if(currentValue==n){for(q=delta,k=base;;k+=base){t=k<=bias?tMin:k>=bias+tMax?tMax:k-bias;if(q0&&len>maxKeys){len=maxKeys}for(var i=0;i=0){kstr=x.substr(0,idx);vstr=x.substr(idx+1)}else{kstr=x;vstr=""}k=decodeURIComponent(kstr);v=decodeURIComponent(vstr);if(!hasOwnProperty(obj,k)){obj[k]=v}else if(isArray(obj[k])){obj[k].push(v)}else{obj[k]=[obj[k],v]}}return obj};var isArray=Array.isArray||function(xs){return Object.prototype.toString.call(xs)==="[object Array]"}},{}],24:[function(require,module,exports){"use strict";var stringifyPrimitive=function(v){switch(typeof v){case"string":return v;case"boolean":return v?"true":"false";case"number":return isFinite(v)?v:"";default:return""}};module.exports=function(obj,sep,eq,name){sep=sep||"&";eq=eq||"=";if(obj===null){obj=undefined}if(typeof obj==="object"){return map(objectKeys(obj),function(k){var ks=encodeURIComponent(stringifyPrimitive(k))+eq;if(isArray(obj[k])){return map(obj[k],function(v){return ks+encodeURIComponent(stringifyPrimitive(v))}).join(sep)}else{return ks+encodeURIComponent(stringifyPrimitive(obj[k]))}}).join(sep)}if(!name)return"";return encodeURIComponent(stringifyPrimitive(name))+eq+encodeURIComponent(stringifyPrimitive(obj))};var isArray=Array.isArray||function(xs){return Object.prototype.toString.call(xs)==="[object Array]"};function map(xs,f){if(xs.map)return xs.map(f);var res=[];for(var i=0;i0){if(state.ended&&!addToFront){var e=new Error("stream.push() after EOF");stream.emit("error",e)}else if(state.endEmitted&&addToFront){var _e=new Error("stream.unshift() after end event");stream.emit("error",_e)}else{var skipAdd;if(state.decoder&&!addToFront&&!encoding){chunk=state.decoder.write(chunk);skipAdd=!state.objectMode&&chunk.length===0}if(!addToFront)state.reading=false;if(!skipAdd){if(state.flowing&&state.length===0&&!state.sync){stream.emit("data",chunk);stream.read(0)}else{state.length+=state.objectMode?1:chunk.length;if(addToFront)state.buffer.unshift(chunk);else state.buffer.push(chunk);if(state.needReadable)emitReadable(stream)}}maybeReadMore(stream,state)}}else if(!addToFront){state.reading=false}return needMoreData(state)}function 
needMoreData(state){return!state.ended&&(state.needReadable||state.length=MAX_HWM){n=MAX_HWM}else{n--;n|=n>>>1;n|=n>>>2;n|=n>>>4;n|=n>>>8;n|=n>>>16;n++}return n}function howMuchToRead(n,state){if(n<=0||state.length===0&&state.ended)return 0;if(state.objectMode)return 1;if(n!==n){if(state.flowing&&state.length)return state.buffer.head.data.length;else return state.length}if(n>state.highWaterMark)state.highWaterMark=computeNewHighWaterMark(n);if(n<=state.length)return n;if(!state.ended){state.needReadable=true;return 0}return state.length}Readable.prototype.read=function(n){debug("read",n);n=parseInt(n,10);var state=this._readableState;var nOrig=n;if(n!==0)state.emittedReadable=false;if(n===0&&state.needReadable&&(state.length>=state.highWaterMark||state.ended)){debug("read: emitReadable",state.length,state.ended);if(state.length===0&&state.ended)endReadable(this);else emitReadable(this);return null}n=howMuchToRead(n,state);if(n===0&&state.ended){if(state.length===0)endReadable(this);return null}var doRead=state.needReadable;debug("need readable",doRead);if(state.length===0||state.length-n0)ret=fromList(n,state);else ret=null;if(ret===null){state.needReadable=true;n=0}else{state.length-=n}if(state.length===0){if(!state.ended)state.needReadable=true;if(nOrig!==n&&state.ended)endReadable(this)}if(ret!==null)this.emit("data",ret);return ret};function chunkInvalid(state,chunk){var er=null;if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==null&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}return er}function onEofChunk(stream,state){if(state.ended)return;if(state.decoder){var chunk=state.decoder.end();if(chunk&&chunk.length){state.buffer.push(chunk);state.length+=state.objectMode?1:chunk.length}}state.ended=true;emitReadable(stream)}function emitReadable(stream){var state=stream._readableState;state.needReadable=false;if(!state.emittedReadable){debug("emitReadable",state.flowing);state.emittedReadable=true;if(state.sync)processNextTick(emitReadable_,stream);else emitReadable_(stream)}}function emitReadable_(stream){debug("emit readable");stream.emit("readable");flow(stream)}function maybeReadMore(stream,state){if(!state.readingMore){state.readingMore=true;processNextTick(maybeReadMore_,stream,state)}}function maybeReadMore_(stream,state){var len=state.length;while(!state.reading&&!state.flowing&&!state.ended&&state.length1&&indexOf(state.pipes,dest)!==-1)&&!cleanedUp){debug("false write response, pause",src._readableState.awaitDrain);src._readableState.awaitDrain++;increasedAwaitDrain=true}src.pause()}}function onerror(er){debug("onerror",er);unpipe();dest.removeListener("error",onerror);if(EElistenerCount(dest,"error")===0)dest.emit("error",er)}prependListener(dest,"error",onerror);function onclose(){dest.removeListener("finish",onfinish);unpipe()}dest.once("close",onclose);function onfinish(){debug("onfinish");dest.removeListener("close",onclose);unpipe()}dest.once("finish",onfinish);function unpipe(){debug("unpipe");src.unpipe(dest)}dest.emit("pipe",src);if(!state.flowing){debug("pipe resume");src.resume()}return dest};function pipeOnDrain(src){return function(){var state=src._readableState;debug("pipeOnDrain",state.awaitDrain);if(state.awaitDrain)state.awaitDrain--;if(state.awaitDrain===0&&EElistenerCount(src,"data")){state.flowing=true;flow(src)}}}Readable.prototype.unpipe=function(dest){var state=this._readableState;if(state.pipesCount===0)return this;if(state.pipesCount===1){if(dest&&dest!==state.pipes)return 
this;if(!dest)dest=state.pipes;state.pipes=null;state.pipesCount=0;state.flowing=false;if(dest)dest.emit("unpipe",this);return this}if(!dest){var dests=state.pipes;var len=state.pipesCount;state.pipes=null;state.pipesCount=0;state.flowing=false;for(var _i=0;_i=state.length){if(state.decoder)ret=state.buffer.join("");else if(state.buffer.length===1)ret=state.buffer.head.data;else ret=state.buffer.concat(state.length);state.buffer.clear()}else{ret=fromListPartial(n,state.buffer,state.decoder)}return ret}function fromListPartial(n,list,hasStrings){var ret;if(nstr.length?str.length:n;if(nb===str.length)ret+=str;else ret+=str.slice(0,n);n-=nb;if(n===0){if(nb===str.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=str.slice(nb)}break}++c}list.length-=c;return ret}function copyFromBuffer(n,list){var ret=bufferShim.allocUnsafe(n);var p=list.head;var c=1;p.data.copy(ret);n-=p.data.length;while(p=p.next){var buf=p.data;var nb=n>buf.length?buf.length:n;buf.copy(ret,ret.length-n,0,nb);n-=nb;if(n===0){if(nb===buf.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=buf.slice(nb)}break}++c}list.length-=c;return ret}function endReadable(stream){var state=stream._readableState;if(state.length>0)throw new Error('"endReadable()" called on non-empty stream');if(!state.endEmitted){state.ended=true;processNextTick(endReadableNT,state,stream)}}function endReadableNT(state,stream){if(!state.endEmitted&&state.length===0){state.endEmitted=true;stream.readable=false;stream.emit("end")}}function forEach(xs,f){for(var i=0,l=xs.length;i-1?setImmediate:processNextTick;Writable.WritableState=WritableState;var util=require("core-util-is");util.inherits=require("inherits");var internalUtil={deprecate:require("util-deprecate")};var Stream;(function(){try{Stream=require("st"+"ream")}catch(_){}finally{if(!Stream)Stream=require("events").EventEmitter}})();var Buffer=require("buffer").Buffer;var bufferShim=require("buffer-shims");util.inherits(Writable,Stream);function nop(){}function WriteReq(chunk,encoding,cb){this.chunk=chunk;this.encoding=encoding;this.callback=cb;this.next=null}var Duplex;function WritableState(options,stream){Duplex=Duplex||require("./_stream_duplex");options=options||{};this.objectMode=!!options.objectMode;if(stream instanceof Duplex)this.objectMode=this.objectMode||!!options.writableObjectMode;var hwm=options.highWaterMark;var defaultHwm=this.objectMode?16:16*1024;this.highWaterMark=hwm||hwm===0?hwm:defaultHwm;this.highWaterMark=~~this.highWaterMark;this.needDrain=false;this.ending=false;this.ended=false;this.finished=false;var noDecode=options.decodeStrings===false;this.decodeStrings=!noDecode;this.defaultEncoding=options.defaultEncoding||"utf8";this.length=0;this.writing=false;this.corked=0;this.sync=true;this.bufferProcessing=false;this.onwrite=function(er){onwrite(stream,er)};this.writecb=null;this.writelen=0;this.bufferedRequest=null;this.lastBufferedRequest=null;this.pendingcb=0;this.prefinished=false;this.errorEmitted=false;this.bufferedRequestCount=0;this.corkedRequestsFree=new CorkedRequest(this)}WritableState.prototype.getBuffer=function writableStateGetBuffer(){var current=this.bufferedRequest;var out=[];while(current){out.push(current);current=current.next}return out};(function(){try{Object.defineProperty(WritableState.prototype,"buffer",{get:internalUtil.deprecate(function(){return this.getBuffer()},"_writableState.buffer is deprecated. 
Use _writableState.getBuffer "+"instead.")})}catch(_){}})();var Duplex;function Writable(options){Duplex=Duplex||require("./_stream_duplex");if(!(this instanceof Writable)&&!(this instanceof Duplex))return new Writable(options);this._writableState=new WritableState(options,this);this.writable=true;if(options){if(typeof options.write==="function")this._write=options.write;if(typeof options.writev==="function")this._writev=options.writev}Stream.call(this)}Writable.prototype.pipe=function(){this.emit("error",new Error("Cannot pipe, not readable"))};function writeAfterEnd(stream,cb){var er=new Error("write after end");stream.emit("error",er);processNextTick(cb,er)}function validChunk(stream,state,chunk,cb){var valid=true;var er=false;if(chunk===null){er=new TypeError("May not write null values to stream")}else if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}if(er){stream.emit("error",er);processNextTick(cb,er);valid=false}return valid}Writable.prototype.write=function(chunk,encoding,cb){var state=this._writableState;var ret=false;if(typeof encoding==="function"){cb=encoding;encoding=null}if(Buffer.isBuffer(chunk))encoding="buffer";else if(!encoding)encoding=state.defaultEncoding;if(typeof cb!=="function")cb=nop;if(state.ended)writeAfterEnd(this,cb);else if(validChunk(this,state,chunk,cb)){state.pendingcb++;ret=writeOrBuffer(this,state,chunk,encoding,cb)}return ret};Writable.prototype.cork=function(){var state=this._writableState;state.corked++};Writable.prototype.uncork=function(){var state=this._writableState;if(state.corked){state.corked--;if(!state.writing&&!state.corked&&!state.finished&&!state.bufferProcessing&&state.bufferedRequest)clearBuffer(this,state)}};Writable.prototype.setDefaultEncoding=function setDefaultEncoding(encoding){if(typeof encoding==="string")encoding=encoding.toLowerCase();if(!(["hex","utf8","utf-8","ascii","binary","base64","ucs2","ucs-2","utf16le","utf-16le","raw"].indexOf((encoding+"").toLowerCase())>-1))throw new TypeError("Unknown encoding: "+encoding);this._writableState.defaultEncoding=encoding;return this};function decodeChunk(state,chunk,encoding){if(!state.objectMode&&state.decodeStrings!==false&&typeof chunk==="string"){chunk=bufferShim.from(chunk,encoding)}return chunk}function writeOrBuffer(stream,state,chunk,encoding,cb){chunk=decodeChunk(state,chunk,encoding);if(Buffer.isBuffer(chunk))encoding="buffer";var len=state.objectMode?1:chunk.length;state.length+=len;var ret=state.length0)this.tail.next=entry;else this.head=entry;this.tail=entry;++this.length};BufferList.prototype.unshift=function(v){var entry={data:v,next:this.head};if(this.length===0)this.tail=entry;this.head=entry;++this.length};BufferList.prototype.shift=function(){if(this.length===0)return;var ret=this.head.data;if(this.length===1)this.head=this.tail=null;else this.head=this.head.next;--this.length;return ret};BufferList.prototype.clear=function(){this.head=this.tail=null;this.length=0};BufferList.prototype.join=function(s){if(this.length===0)return"";var p=this.head;var ret=""+p.data;while(p=p.next){ret+=s+p.data}return ret};BufferList.prototype.concat=function(n){if(this.length===0)return bufferShim.alloc(0);if(this.length===1)return this.head.data;var ret=bufferShim.allocUnsafe(n>>>0);var p=this.head;var i=0;while(p){p.data.copy(ret,i);i+=p.data.length;p=p.next}return ret}},{buffer:13,"buffer-shims":32}],32:[function(require,module,exports){(function(global){"use strict";var 
buffer=require("buffer");var Buffer=buffer.Buffer;var SlowBuffer=buffer.SlowBuffer;var MAX_LEN=buffer.kMaxLength||2147483647;exports.alloc=function alloc(size,fill,encoding){if(typeof Buffer.alloc==="function"){return Buffer.alloc(size,fill,encoding)}if(typeof encoding==="number"){throw new TypeError("encoding must not be number")}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>MAX_LEN){throw new RangeError("size is too large")}var enc=encoding;var _fill=fill;if(_fill===undefined){enc=undefined;_fill=0}var buf=new Buffer(size);if(typeof _fill==="string"){var fillBuf=new Buffer(_fill,enc);var flen=fillBuf.length;var i=-1;while(++iMAX_LEN){throw new RangeError("size is too large")}return new Buffer(size)};exports.from=function from(value,encodingOrOffset,length){if(typeof Buffer.from==="function"&&(!global.Uint8Array||Uint8Array.from!==Buffer.from)){return Buffer.from(value,encodingOrOffset,length)}if(typeof value==="number"){throw new TypeError('"value" argument must not be a number')}if(typeof value==="string"){return new Buffer(value,encodingOrOffset)}if(typeof ArrayBuffer!=="undefined"&&value instanceof ArrayBuffer){var offset=encodingOrOffset;if(arguments.length===1){return new Buffer(value)}if(typeof offset==="undefined"){offset=0}var len=length;if(typeof len==="undefined"){len=value.byteLength-offset}if(offset>=value.byteLength){throw new RangeError("'offset' is out of bounds")}if(len>value.byteLength-offset){throw new RangeError("'length' is out of bounds")}return new Buffer(value.slice(offset,offset+len))}if(Buffer.isBuffer(value)){var out=new Buffer(value.length);value.copy(out,0,0,value.length);return out}if(value){if(Array.isArray(value)||typeof ArrayBuffer!=="undefined"&&value.buffer instanceof ArrayBuffer||"length"in value){return new Buffer(value)}if(value.type==="Buffer"&&Array.isArray(value.data)){return new Buffer(value.data)}}throw new TypeError("First argument must be a string, Buffer, "+"ArrayBuffer, Array, or array-like object.")};exports.allocUnsafeSlow=function allocUnsafeSlow(size){if(typeof Buffer.allocUnsafeSlow==="function"){return Buffer.allocUnsafeSlow(size)}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>=MAX_LEN){throw new RangeError("size is too large")}return new SlowBuffer(size)}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{buffer:13}],33:[function(require,module,exports){(function(Buffer){function isArray(arg){if(Array.isArray){return Array.isArray(arg)}return objectToString(arg)==="[object Array]"; -}exports.isArray=isArray;function isBoolean(arg){return typeof arg==="boolean"}exports.isBoolean=isBoolean;function isNull(arg){return arg===null}exports.isNull=isNull;function isNullOrUndefined(arg){return arg==null}exports.isNullOrUndefined=isNullOrUndefined;function isNumber(arg){return typeof arg==="number"}exports.isNumber=isNumber;function isString(arg){return typeof arg==="string"}exports.isString=isString;function isSymbol(arg){return typeof arg==="symbol"}exports.isSymbol=isSymbol;function isUndefined(arg){return arg===void 0}exports.isUndefined=isUndefined;function isRegExp(re){return objectToString(re)==="[object RegExp]"}exports.isRegExp=isRegExp;function isObject(arg){return typeof arg==="object"&&arg!==null}exports.isObject=isObject;function isDate(d){return objectToString(d)==="[object Date]"}exports.isDate=isDate;function isError(e){return objectToString(e)==="[object Error]"||e instanceof 
Error}exports.isError=isError;function isFunction(arg){return typeof arg==="function"}exports.isFunction=isFunction;function isPrimitive(arg){return arg===null||typeof arg==="boolean"||typeof arg==="number"||typeof arg==="string"||typeof arg==="symbol"||typeof arg==="undefined"}exports.isPrimitive=isPrimitive;exports.isBuffer=Buffer.isBuffer;function objectToString(o){return Object.prototype.toString.call(o)}}).call(this,{isBuffer:require("../../../../insert-module-globals/node_modules/is-buffer/index.js")})},{"../../../../insert-module-globals/node_modules/is-buffer/index.js":20}],34:[function(require,module,exports){arguments[4][16][0].apply(exports,arguments)},{dup:16}],35:[function(require,module,exports){(function(process){"use strict";if(!process.version||process.version.indexOf("v0.")===0||process.version.indexOf("v1.")===0&&process.version.indexOf("v1.8.")!==0){module.exports=nextTick}else{module.exports=process.nextTick}function nextTick(fn,arg1,arg2,arg3){if(typeof fn!=="function"){throw new TypeError('"callback" argument must be a function')}var len=arguments.length;var args,i;switch(len){case 0:case 1:return process.nextTick(fn);case 2:return process.nextTick(function afterTickOne(){fn.call(null,arg1)});case 3:return process.nextTick(function afterTickTwo(){fn.call(null,arg1,arg2)});case 4:return process.nextTick(function afterTickThree(){fn.call(null,arg1,arg2,arg3)});default:args=new Array(len-1);i=0;while(iself._pos){var newData=response.substr(self._pos);if(self._charset==="x-user-defined"){var buffer=new Buffer(newData.length);for(var i=0;iself._pos){self.push(new Buffer(new Uint8Array(reader.result.slice(self._pos))));self._pos=reader.result.byteLength}};reader.onload=function(){self.push(null)};reader.readAsArrayBuffer(response);break}if(self._xhr.readyState===rStates.DONE&&self._mode!=="ms-stream"){self.push(null)}}}).call(this,require("_process"),typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{},require("buffer").Buffer)},{"./capability":39,_process:21,buffer:13,inherits:19,"readable-stream":37}],42:[function(require,module,exports){module.exports={100:"Continue",101:"Switching Protocols",102:"Processing",200:"OK",201:"Created",202:"Accepted",203:"Non-Authoritative Information",204:"No Content",205:"Reset Content",206:"Partial Content",207:"Multi-Status",208:"Already Reported",226:"IM Used",300:"Multiple Choices",301:"Moved Permanently",302:"Found",303:"See Other",304:"Not Modified",305:"Use Proxy",307:"Temporary Redirect",308:"Permanent Redirect",400:"Bad Request",401:"Unauthorized",402:"Payment Required",403:"Forbidden",404:"Not Found",405:"Method Not Allowed",406:"Not Acceptable",407:"Proxy Authentication Required",408:"Request Timeout",409:"Conflict",410:"Gone",411:"Length Required",412:"Precondition Failed",413:"Payload Too Large",414:"URI Too Long",415:"Unsupported Media Type",416:"Range Not Satisfiable",417:"Expectation Failed",418:"I'm a teapot",421:"Misdirected Request",422:"Unprocessable Entity",423:"Locked",424:"Failed Dependency",425:"Unordered Collection",426:"Upgrade Required",428:"Precondition Required",429:"Too Many Requests",431:"Request Header Fields Too Large",500:"Internal Server Error",501:"Not Implemented",502:"Bad Gateway",503:"Service Unavailable",504:"Gateway Timeout",505:"HTTP Version Not Supported",506:"Variant Also Negotiates",507:"Insufficient Storage",508:"Loop Detected",509:"Bandwidth Limit Exceeded",510:"Not Extended",511:"Network Authentication 
Required"}},{}],43:[function(require,module,exports){var Buffer=require("buffer").Buffer;module.exports=function(buf){if(buf instanceof Uint8Array){if(buf.byteOffset===0&&buf.byteLength===buf.buffer.byteLength){return buf.buffer}else if(typeof buf.buffer.slice==="function"){return buf.buffer.slice(buf.byteOffset,buf.byteOffset+buf.byteLength)}}if(Buffer.isBuffer(buf)){var arrayCopy=new Uint8Array(buf.length);var len=buf.length;for(var i=0;i=this.charLength-this.charReceived?this.charLength-this.charReceived:buffer.length;buffer.copy(this.charBuffer,this.charReceived,0,available);this.charReceived+=available;if(this.charReceived=55296&&charCode<=56319){this.charLength+=this.surrogateSize;charStr="";continue}this.charReceived=this.charLength=0;if(buffer.length===0){return charStr}break}this.detectIncompleteChar(buffer);var end=buffer.length;if(this.charLength){buffer.copy(this.charBuffer,0,buffer.length-this.charReceived,end);end-=this.charReceived}charStr+=buffer.toString(this.encoding,0,end);var end=charStr.length-1;var charCode=charStr.charCodeAt(end);if(charCode>=55296&&charCode<=56319){var size=this.surrogateSize;this.charLength+=size;this.charReceived+=size;this.charBuffer.copy(this.charBuffer,size,0,size);buffer.copy(this.charBuffer,0,0,size);return charStr.substring(0,end)}return charStr};StringDecoder.prototype.detectIncompleteChar=function(buffer){var i=buffer.length>=3?3:buffer.length;for(;i>0;i--){var c=buffer[buffer.length-i];if(i==1&&c>>5==6){this.charLength=2;break}if(i<=2&&c>>4==14){this.charLength=3;break}if(i<=3&&c>>3==30){this.charLength=4;break}}this.charReceived=i};StringDecoder.prototype.end=function(buffer){var res="";if(buffer&&buffer.length)res=this.write(buffer);if(this.charReceived){var cr=this.charReceived;var buf=this.charBuffer;var enc=this.encoding;res+=buf.slice(0,cr).toString(enc)}return res};function passThroughWrite(buffer){return buffer.toString(this.encoding)}function utf16DetectIncompleteChar(buffer){this.charReceived=buffer.length%2;this.charLength=this.charReceived?2:0}function base64DetectIncompleteChar(buffer){this.charReceived=buffer.length%3;this.charLength=this.charReceived?3:0}},{buffer:13}],45:[function(require,module,exports){"use strict";var punycode=require("punycode");var util=require("./util");exports.parse=urlParse;exports.resolve=urlResolve;exports.resolveObject=urlResolveObject;exports.format=urlFormat;exports.Url=Url;function Url(){this.protocol=null;this.slashes=null;this.auth=null;this.host=null;this.port=null;this.hostname=null;this.hash=null;this.search=null;this.query=null;this.pathname=null;this.path=null;this.href=null}var protocolPattern=/^([a-z0-9.+-]+:)/i,portPattern=/:[0-9]*$/,simplePathPattern=/^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/,delims=["<",">",'"',"`"," ","\r","\n","\t"],unwise=["{","}","|","\\","^","`"].concat(delims),autoEscape=["'"].concat(unwise),nonHostChars=["%","/","?",";","#"].concat(autoEscape),hostEndingChars=["/","?","#"],hostnameMaxLen=255,hostnamePartPattern=/^[+a-z0-9A-Z_-]{0,63}$/,hostnamePartStart=/^([+a-z0-9A-Z_-]{0,63})(.*)$/,unsafeProtocol={javascript:true,"javascript:":true},hostlessProtocol={javascript:true,"javascript:":true},slashedProtocol={http:true,https:true,ftp:true,gopher:true,file:true,"http:":true,"https:":true,"ftp:":true,"gopher:":true,"file:":true},querystring=require("querystring");function urlParse(url,parseQueryString,slashesDenoteHost){if(url&&util.isObject(url)&&url instanceof Url)return url;var u=new Url;u.parse(url,parseQueryString,slashesDenoteHost);return 
u}Url.prototype.parse=function(url,parseQueryString,slashesDenoteHost){if(!util.isString(url)){throw new TypeError("Parameter 'url' must be a string, not "+typeof url)}var queryIndex=url.indexOf("?"),splitter=queryIndex!==-1&&queryIndex127){newpart+="x"}else{newpart+=part[j]}}if(!newpart.match(hostnamePartPattern)){var validParts=hostparts.slice(0,i);var notHost=hostparts.slice(i+1);var bit=part.match(hostnamePartStart);if(bit){validParts.push(bit[1]);notHost.unshift(bit[2])}if(notHost.length){rest="/"+notHost.join(".")+rest}this.hostname=validParts.join(".");break}}}}if(this.hostname.length>hostnameMaxLen){this.hostname=""}else{this.hostname=this.hostname.toLowerCase()}if(!ipv6Hostname){this.hostname=punycode.toASCII(this.hostname)}var p=this.port?":"+this.port:"";var h=this.hostname||"";this.host=h+p;this.href+=this.host;if(ipv6Hostname){this.hostname=this.hostname.substr(1,this.hostname.length-2);if(rest[0]!=="/"){rest="/"+rest}}}if(!unsafeProtocol[lowerProto]){for(var i=0,l=autoEscape.length;i0?result.host.split("@"):false;if(authInHost){result.auth=authInHost.shift();result.host=result.hostname=authInHost.shift()}}result.search=relative.search;result.query=relative.query;if(!util.isNull(result.pathname)||!util.isNull(result.search)){result.path=(result.pathname?result.pathname:"")+(result.search?result.search:"")}result.href=result.format();return result}if(!srcPath.length){result.pathname=null;if(result.search){result.path="/"+result.search}else{result.path=null}result.href=result.format();return result}var last=srcPath.slice(-1)[0];var hasTrailingSlash=(result.host||relative.host||srcPath.length>1)&&(last==="."||last==="..")||last==="";var up=0;for(var i=srcPath.length;i>=0;i--){last=srcPath[i];if(last==="."){srcPath.splice(i,1)}else if(last===".."){srcPath.splice(i,1);up++}else if(up){srcPath.splice(i,1);up--}}if(!mustEndAbs&&!removeAllDots){for(;up--;up){srcPath.unshift("..")}}if(mustEndAbs&&srcPath[0]!==""&&(!srcPath[0]||srcPath[0].charAt(0)!=="/")){srcPath.unshift("")}if(hasTrailingSlash&&srcPath.join("/").substr(-1)!=="/"){srcPath.push("")}var isAbsolute=srcPath[0]===""||srcPath[0]&&srcPath[0].charAt(0)==="/";if(psychotic){result.hostname=result.host=isAbsolute?"":srcPath.length?srcPath.shift():"";var authInHost=result.host&&result.host.indexOf("@")>0?result.host.split("@"):false;if(authInHost){result.auth=authInHost.shift();result.host=result.hostname=authInHost.shift()}}mustEndAbs=mustEndAbs||result.host&&srcPath.length;if(mustEndAbs&&!isAbsolute){srcPath.unshift("")}if(!srcPath.length){result.pathname=null;result.path=null}else{result.pathname=srcPath.join("/")}if(!util.isNull(result.pathname)||!util.isNull(result.search)){result.path=(result.pathname?result.pathname:"")+(result.search?result.search:"")}result.auth=relative.auth||result.auth;result.slashes=result.slashes||relative.slashes; -result.href=result.format();return result};Url.prototype.parseHost=function(){var host=this.host;var port=portPattern.exec(host);if(port){port=port[0];if(port!==":"){this.port=port.substr(1)}host=host.substr(0,host.length-port.length)}if(host)this.hostname=host}},{"./util":46,punycode:22,querystring:25}],46:[function(require,module,exports){"use strict";module.exports={isString:function(arg){return typeof arg==="string"},isObject:function(arg){return typeof arg==="object"&&arg!==null},isNull:function(arg){return arg===null},isNullOrUndefined:function(arg){return arg==null}}},{}],47:[function(require,module,exports){module.exports=function isBuffer(arg){return arg&&typeof 
arg==="object"&&typeof arg.copy==="function"&&typeof arg.fill==="function"&&typeof arg.readUInt8==="function"}},{}],48:[function(require,module,exports){(function(process,global){var formatRegExp=/%[sdj%]/g;exports.format=function(f){if(!isString(f)){var objects=[];for(var i=0;i=len)return x;switch(x){case"%s":return String(args[i++]);case"%d":return Number(args[i++]);case"%j":try{return JSON.stringify(args[i++])}catch(_){return"[Circular]"}default:return x}});for(var x=args[i];i=3)ctx.depth=arguments[2];if(arguments.length>=4)ctx.colors=arguments[3];if(isBoolean(opts)){ctx.showHidden=opts}else if(opts){exports._extend(ctx,opts)}if(isUndefined(ctx.showHidden))ctx.showHidden=false;if(isUndefined(ctx.depth))ctx.depth=2;if(isUndefined(ctx.colors))ctx.colors=false;if(isUndefined(ctx.customInspect))ctx.customInspect=true;if(ctx.colors)ctx.stylize=stylizeWithColor;return formatValue(ctx,obj,ctx.depth)}exports.inspect=inspect;inspect.colors={bold:[1,22],italic:[3,23],underline:[4,24],inverse:[7,27],white:[37,39],grey:[90,39],black:[30,39],blue:[34,39],cyan:[36,39],green:[32,39],magenta:[35,39],red:[31,39],yellow:[33,39]};inspect.styles={special:"cyan",number:"yellow",boolean:"yellow",undefined:"grey",null:"bold",string:"green",date:"magenta",regexp:"red"};function stylizeWithColor(str,styleType){var style=inspect.styles[styleType];if(style){return"["+inspect.colors[style][0]+"m"+str+"["+inspect.colors[style][1]+"m"}else{return str}}function stylizeNoColor(str,styleType){return str}function arrayToHash(array){var hash={};array.forEach(function(val,idx){hash[val]=true});return hash}function formatValue(ctx,value,recurseTimes){if(ctx.customInspect&&value&&isFunction(value.inspect)&&value.inspect!==exports.inspect&&!(value.constructor&&value.constructor.prototype===value)){var ret=value.inspect(recurseTimes,ctx);if(!isString(ret)){ret=formatValue(ctx,ret,recurseTimes)}return ret}var primitive=formatPrimitive(ctx,value);if(primitive){return primitive}var keys=Object.keys(value);var visibleKeys=arrayToHash(keys);if(ctx.showHidden){keys=Object.getOwnPropertyNames(value)}if(isError(value)&&(keys.indexOf("message")>=0||keys.indexOf("description")>=0)){return formatError(value)}if(keys.length===0){if(isFunction(value)){var name=value.name?": "+value.name:"";return ctx.stylize("[Function"+name+"]","special")}if(isRegExp(value)){return ctx.stylize(RegExp.prototype.toString.call(value),"regexp")}if(isDate(value)){return ctx.stylize(Date.prototype.toString.call(value),"date")}if(isError(value)){return formatError(value)}}var base="",array=false,braces=["{","}"];if(isArray(value)){array=true;braces=["[","]"]}if(isFunction(value)){var n=value.name?": "+value.name:"";base=" [Function"+n+"]"}if(isRegExp(value)){base=" "+RegExp.prototype.toString.call(value)}if(isDate(value)){base=" "+Date.prototype.toUTCString.call(value)}if(isError(value)){base=" "+formatError(value)}if(keys.length===0&&(!array||value.length==0)){return braces[0]+base+braces[1]}if(recurseTimes<0){if(isRegExp(value)){return ctx.stylize(RegExp.prototype.toString.call(value),"regexp")}else{return ctx.stylize("[Object]","special")}}ctx.seen.push(value);var output;if(array){output=formatArray(ctx,value,recurseTimes,visibleKeys,keys)}else{output=keys.map(function(key){return formatProperty(ctx,value,recurseTimes,visibleKeys,key,array)})}ctx.seen.pop();return reduceToSingleString(output,base,braces)}function formatPrimitive(ctx,value){if(isUndefined(value))return ctx.stylize("undefined","undefined");if(isString(value)){var 
simple="'"+JSON.stringify(value).replace(/^"|"$/g,"").replace(/'/g,"\\'").replace(/\\"/g,'"')+"'";return ctx.stylize(simple,"string")}if(isNumber(value))return ctx.stylize(""+value,"number");if(isBoolean(value))return ctx.stylize(""+value,"boolean");if(isNull(value))return ctx.stylize("null","null")}function formatError(value){return"["+Error.prototype.toString.call(value)+"]"}function formatArray(ctx,value,recurseTimes,visibleKeys,keys){var output=[];for(var i=0,l=value.length;i-1){if(array){str=str.split("\n").map(function(line){return" "+line}).join("\n").substr(2)}else{str="\n"+str.split("\n").map(function(line){return" "+line}).join("\n")}}}else{str=ctx.stylize("[Circular]","special")}}if(isUndefined(name)){if(array&&key.match(/^\d+$/)){return str}name=JSON.stringify(""+key);if(name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)){name=name.substr(1,name.length-2);name=ctx.stylize(name,"name")}else{name=name.replace(/'/g,"\\'").replace(/\\"/g,'"').replace(/(^"|"$)/g,"'");name=ctx.stylize(name,"string")}}return name+": "+str}function reduceToSingleString(output,base,braces){var numLinesEst=0;var length=output.reduce(function(prev,cur){numLinesEst++;if(cur.indexOf("\n")>=0)numLinesEst++;return prev+cur.replace(/\u001b\[\d\d?m/g,"").length+1},0);if(length>60){return braces[0]+(base===""?"":base+"\n ")+" "+output.join(",\n ")+" "+braces[1]}return braces[0]+base+" "+output.join(", ")+" "+braces[1]}function isArray(ar){return Array.isArray(ar)}exports.isArray=isArray;function isBoolean(arg){return typeof arg==="boolean"}exports.isBoolean=isBoolean;function isNull(arg){return arg===null}exports.isNull=isNull;function isNullOrUndefined(arg){return arg==null}exports.isNullOrUndefined=isNullOrUndefined;function isNumber(arg){return typeof arg==="number"}exports.isNumber=isNumber;function isString(arg){return typeof arg==="string"}exports.isString=isString;function isSymbol(arg){return typeof arg==="symbol"}exports.isSymbol=isSymbol;function isUndefined(arg){return arg===void 0}exports.isUndefined=isUndefined;function isRegExp(re){return isObject(re)&&objectToString(re)==="[object RegExp]"}exports.isRegExp=isRegExp;function isObject(arg){return typeof arg==="object"&&arg!==null}exports.isObject=isObject;function isDate(d){return isObject(d)&&objectToString(d)==="[object Date]"}exports.isDate=isDate;function isError(e){return isObject(e)&&(objectToString(e)==="[object Error]"||e instanceof Error)}exports.isError=isError;function isFunction(arg){return typeof arg==="function"}exports.isFunction=isFunction;function isPrimitive(arg){return arg===null||typeof arg==="boolean"||typeof arg==="number"||typeof arg==="string"||typeof arg==="symbol"||typeof arg==="undefined"}exports.isPrimitive=isPrimitive;exports.isBuffer=require("./support/isBuffer");function objectToString(o){return Object.prototype.toString.call(o)}function pad(n){return n<10?"0"+n.toString(10):n.toString(10)}var months=["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"];function timestamp(){var d=new Date;var time=[pad(d.getHours()),pad(d.getMinutes()),pad(d.getSeconds())].join(":");return[d.getDate(),months[d.getMonth()],time].join(" ")}exports.log=function(){console.log("%s - %s",timestamp(),exports.format.apply(exports,arguments))};exports.inherits=require("inherits");exports._extend=function(origin,add){if(!add||!isObject(add))return origin;var keys=Object.keys(add);var i=keys.length;while(i--){origin[keys[i]]=add[keys[i]]}return origin};function hasOwnProperty(obj,prop){return 
[diff header and opening lines of the deleted pico ``server`` module were also lost in extraction; the surviving tail of that deleted hunk follows]
-    if len(sys.argv) > 1:
-        module_name = sys.argv[1]
-        module_name = module_name.split('.py')[0]
-        if ':' not in module_name:
-            module_name += ':app'
-        app = import_string(module_name)
-
-    html_dirs = os.getenv("PICO_HTML_DIR") or None
-    if html_dirs:
-        html_dirs = {"/{}".format(os.path.basename(p)): p.replace("\\", "/")
-                     for p in html_dirs.split(os.pathsep)}
-
-    log.info("> html_dirs: {}".format(html_dirs))
-    ip = os.getenv("PICO_IP", '127.0.0.1')
-
-    if ip and ip.startswith('http'):
-        ip = ip.replace("http://", "")
-
-    kwargs = {"ip": ip,
-              "port": int(os.getenv("PICO_PORT", 4242)),
-              "use_debugger": os.getenv("PICO_DEBUG", True),
-              "use_reloader": os.getenv("PICO_RELOADER", True),
-              "threaded": os.getenv("PICO_THREADED", True),
-              "html_dir": html_dirs}
-
-    log.info("Pico.server > settings: "
-             "\n\thtml_dir: {html_dir}"
-             "\n\tip: {ip}"
-             "\n\tport: {port}"
-             "\n\tuse_debugger: {use_debugger}"
-             "\n\tuse_reloader: {use_reloader}"
-             "\n\tthreaded: {threaded}".format(**kwargs))
-
-    run_app(app, **kwargs)
diff --git a/pype/vendor/pico/wrappers.py b/pype/vendor/pico/wrappers.py
deleted file mode 100644
index 5cb74fe970..0000000000
--- a/pype/vendor/pico/wrappers.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from copy import deepcopy
-from werkzeug.wrappers import Response
-from werkzeug.exceptions import HTTPException
-
-from . import pragmaticjson as json
-
-try:
-    unicode
-except NameError:
-    unicode = str
-
-
-class JsonResponse(Response):
-    def __init__(self, result=None, json_string=None, *args, **kwargs):
-        if json_string:
-            kwargs['response'] = json_string
-        else:
-            kwargs['response'] = json.dumps(result)
-        kwargs['content_type'] = u'application/json'
-        super(JsonResponse, self).__init__(*args, **kwargs)
-
-    def to_jsonp(self, callback):
-        r = deepcopy(self)
-        json_string = r.response[0].decode()
-        content = u'{callback}({json});'.format(callback=callback, json=json_string)
-        r.set_data(content)
-        r.content_type = u'text/javascript'
-        return r
-
-
-class JsonErrorResponse(JsonResponse):
-    def __init__(self, exception=None, **kwargs):
-        result = {}
-        if exception:
-            if isinstance(exception, HTTPException):
-                result = {
-                    'name': exception.name,
-                    'code': exception.code,
-                    'message': exception.description,
-                }
-            else:
-                result = {
-                    'name': type(exception).__name__,
-                    'code': 500,
-                    'message': unicode(exception),
-                }
-        if hasattr(exception, 'to_dict'):
-            data = exception.to_dict()
-        else:
-            data = result
-        data.update(kwargs)
-        result['code'] = result.get('code', 500)
-        result['name'] = result.get('name', 'Internal Server Error')
-        super(JsonErrorResponse, self).__init__(data)
-        self.status = '%s %s' % (result['code'], result['name'])
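The deleted wrappers above are pico's werkzeug-based JSON response helpers. A hypothetical caller, not part of this diff, might use them as follows; a sketch only, assuming the vendored import path, with ``handleResult`` as an invented JSONP callback name::

    from werkzeug.exceptions import NotFound

    from pype.vendor.pico.wrappers import JsonResponse, JsonErrorResponse

    # Serialize a result dict as an application/json response
    ok = JsonResponse({'status': 'ok'})

    # Wrap the same payload for JSONP consumers; the body becomes
    # handleResult({"status": "ok"}); served as text/javascript
    jsonp = ok.to_jsonp('handleResult')

    # Turn a werkzeug HTTPException into a JSON error body with a
    # matching "404 Not Found" status line
    err = JsonErrorResponse(NotFound())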
diff --git a/pype/vendor/pynput-1.4.dist-info/DESCRIPTION.rst b/pype/vendor/pynput-1.4.dist-info/DESCRIPTION.rst
deleted file mode 100644
index db5e1ec35c..0000000000
--- a/pype/vendor/pynput-1.4.dist-info/DESCRIPTION.rst
+++ /dev/null
@@ -1,451 +0,0 @@
-pynput
-======
-
-This library allows you to control and monitor input devices.
-
-Currently, mouse and keyboard input and monitoring are supported.
-
-See `here `_ for the full
-documentation.
-
-
-Controlling the mouse
----------------------
-
-Use ``pynput.mouse.Controller`` like this::
-
-    from pynput.mouse import Button, Controller
-
-    mouse = Controller()
-
-    # Read pointer position
-    print('The current pointer position is {0}'.format(
-        mouse.position))
-
-    # Set pointer position
-    mouse.position = (10, 20)
-    print('Now we have moved it to {0}'.format(
-        mouse.position))
-
-    # Move pointer relative to current position
-    mouse.move(5, -5)
-
-    # Press and release
-    mouse.press(Button.left)
-    mouse.release(Button.left)
-
-    # Double click; this is different from pressing and releasing
-    # twice on Mac OSX
-    mouse.click(Button.left, 2)
-
-    # Scroll two steps down
-    mouse.scroll(0, 2)
-
-
-Monitoring the mouse
---------------------
-
-Use ``pynput.mouse.Listener`` like this::
-
-    from pynput import mouse
-
-    def on_move(x, y):
-        print('Pointer moved to {0}'.format(
-            (x, y)))
-
-    def on_click(x, y, button, pressed):
-        print('{0} at {1}'.format(
-            'Pressed' if pressed else 'Released',
-            (x, y)))
-        if not pressed:
-            # Stop listener
-            return False
-
-    def on_scroll(x, y, dx, dy):
-        print('Scrolled {0} at {1}'.format(
-            'down' if dy < 0 else 'up',
-            (x, y)))
-
-    # Collect events until released
-    with mouse.Listener(
-            on_move=on_move,
-            on_click=on_click,
-            on_scroll=on_scroll) as listener:
-        listener.join()
-
-A mouse listener is a ``threading.Thread``, and all callbacks will be invoked
-from the thread.
-
-Call ``pynput.mouse.Listener.stop`` from anywhere, raise ``StopException`` or
-return ``False`` from a callback to stop the listener.
-
-
-The mouse listener thread
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The listener callbacks are invoked directly from an operating thread on some
-platforms, notably *Windows*.
-
-This means that long running procedures and blocking operations should not be
-invoked from the callback, as this risks freezing input for all processes.
-
-A possible workaround is to just dispatch incoming messages to a queue, and let
-a separate thread handle them.
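That queue hand-off can be sketched with just the standard library (Python 3 names) and the listener API shown above; the worker body and the ``None`` shutdown sentinel here are illustrative choices, not pynput API::

    import queue
    import threading

    from pynput import mouse

    events = queue.Queue()

    def on_click(x, y, button, pressed):
        # Keep the callback cheap: record the event and return immediately.
        events.put((x, y, button, pressed))

    def worker():
        while True:
            event = events.get()
            if event is None:  # sentinel: stop the worker
                break
            # Long-running or blocking handling belongs here,
            # off the listener thread.
            print('handled {0}'.format(event))

    threading.Thread(target=worker, daemon=True).start()

    with mouse.Listener(on_click=on_click) as listener:
        listener.join()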
- -To be notified about callback errors, call ``Thread.join`` on the listener -instance:: - - from pynput import mouse - - class MyException(Exception): pass - - def on_click(x, y, button, pressed): - if button == mouse.Button.left: - raise MyException(button) - - # Collect events until released - with mouse.Listener( - on_click=on_click) as listener: - try: - listener.join() - except MyException as e: - print('{0} was clicked'.format(e.args[0])) - - -Controlling the keyboard ------------------------- - -Use ``pynput.keyboard.Controller`` like this:: - - from pynput.keyboard import Key, Controller - - keyboard = Controller() - - # Press and release space - keyboard.press(Key.space) - keyboard.release(Key.space) - - # Type a lower case A; this will work even if no key on the - # physical keyboard is labelled 'A' - keyboard.press('a') - keyboard.release('a') - - # Type two upper case As - keyboard.press('A') - keyboard.release('A') - with keyboard.pressed(Key.shift): - keyboard.press('a') - keyboard.release('a') - - # Type 'Hello World' using the shortcut type method - keyboard.type('Hello World') - - -Monitoring the keyboard ------------------------ - -Use ``pynput.keyboard.Listener`` like this:: - - from pynput import keyboard - - def on_press(key): - try: - print('alphanumeric key {0} pressed'.format( - key.char)) - except AttributeError: - print('special key {0} pressed'.format( - key)) - - def on_release(key): - print('{0} released'.format( - key)) - if key == keyboard.Key.esc: - # Stop listener - return False - - # Collect events until released - with keyboard.Listener( - on_press=on_press, - on_release=on_release) as listener: - listener.join() - -A keyboard listener is a ``threading.Thread``, and all callbacks will be -invoked from the thread. - -Call ``pynput.keyboard.Listener.stop`` from anywhere, raise ``StopException`` -or return ``False`` from a callback to stop the listener. - -The ``key`` parameter passed to callbacks is a ``pynput.keyboard.Key``, for -special keys, a ``pynput.keyboard.KeyCode`` for normal alphanumeric keys, or -just ``None`` for unknown keys. - - -The keyboard listener thread -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The listener callbacks are invoked directly from an operating thread on some -platforms, notably *Windows*. - -This means that long running procedures and blocking operations should not be -invoked from the callback, as this risks freezing input for all processes. - -A possible workaround is to just dispatch incoming messages to a queue, and let -a separate thread handle them. - - -Handling keyboard listener errors -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If a callback handler raises an exception, the listener will be stopped. Since -callbacks run in a dedicated thread, the exceptions will not automatically be -reraised. - -To be notified about callback errors, call ``Thread.join`` on the listener -instance:: - - from pynput import keyboard - - class MyException(Exception): pass - - def on_press(key): - if key == keyboard.Key.esc: - raise MyException(key) - - # Collect events until released - with keyboard.Listener( - on_press=on_press) as listener: - try: - listener.join() - except MyException as e: - print('{0} was pressed'.format(e.args[0])) - - -Release Notes -============= - -v1.4 - Injected event flags end event suppression -------------------------------------------------- -* Added possibility to fully suppress events when listening. -* Added support for typing some control characters. -* Added support for mouse drag events on *OSX*. 
Thanks to *jungledrum*! -* Include the key code in keyboard listener events. -* Correctly handle the numeric key pad on *Xorg* with *num lock* active. - Thanks to *TheoRet*! -* Corrected handling of current thread keyboard layout on *Windows*. Thanks to - *Schmettaling*! -* Corrected stopping of listeners on *Xorg*. -* Corrected import of ``Xlib.keysymdef.xkb`` on *Xorg*. Thanks to *Glandos*! - - -v1.3.10 - Do not crash under *Xephyr* -------------------------------------- -* Do not crash when ``Xlib.display.Display.get_input_focus`` returns an - integer, as it may when running under *Xephyr*. Thanks to *Eli Skeggs*! - - -v1.3.9 - Correctly handle the letter *A* on *OSX* -------------------------------------------------- -* Corrected check for virtual key code when generating keyboard events on - *OSX*. This fixes an issue where pressing *A* with *shift* explicitly pressed - would still type a miniscule letter. - - -v1.3.8 - Do not crash on some keyboard layouts on *OSX* -------------------------------------------------------- -* Fall back on a different method to retrieve the keyboard layout on *OSX*. - This helps for some keyboard layouts, such as *Chinese*. Thanks to - *haoflynet*! - - -v1.3.7 - *Xorg* corrections ---------------------------- -* Include mouse buttons up to *30* for *Xorg*. - - -v1.3.6 - *win32* corrections ----------------------------- -* Corrected double delivery of fake keyboard events on *Windows*. -* Corrected handling of synthetic unicode keys on *Windows*. - - -v1.3.5 - Corrected dependencies again -------------------------------------- -* Reverted changes in *1.3.3*. -* Corrected platform specifier for *Python 2* on *Linux*. - - -v1.3.4 - *Xorg* corrections ---------------------------- -* Corrected bounds check for values on *Xorg*. - - -v1.3.3 - Make dependencies non-optional ---------------------------------------- -* Made platform depdendencies non-optional. - - -v1.3.2 - Fix for button click on Mac ------------------------------------- -* Corrected regression from previous release where button clicks would - crash the *Mac* mouse listener. - - -v1.3.1 - Fixes for unknown buttons on Linux -------------------------------------------- -* Fall back on `Button.unknown` for unknown mouse buttons in *Xorg* mouse - listener. - - -v1.3 - Platform specific features ---------------------------------- -* Added ability to stop event propagation on *Windows*. This will prevent - events from reaching other applications. -* Added ability to ignore events on *Windows*. This is a workaround for systems - where the keyboard monitor interferes with normal keyboard events. -* Added ability to modify events on *OSX*. This allows intercepting and - altering input events before they reach other applications. -* Corrected crash on *OSX* when some types of third party input sources are - installed. - - -v1.2 - Improved error handling ------------------------------- -* Allow catching exceptions thrown from listener callbacks. This changes the - API, as joining a listener now potentially raises unhandled exceptions, - and unhandled exceptions will stop listeners. -* Added support for the numeric keypad on *Linux*. -* Improved documentation. -* Thanks to *jollysean* and *gilleswijnker* for their input! - - -v1.1.7 - Handle middle button on Windows ----------------------------------------- -* Listen for and dispatch middle button mouse clicks on *Windows*. 
- - -v1.1.6 - Corrected context manager for pressing keys ----------------------------------------------------- -* Corrected bug in ``pynput.keyboard.Controller.pressed`` which caused it to - never release the key. Many thanks to Toby Southwell! - - -v1.1.5 - Corrected modifier key combinations on Linux ------------------------------------------------------ -* Corrected handling of modifier keys to allow them to be composable on - *Linux*. - - -v1.1.4 - Small bugfixes ------------------------ -* Corrected error generation when ``GetKeyboardState`` fails. -* Make sure to apply shift state to borrowed keys on *X*. -* Use *pylint*. - - -v1.1.3 - Changed Xlib backend library -------------------------------------- -* Changed *Xlib* library. - - -v1.1.2 - Added missing type for Python 2 ----------------------------------------- -* Added missing ``LPDWORD`` for *Python 2* on *Windows*. - - -v1.1.1 - Fixes for listeners and controllers on Windows -------------------------------------------------------- -* Corrected keyboard listener on *Windows*. Modifier keys and other keys - changing the state of the keyboard are now handled correctly. -* Corrected mouse click and release on *Windows*. -* Corrected code samples. - - -v1.1 - Simplified usage on Linux --------------------------------- -* Propagate import errors raised on Linux to help troubleshoot missing - ``Xlib`` module. -* Declare ``python3-xlib`` as dependency on *Linux* for *Python 3*. - - -v1.0.6 - Universal wheel ------------------------- -* Make sure to build a universal wheel for all python versions. - - -v1.0.5 - Fixes for dragging on OSX ----------------------------------- -* Corrected dragging on *OSX*. -* Added scroll speed constant for *OSX* to correct slow scroll speed. - - -v1.0.4 - Fixes for clicking and scrolling on Windows ----------------------------------------------------- -* Corrected name of mouse input field when sending click and scroll events. - - -v1.0.3 - Fixes for Python 3 on Windows --------------------------------------- -* Corrected use of ``ctypes`` on Windows. - - -v1.0.2 - Fixes for thread identifiers -------------------------------------- -* Use thread identifiers to identify threads, not Thread instances. - - -v1.0.1 - Fixes for Python 3 ---------------------------- -* Corrected bugs which prevented the library from being used on *Python 3*. - - -v1.0 - Stable Release ---------------------- -* Changed license to *LGPL*. -* Corrected minor bugs and inconsistencies. -* Corrected and extended documentation. - - -v0.6 - Keyboard Monitor ------------------------ -* Added support for monitoring the keyboard. -* Corrected wheel packaging. -* Corrected deadlock when stopping a listener in some cases on *X*. -* Corrected key code constants on *Mac OSX*. -* Do not intercept events on *Mac OSX*. - - -v0.5.1 - Do not die on dead keys --------------------------------- -* Corrected handling of dead keys. -* Corrected documentation. - - -v0.5 - Keyboard Modifiers -------------------------- -* Added support for modifiers. - - -v0.4 - Keyboard Controller --------------------------- -* Added keyboard controller. - - -v0.3 - Cleanup ------------------------------------------------------------- -* Moved ``pynput.mouse.Controller.Button`` to top-level. - - -v0.2 - Initial Release ----------------------- -* Support for controlling the mouse on *Linux*, *Mac OSX* and *Windows*. -* Support for monitoring the mouse on *Linux*, *Mac OSX* and *Windows*. 
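The removed description twice advises dispatching incoming messages to a queue and letting a separate thread handle them, but never shows that pattern. Below is a minimal sketch of the workaround, using only the ``pynput`` API quoted above; the ``events`` queue, the ``worker`` function and the *esc*-to-quit behaviour are illustrative choices, not part of the library::

    import queue
    import threading

    from pynput import keyboard

    events = queue.Queue()

    def worker():
        # All real work happens here, off the listener thread, so the
        # callbacks (which may run on an OS thread, notably on Windows)
        # never block input for other processes.
        while True:
            event = events.get()
            if event is None:  # sentinel posted once the listener stops
                break
            print('{0} {1}'.format(*event))

    def on_press(key):
        events.put(('pressed', key))  # enqueue and return immediately

    def on_release(key):
        events.put(('released', key))
        if key == keyboard.Key.esc:
            return False  # stop the listener

    thread = threading.Thread(target=worker)
    thread.start()

    with keyboard.Listener(on_press=on_press, on_release=on_release) as listener:
        listener.join()

    events.put(None)  # unblock and shut down the worker
    thread.join()

Keeping each callback down to a single ``Queue.put`` call is the point of the pattern: ``queue.Queue`` is thread-safe, so no extra locking is needed between the listener thread and the worker.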
- - diff --git a/pype/vendor/pynput-1.4.dist-info/INSTALLER b/pype/vendor/pynput-1.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e38a..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/pype/vendor/pynput-1.4.dist-info/METADATA b/pype/vendor/pynput-1.4.dist-info/METADATA deleted file mode 100644 index 1683c37a5b..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/METADATA +++ /dev/null @@ -1,478 +0,0 @@ -Metadata-Version: 2.0 -Name: pynput -Version: 1.4 -Summary: Monitor and control user input devices -Home-page: https://github.com/moses-palmer/pynput -Author: Moses Palmér -Author-email: moses.palmer@gmail.com -License: LGPLv3 -Description-Content-Type: UNKNOWN -Keywords: control mouse,mouse input,control keyboard,keyboard input -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows :: Windows NT/2000 -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Monitoring -Requires-Dist: six -Requires-Dist: python-xlib (>=0.17); "linux" in sys_platform -Requires-Dist: enum34; python_version == "2.7" -Requires-Dist: pyobjc-framework-Quartz (>=3.0); sys_platform == "darwin" - -pynput -====== - -This library allows you to control and monitor input devices. - -Currently, mouse and keyboard input and monitoring are supported. - -See `here `_ for the full -documentation. - - -Controlling the mouse ---------------------- - -Use ``pynput.mouse.Controller`` like this:: - - from pynput.mouse import Button, Controller - - mouse = Controller() - - # Read pointer position - print('The current pointer position is {0}'.format( - mouse.position)) - - # Set pointer position - mouse.position = (10, 20) - print('Now we have moved it to {0}'.format( - mouse.position)) - - # Move pointer relative to current position - mouse.move(5, -5) - - # Press and release - mouse.press(Button.left) - mouse.release(Button.left) - - # Double click; this is different from pressing and releasing - # twice on Mac OSX - mouse.click(Button.left, 2) - - # Scroll two steps down - mouse.scroll(0, 2) - - -Monitoring the mouse --------------------- - -Use ``pynput.mouse.Listener`` like this:: - - from pynput import mouse - - def on_move(x, y): - print('Pointer moved to {0}'.format( - (x, y))) - - def on_click(x, y, button, pressed): - print('{0} at {1}'.format( - 'Pressed' if pressed else 'Released', - (x, y))) - if not pressed: - # Stop listener - return False - - def on_scroll(x, y, dx, dy): - print('Scrolled {0} at {1}'.format( - 'down' if dy < 0 else 'up', - (x, y))) - - # Collect events until released - with mouse.Listener( - on_move=on_move, - on_click=on_click, - on_scroll=on_scroll) as listener: - listener.join() - -A mouse listener is a ``threading.Thread``, and all callbacks will be invoked -from the thread. - -Call ``pynput.mouse.Listener.stop`` from anywhere, raise ``StopException`` or -return ``False`` from a callback to stop the listener. 
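The docs above say ``pynput.mouse.Listener.stop`` may be called from anywhere, yet only ever demonstrate stopping via a callback return value. A short sketch of an external stop under that reading, again limited to the API quoted here; the five-second ``threading.Timer`` is an arbitrary illustrative trigger::

    import threading

    from pynput import mouse

    def on_move(x, y):
        print('Pointer moved to {0}'.format((x, y)))

    # Start explicitly rather than through a with-block so another
    # thread can hold a reference to the listener and stop it.
    listener = mouse.Listener(on_move=on_move)
    listener.start()
    listener.wait()  # block until the listener is ready

    # Stop from outside any callback after five seconds.
    threading.Timer(5.0, listener.stop).start()

    listener.join()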
- - -The mouse listener thread -~~~~~~~~~~~~~~~~~~~~~~~~~ - -The listener callbacks are invoked directly from an operating thread on some -platforms, notably *Windows*. - -This means that long running procedures and blocking operations should not be -invoked from the callback, as this risks freezing input for all processes. - -A possible workaround is to just dispatch incoming messages to a queue, and let -a separate thread handle them. - - -Handling mouse listener errors -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If a callback handler raises an exception, the listener will be stopped. Since -callbacks run in a dedicated thread, the exceptions will not automatically be -reraised. - -To be notified about callback errors, call ``Thread.join`` on the listener -instance:: - - from pynput import mouse - - class MyException(Exception): pass - - def on_click(x, y, button, pressed): - if button == mouse.Button.left: - raise MyException(button) - - # Collect events until released - with mouse.Listener( - on_click=on_click) as listener: - try: - listener.join() - except MyException as e: - print('{0} was clicked'.format(e.args[0])) - - -Controlling the keyboard ------------------------- - -Use ``pynput.keyboard.Controller`` like this:: - - from pynput.keyboard import Key, Controller - - keyboard = Controller() - - # Press and release space - keyboard.press(Key.space) - keyboard.release(Key.space) - - # Type a lower case A; this will work even if no key on the - # physical keyboard is labelled 'A' - keyboard.press('a') - keyboard.release('a') - - # Type two upper case As - keyboard.press('A') - keyboard.release('A') - with keyboard.pressed(Key.shift): - keyboard.press('a') - keyboard.release('a') - - # Type 'Hello World' using the shortcut type method - keyboard.type('Hello World') - - -Monitoring the keyboard ------------------------ - -Use ``pynput.keyboard.Listener`` like this:: - - from pynput import keyboard - - def on_press(key): - try: - print('alphanumeric key {0} pressed'.format( - key.char)) - except AttributeError: - print('special key {0} pressed'.format( - key)) - - def on_release(key): - print('{0} released'.format( - key)) - if key == keyboard.Key.esc: - # Stop listener - return False - - # Collect events until released - with keyboard.Listener( - on_press=on_press, - on_release=on_release) as listener: - listener.join() - -A keyboard listener is a ``threading.Thread``, and all callbacks will be -invoked from the thread. - -Call ``pynput.keyboard.Listener.stop`` from anywhere, raise ``StopException`` -or return ``False`` from a callback to stop the listener. - -The ``key`` parameter passed to callbacks is a ``pynput.keyboard.Key``, for -special keys, a ``pynput.keyboard.KeyCode`` for normal alphanumeric keys, or -just ``None`` for unknown keys. - - -The keyboard listener thread -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The listener callbacks are invoked directly from an operating thread on some -platforms, notably *Windows*. - -This means that long running procedures and blocking operations should not be -invoked from the callback, as this risks freezing input for all processes. - -A possible workaround is to just dispatch incoming messages to a queue, and let -a separate thread handle them. - - -Handling keyboard listener errors -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If a callback handler raises an exception, the listener will be stopped. Since -callbacks run in a dedicated thread, the exceptions will not automatically be -reraised. 
- -To be notified about callback errors, call ``Thread.join`` on the listener -instance:: - - from pynput import keyboard - - class MyException(Exception): pass - - def on_press(key): - if key == keyboard.Key.esc: - raise MyException(key) - - # Collect events until released - with keyboard.Listener( - on_press=on_press) as listener: - try: - listener.join() - except MyException as e: - print('{0} was pressed'.format(e.args[0])) - - -Release Notes -============= - -v1.4 - Injected event flags end event suppression -------------------------------------------------- -* Added possibility to fully suppress events when listening. -* Added support for typing some control characters. -* Added support for mouse drag events on *OSX*. Thanks to *jungledrum*! -* Include the key code in keyboard listener events. -* Correctly handle the numeric key pad on *Xorg* with *num lock* active. - Thanks to *TheoRet*! -* Corrected handling of current thread keyboard layout on *Windows*. Thanks to - *Schmettaling*! -* Corrected stopping of listeners on *Xorg*. -* Corrected import of ``Xlib.keysymdef.xkb`` on *Xorg*. Thanks to *Glandos*! - - -v1.3.10 - Do not crash under *Xephyr* -------------------------------------- -* Do not crash when ``Xlib.display.Display.get_input_focus`` returns an - integer, as it may when running under *Xephyr*. Thanks to *Eli Skeggs*! - - -v1.3.9 - Correctly handle the letter *A* on *OSX* -------------------------------------------------- -* Corrected check for virtual key code when generating keyboard events on - *OSX*. This fixes an issue where pressing *A* with *shift* explicitly pressed - would still type a miniscule letter. - - -v1.3.8 - Do not crash on some keyboard layouts on *OSX* -------------------------------------------------------- -* Fall back on a different method to retrieve the keyboard layout on *OSX*. - This helps for some keyboard layouts, such as *Chinese*. Thanks to - *haoflynet*! - - -v1.3.7 - *Xorg* corrections ---------------------------- -* Include mouse buttons up to *30* for *Xorg*. - - -v1.3.6 - *win32* corrections ----------------------------- -* Corrected double delivery of fake keyboard events on *Windows*. -* Corrected handling of synthetic unicode keys on *Windows*. - - -v1.3.5 - Corrected dependencies again -------------------------------------- -* Reverted changes in *1.3.3*. -* Corrected platform specifier for *Python 2* on *Linux*. - - -v1.3.4 - *Xorg* corrections ---------------------------- -* Corrected bounds check for values on *Xorg*. - - -v1.3.3 - Make dependencies non-optional ---------------------------------------- -* Made platform depdendencies non-optional. - - -v1.3.2 - Fix for button click on Mac ------------------------------------- -* Corrected regression from previous release where button clicks would - crash the *Mac* mouse listener. - - -v1.3.1 - Fixes for unknown buttons on Linux -------------------------------------------- -* Fall back on `Button.unknown` for unknown mouse buttons in *Xorg* mouse - listener. - - -v1.3 - Platform specific features ---------------------------------- -* Added ability to stop event propagation on *Windows*. This will prevent - events from reaching other applications. -* Added ability to ignore events on *Windows*. This is a workaround for systems - where the keyboard monitor interferes with normal keyboard events. -* Added ability to modify events on *OSX*. This allows intercepting and - altering input events before they reach other applications. 
-* Corrected crash on *OSX* when some types of third party input sources are - installed. - - -v1.2 - Improved error handling ------------------------------- -* Allow catching exceptions thrown from listener callbacks. This changes the - API, as joining a listener now potentially raises unhandled exceptions, - and unhandled exceptions will stop listeners. -* Added support for the numeric keypad on *Linux*. -* Improved documentation. -* Thanks to *jollysean* and *gilleswijnker* for their input! - - -v1.1.7 - Handle middle button on Windows ----------------------------------------- -* Listen for and dispatch middle button mouse clicks on *Windows*. - - -v1.1.6 - Corrected context manager for pressing keys ----------------------------------------------------- -* Corrected bug in ``pynput.keyboard.Controller.pressed`` which caused it to - never release the key. Many thanks to Toby Southwell! - - -v1.1.5 - Corrected modifier key combinations on Linux ------------------------------------------------------ -* Corrected handling of modifier keys to allow them to be composable on - *Linux*. - - -v1.1.4 - Small bugfixes ------------------------ -* Corrected error generation when ``GetKeyboardState`` fails. -* Make sure to apply shift state to borrowed keys on *X*. -* Use *pylint*. - - -v1.1.3 - Changed Xlib backend library -------------------------------------- -* Changed *Xlib* library. - - -v1.1.2 - Added missing type for Python 2 ----------------------------------------- -* Added missing ``LPDWORD`` for *Python 2* on *Windows*. - - -v1.1.1 - Fixes for listeners and controllers on Windows -------------------------------------------------------- -* Corrected keyboard listener on *Windows*. Modifier keys and other keys - changing the state of the keyboard are now handled correctly. -* Corrected mouse click and release on *Windows*. -* Corrected code samples. - - -v1.1 - Simplified usage on Linux --------------------------------- -* Propagate import errors raised on Linux to help troubleshoot missing - ``Xlib`` module. -* Declare ``python3-xlib`` as dependency on *Linux* for *Python 3*. - - -v1.0.6 - Universal wheel ------------------------- -* Make sure to build a universal wheel for all python versions. - - -v1.0.5 - Fixes for dragging on OSX ----------------------------------- -* Corrected dragging on *OSX*. -* Added scroll speed constant for *OSX* to correct slow scroll speed. - - -v1.0.4 - Fixes for clicking and scrolling on Windows ----------------------------------------------------- -* Corrected name of mouse input field when sending click and scroll events. - - -v1.0.3 - Fixes for Python 3 on Windows --------------------------------------- -* Corrected use of ``ctypes`` on Windows. - - -v1.0.2 - Fixes for thread identifiers -------------------------------------- -* Use thread identifiers to identify threads, not Thread instances. - - -v1.0.1 - Fixes for Python 3 ---------------------------- -* Corrected bugs which prevented the library from being used on *Python 3*. - - -v1.0 - Stable Release ---------------------- -* Changed license to *LGPL*. -* Corrected minor bugs and inconsistencies. -* Corrected and extended documentation. - - -v0.6 - Keyboard Monitor ------------------------ -* Added support for monitoring the keyboard. -* Corrected wheel packaging. -* Corrected deadlock when stopping a listener in some cases on *X*. -* Corrected key code constants on *Mac OSX*. -* Do not intercept events on *Mac OSX*. 
- - -v0.5.1 - Do not die on dead keys --------------------------------- -* Corrected handling of dead keys. -* Corrected documentation. - - -v0.5 - Keyboard Modifiers -------------------------- -* Added support for modifiers. - - -v0.4 - Keyboard Controller --------------------------- -* Added keyboard controller. - - -v0.3 - Cleanup ------------------------------------------------------------- -* Moved ``pynput.mouse.Controller.Button`` to top-level. - - -v0.2 - Initial Release ----------------------- -* Support for controlling the mouse on *Linux*, *Mac OSX* and *Windows*. -* Support for monitoring the mouse on *Linux*, *Mac OSX* and *Windows*. - - diff --git a/pype/vendor/pynput-1.4.dist-info/RECORD b/pype/vendor/pynput-1.4.dist-info/RECORD deleted file mode 100644 index 3dea64140b..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/RECORD +++ /dev/null @@ -1,45 +0,0 @@ -pynput/__init__.py,sha256=EJgBRe_1_qHzZ1gSkLmCLKC2aByszo4TpCkV0bNBb8o,846 -pynput/_info.py,sha256=Qzy8h5tmZvp5hXiUGyJF2wFi2PnsfncKrIznGRUnqAw,772 -pynput/_util/__init__.py,sha256=PajyeFBKHIBWAsEITpH-50IeCkdT0RbDwDSpT7fnkQc,8947 -pynput/_util/darwin.py,sha256=YMiCP5Jdo3U8FxgJtwCOzodd0XF2buFqEj5PrQsffz8,8438 -pynput/_util/win32.py,sha256=W4nH2A9VoWEeHVzV-W-tKfVKYX7Y8LTD2Ljn3ZbUGvk,19385 -pynput/_util/win32_vks.py,sha256=YIp3xaZOVYqyNw2dwV-laKyYBw4_Kj-qsyVqbVW1m4k,2894 -pynput/_util/xorg.py,sha256=NSoP0mL0v87VjFXybZpp6b_LtevlKuLFfhyvQVP5370,14703 -pynput/_util/xorg_keysyms.py,sha256=xWqfQQva2R78FrQUQWzfUGsJcNeMe7U6y4KFyLu0XqI,69338 -pynput/keyboard/__init__.py,sha256=r2pwWTmqVrskvGnAnXDQw3_PH07EKq6dWkb46wn6s00,1922 -pynput/keyboard/_base.py,sha256=XWKIv_mIs1zoGjZmaPJo2bsjYlyZt3UUXY11hakY4Xg,20417 -pynput/keyboard/_darwin.py,sha256=vqu6aiEcLuXBjKhtsjsZIy9INrGRnTkynpmAN4X3HlQ,8093 -pynput/keyboard/_win32.py,sha256=_OFN5HyqDDvuSyZ5Nb2GUWXpqkw-oZK1G9GJ5XUgDns,8733 -pynput/keyboard/_xorg.py,sha256=wc9BX-hYXCb0HIfLvMgugv6-PGap66072BGQJyu2knU,20379 -pynput/mouse/__init__.py,sha256=1GoOHfyHRd3O9AnscLS7wKVP-RcAinuKDiqUi-T82a8,1822 -pynput/mouse/_base.py,sha256=2ZIK_mBbUIA1JUsPs2ZIFarrpExDAMwIlaq5Jr-6ouM,8240 -pynput/mouse/_darwin.py,sha256=5hMK4UhldxfFS9uTx5mn_jud-tgxcFbgJVADzuKGRfY,6929 -pynput/mouse/_win32.py,sha256=m3vXMo2cUteF4AQFInIQ82_-NDd-LwYW719oQq4IOjU,5802 -pynput/mouse/_xorg.py,sha256=ml5HgCTZAmAWHzd93qvOMtvwf1E_-bY4iIgkF6Qp3kU,5069 -pynput-1.4.dist-info/DESCRIPTION.rst,sha256=lwkCa212Gyw15jTNx0qejZaZo6Ijp7bqYaDrxxMtgkc,13177 -pynput-1.4.dist-info/METADATA,sha256=fvh0F8de_SWIE05HJoPJIeA5FQXVgedt6D0a5ydEfrY,14321 -pynput-1.4.dist-info/RECORD,, -pynput-1.4.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 -pynput-1.4.dist-info/metadata.json,sha256=FKkgyQGstvv12SPB1PsI2ugZ61QkCX5YTzZdEXI7C3o,1390 -pynput-1.4.dist-info/pbr.json,sha256=CAnWerrCQ6A-ekJTVVKkD9J-ia4q-xoZzQWKOlPcseQ,47 -pynput-1.4.dist-info/top_level.txt,sha256=DpJjYf-VkYaa_COk_yUczD0pHqsLndB9SjmwcQGkXJQ,7 -pynput-1.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pynput-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pynput/keyboard/_darwin.pyc,, -pynput/keyboard/_xorg.pyc,, -pynput/_util/xorg_keysyms.pyc,, -pynput/keyboard/_win32.pyc,, -pynput/_util/xorg.pyc,, -pynput/mouse/_xorg.pyc,, -pynput/_info.pyc,, -pynput/mouse/__init__.pyc,, -pynput/_util/__init__.pyc,, -pynput/keyboard/__init__.pyc,, -pynput/mouse/_darwin.pyc,, -pynput/_util/darwin.pyc,, -pynput/mouse/_win32.pyc,, -pynput/mouse/_base.pyc,, -pynput/_util/win32.pyc,, -pynput/_util/win32_vks.pyc,, -pynput/__init__.pyc,, 
-pynput/keyboard/_base.pyc,, diff --git a/pype/vendor/pynput-1.4.dist-info/WHEEL b/pype/vendor/pynput-1.4.dist-info/WHEEL deleted file mode 100644 index 7332a419cd..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.30.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/pype/vendor/pynput-1.4.dist-info/metadata.json b/pype/vendor/pynput-1.4.dist-info/metadata.json deleted file mode 100644 index a0685274df..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows :: Windows NT/2000", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Monitoring"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"contacts": [{"email": "moses.palmer@gmail.com", "name": "Moses Palm\u00e9r", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/moses-palmer/pynput"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "keywords": ["control", "mouse", "mouse", "input", "control", "keyboard", "keyboard", "input"], "license": "LGPLv3", "metadata_version": "2.0", "name": "pynput", "run_requires": [{"requires": ["six"]}, {"environment": "\"linux\" in sys_platform", "requires": ["python-xlib (>=0.17)"]}, {"environment": "python_version == \"2.7\"", "requires": ["enum34"]}, {"environment": "sys_platform == \"darwin\"", "requires": ["pyobjc-framework-Quartz (>=3.0)"]}], "summary": "Monitor and control user input devices", "version": "1.4"} \ No newline at end of file diff --git a/pype/vendor/pynput-1.4.dist-info/pbr.json b/pype/vendor/pynput-1.4.dist-info/pbr.json deleted file mode 100644 index ab50f33c8f..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/pbr.json +++ /dev/null @@ -1 +0,0 @@ -{"is_release": false, "git_version": "2d6ab69"} \ No newline at end of file diff --git a/pype/vendor/pynput-1.4.dist-info/top_level.txt b/pype/vendor/pynput-1.4.dist-info/top_level.txt deleted file mode 100644 index 91a67a9a31..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pynput diff --git a/pype/vendor/pynput-1.4.dist-info/zip-safe b/pype/vendor/pynput-1.4.dist-info/zip-safe deleted file mode 100644 index 8b13789179..0000000000 --- a/pype/vendor/pynput-1.4.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pype/vendor/pynput/__init__.py b/pype/vendor/pynput/__init__.py deleted file mode 100644 index c35df15ad2..0000000000 --- a/pype/vendor/pynput/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The main *pynput* module. - -This module imports ``keyboard`` and ``mouse``. -""" - -from . import keyboard -from . import mouse diff --git a/pype/vendor/pynput/_info.py b/pype/vendor/pynput/_info.py deleted file mode 100644 index 2d59835843..0000000000 --- a/pype/vendor/pynput/_info.py +++ /dev/null @@ -1,19 +0,0 @@ -# coding=utf-8 -# pystray -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . - -__author__ = u'Moses Palmér' -__version__ = (1, 4) diff --git a/pype/vendor/pynput/_util/__init__.py b/pype/vendor/pynput/_util/__init__.py deleted file mode 100644 index ffb250b628..0000000000 --- a/pype/vendor/pynput/_util/__init__.py +++ /dev/null @@ -1,290 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -General utility functions and classes. -""" - -# pylint: disable=R0903 -# We implement minimal mixins - -# pylint: disable=W0212 -# We implement an internal API - -import contextlib -import functools -import sys -import threading - -import six - -from six.moves import queue - - -class AbstractListener(threading.Thread): - """A class implementing the basic behaviour for event listeners. - - Instances of this class can be used as context managers. This is equivalent - to the following code:: - - listener.start() - listener.wait() - try: - with_statements() - finally: - listener.stop() - - :param bool suppress: Whether to suppress events. Setting this to ``True`` - will prevent the input events from being passed to the rest of the - system. - - :param kwargs: A mapping from callback attribute to callback handler. All - handlers will be wrapped in a function reading the return value of the - callback, and if it ``is False``, raising :class:`StopException`. - - Any callback that is falsy will be ignored. - """ - class StopException(Exception): - """If an event listener callback raises this exception, the current - listener is stopped. 
- """ - pass - - #: Exceptions that are handled outside of the emitter and should thus not be - #: passed through the queue - _HANDLED_EXCEPTIONS = tuple() - - def __init__(self, suppress=False, **kwargs): - super(AbstractListener, self).__init__() - - def wrapper(f): - def inner(*args): - if f(*args) is False: - raise self.StopException() - return inner - - self._suppress = suppress - self._running = False - self._thread = threading.current_thread() - self._condition = threading.Condition() - self._ready = False - - # Allow multiple calls to stop - self._queue = queue.Queue(10) - - self.daemon = True - - for name, callback in kwargs.items(): - setattr(self, name, wrapper(callback or (lambda *a: None))) - - @property - def suppress(self): - """Whether to suppress events. - """ - return self._suppress - - @property - def running(self): - """Whether the listener is currently running. - """ - return self._running - - def stop(self): - """Stops listening for events. - - When this method returns, no more events will be delivered. - """ - if self._running: - self._running = False - self._queue.put(None) - self._stop_platform() - - def __enter__(self): - self.start() - self.wait() - return self - - def __exit__(self, exc_type, value, traceback): - self.stop() - - def wait(self): - """Waits for this listener to become ready. - """ - self._condition.acquire() - while not self._ready: - self._condition.wait() - self._condition.release() - - def run(self): - """The thread runner method. - """ - self._running = True - self._thread = threading.current_thread() - self._run() - - # Make sure that the queue contains something - self._queue.put(None) - - @classmethod - def _emitter(cls, f): - """A decorator to mark a method as the one emitting the callbacks. - - This decorator will wrap the method and catch exception. If a - :class:`StopException` is caught, the listener will be stopped - gracefully. If any other exception is caught, it will be propagated to - the thread calling :meth:`join` and reraised there. - """ - @functools.wraps(f) - def inner(self, *args, **kwargs): - # pylint: disable=W0702; we want to catch all exception - try: - return f(self, *args, **kwargs) - except Exception as e: - if not isinstance(e, self._HANDLED_EXCEPTIONS): - self._queue.put( - None if isinstance(e, cls.StopException) - else sys.exc_info()) - self.stop() - raise - # pylint: enable=W0702 - - return inner - - def _mark_ready(self): - """Marks this listener as ready to receive events. - - This method must be called from :meth:`_run`. :meth:`wait` will block - until this method is called. - """ - self._condition.acquire() - self._ready = True - self._condition.notify() - self._condition.release() - - def _run(self): - """The implementation of the :meth:`run` method. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - def _stop_platform(self): - """The implementation of the :meth:`stop` method. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - def join(self, *args): - super(AbstractListener, self).join(*args) - - # Reraise any exceptions - try: - exc_type, exc_value, exc_traceback = self._queue.get() - except TypeError: - return - six.reraise(exc_type, exc_value, exc_traceback) - - -class NotifierMixin(object): - """A mixin for notifiers of fake events. - - This mixin can be used for controllers on platforms where sending fake - events does not cause a listener to receive a notification. 
- """ - def _emit(self, action, *args): - """Sends a notification to all registered listeners. - - This method will ensure that listeners that raise - :class:`StopException` are stopped. - - :param str action: The name of the notification. - - :param args: The arguments to pass. - """ - stopped = [] - for listener in self._listeners(): - try: - getattr(listener, action)(*args) - except listener.StopException: - stopped.append(listener) - for listener in stopped: - listener.stop() - - @classmethod - def _receiver(cls, listener_class): - """A decorator to make a class able to receive fake events from a - controller. - - This decorator will add the method ``_receive`` to the decorated class. - - This method is a context manager which ensures that all calls to - :meth:`_emit` will invoke the named method in the listener instance - while the block is active. - """ - @contextlib.contextmanager - def receive(self): - """Executes a code block with this listener instance registered as - a receiver of fake input events. - """ - self._controller_class._add_listener(self) - try: - yield - finally: - self._controller_class._remove_listener(self) - - listener_class._receive = receive - listener_class._controller_class = cls - - # Make sure this class has the necessary attributes - if not hasattr(cls, '_listener_cache'): - cls._listener_cache = set() - cls._listener_lock = threading.Lock() - - return listener_class - - @classmethod - def _listeners(cls): - """Iterates over the set of running listeners. - - This method will quit without acquiring the lock if the set is empty, - so there is potential for race conditions. This is an optimisation, - since :class:`Controller` will need to call this method for every - control event. - """ - if not cls._listener_cache: - return - with cls._listener_lock: - for listener in cls._listener_cache: - yield listener - - @classmethod - def _add_listener(cls, listener): - """Adds a listener to the set of running listeners. - - :param listener: The listener for fake events. - """ - with cls._listener_lock: - cls._listener_cache.add(listener) - - @classmethod - def _remove_listener(cls, listener): - """Removes this listener from the set of running listeners. - - :param listener: The listener for fake events. - """ - with cls._listener_lock: - cls._listener_cache.remove(listener) diff --git a/pype/vendor/pynput/_util/darwin.py b/pype/vendor/pynput/_util/darwin.py deleted file mode 100644 index 6ee603fd32..0000000000 --- a/pype/vendor/pynput/_util/darwin.py +++ /dev/null @@ -1,273 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -Utility functions and classes for the *Darwin* backend. -""" - -# pylint: disable=C0103 -# pylint: disable=R0903 -# This module contains wrapper classes - -import contextlib -import ctypes -import ctypes.util -import six - -import objc -import CoreFoundation -import Quartz - -from . 
import AbstractListener - - -#: The objc module as a library handle -OBJC = ctypes.PyDLL(objc._objc.__file__) - -OBJC.PyObjCObject_New.restype = ctypes.py_object -OBJC.PyObjCObject_New.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int] - - -def _wrap_value(value): - """Converts a pointer to a *Python objc* value. - - :param value: The pointer to convert. - - :return: a wrapped value - """ - return OBJC.PyObjCObject_New(value, 0, 1) - - -@contextlib.contextmanager -def _wrapped(value): - """A context manager that converts a raw pointer to a *Python objc* value. - - When the block is exited, the value is released. - - :param value: The raw value to wrap. - """ - wrapped_value = _wrap_value(value) - - try: - yield value - finally: - CoreFoundation.CFRelease(wrapped_value) - - -class CarbonExtra(object): - """A class exposing some missing functionality from *Carbon* as class - attributes. - """ - _Carbon = ctypes.cdll.LoadLibrary(ctypes.util.find_library('Carbon')) - - _Carbon.TISCopyCurrentKeyboardInputSource.argtypes = [] - _Carbon.TISCopyCurrentKeyboardInputSource.restype = ctypes.c_void_p - - _Carbon.TISCopyCurrentASCIICapableKeyboardLayoutInputSource.argtypes = [] - _Carbon.TISCopyCurrentASCIICapableKeyboardLayoutInputSource.restype = \ - ctypes.c_void_p - - _Carbon.TISGetInputSourceProperty.argtypes = [ - ctypes.c_void_p, ctypes.c_void_p] - _Carbon.TISGetInputSourceProperty.restype = ctypes.c_void_p - - _Carbon.LMGetKbdType.argtypes = [] - _Carbon.LMGetKbdType.restype = ctypes.c_uint32 - - _Carbon.UCKeyTranslate.argtypes = [ - ctypes.c_void_p, - ctypes.c_uint16, - ctypes.c_uint16, - ctypes.c_uint32, - ctypes.c_uint32, - ctypes.c_uint32, - ctypes.POINTER(ctypes.c_uint32), - ctypes.c_uint8, - ctypes.POINTER(ctypes.c_uint8), - ctypes.c_uint16 * 4] - _Carbon.UCKeyTranslate.restype = ctypes.c_uint32 - - TISCopyCurrentKeyboardInputSource = \ - _Carbon.TISCopyCurrentKeyboardInputSource - - TISCopyCurrentASCIICapableKeyboardLayoutInputSource = \ - _Carbon.TISCopyCurrentASCIICapableKeyboardLayoutInputSource - - kTISPropertyUnicodeKeyLayoutData = ctypes.c_void_p.in_dll( - _Carbon, 'kTISPropertyUnicodeKeyLayoutData') - - TISGetInputSourceProperty = \ - _Carbon.TISGetInputSourceProperty - - LMGetKbdType = \ - _Carbon.LMGetKbdType - - kUCKeyActionDisplay = 3 - kUCKeyTranslateNoDeadKeysBit = 0 - - UCKeyTranslate = \ - _Carbon.UCKeyTranslate - - -@contextlib.contextmanager -def keycode_context(): - """Returns an opaque value representing a context for translating keycodes - to strings. - """ - keyboard_type, layout_data = None, None - for source in [ - CarbonExtra.TISCopyCurrentKeyboardInputSource, - CarbonExtra.TISCopyCurrentASCIICapableKeyboardLayoutInputSource]: - with _wrapped(source()) as keyboard: - keyboard_type = CarbonExtra.LMGetKbdType() - layout = _wrap_value(CarbonExtra.TISGetInputSourceProperty( - keyboard, - CarbonExtra.kTISPropertyUnicodeKeyLayoutData)) - layout_data = layout.bytes().tobytes() if layout else None - if keyboard is not None and layout_data is not None: - break - yield (keyboard_type, layout_data) - - -def keycode_to_string(context, keycode, modifier_state=0): - """Converts a keycode to a string. 
- """ - LENGTH = 4 - - keyboard_type, layout_data = context - - dead_key_state = ctypes.c_uint32() - length = ctypes.c_uint8() - unicode_string = (ctypes.c_uint16 * LENGTH)() - CarbonExtra.UCKeyTranslate( - layout_data, - keycode, - CarbonExtra.kUCKeyActionDisplay, - modifier_state, - keyboard_type, - CarbonExtra.kUCKeyTranslateNoDeadKeysBit, - ctypes.byref(dead_key_state), - LENGTH, - ctypes.byref(length), - unicode_string) - return u''.join( - six.unichr(unicode_string[i]) - for i in range(length.value)) - - -def get_unicode_to_keycode_map(): - """Returns a mapping from unicode strings to virtual key codes. - - :return: a dict mapping key codes to strings - """ - with keycode_context() as context: - return { - keycode_to_string(context, keycode): keycode - for keycode in range(128)} - - -class ListenerMixin(object): - """A mixin for *Quartz* event listeners. - - Subclasses should set a value for :attr:`_EVENTS` and implement - :meth:`_handle`. - """ - #: The events that we listen to - _EVENTS = tuple() - - def _run(self): - self._loop = None - try: - tap = self._create_event_tap() - if tap is None: - self._mark_ready() - return - - loop_source = Quartz.CFMachPortCreateRunLoopSource( - None, tap, 0) - self._loop = Quartz.CFRunLoopGetCurrent() - - Quartz.CFRunLoopAddSource( - self._loop, loop_source, Quartz.kCFRunLoopDefaultMode) - Quartz.CGEventTapEnable(tap, True) - - self._mark_ready() - - # pylint: disable=W0702; we want to silence errors - try: - while self.running: - result = Quartz.CFRunLoopRunInMode( - Quartz.kCFRunLoopDefaultMode, 1, False) - try: - if result != Quartz.kCFRunLoopRunTimedOut: - break - except AttributeError: - # This happens during teardown of the virtual machine - break - - except: - # This exception will have been passed to the main thread - pass - # pylint: enable=W0702 - - finally: - self._loop = None - - def _stop_platform(self): - # The base class sets the running flag to False; this will cause the - # loop around run loop invocations to terminate and set this event - try: - if self._loop is not None: - Quartz.CFRunLoopStop(self._loop) - except AttributeError: - # The loop may not have been created - pass - - def _create_event_tap(self): - """Creates the event tap used by the listener. - - :return: an event tap - """ - return Quartz.CGEventTapCreate( - Quartz.kCGSessionEventTap, - Quartz.kCGHeadInsertEventTap, - Quartz.kCGEventTapOptionListenOnly if (True - and not self.suppress - and self._intercept is None) - else Quartz.kCGEventTapOptionDefault, - self._EVENTS, - self._handler, - None) - - @AbstractListener._emitter - def _handler(self, proxy, event_type, event, refcon): - """The callback registered with *Mac OSX* for mouse events. - - This method will call the callbacks registered on initialisation. - """ - self._handle(proxy, event_type, event, refcon) - if self._intercept is not None: - return self._intercept(event_type, event) - elif self.suppress: - return None - - def _handle(self, proxy, event_type, event, refcon): - """The device specific callback handler. - - This method calls the appropriate callback registered when this - listener was created based on the event. 
- """ - raise NotImplementedError() diff --git a/pype/vendor/pynput/_util/win32.py b/pype/vendor/pynput/_util/win32.py deleted file mode 100644 index 5440f0e556..0000000000 --- a/pype/vendor/pynput/_util/win32.py +++ /dev/null @@ -1,641 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -Utility functions and classes for the *win32* backend. -""" - -# pylint: disable=C0103 -# We want to make it obvious how structs are related - -# pylint: disable=R0903 -# This module contains a number of structs - -import contextlib -import ctypes -import threading - -from ctypes import ( - windll, - wintypes) - -import six - -from . import AbstractListener - - -# LPDWORD is not in ctypes.wintypes on Python 2 -if not hasattr(wintypes, 'LPDWORD'): - wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD) - - -class MOUSEINPUT(ctypes.Structure): - """Contains information about a simulated mouse event. - """ - MOVE = 0x0001 - LEFTDOWN = 0x0002 - LEFTUP = 0x0004 - RIGHTDOWN = 0x0008 - RIGHTUP = 0x0010 - MIDDLEDOWN = 0x0020 - MIDDLEUP = 0x0040 - XDOWN = 0x0080 - XUP = 0x0100 - WHEEL = 0x0800 - HWHEEL = 0x1000 - ABSOLUTE = 0x8000 - - XBUTTON1 = 0x0001 - XBUTTON2 = 0x0002 - - _fields_ = [ - ('dx', wintypes.LONG), - ('dy', wintypes.LONG), - ('mouseData', wintypes.DWORD), - ('dwFlags', wintypes.DWORD), - ('time', wintypes.DWORD), - ('dwExtraInfo', ctypes.c_void_p)] - - -class KEYBDINPUT(ctypes.Structure): - """Contains information about a simulated keyboard event. - """ - EXTENDEDKEY = 0x0001 - KEYUP = 0x0002 - SCANCODE = 0x0008 - UNICODE = 0x0004 - - _fields_ = [ - ('wVk', wintypes.WORD), - ('wScan', wintypes.WORD), - ('dwFlags', wintypes.DWORD), - ('time', wintypes.DWORD), - ('dwExtraInfo', ctypes.c_void_p)] - - -class HARDWAREINPUT(ctypes.Structure): - """Contains information about a simulated message generated by an input - device other than a keyboard or mouse. - """ - _fields_ = [ - ('uMsg', wintypes.DWORD), - ('wParamL', wintypes.WORD), - ('wParamH', wintypes.WORD)] - - -class INPUT_union(ctypes.Union): - """Represents the union of input types in :class:`INPUT`. - """ - _fields_ = [ - ('mi', MOUSEINPUT), - ('ki', KEYBDINPUT), - ('hi', HARDWAREINPUT)] - - -class INPUT(ctypes.Structure): - """Used by :attr:`SendInput` to store information for synthesizing input - events such as keystrokes, mouse movement, and mouse clicks. - """ - MOUSE = 0 - KEYBOARD = 1 - HARDWARE = 2 - - _fields_ = [ - ('type', wintypes.DWORD), - ('value', INPUT_union)] - - -LPINPUT = ctypes.POINTER(INPUT) - -VkKeyScan = windll.user32.VkKeyScanW -VkKeyScan.argtypes = ( - wintypes.WCHAR,) - -SendInput = windll.user32.SendInput -SendInput.argtypes = ( - wintypes.UINT, - LPINPUT, - ctypes.c_int) - -GetCurrentThreadId = windll.kernel32.GetCurrentThreadId -GetCurrentThreadId.restype = wintypes.DWORD - - -class MessageLoop(object): - """A class representing a message loop. 
- """ - #: The message that signals this loop to terminate - WM_STOP = 0x0401 - - _LPMSG = ctypes.POINTER(wintypes.MSG) - - _GetMessage = windll.user32.GetMessageW - _GetMessage.argtypes = ( - _LPMSG, - wintypes.HWND, - wintypes.UINT, - wintypes.UINT) - _PeekMessage = windll.user32.PeekMessageW - _PeekMessage.argtypes = ( - _LPMSG, - wintypes.HWND, - wintypes.UINT, - wintypes.UINT, - wintypes.UINT) - _PostThreadMessage = windll.user32.PostThreadMessageW - _PostThreadMessage.argtypes = ( - wintypes.DWORD, - wintypes.UINT, - wintypes.WPARAM, - wintypes.LPARAM) - - PM_NOREMOVE = 0 - - def __init__(self): - self._threadid = None - self._event = threading.Event() - self.thread = None - - def __iter__(self): - """Initialises the message loop and yields all messages until - :meth:`stop` is called. - - :raises AssertionError: if :meth:`start` has not been called - """ - assert self._threadid is not None - - try: - # Pump messages until WM_STOP - while True: - msg = wintypes.MSG() - lpmsg = ctypes.byref(msg) - r = self._GetMessage(lpmsg, None, 0, 0) - if r <= 0 or msg.message == self.WM_STOP: - break - else: - yield msg - - finally: - self._threadid = None - self.thread = None - - def start(self): - """Starts the message loop. - - This method must be called before iterating over messages, and it must - be called from the same thread. - """ - self._threadid = GetCurrentThreadId() - self.thread = threading.current_thread() - - # Create the message loop - msg = wintypes.MSG() - lpmsg = ctypes.byref(msg) - self._PeekMessage(lpmsg, None, 0x0400, 0x0400, self.PM_NOREMOVE) - - # Set the event to signal to other threads that the loop is created - self._event.set() - - def stop(self): - """Stops the message loop. - """ - self._event.wait() - if self._threadid: - self.post(self.WM_STOP, 0, 0) - - def post(self, msg, wparam, lparam): - """Posts a message to this message loop. - - :param ctypes.wintypes.UINT msg: The message. - - :param ctypes.wintypes.WPARAM wparam: The value of ``wParam``. - - :param ctypes.wintypes.LPARAM lparam: The value of ``lParam``. - """ - self._PostThreadMessage(self._threadid, msg, wparam, lparam) - - -class SystemHook(object): - """A class to handle Windows hooks. - """ - #: The hook action value for actions we should check - HC_ACTION = 0 - - _HOOKPROC = ctypes.WINFUNCTYPE( - wintypes.LPARAM, - ctypes.c_int32, wintypes.WPARAM, wintypes.LPARAM) - - _SetWindowsHookEx = windll.user32.SetWindowsHookExW - _SetWindowsHookEx.argtypes = ( - ctypes.c_int, - _HOOKPROC, - wintypes.HINSTANCE, - wintypes.DWORD) - _UnhookWindowsHookEx = windll.user32.UnhookWindowsHookEx - _UnhookWindowsHookEx.argtypes = ( - wintypes.HHOOK,) - _CallNextHookEx = windll.user32.CallNextHookEx - _CallNextHookEx.argtypes = ( - wintypes.HHOOK, - ctypes.c_int, - wintypes.WPARAM, - wintypes.LPARAM) - - #: The registered hook procedures - _HOOKS = {} - - class SuppressException(Exception): - """An exception raised by a hook callback to suppress further - propagation of events. 
- """ - pass - - def __init__(self, hook_id, on_hook=lambda code, msg, lpdata: None): - self.hook_id = hook_id - self.on_hook = on_hook - self._hook = None - - def __enter__(self): - key = threading.current_thread().ident - assert key not in self._HOOKS - - # Add ourself to lookup table and install the hook - self._HOOKS[key] = self - self._hook = self._SetWindowsHookEx( - self.hook_id, - self._handler, - None, - 0) - - return self - - def __exit__(self, exc_type, value, traceback): - key = threading.current_thread().ident - assert key in self._HOOKS - - if self._hook is not None: - # Uninstall the hook and remove ourself from lookup table - self._UnhookWindowsHookEx(self._hook) - del self._HOOKS[key] - - @staticmethod - @_HOOKPROC - def _handler(code, msg, lpdata): - key = threading.current_thread().ident - self = SystemHook._HOOKS.get(key, None) - if self: - # pylint: disable=W0702; we want to silence errors - try: - self.on_hook(code, msg, lpdata) - except self.SuppressException: - # Return non-zero to stop event propagation - return 1 - except: - # Ignore any errors - pass - # pylint: enable=W0702 - return SystemHook._CallNextHookEx(0, code, msg, lpdata) - - -class ListenerMixin(object): - """A mixin for *win32* event listeners. - - Subclasses should set a value for :attr:`_EVENTS` and implement - :meth:`_handle`. - - Subclasses must also be decorated with a decorator compatible with - :meth:`pynput._util.NotifierMixin._receiver` or implement the method - ``_receive()``. - """ - #: The Windows hook ID for the events to capture - _EVENTS = None - - #: The window message used to signal that an even should be handled - _WM_PROCESS = 0x410 - - def suppress_event(self): - """Causes the currently filtered event to be suppressed. - - This has a system wide effect and will generally result in no - applications receiving the event. - - This method will raise an undefined exception. - """ - raise SystemHook.SuppressException() - - def _run(self): - self._message_loop = MessageLoop() - with self._receive(): - self._mark_ready() - self._message_loop.start() - - # pylint: disable=W0702; we want to silence errors - try: - with SystemHook(self._EVENTS, self._handler): - # Just pump messages - for msg in self._message_loop: - if not self.running: - break - if msg.message == self._WM_PROCESS: - self._process(msg.wParam, msg.lParam) - except: - # This exception will have been passed to the main thread - pass - # pylint: enable=W0702 - - def _stop_platform(self): - try: - self._message_loop.stop() - except AttributeError: - # The loop may not have been created - pass - - @AbstractListener._emitter - def _handler(self, code, msg, lpdata): - """The callback registered with *Windows* for events. - - This method will post the message :attr:`_WM_HANDLE` to the message loop - started with this listener using :meth:`MessageLoop.post`. The - parameters are retrieved with a call to :meth:`_handle`. - """ - try: - converted = self._convert(code, msg, lpdata) - if converted is not None: - self._message_loop.post(self._WM_PROCESS, *converted) - except NotImplementedError: - self._handle(code, msg, lpdata) - - if self.suppress: - self.suppress_event() - - def _convert(self, code, msg, lpdata): - """The device specific callback handler. - - This method converts a low-level message and data to a - ``WPARAM`` / ``LPARAM`` pair. - """ - raise NotImplementedError() - - def _process(self, wparam, lparam): - """The device specific callback handler. - - This method performs the actual dispatching of events. 
- """ - raise NotImplementedError() - - def _handle(self, code, msg, lpdata): - """The device specific callback handler. - - This method calls the appropriate callback registered when this - listener was created based on the event. - - This method is only called if :meth:`_convert` is not implemented. - """ - raise NotImplementedError() - - -class KeyTranslator(object): - """A class to translate virtual key codes to characters. - """ - _AttachThreadInput = ctypes.windll.user32.AttachThreadInput - _AttachThreadInput.argtypes = ( - wintypes.DWORD, - wintypes.DWORD, - wintypes.BOOL) - _GetForegroundWindow = ctypes.windll.user32.GetForegroundWindow - _GetKeyboardLayout = ctypes.windll.user32.GetKeyboardLayout - _GetKeyboardLayout.argtypes = ( - wintypes.DWORD,) - _GetKeyboardState = ctypes.windll.user32.GetKeyboardState - _GetKeyboardState.argtypes = ( - ctypes.c_voidp,) - _GetWindowThreadProcessId = ctypes.windll.user32.GetWindowThreadProcessId - _GetWindowThreadProcessId.argtypes = ( - wintypes.HWND, - wintypes.LPDWORD) - _MapVirtualKeyEx = ctypes.windll.user32.MapVirtualKeyExW - _MapVirtualKeyEx.argtypes = ( - wintypes.UINT, - wintypes.UINT, - wintypes.HKL) - _ToUnicodeEx = ctypes.windll.user32.ToUnicodeEx - _ToUnicodeEx.argtypes = ( - wintypes.UINT, - wintypes.UINT, - ctypes.c_voidp, - ctypes.c_voidp, - ctypes.c_int, - wintypes.UINT, - wintypes.HKL) - - _MAPVK_VK_TO_VSC = 0 - _MAPVK_VK_TO_CHAR = 2 - - def __init__(self): - self.__state = (ctypes.c_ubyte * 255)() - self._cache = {} - self._reinject_arguments = None - - def __call__(self, vk, is_press): - """Converts a virtual key code to a string. - - :param int vk: The virtual key code. - - :param bool is_press: Whether this is a press. Because the *win32* - functions used to translate the key modifies internal kernel state, - some cleanup must be performed for key presses. - - :return: parameters suitable for the :class:`pynput.keyboard.KeyCode` - constructor - - :raises OSError: if a call to any *win32* function fails - """ - # Get the keyboard state and layout - state, layout = self._get_state_and_layout() - - # Get the scan code for the virtual key - scan = self._to_scan(vk, layout) - - # Try to reuse the previous key in the cache - try: - if is_press: - return self._cache[vk] - else: - return self._cache.pop(vk) - except KeyError: - pass - - # Get a string representation of the key - char, is_dead = self._to_char(vk, layout) - modified_char = self._to_char_with_modifiers(vk, layout, scan, state) - - # Clear the keyboard state if the key was a dead key - if is_dead: - self._reset_state(vk, layout, scan) - - # If the previous key handled was a dead key, we reinject it - if self._reinject_arguments: - self._reinject(*self._reinject_arguments) - self._reinject_arguments = None - - # If the current key is a dead key, we store the current state to be - # able to reinject later - elif is_dead: - self._reinject_arguments = ( - vk, - layout, - scan, - (ctypes.c_ubyte * 255)(*state)) - - # Otherwise we just clear any previous dead key state - else: - self._reinject_arguments = None - - # Update the cache - self._cache[vk] = { - 'char': modified_char or char, - 'is_dead': is_dead, - 'vk': vk} - - return self._cache[vk] - - def _get_state_and_layout(self): - """Returns the keyboard state and layout. - - The state is read from the currently active window if possible. It is - kept in a cache, so any call to this method will invalidate return - values from previous invocations. 
-
-        :return: the tuple ``(state, layout)``
-        """
-        # Get the state of the keyboard attached to the active window
-        with self._thread_input() as active_thread:
-            if not self._GetKeyboardState(ctypes.byref(self.__state)):
-                raise OSError(
-                    'GetKeyboardState failed: %d' %
-                    ctypes.get_last_error())
-
-        # Get the keyboard layout for the thread for which we retrieved the
-        # state
-        layout = self._GetKeyboardLayout(active_thread)
-
-        return (self.__state, layout)
-
-    def _to_scan(self, vk, layout):
-        """Retrieves the scan code for a virtual key code.
-
-        :param int vk: The virtual key code.
-
-        :param layout: The keyboard layout.
-
-        :return: the scan code
-        """
-        return self._MapVirtualKeyEx(
-            vk, self._MAPVK_VK_TO_VSC, layout)
-
-    def _to_char(self, vk, layout):
-        """Converts a virtual key by simply mapping it through the keyboard
-        layout.
-
-        This method is stateless, so any active shift state or dead keys are
-        ignored.
-
-        :param int vk: The virtual key code.
-
-        :param layout: The keyboard layout.
-
-        :return: the string representation of the key, or ``None``, and
-            whether it was dead as the tuple ``(char, is_dead)``
-        """
-        # MapVirtualKeyEx will yield a string representation for dead keys
-        flags_and_codepoint = self._MapVirtualKeyEx(
-            vk, self._MAPVK_VK_TO_CHAR, layout)
-        if flags_and_codepoint:
-            return (
-                six.unichr(flags_and_codepoint & 0xFFFF),
-                bool(flags_and_codepoint & (1 << 31)))
-        else:
-            return (None, None)
-
-    def _to_char_with_modifiers(self, vk, layout, scan, state):
-        """Converts a virtual key by mapping it through the keyboard layout and
-        internal kernel keyboard state.
-
-        This method is stateful, so any active shift state and dead keys are
-        applied. Currently active dead keys will be removed from the internal
-        kernel keyboard state.
-
-        :param int vk: The virtual key code.
-
-        :param layout: The keyboard layout.
-
-        :param int scan: The scan code of the key.
-
-        :param state: The keyboard state.
-
-        :return: the string representation of the key, or ``None``
-        """
-        # This will apply any dead keys and modify the internal kernel
-        # keyboard state
-        out = (ctypes.wintypes.WCHAR * 5)()
-        count = self._ToUnicodeEx(
-            vk, scan, ctypes.byref(state), ctypes.byref(out),
-            len(out), 0, layout)
-
-        return out[0] if count > 0 else None
-
-    def _reset_state(self, vk, layout, scan):
-        """Clears the internal kernel keyboard state.
-
-        This method will remove all dead keys from the internal state.
-
-        :param int vk: The virtual key code.
-
-        :param layout: The keyboard layout.
-
-        :param int scan: The scan code of the key.
-        """
-        state = (ctypes.c_byte * 255)()
-        out = (ctypes.wintypes.WCHAR * 5)()
-        while self._ToUnicodeEx(
-                vk, scan, ctypes.byref(state), ctypes.byref(out),
-                len(out), 0, layout) < 0:
-            pass
-
-    def _reinject(self, vk, layout, scan, state):
-        """Reinjects the previous dead key.
-
-        This must be called if ``ToUnicodeEx`` has been called, and the
-        previous key was a dead one.
-
-        :param int vk: The virtual key code.
-
-        :param layout: The keyboard layout.
-
-        :param int scan: The scan code of the key.
-
-        :param state: The keyboard state.
-        """
-        out = (ctypes.wintypes.WCHAR * 5)()
-        self._ToUnicodeEx(
-            vk, scan, ctypes.byref(state), ctypes.byref(out),
-            len(out), 0, layout)
-
-    @contextlib.contextmanager
-    def _thread_input(self):
-        """Yields the current thread ID.
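# Editor's aside: KeyTranslator's contract seen from the call side (win32
# only). 0x41 is the documented virtual-key code for the A key. The press
# result is cached so the matching release resolves to the same character
# even if the modifier state has changed in between.
translate = KeyTranslator()
press = translate(0x41, is_press=True)
print(press)             # e.g. {'char': 'a', 'is_dead': False, 'vk': 0x41}
release = translate(0x41, is_press=False)
assert release is press  # the release pops the entry cached at press time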
- """ - yield GetCurrentThreadId() diff --git a/pype/vendor/pynput/_util/win32_vks.py b/pype/vendor/pynput/_util/win32_vks.py deleted file mode 100644 index 860beffc4d..0000000000 --- a/pype/vendor/pynput/_util/win32_vks.py +++ /dev/null @@ -1,179 +0,0 @@ -# coding: utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . - -# pylint: disable=C0111,C0302 - -LBUTTON = 1 -RBUTTON = 2 -CANCEL = 3 -MBUTTON = 4 -XBUTTON1 = 5 -XBUTTON2 = 6 -BACK = 8 -TAB = 9 -CLEAR = 12 -RETURN = 13 -SHIFT = 16 -CONTROL = 17 -MENU = 18 -PAUSE = 19 -CAPITAL = 20 -KANA = 21 -HANGEUL = 21 -HANGUL = 21 -JUNJA = 23 -FINAL = 24 -HANJA = 25 -KANJI = 25 -ESCAPE = 27 -CONVERT = 28 -NONCONVERT = 29 -ACCEPT = 30 -MODECHANGE = 31 -SPACE = 32 -PRIOR = 33 -NEXT = 34 -END = 35 -HOME = 36 -LEFT = 37 -UP = 38 -RIGHT = 39 -DOWN = 40 -SELECT = 41 -PRINT = 42 -EXECUTE = 43 -SNAPSHOT = 44 -INSERT = 45 -DELETE = 46 -HELP = 47 -LWIN = 91 -RWIN = 92 -APPS = 93 -SLEEP = 95 -NUMPAD0 = 96 -NUMPAD1 = 97 -NUMPAD2 = 98 -NUMPAD3 = 99 -NUMPAD4 = 100 -NUMPAD5 = 101 -NUMPAD6 = 102 -NUMPAD7 = 103 -NUMPAD8 = 104 -NUMPAD9 = 105 -MULTIPLY = 106 -ADD = 107 -SEPARATOR = 108 -SUBTRACT = 109 -DECIMAL = 110 -DIVIDE = 111 -F1 = 112 -F2 = 113 -F3 = 114 -F4 = 115 -F5 = 116 -F6 = 117 -F7 = 118 -F8 = 119 -F9 = 120 -F10 = 121 -F11 = 122 -F12 = 123 -F13 = 124 -F14 = 125 -F15 = 126 -F16 = 127 -F17 = 128 -F18 = 129 -F19 = 130 -F20 = 131 -F21 = 132 -F22 = 133 -F23 = 134 -F24 = 135 -NUMLOCK = 144 -SCROLL = 145 -OEM_NEC_EQUAL = 146 -OEM_FJ_JISHO = 146 -OEM_FJ_MASSHOU = 147 -OEM_FJ_TOUROKU = 148 -OEM_FJ_LOYA = 149 -OEM_FJ_ROYA = 150 -LSHIFT = 160 -RSHIFT = 161 -LCONTROL = 162 -RCONTROL = 163 -LMENU = 164 -RMENU = 165 -BROWSER_BACK = 166 -BROWSER_FORWARD = 167 -BROWSER_REFRESH = 168 -BROWSER_STOP = 169 -BROWSER_SEARCH = 170 -BROWSER_FAVORITES = 171 -BROWSER_HOME = 172 -VOLUME_MUTE = 173 -VOLUME_DOWN = 174 -VOLUME_UP = 175 -MEDIA_NEXT_TRACK = 176 -MEDIA_PREV_TRACK = 177 -MEDIA_STOP = 178 -MEDIA_PLAY_PAUSE = 179 -LAUNCH_MAIL = 180 -LAUNCH_MEDIA_SELECT = 181 -LAUNCH_APP1 = 182 -LAUNCH_APP2 = 183 -OEM_1 = 186 -OEM_PLUS = 187 -OEM_COMMA = 188 -OEM_MINUS = 189 -OEM_PERIOD = 190 -OEM_2 = 191 -OEM_3 = 192 -OEM_4 = 219 -OEM_5 = 220 -OEM_6 = 221 -OEM_7 = 222 -OEM_8 = 223 -OEM_AX = 225 -OEM_102 = 226 -ICO_HELP = 227 -ICO_00 = 228 -PROCESSKEY = 229 -ICO_CLEAR = 230 -PACKET = 231 -OEM_RESET = 233 -OEM_JUMP = 234 -OEM_PA1 = 235 -OEM_PA2 = 236 -OEM_PA3 = 237 -OEM_WSCTRL = 238 -OEM_CUSEL = 239 -OEM_ATTN = 240 -OEM_FINISH = 241 -OEM_COPY = 242 -OEM_AUTO = 243 -OEM_ENLW = 244 -OEM_BACKTAB = 245 -ATTN = 246 -CRSEL = 247 -EXSEL = 248 -EREOF = 249 -PLAY = 250 -ZOOM = 251 -NONAME = 252 -PA1 = 253 -OEM_CLEAR = 254 diff --git a/pype/vendor/pynput/_util/xorg.py b/pype/vendor/pynput/_util/xorg.py deleted file mode 100644 index 323a7a9252..0000000000 --- a/pype/vendor/pynput/_util/xorg.py +++ /dev/null @@ -1,480 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 
2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -Utility functions and classes for the *Xorg* backend. -""" - -# pylint: disable=R0903 -# We implement stubs - -import contextlib -import functools -import itertools -import operator -import Xlib.display -import Xlib.threaded -import Xlib.XK - -from . import AbstractListener -from .xorg_keysyms import SYMBOLS - - -# Create a display to verify that we have an X connection -def _check(): - display = Xlib.display.Display() - display.close() -_check() -del _check - - -class X11Error(Exception): - """An error that is thrown at the end of a code block managed by a - :func:`display_manager` if an *X11* error occurred. - """ - pass - - -@contextlib.contextmanager -def display_manager(display): - """Traps *X* errors and raises an :class:``X11Error`` at the end if any - error occurred. - - This handler also ensures that the :class:`Xlib.display.Display` being - managed is sync'd. - - :param Xlib.display.Display display: The *X* display. - - :return: the display - :rtype: Xlib.display.Display - """ - errors = [] - - def handler(*args): - """The *Xlib* error handler. - """ - errors.append(args) - - old_handler = display.set_error_handler(handler) - try: - yield display - display.sync() - finally: - display.set_error_handler(old_handler) - if errors: - raise X11Error(errors) - - -def _find_mask(display, symbol): - """Returns the mode flags to use for a modifier symbol. - - :param Xlib.display.Display display: The *X* display. - - :param str symbol: The name of the symbol. - - :return: the modifier mask - """ - # Get the key code for the symbol - modifier_keycode = display.keysym_to_keycode( - Xlib.XK.string_to_keysym(symbol)) - - for index, keycodes in enumerate(display.get_modifier_mapping()): - for keycode in keycodes: - if keycode == modifier_keycode: - return 1 << index - - return 0 - - -def alt_mask(display): - """Returns the *alt* mask flags. - - The first time this function is called for a display, the value is cached. - Subsequent calls will return the cached value. - - :param Xlib.display.Display display: The *X* display. - - :return: the modifier mask - """ - if not hasattr(display, '__alt_mask'): - display.__alt_mask = _find_mask(display, 'Alt_L') - return display.__alt_mask - - -def alt_gr_mask(display): - """Returns the *alt* mask flags. - - The first time this function is called for a display, the value is cached. - Subsequent calls will return the cached value. - - :param Xlib.display.Display display: The *X* display. - - :return: the modifier mask - """ - if not hasattr(display, '__altgr_mask'): - display.__altgr_mask = _find_mask(display, 'Mode_switch') - return display.__altgr_mask - - -def numlock_mask(display): - """Returns the *numlock* mask flags. - - The first time this function is called for a display, the value is cached. - Subsequent calls will return the cached value. 
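# Editor's aside: display_manager in use. Asynchronous X errors raised inside
# the block are collected and re-raised as one X11Error after the sync, which
# is what makes the modifier-mask probes above safe to batch. Requires a
# running X server and python-xlib.
import Xlib.display

display = Xlib.display.Display()
try:
    with display_manager(display) as d:
        print('alt mask: 0x%x' % alt_mask(d))
        print('altgr mask: 0x%x' % alt_gr_mask(d))
except X11Error as error:
    print('X errors while probing masks:', error.args)
finally:
    display.close()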
-
-    :param Xlib.display.Display display: The *X* display.
-
-    :return: the modifier mask
-    """
-    if not hasattr(display, '__numlock_mask'):
-        display.__numlock_mask = _find_mask(display, 'Num_Lock')
-    return display.__numlock_mask
-
-
-def keysym_is_latin_upper(keysym):
-    """Determines whether a *keysym* is an upper case *latin* character.
-
-    This is true only if ``XK_A`` <= ``keysym`` <= ``XK_Z``.
-
-    :param int keysym: The *keysym* to check.
-    """
-    return Xlib.XK.XK_A <= keysym <= Xlib.XK.XK_Z
-
-
-def keysym_is_latin_lower(keysym):
-    """Determines whether a *keysym* is a lower case *latin* character.
-
-    This is true only if ``XK_a`` <= ``keysym`` <= ``XK_z``.
-
-    :param int keysym: The *keysym* to check.
-    """
-    return Xlib.XK.XK_a <= keysym <= Xlib.XK.XK_z
-
-
-def keysym_group(ks1, ks2):
-    """Generates a group from two *keysyms*.
-
-    The implementation of this function comes from:
-
-        Within each group, if the second element of the group is ``NoSymbol``,
-        then the group should be treated as if the second element were the same
-        as the first element, except when the first element is an alphabetic
-        *KeySym* ``K`` for which both lowercase and uppercase forms are
-        defined.
-
-        In that case, the group should be treated as if the first element were
-        the lowercase form of ``K`` and the second element were the uppercase
-        form of ``K``.
-
-    This function assumes that *alphabetic* means *latin*; this assumption
-    appears to be consistent with observations of the return values from
-    ``XGetKeyboardMapping``.
-
-    :param ks1: The first *keysym*.
-
-    :param ks2: The second *keysym*.
-
-    :return: a tuple conforming to the description above
-    """
-    if ks2 == Xlib.XK.NoSymbol:
-        if keysym_is_latin_upper(ks1):
-            return (Xlib.XK.XK_a + ks1 - Xlib.XK.XK_A, ks1)
-        elif keysym_is_latin_lower(ks1):
-            return (ks1, Xlib.XK.XK_A + ks1 - Xlib.XK.XK_a)
-        else:
-            return (ks1, ks1)
-    else:
-        return (ks1, ks2)
-
-
-def keysym_normalize(keysym):
-    """Normalises a list of *keysyms*.
-
-    The implementation of this function comes from:
-
-        If the list (ignoring trailing ``NoSymbol`` entries) is a single
-        *KeySym* ``K``, then the list is treated as if it were the list
-        ``K NoSymbol K NoSymbol``.
-
-        If the list (ignoring trailing ``NoSymbol`` entries) is a pair of
-        *KeySyms* ``K1 K2``, then the list is treated as if it were the list
-        ``K1 K2 K1 K2``.
-
-        If the list (ignoring trailing ``NoSymbol`` entries) is a triple of
-        *KeySyms* ``K1 K2 K3``, then the list is treated as if it were the list
-        ``K1 K2 K3 NoSymbol``.
-
-    This function will also group the *keysyms* using :func:`keysym_group`.
-
-    :param keysym: A list of keysyms.
-
-    :return: the tuple ``(group_1, group_2)`` or ``None``
-    """
-    # Remove trailing NoSymbol
-    stripped = list(reversed(list(
-        itertools.dropwhile(
-            lambda n: n == Xlib.XK.NoSymbol,
-            reversed(keysym)))))
-
-    if not stripped:
-        return
-
-    elif len(stripped) == 1:
-        return (
-            keysym_group(stripped[0], Xlib.XK.NoSymbol),
-            keysym_group(stripped[0], Xlib.XK.NoSymbol))
-
-    elif len(stripped) == 2:
-        return (
-            keysym_group(stripped[0], stripped[1]),
-            keysym_group(stripped[0], stripped[1]))
-
-    elif len(stripped) == 3:
-        return (
-            keysym_group(stripped[0], stripped[1]),
-            keysym_group(stripped[2], Xlib.XK.NoSymbol))
-
-    elif len(stripped) >= 6:
-        # TODO: Find out why this is necessary; using only the documented
-        # behaviour may lead to only a US layout being used?
-        return (
-            keysym_group(stripped[0], stripped[1]),
-            keysym_group(stripped[4], stripped[5]))
-
-    else:
-        return (
-            keysym_group(stripped[0], stripped[1]),
-            keysym_group(stripped[2], stripped[3]))
-
-
-def index_to_shift(display, index):
-    """Converts an index in a *key code* list to the corresponding shift
-    state.
-
-    :param Xlib.display.Display display: The display for which to retrieve the
-        shift mask.
-
-    :param int index: The keyboard mapping *key code* index.
-
-    :return: a shift mask
-    """
-    return (
-        (1 << 0 if index & 1 else 0) |
-        (alt_gr_mask(display) if index & 2 else 0))
-
-
-def shift_to_index(display, shift):
-    """Converts a shift state to the corresponding index in a *key code*
-    list.
-
-    :param Xlib.display.Display display: The display for which to retrieve the
-        shift mask.
-
-    :param int shift: The shift state.
-
-    :return: a *key code* index
-    """
-    return (
-        (1 if shift & 1 else 0) +
-        (2 if shift & alt_gr_mask(display) else 0))
-
-
-def keyboard_mapping(display):
-    """Generates a mapping from *keysyms* to *key codes* and required
-    modifier shift states.
-
-    :param Xlib.display.Display display: The display for which to retrieve the
-        keyboard mapping.
-
-    :return: the keyboard mapping
-    """
-    mapping = {}
-
-    shift_mask = 1 << 0
-    group_mask = alt_gr_mask(display)
-
-    # Iterate over all keysym lists in the keyboard mapping
-    min_keycode = display.display.info.min_keycode
-    keycode_count = display.display.info.max_keycode - min_keycode + 1
-    for index, keysyms in enumerate(display.get_keyboard_mapping(
-            min_keycode, keycode_count)):
-        key_code = index + min_keycode
-
-        # Normalise the keysym list to yield a tuple containing the two
-        # groups
-        normalized = keysym_normalize(keysyms)
-        if not normalized:
-            continue
-
-        # Iterate over the groups to extract the shift and modifier state
-        for groups, group in zip(normalized, (False, True)):
-            for keysym, shift in zip(groups, (False, True)):
-                if not keysym:
-                    continue
-                shift_state = 0 \
-                    | (shift_mask if shift else 0) \
-                    | (group_mask if group else 0)
-
-                # Prefer already known lesser shift states
-                if keysym in mapping and mapping[keysym][1] < shift_state:
-                    continue
-                mapping[keysym] = (key_code, shift_state)
-
-    return mapping
-
-
-def symbol_to_keysym(symbol):
-    """Converts a symbol name to a *keysym*.
-
-    :param str symbol: The name of the symbol.
-
-    :return: the corresponding *keysym*, or ``0`` if it cannot be found
-    """
-    # First try simple translation
-    keysym = Xlib.XK.string_to_keysym(symbol)
-    if keysym:
-        return keysym
-
-    # If that fails, try checking a module attribute of Xlib.keysymdef.xkb
-    if not keysym:
-        try:
-            return getattr(Xlib.keysymdef.xkb, 'XK_' + symbol, 0)
-        except AttributeError:
-            return SYMBOLS.get(symbol, (0,))[0]
-
-
-class ListenerMixin(object):
-    """A mixin for *X* event listeners.
-
-    Subclasses should set a value for :attr:`_EVENTS` and implement
-    :meth:`_handle`.
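# Editor's aside: the grouping rule is compact enough to check by hand. A lone
# alphabetic keysym expands into its (lowercase, uppercase) pair; a lone
# non-alphabetic keysym is duplicated; explicit pairs pass through unchanged.
import Xlib.XK

assert keysym_group(Xlib.XK.XK_A, Xlib.XK.NoSymbol) == \
    (Xlib.XK.XK_a, Xlib.XK.XK_A)
assert keysym_group(Xlib.XK.XK_space, Xlib.XK.NoSymbol) == \
    (Xlib.XK.XK_space, Xlib.XK.XK_space)
assert keysym_group(Xlib.XK.XK_a, Xlib.XK.XK_7) == \
    (Xlib.XK.XK_a, Xlib.XK.XK_7)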
- """ - #: The events for which to listen - _EVENTS = tuple() - - #: We use this instance for parsing the binary data - _EVENT_PARSER = Xlib.protocol.rq.EventField(None) - - def _run(self): - self._display_stop = Xlib.display.Display() - self._display_record = Xlib.display.Display() - with display_manager(self._display_stop) as dm: - self._context = dm.record_create_context( - 0, - [Xlib.ext.record.AllClients], - [{ - 'core_requests': (0, 0), - 'core_replies': (0, 0), - 'ext_requests': (0, 0, 0, 0), - 'ext_replies': (0, 0, 0, 0), - 'delivered_events': (0, 0), - 'device_events': self._EVENTS, - 'errors': (0, 0), - 'client_started': False, - 'client_died': False}]) - - # pylint: disable=W0702; we want to silence errors - try: - self._initialize(self._display_stop) - self._mark_ready() - if self.suppress: - with display_manager(self._display_record) as dm: - self._suppress_start(dm) - self._display_record.record_enable_context( - self._context, self._handler) - except: - # This exception will have been passed to the main thread - pass - finally: - if self.suppress: - with display_manager(self._display_stop) as dm: - self._suppress_stop(dm) - self._display_record.record_free_context(self._context) - self._display_stop.close() - self._display_record.close() - # pylint: enable=W0702 - - def _stop_platform(self): - if not hasattr(self, '_context'): - self.wait() - # pylint: disable=W0702; we must ignore errors - try: - with display_manager(self._display_stop) as dm: - dm.record_disable_context(self._context) - except: - pass - # pylint: enable=W0702 - - def _suppress_start(self, display): - """Starts suppressing events. - - :param Xlib.display.Display display: The display for which to suppress - events. - """ - raise NotImplementedError() - - def _suppress_stop(self, display): - """Starts suppressing events. - - :param Xlib.display.Display display: The display for which to suppress - events. - """ - raise NotImplementedError() - - @property - def _event_mask(self): - """The event mask. - """ - return functools.reduce(operator.__or__, self._EVENTS, 0) - - @AbstractListener._emitter - def _handler(self, events): - """The callback registered with *X* for mouse events. - - This method will parse the response and call the callbacks registered - on initialisation. - - :param events: The events passed by *X*. This is a binary block - parsable by :attr:`_EVENT_PARSER`. - """ - if not self.running: - raise self.StopException() - - data = events.data - - while data and len(data): - event, data = self._EVENT_PARSER.parse_binary_value( - data, self._display_record.display, None, None) - self._handle(self._display_stop, event) - - def _initialize(self, display): - """Initialises this listener. - - This method is called immediately before the event loop, from the - handler thread. - - :param display: The display being used. - """ - pass - - def _handle(self, display, event): - """The device specific callback handler. - - This method calls the appropriate callback registered when this - listener was created based on the event. - - :param display: The display being used. - - :param event: The event. 
- """ - pass diff --git a/pype/vendor/pynput/_util/xorg_keysyms.py b/pype/vendor/pynput/_util/xorg_keysyms.py deleted file mode 100644 index f70e5b1893..0000000000 --- a/pype/vendor/pynput/_util/xorg_keysyms.py +++ /dev/null @@ -1,1715 +0,0 @@ -# coding: utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . - -# pylint: disable=C0111,C0302 - -SYMBOLS = { - '0': (0x0030, u'\u0030'), - '1': (0x0031, u'\u0031'), - '2': (0x0032, u'\u0032'), - '3': (0x0033, u'\u0033'), - '4': (0x0034, u'\u0034'), - '5': (0x0035, u'\u0035'), - '6': (0x0036, u'\u0036'), - '7': (0x0037, u'\u0037'), - '8': (0x0038, u'\u0038'), - '9': (0x0039, u'\u0039'), - 'A': (0x0041, u'\u0041'), - 'AE': (0x00c6, u'\u00C6'), - 'Aacute': (0x00c1, u'\u00C1'), - 'Abelowdot': (0x1001ea0, u'\u1EA0'), - 'Abreve': (0x01c3, u'\u0102'), - 'Abreveacute': (0x1001eae, u'\u1EAE'), - 'Abrevebelowdot': (0x1001eb6, u'\u1EB6'), - 'Abrevegrave': (0x1001eb0, u'\u1EB0'), - 'Abrevehook': (0x1001eb2, u'\u1EB2'), - 'Abrevetilde': (0x1001eb4, u'\u1EB4'), - 'Acircumflex': (0x00c2, u'\u00C2'), - 'Acircumflexacute': (0x1001ea4, u'\u1EA4'), - 'Acircumflexbelowdot': (0x1001eac, u'\u1EAC'), - 'Acircumflexgrave': (0x1001ea6, u'\u1EA6'), - 'Acircumflexhook': (0x1001ea8, u'\u1EA8'), - 'Acircumflextilde': (0x1001eaa, u'\u1EAA'), - 'Adiaeresis': (0x00c4, u'\u00C4'), - 'Agrave': (0x00c0, u'\u00C0'), - 'Ahook': (0x1001ea2, u'\u1EA2'), - 'Amacron': (0x03c0, u'\u0100'), - 'Aogonek': (0x01a1, u'\u0104'), - 'Arabic_0': (0x1000660, u'\u0660'), - 'Arabic_1': (0x1000661, u'\u0661'), - 'Arabic_2': (0x1000662, u'\u0662'), - 'Arabic_3': (0x1000663, u'\u0663'), - 'Arabic_4': (0x1000664, u'\u0664'), - 'Arabic_5': (0x1000665, u'\u0665'), - 'Arabic_6': (0x1000666, u'\u0666'), - 'Arabic_7': (0x1000667, u'\u0667'), - 'Arabic_8': (0x1000668, u'\u0668'), - 'Arabic_9': (0x1000669, u'\u0669'), - 'Arabic_ain': (0x05d9, u'\u0639'), - 'Arabic_alef': (0x05c7, u'\u0627'), - 'Arabic_alefmaksura': (0x05e9, u'\u0649'), - 'Arabic_beh': (0x05c8, u'\u0628'), - 'Arabic_comma': (0x05ac, u'\u060C'), - 'Arabic_dad': (0x05d6, u'\u0636'), - 'Arabic_dal': (0x05cf, u'\u062F'), - 'Arabic_damma': (0x05ef, u'\u064F'), - 'Arabic_dammatan': (0x05ec, u'\u064C'), - 'Arabic_ddal': (0x1000688, u'\u0688'), - 'Arabic_farsi_yeh': (0x10006cc, u'\u06CC'), - 'Arabic_fatha': (0x05ee, u'\u064E'), - 'Arabic_fathatan': (0x05eb, u'\u064B'), - 'Arabic_feh': (0x05e1, u'\u0641'), - 'Arabic_fullstop': (0x10006d4, u'\u06D4'), - 'Arabic_gaf': (0x10006af, u'\u06AF'), - 'Arabic_ghain': (0x05da, u'\u063A'), - 'Arabic_ha': (0x05e7, u'\u0647'), - 'Arabic_hah': (0x05cd, u'\u062D'), - 'Arabic_hamza': (0x05c1, u'\u0621'), - 'Arabic_hamza_above': (0x1000654, u'\u0654'), - 'Arabic_hamza_below': (0x1000655, u'\u0655'), - 'Arabic_hamzaonalef': (0x05c3, u'\u0623'), - 'Arabic_hamzaonwaw': (0x05c4, u'\u0624'), - 'Arabic_hamzaonyeh': (0x05c6, u'\u0626'), - 'Arabic_hamzaunderalef': (0x05c5, u'\u0625'), - 
'Arabic_heh_doachashmee': (0x10006be, u'\u06BE'), - 'Arabic_heh_goal': (0x10006c1, u'\u06C1'), - 'Arabic_jeem': (0x05cc, u'\u062C'), - 'Arabic_jeh': (0x1000698, u'\u0698'), - 'Arabic_kaf': (0x05e3, u'\u0643'), - 'Arabic_kasra': (0x05f0, u'\u0650'), - 'Arabic_kasratan': (0x05ed, u'\u064D'), - 'Arabic_keheh': (0x10006a9, u'\u06A9'), - 'Arabic_khah': (0x05ce, u'\u062E'), - 'Arabic_lam': (0x05e4, u'\u0644'), - 'Arabic_madda_above': (0x1000653, u'\u0653'), - 'Arabic_maddaonalef': (0x05c2, u'\u0622'), - 'Arabic_meem': (0x05e5, u'\u0645'), - 'Arabic_noon': (0x05e6, u'\u0646'), - 'Arabic_noon_ghunna': (0x10006ba, u'\u06BA'), - 'Arabic_peh': (0x100067e, u'\u067E'), - 'Arabic_percent': (0x100066a, u'\u066A'), - 'Arabic_qaf': (0x05e2, u'\u0642'), - 'Arabic_question_mark': (0x05bf, u'\u061F'), - 'Arabic_ra': (0x05d1, u'\u0631'), - 'Arabic_rreh': (0x1000691, u'\u0691'), - 'Arabic_sad': (0x05d5, u'\u0635'), - 'Arabic_seen': (0x05d3, u'\u0633'), - 'Arabic_semicolon': (0x05bb, u'\u061B'), - 'Arabic_shadda': (0x05f1, u'\u0651'), - 'Arabic_sheen': (0x05d4, u'\u0634'), - 'Arabic_sukun': (0x05f2, u'\u0652'), - 'Arabic_superscript_alef': (0x1000670, u'\u0670'), - 'Arabic_tah': (0x05d7, u'\u0637'), - 'Arabic_tatweel': (0x05e0, u'\u0640'), - 'Arabic_tcheh': (0x1000686, u'\u0686'), - 'Arabic_teh': (0x05ca, u'\u062A'), - 'Arabic_tehmarbuta': (0x05c9, u'\u0629'), - 'Arabic_thal': (0x05d0, u'\u0630'), - 'Arabic_theh': (0x05cb, u'\u062B'), - 'Arabic_tteh': (0x1000679, u'\u0679'), - 'Arabic_veh': (0x10006a4, u'\u06A4'), - 'Arabic_waw': (0x05e8, u'\u0648'), - 'Arabic_yeh': (0x05ea, u'\u064A'), - 'Arabic_yeh_baree': (0x10006d2, u'\u06D2'), - 'Arabic_zah': (0x05d8, u'\u0638'), - 'Arabic_zain': (0x05d2, u'\u0632'), - 'Aring': (0x00c5, u'\u00C5'), - 'Armenian_AT': (0x1000538, u'\u0538'), - 'Armenian_AYB': (0x1000531, u'\u0531'), - 'Armenian_BEN': (0x1000532, u'\u0532'), - 'Armenian_CHA': (0x1000549, u'\u0549'), - 'Armenian_DA': (0x1000534, u'\u0534'), - 'Armenian_DZA': (0x1000541, u'\u0541'), - 'Armenian_E': (0x1000537, u'\u0537'), - 'Armenian_FE': (0x1000556, u'\u0556'), - 'Armenian_GHAT': (0x1000542, u'\u0542'), - 'Armenian_GIM': (0x1000533, u'\u0533'), - 'Armenian_HI': (0x1000545, u'\u0545'), - 'Armenian_HO': (0x1000540, u'\u0540'), - 'Armenian_INI': (0x100053b, u'\u053B'), - 'Armenian_JE': (0x100054b, u'\u054B'), - 'Armenian_KE': (0x1000554, u'\u0554'), - 'Armenian_KEN': (0x100053f, u'\u053F'), - 'Armenian_KHE': (0x100053d, u'\u053D'), - 'Armenian_LYUN': (0x100053c, u'\u053C'), - 'Armenian_MEN': (0x1000544, u'\u0544'), - 'Armenian_NU': (0x1000546, u'\u0546'), - 'Armenian_O': (0x1000555, u'\u0555'), - 'Armenian_PE': (0x100054a, u'\u054A'), - 'Armenian_PYUR': (0x1000553, u'\u0553'), - 'Armenian_RA': (0x100054c, u'\u054C'), - 'Armenian_RE': (0x1000550, u'\u0550'), - 'Armenian_SE': (0x100054d, u'\u054D'), - 'Armenian_SHA': (0x1000547, u'\u0547'), - 'Armenian_TCHE': (0x1000543, u'\u0543'), - 'Armenian_TO': (0x1000539, u'\u0539'), - 'Armenian_TSA': (0x100053e, u'\u053E'), - 'Armenian_TSO': (0x1000551, u'\u0551'), - 'Armenian_TYUN': (0x100054f, u'\u054F'), - 'Armenian_VEV': (0x100054e, u'\u054E'), - 'Armenian_VO': (0x1000548, u'\u0548'), - 'Armenian_VYUN': (0x1000552, u'\u0552'), - 'Armenian_YECH': (0x1000535, u'\u0535'), - 'Armenian_ZA': (0x1000536, u'\u0536'), - 'Armenian_ZHE': (0x100053a, u'\u053A'), - 'Armenian_accent': (0x100055b, u'\u055B'), - 'Armenian_amanak': (0x100055c, u'\u055C'), - 'Armenian_apostrophe': (0x100055a, u'\u055A'), - 'Armenian_at': (0x1000568, u'\u0568'), - 'Armenian_ayb': (0x1000561, u'\u0561'), - 
'Armenian_ben': (0x1000562, u'\u0562'), - 'Armenian_but': (0x100055d, u'\u055D'), - 'Armenian_cha': (0x1000579, u'\u0579'), - 'Armenian_da': (0x1000564, u'\u0564'), - 'Armenian_dza': (0x1000571, u'\u0571'), - 'Armenian_e': (0x1000567, u'\u0567'), - 'Armenian_exclam': (0x100055c, u'\u055C'), - 'Armenian_fe': (0x1000586, u'\u0586'), - 'Armenian_full_stop': (0x1000589, u'\u0589'), - 'Armenian_ghat': (0x1000572, u'\u0572'), - 'Armenian_gim': (0x1000563, u'\u0563'), - 'Armenian_hi': (0x1000575, u'\u0575'), - 'Armenian_ho': (0x1000570, u'\u0570'), - 'Armenian_hyphen': (0x100058a, u'\u058A'), - 'Armenian_ini': (0x100056b, u'\u056B'), - 'Armenian_je': (0x100057b, u'\u057B'), - 'Armenian_ke': (0x1000584, u'\u0584'), - 'Armenian_ken': (0x100056f, u'\u056F'), - 'Armenian_khe': (0x100056d, u'\u056D'), - 'Armenian_ligature_ew': (0x1000587, u'\u0587'), - 'Armenian_lyun': (0x100056c, u'\u056C'), - 'Armenian_men': (0x1000574, u'\u0574'), - 'Armenian_nu': (0x1000576, u'\u0576'), - 'Armenian_o': (0x1000585, u'\u0585'), - 'Armenian_paruyk': (0x100055e, u'\u055E'), - 'Armenian_pe': (0x100057a, u'\u057A'), - 'Armenian_pyur': (0x1000583, u'\u0583'), - 'Armenian_question': (0x100055e, u'\u055E'), - 'Armenian_ra': (0x100057c, u'\u057C'), - 'Armenian_re': (0x1000580, u'\u0580'), - 'Armenian_se': (0x100057d, u'\u057D'), - 'Armenian_separation_mark': (0x100055d, u'\u055D'), - 'Armenian_sha': (0x1000577, u'\u0577'), - 'Armenian_shesht': (0x100055b, u'\u055B'), - 'Armenian_tche': (0x1000573, u'\u0573'), - 'Armenian_to': (0x1000569, u'\u0569'), - 'Armenian_tsa': (0x100056e, u'\u056E'), - 'Armenian_tso': (0x1000581, u'\u0581'), - 'Armenian_tyun': (0x100057f, u'\u057F'), - 'Armenian_verjaket': (0x1000589, u'\u0589'), - 'Armenian_vev': (0x100057e, u'\u057E'), - 'Armenian_vo': (0x1000578, u'\u0578'), - 'Armenian_vyun': (0x1000582, u'\u0582'), - 'Armenian_yech': (0x1000565, u'\u0565'), - 'Armenian_yentamna': (0x100058a, u'\u058A'), - 'Armenian_za': (0x1000566, u'\u0566'), - 'Armenian_zhe': (0x100056a, u'\u056A'), - 'Atilde': (0x00c3, u'\u00C3'), - 'B': (0x0042, u'\u0042'), - 'Babovedot': (0x1001e02, u'\u1E02'), - 'Byelorussian_SHORTU': (0x06be, u'\u040E'), - 'Byelorussian_shortu': (0x06ae, u'\u045E'), - 'C': (0x0043, u'\u0043'), - 'Cabovedot': (0x02c5, u'\u010A'), - 'Cacute': (0x01c6, u'\u0106'), - 'Ccaron': (0x01c8, u'\u010C'), - 'Ccedilla': (0x00c7, u'\u00C7'), - 'Ccircumflex': (0x02c6, u'\u0108'), - 'ColonSign': (0x10020a1, u'\u20A1'), - 'CruzeiroSign': (0x10020a2, u'\u20A2'), - 'Cyrillic_A': (0x06e1, u'\u0410'), - 'Cyrillic_BE': (0x06e2, u'\u0411'), - 'Cyrillic_CHE': (0x06fe, u'\u0427'), - 'Cyrillic_CHE_descender': (0x10004b6, u'\u04B6'), - 'Cyrillic_CHE_vertstroke': (0x10004b8, u'\u04B8'), - 'Cyrillic_DE': (0x06e4, u'\u0414'), - 'Cyrillic_DZHE': (0x06bf, u'\u040F'), - 'Cyrillic_E': (0x06fc, u'\u042D'), - 'Cyrillic_EF': (0x06e6, u'\u0424'), - 'Cyrillic_EL': (0x06ec, u'\u041B'), - 'Cyrillic_EM': (0x06ed, u'\u041C'), - 'Cyrillic_EN': (0x06ee, u'\u041D'), - 'Cyrillic_EN_descender': (0x10004a2, u'\u04A2'), - 'Cyrillic_ER': (0x06f2, u'\u0420'), - 'Cyrillic_ES': (0x06f3, u'\u0421'), - 'Cyrillic_GHE': (0x06e7, u'\u0413'), - 'Cyrillic_GHE_bar': (0x1000492, u'\u0492'), - 'Cyrillic_HA': (0x06e8, u'\u0425'), - 'Cyrillic_HARDSIGN': (0x06ff, u'\u042A'), - 'Cyrillic_HA_descender': (0x10004b2, u'\u04B2'), - 'Cyrillic_I': (0x06e9, u'\u0418'), - 'Cyrillic_IE': (0x06e5, u'\u0415'), - 'Cyrillic_IO': (0x06b3, u'\u0401'), - 'Cyrillic_I_macron': (0x10004e2, u'\u04E2'), - 'Cyrillic_JE': (0x06b8, u'\u0408'), - 'Cyrillic_KA': (0x06eb, 
u'\u041A'), - 'Cyrillic_KA_descender': (0x100049a, u'\u049A'), - 'Cyrillic_KA_vertstroke': (0x100049c, u'\u049C'), - 'Cyrillic_LJE': (0x06b9, u'\u0409'), - 'Cyrillic_NJE': (0x06ba, u'\u040A'), - 'Cyrillic_O': (0x06ef, u'\u041E'), - 'Cyrillic_O_bar': (0x10004e8, u'\u04E8'), - 'Cyrillic_PE': (0x06f0, u'\u041F'), - 'Cyrillic_SCHWA': (0x10004d8, u'\u04D8'), - 'Cyrillic_SHA': (0x06fb, u'\u0428'), - 'Cyrillic_SHCHA': (0x06fd, u'\u0429'), - 'Cyrillic_SHHA': (0x10004ba, u'\u04BA'), - 'Cyrillic_SHORTI': (0x06ea, u'\u0419'), - 'Cyrillic_SOFTSIGN': (0x06f8, u'\u042C'), - 'Cyrillic_TE': (0x06f4, u'\u0422'), - 'Cyrillic_TSE': (0x06e3, u'\u0426'), - 'Cyrillic_U': (0x06f5, u'\u0423'), - 'Cyrillic_U_macron': (0x10004ee, u'\u04EE'), - 'Cyrillic_U_straight': (0x10004ae, u'\u04AE'), - 'Cyrillic_U_straight_bar': (0x10004b0, u'\u04B0'), - 'Cyrillic_VE': (0x06f7, u'\u0412'), - 'Cyrillic_YA': (0x06f1, u'\u042F'), - 'Cyrillic_YERU': (0x06f9, u'\u042B'), - 'Cyrillic_YU': (0x06e0, u'\u042E'), - 'Cyrillic_ZE': (0x06fa, u'\u0417'), - 'Cyrillic_ZHE': (0x06f6, u'\u0416'), - 'Cyrillic_ZHE_descender': (0x1000496, u'\u0496'), - 'Cyrillic_a': (0x06c1, u'\u0430'), - 'Cyrillic_be': (0x06c2, u'\u0431'), - 'Cyrillic_che': (0x06de, u'\u0447'), - 'Cyrillic_che_descender': (0x10004b7, u'\u04B7'), - 'Cyrillic_che_vertstroke': (0x10004b9, u'\u04B9'), - 'Cyrillic_de': (0x06c4, u'\u0434'), - 'Cyrillic_dzhe': (0x06af, u'\u045F'), - 'Cyrillic_e': (0x06dc, u'\u044D'), - 'Cyrillic_ef': (0x06c6, u'\u0444'), - 'Cyrillic_el': (0x06cc, u'\u043B'), - 'Cyrillic_em': (0x06cd, u'\u043C'), - 'Cyrillic_en': (0x06ce, u'\u043D'), - 'Cyrillic_en_descender': (0x10004a3, u'\u04A3'), - 'Cyrillic_er': (0x06d2, u'\u0440'), - 'Cyrillic_es': (0x06d3, u'\u0441'), - 'Cyrillic_ghe': (0x06c7, u'\u0433'), - 'Cyrillic_ghe_bar': (0x1000493, u'\u0493'), - 'Cyrillic_ha': (0x06c8, u'\u0445'), - 'Cyrillic_ha_descender': (0x10004b3, u'\u04B3'), - 'Cyrillic_hardsign': (0x06df, u'\u044A'), - 'Cyrillic_i': (0x06c9, u'\u0438'), - 'Cyrillic_i_macron': (0x10004e3, u'\u04E3'), - 'Cyrillic_ie': (0x06c5, u'\u0435'), - 'Cyrillic_io': (0x06a3, u'\u0451'), - 'Cyrillic_je': (0x06a8, u'\u0458'), - 'Cyrillic_ka': (0x06cb, u'\u043A'), - 'Cyrillic_ka_descender': (0x100049b, u'\u049B'), - 'Cyrillic_ka_vertstroke': (0x100049d, u'\u049D'), - 'Cyrillic_lje': (0x06a9, u'\u0459'), - 'Cyrillic_nje': (0x06aa, u'\u045A'), - 'Cyrillic_o': (0x06cf, u'\u043E'), - 'Cyrillic_o_bar': (0x10004e9, u'\u04E9'), - 'Cyrillic_pe': (0x06d0, u'\u043F'), - 'Cyrillic_schwa': (0x10004d9, u'\u04D9'), - 'Cyrillic_sha': (0x06db, u'\u0448'), - 'Cyrillic_shcha': (0x06dd, u'\u0449'), - 'Cyrillic_shha': (0x10004bb, u'\u04BB'), - 'Cyrillic_shorti': (0x06ca, u'\u0439'), - 'Cyrillic_softsign': (0x06d8, u'\u044C'), - 'Cyrillic_te': (0x06d4, u'\u0442'), - 'Cyrillic_tse': (0x06c3, u'\u0446'), - 'Cyrillic_u': (0x06d5, u'\u0443'), - 'Cyrillic_u_macron': (0x10004ef, u'\u04EF'), - 'Cyrillic_u_straight': (0x10004af, u'\u04AF'), - 'Cyrillic_u_straight_bar': (0x10004b1, u'\u04B1'), - 'Cyrillic_ve': (0x06d7, u'\u0432'), - 'Cyrillic_ya': (0x06d1, u'\u044F'), - 'Cyrillic_yeru': (0x06d9, u'\u044B'), - 'Cyrillic_yu': (0x06c0, u'\u044E'), - 'Cyrillic_ze': (0x06da, u'\u0437'), - 'Cyrillic_zhe': (0x06d6, u'\u0436'), - 'Cyrillic_zhe_descender': (0x1000497, u'\u0497'), - 'D': (0x0044, u'\u0044'), - 'Dabovedot': (0x1001e0a, u'\u1E0A'), - 'Dcaron': (0x01cf, u'\u010E'), - 'DongSign': (0x10020ab, u'\u20AB'), - 'Dstroke': (0x01d0, u'\u0110'), - 'E': (0x0045, u'\u0045'), - 'ENG': (0x03bd, u'\u014A'), - 'ETH': (0x00d0, u'\u00D0'), - 'EZH': 
(0x10001b7, u'\u01B7'), - 'Eabovedot': (0x03cc, u'\u0116'), - 'Eacute': (0x00c9, u'\u00C9'), - 'Ebelowdot': (0x1001eb8, u'\u1EB8'), - 'Ecaron': (0x01cc, u'\u011A'), - 'Ecircumflex': (0x00ca, u'\u00CA'), - 'Ecircumflexacute': (0x1001ebe, u'\u1EBE'), - 'Ecircumflexbelowdot': (0x1001ec6, u'\u1EC6'), - 'Ecircumflexgrave': (0x1001ec0, u'\u1EC0'), - 'Ecircumflexhook': (0x1001ec2, u'\u1EC2'), - 'Ecircumflextilde': (0x1001ec4, u'\u1EC4'), - 'EcuSign': (0x10020a0, u'\u20A0'), - 'Ediaeresis': (0x00cb, u'\u00CB'), - 'Egrave': (0x00c8, u'\u00C8'), - 'Ehook': (0x1001eba, u'\u1EBA'), - 'Emacron': (0x03aa, u'\u0112'), - 'Eogonek': (0x01ca, u'\u0118'), - 'Etilde': (0x1001ebc, u'\u1EBC'), - 'EuroSign': (0x20ac, u'\u20AC'), - 'F': (0x0046, u'\u0046'), - 'FFrancSign': (0x10020a3, u'\u20A3'), - 'Fabovedot': (0x1001e1e, u'\u1E1E'), - 'Farsi_0': (0x10006f0, u'\u06F0'), - 'Farsi_1': (0x10006f1, u'\u06F1'), - 'Farsi_2': (0x10006f2, u'\u06F2'), - 'Farsi_3': (0x10006f3, u'\u06F3'), - 'Farsi_4': (0x10006f4, u'\u06F4'), - 'Farsi_5': (0x10006f5, u'\u06F5'), - 'Farsi_6': (0x10006f6, u'\u06F6'), - 'Farsi_7': (0x10006f7, u'\u06F7'), - 'Farsi_8': (0x10006f8, u'\u06F8'), - 'Farsi_9': (0x10006f9, u'\u06F9'), - 'Farsi_yeh': (0x10006cc, u'\u06CC'), - 'G': (0x0047, u'\u0047'), - 'Gabovedot': (0x02d5, u'\u0120'), - 'Gbreve': (0x02ab, u'\u011E'), - 'Gcaron': (0x10001e6, u'\u01E6'), - 'Gcedilla': (0x03ab, u'\u0122'), - 'Gcircumflex': (0x02d8, u'\u011C'), - 'Georgian_an': (0x10010d0, u'\u10D0'), - 'Georgian_ban': (0x10010d1, u'\u10D1'), - 'Georgian_can': (0x10010ea, u'\u10EA'), - 'Georgian_char': (0x10010ed, u'\u10ED'), - 'Georgian_chin': (0x10010e9, u'\u10E9'), - 'Georgian_cil': (0x10010ec, u'\u10EC'), - 'Georgian_don': (0x10010d3, u'\u10D3'), - 'Georgian_en': (0x10010d4, u'\u10D4'), - 'Georgian_fi': (0x10010f6, u'\u10F6'), - 'Georgian_gan': (0x10010d2, u'\u10D2'), - 'Georgian_ghan': (0x10010e6, u'\u10E6'), - 'Georgian_hae': (0x10010f0, u'\u10F0'), - 'Georgian_har': (0x10010f4, u'\u10F4'), - 'Georgian_he': (0x10010f1, u'\u10F1'), - 'Georgian_hie': (0x10010f2, u'\u10F2'), - 'Georgian_hoe': (0x10010f5, u'\u10F5'), - 'Georgian_in': (0x10010d8, u'\u10D8'), - 'Georgian_jhan': (0x10010ef, u'\u10EF'), - 'Georgian_jil': (0x10010eb, u'\u10EB'), - 'Georgian_kan': (0x10010d9, u'\u10D9'), - 'Georgian_khar': (0x10010e5, u'\u10E5'), - 'Georgian_las': (0x10010da, u'\u10DA'), - 'Georgian_man': (0x10010db, u'\u10DB'), - 'Georgian_nar': (0x10010dc, u'\u10DC'), - 'Georgian_on': (0x10010dd, u'\u10DD'), - 'Georgian_par': (0x10010de, u'\u10DE'), - 'Georgian_phar': (0x10010e4, u'\u10E4'), - 'Georgian_qar': (0x10010e7, u'\u10E7'), - 'Georgian_rae': (0x10010e0, u'\u10E0'), - 'Georgian_san': (0x10010e1, u'\u10E1'), - 'Georgian_shin': (0x10010e8, u'\u10E8'), - 'Georgian_tan': (0x10010d7, u'\u10D7'), - 'Georgian_tar': (0x10010e2, u'\u10E2'), - 'Georgian_un': (0x10010e3, u'\u10E3'), - 'Georgian_vin': (0x10010d5, u'\u10D5'), - 'Georgian_we': (0x10010f3, u'\u10F3'), - 'Georgian_xan': (0x10010ee, u'\u10EE'), - 'Georgian_zen': (0x10010d6, u'\u10D6'), - 'Georgian_zhar': (0x10010df, u'\u10DF'), - 'Greek_ALPHA': (0x07c1, u'\u0391'), - 'Greek_ALPHAaccent': (0x07a1, u'\u0386'), - 'Greek_BETA': (0x07c2, u'\u0392'), - 'Greek_CHI': (0x07d7, u'\u03A7'), - 'Greek_DELTA': (0x07c4, u'\u0394'), - 'Greek_EPSILON': (0x07c5, u'\u0395'), - 'Greek_EPSILONaccent': (0x07a2, u'\u0388'), - 'Greek_ETA': (0x07c7, u'\u0397'), - 'Greek_ETAaccent': (0x07a3, u'\u0389'), - 'Greek_GAMMA': (0x07c3, u'\u0393'), - 'Greek_IOTA': (0x07c9, u'\u0399'), - 'Greek_IOTAaccent': (0x07a4, u'\u038A'), - 
'Greek_IOTAdieresis': (0x07a5, u'\u03AA'), - 'Greek_KAPPA': (0x07ca, u'\u039A'), - 'Greek_LAMBDA': (0x07cb, u'\u039B'), - 'Greek_LAMDA': (0x07cb, u'\u039B'), - 'Greek_MU': (0x07cc, u'\u039C'), - 'Greek_NU': (0x07cd, u'\u039D'), - 'Greek_OMEGA': (0x07d9, u'\u03A9'), - 'Greek_OMEGAaccent': (0x07ab, u'\u038F'), - 'Greek_OMICRON': (0x07cf, u'\u039F'), - 'Greek_OMICRONaccent': (0x07a7, u'\u038C'), - 'Greek_PHI': (0x07d6, u'\u03A6'), - 'Greek_PI': (0x07d0, u'\u03A0'), - 'Greek_PSI': (0x07d8, u'\u03A8'), - 'Greek_RHO': (0x07d1, u'\u03A1'), - 'Greek_SIGMA': (0x07d2, u'\u03A3'), - 'Greek_TAU': (0x07d4, u'\u03A4'), - 'Greek_THETA': (0x07c8, u'\u0398'), - 'Greek_UPSILON': (0x07d5, u'\u03A5'), - 'Greek_UPSILONaccent': (0x07a8, u'\u038E'), - 'Greek_UPSILONdieresis': (0x07a9, u'\u03AB'), - 'Greek_XI': (0x07ce, u'\u039E'), - 'Greek_ZETA': (0x07c6, u'\u0396'), - 'Greek_accentdieresis': (0x07ae, u'\u0385'), - 'Greek_alpha': (0x07e1, u'\u03B1'), - 'Greek_alphaaccent': (0x07b1, u'\u03AC'), - 'Greek_beta': (0x07e2, u'\u03B2'), - 'Greek_chi': (0x07f7, u'\u03C7'), - 'Greek_delta': (0x07e4, u'\u03B4'), - 'Greek_epsilon': (0x07e5, u'\u03B5'), - 'Greek_epsilonaccent': (0x07b2, u'\u03AD'), - 'Greek_eta': (0x07e7, u'\u03B7'), - 'Greek_etaaccent': (0x07b3, u'\u03AE'), - 'Greek_finalsmallsigma': (0x07f3, u'\u03C2'), - 'Greek_gamma': (0x07e3, u'\u03B3'), - 'Greek_horizbar': (0x07af, u'\u2015'), - 'Greek_iota': (0x07e9, u'\u03B9'), - 'Greek_iotaaccent': (0x07b4, u'\u03AF'), - 'Greek_iotaaccentdieresis': (0x07b6, u'\u0390'), - 'Greek_iotadieresis': (0x07b5, u'\u03CA'), - 'Greek_kappa': (0x07ea, u'\u03BA'), - 'Greek_lambda': (0x07eb, u'\u03BB'), - 'Greek_lamda': (0x07eb, u'\u03BB'), - 'Greek_mu': (0x07ec, u'\u03BC'), - 'Greek_nu': (0x07ed, u'\u03BD'), - 'Greek_omega': (0x07f9, u'\u03C9'), - 'Greek_omegaaccent': (0x07bb, u'\u03CE'), - 'Greek_omicron': (0x07ef, u'\u03BF'), - 'Greek_omicronaccent': (0x07b7, u'\u03CC'), - 'Greek_phi': (0x07f6, u'\u03C6'), - 'Greek_pi': (0x07f0, u'\u03C0'), - 'Greek_psi': (0x07f8, u'\u03C8'), - 'Greek_rho': (0x07f1, u'\u03C1'), - 'Greek_sigma': (0x07f2, u'\u03C3'), - 'Greek_tau': (0x07f4, u'\u03C4'), - 'Greek_theta': (0x07e8, u'\u03B8'), - 'Greek_upsilon': (0x07f5, u'\u03C5'), - 'Greek_upsilonaccent': (0x07b8, u'\u03CD'), - 'Greek_upsilonaccentdieresis': (0x07ba, u'\u03B0'), - 'Greek_upsilondieresis': (0x07b9, u'\u03CB'), - 'Greek_xi': (0x07ee, u'\u03BE'), - 'Greek_zeta': (0x07e6, u'\u03B6'), - 'H': (0x0048, u'\u0048'), - 'Hcircumflex': (0x02a6, u'\u0124'), - 'Hstroke': (0x02a1, u'\u0126'), - 'I': (0x0049, u'\u0049'), - 'Iabovedot': (0x02a9, u'\u0130'), - 'Iacute': (0x00cd, u'\u00CD'), - 'Ibelowdot': (0x1001eca, u'\u1ECA'), - 'Ibreve': (0x100012c, u'\u012C'), - 'Icircumflex': (0x00ce, u'\u00CE'), - 'Idiaeresis': (0x00cf, u'\u00CF'), - 'Igrave': (0x00cc, u'\u00CC'), - 'Ihook': (0x1001ec8, u'\u1EC8'), - 'Imacron': (0x03cf, u'\u012A'), - 'Iogonek': (0x03c7, u'\u012E'), - 'Itilde': (0x03a5, u'\u0128'), - 'J': (0x004a, u'\u004A'), - 'Jcircumflex': (0x02ac, u'\u0134'), - 'K': (0x004b, u'\u004B'), - 'KP_0': (0xffb0, None), - 'KP_1': (0xffb1, None), - 'KP_2': (0xffb2, None), - 'KP_3': (0xffb3, None), - 'KP_4': (0xffb4, None), - 'KP_5': (0xffb5, None), - 'KP_6': (0xffb6, None), - 'KP_7': (0xffb7, None), - 'KP_8': (0xffb8, None), - 'KP_9': (0xffb9, None), - 'KP_Add': (0xffab, None), - 'KP_Begin': (0xff9d, None), - 'KP_Decimal': (0xffae, None), - 'KP_Delete': (0xff9f, None), - 'KP_Divide': (0xffaf, None), - 'KP_Down': (0xff99, None), - 'KP_End': (0xff9c, None), - 'KP_Enter': (0xff8d, None), - 'KP_Equal': 
(0xffbd, None), - 'KP_F1': (0xff91, None), - 'KP_F2': (0xff92, None), - 'KP_F3': (0xff93, None), - 'KP_F4': (0xff94, None), - 'KP_Home': (0xff95, None), - 'KP_Insert': (0xff9e, None), - 'KP_Left': (0xff96, None), - 'KP_Multiply': (0xffaa, None), - 'KP_Next': (0xff9b, None), - 'KP_Page_Down': (0xff9b, None), - 'KP_Page_Up': (0xff9a, None), - 'KP_Prior': (0xff9a, None), - 'KP_Right': (0xff98, None), - 'KP_Separator': (0xffac, None), - 'KP_Space': (0xff80, None), - 'KP_Subtract': (0xffad, None), - 'KP_Tab': (0xff89, None), - 'KP_Up': (0xff97, None), - 'Kcedilla': (0x03d3, u'\u0136'), - 'L': (0x004c, u'\u004C'), - 'Lacute': (0x01c5, u'\u0139'), - 'Lbelowdot': (0x1001e36, u'\u1E36'), - 'Lcaron': (0x01a5, u'\u013D'), - 'Lcedilla': (0x03a6, u'\u013B'), - 'LiraSign': (0x10020a4, u'\u20A4'), - 'Lstroke': (0x01a3, u'\u0141'), - 'M': (0x004d, u'\u004D'), - 'Mabovedot': (0x1001e40, u'\u1E40'), - 'Macedonia_DSE': (0x06b5, u'\u0405'), - 'Macedonia_GJE': (0x06b2, u'\u0403'), - 'Macedonia_KJE': (0x06bc, u'\u040C'), - 'Macedonia_dse': (0x06a5, u'\u0455'), - 'Macedonia_gje': (0x06a2, u'\u0453'), - 'Macedonia_kje': (0x06ac, u'\u045C'), - 'MillSign': (0x10020a5, u'\u20A5'), - 'N': (0x004e, u'\u004E'), - 'Nacute': (0x01d1, u'\u0143'), - 'NairaSign': (0x10020a6, u'\u20A6'), - 'Ncaron': (0x01d2, u'\u0147'), - 'Ncedilla': (0x03d1, u'\u0145'), - 'NewSheqelSign': (0x10020aa, u'\u20AA'), - 'Ntilde': (0x00d1, u'\u00D1'), - 'O': (0x004f, u'\u004F'), - 'OE': (0x13bc, u'\u0152'), - 'Oacute': (0x00d3, u'\u00D3'), - 'Obarred': (0x100019f, u'\u019F'), - 'Obelowdot': (0x1001ecc, u'\u1ECC'), - 'Ocaron': (0x10001d1, u'\u01D2'), - 'Ocircumflex': (0x00d4, u'\u00D4'), - 'Ocircumflexacute': (0x1001ed0, u'\u1ED0'), - 'Ocircumflexbelowdot': (0x1001ed8, u'\u1ED8'), - 'Ocircumflexgrave': (0x1001ed2, u'\u1ED2'), - 'Ocircumflexhook': (0x1001ed4, u'\u1ED4'), - 'Ocircumflextilde': (0x1001ed6, u'\u1ED6'), - 'Odiaeresis': (0x00d6, u'\u00D6'), - 'Odoubleacute': (0x01d5, u'\u0150'), - 'Ograve': (0x00d2, u'\u00D2'), - 'Ohook': (0x1001ece, u'\u1ECE'), - 'Ohorn': (0x10001a0, u'\u01A0'), - 'Ohornacute': (0x1001eda, u'\u1EDA'), - 'Ohornbelowdot': (0x1001ee2, u'\u1EE2'), - 'Ohorngrave': (0x1001edc, u'\u1EDC'), - 'Ohornhook': (0x1001ede, u'\u1EDE'), - 'Ohorntilde': (0x1001ee0, u'\u1EE0'), - 'Omacron': (0x03d2, u'\u014C'), - 'Ooblique': (0x00d8, u'\u00D8'), - 'Oslash': (0x00d8, u'\u00D8'), - 'Otilde': (0x00d5, u'\u00D5'), - 'P': (0x0050, u'\u0050'), - 'Pabovedot': (0x1001e56, u'\u1E56'), - 'PesetaSign': (0x10020a7, u'\u20A7'), - 'Q': (0x0051, u'\u0051'), - 'R': (0x0052, u'\u0052'), - 'Racute': (0x01c0, u'\u0154'), - 'Rcaron': (0x01d8, u'\u0158'), - 'Rcedilla': (0x03a3, u'\u0156'), - 'RupeeSign': (0x10020a8, u'\u20A8'), - 'S': (0x0053, u'\u0053'), - 'SCHWA': (0x100018f, u'\u018F'), - 'Sabovedot': (0x1001e60, u'\u1E60'), - 'Sacute': (0x01a6, u'\u015A'), - 'Scaron': (0x01a9, u'\u0160'), - 'Scedilla': (0x01aa, u'\u015E'), - 'Scircumflex': (0x02de, u'\u015C'), - 'Serbian_DJE': (0x06b1, u'\u0402'), - 'Serbian_TSHE': (0x06bb, u'\u040B'), - 'Serbian_dje': (0x06a1, u'\u0452'), - 'Serbian_tshe': (0x06ab, u'\u045B'), - 'Sinh_a': (0x1000d85, u'\u0D85'), - 'Sinh_aa': (0x1000d86, u'\u0D86'), - 'Sinh_aa2': (0x1000dcf, u'\u0DCF'), - 'Sinh_ae': (0x1000d87, u'\u0D87'), - 'Sinh_ae2': (0x1000dd0, u'\u0DD0'), - 'Sinh_aee': (0x1000d88, u'\u0D88'), - 'Sinh_aee2': (0x1000dd1, u'\u0DD1'), - 'Sinh_ai': (0x1000d93, u'\u0D93'), - 'Sinh_ai2': (0x1000ddb, u'\u0DDB'), - 'Sinh_al': (0x1000dca, u'\u0DCA'), - 'Sinh_au': (0x1000d96, u'\u0D96'), - 'Sinh_au2': (0x1000dde, u'\u0DDE'), - 
'Sinh_ba': (0x1000db6, u'\u0DB6'), - 'Sinh_bha': (0x1000db7, u'\u0DB7'), - 'Sinh_ca': (0x1000da0, u'\u0DA0'), - 'Sinh_cha': (0x1000da1, u'\u0DA1'), - 'Sinh_dda': (0x1000da9, u'\u0DA9'), - 'Sinh_ddha': (0x1000daa, u'\u0DAA'), - 'Sinh_dha': (0x1000daf, u'\u0DAF'), - 'Sinh_dhha': (0x1000db0, u'\u0DB0'), - 'Sinh_e': (0x1000d91, u'\u0D91'), - 'Sinh_e2': (0x1000dd9, u'\u0DD9'), - 'Sinh_ee': (0x1000d92, u'\u0D92'), - 'Sinh_ee2': (0x1000dda, u'\u0DDA'), - 'Sinh_fa': (0x1000dc6, u'\u0DC6'), - 'Sinh_ga': (0x1000d9c, u'\u0D9C'), - 'Sinh_gha': (0x1000d9d, u'\u0D9D'), - 'Sinh_h2': (0x1000d83, u'\u0D83'), - 'Sinh_ha': (0x1000dc4, u'\u0DC4'), - 'Sinh_i': (0x1000d89, u'\u0D89'), - 'Sinh_i2': (0x1000dd2, u'\u0DD2'), - 'Sinh_ii': (0x1000d8a, u'\u0D8A'), - 'Sinh_ii2': (0x1000dd3, u'\u0DD3'), - 'Sinh_ja': (0x1000da2, u'\u0DA2'), - 'Sinh_jha': (0x1000da3, u'\u0DA3'), - 'Sinh_jnya': (0x1000da5, u'\u0DA5'), - 'Sinh_ka': (0x1000d9a, u'\u0D9A'), - 'Sinh_kha': (0x1000d9b, u'\u0D9B'), - 'Sinh_kunddaliya': (0x1000df4, u'\u0DF4'), - 'Sinh_la': (0x1000dbd, u'\u0DBD'), - 'Sinh_lla': (0x1000dc5, u'\u0DC5'), - 'Sinh_lu': (0x1000d8f, u'\u0D8F'), - 'Sinh_lu2': (0x1000ddf, u'\u0DDF'), - 'Sinh_luu': (0x1000d90, u'\u0D90'), - 'Sinh_luu2': (0x1000df3, u'\u0DF3'), - 'Sinh_ma': (0x1000db8, u'\u0DB8'), - 'Sinh_mba': (0x1000db9, u'\u0DB9'), - 'Sinh_na': (0x1000db1, u'\u0DB1'), - 'Sinh_ndda': (0x1000dac, u'\u0DAC'), - 'Sinh_ndha': (0x1000db3, u'\u0DB3'), - 'Sinh_ng': (0x1000d82, u'\u0D82'), - 'Sinh_ng2': (0x1000d9e, u'\u0D9E'), - 'Sinh_nga': (0x1000d9f, u'\u0D9F'), - 'Sinh_nja': (0x1000da6, u'\u0DA6'), - 'Sinh_nna': (0x1000dab, u'\u0DAB'), - 'Sinh_nya': (0x1000da4, u'\u0DA4'), - 'Sinh_o': (0x1000d94, u'\u0D94'), - 'Sinh_o2': (0x1000ddc, u'\u0DDC'), - 'Sinh_oo': (0x1000d95, u'\u0D95'), - 'Sinh_oo2': (0x1000ddd, u'\u0DDD'), - 'Sinh_pa': (0x1000db4, u'\u0DB4'), - 'Sinh_pha': (0x1000db5, u'\u0DB5'), - 'Sinh_ra': (0x1000dbb, u'\u0DBB'), - 'Sinh_ri': (0x1000d8d, u'\u0D8D'), - 'Sinh_rii': (0x1000d8e, u'\u0D8E'), - 'Sinh_ru2': (0x1000dd8, u'\u0DD8'), - 'Sinh_ruu2': (0x1000df2, u'\u0DF2'), - 'Sinh_sa': (0x1000dc3, u'\u0DC3'), - 'Sinh_sha': (0x1000dc1, u'\u0DC1'), - 'Sinh_ssha': (0x1000dc2, u'\u0DC2'), - 'Sinh_tha': (0x1000dad, u'\u0DAD'), - 'Sinh_thha': (0x1000dae, u'\u0DAE'), - 'Sinh_tta': (0x1000da7, u'\u0DA7'), - 'Sinh_ttha': (0x1000da8, u'\u0DA8'), - 'Sinh_u': (0x1000d8b, u'\u0D8B'), - 'Sinh_u2': (0x1000dd4, u'\u0DD4'), - 'Sinh_uu': (0x1000d8c, u'\u0D8C'), - 'Sinh_uu2': (0x1000dd6, u'\u0DD6'), - 'Sinh_va': (0x1000dc0, u'\u0DC0'), - 'Sinh_ya': (0x1000dba, u'\u0DBA'), - 'T': (0x0054, u'\u0054'), - 'THORN': (0x00de, u'\u00DE'), - 'Tabovedot': (0x1001e6a, u'\u1E6A'), - 'Tcaron': (0x01ab, u'\u0164'), - 'Tcedilla': (0x01de, u'\u0162'), - 'Thai_baht': (0x0ddf, u'\u0E3F'), - 'Thai_bobaimai': (0x0dba, u'\u0E1A'), - 'Thai_chochan': (0x0da8, u'\u0E08'), - 'Thai_chochang': (0x0daa, u'\u0E0A'), - 'Thai_choching': (0x0da9, u'\u0E09'), - 'Thai_chochoe': (0x0dac, u'\u0E0C'), - 'Thai_dochada': (0x0dae, u'\u0E0E'), - 'Thai_dodek': (0x0db4, u'\u0E14'), - 'Thai_fofa': (0x0dbd, u'\u0E1D'), - 'Thai_fofan': (0x0dbf, u'\u0E1F'), - 'Thai_hohip': (0x0dcb, u'\u0E2B'), - 'Thai_honokhuk': (0x0dce, u'\u0E2E'), - 'Thai_khokhai': (0x0da2, u'\u0E02'), - 'Thai_khokhon': (0x0da5, u'\u0E05'), - 'Thai_khokhuat': (0x0da3, u'\u0E03'), - 'Thai_khokhwai': (0x0da4, u'\u0E04'), - 'Thai_khorakhang': (0x0da6, u'\u0E06'), - 'Thai_kokai': (0x0da1, u'\u0E01'), - 'Thai_lakkhangyao': (0x0de5, u'\u0E45'), - 'Thai_lekchet': (0x0df7, u'\u0E57'), - 'Thai_lekha': (0x0df5, u'\u0E55'), - 
'Thai_lekhok': (0x0df6, u'\u0E56'), - 'Thai_lekkao': (0x0df9, u'\u0E59'), - 'Thai_leknung': (0x0df1, u'\u0E51'), - 'Thai_lekpaet': (0x0df8, u'\u0E58'), - 'Thai_leksam': (0x0df3, u'\u0E53'), - 'Thai_leksi': (0x0df4, u'\u0E54'), - 'Thai_leksong': (0x0df2, u'\u0E52'), - 'Thai_leksun': (0x0df0, u'\u0E50'), - 'Thai_lochula': (0x0dcc, u'\u0E2C'), - 'Thai_loling': (0x0dc5, u'\u0E25'), - 'Thai_lu': (0x0dc6, u'\u0E26'), - 'Thai_maichattawa': (0x0deb, u'\u0E4B'), - 'Thai_maiek': (0x0de8, u'\u0E48'), - 'Thai_maihanakat': (0x0dd1, u'\u0E31'), - 'Thai_maitaikhu': (0x0de7, u'\u0E47'), - 'Thai_maitho': (0x0de9, u'\u0E49'), - 'Thai_maitri': (0x0dea, u'\u0E4A'), - 'Thai_maiyamok': (0x0de6, u'\u0E46'), - 'Thai_moma': (0x0dc1, u'\u0E21'), - 'Thai_ngongu': (0x0da7, u'\u0E07'), - 'Thai_nikhahit': (0x0ded, u'\u0E4D'), - 'Thai_nonen': (0x0db3, u'\u0E13'), - 'Thai_nonu': (0x0db9, u'\u0E19'), - 'Thai_oang': (0x0dcd, u'\u0E2D'), - 'Thai_paiyannoi': (0x0dcf, u'\u0E2F'), - 'Thai_phinthu': (0x0dda, u'\u0E3A'), - 'Thai_phophan': (0x0dbe, u'\u0E1E'), - 'Thai_phophung': (0x0dbc, u'\u0E1C'), - 'Thai_phosamphao': (0x0dc0, u'\u0E20'), - 'Thai_popla': (0x0dbb, u'\u0E1B'), - 'Thai_rorua': (0x0dc3, u'\u0E23'), - 'Thai_ru': (0x0dc4, u'\u0E24'), - 'Thai_saraa': (0x0dd0, u'\u0E30'), - 'Thai_saraaa': (0x0dd2, u'\u0E32'), - 'Thai_saraae': (0x0de1, u'\u0E41'), - 'Thai_saraaimaimalai': (0x0de4, u'\u0E44'), - 'Thai_saraaimaimuan': (0x0de3, u'\u0E43'), - 'Thai_saraam': (0x0dd3, u'\u0E33'), - 'Thai_sarae': (0x0de0, u'\u0E40'), - 'Thai_sarai': (0x0dd4, u'\u0E34'), - 'Thai_saraii': (0x0dd5, u'\u0E35'), - 'Thai_sarao': (0x0de2, u'\u0E42'), - 'Thai_sarau': (0x0dd8, u'\u0E38'), - 'Thai_saraue': (0x0dd6, u'\u0E36'), - 'Thai_sarauee': (0x0dd7, u'\u0E37'), - 'Thai_sarauu': (0x0dd9, u'\u0E39'), - 'Thai_sorusi': (0x0dc9, u'\u0E29'), - 'Thai_sosala': (0x0dc8, u'\u0E28'), - 'Thai_soso': (0x0dab, u'\u0E0B'), - 'Thai_sosua': (0x0dca, u'\u0E2A'), - 'Thai_thanthakhat': (0x0dec, u'\u0E4C'), - 'Thai_thonangmontho': (0x0db1, u'\u0E11'), - 'Thai_thophuthao': (0x0db2, u'\u0E12'), - 'Thai_thothahan': (0x0db7, u'\u0E17'), - 'Thai_thothan': (0x0db0, u'\u0E10'), - 'Thai_thothong': (0x0db8, u'\u0E18'), - 'Thai_thothung': (0x0db6, u'\u0E16'), - 'Thai_topatak': (0x0daf, u'\u0E0F'), - 'Thai_totao': (0x0db5, u'\u0E15'), - 'Thai_wowaen': (0x0dc7, u'\u0E27'), - 'Thai_yoyak': (0x0dc2, u'\u0E22'), - 'Thai_yoying': (0x0dad, u'\u0E0D'), - 'Tslash': (0x03ac, u'\u0166'), - 'U': (0x0055, u'\u0055'), - 'Uacute': (0x00da, u'\u00DA'), - 'Ubelowdot': (0x1001ee4, u'\u1EE4'), - 'Ubreve': (0x02dd, u'\u016C'), - 'Ucircumflex': (0x00db, u'\u00DB'), - 'Udiaeresis': (0x00dc, u'\u00DC'), - 'Udoubleacute': (0x01db, u'\u0170'), - 'Ugrave': (0x00d9, u'\u00D9'), - 'Uhook': (0x1001ee6, u'\u1EE6'), - 'Uhorn': (0x10001af, u'\u01AF'), - 'Uhornacute': (0x1001ee8, u'\u1EE8'), - 'Uhornbelowdot': (0x1001ef0, u'\u1EF0'), - 'Uhorngrave': (0x1001eea, u'\u1EEA'), - 'Uhornhook': (0x1001eec, u'\u1EEC'), - 'Uhorntilde': (0x1001eee, u'\u1EEE'), - 'Ukrainian_GHE_WITH_UPTURN': (0x06bd, u'\u0490'), - 'Ukrainian_I': (0x06b6, u'\u0406'), - 'Ukrainian_IE': (0x06b4, u'\u0404'), - 'Ukrainian_YI': (0x06b7, u'\u0407'), - 'Ukrainian_ghe_with_upturn': (0x06ad, u'\u0491'), - 'Ukrainian_i': (0x06a6, u'\u0456'), - 'Ukrainian_ie': (0x06a4, u'\u0454'), - 'Ukrainian_yi': (0x06a7, u'\u0457'), - 'Umacron': (0x03de, u'\u016A'), - 'Uogonek': (0x03d9, u'\u0172'), - 'Uring': (0x01d9, u'\u016E'), - 'Utilde': (0x03dd, u'\u0168'), - 'V': (0x0056, u'\u0056'), - 'W': (0x0057, u'\u0057'), - 'Wacute': (0x1001e82, u'\u1E82'), - 
'Wcircumflex': (0x1000174, u'\u0174'), - 'Wdiaeresis': (0x1001e84, u'\u1E84'), - 'Wgrave': (0x1001e80, u'\u1E80'), - 'WonSign': (0x10020a9, u'\u20A9'), - 'X': (0x0058, u'\u0058'), - 'Xabovedot': (0x1001e8a, u'\u1E8A'), - 'Y': (0x0059, u'\u0059'), - 'Yacute': (0x00dd, u'\u00DD'), - 'Ybelowdot': (0x1001ef4, u'\u1EF4'), - 'Ycircumflex': (0x1000176, u'\u0176'), - 'Ydiaeresis': (0x13be, u'\u0178'), - 'Ygrave': (0x1001ef2, u'\u1EF2'), - 'Yhook': (0x1001ef6, u'\u1EF6'), - 'Ytilde': (0x1001ef8, u'\u1EF8'), - 'Z': (0x005a, u'\u005A'), - 'Zabovedot': (0x01af, u'\u017B'), - 'Zacute': (0x01ac, u'\u0179'), - 'Zcaron': (0x01ae, u'\u017D'), - 'Zstroke': (0x10001b5, u'\u01B5'), - 'a': (0x0061, u'\u0061'), - 'aacute': (0x00e1, u'\u00E1'), - 'abelowdot': (0x1001ea1, u'\u1EA1'), - 'abovedot': (0x01ff, u'\u02D9'), - 'abreve': (0x01e3, u'\u0103'), - 'abreveacute': (0x1001eaf, u'\u1EAF'), - 'abrevebelowdot': (0x1001eb7, u'\u1EB7'), - 'abrevegrave': (0x1001eb1, u'\u1EB1'), - 'abrevehook': (0x1001eb3, u'\u1EB3'), - 'abrevetilde': (0x1001eb5, u'\u1EB5'), - 'acircumflex': (0x00e2, u'\u00E2'), - 'acircumflexacute': (0x1001ea5, u'\u1EA5'), - 'acircumflexbelowdot': (0x1001ead, u'\u1EAD'), - 'acircumflexgrave': (0x1001ea7, u'\u1EA7'), - 'acircumflexhook': (0x1001ea9, u'\u1EA9'), - 'acircumflextilde': (0x1001eab, u'\u1EAB'), - 'acute': (0x00b4, u'\u00B4'), - 'adiaeresis': (0x00e4, u'\u00E4'), - 'ae': (0x00e6, u'\u00E6'), - 'agrave': (0x00e0, u'\u00E0'), - 'ahook': (0x1001ea3, u'\u1EA3'), - 'amacron': (0x03e0, u'\u0101'), - 'ampersand': (0x0026, u'\u0026'), - 'aogonek': (0x01b1, u'\u0105'), - 'apostrophe': (0x0027, u'\u0027'), - 'approxeq': (0x1002248, u'\u2245'), - 'approximate': (0x08c8, u'\u223C'), - 'aring': (0x00e5, u'\u00E5'), - 'asciicircum': (0x005e, u'\u005E'), - 'asciitilde': (0x007e, u'\u007E'), - 'asterisk': (0x002a, u'\u002A'), - 'at': (0x0040, u'\u0040'), - 'atilde': (0x00e3, u'\u00E3'), - 'b': (0x0062, u'\u0062'), - 'babovedot': (0x1001e03, u'\u1E03'), - 'backslash': (0x005c, u'\u005C'), - 'ballotcross': (0x0af4, u'\u2717'), - 'bar': (0x007c, u'\u007C'), - 'because': (0x1002235, u'\u2235'), - 'botintegral': (0x08a5, u'\u2321'), - 'botleftparens': (0x08ac, u'\u239D'), - 'botleftsqbracket': (0x08a8, u'\u23A3'), - 'botrightparens': (0x08ae, u'\u23A0'), - 'botrightsqbracket': (0x08aa, u'\u23A6'), - 'bott': (0x09f6, u'\u2534'), - 'braceleft': (0x007b, u'\u007B'), - 'braceright': (0x007d, u'\u007D'), - 'bracketleft': (0x005b, u'\u005B'), - 'bracketright': (0x005d, u'\u005D'), - 'braille_blank': (0x1002800, u'\u2800'), - 'braille_dots_1': (0x1002801, u'\u2801'), - 'braille_dots_12': (0x1002803, u'\u2803'), - 'braille_dots_123': (0x1002807, u'\u2807'), - 'braille_dots_1234': (0x100280f, u'\u280f'), - 'braille_dots_12345': (0x100281f, u'\u281f'), - 'braille_dots_123456': (0x100283f, u'\u283f'), - 'braille_dots_1234567': (0x100287f, u'\u287f'), - 'braille_dots_12345678': (0x10028ff, u'\u28ff'), - 'braille_dots_1234568': (0x10028bf, u'\u28bf'), - 'braille_dots_123457': (0x100285f, u'\u285f'), - 'braille_dots_1234578': (0x10028df, u'\u28df'), - 'braille_dots_123458': (0x100289f, u'\u289f'), - 'braille_dots_12346': (0x100282f, u'\u282f'), - 'braille_dots_123467': (0x100286f, u'\u286f'), - 'braille_dots_1234678': (0x10028ef, u'\u28ef'), - 'braille_dots_123468': (0x10028af, u'\u28af'), - 'braille_dots_12347': (0x100284f, u'\u284f'), - 'braille_dots_123478': (0x10028cf, u'\u28cf'), - 'braille_dots_12348': (0x100288f, u'\u288f'), - 'braille_dots_1235': (0x1002817, u'\u2817'), - 'braille_dots_12356': (0x1002837, u'\u2837'), 
- 'braille_dots_123567': (0x1002877, u'\u2877'), - 'braille_dots_1235678': (0x10028f7, u'\u28f7'), - 'braille_dots_123568': (0x10028b7, u'\u28b7'), - 'braille_dots_12357': (0x1002857, u'\u2857'), - 'braille_dots_123578': (0x10028d7, u'\u28d7'), - 'braille_dots_12358': (0x1002897, u'\u2897'), - 'braille_dots_1236': (0x1002827, u'\u2827'), - 'braille_dots_12367': (0x1002867, u'\u2867'), - 'braille_dots_123678': (0x10028e7, u'\u28e7'), - 'braille_dots_12368': (0x10028a7, u'\u28a7'), - 'braille_dots_1237': (0x1002847, u'\u2847'), - 'braille_dots_12378': (0x10028c7, u'\u28c7'), - 'braille_dots_1238': (0x1002887, u'\u2887'), - 'braille_dots_124': (0x100280b, u'\u280b'), - 'braille_dots_1245': (0x100281b, u'\u281b'), - 'braille_dots_12456': (0x100283b, u'\u283b'), - 'braille_dots_124567': (0x100287b, u'\u287b'), - 'braille_dots_1245678': (0x10028fb, u'\u28fb'), - 'braille_dots_124568': (0x10028bb, u'\u28bb'), - 'braille_dots_12457': (0x100285b, u'\u285b'), - 'braille_dots_124578': (0x10028db, u'\u28db'), - 'braille_dots_12458': (0x100289b, u'\u289b'), - 'braille_dots_1246': (0x100282b, u'\u282b'), - 'braille_dots_12467': (0x100286b, u'\u286b'), - 'braille_dots_124678': (0x10028eb, u'\u28eb'), - 'braille_dots_12468': (0x10028ab, u'\u28ab'), - 'braille_dots_1247': (0x100284b, u'\u284b'), - 'braille_dots_12478': (0x10028cb, u'\u28cb'), - 'braille_dots_1248': (0x100288b, u'\u288b'), - 'braille_dots_125': (0x1002813, u'\u2813'), - 'braille_dots_1256': (0x1002833, u'\u2833'), - 'braille_dots_12567': (0x1002873, u'\u2873'), - 'braille_dots_125678': (0x10028f3, u'\u28f3'), - 'braille_dots_12568': (0x10028b3, u'\u28b3'), - 'braille_dots_1257': (0x1002853, u'\u2853'), - 'braille_dots_12578': (0x10028d3, u'\u28d3'), - 'braille_dots_1258': (0x1002893, u'\u2893'), - 'braille_dots_126': (0x1002823, u'\u2823'), - 'braille_dots_1267': (0x1002863, u'\u2863'), - 'braille_dots_12678': (0x10028e3, u'\u28e3'), - 'braille_dots_1268': (0x10028a3, u'\u28a3'), - 'braille_dots_127': (0x1002843, u'\u2843'), - 'braille_dots_1278': (0x10028c3, u'\u28c3'), - 'braille_dots_128': (0x1002883, u'\u2883'), - 'braille_dots_13': (0x1002805, u'\u2805'), - 'braille_dots_134': (0x100280d, u'\u280d'), - 'braille_dots_1345': (0x100281d, u'\u281d'), - 'braille_dots_13456': (0x100283d, u'\u283d'), - 'braille_dots_134567': (0x100287d, u'\u287d'), - 'braille_dots_1345678': (0x10028fd, u'\u28fd'), - 'braille_dots_134568': (0x10028bd, u'\u28bd'), - 'braille_dots_13457': (0x100285d, u'\u285d'), - 'braille_dots_134578': (0x10028dd, u'\u28dd'), - 'braille_dots_13458': (0x100289d, u'\u289d'), - 'braille_dots_1346': (0x100282d, u'\u282d'), - 'braille_dots_13467': (0x100286d, u'\u286d'), - 'braille_dots_134678': (0x10028ed, u'\u28ed'), - 'braille_dots_13468': (0x10028ad, u'\u28ad'), - 'braille_dots_1347': (0x100284d, u'\u284d'), - 'braille_dots_13478': (0x10028cd, u'\u28cd'), - 'braille_dots_1348': (0x100288d, u'\u288d'), - 'braille_dots_135': (0x1002815, u'\u2815'), - 'braille_dots_1356': (0x1002835, u'\u2835'), - 'braille_dots_13567': (0x1002875, u'\u2875'), - 'braille_dots_135678': (0x10028f5, u'\u28f5'), - 'braille_dots_13568': (0x10028b5, u'\u28b5'), - 'braille_dots_1357': (0x1002855, u'\u2855'), - 'braille_dots_13578': (0x10028d5, u'\u28d5'), - 'braille_dots_1358': (0x1002895, u'\u2895'), - 'braille_dots_136': (0x1002825, u'\u2825'), - 'braille_dots_1367': (0x1002865, u'\u2865'), - 'braille_dots_13678': (0x10028e5, u'\u28e5'), - 'braille_dots_1368': (0x10028a5, u'\u28a5'), - 'braille_dots_137': (0x1002845, u'\u2845'), - 'braille_dots_1378': 
(0x10028c5, u'\u28c5'), - 'braille_dots_138': (0x1002885, u'\u2885'), - 'braille_dots_14': (0x1002809, u'\u2809'), - 'braille_dots_145': (0x1002819, u'\u2819'), - 'braille_dots_1456': (0x1002839, u'\u2839'), - 'braille_dots_14567': (0x1002879, u'\u2879'), - 'braille_dots_145678': (0x10028f9, u'\u28f9'), - 'braille_dots_14568': (0x10028b9, u'\u28b9'), - 'braille_dots_1457': (0x1002859, u'\u2859'), - 'braille_dots_14578': (0x10028d9, u'\u28d9'), - 'braille_dots_1458': (0x1002899, u'\u2899'), - 'braille_dots_146': (0x1002829, u'\u2829'), - 'braille_dots_1467': (0x1002869, u'\u2869'), - 'braille_dots_14678': (0x10028e9, u'\u28e9'), - 'braille_dots_1468': (0x10028a9, u'\u28a9'), - 'braille_dots_147': (0x1002849, u'\u2849'), - 'braille_dots_1478': (0x10028c9, u'\u28c9'), - 'braille_dots_148': (0x1002889, u'\u2889'), - 'braille_dots_15': (0x1002811, u'\u2811'), - 'braille_dots_156': (0x1002831, u'\u2831'), - 'braille_dots_1567': (0x1002871, u'\u2871'), - 'braille_dots_15678': (0x10028f1, u'\u28f1'), - 'braille_dots_1568': (0x10028b1, u'\u28b1'), - 'braille_dots_157': (0x1002851, u'\u2851'), - 'braille_dots_1578': (0x10028d1, u'\u28d1'), - 'braille_dots_158': (0x1002891, u'\u2891'), - 'braille_dots_16': (0x1002821, u'\u2821'), - 'braille_dots_167': (0x1002861, u'\u2861'), - 'braille_dots_1678': (0x10028e1, u'\u28e1'), - 'braille_dots_168': (0x10028a1, u'\u28a1'), - 'braille_dots_17': (0x1002841, u'\u2841'), - 'braille_dots_178': (0x10028c1, u'\u28c1'), - 'braille_dots_18': (0x1002881, u'\u2881'), - 'braille_dots_2': (0x1002802, u'\u2802'), - 'braille_dots_23': (0x1002806, u'\u2806'), - 'braille_dots_234': (0x100280e, u'\u280e'), - 'braille_dots_2345': (0x100281e, u'\u281e'), - 'braille_dots_23456': (0x100283e, u'\u283e'), - 'braille_dots_234567': (0x100287e, u'\u287e'), - 'braille_dots_2345678': (0x10028fe, u'\u28fe'), - 'braille_dots_234568': (0x10028be, u'\u28be'), - 'braille_dots_23457': (0x100285e, u'\u285e'), - 'braille_dots_234578': (0x10028de, u'\u28de'), - 'braille_dots_23458': (0x100289e, u'\u289e'), - 'braille_dots_2346': (0x100282e, u'\u282e'), - 'braille_dots_23467': (0x100286e, u'\u286e'), - 'braille_dots_234678': (0x10028ee, u'\u28ee'), - 'braille_dots_23468': (0x10028ae, u'\u28ae'), - 'braille_dots_2347': (0x100284e, u'\u284e'), - 'braille_dots_23478': (0x10028ce, u'\u28ce'), - 'braille_dots_2348': (0x100288e, u'\u288e'), - 'braille_dots_235': (0x1002816, u'\u2816'), - 'braille_dots_2356': (0x1002836, u'\u2836'), - 'braille_dots_23567': (0x1002876, u'\u2876'), - 'braille_dots_235678': (0x10028f6, u'\u28f6'), - 'braille_dots_23568': (0x10028b6, u'\u28b6'), - 'braille_dots_2357': (0x1002856, u'\u2856'), - 'braille_dots_23578': (0x10028d6, u'\u28d6'), - 'braille_dots_2358': (0x1002896, u'\u2896'), - 'braille_dots_236': (0x1002826, u'\u2826'), - 'braille_dots_2367': (0x1002866, u'\u2866'), - 'braille_dots_23678': (0x10028e6, u'\u28e6'), - 'braille_dots_2368': (0x10028a6, u'\u28a6'), - 'braille_dots_237': (0x1002846, u'\u2846'), - 'braille_dots_2378': (0x10028c6, u'\u28c6'), - 'braille_dots_238': (0x1002886, u'\u2886'), - 'braille_dots_24': (0x100280a, u'\u280a'), - 'braille_dots_245': (0x100281a, u'\u281a'), - 'braille_dots_2456': (0x100283a, u'\u283a'), - 'braille_dots_24567': (0x100287a, u'\u287a'), - 'braille_dots_245678': (0x10028fa, u'\u28fa'), - 'braille_dots_24568': (0x10028ba, u'\u28ba'), - 'braille_dots_2457': (0x100285a, u'\u285a'), - 'braille_dots_24578': (0x10028da, u'\u28da'), - 'braille_dots_2458': (0x100289a, u'\u289a'), - 'braille_dots_246': (0x100282a, u'\u282a'), - 
'braille_dots_2467': (0x100286a, u'\u286a'), - 'braille_dots_24678': (0x10028ea, u'\u28ea'), - 'braille_dots_2468': (0x10028aa, u'\u28aa'), - 'braille_dots_247': (0x100284a, u'\u284a'), - 'braille_dots_2478': (0x10028ca, u'\u28ca'), - 'braille_dots_248': (0x100288a, u'\u288a'), - 'braille_dots_25': (0x1002812, u'\u2812'), - 'braille_dots_256': (0x1002832, u'\u2832'), - 'braille_dots_2567': (0x1002872, u'\u2872'), - 'braille_dots_25678': (0x10028f2, u'\u28f2'), - 'braille_dots_2568': (0x10028b2, u'\u28b2'), - 'braille_dots_257': (0x1002852, u'\u2852'), - 'braille_dots_2578': (0x10028d2, u'\u28d2'), - 'braille_dots_258': (0x1002892, u'\u2892'), - 'braille_dots_26': (0x1002822, u'\u2822'), - 'braille_dots_267': (0x1002862, u'\u2862'), - 'braille_dots_2678': (0x10028e2, u'\u28e2'), - 'braille_dots_268': (0x10028a2, u'\u28a2'), - 'braille_dots_27': (0x1002842, u'\u2842'), - 'braille_dots_278': (0x10028c2, u'\u28c2'), - 'braille_dots_28': (0x1002882, u'\u2882'), - 'braille_dots_3': (0x1002804, u'\u2804'), - 'braille_dots_34': (0x100280c, u'\u280c'), - 'braille_dots_345': (0x100281c, u'\u281c'), - 'braille_dots_3456': (0x100283c, u'\u283c'), - 'braille_dots_34567': (0x100287c, u'\u287c'), - 'braille_dots_345678': (0x10028fc, u'\u28fc'), - 'braille_dots_34568': (0x10028bc, u'\u28bc'), - 'braille_dots_3457': (0x100285c, u'\u285c'), - 'braille_dots_34578': (0x10028dc, u'\u28dc'), - 'braille_dots_3458': (0x100289c, u'\u289c'), - 'braille_dots_346': (0x100282c, u'\u282c'), - 'braille_dots_3467': (0x100286c, u'\u286c'), - 'braille_dots_34678': (0x10028ec, u'\u28ec'), - 'braille_dots_3468': (0x10028ac, u'\u28ac'), - 'braille_dots_347': (0x100284c, u'\u284c'), - 'braille_dots_3478': (0x10028cc, u'\u28cc'), - 'braille_dots_348': (0x100288c, u'\u288c'), - 'braille_dots_35': (0x1002814, u'\u2814'), - 'braille_dots_356': (0x1002834, u'\u2834'), - 'braille_dots_3567': (0x1002874, u'\u2874'), - 'braille_dots_35678': (0x10028f4, u'\u28f4'), - 'braille_dots_3568': (0x10028b4, u'\u28b4'), - 'braille_dots_357': (0x1002854, u'\u2854'), - 'braille_dots_3578': (0x10028d4, u'\u28d4'), - 'braille_dots_358': (0x1002894, u'\u2894'), - 'braille_dots_36': (0x1002824, u'\u2824'), - 'braille_dots_367': (0x1002864, u'\u2864'), - 'braille_dots_3678': (0x10028e4, u'\u28e4'), - 'braille_dots_368': (0x10028a4, u'\u28a4'), - 'braille_dots_37': (0x1002844, u'\u2844'), - 'braille_dots_378': (0x10028c4, u'\u28c4'), - 'braille_dots_38': (0x1002884, u'\u2884'), - 'braille_dots_4': (0x1002808, u'\u2808'), - 'braille_dots_45': (0x1002818, u'\u2818'), - 'braille_dots_456': (0x1002838, u'\u2838'), - 'braille_dots_4567': (0x1002878, u'\u2878'), - 'braille_dots_45678': (0x10028f8, u'\u28f8'), - 'braille_dots_4568': (0x10028b8, u'\u28b8'), - 'braille_dots_457': (0x1002858, u'\u2858'), - 'braille_dots_4578': (0x10028d8, u'\u28d8'), - 'braille_dots_458': (0x1002898, u'\u2898'), - 'braille_dots_46': (0x1002828, u'\u2828'), - 'braille_dots_467': (0x1002868, u'\u2868'), - 'braille_dots_4678': (0x10028e8, u'\u28e8'), - 'braille_dots_468': (0x10028a8, u'\u28a8'), - 'braille_dots_47': (0x1002848, u'\u2848'), - 'braille_dots_478': (0x10028c8, u'\u28c8'), - 'braille_dots_48': (0x1002888, u'\u2888'), - 'braille_dots_5': (0x1002810, u'\u2810'), - 'braille_dots_56': (0x1002830, u'\u2830'), - 'braille_dots_567': (0x1002870, u'\u2870'), - 'braille_dots_5678': (0x10028f0, u'\u28f0'), - 'braille_dots_568': (0x10028b0, u'\u28b0'), - 'braille_dots_57': (0x1002850, u'\u2850'), - 'braille_dots_578': (0x10028d0, u'\u28d0'), - 'braille_dots_58': (0x1002890, 
u'\u2890'), - 'braille_dots_6': (0x1002820, u'\u2820'), - 'braille_dots_67': (0x1002860, u'\u2860'), - 'braille_dots_678': (0x10028e0, u'\u28e0'), - 'braille_dots_68': (0x10028a0, u'\u28a0'), - 'braille_dots_7': (0x1002840, u'\u2840'), - 'braille_dots_78': (0x10028c0, u'\u28c0'), - 'braille_dots_8': (0x1002880, u'\u2880'), - 'breve': (0x01a2, u'\u02D8'), - 'brokenbar': (0x00a6, u'\u00A6'), - 'c': (0x0063, u'\u0063'), - 'cabovedot': (0x02e5, u'\u010B'), - 'cacute': (0x01e6, u'\u0107'), - 'careof': (0x0ab8, u'\u2105'), - 'caret': (0x0afc, u'\u2038'), - 'caron': (0x01b7, u'\u02C7'), - 'ccaron': (0x01e8, u'\u010D'), - 'ccedilla': (0x00e7, u'\u00E7'), - 'ccircumflex': (0x02e6, u'\u0109'), - 'cedilla': (0x00b8, u'\u00B8'), - 'cent': (0x00a2, u'\u00A2'), - 'checkerboard': (0x09e1, u'\u2592'), - 'checkmark': (0x0af3, u'\u2713'), - 'circle': (0x0bcf, u'\u25CB'), - 'club': (0x0aec, u'\u2663'), - 'colon': (0x003a, u'\u003A'), - 'comma': (0x002c, u'\u002C'), - 'containsas': (0x100220B, u'\u220B'), - 'copyright': (0x00a9, u'\u00A9'), - 'cr': (0x09e4, u'\u240D'), - 'crossinglines': (0x09ee, u'\u253C'), - 'cuberoot': (0x100221B, u'\u221B'), - 'currency': (0x00a4, u'\u00A4'), - 'd': (0x0064, u'\u0064'), - 'dabovedot': (0x1001e0b, u'\u1E0B'), - 'dagger': (0x0af1, u'\u2020'), - 'dcaron': (0x01ef, u'\u010F'), - 'dead_A': (0xfe81, None), - 'dead_E': (0xfe83, None), - 'dead_I': (0xfe85, None), - 'dead_O': (0xfe87, None), - 'dead_U': (0xfe89, None), - 'dead_a': (0xfe80, None), - 'dead_abovecomma': (0xfe64, u'\u0315'), - 'dead_abovedot': (0xfe56, u'\u0307'), - 'dead_abovereversedcomma': (0xfe65, u'\u0312'), - 'dead_abovering': (0xfe58, u'\u030A'), - 'dead_aboveverticalline': (0xfe91, u'\u030D'), - 'dead_acute': (0xfe51, u'\u0301'), - 'dead_belowbreve': (0xfe6b, u'\u032E'), - 'dead_belowcircumflex': (0xfe69, u'\u032D'), - 'dead_belowcomma': (0xfe6e, u'\u0326'), - 'dead_belowdiaeresis': (0xfe6c, u'\u0324'), - 'dead_belowdot': (0xfe60, u'\u0323'), - 'dead_belowmacron': (0xfe68, u'\u0331'), - 'dead_belowring': (0xfe67, u'\u0325'), - 'dead_belowtilde': (0xfe6a, u'\u0330'), - 'dead_belowverticalline': (0xfe92, u'\u0329'), - 'dead_breve': (0xfe55, u'\u0306'), - 'dead_capital_schwa': (0xfe8b, None), - 'dead_caron': (0xfe5a, u'\u030C'), - 'dead_cedilla': (0xfe5b, u'\u0327'), - 'dead_circumflex': (0xfe52, u'\u0302'), - 'dead_currency': (0xfe6f, None), - 'dead_diaeresis': (0xfe57, u'\u0308'), - 'dead_doubleacute': (0xfe59, u'\u030B'), - 'dead_doublegrave': (0xfe66, u'\u030F'), - 'dead_e': (0xfe82, None), - 'dead_grave': (0xfe50, u'\u0300'), - 'dead_greek': (0xfe8c, None), - 'dead_hook': (0xfe61, u'\u0309'), - 'dead_horn': (0xfe62, u'\u031B'), - 'dead_i': (0xfe84, None), - 'dead_invertedbreve': (0xfe6d, u'\u032F'), - 'dead_iota': (0xfe5d, u'\u0345'), - 'dead_longsolidusoverlay': (0xfe93, u'\u0338'), - 'dead_lowline': (0xfe90, u'\u0332'), - 'dead_macron': (0xfe54, u'\u0304'), - 'dead_o': (0xfe86, None), - 'dead_ogonek': (0xfe5c, u'\u0328'), - 'dead_semivoiced_sound': (0xfe5f, None), - 'dead_small_schwa': (0xfe8a, None), - 'dead_stroke': (0xfe63, u'\u0335'), - 'dead_tilde': (0xfe53, u'\u0303'), - 'dead_u': (0xfe88, None), - 'dead_voiced_sound': (0xfe5e, None), - 'degree': (0x00b0, u'\u00B0'), - 'diaeresis': (0x00a8, u'\u00A8'), - 'diamond': (0x0aed, u'\u2666'), - 'digitspace': (0x0aa5, u'\u2007'), - 'dintegral': (0x100222C, u'\u222C'), - 'division': (0x00f7, u'\u00F7'), - 'dollar': (0x0024, u'\u0024'), - 'doubbaselinedot': (0x0aaf, u'\u2025'), - 'doubleacute': (0x01bd, u'\u02DD'), - 'doubledagger': (0x0af2, u'\u2021'), - 
'doublelowquotemark': (0x0afe, u'\u201E'), - 'downarrow': (0x08fe, u'\u2193'), - 'downstile': (0x0bc4, u'\u230A'), - 'downtack': (0x0bc2, u'\u22A4'), - 'dstroke': (0x01f0, u'\u0111'), - 'e': (0x0065, u'\u0065'), - 'eabovedot': (0x03ec, u'\u0117'), - 'eacute': (0x00e9, u'\u00E9'), - 'ebelowdot': (0x1001eb9, u'\u1EB9'), - 'ecaron': (0x01ec, u'\u011B'), - 'ecircumflex': (0x00ea, u'\u00EA'), - 'ecircumflexacute': (0x1001ebf, u'\u1EBF'), - 'ecircumflexbelowdot': (0x1001ec7, u'\u1EC7'), - 'ecircumflexgrave': (0x1001ec1, u'\u1EC1'), - 'ecircumflexhook': (0x1001ec3, u'\u1EC3'), - 'ecircumflextilde': (0x1001ec5, u'\u1EC5'), - 'ediaeresis': (0x00eb, u'\u00EB'), - 'egrave': (0x00e8, u'\u00E8'), - 'ehook': (0x1001ebb, u'\u1EBB'), - 'eightsubscript': (0x1002088, u'\u2088'), - 'eightsuperior': (0x1002078, u'\u2078'), - 'elementof': (0x1002208, u'\u2208'), - 'ellipsis': (0x0aae, u'\u2026'), - 'em3space': (0x0aa3, u'\u2004'), - 'em4space': (0x0aa4, u'\u2005'), - 'emacron': (0x03ba, u'\u0113'), - 'emdash': (0x0aa9, u'\u2014'), - 'emptyset': (0x1002205, u'\u2205'), - 'emspace': (0x0aa1, u'\u2003'), - 'endash': (0x0aaa, u'\u2013'), - 'eng': (0x03bf, u'\u014B'), - 'enspace': (0x0aa2, u'\u2002'), - 'eogonek': (0x01ea, u'\u0119'), - 'equal': (0x003d, u'\u003D'), - 'eth': (0x00f0, u'\u00F0'), - 'etilde': (0x1001ebd, u'\u1EBD'), - 'exclam': (0x0021, u'\u0021'), - 'exclamdown': (0x00a1, u'\u00A1'), - 'ezh': (0x1000292, u'\u0292'), - 'f': (0x0066, u'\u0066'), - 'fabovedot': (0x1001e1f, u'\u1E1F'), - 'femalesymbol': (0x0af8, u'\u2640'), - 'ff': (0x09e3, u'\u240C'), - 'figdash': (0x0abb, u'\u2012'), - 'fiveeighths': (0x0ac5, u'\u215D'), - 'fivesixths': (0x0ab7, u'\u215A'), - 'fivesubscript': (0x1002085, u'\u2085'), - 'fivesuperior': (0x1002075, u'\u2075'), - 'fourfifths': (0x0ab5, u'\u2158'), - 'foursubscript': (0x1002084, u'\u2084'), - 'foursuperior': (0x1002074, u'\u2074'), - 'fourthroot': (0x100221C, u'\u221C'), - 'function': (0x08f6, u'\u0192'), - 'g': (0x0067, u'\u0067'), - 'gabovedot': (0x02f5, u'\u0121'), - 'gbreve': (0x02bb, u'\u011F'), - 'gcaron': (0x10001e7, u'\u01E7'), - 'gcedilla': (0x03bb, u'\u0123'), - 'gcircumflex': (0x02f8, u'\u011D'), - 'grave': (0x0060, u'\u0060'), - 'greater': (0x003e, u'\u003E'), - 'greaterthanequal': (0x08be, u'\u2265'), - 'guillemotleft': (0x00ab, u'\u00AB'), - 'guillemotright': (0x00bb, u'\u00BB'), - 'h': (0x0068, u'\u0068'), - 'hairspace': (0x0aa8, u'\u200A'), - 'hcircumflex': (0x02b6, u'\u0125'), - 'heart': (0x0aee, u'\u2665'), - 'hebrew_aleph': (0x0ce0, u'\u05D0'), - 'hebrew_ayin': (0x0cf2, u'\u05E2'), - 'hebrew_bet': (0x0ce1, u'\u05D1'), - 'hebrew_chet': (0x0ce7, u'\u05D7'), - 'hebrew_dalet': (0x0ce3, u'\u05D3'), - 'hebrew_doublelowline': (0x0cdf, u'\u2017'), - 'hebrew_finalkaph': (0x0cea, u'\u05DA'), - 'hebrew_finalmem': (0x0ced, u'\u05DD'), - 'hebrew_finalnun': (0x0cef, u'\u05DF'), - 'hebrew_finalpe': (0x0cf3, u'\u05E3'), - 'hebrew_finalzade': (0x0cf5, u'\u05E5'), - 'hebrew_gimel': (0x0ce2, u'\u05D2'), - 'hebrew_he': (0x0ce4, u'\u05D4'), - 'hebrew_kaph': (0x0ceb, u'\u05DB'), - 'hebrew_lamed': (0x0cec, u'\u05DC'), - 'hebrew_mem': (0x0cee, u'\u05DE'), - 'hebrew_nun': (0x0cf0, u'\u05E0'), - 'hebrew_pe': (0x0cf4, u'\u05E4'), - 'hebrew_qoph': (0x0cf7, u'\u05E7'), - 'hebrew_resh': (0x0cf8, u'\u05E8'), - 'hebrew_samech': (0x0cf1, u'\u05E1'), - 'hebrew_shin': (0x0cf9, u'\u05E9'), - 'hebrew_taw': (0x0cfa, u'\u05EA'), - 'hebrew_tet': (0x0ce8, u'\u05D8'), - 'hebrew_waw': (0x0ce5, u'\u05D5'), - 'hebrew_yod': (0x0ce9, u'\u05D9'), - 'hebrew_zade': (0x0cf6, u'\u05E6'), - 'hebrew_zain': 
(0x0ce6, u'\u05D6'), - 'horizlinescan1': (0x09ef, u'\u23BA'), - 'horizlinescan3': (0x09f0, u'\u23BB'), - 'horizlinescan5': (0x09f1, u'\u2500'), - 'horizlinescan7': (0x09f2, u'\u23BC'), - 'horizlinescan9': (0x09f3, u'\u23BD'), - 'hstroke': (0x02b1, u'\u0127'), - 'ht': (0x09e2, u'\u2409'), - 'hyphen': (0x00ad, u'\u00AD'), - 'i': (0x0069, u'\u0069'), - 'iacute': (0x00ed, u'\u00ED'), - 'ibelowdot': (0x1001ecb, u'\u1ECB'), - 'ibreve': (0x100012d, u'\u012D'), - 'icircumflex': (0x00ee, u'\u00EE'), - 'identical': (0x08cf, u'\u2261'), - 'idiaeresis': (0x00ef, u'\u00EF'), - 'idotless': (0x02b9, u'\u0131'), - 'ifonlyif': (0x08cd, u'\u21D4'), - 'igrave': (0x00ec, u'\u00EC'), - 'ihook': (0x1001ec9, u'\u1EC9'), - 'imacron': (0x03ef, u'\u012B'), - 'implies': (0x08ce, u'\u21D2'), - 'includedin': (0x08da, u'\u2282'), - 'includes': (0x08db, u'\u2283'), - 'infinity': (0x08c2, u'\u221E'), - 'integral': (0x08bf, u'\u222B'), - 'intersection': (0x08dc, u'\u2229'), - 'iogonek': (0x03e7, u'\u012F'), - 'itilde': (0x03b5, u'\u0129'), - 'j': (0x006a, u'\u006A'), - 'jcircumflex': (0x02bc, u'\u0135'), - 'jot': (0x0bca, u'\u2218'), - 'k': (0x006b, u'\u006B'), - 'kana_A': (0x04b1, u'\u30A2'), - 'kana_CHI': (0x04c1, u'\u30C1'), - 'kana_E': (0x04b4, u'\u30A8'), - 'kana_FU': (0x04cc, u'\u30D5'), - 'kana_HA': (0x04ca, u'\u30CF'), - 'kana_HE': (0x04cd, u'\u30D8'), - 'kana_HI': (0x04cb, u'\u30D2'), - 'kana_HO': (0x04ce, u'\u30DB'), - 'kana_I': (0x04b2, u'\u30A4'), - 'kana_KA': (0x04b6, u'\u30AB'), - 'kana_KE': (0x04b9, u'\u30B1'), - 'kana_KI': (0x04b7, u'\u30AD'), - 'kana_KO': (0x04ba, u'\u30B3'), - 'kana_KU': (0x04b8, u'\u30AF'), - 'kana_MA': (0x04cf, u'\u30DE'), - 'kana_ME': (0x04d2, u'\u30E1'), - 'kana_MI': (0x04d0, u'\u30DF'), - 'kana_MO': (0x04d3, u'\u30E2'), - 'kana_MU': (0x04d1, u'\u30E0'), - 'kana_N': (0x04dd, u'\u30F3'), - 'kana_NA': (0x04c5, u'\u30CA'), - 'kana_NE': (0x04c8, u'\u30CD'), - 'kana_NI': (0x04c6, u'\u30CB'), - 'kana_NO': (0x04c9, u'\u30CE'), - 'kana_NU': (0x04c7, u'\u30CC'), - 'kana_O': (0x04b5, u'\u30AA'), - 'kana_RA': (0x04d7, u'\u30E9'), - 'kana_RE': (0x04da, u'\u30EC'), - 'kana_RI': (0x04d8, u'\u30EA'), - 'kana_RO': (0x04db, u'\u30ED'), - 'kana_RU': (0x04d9, u'\u30EB'), - 'kana_SA': (0x04bb, u'\u30B5'), - 'kana_SE': (0x04be, u'\u30BB'), - 'kana_SHI': (0x04bc, u'\u30B7'), - 'kana_SO': (0x04bf, u'\u30BD'), - 'kana_SU': (0x04bd, u'\u30B9'), - 'kana_TA': (0x04c0, u'\u30BF'), - 'kana_TE': (0x04c3, u'\u30C6'), - 'kana_TO': (0x04c4, u'\u30C8'), - 'kana_TSU': (0x04c2, u'\u30C4'), - 'kana_U': (0x04b3, u'\u30A6'), - 'kana_WA': (0x04dc, u'\u30EF'), - 'kana_WO': (0x04a6, u'\u30F2'), - 'kana_YA': (0x04d4, u'\u30E4'), - 'kana_YO': (0x04d6, u'\u30E8'), - 'kana_YU': (0x04d5, u'\u30E6'), - 'kana_a': (0x04a7, u'\u30A1'), - 'kana_closingbracket': (0x04a3, u'\u300D'), - 'kana_comma': (0x04a4, u'\u3001'), - 'kana_conjunctive': (0x04a5, u'\u30FB'), - 'kana_e': (0x04aa, u'\u30A7'), - 'kana_fullstop': (0x04a1, u'\u3002'), - 'kana_i': (0x04a8, u'\u30A3'), - 'kana_o': (0x04ab, u'\u30A9'), - 'kana_openingbracket': (0x04a2, u'\u300C'), - 'kana_tsu': (0x04af, u'\u30C3'), - 'kana_u': (0x04a9, u'\u30A5'), - 'kana_ya': (0x04ac, u'\u30E3'), - 'kana_yo': (0x04ae, u'\u30E7'), - 'kana_yu': (0x04ad, u'\u30E5'), - 'kcedilla': (0x03f3, u'\u0137'), - 'kra': (0x03a2, u'\u0138'), - 'l': (0x006c, u'\u006C'), - 'lacute': (0x01e5, u'\u013A'), - 'latincross': (0x0ad9, u'\u271D'), - 'lbelowdot': (0x1001e37, u'\u1E37'), - 'lcaron': (0x01b5, u'\u013E'), - 'lcedilla': (0x03b6, u'\u013C'), - 'leftarrow': (0x08fb, u'\u2190'), - 'leftdoublequotemark': 
(0x0ad2, u'\u201C'), - 'leftmiddlecurlybrace': (0x08af, u'\u23A8'), - 'leftradical': (0x08a1, u'\u23B7'), - 'leftsinglequotemark': (0x0ad0, u'\u2018'), - 'leftt': (0x09f4, u'\u251C'), - 'lefttack': (0x0bdc, u'\u22A3'), - 'less': (0x003c, u'\u003C'), - 'lessthanequal': (0x08bc, u'\u2264'), - 'lf': (0x09e5, u'\u240A'), - 'logicaland': (0x08de, u'\u2227'), - 'logicalor': (0x08df, u'\u2228'), - 'lowleftcorner': (0x09ed, u'\u2514'), - 'lowrightcorner': (0x09ea, u'\u2518'), - 'lstroke': (0x01b3, u'\u0142'), - 'm': (0x006d, u'\u006D'), - 'mabovedot': (0x1001e41, u'\u1E41'), - 'macron': (0x00af, u'\u00AF'), - 'malesymbol': (0x0af7, u'\u2642'), - 'maltesecross': (0x0af0, u'\u2720'), - 'masculine': (0x00ba, u'\u00BA'), - 'minus': (0x002d, u'\u002D'), - 'minutes': (0x0ad6, u'\u2032'), - 'mu': (0x00b5, u'\u00B5'), - 'multiply': (0x00d7, u'\u00D7'), - 'musicalflat': (0x0af6, u'\u266D'), - 'musicalsharp': (0x0af5, u'\u266F'), - 'n': (0x006e, u'\u006E'), - 'nabla': (0x08c5, u'\u2207'), - 'nacute': (0x01f1, u'\u0144'), - 'ncaron': (0x01f2, u'\u0148'), - 'ncedilla': (0x03f1, u'\u0146'), - 'ninesubscript': (0x1002089, u'\u2089'), - 'ninesuperior': (0x1002079, u'\u2079'), - 'nl': (0x09e8, u'\u2424'), - 'nobreakspace': (0x00a0, u'\u00A0'), - 'notapproxeq': (0x1002247, u'\u2247'), - 'notelementof': (0x1002209, u'\u2209'), - 'notequal': (0x08bd, u'\u2260'), - 'notidentical': (0x1002262, u'\u2262'), - 'notsign': (0x00ac, u'\u00AC'), - 'ntilde': (0x00f1, u'\u00F1'), - 'numbersign': (0x0023, u'\u0023'), - 'numerosign': (0x06b0, u'\u2116'), - 'o': (0x006f, u'\u006F'), - 'oacute': (0x00f3, u'\u00F3'), - 'obarred': (0x1000275, u'\u0275'), - 'obelowdot': (0x1001ecd, u'\u1ECD'), - 'ocaron': (0x10001d2, u'\u01D2'), - 'ocircumflex': (0x00f4, u'\u00F4'), - 'ocircumflexacute': (0x1001ed1, u'\u1ED1'), - 'ocircumflexbelowdot': (0x1001ed9, u'\u1ED9'), - 'ocircumflexgrave': (0x1001ed3, u'\u1ED3'), - 'ocircumflexhook': (0x1001ed5, u'\u1ED5'), - 'ocircumflextilde': (0x1001ed7, u'\u1ED7'), - 'odiaeresis': (0x00f6, u'\u00F6'), - 'odoubleacute': (0x01f5, u'\u0151'), - 'oe': (0x13bd, u'\u0153'), - 'ogonek': (0x01b2, u'\u02DB'), - 'ograve': (0x00f2, u'\u00F2'), - 'ohook': (0x1001ecf, u'\u1ECF'), - 'ohorn': (0x10001a1, u'\u01A1'), - 'ohornacute': (0x1001edb, u'\u1EDB'), - 'ohornbelowdot': (0x1001ee3, u'\u1EE3'), - 'ohorngrave': (0x1001edd, u'\u1EDD'), - 'ohornhook': (0x1001edf, u'\u1EDF'), - 'ohorntilde': (0x1001ee1, u'\u1EE1'), - 'omacron': (0x03f2, u'\u014D'), - 'oneeighth': (0x0ac3, u'\u215B'), - 'onefifth': (0x0ab2, u'\u2155'), - 'onehalf': (0x00bd, u'\u00BD'), - 'onequarter': (0x00bc, u'\u00BC'), - 'onesixth': (0x0ab6, u'\u2159'), - 'onesubscript': (0x1002081, u'\u2081'), - 'onesuperior': (0x00b9, u'\u00B9'), - 'onethird': (0x0ab0, u'\u2153'), - 'ooblique': (0x00f8, u'\u00F8'), - 'ordfeminine': (0x00aa, u'\u00AA'), - 'oslash': (0x00f8, u'\u00F8'), - 'otilde': (0x00f5, u'\u00F5'), - 'overline': (0x047e, u'\u203E'), - 'p': (0x0070, u'\u0070'), - 'pabovedot': (0x1001e57, u'\u1E57'), - 'paragraph': (0x00b6, u'\u00B6'), - 'parenleft': (0x0028, u'\u0028'), - 'parenright': (0x0029, u'\u0029'), - 'partdifferential': (0x1002202, u'\u2202'), - 'partialderivative': (0x08ef, u'\u2202'), - 'percent': (0x0025, u'\u0025'), - 'period': (0x002e, u'\u002E'), - 'periodcentered': (0x00b7, u'\u00B7'), - 'permille': (0x0ad5, u'\u2030'), - 'phonographcopyright': (0x0afb, u'\u2117'), - 'plus': (0x002b, u'\u002B'), - 'plusminus': (0x00b1, u'\u00B1'), - 'prescription': (0x0ad4, u'\u211E'), - 'prolongedsound': (0x04b0, u'\u30FC'), - 'punctspace': (0x0aa6, 
u'\u2008'), - 'q': (0x0071, u'\u0071'), - 'quad': (0x0bcc, u'\u2395'), - 'question': (0x003f, u'\u003F'), - 'questiondown': (0x00bf, u'\u00BF'), - 'quotedbl': (0x0022, u'\u0022'), - 'r': (0x0072, u'\u0072'), - 'racute': (0x01e0, u'\u0155'), - 'radical': (0x08d6, u'\u221A'), - 'rcaron': (0x01f8, u'\u0159'), - 'rcedilla': (0x03b3, u'\u0157'), - 'registered': (0x00ae, u'\u00AE'), - 'rightarrow': (0x08fd, u'\u2192'), - 'rightdoublequotemark': (0x0ad3, u'\u201D'), - 'rightmiddlecurlybrace': (0x08b0, u'\u23AC'), - 'rightsinglequotemark': (0x0ad1, u'\u2019'), - 'rightt': (0x09f5, u'\u2524'), - 'righttack': (0x0bfc, u'\u22A2'), - 's': (0x0073, u'\u0073'), - 'sabovedot': (0x1001e61, u'\u1E61'), - 'sacute': (0x01b6, u'\u015B'), - 'scaron': (0x01b9, u'\u0161'), - 'scedilla': (0x01ba, u'\u015F'), - 'schwa': (0x1000259, u'\u0259'), - 'scircumflex': (0x02fe, u'\u015D'), - 'seconds': (0x0ad7, u'\u2033'), - 'section': (0x00a7, u'\u00A7'), - 'semicolon': (0x003b, u'\u003B'), - 'semivoicedsound': (0x04df, u'\u309C'), - 'seveneighths': (0x0ac6, u'\u215E'), - 'sevensubscript': (0x1002087, u'\u2087'), - 'sevensuperior': (0x1002077, u'\u2077'), - 'similarequal': (0x08c9, u'\u2243'), - 'singlelowquotemark': (0x0afd, u'\u201A'), - 'sixsubscript': (0x1002086, u'\u2086'), - 'sixsuperior': (0x1002076, u'\u2076'), - 'slash': (0x002f, u'\u002F'), - 'soliddiamond': (0x09e0, u'\u25C6'), - 'space': (0x0020, u'\u0020'), - 'squareroot': (0x100221A, u'\u221A'), - 'ssharp': (0x00df, u'\u00DF'), - 'sterling': (0x00a3, u'\u00A3'), - 'stricteq': (0x1002263, u'\u2263'), - 't': (0x0074, u'\u0074'), - 'tabovedot': (0x1001e6b, u'\u1E6B'), - 'tcaron': (0x01bb, u'\u0165'), - 'tcedilla': (0x01fe, u'\u0163'), - 'telephone': (0x0af9, u'\u260E'), - 'telephonerecorder': (0x0afa, u'\u2315'), - 'therefore': (0x08c0, u'\u2234'), - 'thinspace': (0x0aa7, u'\u2009'), - 'thorn': (0x00fe, u'\u00FE'), - 'threeeighths': (0x0ac4, u'\u215C'), - 'threefifths': (0x0ab4, u'\u2157'), - 'threequarters': (0x00be, u'\u00BE'), - 'threesubscript': (0x1002083, u'\u2083'), - 'threesuperior': (0x00b3, u'\u00B3'), - 'tintegral': (0x100222D, u'\u222D'), - 'topintegral': (0x08a4, u'\u2320'), - 'topleftparens': (0x08ab, u'\u239B'), - 'topleftsqbracket': (0x08a7, u'\u23A1'), - 'toprightparens': (0x08ad, u'\u239E'), - 'toprightsqbracket': (0x08a9, u'\u23A4'), - 'topt': (0x09f7, u'\u252C'), - 'trademark': (0x0ac9, u'\u2122'), - 'tslash': (0x03bc, u'\u0167'), - 'twofifths': (0x0ab3, u'\u2156'), - 'twosubscript': (0x1002082, u'\u2082'), - 'twosuperior': (0x00b2, u'\u00B2'), - 'twothirds': (0x0ab1, u'\u2154'), - 'u': (0x0075, u'\u0075'), - 'uacute': (0x00fa, u'\u00FA'), - 'ubelowdot': (0x1001ee5, u'\u1EE5'), - 'ubreve': (0x02fd, u'\u016D'), - 'ucircumflex': (0x00fb, u'\u00FB'), - 'udiaeresis': (0x00fc, u'\u00FC'), - 'udoubleacute': (0x01fb, u'\u0171'), - 'ugrave': (0x00f9, u'\u00F9'), - 'uhook': (0x1001ee7, u'\u1EE7'), - 'uhorn': (0x10001b0, u'\u01B0'), - 'uhornacute': (0x1001ee9, u'\u1EE9'), - 'uhornbelowdot': (0x1001ef1, u'\u1EF1'), - 'uhorngrave': (0x1001eeb, u'\u1EEB'), - 'uhornhook': (0x1001eed, u'\u1EED'), - 'uhorntilde': (0x1001eef, u'\u1EEF'), - 'umacron': (0x03fe, u'\u016B'), - 'underscore': (0x005f, u'\u005F'), - 'union': (0x08dd, u'\u222A'), - 'uogonek': (0x03f9, u'\u0173'), - 'uparrow': (0x08fc, u'\u2191'), - 'upleftcorner': (0x09ec, u'\u250C'), - 'uprightcorner': (0x09eb, u'\u2510'), - 'upstile': (0x0bd3, u'\u2308'), - 'uptack': (0x0bce, u'\u22A5'), - 'uring': (0x01f9, u'\u016F'), - 'utilde': (0x03fd, u'\u0169'), - 'v': (0x0076, u'\u0076'), - 'variation': 
(0x08c1, u'\u221D'), - 'vertbar': (0x09f8, u'\u2502'), - 'voicedsound': (0x04de, u'\u309B'), - 'vt': (0x09e9, u'\u240B'), - 'w': (0x0077, u'\u0077'), - 'wacute': (0x1001e83, u'\u1E83'), - 'wcircumflex': (0x1000175, u'\u0175'), - 'wdiaeresis': (0x1001e85, u'\u1E85'), - 'wgrave': (0x1001e81, u'\u1E81'), - 'x': (0x0078, u'\u0078'), - 'xabovedot': (0x1001e8b, u'\u1E8B'), - 'y': (0x0079, u'\u0079'), - 'yacute': (0x00fd, u'\u00FD'), - 'ybelowdot': (0x1001ef5, u'\u1EF5'), - 'ycircumflex': (0x1000177, u'\u0177'), - 'ydiaeresis': (0x00ff, u'\u00FF'), - 'yen': (0x00a5, u'\u00A5'), - 'ygrave': (0x1001ef3, u'\u1EF3'), - 'yhook': (0x1001ef7, u'\u1EF7'), - 'ytilde': (0x1001ef9, u'\u1EF9'), - 'z': (0x007a, u'\u007A'), - 'zabovedot': (0x01bf, u'\u017C'), - 'zacute': (0x01bc, u'\u017A'), - 'zcaron': (0x01be, u'\u017E'), - 'zerosubscript': (0x1002080, u'\u2080'), - 'zerosuperior': (0x1002070, u'\u2070'), - 'zstroke': (0x10001b6, u'\u01B6')} - -DEAD_KEYS = { - u'\u0307': u'\u02D9', - u'\u030A': u'\u02DA', - u'\u0301': u'\u00B4', - u'\u0306': u'\u02D8', - u'\u030C': u'\u02C7', - u'\u0327': u'\u00B8', - u'\u0302': u'\u005E', - u'\u0308': u'\u00A8', - u'\u030B': u'\u02DD', - u'\u0300': u'\u0060', - u'\u0345': u'\u037A', - u'\u0332': u'\u005F', - u'\u0304': u'\u00AF', - u'\u0328': u'\u02DB', - u'\u0303': u'\u007E'} - -KEYPAD_KEYS = { - 'KP_0': 0xffb0, - 'KP_1': 0xffb1, - 'KP_2': 0xffb2, - 'KP_3': 0xffb3, - 'KP_4': 0xffb4, - 'KP_5': 0xffb5, - 'KP_6': 0xffb6, - 'KP_7': 0xffb7, - 'KP_8': 0xffb8, - 'KP_9': 0xffb9, - 'KP_Add': 0xffab, - 'KP_Begin': 0xff9d, - 'KP_Decimal': 0xffae, - 'KP_Delete': 0xff9f, - 'KP_Divide': 0xffaf, - 'KP_Down': 0xff99, - 'KP_End': 0xff9c, - 'KP_Enter': 0xff8d, - 'KP_Equal': 0xffbd, - 'KP_F1': 0xff91, - 'KP_F2': 0xff92, - 'KP_F3': 0xff93, - 'KP_F4': 0xff94, - 'KP_Home': 0xff95, - 'KP_Insert': 0xff9e, - 'KP_Left': 0xff96, - 'KP_Multiply': 0xffaa, - 'KP_Next': 0xff9b, - 'KP_Page_Down': 0xff9b, - 'KP_Page_Up': 0xff9a, - 'KP_Prior': 0xff9a, - 'KP_Right': 0xff98, - 'KP_Separator': 0xffac, - 'KP_Space': 0xff80, - 'KP_Subtract': 0xffad, - 'KP_Tab': 0xff89, - 'KP_Up': 0xff97} - -CHARS = { - codepoint: name - for name, (keysym, codepoint) in SYMBOLS.items() - if codepoint} - -KEYSYMS = { - keysym: name - for name, (keysym, codepoint) in SYMBOLS.items() - if codepoint} diff --git a/pype/vendor/pynput/keyboard/__init__.py b/pype/vendor/pynput/keyboard/__init__.py deleted file mode 100644 index 1290ea1cd9..0000000000 --- a/pype/vendor/pynput/keyboard/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The module containing keyboard classes. - -See the documentation for more information. 
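The keysym tables removed above follow a mechanical pattern worth recording: each braille_dots_* name encodes raised dot N as bit 2**(N-1) above the U+2800 base, and Unicode-range keysyms are the codepoint plus the X11 offset 0x01000000. Legacy keysyms such as 'Ydiaeresis' (0x13be) predate that offset convention and are simply looked up in the table. A minimal sketch of the encoding (helper names here are illustrative, not part of the deleted module)::

    UNICODE_KEYSYM_OFFSET = 0x01000000  # X11 convention for Unicode keysyms
    BRAILLE_BASE = 0x2800               # U+2800, BRAILLE PATTERN BLANK

    def braille_codepoint(dots):
        """Unicode codepoint for a set of raised braille dots (numbers 1-8)."""
        value = BRAILLE_BASE
        for dot in dots:
            value |= 1 << (dot - 1)
        return value

    # Reproduces the deleted entry 'braille_dots_12356': (0x1002837, u'\u2837').
    assert braille_codepoint((1, 2, 3, 5, 6)) == 0x2837
    assert UNICODE_KEYSYM_OFFSET + 0x2837 == 0x1002837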
-""" - -# pylint: disable=C0103 -# KeyCode, Key, Controller and Listener are not constants - -import os -import sys - -if os.environ.get('__PYNPUT_GENERATE_DOCUMENTATION') == 'yes': - from ._base import KeyCode, Key, Controller, Listener -else: - KeyCode = None - Key = None - Controller = None - Listener = None - - -if sys.platform == 'darwin': - if not KeyCode and not Key and not Controller and not Listener: - from ._darwin import KeyCode, Key, Controller, Listener - -elif sys.platform == 'win32': - if not KeyCode and not Key and not Controller and not Listener: - from ._win32 import KeyCode, Key, Controller, Listener - -else: - if not KeyCode and not Key and not Controller and not Listener: - try: - from ._xorg import KeyCode, Key, Controller, Listener - except ImportError: - # For now, since we only support Xlib anyway, we re-raise these - # errors to allow users to determine the cause of failures to import - raise - - -if not KeyCode or not Key or not Controller or not Listener: - raise ImportError('this platform is not supported') diff --git a/pype/vendor/pynput/keyboard/_base.py b/pype/vendor/pynput/keyboard/_base.py deleted file mode 100644 index 57aadba2bf..0000000000 --- a/pype/vendor/pynput/keyboard/_base.py +++ /dev/null @@ -1,669 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -This module contains the base implementation. - -The actual interface to keyboard classes is defined here, but the implementation -is located in a platform dependent module. -""" - -# pylint: disable=R0903 -# We implement stubs - -import contextlib -import enum -import threading -import unicodedata - -import six - -from pynput._util import AbstractListener - - -class KeyCode(object): - """ - A :class:`KeyCode` represents the description of a key code used by the - operating system. - """ - def __init__(self, vk=None, char=None, is_dead=False): - self.vk = vk - self.char = six.text_type(char) if char is not None else None - self.is_dead = is_dead - - if self.is_dead: - self.combining = unicodedata.lookup( - 'COMBINING ' + unicodedata.name(self.char)) - if not self.combining: - raise KeyError(char) - else: - self.combining = None - - def __repr__(self): - if self.is_dead: - return '[%s]' % repr(self.char) - if self.char is not None: - return repr(self.char) - else: - return '<%d>' % self.vk - - def __str__(self): - return repr(self) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - if self.char is not None and other.char is not None: - return self.char == other.char and self.is_dead == other.is_dead - else: - return self.vk == other.vk - - def __hash__(self): - return hash(repr(self)) - - def join(self, key): - """Applies this dead key to another key and returns the result. 
- - Joining a dead key with space (``' '``) or itself yields the non-dead - version of this key, if one exists; for example, - ``KeyCode.from_dead('~').join(KeyCode.from_char(' '))`` equals - ``KeyCode.from_char('~')`` and - ``KeyCode.from_dead('~').join(KeyCode.from_dead('~'))``. - - :param KeyCode key: The key to join with this key. - - :return: a key code - - :raises ValueError: if the keys cannot be joined - """ - # A non-dead key cannot be joined - if not self.is_dead: - raise ValueError(self) - - # Joining two of the same keycodes, or joining with space, yields the - # non-dead version of the key - if key.char == ' ' or self == key: - return self.from_char(self.char) - - # Otherwise we combine the characters - if key.char is not None: - combined = unicodedata.normalize( - 'NFC', - key.char + self.combining) - if combined: - return self.from_char(combined) - - raise ValueError(key) - - @classmethod - def from_vk(cls, vk, **kwargs): - """Creates a key from a virtual key code. - - :param vk: The virtual key code. - - :param kwargs: Any other parameters to pass. - - :return: a key code - """ - return cls(vk=vk, **kwargs) - - @classmethod - def from_char(cls, char, **kwargs): - """Creates a key from a character. - - :param str char: The character. - - :return: a key code - """ - return cls(char=char, **kwargs) - - @classmethod - def from_dead(cls, char, **kwargs): - """Creates a dead key. - - :param char: The dead key. This should be the unicode character - representing the stand alone character, such as ``'~'`` for - *COMBINING TILDE*. - - :return: a key code - """ - return cls(char=char, is_dead=True, **kwargs) - - -class Key(enum.Enum): - """A class representing various buttons that may not correspond to - letters. This includes modifier keys and function keys. - - The actual values for these items differ between platforms. Some platforms - may have additional buttons, but these are guaranteed to be present - everywhere. - """ - #: A generic Alt key. This is a modifier. - alt = 0 - - #: The left Alt key. This is a modifier. - alt_l = 0 - - #: The right Alt key. This is a modifier. - alt_r = 0 - - #: The AltGr key. This is a modifier. - alt_gr = 0 - - #: The Backspace key. - backspace = 0 - - #: The CapsLock key. - caps_lock = 0 - - #: A generic command button. On *PC* platforms, this corresponds to the - #: Super key or Windows key, and on *Mac* it corresponds to the Command - #: key. This may be a modifier. - cmd = 0 - - #: The left command button. On *PC* platforms, this corresponds to the - #: Super key or Windows key, and on *Mac* it corresponds to the Command - #: key. This may be a modifier. - cmd_l = 0 - - #: The right command button. On *PC* platforms, this corresponds to the - #: Super key or Windows key, and on *Mac* it corresponds to the Command - #: key. This may be a modifier. - cmd_r = 0 - - #: A generic Ctrl key. This is a modifier. - ctrl = 0 - - #: The left Ctrl key. This is a modifier. - ctrl_l = 0 - - #: The right Ctrl key. This is a modifier. - ctrl_r = 0 - - #: The Delete key. - delete = 0 - - #: A down arrow key. - down = 0 - - #: The End key. - end = 0 - - #: The Enter or Return key. - enter = 0 - - #: The Esc key. - esc = 0 - - #: The function keys. F1 to F20 are defined. - f1 = 0 - f2 = 0 - f3 = 0 - f4 = 0 - f5 = 0 - f6 = 0 - f7 = 0 - f8 = 0 - f9 = 0 - f10 = 0 - f11 = 0 - f12 = 0 - f13 = 0 - f14 = 0 - f15 = 0 - f16 = 0 - f17 = 0 - f18 = 0 - f19 = 0 - f20 = 0 - - #: The Home key. - home = 0 - - #: A left arrow key. - left = 0 - - #: The PageDown key. 
- page_down = 0 - - #: The PageUp key. - page_up = 0 - - #: A right arrow key. - right = 0 - - #: A generic Shift key. This is a modifier. - shift = 0 - - #: The left Shift key. This is a modifier. - shift_l = 0 - - #: The right Shift key. This is a modifier. - shift_r = 0 - - #: The Space key. - space = 0 - - #: The Tab key. - tab = 0 - - #: An up arrow key. - up = 0 - - #: The Insert key. This may be undefined for some platforms. - insert = 0 - - #: The Menu key. This may be undefined for some platforms. - menu = 0 - - #: The NumLock key. This may be undefined for some platforms. - num_lock = 0 - - #: The Pause/Break key. This may be undefined for some platforms. - pause = 0 - - #: The PrintScreen key. This may be undefined for some platforms. - print_screen = 0 - - #: The ScrollLock key. This may be undefined for some platforms. - scroll_lock = 0 - - -class Controller(object): - """A controller for sending virtual keyboard events to the system. - """ - #: The virtual key codes - _KeyCode = KeyCode - - #: The various keys. - _Key = Key - - class InvalidKeyException(Exception): - """The exception raised when an invalid ``key`` parameter is passed to - either :meth:`Controller.press` or :meth:`Controller.release`. - - Its first argument is the ``key`` parameter. - """ - pass - - class InvalidCharacterException(Exception): - """The exception raised when an invalid character is encountered in - the string passed to :meth:`Controller.type`. - - Its first argument is the index of the character in the string, and the - second the character. - """ - pass - - def __init__(self): - self._modifiers_lock = threading.RLock() - self._modifiers = set() - self._caps_lock = False - self._dead_key = None - - kc = self._Key - - # pylint: disable=C0103; this is treated as a class scope constant, but - # we cannot set it in the class scope, as _Key is overridden by platform - # implementations - # pylint: disable=C0326; it is easier to read column aligned keys - #: The keys used as modifiers; the first value in each tuple is the - #: base modifier to use for subsequent modifiers. - self._MODIFIER_KEYS = ( - (kc.alt_gr, (kc.alt_gr.value,)), - (kc.alt, (kc.alt.value, kc.alt_l.value, kc.alt_r.value)), - (kc.cmd, (kc.cmd.value, kc.cmd_l.value, kc.cmd_r.value)), - (kc.ctrl, (kc.ctrl.value, kc.ctrl_l.value, kc.ctrl_r.value)), - (kc.shift, (kc.shift.value, kc.shift_l.value, kc.shift_r.value))) - - #: Control codes to transform into key codes when typing - self._CONTROL_CODES = { - '\n': kc.enter, - '\r': kc.enter, - '\t': kc.tab} - # pylint: enable=C0103,C0326 - - def press(self, key): - """Presses a key. - - A key may be either a string of length 1, one of the :class:`Key` - members or a :class:`KeyCode`. - - Strings will be transformed to :class:`KeyCode` using - :meth:`KeyCode.char`. Members of :class:`Key` will be translated to - their :meth:`~Key.value`. - - :param key: The key to press. 
- - :raises InvalidKeyException: if the key is invalid - - :raises ValueError: if ``key`` is a string, but its length is not ``1`` - """ - resolved = self._resolve(key) - self._update_modifiers(resolved, True) - - # Update caps lock state - if resolved == self._Key.caps_lock.value: - self._caps_lock = not self._caps_lock - - # If we currently have a dead key pressed, join it with this key - original = resolved - if self._dead_key: - try: - resolved = self._dead_key.join(resolved) - except ValueError: - self._handle(self._dead_key, True) - self._handle(self._dead_key, False) - - # If the key is a dead key, keep it for later - if resolved.is_dead: - self._dead_key = resolved - return - - try: - self._handle(resolved, True) - except self.InvalidKeyException: - if resolved != original: - self._handle(self._dead_key, True) - self._handle(self._dead_key, False) - self._handle(original, True) - - self._dead_key = None - - def release(self, key): - """Releases a key. - - A key may be either a string of length 1, one of the :class:`Key` - members or a :class:`KeyCode`. - - Strings will be transformed to :class:`KeyCode` using - :meth:`KeyCode.char`. Members of :class:`Key` will be translated to - their :meth:`~Key.value`. - - :param key: The key to release. If this is a string, it is passed to - :meth:`touches` and the returned releases are used. - - :raises InvalidKeyException: if the key is invalid - - :raises ValueError: if ``key`` is a string, but its length is not ``1`` - """ - resolved = self._resolve(key) - self._update_modifiers(resolved, False) - - # Ignore released dead keys - if resolved.is_dead: - return - - self._handle(resolved, False) - - def touch(self, key, is_press): - """Calls either :meth:`press` or :meth:`release` depending on the value - of ``is_press``. - - :param key: The key to press or release. - - :param bool is_press: Whether to press the key. - - :raises InvalidKeyException: if the key is invalid - """ - if is_press: - self.press(key) - else: - self.release(key) - - @contextlib.contextmanager - def pressed(self, *args): - """Executes a block with some keys pressed. - - :param keys: The keys to keep pressed. - """ - for key in args: - self.press(key) - - try: - yield - finally: - for key in reversed(args): - self.release(key) - - def type(self, string): - """Types a string. - - This method will send all key presses and releases necessary to type - all characters in the string. - - :param str string: The string to type. - - :raises InvalidCharacterException: if an untypable character is - encountered - """ - for i, character in enumerate(string): - key = self._CONTROL_CODES.get(character, character) - try: - self.press(key) - self.release(key) - - except (ValueError, self.InvalidKeyException): - raise self.InvalidCharacterException(i, character) - - @property - @contextlib.contextmanager - def modifiers(self): - """The currently pressed modifier keys. - - Please note that this reflects only the internal state of this - controller, and not the state of the operating system keyboard buffer. - This property cannot be used to determine whether a key is physically - pressed. - - Only the generic modifiers will be set; when pressing either - :attr:`Key.shift_l`, :attr:`Key.shift_r` or :attr:`Key.shift`, only - :attr:`Key.shift` will be present. - - Use this property within a context block thus:: - - with controller.modifiers as modifiers: - with_block() - - This ensures that the modifiers cannot be modified by another thread. 
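As a usage sketch for the Controller API deleted here (assuming the vendored copy behaves like upstream pynput, from which these files were taken)::

    from pynput.keyboard import Controller, Key

    kb = Controller()

    # A keystroke is a press/release pair.
    kb.press('a')
    kb.release('a')

    # type() expands a string into such pairs, translating control codes
    # like '\n' and '\t' through the _CONTROL_CODES table above.
    kb.type('Hello, world!\n')

    # pressed() holds a modifier for the duration of a block.
    with kb.pressed(Key.shift):
        kb.press('a')    # _resolve() upper-cases this while shift is active
        kb.release('a')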
- """ - with self._modifiers_lock: - yield set( - self._as_modifier(modifier) - for modifier in self._modifiers) - - @property - def alt_pressed(self): - """Whether any *alt* key is pressed. - - Please note that this reflects only the internal state of this - controller. See :attr:`modifiers` for more information. - """ - with self.modifiers as modifiers: - return self._Key.alt in modifiers - - @property - def alt_gr_pressed(self): - """Whether *altgr* is pressed. - - Please note that this reflects only the internal state of this - controller. See :attr:`modifiers` for more information. - """ - with self.modifiers as modifiers: - return self._Key.alt_gr in modifiers - - @property - def ctrl_pressed(self): - """Whether any *ctrl* key is pressed. - - Please note that this reflects only the internal state of this - controller. See :attr:`modifiers` for more information. - """ - with self.modifiers as modifiers: - return self._Key.ctrl in modifiers - - @property - def shift_pressed(self): - """Whether any *shift* key is pressed, or *caps lock* is toggled. - - Please note that this reflects only the internal state of this - controller. See :attr:`modifiers` for more information. - """ - if self._caps_lock: - return True - - with self.modifiers as modifiers: - return self._Key.shift in modifiers - - def _resolve(self, key): - """Resolves a key to a :class:`KeyCode` instance. - - This method will convert any key representing a character to uppercase - if a shift modifier is active. - - :param key: The key to resolve. - - :return: a key code, or ``None`` if it cannot be resolved - """ - # Use the value for the key constants - if key in self._Key: - return key.value - - # Convert strings to key codes - if isinstance(key, six.string_types): - if len(key) != 1: - raise ValueError(key) - return self._KeyCode.from_char(key) - - # Assume this is a proper key - if isinstance(key, self._KeyCode): - if key.char is not None and self.shift_pressed: - return self._KeyCode(vk=key.vk, char=key.char.upper()) - else: - return key - - def _update_modifiers(self, key, is_press): - """Updates the current modifier list. - - If ``key`` is not a modifier, no action is taken. - - :param key: The key being pressed or released. - """ - # Check whether the key is a modifier - if self._as_modifier(key): - with self._modifiers_lock: - if is_press: - self._modifiers.add(key) - else: - try: - self._modifiers.remove(key) - except KeyError: - pass - - def _as_modifier(self, key): - """Returns a key as the modifier used internally if defined. - - This method will convert values like :attr:`Key.alt_r.value` and - :attr:`Key.shift_l.value` to :attr:`Key.alt` and :attr:`Key.shift`. - - :param key: The possible modifier key. - - :return: the base modifier key, or ``None`` if ``key`` is not a - modifier - """ - for base, modifiers in self._MODIFIER_KEYS: - if key in modifiers: - return base - - def _handle(self, key, is_press): - """The platform implementation of the actual emitting of keyboard - events. - - This is a platform dependent implementation. - - :param Key key: The key to handle. - - :param bool is_press: Whether this is a key press event. - """ - raise NotImplementedError() - - -# pylint: disable=W0223; This is also an abstract class -class Listener(AbstractListener): - """A listener for keyboard events. - - Instances of this class can be used as context managers. 
This is equivalent - to the following code:: - - listener.start() - try: - with_statements() - finally: - listener.stop() - - This class inherits from :class:`threading.Thread` and supports all its - methods. It will set :attr:`daemon` to ``True`` when created. - - :param callable on_press: The callback to call when a button is pressed. - - It will be called with the argument ``(key)``, where ``key`` is a - :class:`KeyCode`, a :class:`Key` or ``None`` if the key is unknown. - - :param callable on_release: The callback to call when a button is release. - - It will be called with the argument ``(key)``, where ``key`` is a - :class:`KeyCode`, a :class:`Key` or ``None`` if the key is unknown. - - :param bool suppress: Whether to suppress events. Setting this to ``True`` - will prevent the input events from being passed to the rest of the - system. - - :param kwargs: Any non-standard platform dependent options. These should be - prefixed with the platform name thus: ``darwin_``, ``xorg_`` or - ``win32_``. - - Supported values are: - - ``darwin_intercept`` - A callable taking the arguments ``(event_type, event)``, where - ``event_type`` is ``Quartz.kCGEventKeyDown`` or - ``Quartz.kCGEventKeyDown``, and ``event`` is a ``CGEventRef``. - - This callable can freely modify the event using functions like - ``Quartz.CGEventSetIntegerValueField``. If this callable does not - return the event, the event is suppressed system wide. - - ``win32_event_filter`` - A callable taking the arguments ``(msg, data)``, where ``msg`` is - the current message, and ``data`` associated data as a - `KBLLHOOKSTRUCT `_. - - If this callback returns ``False``, the event will not be propagated - to the listener callback. - - If ``self.suppress_event()`` is called, the event is suppressed - system wide. - """ - def __init__(self, on_press=None, on_release=None, suppress=False, - **kwargs): - prefix = self.__class__.__module__.rsplit('.', 1)[-1][1:] + '_' - self._options = { - key[len(prefix):]: value - for key, value in kwargs.items() - if key.startswith(prefix)} - super(Listener, self).__init__( - on_press=on_press, on_release=on_release, suppress=suppress) -# pylint: enable=W0223 diff --git a/pype/vendor/pynput/keyboard/_darwin.py b/pype/vendor/pynput/keyboard/_darwin.py deleted file mode 100644 index 92c4a245e5..0000000000 --- a/pype/vendor/pynput/keyboard/_darwin.py +++ /dev/null @@ -1,250 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The keyboard implementation for *OSX*. -""" - -# pylint: disable=C0111 -# The documentation is extracted from the base classes - -# pylint: disable=R0903 -# We implement stubs - -import enum - -import Quartz - -from pynput._util.darwin import ( - get_unicode_to_keycode_map, - keycode_context, - keycode_to_string, - ListenerMixin) -from . 
import _base - - -class KeyCode(_base.KeyCode): - def _event(self, modifiers, mapping, is_pressed): - """This key as a *Quartz* event. - - :param set modifiers: The currently active modifiers. - - :param mapping: The current keyboard mapping. - - :param bool is_press: Whether to generate a press event. - - :return: a *Quartz* event - """ - vk = self.vk or mapping.get(self.char, 0) - result = Quartz.CGEventCreateKeyboardEvent(None, vk, is_pressed) - - Quartz.CGEventSetFlags( - result, - 0 - | (Quartz.kCGEventFlagMaskAlternate - if Key.alt in modifiers else 0) - - | (Quartz.kCGEventFlagMaskCommand - if Key.cmd in modifiers else 0) - - | (Quartz.kCGEventFlagMaskControl - if Key.ctrl in modifiers else 0) - - | (Quartz.kCGEventFlagMaskShift - if Key.shift in modifiers else 0)) - - if vk is None and self.char is not None: - Quartz.CGEventKeyboardSetUnicodeString( - result, len(self.char), self.char) - - return result - - -class Key(enum.Enum): - # Default keys - alt = KeyCode.from_vk(0x3A) - alt_l = KeyCode.from_vk(0x3A) - alt_r = KeyCode.from_vk(0x3D) - alt_gr = KeyCode.from_vk(0x3D) - backspace = KeyCode.from_vk(0x33) - caps_lock = KeyCode.from_vk(0x39) - cmd = KeyCode.from_vk(0x37) - cmd_l = KeyCode.from_vk(0x37) - cmd_r = KeyCode.from_vk(0x36) - ctrl = KeyCode.from_vk(0x3B) - ctrl_l = KeyCode.from_vk(0x3B) - ctrl_r = KeyCode.from_vk(0x3E) - delete = KeyCode.from_vk(0x75) - down = KeyCode.from_vk(0x7D) - end = KeyCode.from_vk(0x77) - enter = KeyCode.from_vk(0x24) - esc = KeyCode.from_vk(0x35) - f1 = KeyCode.from_vk(0x7A) - f2 = KeyCode.from_vk(0x78) - f3 = KeyCode.from_vk(0x63) - f4 = KeyCode.from_vk(0x76) - f5 = KeyCode.from_vk(0x60) - f6 = KeyCode.from_vk(0x61) - f7 = KeyCode.from_vk(0x62) - f8 = KeyCode.from_vk(0x64) - f9 = KeyCode.from_vk(0x65) - f10 = KeyCode.from_vk(0x6D) - f11 = KeyCode.from_vk(0x67) - f12 = KeyCode.from_vk(0x6F) - f13 = KeyCode.from_vk(0x69) - f14 = KeyCode.from_vk(0x6B) - f15 = KeyCode.from_vk(0x71) - f16 = KeyCode.from_vk(0x6A) - f17 = KeyCode.from_vk(0x40) - f18 = KeyCode.from_vk(0x4F) - f19 = KeyCode.from_vk(0x50) - f20 = KeyCode.from_vk(0x5A) - home = KeyCode.from_vk(0x73) - left = KeyCode.from_vk(0x7B) - page_down = KeyCode.from_vk(0x79) - page_up = KeyCode.from_vk(0x74) - right = KeyCode.from_vk(0x7C) - shift = KeyCode.from_vk(0x38) - shift_l = KeyCode.from_vk(0x38) - shift_r = KeyCode.from_vk(0x3C) - space = KeyCode.from_vk(0x31, char=' ') - tab = KeyCode.from_vk(0x30) - up = KeyCode.from_vk(0x7E) - - -class Controller(_base.Controller): - _KeyCode = KeyCode - _Key = Key - - def __init__(self): - super(Controller, self).__init__() - self._mapping = get_unicode_to_keycode_map() - - def _handle(self, key, is_press): - with self.modifiers as modifiers: - Quartz.CGEventPost( - Quartz.kCGHIDEventTap, - (key if key not in Key else key.value)._event( - modifiers, self._mapping, is_press)) - - -class Listener(ListenerMixin, _base.Listener): - #: The events that we listen to - _EVENTS = ( - Quartz.CGEventMaskBit(Quartz.kCGEventKeyDown) | - Quartz.CGEventMaskBit(Quartz.kCGEventKeyUp) | - Quartz.CGEventMaskBit(Quartz.kCGEventFlagsChanged)) - - #: A mapping from keysym to special key - _SPECIAL_KEYS = { - key.value.vk: key - for key in Key} - - #: The event flags set for the various modifier keys - _MODIFIER_FLAGS = { - Key.alt: Quartz.kCGEventFlagMaskAlternate, - Key.alt_l: Quartz.kCGEventFlagMaskAlternate, - Key.alt_r: Quartz.kCGEventFlagMaskAlternate, - Key.cmd: Quartz.kCGEventFlagMaskCommand, - Key.cmd_l: Quartz.kCGEventFlagMaskCommand, - Key.cmd_r: 
Quartz.kCGEventFlagMaskCommand, - Key.ctrl: Quartz.kCGEventFlagMaskControl, - Key.ctrl_l: Quartz.kCGEventFlagMaskControl, - Key.ctrl_r: Quartz.kCGEventFlagMaskControl, - Key.shift: Quartz.kCGEventFlagMaskShift, - Key.shift_l: Quartz.kCGEventFlagMaskShift, - Key.shift_r: Quartz.kCGEventFlagMaskShift} - - def __init__(self, *args, **kwargs): - super(Listener, self).__init__(*args, **kwargs) - self._flags = 0 - self._context = None - self._intercept = self._options.get( - 'intercept', - None) - - def _run(self): - with keycode_context() as context: - self._context = context - try: - super(Listener, self)._run() - finally: - self._context = None - - def _handle(self, dummy_proxy, event_type, event, dummy_refcon): - # Convert the event to a KeyCode; this may fail, and in that case we - # pass None - try: - key = self._event_to_key(event) - except IndexError: - key = None - - try: - if event_type == Quartz.kCGEventKeyDown: - # This is a normal key press - self.on_press(key) - - elif event_type == Quartz.kCGEventKeyUp: - # This is a normal key release - self.on_release(key) - - elif key == Key.caps_lock: - # We only get an event when caps lock is toggled, so we fake - # press and release - self.on_press(key) - self.on_release(key) - - else: - # This is a modifier event---excluding caps lock---for which we - # must check the current modifier state to determine whether - # the key was pressed or released - flags = Quartz.CGEventGetFlags(event) - is_press = flags & self._MODIFIER_FLAGS.get(key, 0) - if is_press: - self.on_press(key) - else: - self.on_release(key) - - finally: - # Store the current flag mask to be able to detect modifier state - # changes - self._flags = Quartz.CGEventGetFlags(event) - - def _event_to_key(self, event): - """Converts a *Quartz* event to a :class:`KeyCode`. - - :param event: The event to convert. - - :return: a :class:`pynput.keyboard.KeyCode` - - :raises IndexError: if the key code is invalid - """ - vk = Quartz.CGEventGetIntegerValueField( - event, Quartz.kCGKeyboardEventKeycode) - - # First try special keys... - if vk in self._SPECIAL_KEYS: - return self._SPECIAL_KEYS[vk] - - # ...then try characters... - # TODO: Use Quartz.CGEventKeyboardGetUnicodeString instead - char = keycode_to_string( - self._context, vk, Quartz.CGEventGetFlags(event)) - if char: - return KeyCode.from_char(char, vk=vk) - - # ...and fall back on a virtual key code - return KeyCode.from_vk(vk) diff --git a/pype/vendor/pynput/keyboard/_win32.py b/pype/vendor/pynput/keyboard/_win32.py deleted file mode 100644 index 521f9b9be0..0000000000 --- a/pype/vendor/pynput/keyboard/_win32.py +++ /dev/null @@ -1,284 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The keyboard implementation for *Windows*. 
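One note on the macOS listener deleted above, before the Windows internals that follow: modifier keys surface only as kCGEventFlagsChanged events, so _handle infers press versus release by testing the new flag mask against that key's mask. A toy sketch of the test (the mask values are meant to mirror the Quartz kCGEventFlagMask* constants, stated here as assumptions)::

    # Assumed Quartz flag mask values, for illustration only.
    MASK_SHIFT, MASK_CONTROL = 0x020000, 0x040000
    MASK_ALTERNATE, MASK_COMMAND = 0x080000, 0x100000

    MODIFIER_FLAGS = {
        'shift': MASK_SHIFT,
        'ctrl': MASK_CONTROL,
        'alt': MASK_ALTERNATE,
        'cmd': MASK_COMMAND,
    }

    def classify(key, new_flags):
        """'press' if the key's bit is set in the new flag mask, else 'release'."""
        return 'press' if new_flags & MODIFIER_FLAGS.get(key, 0) else 'release'

    assert classify('shift', MASK_SHIFT) == 'press'
    assert classify('shift', 0) == 'release'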
-""" - -# pylint: disable=C0111 -# The documentation is extracted from the base classes - -# pylint: disable=R0903 -# We implement stubs - -import contextlib -import ctypes -import enum -import six - -from ctypes import wintypes - -import pynput._util.win32_vks as VK - -from pynput._util import AbstractListener, NotifierMixin -from pynput._util.win32 import ( - INPUT, - INPUT_union, - KEYBDINPUT, - KeyTranslator, - ListenerMixin, - SendInput, - SystemHook, - VkKeyScan) -from . import _base - - -class KeyCode(_base.KeyCode): - def _parameters(self, is_press): - """The parameters to pass to ``SendInput`` to generate this key. - - :param bool is_press: Whether to generate a press event. - - :return: all arguments to pass to ``SendInput`` for this key - - :rtype: dict - """ - if self.vk: - vk = self.vk - scan = 0 - flags = 0 - else: - res = VkKeyScan(self.char) - if (res >> 8) & 0xFF == 0: - vk = res & 0xFF - scan = 0 - flags = 0 - else: - vk = 0 - scan = ord(self.char) - flags = KEYBDINPUT.UNICODE - return dict( - dwFlags=flags | (KEYBDINPUT.KEYUP if not is_press else 0), - wVk=vk, - wScan=scan) - - -class Key(enum.Enum): - alt = KeyCode.from_vk(VK.MENU) - alt_l = KeyCode.from_vk(VK.LMENU) - alt_r = KeyCode.from_vk(VK.RMENU) - alt_gr = KeyCode.from_vk(VK.RMENU) - backspace = KeyCode.from_vk(VK.BACK) - caps_lock = KeyCode.from_vk(VK.CAPITAL) - cmd = KeyCode.from_vk(VK.LWIN) - cmd_l = KeyCode.from_vk(VK.LWIN) - cmd_r = KeyCode.from_vk(VK.RWIN) - ctrl = KeyCode.from_vk(VK.CONTROL) - ctrl_l = KeyCode.from_vk(VK.LCONTROL) - ctrl_r = KeyCode.from_vk(VK.RCONTROL) - delete = KeyCode.from_vk(VK.DELETE) - down = KeyCode.from_vk(VK.DOWN) - end = KeyCode.from_vk(VK.END) - enter = KeyCode.from_vk(VK.RETURN) - esc = KeyCode.from_vk(VK.ESCAPE) - f1 = KeyCode.from_vk(VK.F1) - f2 = KeyCode.from_vk(VK.F2) - f3 = KeyCode.from_vk(VK.F3) - f4 = KeyCode.from_vk(VK.F4) - f5 = KeyCode.from_vk(VK.F5) - f6 = KeyCode.from_vk(VK.F6) - f7 = KeyCode.from_vk(VK.F7) - f8 = KeyCode.from_vk(VK.F8) - f9 = KeyCode.from_vk(VK.F9) - f10 = KeyCode.from_vk(VK.F10) - f11 = KeyCode.from_vk(VK.F11) - f12 = KeyCode.from_vk(VK.F12) - f13 = KeyCode.from_vk(VK.F13) - f14 = KeyCode.from_vk(VK.F14) - f15 = KeyCode.from_vk(VK.F15) - f16 = KeyCode.from_vk(VK.F16) - f17 = KeyCode.from_vk(VK.F17) - f18 = KeyCode.from_vk(VK.F18) - f19 = KeyCode.from_vk(VK.F19) - f20 = KeyCode.from_vk(VK.F20) - home = KeyCode.from_vk(VK.HOME) - left = KeyCode.from_vk(VK.LEFT) - page_down = KeyCode.from_vk(VK.NEXT) - page_up = KeyCode.from_vk(VK.PRIOR) - right = KeyCode.from_vk(VK.RIGHT) - shift = KeyCode.from_vk(VK.LSHIFT) - shift_l = KeyCode.from_vk(VK.LSHIFT) - shift_r = KeyCode.from_vk(VK.RSHIFT) - space = KeyCode.from_vk(VK.SPACE, char=' ') - tab = KeyCode.from_vk(VK.TAB) - up = KeyCode.from_vk(VK.UP) - - insert = KeyCode.from_vk(VK.INSERT) - menu = KeyCode.from_vk(VK.APPS) - num_lock = KeyCode.from_vk(VK.NUMLOCK) - pause = KeyCode.from_vk(VK.PAUSE) - print_screen = KeyCode.from_vk(VK.SNAPSHOT) - scroll_lock = KeyCode.from_vk(VK.SCROLL) - - -class Controller(_base.Controller): - _KeyCode = KeyCode - _Key = Key - - def _handle(self, key, is_press): - SendInput( - 1, - ctypes.byref(INPUT( - type=INPUT.KEYBOARD, - value=INPUT_union( - ki=KEYBDINPUT(**key._parameters(is_press))))), - ctypes.sizeof(INPUT)) - - -class Listener(ListenerMixin, _base.Listener): - #: The Windows hook ID for low level keyboard events, ``WH_KEYBOARD_LL`` - _EVENTS = 13 - - _WM_KEYDOWN = 0x0100 - _WM_KEYUP = 0x0101 - _WM_SYSKEYDOWN = 0x0104 - _WM_SYSKEYUP = 0x0105 - - # A bit flag 
attached to messages indicating that the payload is an actual - # UTF-16 character code - _UTF16_FLAG = 0x1000 - - # A special virtual key code designating unicode characters - _VK_PACKET = 0xE7 - - #: The messages that correspond to a key press - _PRESS_MESSAGES = (_WM_KEYDOWN, _WM_SYSKEYDOWN) - - #: The messages that correspond to a key release - _RELEASE_MESSAGES = (_WM_KEYUP, _WM_SYSKEYUP) - - #: A mapping from keysym to special key - _SPECIAL_KEYS = { - key.value.vk: key - for key in Key} - - _HANDLED_EXCEPTIONS = ( - SystemHook.SuppressException,) - - class _KBDLLHOOKSTRUCT(ctypes.Structure): - """Contains information about a mouse event passed to a - ``WH_KEYBOARD_LL`` hook procedure, ``LowLevelKeyboardProc``. - """ - _fields_ = [ - ('vkCode', wintypes.DWORD), - ('scanCode', wintypes.DWORD), - ('flags', wintypes.DWORD), - ('time', wintypes.DWORD), - ('dwExtraInfo', ctypes.c_void_p)] - - #: A pointer to a :class:`KBDLLHOOKSTRUCT` - _LPKBDLLHOOKSTRUCT = ctypes.POINTER(_KBDLLHOOKSTRUCT) - - def __init__(self, *args, **kwargs): - super(Listener, self).__init__(*args, **kwargs) - self._translator = KeyTranslator() - self._event_filter = self._options.get( - 'event_filter', - lambda msg, data: True) - - def _convert(self, code, msg, lpdata): - if code != SystemHook.HC_ACTION: - return - - data = ctypes.cast(lpdata, self._LPKBDLLHOOKSTRUCT).contents - is_packet = data.vkCode == self._VK_PACKET - - # Suppress further propagation of the event if it is filtered - if self._event_filter(msg, data) is False: - return None - elif is_packet: - return (msg | self._UTF16_FLAG, data.scanCode) - else: - return (msg, data.vkCode) - - @AbstractListener._emitter - def _process(self, wparam, lparam): - msg = wparam - vk = lparam - - # If the key has the UTF-16 flag, we treat it as a unicode character, - # otherwise convert the event to a KeyCode; this may fail, and in that - # case we pass None - is_utf16 = msg & self._UTF16_FLAG - if is_utf16: - msg = msg ^ self._UTF16_FLAG - scan = vk - key = KeyCode.from_char(six.unichr(scan)) - else: - try: - key = self._event_to_key(msg, vk) - except OSError: - key = None - - if msg in self._PRESS_MESSAGES: - self.on_press(key) - - elif msg in self._RELEASE_MESSAGES: - self.on_release(key) - - # pylint: disable=R0201 - @contextlib.contextmanager - def _receive(self): - """An empty context manager; we do not need to fake keyboard events. - """ - yield - # pylint: enable=R0201 - - def _event_to_key(self, msg, vk): - """Converts an :class:`_KBDLLHOOKSTRUCT` to a :class:`KeyCode`. - - :param msg: The message received. - - :param vk: The virtual key code to convert. - - :return: a :class:`pynput.keyboard.KeyCode` - - :raises OSError: if the message and data could not be converted - """ - # We must always call self._translate to keep the keyboard state up to - # date - key = KeyCode(**self._translate( - vk, - msg in self._PRESS_MESSAGES)) - - # If the virtual key code corresponds to a Key value, we prefer that - if vk in self._SPECIAL_KEYS: - return self._SPECIAL_KEYS[vk] - else: - return key - - def _translate(self, vk, is_press): - """Translates a virtual key code to a parameter list passable to - :class:`pynput.keyboard.KeyCode`. - - :param int vk: The virtual key code. - - :param bool is_press: Whether this is a press event. 
- - :return: a parameter list to the :class:`pynput.keyboard.KeyCode` - constructor - """ - return self._translator(vk, is_press) diff --git a/pype/vendor/pynput/keyboard/_xorg.py b/pype/vendor/pynput/keyboard/_xorg.py deleted file mode 100644 index 782f62ce96..0000000000 --- a/pype/vendor/pynput/keyboard/_xorg.py +++ /dev/null @@ -1,607 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. -""" -The keyboard implementation for *Xorg*. -""" - -# pylint: disable=C0111 -# The documentation is extracted from the base classes - -# pylint: disable=R0903 -# We implement stubs - -import enum -import threading - -import Xlib.display -import Xlib.ext -import Xlib.ext.xtest -import Xlib.X -import Xlib.XK -import Xlib.protocol -import Xlib.keysymdef.xkb - -from pynput._util import NotifierMixin -from pynput._util.xorg import ( - alt_mask, - alt_gr_mask, - display_manager, - index_to_shift, - keyboard_mapping, - ListenerMixin, - numlock_mask, - shift_to_index, - symbol_to_keysym) -from pynput._util.xorg_keysyms import ( - CHARS, - DEAD_KEYS, - KEYPAD_KEYS, - KEYSYMS, - SYMBOLS) -from . import _base - - -class KeyCode(_base.KeyCode): - @classmethod - def _from_symbol(cls, symbol, **kwargs): - """Creates a key from a symbol. - - :param str symbol: The symbol name.
- - :return: a key code - """ - # First try simple translation - keysym = Xlib.XK.string_to_keysym(symbol) - if keysym: - return cls.from_vk(keysym, **kwargs) - - # If that fails, try checking a module attribute of Xlib.keysymdef.xkb - if not keysym: - # pylint: disable=W0702; we want to ignore errors - try: - return cls.from_vk( - getattr(Xlib.keysymdef.xkb, 'XK_' + symbol, 0), - **kwargs) - except: - return cls.from_vk( - SYMBOLS.get(symbol, (0,))[0], - **kwargs) - # pylint: enable=W0702 - - -class Key(enum.Enum): - # Default keys - alt = KeyCode._from_symbol('Alt_L') - alt_l = KeyCode._from_symbol('Alt_L') - alt_r = KeyCode._from_symbol('Alt_R') - alt_gr = KeyCode._from_symbol('Mode_switch') - backspace = KeyCode._from_symbol('BackSpace') - caps_lock = KeyCode._from_symbol('Caps_Lock') - cmd = KeyCode._from_symbol('Super_L') - cmd_l = KeyCode._from_symbol('Super_L') - cmd_r = KeyCode._from_symbol('Super_R') - ctrl = KeyCode._from_symbol('Control_L') - ctrl_l = KeyCode._from_symbol('Control_L') - ctrl_r = KeyCode._from_symbol('Control_R') - delete = KeyCode._from_symbol('Delete') - down = KeyCode._from_symbol('Down') - end = KeyCode._from_symbol('End') - enter = KeyCode._from_symbol('Return') - esc = KeyCode._from_symbol('Escape') - f1 = KeyCode._from_symbol('F1') - f2 = KeyCode._from_symbol('F2') - f3 = KeyCode._from_symbol('F3') - f4 = KeyCode._from_symbol('F4') - f5 = KeyCode._from_symbol('F5') - f6 = KeyCode._from_symbol('F6') - f7 = KeyCode._from_symbol('F7') - f8 = KeyCode._from_symbol('F8') - f9 = KeyCode._from_symbol('F9') - f10 = KeyCode._from_symbol('F10') - f11 = KeyCode._from_symbol('F11') - f12 = KeyCode._from_symbol('F12') - f13 = KeyCode._from_symbol('F13') - f14 = KeyCode._from_symbol('F14') - f15 = KeyCode._from_symbol('F15') - f16 = KeyCode._from_symbol('F16') - f17 = KeyCode._from_symbol('F17') - f18 = KeyCode._from_symbol('F18') - f19 = KeyCode._from_symbol('F19') - f20 = KeyCode._from_symbol('F20') - home = KeyCode._from_symbol('Home') - left = KeyCode._from_symbol('Left') - page_down = KeyCode._from_symbol('Page_Down') - page_up = KeyCode._from_symbol('Page_Up') - right = KeyCode._from_symbol('Right') - shift = KeyCode._from_symbol('Shift_L') - shift_l = KeyCode._from_symbol('Shift_L') - shift_r = KeyCode._from_symbol('Shift_R') - space = KeyCode._from_symbol('space', char=' ') - tab = KeyCode._from_symbol('Tab') - up = KeyCode._from_symbol('Up') - - insert = KeyCode._from_symbol('Insert') - menu = KeyCode._from_symbol('Menu') - num_lock = KeyCode._from_symbol('Num_Lock') - pause = KeyCode._from_symbol('Pause') - print_screen = KeyCode._from_symbol('Print') - scroll_lock = KeyCode._from_symbol('Scroll_Lock') - - -class Controller(NotifierMixin, _base.Controller): - _KeyCode = KeyCode - _Key = Key - - #: The shift mask for :attr:`Key.ctrl` - CTRL_MASK = Xlib.X.ControlMask - - #: The shift mask for :attr:`Key.shift` - SHIFT_MASK = Xlib.X.ShiftMask - - def __init__(self): - super(Controller, self).__init__() - self._display = Xlib.display.Display() - self._keyboard_mapping = None - self._borrows = {} - self._borrow_lock = threading.RLock() - - # pylint: disable=C0103; this is treated as a class scope constant, but - # we cannot set it in the class scope, as it requires a Display instance - self.ALT_MASK = alt_mask(self._display) - self.ALT_GR_MASK = alt_gr_mask(self._display) - # pylint: enable=C0103 - - def __del__(self): - if self._display: - self._display.close() - - @property - def keyboard_mapping(self): - """A mapping from *keysyms* to *key codes*. 
- - Each value is the tuple ``(key_code, shift_state)``. By sending an - event with the specified *key code* and shift state, the specified - *keysym* will be touched. - """ - if not self._keyboard_mapping: - self._update_keyboard_mapping() - return self._keyboard_mapping - - def _handle(self, key, is_press): - """Resolves a key identifier and sends a keyboard event. - - :param KeyCode key: The key to resolve and send. - - :param bool is_press: Whether this is a press event. - """ - event = Xlib.display.event.KeyPress if is_press \ - else Xlib.display.event.KeyRelease - keysym = self._keysym(key) - - # Make sure to verify that the key was resolved - if keysym is None: - raise self.InvalidKeyException(key) - - # If the key has a virtual key code, use that immediately with - # fake_input; fake_input, being an X server extension, has access to more - # internal state than we do - if key.vk is not None: - with display_manager(self._display) as dm: - Xlib.ext.xtest.fake_input( - dm, - Xlib.X.KeyPress if is_press else Xlib.X.KeyRelease, - dm.keysym_to_keycode(key.vk)) - - # Otherwise use XSendEvent; we need to use this in the general case to - # work around problems with keyboard layouts - else: - try: - keycode, shift_state = self.keyboard_mapping[keysym] - self._send_key(event, keycode, shift_state) - - except KeyError: - with self._borrow_lock: - keycode, index, count = self._borrows[keysym] - self._send_key( - event, - keycode, - index_to_shift(self._display, index)) - count += 1 if is_press else -1 - self._borrows[keysym] = (keycode, index, count) - - # Notify any running listeners - self._emit('_on_fake_event', key, is_press) - - def _keysym(self, key): - """Converts a key to a *keysym*. - - :param KeyCode key: The key code to convert. - """ - return self._resolve_dead(key) if key.is_dead else None \ - or self._resolve_special(key) \ - or self._resolve_normal(key) \ - or self._resolve_borrowed(key) \ - or self._resolve_borrowing(key) - - def _send_key(self, event, keycode, shift_state): - """Sends a single keyboard event. - - :param event: The *X* keyboard event. - - :param int keycode: The calculated keycode. - - :param int shift_state: The shift state. The actual value used is - :attr:`shift_state` or'd with this value. - """ - with display_manager(self._display) as dm, self.modifiers as modifiers: - # Under certain circumstances, such as when running under Xephyr, - # the value returned by dm.get_input_focus is an int - window = dm.get_input_focus().focus - send_event = getattr( - window, - 'send_event', - lambda event: dm.send_event(window, event)) - send_event(event( - detail=keycode, - state=shift_state | self._shift_mask(modifiers), - time=0, - root=dm.screen().root, - window=window, - same_screen=0, - child=Xlib.X.NONE, - root_x=0, root_y=0, event_x=0, event_y=0)) - - def _resolve_dead(self, key): - """Tries to resolve a dead key. - - :param KeyCode key: The dead key to resolve. - """ - # pylint: disable=W0702; we want to ignore errors - try: - keysym, _ = SYMBOLS[CHARS[key.combining]] - except: - return None - # pylint: enable=W0702 - - if keysym not in self.keyboard_mapping: - return None - - return keysym - - def _resolve_special(self, key): - """Tries to resolve a special key. - - A special key has the :attr:`~KeyCode.vk` attribute set. - - :param KeyCode key: The key to resolve. - """ - if not key.vk: - return None - - return key.vk - - def _resolve_normal(self, key): - """Tries to resolve a normal key.
- - A normal key exists on the keyboard, and is typed by pressing - and releasing a simple key, possibly in combination with a modifier. - - :param KeyCode key: The key to resolve. - """ - keysym = self._key_to_keysym(key) - if keysym is None: - return None - - if keysym not in self.keyboard_mapping: - return None - - return keysym - - def _resolve_borrowed(self, key): - """Tries to resolve a key by looking up the already borrowed *keysyms*. - - A borrowed *keysym* does not exist on the keyboard, but has been - temporarily added to the layout. - - :param KeyCode key: The key to resolve. - """ - keysym = self._key_to_keysym(key) - if keysym is None: - return None - - with self._borrow_lock: - if keysym not in self._borrows: - return None - - return keysym - - def _resolve_borrowing(self, key): - """Tries to resolve a key by modifying the layout temporarily. - - A borrowed *keysym* does not exist on the keyboard, but is temporarily - added to the layout. - - :param KeyCode key: The key to resolve. - """ - keysym = self._key_to_keysym(key) - if keysym is None: - return None - - mapping = self._display.get_keyboard_mapping(8, 255 - 8) - - def i2kc(index): - return index + 8 - - def kc2i(keycode): - return keycode - 8 - - #: Finds a keycode and index by looking at already used keycodes - def reuse(): - for _, (keycode, _, _) in self._borrows.items(): - keycodes = mapping[kc2i(keycode)] - - # Only the first four items are addressable by X - for index in range(4): - if not keycodes[index]: - return keycode, index - - #: Finds a keycode and index by using a new keycode - def borrow(): - for i, keycodes in enumerate(mapping): - if not any(keycodes): - return i2kc(i), 0 - - #: Finds a keycode and index by reusing an old, unused one - def overwrite(): - for keysym, (keycode, index, count) in self._borrows.items(): - if count < 1: - del self._borrows[keysym] - return keycode, index - - #: Registers a keycode for a specific key and modifier state - def register(dm, keycode, index): - i = kc2i(keycode) - mapping[i][index] = keysym - dm.change_keyboard_mapping( - keycode, - mapping[i:i + 1]) - self._borrows[keysym] = (keycode, index, 0) - - try: - with display_manager(self._display) as dm, self._borrow_lock as _: - # First try an already used keycode, then try a new one, and - # fall back on reusing one that is not currently pressed - register(dm, *( - reuse() or - borrow() or - overwrite())) - return keysym - - except TypeError: - return None - - def _key_to_keysym(self, key): - """Converts a character key code to a *keysym*. - - :param KeyCode key: The key code. - - :return: a keysym if found - :rtype: int or None - """ - symbol = CHARS.get(key.char, None) - if symbol is None: - return None - - # pylint: disable=W0702; we want to ignore errors - try: - return symbol_to_keysym(symbol) - except: - try: - return SYMBOLS[symbol][0] - except: - return None - # pylint: enable=W0702 - - def _shift_mask(self, modifiers): - """The *X* modifier mask to apply for a set of modifiers. - - :param set modifiers: A set of active modifiers for which to get the - shift mask. - """ - return ( - 0 - | (self.ALT_MASK - if Key.alt in modifiers else 0) - - | (self.ALT_GR_MASK - if Key.alt_gr in modifiers else 0) - - | (self.CTRL_MASK - if Key.ctrl in modifiers else 0) - - | (self.SHIFT_MASK - if Key.shift in modifiers else 0)) - - def _update_keyboard_mapping(self): - """Updates the keyboard mapping. 
- """ - with display_manager(self._display) as dm: - self._keyboard_mapping = keyboard_mapping(dm) - - -@Controller._receiver -class Listener(ListenerMixin, _base.Listener): - _EVENTS = ( - Xlib.X.KeyPress, - Xlib.X.KeyRelease) - - #: A mapping from keysym to special key - _SPECIAL_KEYS = { - key.value.vk: key - for key in Key} - - #: A mapping from numeric keypad keys to keys - _KEYPAD_KEYS = { - KEYPAD_KEYS['KP_0']: KeyCode.from_char('0'), - KEYPAD_KEYS['KP_1']: KeyCode.from_char('1'), - KEYPAD_KEYS['KP_2']: KeyCode.from_char('2'), - KEYPAD_KEYS['KP_3']: KeyCode.from_char('3'), - KEYPAD_KEYS['KP_4']: KeyCode.from_char('4'), - KEYPAD_KEYS['KP_5']: KeyCode.from_char('5'), - KEYPAD_KEYS['KP_6']: KeyCode.from_char('6'), - KEYPAD_KEYS['KP_7']: KeyCode.from_char('7'), - KEYPAD_KEYS['KP_8']: KeyCode.from_char('8'), - KEYPAD_KEYS['KP_9']: KeyCode.from_char('9'), - KEYPAD_KEYS['KP_Add']: KeyCode.from_char('+'), - KEYPAD_KEYS['KP_Decimal']: KeyCode.from_char(','), - KEYPAD_KEYS['KP_Delete']: Key.delete, - KEYPAD_KEYS['KP_Divide']: KeyCode.from_char('/'), - KEYPAD_KEYS['KP_Down']: Key.down, - KEYPAD_KEYS['KP_End']: Key.end, - KEYPAD_KEYS['KP_Enter']: Key.enter, - KEYPAD_KEYS['KP_Equal']: KeyCode.from_char('='), - KEYPAD_KEYS['KP_F1']: Key.f1, - KEYPAD_KEYS['KP_F2']: Key.f2, - KEYPAD_KEYS['KP_F3']: Key.f3, - KEYPAD_KEYS['KP_F4']: Key.f4, - KEYPAD_KEYS['KP_Home']: Key.home, - KEYPAD_KEYS['KP_Insert']: Key.insert, - KEYPAD_KEYS['KP_Left']: Key.left, - KEYPAD_KEYS['KP_Multiply']: KeyCode.from_char('*'), - KEYPAD_KEYS['KP_Page_Down']: Key.page_down, - KEYPAD_KEYS['KP_Page_Up']: Key.page_up, - KEYPAD_KEYS['KP_Right']: Key.right, - KEYPAD_KEYS['KP_Space']: Key.space, - KEYPAD_KEYS['KP_Subtract']: KeyCode.from_char('-'), - KEYPAD_KEYS['KP_Tab']: Key.tab, - KEYPAD_KEYS['KP_Up']: Key.up} - - def __init__(self, *args, **kwargs): - super(Listener, self).__init__(*args, **kwargs) - self._keyboard_mapping = None - - def _run(self): - with self._receive(): - super(Listener, self)._run() - - def _initialize(self, display): - # Get the keyboard mapping to be able to translate events details to - # key codes - min_keycode = display.display.info.min_keycode - keycode_count = display.display.info.max_keycode - min_keycode + 1 - self._keyboard_mapping = display.get_keyboard_mapping( - min_keycode, keycode_count) - - def _handle(self, display, event): - # Convert the event to a KeyCode; this may fail, and in that case we - # pass None - try: - key = self._event_to_key(display, event) - except IndexError: - key = None - - if event.type == Xlib.X.KeyPress: - self.on_press(key) - - elif event.type == Xlib.X.KeyRelease: - self.on_release(key) - - def _suppress_start(self, display): - display.screen().root.grab_keyboard( - self._event_mask, Xlib.X.GrabModeAsync, Xlib.X.GrabModeAsync, - Xlib.X.CurrentTime) - - def _suppress_stop(self, display): - display.ungrab_keyboard(Xlib.X.CurrentTime) - - def _on_fake_event(self, key, is_press): - """The handler for fake press events sent by the controllers. - - :param KeyCode key: The key pressed. - - :param bool is_press: Whether this is a press event. - """ - (self.on_press if is_press else self.on_release)( - self._SPECIAL_KEYS.get(key.vk, key)) - - def _keycode_to_keysym(self, display, keycode, index): - """Converts a keycode and shift state index to a keysym. - - This method uses a simplified version of the *X* convention to locate - the correct keysym in the display table: since this method is only used - to locate special keys, alphanumeric keys are not treated specially. 
- - :param display: The current *X* display. - - :param keycode: The keycode. - - :param index: The shift state index. - - :return: a keysym - """ - keysym = display.keycode_to_keysym(keycode, index) - if keysym: - return keysym - elif index & 0x2: - return self._keycode_to_keysym(display, keycode, index & ~0x2) - elif index & 0x1: - return self._keycode_to_keysym(display, keycode, index & ~0x1) - else: - return 0 - - def _event_to_key(self, display, event): - """Converts an *X* event to a :class:`KeyCode`. - - :param display: The current *X* display. - - :param event: The event to convert. - - :return: a :class:`pynput.keyboard.KeyCode` - - :raises IndexError: if the key code is invalid - """ - keycode = event.detail - index = shift_to_index(display, event.state) - - # First try special keys... - keysym = self._keycode_to_keysym(display, keycode, index) - if keysym in self._SPECIAL_KEYS: - return self._SPECIAL_KEYS[keysym] - elif keysym in self._KEYPAD_KEYS: - # We must recalculate the index if numlock is active; index 1 is the - # one to use - try: - return self._KEYPAD_KEYS[ - self._keycode_to_keysym( - display, - keycode, - bool(event.state & numlock_mask(display)))] - except KeyError: - # Since we recalculated the key, this may happen - pass - - # ...then try characters... - name = KEYSYMS.get(keysym, None) - if name is not None and name in SYMBOLS: - char = SYMBOLS[name][1].upper() if index & 1 else SYMBOLS[name][1] - if char in DEAD_KEYS: - return KeyCode.from_dead(DEAD_KEYS[char], vk=keycode) - else: - return KeyCode.from_char(char, vk=keycode) - - # ...and fall back on a virtual key code - return KeyCode.from_vk(keysym) diff --git a/pype/vendor/pynput/mouse/__init__.py b/pype/vendor/pynput/mouse/__init__.py deleted file mode 100644 index ee77766935..0000000000 --- a/pype/vendor/pynput/mouse/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The module containing mouse classes. - -See the documentation for more information. 
-""" - -# pylint: disable=C0103 -# Button, Controller and Listener are not constants - -import os -import sys - -if os.environ.get('__PYNPUT_GENERATE_DOCUMENTATION') == 'yes': - from ._base import Button, Controller, Listener -else: - Button = None - Controller = None - Listener = None - - -if sys.platform == 'darwin': - if not Button and not Controller and not Listener: - from ._darwin import Button, Controller, Listener - -elif sys.platform == 'win32': - if not Button and not Controller and not Listener: - from ._win32 import Button, Controller, Listener - -else: - if not Button and not Controller and not Listener: - try: - from ._xorg import Button, Controller, Listener - except ImportError: - # For now, since we only support Xlib anyway, we re-raise these - # errors to allow users to determine the cause of failures to import - raise - - -if not Button or not Controller or not Listener: - raise ImportError('this platform is not supported') diff --git a/pype/vendor/pynput/mouse/_base.py b/pype/vendor/pynput/mouse/_base.py deleted file mode 100644 index b23ad527a3..0000000000 --- a/pype/vendor/pynput/mouse/_base.py +++ /dev/null @@ -1,255 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -This module contains the base implementation. - -The actual interface to mouse classes is defined here, but the implementation is -located in a platform dependent module. -""" - -# pylint: disable=R0903 -# We implement stubs - -import enum - -from pynput._util import AbstractListener - - -class Button(enum.Enum): - """The various buttons. - - The actual values for these items differ between platforms. Some - platforms may have additional buttons, but these are guaranteed to be - present everywhere. - """ - #: An unknown button was pressed - unknown = 0 - - #: The left button - left = 1 - - #: The middle button - middle = 2 - - #: The right button - right = 3 - - -class Controller(object): - """A controller for sending virtual mouse events to the system. - """ - @property - def position(self): - """The current position of the mouse pointer. - - This is the tuple ``(x, y)``, and setting it will move the pointer. - """ - return self._position_get() - - @position.setter - def position(self, pos): - self._position_set(pos) - - def scroll(self, dx, dy): - """Sends scroll events. - - :param int dx: The horizontal scroll. The units of scrolling is - undefined. - - :param int dy: The vertical scroll. The units of scrolling is - undefined. - - :raises ValueError: if the values are invalid, for example out of bounds - """ - self._scroll(dx, dy) - - def press(self, button): - """Emits a button press event at the current position. - - :param Button button: The button to press. - """ - self._press(button) - - def release(self, button): - """Emits a button release event at the current position. - - :param Button button: The button to release. 
- """ - self._release(button) - - def move(self, dx, dy): - """Moves the mouse pointer a number of pixels from its current - position. - - :param int x: The horizontal offset. - - :param int dy: The vertical offset. - - :raises ValueError: if the values are invalid, for example out of bounds - """ - self.position = tuple(sum(i) for i in zip(self.position, (dx, dy))) - - def click(self, button, count=1): - """Emits a button click event at the current position. - - The default implementation sends a series of press and release events. - - :param Button button: The button to click. - - :param int count: The number of clicks to send. - """ - with self as controller: - for _ in range(count): - controller.press(button) - controller.release(button) - - def __enter__(self): - """Begins a series of clicks. - - In the default :meth:`click` implementation, the return value of this - method is used for the calls to :meth:`press` and :meth:`release` - instead of ``self``. - - The default implementation is a no-op. - """ - return self - - def __exit__(self, exc_type, value, traceback): - """Ends a series of clicks. - """ - pass - - def _position_get(self): - """The implementation of the getter for :attr:`position`. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - def _position_set(self, pos): - """The implementation of the setter for :attr:`position`. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - def _scroll(self, dx, dy): - """The implementation of the :meth:`scroll` method. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - def _press(self, button): - """The implementation of the :meth:`press` method. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - def _release(self, button): - """The implementation of the :meth:`release` method. - - This is a platform dependent implementation. - """ - raise NotImplementedError() - - -# pylint: disable=W0223; This is also an abstract class -class Listener(AbstractListener): - """A listener for mouse events. - - Instances of this class can be used as context managers. This is equivalent - to the following code:: - - listener.start() - try: - with_statements() - finally: - listener.stop() - - This class inherits from :class:`threading.Thread` and supports all its - methods. It will set :attr:`daemon` to ``True`` when created. - - :param callable on_move: The callback to call when mouse move events occur. - - It will be called with the arguments ``(x, y)``, which is the new - pointer position. If this callback raises :class:`StopException` or - returns ``False``, the listener is stopped. - - :param callable on_click: The callback to call when a mouse button is - clicked. - - It will be called with the arguments ``(x, y, button, pressed)``, - where ``(x, y)`` is the new pointer position, ``button`` is one of the - :class:`Button` values and ``pressed`` is whether the button was - pressed. - - If this callback raises :class:`StopException` or returns ``False``, - the listener is stopped. - - :param callable on_scroll: The callback to call when mouse scroll - events occur. - - It will be called with the arguments ``(x, y, dx, dy)``, where - ``(x, y)`` is the new pointer position, and ``(dx, dy)`` is the scroll - vector. - - If this callback raises :class:`StopException` or returns ``False``, - the listener is stopped. - - :param bool suppress: Whether to suppress events. 
Setting this to ``True`` - will prevent the input events from being passed to the rest of the - system. - - :param kwargs: Any non-standard platform dependent options. These should be - prefixed with the platform name thus: ``darwin_``, ``xorg_`` or - ``win32_``. - - Supported values are: - - ``darwin_intercept`` - A callable taking the arguments ``(event_type, event)``, where - ``event_type`` is any mouse related event type constant, and - ``event`` is a ``CGEventRef``. - - This callable can freely modify the event using functions like - ``Quartz.CGEventSetIntegerValueField``. If this callable does not - return the event, the event is suppressed system wide. - - ``win32_event_filter`` - A callable taking the arguments ``(msg, data)``, where ``msg`` is - the current message, and ``data`` associated data as a - `MSLLHOOKSTRUCT `_. - - If this callback returns ``False``, the event will not be propagated - to the listener callback. - - If ``self.suppress_event()`` is called, the event is suppressed - system wide. - """ - def __init__(self, on_move=None, on_click=None, on_scroll=None, - suppress=False, **kwargs): - prefix = self.__class__.__module__.rsplit('.', 1)[-1][1:] + '_' - self._options = { - key[len(prefix):]: value - for key, value in kwargs.items() - if key.startswith(prefix)} - super(Listener, self).__init__( - on_move=on_move, on_click=on_click, on_scroll=on_scroll, - suppress=suppress) -# pylint: enable=W0223 diff --git a/pype/vendor/pynput/mouse/_darwin.py b/pype/vendor/pynput/mouse/_darwin.py deleted file mode 100644 index f6dac9e385..0000000000 --- a/pype/vendor/pynput/mouse/_darwin.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The mouse implementation for *OSX*. -""" - -# pylint: disable=C0111 -# The documentation is extracted from the base classes - -# pylint: disable=R0903 -# We implement stubs - -import enum -import Quartz - -from AppKit import NSEvent - -from pynput._util.darwin import ( - ListenerMixin) -from . import _base - - -def _button_value(base_name, mouse_button): - """Generates the value tuple for a :class:`Button` value. - - :param str base_name: The base name for the button. This shuld be a string - like ``'kCGEventLeftMouse'``. - - :param int mouse_button: The mouse button ID. - - :return: a value tuple - """ - return ( - tuple( - getattr(Quartz, '%sMouse%s' % (base_name, name)) - for name in ('Down', 'Up', 'Dragged')), - mouse_button) - - -class Button(enum.Enum): - """The various buttons. 
- """ - unknown = None - left = _button_value('kCGEventLeft', 0) - middle = _button_value('kCGEventOther', 2) - right = _button_value('kCGEventRight', 1) - - -class Controller(_base.Controller): - #: The scroll speed - _SCROLL_SPEED = 5 - - def __init__(self, *args, **kwargs): - super(Controller, self).__init__(*args, **kwargs) - self._click = None - self._drag_button = None - - def _position_get(self): - pos = NSEvent.mouseLocation() - - return pos.x, Quartz.CGDisplayPixelsHigh(0) - pos.y - - def _position_set(self, pos): - try: - (_, _, mouse_type), mouse_button = self._drag_button.value - except AttributeError: - mouse_type = Quartz.kCGEventMouseMoved - mouse_button = 0 - - Quartz.CGEventPost( - Quartz.kCGHIDEventTap, - Quartz.CGEventCreateMouseEvent( - None, - mouse_type, - pos, - mouse_button)) - - def _scroll(self, dx, dy): - while dx != 0 or dy != 0: - xval = 1 if dx > 0 else -1 if dx < 0 else 0 - dx -= xval - yval = 1 if dy > 0 else -1 if dy < 0 else 0 - dy -= yval - - Quartz.CGEventPost( - Quartz.kCGHIDEventTap, - Quartz.CGEventCreateScrollWheelEvent( - None, - Quartz.kCGScrollEventUnitPixel, - 2, - yval * self._SCROLL_SPEED, - xval * self._SCROLL_SPEED)) - - def _press(self, button): - (press, _, _), mouse_button = button.value - event = Quartz.CGEventCreateMouseEvent( - None, - press, - self.position, - mouse_button) - - # If we are performing a click, we need to set this state flag - if self._click is not None: - self._click += 1 - Quartz.CGEventSetIntegerValueField( - event, - Quartz.kCGMouseEventClickState, - self._click) - - Quartz.CGEventPost(Quartz.kCGHIDEventTap, event) - - # Store the button to enable dragging - self._drag_button = button - - def _release(self, button): - (_, release, _), mouse_button = button.value - event = Quartz.CGEventCreateMouseEvent( - None, - release, - self.position, - mouse_button) - - # If we are performing a click, we need to set this state flag - if self._click is not None: - Quartz.CGEventSetIntegerValueField( - event, - Quartz.kCGMouseEventClickState, - self._click) - - Quartz.CGEventPost(Quartz.kCGHIDEventTap, event) - - if button == self._drag_button: - self._drag_button = None - - def __enter__(self): - self._click = 0 - return self - - def __exit__(self, exc_type, value, traceback): - self._click = None - - -class Listener(ListenerMixin, _base.Listener): - #: The events that we listen to - _EVENTS = ( - Quartz.CGEventMaskBit(Quartz.kCGEventMouseMoved) | - Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseDown) | - Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseUp) | - Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseDragged) | - Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseDown) | - Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseUp) | - Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseDragged) | - Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseDown) | - Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseUp) | - Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseDragged) | - Quartz.CGEventMaskBit(Quartz.kCGEventScrollWheel)) - - def __init__(self, *args, **kwargs): - super(Listener, self).__init__(*args, **kwargs) - self._intercept = self._options.get( - 'intercept', - None) - - def _handle(self, dummy_proxy, event_type, event, dummy_refcon): - """The callback registered with *Mac OSX* for mouse events. - - This method will call the callbacks registered on initialisation. 
- """ - try: - (px, py) = Quartz.CGEventGetLocation(event) - except AttributeError: - # This happens during teardown of the virtual machine - return - - # Quickly detect the most common event type - if event_type == Quartz.kCGEventMouseMoved: - self.on_move(px, py) - - elif event_type == Quartz.kCGEventScrollWheel: - dx = Quartz.CGEventGetIntegerValueField( - event, - Quartz.kCGScrollWheelEventDeltaAxis2) - dy = Quartz.CGEventGetIntegerValueField( - event, - Quartz.kCGScrollWheelEventDeltaAxis1) - self.on_scroll(px, py, dx, dy) - - else: - for button in Button: - try: - (press, release, drag), _ = button.value - except TypeError: - # Button.unknown cannot be enumerated - continue - - # Press and release generate click events, and drag - # generates move events - if event_type in (press, release): - self.on_click(px, py, button, event_type == press) - elif event_type == drag: - self.on_move(px, py) diff --git a/pype/vendor/pynput/mouse/_win32.py b/pype/vendor/pynput/mouse/_win32.py deleted file mode 100644 index c0ac10a9ec..0000000000 --- a/pype/vendor/pynput/mouse/_win32.py +++ /dev/null @@ -1,192 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The mouse implementation for *Windows*. -""" - -# pylint: disable=C0111 -# The documentation is extracted from the base classes - -# pylint: disable=R0903 -# We implement stubs - -import ctypes -import enum - -from ctypes import ( - windll, - wintypes) - -from pynput._util import NotifierMixin -from pynput._util.win32 import ( - INPUT, - INPUT_union, - ListenerMixin, - MOUSEINPUT, - SendInput, - SystemHook) -from . import _base - - -class Button(enum.Enum): - """The various buttons. 
- """ - unknown = None - left = (MOUSEINPUT.LEFTUP, MOUSEINPUT.LEFTDOWN) - middle = (MOUSEINPUT.MIDDLEUP, MOUSEINPUT.MIDDLEDOWN) - right = (MOUSEINPUT.RIGHTUP, MOUSEINPUT.RIGHTDOWN) - - -class Controller(NotifierMixin, _base.Controller): - __GetCursorPos = windll.user32.GetCursorPos - __SetCursorPos = windll.user32.SetCursorPos - - def _position_get(self): - point = wintypes.POINT() - if self.__GetCursorPos(ctypes.byref(point)): - return (point.x, point.y) - else: - return None - - def _position_set(self, pos): - pos = int(pos[0]), int(pos[1]) - self.__SetCursorPos(*pos) - self._emit('on_move', *pos) - - def _scroll(self, dx, dy): - if dy: - SendInput( - 1, - ctypes.byref(INPUT( - type=INPUT.MOUSE, - value=INPUT_union( - mi=MOUSEINPUT( - dwFlags=MOUSEINPUT.WHEEL, - mouseData=int(dy))))), - ctypes.sizeof(INPUT)) - - if dx: - SendInput( - 1, - ctypes.byref(INPUT( - type=INPUT.MOUSE, - value=INPUT_union( - mi=MOUSEINPUT( - dwFlags=MOUSEINPUT.HWHEEL, - mouseData=int(dx))))), - ctypes.sizeof(INPUT)) - - if dx or dy: - px, py = self._position_get() - self._emit('on_scroll', px, py, dx, dy) - - def _press(self, button): - SendInput( - 1, - ctypes.byref(INPUT( - type=INPUT.MOUSE, - value=INPUT_union( - mi=MOUSEINPUT( - dwFlags=button.value[1])))), - ctypes.sizeof(INPUT)) - - def _release(self, button): - SendInput( - 1, - ctypes.byref(INPUT( - type=INPUT.MOUSE, - value=INPUT_union( - mi=MOUSEINPUT( - dwFlags=button.value[0])))), - ctypes.sizeof(INPUT)) - - -@Controller._receiver -class Listener(ListenerMixin, _base.Listener): - #: The Windows hook ID for low level mouse events, ``WH_MOUSE_LL`` - _EVENTS = 14 - - WM_LBUTTONDOWN = 0x0201 - WM_LBUTTONUP = 0x0202 - WM_MBUTTONDOWN = 0x0207 - WM_MBUTTONUP = 0x0208 - WM_MOUSEMOVE = 0x0200 - WM_MOUSEWHEEL = 0x020A - WM_MOUSEHWHEEL = 0x020E - WM_RBUTTONDOWN = 0x0204 - WM_RBUTTONUP = 0x0205 - - _WHEEL_DELTA = 120 - - #: A mapping from messages to button events - CLICK_BUTTONS = { - WM_LBUTTONDOWN: (Button.left, True), - WM_LBUTTONUP: (Button.left, False), - WM_MBUTTONDOWN: (Button.middle, True), - WM_MBUTTONUP: (Button.middle, False), - WM_RBUTTONDOWN: (Button.right, True), - WM_RBUTTONUP: (Button.right, False)} - - #: A mapping from messages to scroll vectors - SCROLL_BUTTONS = { - WM_MOUSEWHEEL: (0, 1), - WM_MOUSEHWHEEL: (1, 0)} - - _HANDLED_EXCEPTIONS = ( - SystemHook.SuppressException,) - - class _MSLLHOOKSTRUCT(ctypes.Structure): - """Contains information about a mouse event passed to a ``WH_MOUSE_LL`` - hook procedure, ``MouseProc``. 
- """ - _fields_ = [ - ('pt', wintypes.POINT), - ('mouseData', wintypes.DWORD), - ('flags', wintypes.DWORD), - ('time', wintypes.DWORD), - ('dwExtraInfo', ctypes.c_void_p)] - - #: A pointer to a :class:`_MSLLHOOKSTRUCT` - _LPMSLLHOOKSTRUCT = ctypes.POINTER(_MSLLHOOKSTRUCT) - - def __init__(self, *args, **kwargs): - super(Listener, self).__init__(*args, **kwargs) - self._event_filter = self._options.get( - 'event_filter', - lambda msg, data: True) - - def _handle(self, code, msg, lpdata): - if code != SystemHook.HC_ACTION: - return - - data = ctypes.cast(lpdata, self._LPMSLLHOOKSTRUCT).contents - - # Suppress further propagation of the event if it is filtered - if self._event_filter(msg, data) is False: - return - - if msg == self.WM_MOUSEMOVE: - self.on_move(data.pt.x, data.pt.y) - - elif msg in self.CLICK_BUTTONS: - button, pressed = self.CLICK_BUTTONS[msg] - self.on_click(data.pt.x, data.pt.y, button, pressed) - - elif msg in self.SCROLL_BUTTONS: - mx, my = self.SCROLL_BUTTONS[msg] - dd = wintypes.SHORT(data.mouseData >> 16).value // self._WHEEL_DELTA - self.on_scroll(data.pt.x, data.pt.y, dd * mx, dd * my) diff --git a/pype/vendor/pynput/mouse/_xorg.py b/pype/vendor/pynput/mouse/_xorg.py deleted file mode 100644 index d9e13a9fa5..0000000000 --- a/pype/vendor/pynput/mouse/_xorg.py +++ /dev/null @@ -1,170 +0,0 @@ -# coding=utf-8 -# pynput -# Copyright (C) 2015-2017 Moses Palmér -# -# This program is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 3 of the License, or (at your option) any -# later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . -""" -The keyboard implementation for *Xorg*. -""" - -# pylint: disable=C0111 -# The documentation is extracted from the base classes - - -# pylint: disable=E1101,E1102 -# We dynamically generate the Button class - -# pylint: disable=R0903 -# We implement stubs - -import enum -import Xlib.display -import Xlib.ext -import Xlib.ext.xtest -import Xlib.X -import Xlib.protocol - -from pynput._util.xorg import ( - display_manager, - ListenerMixin) -from . 
import _base - - -# pylint: disable=C0103 -Button = enum.Enum( - 'Button', - module=__name__, - names=[ - ('unknown', None), - ('left', 1), - ('middle', 2), - ('right', 3), - ('scroll_up', 4), - ('scroll_down', 5), - ('scroll_left', 6), - ('scroll_right', 7)] + [ - ('button%d' % i, i) - for i in range(8, 31)]) -# pylint: enable=C0103 - - -class Controller(_base.Controller): - def __init__(self): - self._display = Xlib.display.Display() - - def __del__(self): - if hasattr(self, '_display'): - self._display.close() - - def _position_get(self): - with display_manager(self._display) as dm: - qp = dm.screen().root.query_pointer() - return (qp.root_x, qp.root_y) - - def _position_set(self, pos): - px, py = self._check_bounds(*pos) - with display_manager(self._display) as dm: - Xlib.ext.xtest.fake_input(dm, Xlib.X.MotionNotify, x=px, y=py) - - def _scroll(self, dx, dy): - dx, dy = self._check_bounds(dx, dy) - if dy: - self.click( - button=Button.scroll_up if dy > 0 else Button.scroll_down, - count=abs(dy)) - - if dx: - self.click( - button=Button.scroll_right if dx > 0 else Button.scroll_left, - count=abs(dx)) - - def _press(self, button): - with display_manager(self._display) as dm: - Xlib.ext.xtest.fake_input(dm, Xlib.X.ButtonPress, button.value) - - def _release(self, button): - with display_manager(self._display) as dm: - Xlib.ext.xtest.fake_input(dm, Xlib.X.ButtonRelease, button.value) - - def _check_bounds(self, *args): - """Checks the arguments and makes sure they are within the bounds of a - short integer. - - :param args: The values to verify. - """ - if not all( - (-0x7fff - 1) <= number <= 0x7fff - for number in args): - raise ValueError(args) - else: - return tuple(int(p) for p in args) - - -class Listener(ListenerMixin, _base.Listener): - #: A mapping from button values to scroll directions - _SCROLL_BUTTONS = { - Button.scroll_up.value: (0, 1), - Button.scroll_down.value: (0, -1), - Button.scroll_right.value: (1, 0), - Button.scroll_left.value: (-1, 0)} - - _EVENTS = ( - Xlib.X.ButtonPressMask, - Xlib.X.ButtonReleaseMask) - - def _handle(self, dummy_display, event): - px = event.root_x - py = event.root_y - - if event.type == Xlib.X.ButtonPress: - # Scroll events are sent as button presses with the scroll - # button codes - scroll = self._SCROLL_BUTTONS.get(event.detail, None) - if scroll: - self.on_scroll(px, py, *scroll) - else: - self.on_click(px, py, self._button(event.detail), True) - - elif event.type == Xlib.X.ButtonRelease: - # Send an event only if this was not a scroll event - if event.detail not in self._SCROLL_BUTTONS: - self.on_click(px, py, self._button(event.detail), False) - - else: - self.on_move(px, py) - - - def _suppress_start(self, display): - display.screen().root.grab_pointer( - True, self._event_mask, Xlib.X.GrabModeAsync, Xlib.X.GrabModeAsync, - 0, 0, Xlib.X.CurrentTime) - - def _suppress_stop(self, display): - display.ungrab_pointer(Xlib.X.CurrentTime) - - # pylint: disable=R0201 - def _button(self, detail): - """Creates a mouse button from an event detail. - - If the button is unknown, :attr:`Button.unknown` is returned. - - :param detail: The event detail. - - :return: a button - """ - try: - return Button(detail) - except ValueError: - return Button.unknown - # pylint: enable=R0201 diff --git a/pype/vendor/pyparsing.py b/pype/vendor/pyparsing.py deleted file mode 100644 index cf38419bcb..0000000000 --- a/pype/vendor/pyparsing.py +++ /dev/null @@ -1,5771 +0,0 @@ -# module pyparsing.py -# -# Copyright (c) 2003-2018 Paul T. 
McGuire -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# - -__doc__ = \ -""" -pyparsing module - Classes and methods to define and execute parsing grammars -============================================================================= - -The pyparsing module is an alternative approach to creating and executing simple grammars, -vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you -don't need to learn a new syntax for defining grammars or matching expressions - the parsing module -provides a library of classes that you use to construct the grammar directly in Python. - -Here is a program to parse "Hello, World!" (or any greeting of the form -C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements -(L{'+'} operator gives L{And} expressions, strings are auto-converted to -L{Literal} expressions):: - - from pyparsing import Word, alphas - - # define grammar of a greeting - greet = Word(alphas) + "," + Word(alphas) + "!" - - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - -The program outputs the following:: - - Hello, World! -> ['Hello', ',', 'World', '!'] - -The Python representation of the grammar is quite readable, owing to the self-explanatory -class names, and the use of '+', '|' and '^' operators. - -The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an -object with named attributes. - -The pyparsing module handles some of the problems that are typically vexing when writing text parsers: - - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - - quoted strings - - embedded comments - - -Getting Started - ------------------ -Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing -classes inherit from.
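# Illustrative sketch (assumes the pip-installed pyparsing this commit
# migrates to): the whitespace tolerance described above means the same
# grammar accepts differently spaced input:
from pyparsing import Word, alphas

greet = Word(alphas) + "," + Word(alphas) + "!"
for text in ("Hello, World!", "Hello,World!", "Hello , World !"):
    print(text, "->", greet.parseString(text))
# each line prints: ['Hello', ',', 'World', '!']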
Use the docstrings for examples of how to: - - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes - - construct character word-group expressions using the L{Word} class - - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes - - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones - - associate names with your parsed results using L{ParserElement.setResultsName} - - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} - - find more useful common expressions in the L{pyparsing_common} namespace class -""" - -__version__ = "2.2.2" -__versionTime__ = "29 Sep 2018 15:58 UTC" -__author__ = "Paul McGuire " - -import string -from weakref import ref as wkref -import copy -import sys -import warnings -import re -import sre_constants -import collections -import pprint -import traceback -import types -from datetime import datetime - -try: - from _thread import RLock -except ImportError: - from threading import RLock - -try: - # Python 3 - from collections.abc import Iterable - from collections.abc import MutableMapping -except ImportError: - # Python 2.7 - from collections import Iterable - from collections import MutableMapping - -try: - from collections import OrderedDict as _OrderedDict -except ImportError: - try: - from ordereddict import OrderedDict as _OrderedDict - except ImportError: - _OrderedDict = None - -#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) - -__all__ = [ -'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', -'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', -'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', -'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', -'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', -'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', -'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', -'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', -'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', -'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', -'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', -'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', -'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', -] - -system_version = tuple(sys.version_info)[:3] -PY_3 = system_version[0] == 3 -if PY_3: - _MAX_INT = sys.maxsize - basestring = str - unichr = chr - _ustr = str - - # build list of single arg builtins, that can be used as parse actions - singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] - -else: - _MAX_INT = sys.maxint - range = xrange - - def _ustr(obj): - """Drop-in replacement 
for str(obj) that tries to be Unicode friendly. It first tries - str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It - then < returns the unicode object | encodes it with the default encoding | ... >. - """ - if isinstance(obj,unicode): - return obj - - try: - # If this works, then _ustr(obj) has the same behaviour as str(obj), so - # it won't break any existing code. - return str(obj) - - except UnicodeEncodeError: - # Else encode it - ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') - xmlcharref = Regex(r'&#\d+;') - xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) - return xmlcharref.transformString(ret) - - # build list of single arg builtins, tolerant of Python version, that can be used as parse actions - singleArgBuiltins = [] - import __builtin__ - for fname in "sum len sorted reversed list tuple set any all min max".split(): - try: - singleArgBuiltins.append(getattr(__builtin__,fname)) - except AttributeError: - continue - -_generatorType = type((y for y in range(1))) - -def _xml_escape(data): - """Escape &, <, >, ", ', etc. in a string of data.""" - - # ampersand must be replaced first - from_symbols = '&><"\'' - to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) - for from_,to_ in zip(from_symbols, to_symbols): - data = data.replace(from_, to_) - return data - -class _Constants(object): - pass - -alphas = string.ascii_uppercase + string.ascii_lowercase -nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" -alphanums = alphas + nums -_bslash = chr(92) -printables = "".join(c for c in string.printable if c not in string.whitespace) - -class ParseBaseException(Exception): - """base exception class for all parsing runtime exceptions""" - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, pstr, loc=0, msg=None, elem=None ): - self.loc = loc - if msg is None: - self.msg = pstr - self.pstr = "" - else: - self.msg = msg - self.pstr = pstr - self.parserElement = elem - self.args = (pstr, loc, msg) - - @classmethod - def _from_exception(cls, pe): - """ - internal factory method to simplify creating one type of ParseException - from another - avoids having __init__ signature conflicts among subclasses - """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) - - def __getattr__( self, aname ): - """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - """ - if( aname == "lineno" ): - return lineno( self.loc, self.pstr ) - elif( aname in ("col", "column") ): - return col( self.loc, self.pstr ) - elif( aname == "line" ): - return line( self.loc, self.pstr ) - else: - raise AttributeError(aname) - - def __str__( self ): - return "%s (at char %d), (line:%d, col:%d)" % \ - ( self.msg, self.loc, self.lineno, self.column ) - def __repr__( self ): - return _ustr(self) - def markInputline( self, markerString = ">!<" ): - """Extracts the exception line from the input string, and marks - the location of the exception with a special symbol. 
- """ - line_str = self.line - line_column = self.column - 1 - if markerString: - line_str = "".join((line_str[:line_column], - markerString, line_str[line_column:])) - return line_str.strip() - def __dir__(self): - return "lineno col line".split() + dir(type(self)) - -class ParseException(ParseBaseException): - """ - Exception thrown when parse expressions don't match class; - supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - - Example:: - try: - Word(nums).setName("integer").parseString("ABC") - except ParseException as pe: - print(pe) - print("column: {}".format(pe.col)) - - prints:: - Expected integer (at char 0), (line:1, col:1) - column: 1 - """ - pass - -class ParseFatalException(ParseBaseException): - """user-throwable exception thrown when inconsistent parse content - is found; stops all parsing immediately""" - pass - -class ParseSyntaxException(ParseFatalException): - """just like L{ParseFatalException}, but thrown internally when an - L{ErrorStop} ('-' operator) indicates that parsing is to stop - immediately because an unbacktrackable syntax error has been found""" - pass - -#~ class ReparseException(ParseBaseException): - #~ """Experimental class - parse actions can raise this exception to cause - #~ pyparsing to reparse the input string: - #~ - with a modified input string, and/or - #~ - with a modified start location - #~ Set the values of the ReparseException in the constructor, and raise the - #~ exception in a parse action to cause pyparsing to use the new string/location. - #~ Setting the values as None causes no change to be made. - #~ """ - #~ def __init_( self, newstring, restartLoc ): - #~ self.newParseText = newstring - #~ self.reparseLoc = restartLoc - -class RecursiveGrammarException(Exception): - """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" - def __init__( self, parseElementList ): - self.parseElementTrace = parseElementList - - def __str__( self ): - return "RecursiveGrammarException: %s" % self.parseElementTrace - -class _ParseResultsWithOffset(object): - def __init__(self,p1,p2): - self.tup = (p1,p2) - def __getitem__(self,i): - return self.tup[i] - def __repr__(self): - return repr(self.tup[0]) - def setOffset(self,i): - self.tup = (self.tup[0],i) - -class ParseResults(object): - """ - Structured parse results, to provide multiple means of access to the parsed data: - - as a list (C{len(results)}) - - by list index (C{results[0], results[1]}, etc.) 
- - by attribute (C{results.} - see L{ParserElement.setResultsName}) - - Example:: - integer = Word(nums) - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - # equivalent form: - # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - # parseString returns a ParseResults object - result = date_str.parseString("1999/12/31") - - def test(s, fn=repr): - print("%s -> %s" % (s, fn(eval(s)))) - test("list(result)") - test("result[0]") - test("result['month']") - test("result.day") - test("'month' in result") - test("'minutes' in result") - test("result.dump()", str) - prints:: - list(result) -> ['1999', '/', '12', '/', '31'] - result[0] -> '1999' - result['month'] -> '12' - result.day -> '31' - 'month' in result -> True - 'minutes' in result -> False - result.dump() -> ['1999', '/', '12', '/', '31'] - - day: 31 - - month: 12 - - year: 1999 - """ - def __new__(cls, toklist=None, name=None, asList=True, modal=True ): - if isinstance(toklist, cls): - return toklist - retobj = object.__new__(cls) - retobj.__doinit = True - return retobj - - # Performance tuning: we construct a *lot* of these, so keep this - # constructor as small and fast as possible - def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): - if self.__doinit: - self.__doinit = False - self.__name = None - self.__parent = None - self.__accumNames = {} - self.__asList = asList - self.__modal = modal - if toklist is None: - toklist = [] - if isinstance(toklist, list): - self.__toklist = toklist[:] - elif isinstance(toklist, _generatorType): - self.__toklist = list(toklist) - else: - self.__toklist = [toklist] - self.__tokdict = dict() - - if name is not None and name: - if not modal: - self.__accumNames[name] = 0 - if isinstance(name,int): - name = _ustr(name) # will always return a str, but use _ustr for consistency - self.__name = name - if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): - if isinstance(toklist,basestring): - toklist = [ toklist ] - if asList: - if isinstance(toklist,ParseResults): - self[name] = _ParseResultsWithOffset(toklist.copy(),0) - else: - self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) - self[name].__name = name - else: - try: - self[name] = toklist[0] - except (KeyError,TypeError,IndexError): - self[name] = toklist - - def __getitem__( self, i ): - if isinstance( i, (int,slice) ): - return self.__toklist[i] - else: - if i not in self.__accumNames: - return self.__tokdict[i][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[i] ]) - - def __setitem__( self, k, v, isinstance=isinstance ): - if isinstance(v,_ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] - sub = v[0] - elif isinstance(k,(int,slice)): - self.__toklist[k] = v - sub = v - else: - self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] - sub = v - if isinstance(sub,ParseResults): - sub.__parent = wkref(self) - - def __delitem__( self, i ): - if isinstance(i,(int,slice)): - mylen = len( self.__toklist ) - del self.__toklist[i] - - # convert int to slice - if isinstance(i, int): - if i < 0: - i += mylen - i = slice(i, i+1) - # get removed indices - removed = list(range(*i.indices(mylen))) - removed.reverse() - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for j in removed: - for k, (value, position) in enumerate(occurrences): - 
occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) - else: - del self.__tokdict[i] - - def __contains__( self, k ): - return k in self.__tokdict - - def __len__( self ): return len( self.__toklist ) - def __bool__(self): return ( not not self.__toklist ) - __nonzero__ = __bool__ - def __iter__( self ): return iter( self.__toklist ) - def __reversed__( self ): return iter( self.__toklist[::-1] ) - def _iterkeys( self ): - if hasattr(self.__tokdict, "iterkeys"): - return self.__tokdict.iterkeys() - else: - return iter(self.__tokdict) - - def _itervalues( self ): - return (self[k] for k in self._iterkeys()) - - def _iteritems( self ): - return ((k, self[k]) for k in self._iterkeys()) - - if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys (Python 3.x only).""" - - values = _itervalues - """Returns an iterator of all named result values (Python 3.x only).""" - - items = _iteritems - """Returns an iterator of all named result key-value tuples (Python 3.x only).""" - - else: - iterkeys = _iterkeys - """Returns an iterator of all named result keys (Python 2.x only).""" - - itervalues = _itervalues - """Returns an iterator of all named result values (Python 2.x only).""" - - iteritems = _iteritems - """Returns an iterator of all named result key-value tuples (Python 2.x only).""" - - def keys( self ): - """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iterkeys()) - - def values( self ): - """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" - return list(self.itervalues()) - - def items( self ): - """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" - return list(self.iteritems()) - - def haskeys( self ): - """Since keys() returns an iterator, this method is helpful in bypassing - code that looks for the existence of any defined results names.""" - return bool(self.__tokdict) - - def pop( self, *args, **kwargs): - """ - Removes and returns item at specified index (default=C{last}). - Supports both C{list} and C{dict} semantics for C{pop()}. If passed no - argument or an integer argument, it will use C{list} semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use C{dict} - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in C{dict.pop()}. 
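# Illustrative sketch (assumes the standalone pyparsing package): the
# dict-style views and pop() documented above, Python 3 semantics.
from pyparsing import Word, nums

integer = Word(nums)
date_str = integer("year") + "/" + integer("month") + "/" + integer("day")
r = date_str.parseString("1999/12/31")
print(sorted(r.keys()))          # -> ['day', 'month', 'year']
print(dict(r.items())["month"])  # -> '12'
print(r.haskeys())               # -> True
print(r.pop())                   # list semantics: removes and returns '31'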
- - Example:: - def remove_first(tokens): - tokens.pop(0) - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] - - label = Word(alphas) - patt = label("LABEL") + OneOrMore(Word(nums)) - print(patt.parseString("AAB 123 321").dump()) - - # Use pop() in a parse action to remove named result (note that corresponding value is not - # removed from list form of results) - def remove_LABEL(tokens): - tokens.pop("LABEL") - return tokens - patt.addParseAction(remove_LABEL) - print(patt.parseString("AAB 123 321").dump()) - prints:: - ['AAB', '123', '321'] - - LABEL: AAB - - ['AAB', '123', '321'] - """ - if not args: - args = [-1] - for k,v in kwargs.items(): - if k == 'default': - args = (args[0], v) - else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or - args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue - - def get(self, key, defaultValue=None): - """ - Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified. - - Similar to C{dict.get()}. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString("1999/12/31") - print(result.get("year")) # -> '1999' - print(result.get("hour", "not specified")) # -> 'not specified' - print(result.get("hour")) # -> None - """ - if key in self: - return self[key] - else: - return defaultValue - - def insert( self, index, insStr ): - """ - Inserts new element at location index in the list of parsed tokens. - - Similar to C{list.insert()}. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to insert the parse location in the front of the parsed results - def insert_locn(locn, tokens): - tokens.insert(0, locn) - print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] - """ - self.__toklist.insert(index, insStr) - # fixup indices in token dictionary - for name,occurrences in self.__tokdict.items(): - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) - - def append( self, item ): - """ - Add single element to end of ParseResults list of elements. - - Example:: - print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - - # use a parse action to compute the sum of the parsed integers, and add it to the end - def append_sum(tokens): - tokens.append(sum(map(int, tokens))) - print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] - """ - self.__toklist.append(item) - - def extend( self, itemseq ): - """ - Add sequence of elements to end of ParseResults list of elements. 
- - Example:: - patt = OneOrMore(Word(alphas)) - - # use a parse action to append the reverse of the matched strings, to make a palindrome - def make_palindrome(tokens): - tokens.extend(reversed([t[::-1] for t in tokens])) - return ''.join(tokens) - print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' - """ - if isinstance(itemseq, ParseResults): - self += itemseq - else: - self.__toklist.extend(itemseq) - - def clear( self ): - """ - Clear all elements and results names. - """ - del self.__toklist[:] - self.__tokdict.clear() - - def __getattr__( self, name ): - try: - return self[name] - except KeyError: - return "" - - if name in self.__tokdict: - if name not in self.__accumNames: - return self.__tokdict[name][-1][0] - else: - return ParseResults([ v[0] for v in self.__tokdict[name] ]) - else: - return "" - - def __add__( self, other ): - ret = self.copy() - ret += other - return ret - - def __iadd__( self, other ): - if other.__tokdict: - offset = len(self.__toklist) - addoffset = lambda a: offset if a<0 else a+offset - otheritems = other.__tokdict.items() - otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) - for (k,vlist) in otheritems for v in vlist] - for k,v in otherdictitems: - self[k] = v - if isinstance(v[0],ParseResults): - v[0].__parent = wkref(self) - - self.__toklist += other.__toklist - self.__accumNames.update( other.__accumNames ) - return self - - def __radd__(self, other): - if isinstance(other,int) and other == 0: - # useful for merging many ParseResults using sum() builtin - return self.copy() - else: - # this may raise a TypeError - so be it - return other + self - - def __repr__( self ): - return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) - - def __str__( self ): - return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' - - def _asStringList( self, sep='' ): - out = [] - for item in self.__toklist: - if out and sep: - out.append(sep) - if isinstance( item, ParseResults ): - out += item._asStringList() - else: - out.append( _ustr(item) ) - return out - - def asList( self ): - """ - Returns the parse results as a nested list of matching tokens, all converted to strings. - - Example:: - patt = OneOrMore(Word(alphas)) - result = patt.parseString("sldkj lsdkj sldkj") - # even though the result prints in string-like form, it is actually a pyparsing ParseResults - print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] - - # Use asList() to create an actual list - result_list = result.asList() - print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] - """ - return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] - - def asDict( self ): - """ - Returns the named parse results as a nested dictionary. - - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) - - result_dict = result.asDict() - print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} - - # even though a ParseResults supports dict-like access, sometime you just need to have a dict - import json - print(json.dumps(result)) # -> Exception: TypeError: ... 
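# Illustrative sketch (assumes the standalone pyparsing package): __radd__
# above makes `0 + ParseResults` work, so the sum() builtin can merge many
# ParseResults objects into one (searchString is defined further below).
from pyparsing import Word, alphas

cap_word = Word(alphas.upper(), alphas.lower())
merged = sum(cap_word.searchString("More than Iron, more than Lead"))
print(merged.asList())   # -> ['More', 'Iron', 'Lead']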
is not JSON serializable - print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} - """ - if PY_3: - item_fn = self.items - else: - item_fn = self.iteritems - - def toItem(obj): - if isinstance(obj, ParseResults): - if obj.haskeys(): - return obj.asDict() - else: - return [toItem(v) for v in obj] - else: - return obj - - return dict((k,toItem(v)) for k,v in item_fn()) - - def copy( self ): - """ - Returns a new copy of a C{ParseResults} object. - """ - ret = ParseResults( self.__toklist ) - ret.__tokdict = self.__tokdict.copy() - ret.__parent = self.__parent - ret.__accumNames.update( self.__accumNames ) - ret.__name = self.__name - return ret - - def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): - """ - (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. - """ - nl = "\n" - out = [] - namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() - for v in vlist) - nextLevelIndent = indent + " " - - # collapse out indents if formatting is not desired - if not formatted: - indent = "" - nextLevelIndent = "" - nl = "" - - selfTag = None - if doctag is not None: - selfTag = doctag - else: - if self.__name: - selfTag = self.__name - - if not selfTag: - if namedItemsOnly: - return "" - else: - selfTag = "ITEM" - - out += [ nl, indent, "<", selfTag, ">" ] - - for i,res in enumerate(self.__toklist): - if isinstance(res,ParseResults): - if i in namedItems: - out += [ res.asXML(namedItems[i], - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - out += [ res.asXML(None, - namedItemsOnly and doctag is None, - nextLevelIndent, - formatted)] - else: - # individual token, see if there is a name for it - resTag = None - if i in namedItems: - resTag = namedItems[i] - if not resTag: - if namedItemsOnly: - continue - else: - resTag = "ITEM" - xmlBodyText = _xml_escape(_ustr(res)) - out += [ nl, nextLevelIndent, "<", resTag, ">", - xmlBodyText, - "" ] - - out += [ nl, indent, "" ] - return "".join(out) - - def __lookup(self,sub): - for k,vlist in self.__tokdict.items(): - for v,loc in vlist: - if sub is v: - return k - return None - - def getName(self): - r""" - Returns the results name for this token expression. Useful when several - different expressions might match at a particular location. - - Example:: - integer = Word(nums) - ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") - house_number_expr = Suppress('#') + Word(nums, alphanums) - user_data = (Group(house_number_expr)("house_number") - | Group(ssn_expr)("ssn") - | Group(integer)("age")) - user_info = OneOrMore(user_data) - - result = user_info.parseString("22 111-22-3333 #221B") - for item in result: - print(item.getName(), ':', item[0]) - prints:: - age : 22 - ssn : 111-22-3333 - house_number : 221B - """ - if self.__name: - return self.__name - elif self.__parent: - par = self.__parent() - if par: - return par.__lookup(self) - else: - return None - elif (len(self) == 1 and - len(self.__tokdict) == 1 and - next(iter(self.__tokdict.values()))[0][1] in (0,-1)): - return next(iter(self.__tokdict.keys())) - else: - return None - - def dump(self, indent='', depth=0, full=True): - """ - Diagnostic method for listing out the contents of a C{ParseResults}. - Accepts an optional C{indent} argument so that this string can be embedded - in a nested display of other data. 
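# Illustrative sketch (assumes the standalone pyparsing package): asDict()
# above yields a plain dict, suitable for JSON serialization.
import json
from pyparsing import Word, nums

integer = Word(nums)
date_str = integer("year") + "/" + integer("month") + "/" + integer("day")
result = date_str.parseString("1999/12/31")
print(json.dumps(result.asDict(), sort_keys=True))
# -> {"day": "31", "month": "12", "year": "1999"}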
- - Example:: - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - result = date_str.parseString('12/31/1999') - print(result.dump()) - prints:: - ['12', '/', '31', '/', '1999'] - - day: 1999 - - month: 31 - - year: 12 - """ - out = [] - NL = '\n' - out.append( indent+_ustr(self.asList()) ) - if full: - if self.haskeys(): - items = sorted((str(k), v) for k,v in self.items()) - for k,v in items: - if out: - out.append(NL) - out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) - if isinstance(v,ParseResults): - if v: - out.append( v.dump(indent,depth+1) ) - else: - out.append(_ustr(v)) - else: - out.append(repr(v)) - elif any(isinstance(vv,ParseResults) for vv in self): - v = self - for i,vv in enumerate(v): - if isinstance(vv,ParseResults): - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) - else: - out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) - - return "".join(out) - - def pprint(self, *args, **kwargs): - """ - Pretty-printer for parsed results as a list, using the C{pprint} module. - Accepts additional positional or keyword args as defined for the - C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) - - Example:: - ident = Word(alphas, alphanums) - num = Word(nums) - func = Forward() - term = ident | num | Group('(' + func + ')') - func <<= ident + Group(Optional(delimitedList(term))) - result = func.parseString("fna a,b,(fnb c,d,200),100") - result.pprint(width=40) - prints:: - ['fna', - ['a', - 'b', - ['(', 'fnb', ['c', 'd', '200'], ')'], - '100']] - """ - pprint.pprint(self.asList(), *args, **kwargs) - - # add support for pickle protocol - def __getstate__(self): - return ( self.__toklist, - ( self.__tokdict.copy(), - self.__parent is not None and self.__parent() or None, - self.__accumNames, - self.__name ) ) - - def __setstate__(self,state): - self.__toklist = state[0] - (self.__tokdict, - par, - inAccumNames, - self.__name) = state[1] - self.__accumNames = {} - self.__accumNames.update(inAccumNames) - if par is not None: - self.__parent = wkref(par) - else: - self.__parent = None - - def __getnewargs__(self): - return self.__toklist, self.__name, self.__asList, self.__modal - - def __dir__(self): - return (dir(type(self)) + list(self.keys())) - -MutableMapping.register(ParseResults) - -def col (loc,strg): - """Returns current column within a string, counting newlines as line separators. - The first column is number 1. - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{ParserElement.parseString}} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ - s = strg - return 1 if 0} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ - return strg.count("\n",0,loc) + 1 - -def line( loc, strg ): - """Returns the line of text containing loc within a string, counting newlines as line separators. 
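# Illustrative sketch (assumes the standalone pyparsing package): the
# col/lineno/line helpers defined here, applied to a fixed location.
from pyparsing import col, lineno, line

s = "abc\ndef"
loc = s.index("e")           # loc 5, i.e. line 2, column 2
print(lineno(loc, s))        # -> 2
print(col(loc, s))           # -> 2
print(line(loc, s))          # -> 'def'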
- """ - lastCR = strg.rfind("\n", 0, loc) - nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR+1:nextCR] - else: - return strg[lastCR+1:] - -def _defaultStartDebugAction( instring, loc, expr ): - print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) - -def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): - print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) - -def _defaultExceptionDebugAction( instring, loc, expr, exc ): - print ("Exception raised:" + _ustr(exc)) - -def nullDebugAction(*args): - """'Do-nothing' debug action, to suppress debugging output during parsing.""" - pass - -# Only works on Python 3.x - nonlocal is toxic to Python 2 installs -#~ 'decorator to trim function calls to match the arity of the target' -#~ def _trim_arity(func, maxargs=3): - #~ if func in singleArgBuiltins: - #~ return lambda s,l,t: func(t) - #~ limit = 0 - #~ foundArity = False - #~ def wrapper(*args): - #~ nonlocal limit,foundArity - #~ while 1: - #~ try: - #~ ret = func(*args[limit:]) - #~ foundArity = True - #~ return ret - #~ except TypeError: - #~ if limit == maxargs or foundArity: - #~ raise - #~ limit += 1 - #~ continue - #~ return wrapper - -# this version is Python 2.x-3.x cross-compatible -'decorator to trim function calls to match the arity of the target' -def _trim_arity(func, maxargs=2): - if func in singleArgBuiltins: - return lambda s,l,t: func(t) - limit = [0] - foundArity = [False] - - # traceback return data structure changed in Py3.5 - normalize back to plain tuples - if system_version[:2] >= (3,5): - def extract_stack(limit=0): - # special handling for Python 3.5.0 - extra deep call stack by 1 - offset = -3 if system_version == (3,5,0) else -2 - frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [frame_summary[:2]] - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - else: - extract_stack = traceback.extract_stack - extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to - # user's parse action 'func', so that we don't incur call penalty at parse time - - LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND - # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
- this_line = extract_stack(limit=2)[-1] - pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) - - def wrapper(*args): - while 1: - try: - ret = func(*args[limit[0]:]) - foundArity[0] = True - return ret - except TypeError: - # re-raise TypeErrors if they did not come from our arity testing - if foundArity[0]: - raise - else: - try: - tb = sys.exc_info()[-1] - if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: - raise - finally: - del tb - - if limit[0] <= maxargs: - limit[0] += 1 - continue - raise - - # copy func name to wrapper for sensible debug output - func_name = "" - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - wrapper.__name__ = func_name - - return wrapper - -class ParserElement(object): - """Abstract base level parser element class.""" - DEFAULT_WHITE_CHARS = " \n\t\r" - verbose_stacktrace = False - - @staticmethod - def setDefaultWhitespaceChars( chars ): - r""" - Overrides the default whitespace chars - - Example:: - # default whitespace chars are space, and newline - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] - - # change to just treat newline as significant - ParserElement.setDefaultWhitespaceChars(" \t") - OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] - """ - ParserElement.DEFAULT_WHITE_CHARS = chars - - @staticmethod - def inlineLiteralsUsing(cls): - """ - Set class to be used for inclusion of string literals into a parser. - - Example:: - # default literal class used is Literal - integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - - # change to Suppress - ParserElement.inlineLiteralsUsing(Suppress) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - - date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] - """ - ParserElement._literalStringClass = cls - - def __init__( self, savelist=False ): - self.parseAction = list() - self.failAction = None - #~ self.name = "" # don't define self.name, let subclasses try/except upcall - self.strRepr = None - self.resultsName = None - self.saveAsList = savelist - self.skipWhitespace = True - self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - self.copyDefaultWhiteChars = True - self.mayReturnEmpty = False # used when checking for left-recursion - self.keepTabs = False - self.ignoreExprs = list() - self.debug = False - self.streamlined = False - self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index - self.errmsg = "" - self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) - self.debugActions = ( None, None, None ) #custom debug actions - self.re = None - self.callPreparse = True # used to avoid redundant calls to preParse - self.callDuringTry = False - - def copy( self ): - """ - Make a copy of this C{ParserElement}. Useful for defining different parse actions - for the same parsing pattern, using copies of the original parse element. 
- - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") - integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - - print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) - prints:: - [5120, 100, 655360, 268435456] - Equivalent form of C{expr.copy()} is just C{expr()}:: - integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - """ - cpy = copy.copy( self ) - cpy.parseAction = self.parseAction[:] - cpy.ignoreExprs = self.ignoreExprs[:] - if self.copyDefaultWhiteChars: - cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS - return cpy - - def setName( self, name ): - """ - Define name for this expression, makes debugging and exception messages clearer. - - Example:: - Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) - Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) - """ - self.name = name - self.errmsg = "Expected " + self.name - if hasattr(self,"exception"): - self.exception.msg = self.errmsg - return self - - def setResultsName( self, name, listAllMatches=False ): - """ - Define name for referencing matching tokens as a nested attribute - of the returned parse results. - NOTE: this returns a *copy* of the original C{ParserElement} object; - this is so that the client can define a basic element, such as an - integer, and reference it in multiple places with different names. - - You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. - - Example:: - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' - + integer.setResultsName("day")) - - # equivalent form: - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - """ - newself = self.copy() - if name.endswith("*"): - name = name[:-1] - listAllMatches=True - newself.resultsName = name - newself.modalResults = not listAllMatches - return newself - - def setBreak(self,breakFlag = True): - """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set C{breakFlag} to True to enable, False to - disable. - """ - if breakFlag: - _parseMethod = self._parse - def breaker(instring, loc, doActions=True, callPreParse=True): - import pdb - pdb.set_trace() - return _parseMethod( instring, loc, doActions, callPreParse ) - breaker._originalParseMethod = _parseMethod - self._parse = breaker - else: - if hasattr(self._parse,"_originalParseMethod"): - self._parse = self._parse._originalParseMethod - return self - - def setParseAction( self, *fns, **kwargs ): - """ - Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object - If the functions in fns modify the tokens, they can return them as the return - value from fn, and the modified list of tokens will replace the original. - Otherwise, fn does not need to return any value. 
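# Illustrative sketch (assumes the standalone pyparsing package): one of the
# accepted parse-action signatures described above, fn(s, loc, toks),
# returning replacement tokens.
from pyparsing import Word, nums

def to_int(s, loc, toks):
    # s: the original string, loc: match location, toks: a ParseResults
    return [int(toks[0])]

integer = Word(nums).setParseAction(to_int)
print(integer.parseString("42")[0] + 1)   # -> 43 (the token is now an int)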
- - Optional keyword arguments: - - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing - - Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{parseString}} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - - Example:: - integer = Word(nums) - date_str = integer + '/' + integer + '/' + integer - - date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] - - # use parse action to convert to ints at parse time - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - date_str = integer + '/' + integer + '/' + integer - - # note that integer fields are now ints, not strings - date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] - """ - self.parseAction = list(map(_trim_arity, list(fns))) - self.callDuringTry = kwargs.get("callDuringTry", False) - return self - - def addParseAction( self, *fns, **kwargs ): - """ - Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}}. - - See examples in L{I{copy}}. - """ - self.parseAction += list(map(_trim_arity, list(fns))) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def addCondition(self, *fns, **kwargs): - """Add a boolean predicate function to expression's list of parse actions. See - L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, - functions passed to C{addCondition} need to return boolean success/fail of the condition. - - Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - - Example:: - integer = Word(nums).setParseAction(lambda toks: int(toks[0])) - year_int = integer.copy() - year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") - date_str = year_int + '/' + integer + '/' + integer - - result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) - """ - msg = kwargs.get("message", "failed user-defined condition") - exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException - for fn in fns: - def pa(s,l,t): - if not bool(_trim_arity(fn)(s,l,t)): - raise exc_type(s,l,msg) - self.parseAction.append(pa) - self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) - return self - - def setFailAction( self, fn ): - """Define action to perform if parsing fails at this expression. - Fail acton fn is a callable function that takes the arguments - C{fn(s,loc,expr,err)} where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown - The function returns no value. 
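# Illustrative sketch (assumes the standalone pyparsing package): a fail
# action with the fn(s, loc, expr, err) signature documented above; the
# original ParseException is still raised after the action runs.
from pyparsing import Word, nums, ParseException

def report_failure(s, loc, expr, err):
    print("no %s at loc %d" % (expr, loc))

integer = Word(nums).setName("integer").setFailAction(report_failure)
try:
    integer.parseString("abc")            # prints: no integer at loc 0
except ParseException:
    pass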
It may throw C{L{ParseFatalException}} - if it is desired to stop parsing immediately.""" - self.failAction = fn - return self - - def _skipIgnorables( self, instring, loc ): - exprsFound = True - while exprsFound: - exprsFound = False - for e in self.ignoreExprs: - try: - while 1: - loc,dummy = e._parse( instring, loc ) - exprsFound = True - except ParseException: - pass - return loc - - def preParse( self, instring, loc ): - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - - if self.skipWhitespace: - wt = self.whiteChars - instrlen = len(instring) - while loc < instrlen and instring[loc] in wt: - loc += 1 - - return loc - - def parseImpl( self, instring, loc, doActions=True ): - return loc, [] - - def postParse( self, instring, loc, tokenlist ): - return tokenlist - - #~ @profile - def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): - debugging = ( self.debug ) #and doActions ) - - if debugging or self.failAction: - #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) - if (self.debugActions[0] ): - self.debugActions[0]( instring, loc, self ) - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - try: - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - except ParseBaseException as err: - #~ print ("Exception raised:", err) - if self.debugActions[2]: - self.debugActions[2]( instring, tokensStart, self, err ) - if self.failAction: - self.failAction( instring, tokensStart, self, err ) - raise - else: - if callPreParse and self.callPreparse: - preloc = self.preParse( instring, loc ) - else: - preloc = loc - tokensStart = preloc - if self.mayIndexError or preloc >= len(instring): - try: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - except IndexError: - raise ParseException( instring, len(instring), self.errmsg, self ) - else: - loc,tokens = self.parseImpl( instring, preloc, doActions ) - - tokens = self.postParse( instring, loc, tokens ) - - retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) - if self.parseAction and (doActions or self.callDuringTry): - if debugging: - try: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - except ParseBaseException as err: - #~ print "Exception raised in user parse action:", err - if (self.debugActions[2] ): - self.debugActions[2]( instring, tokensStart, self, err ) - raise - else: - for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: - retTokens = ParseResults( tokens, - self.resultsName, - asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), - modal=self.modalResults ) - if debugging: - #~ print ("Matched",self,"->",retTokens.asList()) - if (self.debugActions[1] ): - self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) - - return loc, retTokens - - def tryParse( self, instring, loc ): - try: - return self._parse( instring, loc, doActions=False )[0] - except ParseFatalException: - raise ParseException( instring, loc, self.errmsg, self) - - def canParseNext(self, instring, loc): - try: - self.tryParse(instring, loc) - except (ParseException, 
IndexError): - return False - else: - return True - - class _UnboundedCache(object): - def __init__(self): - cache = {} - self.not_in_cache = not_in_cache = object() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - if _OrderedDict is not None: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = _OrderedDict() - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(cache) > size: - try: - cache.popitem(False) - except KeyError: - pass - - def clear(self): - cache.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - else: - class _FifoCache(object): - def __init__(self, size): - self.not_in_cache = not_in_cache = object() - - cache = {} - key_fifo = collections.deque([], size) - - def get(self, key): - return cache.get(key, not_in_cache) - - def set(self, key, value): - cache[key] = value - while len(key_fifo) > size: - cache.pop(key_fifo.popleft(), None) - key_fifo.append(key) - - def clear(self): - cache.clear() - key_fifo.clear() - - def cache_len(self): - return len(cache) - - self.get = types.MethodType(get, self) - self.set = types.MethodType(set, self) - self.clear = types.MethodType(clear, self) - self.__len__ = types.MethodType(cache_len, self) - - # argument cache for optimizing repeated calls when backtracking through recursive expressions - packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail - packrat_cache_lock = RLock() - packrat_cache_stats = [0, 0] - - # this method gets repeatedly called during backtracking with the same arguments - - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression - def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): - HIT, MISS = 0, 1 - lookup = (self, instring, loc, callPreParse, doActions) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return (value[0], value[1].copy()) - - _parse = _parseNoCache - - @staticmethod - def resetCache(): - ParserElement.packrat_cache.clear() - ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) - - _packratEnabled = False - @staticmethod - def enablePackrat(cache_size_limit=128): - """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
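# Illustrative sketch (assumes the standalone pyparsing package): typical
# packrat activation; the cache-size values mirror the parameter documented
# in the surrounding docstring.
import pyparsing

pyparsing.ParserElement.enablePackrat()        # bounded FIFO cache (128 entries)
# pyparsing.ParserElement.enablePackrat(None)  # unbounded cache
# pyparsing.ParserElement.enablePackrat(0)     # cache effectively disabled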
- Repeated parse attempts at the same string location (which happens - often in many complex grammars) can immediately return a cached value, - instead of re-executing parsing/validating code. Memoizing is done of - both valid results and parsing exceptions. - - Parameters: - - cache_size_limit - (default=C{128}) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - - This speedup may break existing programs that use parse actions that - have side-effects. For this reason, packrat parsing is disabled when - you first import pyparsing. To activate the packrat feature, your - program must call the class method C{ParserElement.enablePackrat()}. If - your program uses C{psyco} to "compile as you go", you must call - C{enablePackrat} before calling C{psyco.full()}. If you do not do this, - Python will crash. For best results, call C{enablePackrat()} immediately - after importing pyparsing. - - Example:: - import pyparsing - pyparsing.ParserElement.enablePackrat() - """ - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = ParserElement._UnboundedCache() - else: - ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) - ParserElement._parse = ParserElement._parseCache - - def parseString( self, instring, parseAll=False ): - """ - Execute the parse expression with the given string. - This is the main interface to the client code, once the complete - expression has been built. - - If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). - - Note: C{parseString} implicitly calls C{expandtabs()} on the input string, - in order to report proper column numbers in parse actions. - If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the - string being parsed, you can ensure you have a consistent view of the input - string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explictly expand the tabs in your input string before calling - C{parseString} - - Example:: - Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] - Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text - """ - ParserElement.resetCache() - if not self.streamlined: - self.streamline() - #~ self.saveAsList = True - for e in self.ignoreExprs: - e.streamline() - if not self.keepTabs: - instring = instring.expandtabs() - try: - loc, tokens = self._parse( instring, 0 ) - if parseAll: - loc = self.preParse( instring, loc ) - se = Empty() + StringEnd() - se._parse( instring, loc ) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - else: - return tokens - - def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): - """ - Scan the input string for expression matches. Each match will return the - matching tokens, start location, and end location. 
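# Illustrative sketch (assumes the standalone pyparsing package):
# parseAll=True, documented above, makes trailing unparsed text an error.
from pyparsing import Word, ParseException

print(Word("a").parseString("aaab"))              # -> ['aaa'] (trailing 'b' ignored)
try:
    Word("a").parseString("aaab", parseAll=True)
except ParseException as pe:
    print(pe)                                     # -> Expected end of text ...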
May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. - - Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}} for more information on parsing - strings with embedded tabs. - - Example:: - source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" - print(source) - for tokens,start,end in Word(alphas).scanString(source): - print(' '*start + '^'*(end-start)) - print(' '*start + tokens[0]) - - prints:: - - sldjf123lsdjjkf345sldkjf879lkjsfd987 - ^^^^^ - sldjf - ^^^^^^^ - lsdjjkf - ^^^^^^ - sldkjf - ^^^^^^ - lkjsfd - """ - if not self.streamlined: - self.streamline() - for e in self.ignoreExprs: - e.streamline() - - if not self.keepTabs: - instring = _ustr(instring).expandtabs() - instrlen = len(instring) - loc = 0 - preparseFn = self.preParse - parseFn = self._parse - ParserElement.resetCache() - matches = 0 - try: - while loc <= instrlen and matches < maxMatches: - try: - preloc = preparseFn( instring, loc ) - nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) - except ParseException: - loc = preloc+1 - else: - if nextLoc > loc: - matches += 1 - yield tokens, preloc, nextLoc - if overlap: - nextloc = preparseFn( instring, loc ) - if nextloc > loc: - loc = nextLoc - else: - loc += 1 - else: - loc = nextLoc - else: - loc = preloc+1 - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def transformString( self, instring ): - """ - Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. To use C{transformString}, define a grammar and - attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, - and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string. - - Example:: - wd = Word(alphas) - wd.setParseAction(lambda toks: toks[0].title()) - - print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - Prints:: - Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. - """ - out = [] - lastE = 0 - # force preservation of s, to minimize unwanted transformation of string, and to - # keep string locs straight between transformString and scanString - self.keepTabs = True - try: - for t,s,e in self.scanString( instring ): - out.append( instring[lastE:s] ) - if t: - if isinstance(t,ParseResults): - out += t.asList() - elif isinstance(t,list): - out += t - else: - out.append(t) - lastE = e - out.append(instring[lastE:]) - out = [o for o in out if o] - return "".join(map(_ustr,_flatten(out))) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def searchString( self, instring, maxMatches=_MAX_INT ): - """ - Another extension to C{L{scanString}}, simplifying the access to the tokens found - to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. 
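# Illustrative sketch (assumes the standalone pyparsing package):
# transformString (above) rewrites each match in place via a parse action.
from pyparsing import Word, alphas

wd = Word(alphas)
wd.setParseAction(lambda toks: toks[0].upper())
print(wd.transformString("now is the winter"))   # -> 'NOW IS THE WINTER'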
- - Example:: - # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters - cap_word = Word(alphas.upper(), alphas.lower()) - - print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) - - # the sum() builtin can be used to merge results into a single ParseResults object - print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) - prints:: - [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] - ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] - """ - try: - return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): - """ - Generator method to split a string using the given expression as a separator. - May be called with optional C{maxsplit} argument, to limit the number of splits; - and the optional C{includeSeparators} argument (default=C{False}), if the separating - matching text should be included in the split results. - - Example:: - punc = oneOf(list(".,;:/-!?")) - print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) - prints:: - ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] - """ - splits = 0 - last = 0 - for t,s,e in self.scanString(instring, maxMatches=maxsplit): - yield instring[last:s] - if includeSeparators: - yield t[0] - last = e - yield instring[last:] - - def __add__(self, other ): - """ - Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement - converts them to L{Literal}s by default. - - Example:: - greet = Word(alphas) + "," + Word(alphas) + "!" - hello = "Hello, World!" - print (hello, "->", greet.parseString(hello)) - Prints:: - Hello, World! 
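# Illustrative sketch (assumes the standalone pyparsing package): split()
# (above) as a grammar-driven analogue of re.split().
from pyparsing import oneOf

punc = oneOf(list(".,;:!?"))
print(list(punc.split("Hi, there! Ok?")))   # -> ['Hi', ' there', ' Ok', '']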
-> ['Hello', ',', 'World', '!'] - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return And( [ self, other ] ) - - def __radd__(self, other ): - """ - Implementation of + operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other + self - - def __sub__(self, other): - """ - Implementation of - operator, returns C{L{And}} with error stop - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return self + And._ErrorStop() + other - - def __rsub__(self, other ): - """ - Implementation of - operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other - self - - def __mul__(self,other): - """ - Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent - to C{expr*n + L{ZeroOrMore}(expr)} - (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} - (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} - - Note that C{expr*(None,n)} does not raise an exception if - more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr - occurrences. 
If this behavior is desired, then write - C{expr*(None,n) + ~expr} - """ - if isinstance(other,int): - minElements, optElements = other,0 - elif isinstance(other,tuple): - other = (other + (None, None))[:2] - if other[0] is None: - other = (0, other[1]) - if isinstance(other[0],int) and other[1] is None: - if other[0] == 0: - return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self*other[0] + ZeroOrMore(self) - elif isinstance(other[0],int) and isinstance(other[1],int): - minElements, optElements = other - optElements -= minElements - else: - raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) - else: - raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) - - if minElements < 0: - raise ValueError("cannot multiply ParserElement by negative value") - if optElements < 0: - raise ValueError("second tuple value must be greater or equal to first tuple value") - if minElements == optElements == 0: - raise ValueError("cannot multiply ParserElement by 0 or (0,0)") - - if (optElements): - def makeOptionalList(n): - if n>1: - return Optional(self + makeOptionalList(n-1)) - else: - return Optional(self) - if minElements: - if minElements == 1: - ret = self + makeOptionalList(optElements) - else: - ret = And([self]*minElements) + makeOptionalList(optElements) - else: - ret = makeOptionalList(optElements) - else: - if minElements == 1: - ret = self - else: - ret = And([self]*minElements) - return ret - - def __rmul__(self, other): - return self.__mul__(other) - - def __or__(self, other ): - """ - Implementation of | operator - returns C{L{MatchFirst}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return MatchFirst( [ self, other ] ) - - def __ror__(self, other ): - """ - Implementation of | operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other | self - - def __xor__(self, other ): - """ - Implementation of ^ operator - returns C{L{Or}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return Or( [ self, other ] ) - - def __rxor__(self, other ): - """ - Implementation of ^ operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other ^ self - - def __and__(self, other ): - """ - Implementation of & operator - returns C{L{Each}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - 
return None - return Each( [ self, other ] ) - - def __rand__(self, other ): - """ - Implementation of & operator when left operand is not a C{L{ParserElement}} - """ - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - if not isinstance( other, ParserElement ): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) - return None - return other & self - - def __invert__( self ): - """ - Implementation of ~ operator - returns C{L{NotAny}} - """ - return NotAny( self ) - - def __call__(self, name=None): - """ - Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. - - If C{name} is omitted, same as calling C{L{copy}}. - - Example:: - # these are equivalent - userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") - """ - if name is not None: - return self.setResultsName(name) - else: - return self.copy() - - def suppress( self ): - """ - Suppresses the output of this C{ParserElement}; useful to keep punctuation from - cluttering up returned output. - """ - return Suppress( self ) - - def leaveWhitespace( self ): - """ - Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. This is normally only used internally by - the pyparsing module, but may be needed in some whitespace-sensitive grammars. - """ - self.skipWhitespace = False - return self - - def setWhitespaceChars( self, chars ): - """ - Overrides the default whitespace chars - """ - self.skipWhitespace = True - self.whiteChars = chars - self.copyDefaultWhiteChars = False - return self - - def parseWithTabs( self ): - """ - Overrides default behavior to expand C{}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{} characters. - """ - self.keepTabs = True - return self - - def ignore( self, other ): - """ - Define expression to be ignored (e.g., comments) while doing pattern - matching; may be called repeatedly, to define multiple comment or other - ignorable patterns. - - Example:: - patt = OneOrMore(Word(alphas)) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - - patt.ignore(cStyleComment) - patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] - """ - if isinstance(other, basestring): - other = Suppress(other) - - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - self.ignoreExprs.append(other) - else: - self.ignoreExprs.append( Suppress( other.copy() ) ) - return self - - def setDebugActions( self, startAction, successAction, exceptionAction ): - """ - Enable display of debugging messages while doing pattern matching. - """ - self.debugActions = (startAction or _defaultStartDebugAction, - successAction or _defaultSuccessDebugAction, - exceptionAction or _defaultExceptionDebugAction) - self.debug = True - return self - - def setDebug( self, flag=True ): - """ - Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable. 
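# Illustrative sketch (assumes the standalone pyparsing package): ignore()
# (above) with the bundled cStyleComment expression.
from pyparsing import OneOrMore, Word, alphas, cStyleComment

patt = OneOrMore(Word(alphas))
patt.ignore(cStyleComment)
print(patt.parseString("ablaj /* comment */ lskjd"))   # -> ['ablaj', 'lskjd']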
- - Example:: - wd = Word(alphas).setName("alphaword") - integer = Word(nums).setName("numword") - term = wd | integer - - # turn on debugging for wd - wd.setDebug() - - OneOrMore(term).parseString("abc 123 xyz 890") - - prints:: - Match alphaword at loc 0(1,1) - Matched alphaword -> ['abc'] - Match alphaword at loc 3(1,4) - Exception raised:Expected alphaword (at char 4), (line:1, col:5) - Match alphaword at loc 7(1,8) - Matched alphaword -> ['xyz'] - Match alphaword at loc 11(1,12) - Exception raised:Expected alphaword (at char 12), (line:1, col:13) - Match alphaword at loc 15(1,16) - Exception raised:Expected alphaword (at char 15), (line:1, col:16) - - The output shown is that produced by the default debug actions - custom debug actions can be - specified using L{setDebugActions}. Prior to attempting - to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} - is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} - message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, - which makes debugging and exception messages easier to understand - for instance, the default - name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. - """ - if flag: - self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) - else: - self.debug = False - return self - - def __str__( self ): - return self.name - - def __repr__( self ): - return _ustr(self) - - def streamline( self ): - self.streamlined = True - self.strRepr = None - return self - - def checkRecursion( self, parseElementList ): - pass - - def validate( self, validateTrace=[] ): - """ - Check defined expressions for valid structure, check for infinite recursive definitions. - """ - self.checkRecursion( [] ) - - def parseFile( self, file_or_filename, parseAll=False ): - """ - Execute the parse expression on the given file or filename. - If a filename is specified (instead of a file object), - the entire file is opened, read, and closed before parsing. - """ - try: - file_contents = file_or_filename.read() - except AttributeError: - with open(file_or_filename, "r") as f: - file_contents = f.read() - try: - return self.parseString(file_contents, parseAll) - except ParseBaseException as exc: - if ParserElement.verbose_stacktrace: - raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc - - def __eq__(self,other): - if isinstance(other, ParserElement): - return self is other or vars(self) == vars(other) - elif isinstance(other, basestring): - return self.matches(other) - else: - return super(ParserElement,self)==other - - def __ne__(self,other): - return not (self == other) - - def __hash__(self): - return hash(id(self)) - - def __req__(self,other): - return self == other - - def __rne__(self,other): - return not (self == other) - - def matches(self, testString, parseAll=True): - """ - Method for quick testing of a parser against a test string. Good for simple - inline microtests of sub expressions while building up larger parser.
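Note that C{__eq__} above falls back to C{matches} when comparing against a plain string, so an element can be tested directly against sample text; a small illustration (not from the vendored source):

    from pyparsing import Literal

    assert Literal("endif") == "endif"       # delegates to matches()
    assert not (Literal("endif") == "endi")  # parseAll fails on the shorter text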
- - Parameters: - - testString - to test against this expression for a match - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - Example:: - expr = Word(nums) - assert expr.matches("100") - """ - try: - self.parseString(_ustr(testString), parseAll=parseAll) - return True - except ParseBaseException: - return False - - def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): - """ - Execute the parse expression on a series of test strings, showing each - test, the parsed results or where the parse failed. Quick and easy way to - run a parse expression against a list of sample strings. - - Parameters: - - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - comment - (default=C{'#'}) - expression for indicating embedded comments in the test - string; pass None to disable comment filtering - - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; - if False, only dump nested list - - printResults - (default=C{True}) prints test output to stdout - - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing - - Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if C{failureTests} is True), and the results contain a list of lines of each - test's output - - Example:: - number_expr = pyparsing_common.number.copy() - - result = number_expr.runTests(''' - # unsigned integer - 100 - # negative integer - -100 - # float with scientific notation - 6.02e23 - # integer with scientific notation - 1e-12 - ''') - print("Success" if result[0] else "Failed!") - - result = number_expr.runTests(''' - # stray character - 100Z - # missing leading digit before '.' - -.100 - # too many '.' - 3.14.159 - ''', failureTests=True) - print("Success" if result[0] else "Failed!") - prints:: - # unsigned integer - 100 - [100] - - # negative integer - -100 - [-100] - - # float with scientific notation - 6.02e23 - [6.02e+23] - - # integer with scientific notation - 1e-12 - [1e-12] - - Success - - # stray character - 100Z - ^ - FAIL: Expected end of text (at char 3), (line:1, col:4) - - # missing leading digit before '.' - -.100 - ^ - FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) - - # too many '.' - 3.14.159 - ^ - FAIL: Expected end of text (at char 4), (line:1, col:5) - - Success - - Each test string must be on a single line. If you want to test a string that spans multiple - lines, create a test like this:: - - expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - - (Note that this is a raw string literal, you must include the leading 'r'.) 
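For instance, the embedded-newline convention described above can be exercised like this (illustrative usage):

    from pyparsing import OneOrMore, Word, alphas

    expr = OneOrMore(Word(alphas))
    # each test is one line; a literal \n is converted to a real newline before parsing
    expr.runTests(r"this is a test\n of strings that spans \n 3 lines")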
- """ - if isinstance(tests, basestring): - tests = list(map(str.strip, tests.rstrip().splitlines())) - if isinstance(comment, basestring): - comment = Literal(comment) - allResults = [] - comments = [] - success = True - for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: - comments.append(t) - continue - if not t: - continue - out = ['\n'.join(comments), t] - comments = [] - try: - t = t.replace(r'\n','\n') - result = self.parseString(t, parseAll=parseAll) - out.append(result.dump(full=fullDump)) - success = success and not failureTests - except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" - if '\n' in t: - out.append(line(pe.loc, t)) - out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) - else: - out.append(' '*pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) - success = success and failureTests - result = pe - except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) - success = success and failureTests - result = exc - - if printResults: - if fullDump: - out.append('') - print('\n'.join(out)) - - allResults.append((t, result)) - - return success, allResults - - -class Token(ParserElement): - """ - Abstract C{ParserElement} subclass, for defining atomic matching patterns. - """ - def __init__( self ): - super(Token,self).__init__( savelist=False ) - - -class Empty(Token): - """ - An empty token, will always match. - """ - def __init__( self ): - super(Empty,self).__init__() - self.name = "Empty" - self.mayReturnEmpty = True - self.mayIndexError = False - - -class NoMatch(Token): - """ - A token that will never match. - """ - def __init__( self ): - super(NoMatch,self).__init__() - self.name = "NoMatch" - self.mayReturnEmpty = True - self.mayIndexError = False - self.errmsg = "Unmatchable token" - - def parseImpl( self, instring, loc, doActions=True ): - raise ParseException(instring, loc, self.errmsg, self) - - -class Literal(Token): - """ - Token to exactly match a specified string. - - Example:: - Literal('blah').parseString('blah') # -> ['blah'] - Literal('blah').parseString('blahfooblah') # -> ['blah'] - Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use L{CaselessLiteral}. - - For keyword matching (force word break before and after the matched string), - use L{Keyword} or L{CaselessKeyword}. 
- """ - def __init__( self, matchString ): - super(Literal,self).__init__() - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Literal; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - - # Performance tuning: this routine gets called a *lot* - # if this is a single character match string and the first character matches, - # short-circuit as quickly as possible, and avoid calling startswith - #~ @profile - def parseImpl( self, instring, loc, doActions=True ): - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) -_L = Literal -ParserElement._literalStringClass = Literal - -class Keyword(Token): - """ - Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}: - - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$" - - C{caseless} allows case-insensitive matching, default is C{False}. - - Example:: - Keyword("start").parseString("start") # -> ['start'] - Keyword("start").parseString("starting") # -> Exception - - For case-insensitive matching, use L{CaselessKeyword}. 
- """ - DEFAULT_KEYWORD_CHARS = alphanums+"_$" - - def __init__( self, matchString, identChars=None, caseless=False ): - super(Keyword,self).__init__() - if identChars is None: - identChars = Keyword.DEFAULT_KEYWORD_CHARS - self.match = matchString - self.matchLen = len(matchString) - try: - self.firstMatchChar = matchString[0] - except IndexError: - warnings.warn("null string passed to Keyword; use Empty() instead", - SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = False - self.mayIndexError = False - self.caseless = caseless - if caseless: - self.caselessmatch = matchString.upper() - identChars = identChars.upper() - self.identChars = set(identChars) - - def parseImpl( self, instring, loc, doActions=True ): - if self.caseless: - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and - (loc == 0 or instring[loc-1].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - else: - if (instring[loc] == self.firstMatchChar and - (self.matchLen==1 or instring.startswith(self.match,loc)) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and - (loc == 0 or instring[loc-1] not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - - def copy(self): - c = super(Keyword,self).copy() - c.identChars = Keyword.DEFAULT_KEYWORD_CHARS - return c - - @staticmethod - def setDefaultKeywordChars( chars ): - """Overrides the default Keyword chars - """ - Keyword.DEFAULT_KEYWORD_CHARS = chars - -class CaselessLiteral(Literal): - """ - Token to match a specified string, ignoring case of letters. - Note: the matched results will always be in the case of the given - match string, NOT the case of the input text. - - Example:: - OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for L{CaselessKeyword}.) - """ - def __init__( self, matchString ): - super(CaselessLiteral,self).__init__( matchString.upper() ) - # Preserve the defining literal. - self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name - - def parseImpl( self, instring, loc, doActions=True ): - if instring[ loc:loc+self.matchLen ].upper() == self.match: - return loc+self.matchLen, self.returnString - raise ParseException(instring, loc, self.errmsg, self) - -class CaselessKeyword(Keyword): - """ - Caseless version of L{Keyword}. - - Example:: - OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for L{CaselessLiteral}.) - """ - def __init__( self, matchString, identChars=None ): - super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - - def parseImpl( self, instring, loc, doActions=True ): - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - -class CloseMatch(Token): - """ - A variation on L{Literal} which matches "close" matches, that is, - strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: - - C{match_string} - string to be matched - - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match - - The results from a successful parse will contain the matched text from the input string and the following named results: - - C{mismatches} - a list of the positions within the match_string where mismatches were found - - C{original} - the original match_string used to compare against the input string - - If C{mismatches} is an empty list, then the match was an exact match. - - Example:: - patt = CloseMatch("ATCATCGAATGGA") - patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) - patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) - - # exact match - patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) - - # close match allowing up to 2 mismatches - patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) - patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) - """ - def __init__(self, match_string, maxMismatches=1): - super(CloseMatch,self).__init__() - self.name = match_string - self.match_string = match_string - self.maxMismatches = maxMismatches - self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) - self.mayIndexError = False - self.mayReturnEmpty = False - - def parseImpl( self, instring, loc, doActions=True ): - start = loc - instrlen = len(instring) - maxloc = start + len(self.match_string) - - if maxloc <= instrlen: - match_string = self.match_string - match_stringloc = 0 - mismatches = [] - maxMismatches = self.maxMismatches - - for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): - src,mat = s_m - if src != mat: - mismatches.append(match_stringloc) - if len(mismatches) > maxMismatches: - break - else: - loc = match_stringloc + 1 - results = ParseResults([instring[start:loc]]) - results['original'] = self.match_string - results['mismatches'] = mismatches - return loc, results - - raise ParseException(instring, loc, self.errmsg, self) - - -class Word(Token): - """ - Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, - defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{excludeChars} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. - - L{srange} is useful for defining custom character set strings for defining - C{Word} expressions, using range notation from regular expression character sets. - - A common mistake is to use C{Word} to match a specific literal string, as in - C{Word("Address")}. Remember that C{Word} uses the string argument to define - I{sets} of matchable characters. This expression would match "Add", "AAA", - "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. - To match an exact literal string, use L{Literal} or L{Keyword}. 
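A short sketch of the C{Word}-vs-C{Literal} pitfall just described (illustrative usage):

    from pyparsing import Word, Literal

    # Word("Address") defines a *character set* {A,d,r,e,s}, not the string "Address"
    print(Word("Address").parseString("dAred"))       # -> ['dAred']
    print(Literal("Address").parseString("Address"))  # -> ['Address'] - exact string match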
- - pyparsing includes helper strings for building Words: - L{alphas} - L{nums} - L{alphanums} - L{hexnums} - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) - L{printables} (any non-whitespace character) - - Example:: - # a word composed of digits - integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) - - # a word with a leading capital, and zero or more lowercase - capital_word = Word(alphas.upper(), alphas.lower()) - - # hostnames are alphanumeric, with leading alpha, and '-' - hostname = Word(alphas, alphanums+'-') - - # roman numeral (not a strict parser, accepts invalid mix of characters) - roman = Word("IVXLCDM") - - # any string of non-whitespace characters, except for ',' - csv_value = Word(printables, excludeChars=",") - """ - def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): - super(Word,self).__init__() - if excludeChars: - initChars = ''.join(c for c in initChars if c not in excludeChars) - if bodyChars: - bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) - self.initCharsOrig = initChars - self.initChars = set(initChars) - if bodyChars : - self.bodyCharsOrig = bodyChars - self.bodyChars = set(bodyChars) - else: - self.bodyCharsOrig = initChars - self.bodyChars = set(initChars) - - self.maxSpecified = max > 0 - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.asKeyword = asKeyword - - if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): - if self.bodyCharsOrig == self.initCharsOrig: - self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) - elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % \ - (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - else: - self.reString = "[%s][%s]*" % \ - (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) - if self.asKeyword: - self.reString = r"\b"+self.reString+r"\b" - try: - self.re = re.compile( self.reString ) - except Exception: - self.re = None - - def parseImpl( self, instring, loc, doActions=True ): - if self.re: - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - return loc, result.group() - - if not(instring[ loc ] in self.initChars): - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - instrlen = len(instring) - bodychars = self.bodyChars - maxloc = start + self.maxLen - maxloc = min( maxloc, instrlen ) - while loc < maxloc and instring[loc] in bodychars: - loc += 1 - - throwException = False - if loc - start < self.minLen: - throwException = True - if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: - throwException = True - if self.asKeyword: - if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): - throwException = True - - if throwException: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(Word,self).__str__() - except Exception: - pass - - if self.strRepr is None: - - def charsAsStr(s): - if len(s)>4: - return s[:4]+"..."
- else: - return s - - if ( self.initCharsOrig != self.bodyCharsOrig ): - self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) - else: - self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) - - return self.strRepr - - -class Regex(Token): - r""" - Token for matching strings that match a given regular expression. - Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. - If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as - named parse results. - - Example:: - realnum = Regex(r"[+-]?\d+\.\d*") - date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') - # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression - roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") - """ - compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False): - """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" - super(Regex,self).__init__() - - if isinstance(pattern, basestring): - if not pattern: - warnings.warn("null string passed to Regex; use Empty() instead", - SyntaxWarning, stacklevel=2) - - self.pattern = pattern - self.flags = flags - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) - raise - - elif isinstance(pattern, Regex.compiledREtype): - self.re = pattern - self.pattern = \ - self.reString = str(pattern) - self.flags = flags - - else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - self.asGroupList = asGroupList - self.asMatch = asMatch - - def parseImpl( self, instring, loc, doActions=True ): - result = self.re.match(instring,loc) - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - d = result.groupdict() - if self.asMatch: - ret = result - elif self.asGroupList: - ret = result.groups() - else: - ret = ParseResults(result.group()) - if d: - for k in d: - ret[k] = d[k] - return loc,ret - - def __str__( self ): - try: - return super(Regex,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "Re:(%s)" % repr(self.pattern) - - return self.strRepr - - def sub(self, repl): - """ - Return Regex with an attached parse action to transform the parsed - result as if called using C{re.sub(expr, repl, string)}. - """ - if self.asGroupList: - warnings.warn("cannot use sub() with Regex(asGroupList=True)", - SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if self.asMatch and callable(repl): - warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", - SyntaxWarning, stacklevel=2) - raise SyntaxError() - - if self.asMatch: - def pa(tokens): - return tokens[0].expand(repl) - else: - def pa(tokens): - return self.re.sub(repl, tokens[0]) - return self.addParseAction(pa) - -class QuotedString(Token): - r""" - Token for matching strings that are delimited by quoting characters.
- - Defined with the following parameters: - - quoteChar - string of one or more characters defining the quote delimiting string - - escChar - character to escape quotes, typically backslash (default=C{None}) - - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) - - Example:: - qs = QuotedString('"') - print(qs.searchString('lsjdf "This is the quote" sldjf')) - complex_qs = QuotedString('{{', endQuoteChar='}}') - print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) - sql_qs = QuotedString('"', escQuote='""') - print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) - prints:: - [['This is the quote']] - [['This is the "quote"']] - [['This is the quote with "embedded" quotes']] - """ - def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): - super(QuotedString,self).__init__() - - # remove white space from quote chars - wont work anyway - quoteChar = quoteChar.strip() - if not quoteChar: - warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - if endQuoteChar is None: - endQuoteChar = quoteChar - else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: - warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) - raise SyntaxError() - - self.quoteChar = quoteChar - self.quoteCharLen = len(quoteChar) - self.firstQuoteChar = quoteChar[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes - - if multiline: - self.flags = re.MULTILINE | re.DOTALL - self.pattern = r'%s(?:[^%s%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - else: - self.flags = 0 - self.pattern = r'%s(?:[^%s\n\r%s]' % \ - ( re.escape(self.quoteChar), - _escapeRegexRangeChars(self.endQuoteChar[0]), - (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) - if len(self.endQuoteChar) > 1: - self.pattern += ( - '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), - _escapeRegexRangeChars(self.endQuoteChar[i])) - for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' - ) - if escQuote: - self.pattern += (r'|(?:%s)' % re.escape(escQuote)) - if escChar: - self.pattern += (r'|(?:%s.)' % re.escape(escChar)) - self.escCharReplacePattern = re.escape(self.escChar)+"(.)" - self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) - - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, - SyntaxWarning, stacklevel=2) - raise - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayIndexError = False - self.mayReturnEmpty = True - - def 
parseImpl( self, instring, loc, doActions=True ): - result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None - if not result: - raise ParseException(instring, loc, self.errmsg, self) - - loc = result.end() - ret = result.group() - - if self.unquoteResults: - - # strip off quotes - ret = ret[self.quoteCharLen:-self.endQuoteCharLen] - - if isinstance(ret,basestring): - # replace escaped whitespace - if '\\' in ret and self.convertWhitespaceEscapes: - ws_map = { - r'\t' : '\t', - r'\n' : '\n', - r'\f' : '\f', - r'\r' : '\r', - } - for wslit,wschar in ws_map.items(): - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) - - # replace escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) - - return loc, ret - - def __str__( self ): - try: - return super(QuotedString,self).__str__() - except Exception: - pass - - if self.strRepr is None: - self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) - - return self.strRepr - - -class CharsNotIn(Token): - """ - Token for matching words composed of characters I{not} in a given set (will - include whitespace in matched characters if not listed in the provided exclusion set - see example). - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. - - Example:: - # define a comma-separated-value as anything that is not a ',' - csv_value = CharsNotIn(',') - print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) - prints:: - ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] - """ - def __init__( self, notChars, min=1, max=0, exact=0 ): - super(CharsNotIn,self).__init__() - self.skipWhitespace = False - self.notChars = notChars - - if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - self.name = _ustr(self) - self.errmsg = "Expected " + self.name - self.mayReturnEmpty = ( self.minLen == 0 ) - self.mayIndexError = False - - def parseImpl( self, instring, loc, doActions=True ): - if instring[loc] in self.notChars: - raise ParseException(instring, loc, self.errmsg, self) - - start = loc - loc += 1 - notchars = self.notChars - maxlen = min( start+self.maxLen, len(instring) ) - while loc < maxlen and \ - (instring[loc] not in notchars): - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - def __str__( self ): - try: - return super(CharsNotIn, self).__str__() - except Exception: - pass - - if self.strRepr is None: - if len(self.notChars) > 4: - self.strRepr = "!W:(%s...)" % self.notChars[:4] - else: - self.strRepr = "!W:(%s)" % self.notChars - - return self.strRepr - -class White(Token): - """ - Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. 
Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class. - """ - whiteStrs = { - " " : "<SPC>", - "\t": "<TAB>", - "\n": "<LF>", - "\r": "<CR>", - "\f": "<FF>", - } - def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): - super(White,self).__init__() - self.matchWhite = ws - self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) - #~ self.leaveWhitespace() - self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) - self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name - - self.minLen = min - - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - - if exact > 0: - self.maxLen = exact - self.minLen = exact - - def parseImpl( self, instring, loc, doActions=True ): - if not(instring[ loc ] in self.matchWhite): - raise ParseException(instring, loc, self.errmsg, self) - start = loc - loc += 1 - maxloc = start + self.maxLen - maxloc = min( maxloc, len(instring) ) - while loc < maxloc and instring[loc] in self.matchWhite: - loc += 1 - - if loc - start < self.minLen: - raise ParseException(instring, loc, self.errmsg, self) - - return loc, instring[start:loc] - - -class _PositionToken(Token): - def __init__( self ): - super(_PositionToken,self).__init__() - self.name=self.__class__.__name__ - self.mayReturnEmpty = True - self.mayIndexError = False - -class GoToColumn(_PositionToken): - """ - Token to advance to a specific column of input text; useful for tabular report scraping. - """ - def __init__( self, colno ): - super(GoToColumn,self).__init__() - self.col = colno - - def preParse( self, instring, loc ): - if col(loc,instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables( instring, loc ) - while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : - loc += 1 - return loc - - def parseImpl( self, instring, loc, doActions=True ): - thiscol = col( loc, instring ) - if thiscol > self.col: - raise ParseException( instring, loc, "Text not in expected column", self ) - newloc = loc + self.col - thiscol - ret = instring[ loc: newloc ] - return newloc, ret - - -class LineStart(_PositionToken): - """ - Matches if current position is at the beginning of a line within the parse string - - Example:: - - test = '''\ - AAA this line - AAA and this line - AAA but not this one - B AAA and definitely not this one - ''' - - for t in (LineStart() + 'AAA' + restOfLine).searchString(test): - print(t) - - Prints:: - ['AAA', ' this line'] - ['AAA', ' and this line'] - - """ - def __init__( self ): - super(LineStart,self).__init__() - self.errmsg = "Expected start of line" - - def parseImpl( self, instring, loc, doActions=True ): - if col(loc, instring) == 1: - return loc, [] - raise ParseException(instring, loc, self.errmsg, self) - -class LineEnd(_PositionToken): - """ - Matches if current position is at the end of a line within the parse string - """ - def __init__( self ): - super(LineEnd,self).__init__() - self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) - self.errmsg = "Expected end of line" - - def parseImpl( self, instring, loc, doActions=True ): - if loc<len(instring): - if instring[loc] == "\n": - return loc+1, "\n" - else: - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class StringStart(_PositionToken): - """ - Matches if current position is at the beginning of the parse string - """ - def __init__( self ): - super(StringStart,self).__init__() - self.errmsg = "Expected start of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc != 0: - # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse( instring, 0 ): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class StringEnd(_PositionToken): - """ - Matches if current position is at the end of the parse string - """ - def __init__( self ): - super(StringEnd,self).__init__() - self.errmsg = "Expected end of text" - - def parseImpl( self, instring, loc, doActions=True ): - if loc < len(instring): - raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): - return loc+1, [] - elif loc > len(instring): - return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) - -class WordStart(_PositionToken): - """ - Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}).
To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of - the string being parsed, or at the beginning of a line. - """ - def __init__(self, wordChars = printables): - super(WordStart,self).__init__() - self.wordChars = set(wordChars) - self.errmsg = "Not at the start of a word" - - def parseImpl(self, instring, loc, doActions=True ): - if loc != 0: - if (instring[loc-1] in self.wordChars or - instring[loc] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - -class WordEnd(_PositionToken): - """ - Matches if the current position is at the end of a Word, and - is not followed by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of - the string being parsed, or at the end of a line. - """ - def __init__(self, wordChars = printables): - super(WordEnd,self).__init__() - self.wordChars = set(wordChars) - self.skipWhitespace = False - self.errmsg = "Not at the end of a word" - - def parseImpl(self, instring, loc, doActions=True ): - instrlen = len(instring) - if instrlen>0 and loc<instrlen: - if (instring[loc] in self.wordChars or - instring[loc-1] not in self.wordChars): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - -class Or(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the expression that matches the longest string will be used. - May be constructed using the C{'^'} operator. - - Example:: - # construct Or using '^' operator - - number = Regex(r'\d+(\.\d*)?') ^ Regex(r'\d+\.\d+') - print(number.searchString("123 3.1416 789")) - prints:: - [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(Or,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - matches = [] - for e in self.exprs: - try: - loc2 = e.tryParse( instring, loc ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - else: - # save match among all matches, to retry longest to shortest - matches.append((loc2, e)) - - if matches: - matches.sort(key=lambda x: -x[0]) - for _,e in matches: - try: - return e._parse( instring, loc, doActions ) - except ParseException as err: - err.__traceback__ = None - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - - def __ixor__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #Or( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class MatchFirst(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the first one listed is the one that will match. - May be constructed using the C{'|'} operator. - - Example:: - # construct MatchFirst using '|' operator - - # watch the order of expressions to match - number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) - print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] - - # put more selective expression first - number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) - print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] - """ - def __init__( self, exprs, savelist = False ): - super(MatchFirst,self).__init__(exprs, savelist) - if self.exprs: - self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) - else: - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - maxExcLoc = -1 - maxException = None - for e in self.exprs: - try: - ret = e._parse( instring, loc, doActions ) - return ret - except ParseException as err: - if err.loc > maxExcLoc: - maxException = err - maxExcLoc = err.loc - except IndexError: - if len(instring) > maxExcLoc: - maxException = ParseException(instring,len(instring),e.errmsg,self) - maxExcLoc = len(instring) - - # only got here if no expression matched, raise exception for match that made it the furthest - else: - if maxException is not None: - maxException.msg = self.errmsg - raise maxException - else: - raise ParseException(instring, loc, "no defined alternatives to match", self) - - def __ior__(self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass( other ) - return self.append( other ) #MatchFirst( [ self, other ] ) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class Each(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. 
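Before the C{Each} example below, it may help to contrast the C{'|'} and C{'^'} alternation shown above (illustrative usage, not part of the vendored source):

    from pyparsing import Word, nums, Combine

    # '|' (MatchFirst) commits to the first alternative that matches at all
    number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
    print(number.parseString("3.1416"))  # -> ['3']

    # '^' (Or) tries every alternative and keeps the longest match
    number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
    print(number.parseString("3.1416"))  # -> ['3.1416']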
- - Example:: - color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") - shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") - integer = Word(nums) - shape_attr = "shape:" + shape_type("shape") - posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") - color_attr = "color:" + color("color") - size_attr = "size:" + integer("size") - - # use Each (using operator '&') to accept attributes in any order - # (shape and posn are required, color and size are optional) - shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) - - shape_spec.runTests(''' - shape: SQUARE color: BLACK posn: 100, 120 - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - color:GREEN size:20 shape:TRIANGLE posn:20,40 - ''' - ) - prints:: - shape: SQUARE color: BLACK posn: 100, 120 - ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - - color: BLACK - - posn: ['100', ',', '120'] - - x: 100 - - y: 120 - - shape: SQUARE - - - shape: CIRCLE size: 50 color: BLUE posn: 50,80 - ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - - color: BLUE - - posn: ['50', ',', '80'] - - x: 50 - - y: 80 - - shape: CIRCLE - - size: 50 - - - color: GREEN size: 20 shape: TRIANGLE posn: 20,40 - ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - - color: GREEN - - posn: ['20', ',', '40'] - - x: 20 - - y: 40 - - shape: TRIANGLE - - size: 20 - """ - def __init__( self, exprs, savelist = True ): - super(Each,self).__init__(exprs, savelist) - self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) - self.skipWhitespace = True - self.initExprGroups = True - - def parseImpl( self, instring, loc, doActions=True ): - if self.initExprGroups: - self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) - opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] - opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] - self.optionals = opt1 + opt2 - self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] - self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] - self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] - self.required += self.multirequired - self.initExprGroups = False - tmpLoc = loc - tmpReqd = self.required[:] - tmpOpt = self.optionals[:] - matchOrder = [] - - keepMatching = True - while keepMatching: - tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired - failed = [] - for e in tmpExprs: - try: - tmpLoc = e.tryParse( instring, tmpLoc ) - except ParseException: - failed.append(e) - else: - matchOrder.append(self.opt1map.get(id(e),e)) - if e in tmpReqd: - tmpReqd.remove(e) - elif e in tmpOpt: - tmpOpt.remove(e) - if len(failed) == len(tmpExprs): - keepMatching = False - - if tmpReqd: - missing = ", ".join(_ustr(e) for e in tmpReqd) - raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) - - # add any unmatched Optionals, in case they have default values defined - matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] - - resultlist = [] - for e in matchOrder: - loc,results = e._parse(instring,loc,doActions) - resultlist.append(results) - - finalResults = sum(resultlist, ParseResults([])) - return loc, finalResults - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" - - return self.strRepr - - def checkRecursion( self, parseElementList ): - subRecCheckList = parseElementList[:] + [ self ] - for e in self.exprs: - e.checkRecursion( subRecCheckList ) - - -class ParseElementEnhance(ParserElement): - """ - Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. - """ - def __init__( self, expr, savelist=False ): - super(ParseElementEnhance,self).__init__(savelist) - if isinstance( expr, basestring ): - if issubclass(ParserElement._literalStringClass, Token): - expr = ParserElement._literalStringClass(expr) - else: - expr = ParserElement._literalStringClass(Literal(expr)) - self.expr = expr - self.strRepr = None - if expr is not None: - self.mayIndexError = expr.mayIndexError - self.mayReturnEmpty = expr.mayReturnEmpty - self.setWhitespaceChars( expr.whiteChars ) - self.skipWhitespace = expr.skipWhitespace - self.saveAsList = expr.saveAsList - self.callPreparse = expr.callPreparse - self.ignoreExprs.extend(expr.ignoreExprs) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr is not None: - return self.expr._parse( instring, loc, doActions, callPreParse=False ) - else: - raise ParseException("",loc,self.errmsg,self) - - def leaveWhitespace( self ): - self.skipWhitespace = False - self.expr = self.expr.copy() - if self.expr is not None: - self.expr.leaveWhitespace() - return self - - def ignore( self, other ): - if isinstance( other, Suppress ): - if other not in self.ignoreExprs: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - else: - super( ParseElementEnhance, self).ignore( other ) - if self.expr is not None: - self.expr.ignore( self.ignoreExprs[-1] ) - return self - - def streamline( self ): - super(ParseElementEnhance,self).streamline() - if self.expr is not None: - self.expr.streamline() - return self - - def checkRecursion( self, parseElementList ): - if self in parseElementList: - raise RecursiveGrammarException( parseElementList+[self] ) - subRecCheckList = parseElementList[:] + [ self ] - if self.expr is not None: - self.expr.checkRecursion( subRecCheckList ) - - def validate( self, validateTrace=[] ): - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion( [] ) - - def __str__( self ): - try: - return super(ParseElementEnhance,self).__str__() - except Exception: - pass - - if self.strRepr is None and self.expr is not None: - self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) - return self.strRepr - - -class FollowedBy(ParseElementEnhance): - """ - Lookahead matching of the given parse expression. C{FollowedBy} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list. 
- - Example:: - # use FollowedBy to match a label only if it is followed by a ':' - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() - prints:: - [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] - """ - def __init__( self, expr ): - super(FollowedBy,self).__init__(expr) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - self.expr.tryParse( instring, loc ) - return loc, [] - - -class NotAny(ParseElementEnhance): - """ - Lookahead to disallow matching with the given parse expression. C{NotAny} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression does I{not} match at the current - position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator. - - Example:: - - """ - def __init__( self, expr ): - super(NotAny,self).__init__(expr) - #~ self.leaveWhitespace() - self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs - self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - if self.expr.canParseNext(instring, loc): - raise ParseException(instring, loc, self.errmsg, self) - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "~{" + _ustr(self.expr) + "}" - - return self.strRepr - -class _MultipleMatch(ParseElementEnhance): - def __init__( self, expr, stopOn=None): - super(_MultipleMatch, self).__init__(expr) - self.saveAsList = True - ender = stopOn - if isinstance(ender, basestring): - ender = ParserElement._literalStringClass(ender) - self.not_ender = ~ender if ender is not None else None - - def parseImpl( self, instring, loc, doActions=True ): - self_expr_parse = self.expr._parse - self_skip_ignorables = self._skipIgnorables - check_ender = self.not_ender is not None - if check_ender: - try_not_ender = self.not_ender.tryParse - - # must be at least one (but first see if we are the stopOn sentinel; - # if so, fail) - if check_ender: - try_not_ender(instring, loc) - loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) - try: - hasIgnoreExprs = (not not self.ignoreExprs) - while 1: - if check_ender: - try_not_ender(instring, loc) - if hasIgnoreExprs: - preloc = self_skip_ignorables( instring, loc ) - else: - preloc = loc - loc, tmptokens = self_expr_parse( instring, preloc, doActions ) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens - except (ParseException,IndexError): - pass - - return loc, tokens - -class OneOrMore(_MultipleMatch): - """ - Repetition of one or more of the given expression. - - Parameters: - - expr - expression that must match one or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: BLACK" - OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] - - # use stopOn attribute for OneOrMore to avoid reading label string as part of the data - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - - # could also be written as - (attr_expr * (1,)).parseString(text).pprint() - """ - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "{" + _ustr(self.expr) + "}..." - - return self.strRepr - -class ZeroOrMore(_MultipleMatch): - """ - Optional repetition of zero or more of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) - - Example: similar to L{OneOrMore} - """ - def __init__( self, expr, stopOn=None): - super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) - except (ParseException,IndexError): - return loc, [] - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]..." - - return self.strRepr - -class _NullToken(object): - def __bool__(self): - return False - __nonzero__ = __bool__ - def __str__(self): - return "" - -_optionalNotMatched = _NullToken() -class Optional(ParseElementEnhance): - """ - Optional matching of the given expression. - - Parameters: - - expr - expression that must match zero or more times - - default (optional) - value to be returned if the optional expression is not found. - - Example:: - # US postal code can be a 5-digit zip, plus optional 4-digit qualifier - zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) - zip.runTests(''' - # traditional ZIP code - 12345 - - # ZIP+4 form - 12101-0001 - - # invalid ZIP - 98765- - ''') - prints:: - # traditional ZIP code - 12345 - ['12345'] - - # ZIP+4 form - 12101-0001 - ['12101-0001'] - - # invalid ZIP - 98765- - ^ - FAIL: Expected end of text (at char 5), (line:1, col:6) - """ - def __init__( self, expr, default=_optionalNotMatched ): - super(Optional,self).__init__( expr, savelist=False ) - self.saveAsList = self.expr.saveAsList - self.defaultValue = default - self.mayReturnEmpty = True - - def parseImpl( self, instring, loc, doActions=True ): - try: - loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) - except (ParseException,IndexError): - if self.defaultValue is not _optionalNotMatched: - if self.expr.resultsName: - tokens = ParseResults([ self.defaultValue ]) - tokens[self.expr.resultsName] = self.defaultValue - else: - tokens = [ self.defaultValue ] - else: - tokens = [] - return loc, tokens - - def __str__( self ): - if hasattr(self,"name"): - return self.name - - if self.strRepr is None: - self.strRepr = "[" + _ustr(self.expr) + "]" - - return self.strRepr - -class SkipTo(ParseElementEnhance): - """ - Token for skipping over all undefined text until the matched expression is found. 
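One more note on C{Optional} above: the C{default} value is substituted only when the wrapped expression fails to match, e.g. (illustrative usage):

    from pyparsing import Word, nums, Optional

    quantity = Word(nums) + Optional(Word(nums), default="0")
    print(quantity.parseString("12 34"))  # -> ['12', '34']
    print(quantity.parseString("12"))     # -> ['12', '0']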
- - Parameters: - - expr - target expression marking the end of the data to be skipped - - include - (default=C{False}) if True, the target expression is also parsed - (the skipped text and target expression are returned as a 2-element list). - - ignore - (default=C{None}) used to define grammars (typically quoted strings and - comments) that might contain false matches to the target expression - - failOn - (default=C{None}) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, - the SkipTo is not a match - - Example:: - report = ''' - Outstanding Issues Report - 1 Jan 2000 - - # | Severity | Description | Days Open - -----+----------+-------------------------------------------+----------- - 101 | Critical | Intermittent system crash | 6 - 94 | Cosmetic | Spelling error on Login ('log|n') | 14 - 79 | Minor | System slow when running too many reports | 47 - ''' - integer = Word(nums) - SEP = Suppress('|') - # use SkipTo to simply match everything up until the next SEP - # - ignore quoted strings, so that a '|' character inside a quoted string does not match - # - parse action will call token.strip() for each matched token, i.e., the description body - string_data = SkipTo(SEP, ignore=quotedString) - string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP - + integer("days_open")) - - for tkt in ticket_expr.searchString(report): - print tkt.dump() - prints:: - ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical - ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic - ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor - """ - def __init__( self, other, include=False, ignore=None, failOn=None ): - super( SkipTo, self ).__init__( other ) - self.ignoreExpr = ignore - self.mayReturnEmpty = True - self.mayIndexError = False - self.includeMatch = include - self.saveAsList = False - if isinstance(failOn, basestring): - self.failOn = ParserElement._literalStringClass(failOn) - else: - self.failOn = failOn - self.errmsg = "No match found for "+_ustr(self.expr) - - def parseImpl( self, instring, loc, doActions=True ): - startloc = loc - instrlen = len(instring) - expr = self.expr - expr_parse = self.expr._parse - self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None - self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - - tmploc = loc - while tmploc <= instrlen: - if self_failOn_canParseNext is not None: - # break if failOn expression matches - if self_failOn_canParseNext(instring, tmploc): - break - - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break - - try: - expr_parse(instring, tmploc, doActions=False, callPreParse=False) - except (ParseException, IndexError): - # no match, advance loc in string - tmploc += 1 - else: - # matched skipto expr, done - break - - else: - # ran off the end of the input string without matching skipto expr, fail - raise ParseException(instring, loc, self.errmsg, self) - - # build up 
return values - loc = tmploc - skiptext = instring[startloc:loc] - skipresult = ParseResults(skiptext) - - if self.includeMatch: - loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) - skipresult += mat - - return loc, skipresult - -class Forward(ParseElementEnhance): - """ - Forward declaration of an expression to be defined later - - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. - - Note: take care when assigning to C{Forward} not to overlook precedence of operators. - Specifically, '|' has a lower precedence than '<<', so that:: - fwdExpr << a | b | c - will actually be evaluated as:: - (fwdExpr << a) | b | c - thereby leaving b and c out as parseable alternatives. It is recommended that you - explicitly group the values inserted into the C{Forward}:: - fwdExpr << (a | b | c) - Converting to use the '<<=' operator instead will avoid this problem. - - See L{ParseResults.pprint} for an example of a recursive parser created using - C{Forward}. - """ - def __init__( self, other=None ): - super(Forward,self).__init__( other, savelist=False ) - - def __lshift__( self, other ): - if isinstance( other, basestring ): - other = ParserElement._literalStringClass(other) - self.expr = other - self.strRepr = None - self.mayIndexError = self.expr.mayIndexError - self.mayReturnEmpty = self.expr.mayReturnEmpty - self.setWhitespaceChars( self.expr.whiteChars ) - self.skipWhitespace = self.expr.skipWhitespace - self.saveAsList = self.expr.saveAsList - self.ignoreExprs.extend(self.expr.ignoreExprs) - return self - - def __ilshift__(self, other): - return self << other - - def leaveWhitespace( self ): - self.skipWhitespace = False - return self - - def streamline( self ): - if not self.streamlined: - self.streamlined = True - if self.expr is not None: - self.expr.streamline() - return self - - def validate( self, validateTrace=[] ): - if self not in validateTrace: - tmp = validateTrace[:]+[self] - if self.expr is not None: - self.expr.validate(tmp) - self.checkRecursion([]) - - def __str__( self ): - if hasattr(self,"name"): - return self.name - return self.__class__.__name__ + ": ..." - - # stubbed out for now - creates awful memory and perf issues - self._revertClass = self.__class__ - self.__class__ = _ForwardNoRecurse - try: - if self.expr is not None: - retString = _ustr(self.expr) - else: - retString = "None" - finally: - self.__class__ = self._revertClass - return self.__class__.__name__ + ": " + retString - - def copy(self): - if self.expr is not None: - return super(Forward,self).copy() - else: - ret = Forward() - ret <<= self - return ret - -class _ForwardNoRecurse(Forward): - def __str__( self ): - return "..." - -class TokenConverter(ParseElementEnhance): - """ - Abstract subclass of C{ParseExpression}, for converting parsed results. - """ - def __init__( self, expr, savelist=False ): - super(TokenConverter,self).__init__( expr )#, savelist ) - self.saveAsList = False - -class Combine(TokenConverter): - """ - Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. - - Example:: - real = Word(nums) + '.' + Word(nums) - print(real.parseString('3.1416')) # -> ['3', '.', '1416'] - # will also erroneously match the following - print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] - - real = Combine(Word(nums) + '.' + Word(nums)) - print(real.parseString('3.1416')) # -> ['3.1416'] - # no match when there are internal spaces - print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) - """ - def __init__( self, expr, joinString="", adjacent=True ): - super(Combine,self).__init__( expr ) - # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself - if adjacent: - self.leaveWhitespace() - self.adjacent = adjacent - self.skipWhitespace = True - self.joinString = joinString - self.callPreparse = True - - def ignore( self, other ): - if self.adjacent: - ParserElement.ignore(self, other) - else: - super( Combine, self).ignore( other ) - return self - - def postParse( self, instring, loc, tokenlist ): - retToks = tokenlist.copy() - del retToks[:] - retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) - - if self.resultsName and retToks.haskeys(): - return [ retToks ] - else: - return retToks - -class Group(TokenConverter): - """ - Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. - - Example:: - ident = Word(alphas) - num = Word(nums) - term = ident | num - func = ident + Optional(delimitedList(term)) - print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] - - func = ident + Group(Optional(delimitedList(term))) - print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] - """ - def __init__( self, expr ): - super(Group,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - return [ tokenlist ] - -class Dict(TokenConverter): - """ - Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. - - Example:: - data_word = Word(alphas) - label = data_word + FollowedBy(':') - attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) - - text = "shape: SQUARE posn: upper left color: light blue texture: burlap" - attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - - # print attributes as plain groups - print(OneOrMore(attr_expr).parseString(text).dump()) - - # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names - result = Dict(OneOrMore(Group(attr_expr))).parseString(text) - print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - prints:: - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] - - [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap - SQUARE - {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - See more examples at L{ParseResults} of accessing fields by results name. 
- """ - def __init__( self, expr ): - super(Dict,self).__init__( expr ) - self.saveAsList = True - - def postParse( self, instring, loc, tokenlist ): - for i,tok in enumerate(tokenlist): - if len(tok) == 0: - continue - ikey = tok[0] - if isinstance(ikey,int): - ikey = _ustr(tok[0]).strip() - if len(tok)==1: - tokenlist[ikey] = _ParseResultsWithOffset("",i) - elif len(tok)==2 and not isinstance(tok[1],ParseResults): - tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) - else: - dictvalue = tok.copy() #ParseResults(i) - del dictvalue[0] - if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) - else: - tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) - - if self.resultsName: - return [ tokenlist ] - else: - return tokenlist - - -class Suppress(TokenConverter): - """ - Converter for ignoring the results of a parsed expression. - - Example:: - source = "a, b, c,d" - wd = Word(alphas) - wd_list1 = wd + ZeroOrMore(',' + wd) - print(wd_list1.parseString(source)) - - # often, delimiters that are useful during parsing are just in the - # way afterward - use Suppress to keep them out of the parsed output - wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) - print(wd_list2.parseString(source)) - prints:: - ['a', ',', 'b', ',', 'c', ',', 'd'] - ['a', 'b', 'c', 'd'] - (See also L{delimitedList}.) - """ - def postParse( self, instring, loc, tokenlist ): - return [] - - def suppress( self ): - return self - - -class OnlyOnce(object): - """ - Wrapper for parse actions, to ensure they are only called once. - """ - def __init__(self, methodCall): - self.callable = _trim_arity(methodCall) - self.called = False - def __call__(self,s,l,t): - if not self.called: - results = self.callable(s,l,t) - self.called = True - return results - raise ParseException(s,l,"") - def reset(self): - self.called = False - -def traceParseAction(f): - """ - Decorator for debugging parse actions. - - When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} - When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. - - Example:: - wd = Word(alphas) - - @traceParseAction - def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens)))) - - wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) - print(wds.parseString("slkdjs sld sldd sdlf sdljf")) - prints:: - >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) - <3: - thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc - sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) - try: - ret = f(*paArgs) - except Exception as exc: - sys.stderr.write( "< ['aa', 'bb', 'cc'] - delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." - if combine: - return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) - else: - return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) - -def countedArray( expr, intExpr=None ): - """ - Helper to define a counted list of expressions. - This helper defines a pattern of the form:: - integer expr expr expr... - where the leading integer tells how many expr expressions follow. 
- The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. - - If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. - - Example:: - countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] - - # in this parser, the leading integer value is given in binary, - # '10' indicating that 2 values are in the array - binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) - countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] - """ - arrayExpr = Forward() - def countFieldParseAction(s,l,t): - n = t[0] - arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) - return [] - if intExpr is None: - intExpr = Word(nums).setParseAction(lambda t:int(t[0])) - else: - intExpr = intExpr.copy() - intExpr.setName("arrayLen") - intExpr.addParseAction(countFieldParseAction, callDuringTry=True) - return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') - -def _flatten(L): - ret = [] - for i in L: - if isinstance(i,list): - ret.extend(_flatten(i)) - else: - ret.append(i) - return ret - -def matchPreviousLiteral(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousLiteral(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - def copyTokenToRepeater(s,l,t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.asList()) - rep << And(Literal(tt) for tt in tflat) - else: - rep << Empty() - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def matchPreviousExpr(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: - first = Word(nums) - second = matchPreviousExpr(first) - matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches by - expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do I{not} use with packrat parsing enabled. - """ - rep = Forward() - e2 = expr.copy() - rep <<= e2 - def copyTokenToRepeater(s,l,t): - matchTokens = _flatten(t.asList()) - def mustMatchTheseTokens(s,l,t): - theseTokens = _flatten(t.asList()) - if theseTokens != matchTokens: - raise ParseException("",0,"") - rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) - expr.addParseAction(copyTokenToRepeater, callDuringTry=True) - rep.setName('(prev) ' + _ustr(expr)) - return rep - -def _escapeRegexRangeChars(s): - #~ escape these chars: ^-] - for c in r"\^-]": - s = s.replace(c,_bslash+c) - s = s.replace("\n",r"\n") - s = s.replace("\t",r"\t") - return _ustr(s) - -def oneOf( strs, caseless=False, useRegex=True ): - """ - Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. 
- - Parameters: - - strs - a string of space-delimited literals, or a collection of string literals - - caseless - (default=C{False}) - treat all literals as caseless - - useRegex - (default=C{True}) - as an optimization, will generate a Regex - object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or - if creating a C{Regex} raises an exception) - - Example:: - comp_oper = oneOf("< = > <= >= !=") - var = Word(alphas) - number = Word(nums) - term = var | number - comparison_expr = term + comp_oper + term - print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) - prints:: - [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] - """ - if caseless: - isequal = ( lambda a,b: a.upper() == b.upper() ) - masks = ( lambda a,b: b.upper().startswith(a.upper()) ) - parseElementClass = CaselessLiteral - else: - isequal = ( lambda a,b: a == b ) - masks = ( lambda a,b: b.startswith(a) ) - parseElementClass = Literal - - symbols = [] - if isinstance(strs,basestring): - symbols = strs.split() - elif isinstance(strs, Iterable): - symbols = list(strs) - else: - warnings.warn("Invalid argument to oneOf, expected string or iterable", - SyntaxWarning, stacklevel=2) - if not symbols: - return NoMatch() - - i = 0 - while i < len(symbols)-1: - cur = symbols[i] - for j,other in enumerate(symbols[i+1:]): - if ( isequal(other, cur) ): - del symbols[i+j+1] - break - elif ( masks(cur, other) ): - del symbols[i+j+1] - symbols.insert(i,other) - cur = other - break - else: - i += 1 - - if not caseless and useRegex: - #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) - try: - if len(symbols)==len("".join(symbols)): - return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) - else: - return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) - except Exception: - warnings.warn("Exception creating Regex for oneOf, building MatchFirst", - SyntaxWarning, stacklevel=2) - - - # last resort, just use MatchFirst - return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) - -def dictOf( key, value ): - """ - Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. 
-
-    Example::
-        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
-        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
-        print(OneOrMore(attr_expr).parseString(text).dump())
-
-        attr_label = label
-        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
-
-        # similar to Dict, but simpler call format
-        result = dictOf(attr_label, attr_value).parseString(text)
-        print(result.dump())
-        print(result['shape'])
-        print(result.shape)  # object attribute access works too
-        print(result.asDict())
-    prints::
-        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
-        - color: light blue
-        - posn: upper left
-        - shape: SQUARE
-        - texture: burlap
-        SQUARE
-        SQUARE
-        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
-    """
-    return Dict( ZeroOrMore( Group ( key + value ) ) )
-
-def originalTextFor(expr, asString=True):
-    """
-    Helper to return the original, untokenized text for a given expression. Useful to
-    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
-    revert separate tokens with intervening whitespace back to the original matching
-    input text. By default, returns astring containing the original parsed text.
-
-    If the optional C{asString} argument is passed as C{False}, then the return value is a
-    C{L{ParseResults}} containing any results names that were originally matched, and a
-    single token containing the original matched text from the input string. So if
-    the expression passed to C{L{originalTextFor}} contains expressions with defined
-    results names, you must set C{asString} to C{False} if you want to preserve those
-    results name values.
-
-    Example::
-        src = "this is test <b> bold <i>text</i> </b> normal text "
-        for tag in ("b","i"):
-            opener,closer = makeHTMLTags(tag)
-            patt = originalTextFor(opener + SkipTo(closer) + closer)
-            print(patt.searchString(src)[0])
-    prints::
-        ['<b> bold <i>text</i> </b>']
-        ['<i>text</i>']
-    """
-    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
-    endlocMarker = locMarker.copy()
-    endlocMarker.callPreparse = False
-    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
-    if asString:
-        extractText = lambda s,l,t: s[t._original_start:t._original_end]
-    else:
-        def extractText(s,l,t):
-            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
-    matchExpr.setParseAction(extractText)
-    matchExpr.ignoreExprs = expr.ignoreExprs
-    return matchExpr
-
-def ungroup(expr):
-    """
-    Helper to undo pyparsing's default grouping of And expressions, even
-    if all but one are non-empty.
-    """
-    return TokenConverter(expr).setParseAction(lambda t:t[0])
-
-def locatedExpr(expr):
-    """
-    Helper to decorate a returned token with its starting and ending locations in the input string.
-    This helper adds the following results names:
-     - locn_start = location where matched expression begins
-     - locn_end = location where matched expression ends
-     - value = the actual parsed results
-
-    Be careful if the input text contains C{<TAB>} characters, you may want to call
-    C{L{ParserElement.parseWithTabs}}
-
-    Example::
-        wd = Word(alphas)
-        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
-            print(match)
-    prints::
-        [[0, 'ljsdf', 5]]
-        [[8, 'lksdjjf', 15]]
-        [[18, 'lkkjj', 23]]
-    """
-    locator = Empty().setParseAction(lambda s,l,t: l)
-    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
-
-
-# convenience constants for positional expressions
-empty = Empty().setName("empty")
-lineStart = LineStart().setName("lineStart")
-lineEnd = LineEnd().setName("lineEnd")
-stringStart = StringStart().setName("stringStart")
-stringEnd = StringEnd().setName("stringEnd")
-
-_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
-_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
-_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
-_charRange = Group(_singleChar + Suppress("-") + _singleChar)
-_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
-
-def srange(s):
-    r"""
-    Helper to easily define string ranges for use in Word construction. Borrows
-    syntax from regexp '[]' string range definitions::
-        srange("[0-9]")   -> "0123456789"
-        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
-        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
-    The input string must be enclosed in []'s, and the returned string is the expanded
-    character set joined into a single string.
-    The values enclosed in the []'s may be:
-     - a single character
-     - an escaped character with a leading backslash (such as C{\-} or C{\]})
-     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)
-         (C{\0x##} is also supported for backwards compatibility)
-     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
-     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
-     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
-    """
-    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
-    try:
-        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
-    except Exception:
-        return ""
-
-def matchOnlyAtCol(n):
-    """
-    Helper method for defining parse actions that require matching at a specific
-    column in the input text.
-    """
-    def verifyCol(strg,locn,toks):
-        if col(locn,strg) != n:
-            raise ParseException(strg,locn,"matched token not at column %d" % n)
-    return verifyCol
-
-def replaceWith(replStr):
-    """
-    Helper method for common parse actions that simply return a literal value. Especially
-    useful when used with C{L{transformString}()}.
- - Example:: - num = Word(nums).setParseAction(lambda toks: int(toks[0])) - na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) - term = na | num - - OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] - """ - return lambda s,l,t: [replStr] - -def removeQuotes(s,l,t): - """ - Helper parse action for removing quotation marks from parsed quoted strings. - - Example:: - # by default, quotation marks are included in parsed results - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] - - # use removeQuotes to strip quotation marks from parsed results - quotedString.setParseAction(removeQuotes) - quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] - """ - return t[0][1:-1] - -def tokenMap(func, *args): - """ - Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional - args are passed, they are forwarded to the given function as additional arguments after - the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the - parsed data to an integer using base 16. - - Example (compare the last to example in L{ParserElement.transformString}:: - hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) - hex_ints.runTests(''' - 00 11 22 aa FF 0a 0d 1a - ''') - - upperword = Word(alphas).setParseAction(tokenMap(str.upper)) - OneOrMore(upperword).runTests(''' - my kingdom for a horse - ''') - - wd = Word(alphas).setParseAction(tokenMap(str.title)) - OneOrMore(wd).setParseAction(' '.join).runTests(''' - now is the winter of our discontent made glorious summer by this sun of york - ''') - prints:: - 00 11 22 aa FF 0a 0d 1a - [0, 17, 34, 170, 255, 10, 13, 26] - - my kingdom for a horse - ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] - - now is the winter of our discontent made glorious summer by this sun of york - ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] - """ - def pa(s,l,t): - return [func(tokn, *args) for tokn in t] - - try: - func_name = getattr(func, '__name__', - getattr(func, '__class__').__name__) - except Exception: - func_name = str(func) - pa.__name__ = func_name - - return pa - -upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" - -downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
-
-def _makeTags(tagStr, xml):
-    """Internal helper to construct opening and closing tag expressions, given a tag name"""
-    if isinstance(tagStr,basestring):
-        resname = tagStr
-        tagStr = Keyword(tagStr, caseless=not xml)
-    else:
-        resname = tagStr.name
-
-    tagAttrName = Word(alphas,alphanums+"_-:")
-    if (xml):
-        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
-        openTag = Suppress("<") + tagStr("tag") + \
-                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
-                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
-    else:
-        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
-        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
-        openTag = Suppress("<") + tagStr("tag") + \
-                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
-                Optional( Suppress("=") + tagAttrValue ) ))) + \
-                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
-    closeTag = Combine(_L("</") + tagStr + ">")
-
-    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
-    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
-    openTag.tag = resname
-    closeTag.tag = resname
-    return openTag, closeTag
-
-def makeHTMLTags(tagStr):
-    """
-    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
-    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
-
-    Example::
-        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
-        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
-        a,a_end = makeHTMLTags("A")
-        link_expr = a + SkipTo(a_end)("link_text") + a_end
-
-        for link in link_expr.searchString(text):
-            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
-            print(link.link_text, '->', link.href)
-    prints::
-        pyparsing -> http://pyparsing.wikispaces.com
-    """
-    return _makeTags( tagStr, False )
-
-def makeXMLTags(tagStr):
-    """
-    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
-    tags only in the given upper/lower case.
-
-    Example: similar to L{makeHTMLTags}
-    """
-    return _makeTags( tagStr, True )
-
-def withAttribute(*args,**attrDict):
-    """
-    Helper to create a validating parse action to be used with start tags created
-    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
-    with a required attribute value, to avoid false matches on common tags such as
-    C{<TD>} or C{<DIV>}.
-
-    Call C{withAttribute} with a series of attribute names and values. Specify the list
-    of filter attributes names and values as:
-     - keyword arguments, as in C{(align="right")}, or
-     - as an explicit dict with C{**} operator, when an attribute name is also a Python
-       reserved word, as in C{**{"class":"Customer", "align":"right"}}
-     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
-    For attribute names with a namespace prefix, you must use the second form. Attribute
-    names are matched insensitive to upper/lower case.
-
-    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
-
-    To verify that the attribute exists, but without specifying a value, pass
-    C{withAttribute.ANY_VALUE} as the value.
-
-    Example::
-        html = '''
-            <div>
-            Some text
-            <div type="grid">1 4 0 1 0</div>
-            <div type="graph">1,3 2,3 1,1</div>
-            <div>this has no type</div>
-            </div>
-
-        '''
-        div,div_end = makeHTMLTags("div")
-
-        # only match div tag having a type attribute with value "grid"
-        div_grid = div().setParseAction(withAttribute(type="grid"))
-        grid_expr = div_grid + SkipTo(div | div_end)("body")
-        for grid_header in grid_expr.searchString(html):
-            print(grid_header.body)
-
-        # construct a match with any div tag having a type attribute, regardless of the value
-        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
-        div_expr = div_any_type + SkipTo(div | div_end)("body")
-        for div_header in div_expr.searchString(html):
-            print(div_header.body)
-    prints::
-        1 4 0 1 0
-
-        1 4 0 1 0
-        1,3 2,3 1,1
-    """
-    if args:
-        attrs = args[:]
-    else:
-        attrs = attrDict.items()
-    attrs = [(k,v) for k,v in attrs]
-    def pa(s,l,tokens):
-        for attrName,attrValue in attrs:
-            if attrName not in tokens:
-                raise ParseException(s,l,"no matching attribute " + attrName)
-            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
-                raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
-                                        (attrName, tokens[attrName], attrValue))
-    return pa
-withAttribute.ANY_VALUE = object()

-def withClass(classname, namespace=''):
-    """
-    Simplified version of C{L{withAttribute}} when matching on a div class - made
-    difficult because C{class} is a reserved word in Python.
-
-    Example::
-        html = '''
-            <div>
-            Some text
-            <div class="grid">1 4 0 1 0</div>
-            <div class="graph">1,3 2,3 1,1</div>
-            <div>this &lt;div&gt; has no class</div>
-            </div>
-
-        '''
-        div,div_end = makeHTMLTags("div")
-        div_grid = div().setParseAction(withClass("grid"))
-
-        grid_expr = div_grid + SkipTo(div | div_end)("body")
-        for grid_header in grid_expr.searchString(html):
-            print(grid_header.body)
-
-        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
-        div_expr = div_any_type + SkipTo(div | div_end)("body")
-        for div_header in div_expr.searchString(html):
-            print(div_header.body)
-    prints::
-        1 4 0 1 0
-
-        1 4 0 1 0
-        1,3 2,3 1,1
-    """
-    classattr = "%s:class" % namespace if namespace else "class"
-    return withAttribute(**{classattr : classname})
-
-opAssoc = _Constants()
-opAssoc.LEFT = object()
-opAssoc.RIGHT = object()
-
-def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
-    """
-    Helper method for constructing grammars of expressions made up of
-    operators working in a precedence hierarchy. Operators may be unary or
-    binary, left- or right-associative. Parse actions can also be attached
-    to operator expressions. The generated parser will also recognize the use
-    of parentheses to override operator precedences (see example below).
-
-    Note: if you define a deep operator list, you may see performance issues
-    when using infixNotation. See L{ParserElement.enablePackrat} for a
-    mechanism to potentially improve your parser performance.
-
-    Parameters:
-     - baseExpr - expression representing the most basic element for the nested
-     - opList - list of tuples, one for each operator precedence level in the
-      expression grammar; each tuple is of the form
-      (opExpr, numTerms, rightLeftAssoc, parseAction), where:
-       - opExpr is the pyparsing expression for the operator;
-          may also be a string, which will be converted to a Literal;
-          if numTerms is 3, opExpr is a tuple of two expressions, for the
-          two operators separating the 3 terms
-       - numTerms is the number of terms for this operator (must
-          be 1, 2, or 3)
-       - rightLeftAssoc is the indicator whether the operator is
-          right or left associative, using the pyparsing-defined
-          constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- - parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) - - Example:: - # simple example of four-function arithmetic with ints and variable names - integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - - arith_expr = infixNotation(integer | varname, - [ - ('-', 1, opAssoc.RIGHT), - (oneOf('* /'), 2, opAssoc.LEFT), - (oneOf('+ -'), 2, opAssoc.LEFT), - ]) - - arith_expr.runTests(''' - 5+3*6 - (5+3)*6 - -2--11 - ''', fullDump=False) - prints:: - 5+3*6 - [[5, '+', [3, '*', 6]]] - - (5+3)*6 - [[[5, '+', 3], '*', 6]] - - -2--11 - [[['-', 2], '-', ['-', 11]]] - """ - ret = Forward() - lastExpr = baseExpr | ( lpar + ret + rpar ) - for i,operDef in enumerate(opList): - opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] - termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr - if arity == 3: - if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") - opExpr1, opExpr2 = opExpr - thisExpr = Forward().setName(termName) - if rightLeftAssoc == opAssoc.LEFT: - if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) - else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ - Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - elif rightLeftAssoc == opAssoc.RIGHT: - if arity == 1: - # try to avoid LR with this extra test - if not isinstance(opExpr, Optional): - opExpr = Optional(opExpr) - matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) - elif arity == 2: - if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) - else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) - elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ - Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) - else: - raise ValueError("operator must be unary (1), binary (2), or ternary (3)") - else: - raise ValueError("operator must indicate right or left associativity") - if pa: - if isinstance(pa, (tuple, list)): - matchExpr.setParseAction(*pa) - else: - matchExpr.setParseAction(pa) - thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) - lastExpr = thisExpr - ret <<= lastExpr - return ret - -operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" - -dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") -sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") 
-quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| - Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") -unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") - -def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). - - Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) - - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. - - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. - The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. - - Example:: - data_type = oneOf("void int short long char float double") - decl_data_type = Combine(data_type + Optional(Word('*'))) - ident = Word(alphas+'_', alphanums+'_') - number = pyparsing_common.number - arg = Group(decl_data_type + ident) - LPAR,RPAR = map(Suppress, "()") - - code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - - c_function = (decl_data_type("type") - + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR - + code_body("body")) - c_function.ignore(cStyleComment) - - source_code = ''' - int is_odd(int x) { - return (x%2); - } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { - return (10+ord(hchar)-ord('A')); - } - } - ''' - for func in c_function.searchString(source_code): - print("%(name)s (%(type)s) args: %(args)s" % func) - - prints:: - is_odd (int) args: [['int', 'x']] - dec_to_hex (int) args: [['char', 'hchar']] - """ - if opener == closer: - raise ValueError("opening and closing strings cannot be the same") - if content is None: - if isinstance(opener,basestring) and isinstance(closer,basestring): - if len(opener) == 1 and len(closer)==1: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS - ).setParseAction(lambda t:t[0].strip())) - else: - if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + - ~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + - CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) - ).setParseAction(lambda t:t[0].strip())) - else: - raise ValueError("opening and closing arguments must be strings if no content expression is given") - ret = 
Forward() - if ignoreExpr is not None: - ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) - else: - ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) - ret.setName('nested %s%s expression' % (opener,closer)) - return ret - -def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. - - Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - - A valid block must contain at least one C{blockStatement}. - - Example:: - data = ''' - def A(z): - A1 - B = 100 - G = A2 - A2 - A3 - B - def BB(a,b,c): - BB1 - def BBA(): - bba1 - bba2 - bba3 - C - D - def spam(x,y): - def eggs(z): - pass - ''' - - - indentStack = [1] - stmt = Forward() - - identifier = Word(alphas, alphanums) - funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") - func_body = indentedBlock(stmt, indentStack) - funcDef = Group( funcDecl + func_body ) - - rvalue = Forward() - funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") - rvalue << (funcCall | identifier | Word(nums)) - assignment = Group(identifier + "=" + rvalue) - stmt << ( funcDef | assignment | identifier ) - - module_body = OneOrMore(stmt) - - parseTree = module_body.parseString(data) - parseTree.pprint() - prints:: - [['def', - 'A', - ['(', 'z', ')'], - ':', - [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], - 'B', - ['def', - 'BB', - ['(', 'a', 'b', 'c', ')'], - ':', - [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], - 'C', - 'D', - ['def', - 'spam', - ['(', 'x', 'y', ')'], - ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] - """ - def checkPeerIndent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if curCol != indentStack[-1]: - if curCol > indentStack[-1]: - raise ParseFatalException(s,l,"illegal nesting") - raise ParseException(s,l,"not a peer entry") - - def checkSubIndent(s,l,t): - curCol = col(l,s) - if curCol > indentStack[-1]: - indentStack.append( curCol ) - else: - raise ParseException(s,l,"not a subentry") - - def checkUnindent(s,l,t): - if l >= len(s): return - curCol = col(l,s) - if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): - raise ParseException(s,l,"not an unindent") - indentStack.pop() - - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) - INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') - PEER = Empty().setParseAction(checkPeerIndent).setName('') - UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') - if indent: - smExpr = Group( Optional(NL) + - #~ FollowedBy(blockStatementExpr) + - INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) - else: - smExpr = Group( Optional(NL) + - (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) - blockStatementExpr.ignore(_bslash + LineEnd()) - return smExpr.setName('indented block') - -alphas8bit = 
srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") -punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") - -anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) -_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) -commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") -def replaceHTMLEntity(t): - """Helper parser action to replace common HTML entities with their special characters""" - return _htmlEntityMap.get(t.entity) - -# it's easy to get these comment structures wrong - they're very common, so may as well make them available -cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form C{/* ... */}" - -htmlComment = Regex(r"").setName("HTML comment") -"Comment of the form C{}" - -restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") -dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form C{// ... (to end of line)}" - -cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" - -javaStyleComment = cppStyleComment -"Same as C{L{cppStyleComment}}" - -pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form C{# ... (to end of line)}" - -_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + - Optional( Word(" \t") + - ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") -commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. 
- This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" - -# some other useful expressions - using lower-case class name since we are really using this as a namespace -class pyparsing_common: - """ - Here are some common low-level expressions that may be useful in jump-starting parser development: - - numeric forms (L{integers}, L{reals}, L{scientific notation}) - - common L{programming identifiers} - - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - - ISO8601 L{dates} and L{datetime} - - L{UUID} - - L{comma-separated list} - Parse actions: - - C{L{convertToInteger}} - - C{L{convertToFloat}} - - C{L{convertToDate}} - - C{L{convertToDatetime}} - - C{L{stripHTMLTags}} - - C{L{upcaseTokens}} - - C{L{downcaseTokens}} - - Example:: - pyparsing_common.number.runTests(''' - # any int or real number, returned as the appropriate type - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.fnumber.runTests(''' - # any int or real number, returned as float - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - ''') - - pyparsing_common.hex_integer.runTests(''' - # hex numbers - 100 - FF - ''') - - pyparsing_common.fraction.runTests(''' - # fractions - 1/2 - -3/4 - ''') - - pyparsing_common.mixed_integer.runTests(''' - # mixed fractions - 1 - 1/2 - -3/4 - 1-3/4 - ''') - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' - # uuid - 12345678-1234-5678-1234-567812345678 - ''') - prints:: - # any int or real number, returned as the appropriate type - 100 - [100] - - -100 - [-100] - - +100 - [100] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # any int or real number, returned as float - 100 - [100.0] - - -100 - [-100.0] - - +100 - [100.0] - - 3.14159 - [3.14159] - - 6.02e23 - [6.02e+23] - - 1e-12 - [1e-12] - - # hex numbers - 100 - [256] - - FF - [255] - - # fractions - 1/2 - [0.5] - - -3/4 - [-0.75] - - # mixed fractions - 1 - [1] - - 1/2 - [0.5] - - -3/4 - [-0.75] - - 1-3/4 - [1.75] - - # uuid - 12345678-1234-5678-1234-567812345678 - [UUID('12345678-1234-5678-1234-567812345678')] - """ - - convertToInteger = tokenMap(int) - """ - Parse action for converting parsed integers to Python int - """ - - convertToFloat = tokenMap(float) - """ - Parse action for converting parsed numbers to Python float - """ - - integer = Word(nums).setName("integer").setParseAction(convertToInteger) - """expression that parses an unsigned integer, returns an int""" - - hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) - """expression that parses a hexadecimal integer, returns an int""" - - signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) - """expression that parses an integer with optional leading sign, returns an int""" - - fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") - """fractional expression of an integer divided by an integer, returns a float""" - fraction.addParseAction(lambda t: t[0]/t[-1]) - - mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") - """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" - mixed_integer.addParseAction(sum) - - real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) - """expression that parses a floating point number and returns a 
float""" - - sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" - - # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).streamline() - """any numeric expression, returns the corresponding Python type""" - - fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) - """any int or real number, returned as float""" - - identifier = Word(alphas+'_', alphanums+'_').setName("identifier") - """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - - ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" - - _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") - _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") - _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") - _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) - _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") - ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") - "IPv6 address (long, short, or mixed form)" - - mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") - "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" - - @staticmethod - def convertToDate(fmt="%Y-%m-%d"): - """ - Helper to create a parse action for converting parsed date string to Python datetime.date - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) - - Example:: - date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) - prints:: - [datetime.date(1999, 12, 31)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt).date() - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - @staticmethod - def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """ - Helper to create a parse action for converting parsed datetime string to Python datetime.datetime - - Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) - - Example:: - dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) - prints:: - [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] - """ - def cvt_fn(s,l,t): - try: - return datetime.strptime(t[0], fmt) - except ValueError as ve: - raise ParseException(s, l, str(ve)) - return cvt_fn - - iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (C{yyyy-mm-dd})" - - iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" - - uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" - - _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() - @staticmethod - def stripHTMLTags(s, l, tokens): - """ - Parse action to remove HTML tags from web page HTML source - - Example:: - # strip HTML links from normal text - text = 'More info at the
pyparsing wiki page' - td,td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - - print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' - """ - return pyparsing_common._html_stripper.transformString(tokens[0]) - - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') - + Optional( White(" \t") ) ) ).streamline().setName("commaItem") - comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") - """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" - - upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) - """Parse action to convert tokens to upper case.""" - - downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) - """Parse action to convert tokens to lower case.""" - - -if __name__ == "__main__": - - selectToken = CaselessLiteral("select") - fromToken = CaselessLiteral("from") - - ident = Word(alphas, alphanums + "_$") - - columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - columnNameList = Group(delimitedList(columnName)).setName("columns") - columnSpec = ('*' | columnNameList) - - tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) - tableNameList = Group(delimitedList(tableName)).setName("tables") - - simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") - - # demo runTests method, including embedded comments in test string - simpleSQL.runTests(""" - # '*' as column list and dotted table name - select * from SYS.XYZZY - - # caseless match on "SELECT", and casts back to "select" - SELECT * from XYZZY, ABC - - # list of column names, and mixed case SELECT keyword - Select AA,BB,CC from Sys.dual - - # multiple tables - Select A, B, C from Sys.dual, Table2 - - # invalid SELECT keyword - should fail - Xelect A, B, C from Sys.dual - - # incomplete command - should fail - Select - - # invalid column name - should fail - Select ^^^ frox Sys.dual - - """) - - pyparsing_common.number.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - # any int or real number, returned as float - pyparsing_common.fnumber.runTests(""" - 100 - -100 - +100 - 3.14159 - 6.02e23 - 1e-12 - """) - - pyparsing_common.hex_integer.runTests(""" - 100 - FF - """) - - import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(""" - 12345678-1234-5678-1234-567812345678 - """) diff --git a/pype/vendor/pysync.py b/pype/vendor/pysync.py deleted file mode 100644 index 14a6dda34c..0000000000 --- a/pype/vendor/pysync.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/local/bin/python3 -# https://github.com/snullp/pySync/blob/master/pySync.py - -import sys -import shutil -import os -import time -import configparser -from os.path import ( - getsize, - getmtime, - isfile, - isdir, - join, - abspath, - expanduser, - realpath -) -import logging - -log = logging.getLogger(__name__) - -ignoreFiles = ("Thumbs.db", ".DS_Store") - -# this feature is not yet implemented -ignorePaths = [] - -if os.name == 'nt': - # msvcrt can't function correctly in IDLE - if 'idlelib.run' in sys.modules: - print("Please don't run this script in IDLE.") - sys.exit(0) - import msvcrt - - def flush_input(str, set=None): - if not set: - while msvcrt.kbhit(): - ch = msvcrt.getch() - if ch == '\xff': - print("msvcrt is 
broken, this is weird.") - sys.exit(0) - return input(str) - else: - return set -else: - import select - - def flush_input(str, set=None): - if not set: - while len(select.select([sys.stdin.fileno()], [], [], 0.0)[0]) > 0: - os.read(sys.stdin.fileno(), 4096) - return input(str) - else: - return set - - -def compare(fa, fb, options_input=[]): - if isfile(fa) == isfile(fb): - if isdir(fa): - walktree(fa, fb, options_input) - elif isfile(fa): - if getsize(fa) != getsize(fb) \ - or int(getmtime(fa)) != int(getmtime(fb)): - log.info(str((fa, ': size=', getsize(fa), 'mtime=', - time.asctime(time.localtime(getmtime(fa)))))) - log.info(str((fb, ': size=', getsize(fb), 'mtime=', - time.asctime(time.localtime(getmtime(fb)))))) - if getmtime(fa) > getmtime(fb): - act = '>' - else: - act = '<' - - set = [i for i in options_input if i in [">", "<"]][0] - - s = flush_input('What to do?(>,<,r,n)[' + act + ']', set=set) - if len(s) > 0: - act = s[0] - if act == '>': - shutil.copy2(fa, fb) - elif act == '<': - shutil.copy2(fb, fa) - elif act == 'r': - if isdir(fa): - shutil.rmtree(fa) - elif isfile(fa): - os.remove(fa) - else: - log.info(str(('Remove: Skipping', fa))) - if isdir(fb): - shutil.rmtree(fb) - elif isfile(fb): - os.remove(fb) - else: - log.info(str(('Remove: Skipping', fb))) - - else: - log.debug(str(('Compare: Skipping non-dir and non-file', fa))) - else: - log.error(str(('Error:', fa, ',', fb, 'have different file type'))) - - -def copy(fa, fb, options_input=[]): - set = [i for i in options_input if i in ["y"]][0] - s = flush_input('Copy ' + fa + ' to another side?(r,y,n)[y]', set=set) - if len(s) > 0: - act = s[0] - else: - act = 'y' - if act == 'y': - if isdir(fa): - shutil.copytree(fa, fb) - elif isfile(fa): - shutil.copy2(fa, fb) - else: - log.debug(str(('Copy: Skipping ', fa))) - elif act == 'r': - if isdir(fa): - shutil.rmtree(fa) - elif isfile(fa): - os.remove(fa) - else: - log.debug(str(('Remove: Skipping ', fa))) - - -stoentry = [] -tarentry = [] - - -def walktree(source, target, options_input=[]): - srclist = os.listdir(source) - tarlist = os.listdir(target) - if '!sync' in srclist: - return - if '!sync' in tarlist: - return - # files in source dir... 
- for f in srclist: - if f in ignoreFiles: - continue - spath = join(source, f) - tpath = join(target, f) - if spath in ignorePaths: - continue - if spath in stoentry: - # just in case target also have this one - if f in tarlist: - del tarlist[tarlist.index(f)] - continue - - # if also exists in target dir - if f in tarlist: - del tarlist[tarlist.index(f)] - compare(spath, tpath, options_input) - - # exists in source dir only - else: - copy(spath, tpath, options_input) - - # exists in target dir only - set = [i for i in options_input if i in ["<"]] - - for f in tarlist: - if f in ignoreFiles: - continue - spath = join(source, f) - tpath = join(target, f) - if tpath in ignorePaths: - continue - if tpath in tarentry: - continue - if set: - copy(tpath, spath, options_input) - else: - print("REMOVING: {}".format(f)) - if os.path.isdir(tpath): - shutil.rmtree(tpath) - else: - os.remove(tpath) - print("REMOVING: {}".format(f)) - - -if __name__ == '__main__': - stoconf = configparser.RawConfigParser() - tarconf = configparser.RawConfigParser() - stoconf.read("pySync.ini") - tarconf.read(expanduser("~/.pysync")) - stoname = stoconf.sections()[0] - tarname = tarconf.sections()[0] - - # calculate storage's base folder - if stoconf.has_option(stoname, 'BASE'): - stobase = abspath(stoconf.get(stoname, 'BASE')) - stoconf.remove_option(stoname, 'BASE') - else: - stobase = os.getcwd() - - # same, for target's base folder - if tarconf.has_option(tarname, 'BASE'): - tarbase = abspath(tarconf.get(tarname, 'BASE')) - tarconf.remove_option(tarname, 'BASE') - else: - tarbase = expanduser('~/') - - print("Syncing between", stoname, "and", tarname) - sto_content = {x: realpath(join(stobase, stoconf.get(stoname, x))) - for x in stoconf.options(stoname)} - tar_content = {x: realpath(join(tarbase, tarconf.get(tarname, x))) - for x in tarconf.options(tarname)} - stoentry = [sto_content[x] for x in sto_content] - tarentry = [tar_content[x] for x in tar_content] - - for folder in sto_content: - if folder in tar_content: - print('Processing', folder) - walktree(sto_content[folder], tar_content[folder], options_input) - print("Done.") diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/INSTALLER b/pype/vendor/python_dateutil-2.7.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e38a..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/LICENSE.txt b/pype/vendor/python_dateutil-2.7.3.dist-info/LICENSE.txt deleted file mode 100644 index 1e65815cf0..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/LICENSE.txt +++ /dev/null @@ -1,54 +0,0 @@ -Copyright 2017- Paul Ganssle -Copyright 2017- dateutil contributors (see AUTHORS file) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -The above license applies to all contributions after 2017-12-01, as well as -all contributions that have been re-licensed (see AUTHORS file for the list of -contributors who have re-licensed their code). 
--------------------------------------------------------------------------------- -dateutil - Extensions to the standard Python datetime module. - -Copyright (c) 2003-2011 - Gustavo Niemeyer -Copyright (c) 2012-2014 - Tomi Pieviläinen -Copyright (c) 2014-2016 - Yaron de Leeuw -Copyright (c) 2015- - Paul Ganssle -Copyright (c) 2015- - dateutil contributors (see AUTHORS file) - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -The above BSD License Applies to all code, even that also covered by Apache 2.0. 
\ No newline at end of file diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/METADATA b/pype/vendor/python_dateutil-2.7.3.dist-info/METADATA deleted file mode 100644 index 935943e295..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/METADATA +++ /dev/null @@ -1,190 +0,0 @@ -Metadata-Version: 2.1 -Name: python-dateutil -Version: 2.7.3 -Summary: Extensions to the standard Python datetime module -Home-page: https://dateutil.readthedocs.io -Author: Gustavo Niemeyer -Author-email: gustavo@niemeyer.net -Maintainer: Paul Ganssle -Maintainer-email: dateutil@python.org -License: Dual License -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Topic :: Software Development :: Libraries -Requires: six -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.* -Description-Content-Type: text/x-rst -Requires-Dist: six (>=1.5) - -dateutil - powerful extensions to datetime -========================================== - -|pypi| |support| |licence| - -|gitter| |readthedocs| - -|travis| |appveyor| |coverage| - -.. |pypi| image:: https://img.shields.io/pypi/v/python-dateutil.svg?style=flat-square - :target: https://pypi.org/project/python-dateutil/ - :alt: pypi version - -.. |support| image:: https://img.shields.io/pypi/pyversions/python-dateutil.svg?style=flat-square - :target: https://pypi.org/project/python-dateutil/ - :alt: supported Python version - -.. |travis| image:: https://img.shields.io/travis/dateutil/dateutil/master.svg?style=flat-square&label=Travis%20Build - :target: https://travis-ci.org/dateutil/dateutil - :alt: travis build status - -.. |appveyor| image:: https://img.shields.io/appveyor/ci/dateutil/dateutil/master.svg?style=flat-square&logo=appveyor - :target: https://ci.appveyor.com/project/dateutil/dateutil - :alt: appveyor build status - -.. |coverage| image:: https://codecov.io/github/dateutil/dateutil/coverage.svg?branch=master - :target: https://codecov.io/github/dateutil/dateutil?branch=master - :alt: Code coverage - -.. |gitter| image:: https://badges.gitter.im/dateutil/dateutil.svg - :alt: Join the chat at https://gitter.im/dateutil/dateutil - :target: https://gitter.im/dateutil/dateutil - -.. |licence| image:: https://img.shields.io/pypi/l/python-dateutil.svg?style=flat-square - :target: https://pypi.org/project/python-dateutil/ - :alt: licence - -.. |readthedocs| image:: https://img.shields.io/readthedocs/dateutil/latest.svg?style=flat-square&label=Read%20the%20Docs - :alt: Read the documentation at https://dateutil.readthedocs.io/en/latest/ - :target: https://dateutil.readthedocs.io/en/latest/ - -The `dateutil` module provides powerful extensions to -the standard `datetime` module, available in Python. 
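- -A minimal illustration (an assumed example, not part of the upstream -README) of two of the pieces above, `parser` and `relativedelta`: - -.. code-block:: python3 - - >>> from dateutil.parser import parse - >>> from dateutil.relativedelta import relativedelta - >>> parse("2003-09-25T10:49:41") + relativedelta(months=+1) - datetime.datetime(2003, 10, 25, 10, 49, 41)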
- - -Download -======== -dateutil is available on PyPI -https://pypi.org/project/python-dateutil/ - -The documentation is hosted at: -https://dateutil.readthedocs.io/en/stable/ - -Code -==== -The code and issue tracker are hosted on Github: -https://github.com/dateutil/dateutil/ - -Features -======== - -* Computing of relative deltas (next month, next year, - next monday, last week of month, etc); -* Computing of relative deltas between two given - date and/or datetime objects; -* Computing of dates based on very flexible recurrence rules, - using a superset of the `iCalendar `_ - specification. Parsing of RFC strings is supported as well. -* Generic parsing of dates in almost any string format; -* Timezone (tzinfo) implementations for tzfile(5) format - files (/etc/localtime, /usr/share/zoneinfo, etc), TZ - environment string (in all known formats), iCalendar - format files, given ranges (with help from relative deltas), - local machine timezone, fixed offset timezone, UTC timezone, - and Windows registry-based time zones. -* Internal up-to-date world timezone information based on - Olson's database. -* Computing of Easter Sunday dates for any given year, - using Western, Orthodox or Julian algorithms; -* A comprehensive test suite. - -Quick example -============= -Here's a snapshot, just to give an idea about the power of the -package. For more examples, look at the documentation. - -Suppose you want to know how much time is left, in -years/months/days/etc, before the next easter happening on a -year with a Friday 13th in August, and you want to get today's -date out of the "date" unix system command. Here is the code: - -.. code-block:: python3 - - >>> from dateutil.relativedelta import * - >>> from dateutil.easter import * - >>> from dateutil.rrule import * - >>> from dateutil.parser import * - >>> from datetime import * - >>> now = parse("Sat Oct 11 17:13:46 UTC 2003") - >>> today = now.date() - >>> year = rrule(YEARLY,dtstart=now,bymonth=8,bymonthday=13,byweekday=FR)[0].year - >>> rdelta = relativedelta(easter(year), today) - >>> print("Today is: %s" % today) - Today is: 2003-10-11 - >>> print("Year with next Aug 13th on a Friday is: %s" % year) - Year with next Aug 13th on a Friday is: 2004 - >>> print("How far is the Easter of that year: %s" % rdelta) - How far is the Easter of that year: relativedelta(months=+6) - >>> print("And the Easter of that year is: %s" % (today+rdelta)) - And the Easter of that year is: 2004-04-11 - -Being exactly 6 months ahead was **really** a coincidence :) - -Contributing -============ - -We welcome many types of contributions - bug reports, pull requests (code, infrastructure or documentation fixes). For more information about how to contribute to the project, see the ``CONTRIBUTING.md`` file in the repository. - - -Author -====== -The dateutil module was written by Gustavo Niemeyer -in 2003. - -It is maintained by: - -* Gustavo Niemeyer 2003-2011 -* Tomi Pieviläinen 2012-2014 -* Yaron de Leeuw 2014-2016 -* Paul Ganssle 2015- - -Starting with version 2.4.1, all source and binary distributions will be signed -by a PGP key that has, at the very least, been signed by the key which made the -previous release. 
A table of release signing keys can be found below: - -=========== ============================ -Releases Signing key fingerprint -=========== ============================ -2.4.1- `6B49 ACBA DCF6 BD1C A206 67AB CD54 FCE3 D964 BEFB`_ (|pgp_mirror|_) -=========== ============================ - - -Contact -======= -Our mailing list is available at `dateutil@python.org `_. As it is hosted by the PSF, it is subject to the `PSF code of -conduct `_. - -License -======= - -All contributions after December 1, 2017 released under dual license - either `Apache 2.0 License `_ or the `BSD 3-Clause License `_. Contributions before December 1, 2017 - except those explicitly relicensed - are released only under the BSD 3-Clause License. - - -.. _6B49 ACBA DCF6 BD1C A206 67AB CD54 FCE3 D964 BEFB: - https://pgp.mit.edu/pks/lookup?op=vindex&search=0xCD54FCE3D964BEFB - -.. |pgp_mirror| replace:: mirror -.. _pgp_mirror: https://sks-keyservers.net/pks/lookup?op=vindex&search=0xCD54FCE3D964BEFB - - diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/RECORD b/pype/vendor/python_dateutil-2.7.3.dist-info/RECORD deleted file mode 100644 index efdb6b7c62..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/RECORD +++ /dev/null @@ -1,44 +0,0 @@ -dateutil/__init__.py,sha256=lXElASqwYGwqlrSWSeX19JwF5Be9tNecDa9ebk-0gmk,222 -dateutil/__pycache__/__init__.cpython-36.pyc,, -dateutil/__pycache__/_common.cpython-36.pyc,, -dateutil/__pycache__/_version.cpython-36.pyc,, -dateutil/__pycache__/easter.cpython-36.pyc,, -dateutil/__pycache__/relativedelta.cpython-36.pyc,, -dateutil/__pycache__/rrule.cpython-36.pyc,, -dateutil/__pycache__/tzwin.cpython-36.pyc,, -dateutil/__pycache__/utils.cpython-36.pyc,, -dateutil/_common.py,sha256=77w0yytkrxlYbSn--lDVPUMabUXRR9I3lBv_vQRUqUY,932 -dateutil/_version.py,sha256=XL-cFqVZ8TgiXGz7zwkDsdx_2gsdUEQ56sbKFKDDt7o,116 -dateutil/easter.py,sha256=0liVsgqSx-NPhaFevOJaYgEbrSu2oQQ2o9m_OEBdc-s,2684 -dateutil/parser/__init__.py,sha256=efFZ9gA3I85X8IgQ_Rq6icz4rYdkUevfK8Oxekn3uYU,1727 -dateutil/parser/__pycache__/__init__.cpython-36.pyc,, -dateutil/parser/__pycache__/_parser.cpython-36.pyc,, -dateutil/parser/__pycache__/isoparser.cpython-36.pyc,, -dateutil/parser/_parser.py,sha256=nxl6E6a3C4EsyTL7Npp_to68JoX05BJl3-FUnRag_VM,57607 -dateutil/parser/isoparser.py,sha256=N--NyotwfEShxeOoW5CvpmLRNgJIKIHYv6XgsqlYqDY,12902 -dateutil/relativedelta.py,sha256=ZJj33WexdbkXbPqS4jvyRby1v3NeNOvAgw_YEofKxQM,24418 -dateutil/rrule.py,sha256=w9v8mTO_YF6OM4kgsN1KwukBqQRFGntDBPsSXrfNUww,64802 -dateutil/tz/__init__.py,sha256=xkeNSDRpsFSeqSaaGuLPmhyTfhjZ7W7BZnKcNfFKYUA,551 -dateutil/tz/__pycache__/__init__.cpython-36.pyc,, -dateutil/tz/__pycache__/_common.cpython-36.pyc,, -dateutil/tz/__pycache__/_factories.cpython-36.pyc,, -dateutil/tz/__pycache__/tz.cpython-36.pyc,, -dateutil/tz/__pycache__/win.cpython-36.pyc,, -dateutil/tz/_common.py,sha256=VAR-3Wd2f4FYZyKg09e6PFnflcGhNEABLYBv_wjS-T8,12892 -dateutil/tz/_factories.py,sha256=JUnlX9efOpuhTnp5fDSM_e62cwNy7W5kHcZpJHDhbFE,1434 -dateutil/tz/tz.py,sha256=2s3vRGHL1d48_TGr5kEwVxVMq4tJCHMMLg0SR91AZH4,60472 -dateutil/tz/win.py,sha256=M67X4b3x6RIveSR0Dl19B30Ow8No7Lkc-Hd4xdvNz8Q,11318 -dateutil/tzwin.py,sha256=7Ar4vdQCnnM0mKR3MUjbIKsZrBVfHgdwsJZc_mGYRew,59 -dateutil/utils.py,sha256=wG0nPh2fS274tUSsgywKAz3Ahq8m_-oKWwDtYeOPJuU,1963 -dateutil/zoneinfo/__init__.py,sha256=KYg0pthCMjcp5MXSEiBJn3nMjZeNZav7rlJw5-tz1S4,5889 -dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc,, -dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc,,
-dateutil/zoneinfo/dateutil-zoneinfo.tar.gz,sha256=-EsiuJpLHtWxVvWR5EgDPs-BbGpfSTkto8-NYuaBNrw,139130 -dateutil/zoneinfo/rebuild.py,sha256=2uFJQiW3Fl8fVogrSXisJMpLeHI1zGwpvBFF43QdeF0,1719 -python_dateutil-2.7.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -python_dateutil-2.7.3.dist-info/LICENSE.txt,sha256=ugD1Gg2SgjtaHN4n2LW50jIeZ-2NqbwWPv-W1eF-V34,2889 -python_dateutil-2.7.3.dist-info/METADATA,sha256=iKIXl1SQaeVSdwgdhxWQRblzZLyNiKXVwSNxb8LmFNI,7486 -python_dateutil-2.7.3.dist-info/RECORD,, -python_dateutil-2.7.3.dist-info/WHEEL,sha256=J3CsTk7Mf2JNUyhImI-mjX-fmI4oDjyiXgWT4qgZiCE,110 -python_dateutil-2.7.3.dist-info/top_level.txt,sha256=4tjdWkhRZvF7LA_BYe_L9gB2w_p2a-z5y6ArjaRkot8,9 -python_dateutil-2.7.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/WHEEL b/pype/vendor/python_dateutil-2.7.3.dist-info/WHEEL deleted file mode 100644 index f21b51cd8a..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.31.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/top_level.txt b/pype/vendor/python_dateutil-2.7.3.dist-info/top_level.txt deleted file mode 100644 index 66501480ba..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -dateutil diff --git a/pype/vendor/python_dateutil-2.7.3.dist-info/zip-safe b/pype/vendor/python_dateutil-2.7.3.dist-info/zip-safe deleted file mode 100644 index 8b13789179..0000000000 --- a/pype/vendor/python_dateutil-2.7.3.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pype/vendor/requests/__init__.py b/pype/vendor/requests/__init__.py deleted file mode 100644 index bc168ee533..0000000000 --- a/pype/vendor/requests/__init__.py +++ /dev/null @@ -1,131 +0,0 @@ -# -*- coding: utf-8 -*- - -# __ -# /__) _ _ _ _ _/ _ -# / ( (- (/ (/ (- _) / _) -# / - -""" -Requests HTTP Library -~~~~~~~~~~~~~~~~~~~~~ - -Requests is an HTTP library, written in Python, for human beings. Basic GET -usage: - - >>> import requests - >>> r = requests.get('https://www.python.org') - >>> r.status_code - 200 - >>> 'Python is a programming language' in r.content - True - -... or POST: - - >>> payload = dict(key1='value1', key2='value2') - >>> r = requests.post('https://httpbin.org/post', data=payload) - >>> print(r.text) - { - ... - "form": { - "key2": "value2", - "key1": "value1" - }, - ... - } - -The other HTTP methods are supported - see `requests.api`. Full documentation -is at . - -:copyright: (c) 2017 by Kenneth Reitz. -:license: Apache 2.0, see LICENSE for more details. -""" - -import urllib3 -import chardet -import warnings -from .exceptions import RequestsDependencyWarning - - -def check_compatibility(urllib3_version, chardet_version): - urllib3_version = urllib3_version.split('.') - assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. - - # Sometimes, urllib3 only reports its version as 16.1. - if len(urllib3_version) == 2: - urllib3_version.append('0') - - # Check urllib3 for compatibility. - major, minor, patch = urllib3_version # noqa: F811 - major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.24 - assert major == 1 - assert minor >= 21 - assert minor <= 24 - - # Check chardet for compatibility. 
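- # Illustrative calls (not in the vendored source): with these pins, - # check_compatibility('1.24.0', '3.0.4') passes silently, while e.g. - # chardet 3.1.0 trips an assert that the caller below converts into - # a RequestsDependencyWarning.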
- major, minor, patch = chardet_version.split('.')[:3] - major, minor, patch = int(major), int(minor), int(patch) - # chardet >= 3.0.2, < 3.1.0 - assert major == 3 - assert minor < 1 - assert patch >= 2 - - -def _check_cryptography(cryptography_version): - # cryptography < 1.3.4 - try: - cryptography_version = list(map(int, cryptography_version.split('.'))) - except ValueError: - return - - if cryptography_version < [1, 3, 4]: - warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) - warnings.warn(warning, RequestsDependencyWarning) - -# Check imported dependencies for compatibility. -try: - check_compatibility(urllib3.__version__, chardet.__version__) -except (AssertionError, ValueError): - warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported " - "version!".format(urllib3.__version__, chardet.__version__), - RequestsDependencyWarning) - -# Attempt to enable urllib3's SNI support, if possible -try: - from urllib3.contrib import pyopenssl - pyopenssl.inject_into_urllib3() - - # Check cryptography version - from cryptography import __version__ as cryptography_version - _check_cryptography(cryptography_version) -except ImportError: - pass - -# urllib3's DependencyWarnings should be silenced. -from urllib3.exceptions import DependencyWarning -warnings.simplefilter('ignore', DependencyWarning) - -from .__version__ import __title__, __description__, __url__, __version__ -from .__version__ import __build__, __author__, __author_email__, __license__ -from .__version__ import __copyright__, __cake__ - -from . import utils -from . import packages -from .models import Request, Response, PreparedRequest -from .api import request, get, head, post, patch, put, delete, options -from .sessions import session, Session -from .status_codes import codes -from .exceptions import ( - RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError, - FileModeWarning, ConnectTimeout, ReadTimeout -) - -# Set default logging handler to avoid "No handler found" warnings. -import logging -from logging import NullHandler - -logging.getLogger(__name__).addHandler(NullHandler()) - -# FileModeWarnings go off per the default. -warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/pype/vendor/requests/__version__.py b/pype/vendor/requests/__version__.py deleted file mode 100644 index f5b5d03671..0000000000 --- a/pype/vendor/requests/__version__.py +++ /dev/null @@ -1,14 +0,0 @@ -# .-. .-. .-. . . .-. .-. .-. .-. -# |( |- |.| | | |- `-. | `-. -# ' ' `-' `-`.`-' `-' `-' ' `-' - -__title__ = 'requests' -__description__ = 'Python HTTP for Humans.' 
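-# The dunders below are what requests/__init__.py re-exports via -# "from .__version__ import ..." (see that module above).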
-__url__ = 'http://python-requests.org' -__version__ = '2.21.0' -__build__ = 0x022100 -__author__ = 'Kenneth Reitz' -__author_email__ = 'me@kennethreitz.org' -__license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2018 Kenneth Reitz' -__cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/pype/vendor/requests/_internal_utils.py b/pype/vendor/requests/_internal_utils.py deleted file mode 100644 index 759d9a56ba..0000000000 --- a/pype/vendor/requests/_internal_utils.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests._internal_utils -~~~~~~~~~~~~~~ - -Provides utility functions that are consumed internally by Requests -which depend on extremely few external helpers (such as compat) -""" - -from .compat import is_py2, builtin_str, str - - -def to_native_string(string, encoding='ascii'): - """Given a string object, regardless of type, returns a representation of - that string in the native string type, encoding and decoding where - necessary. This assumes ASCII unless told otherwise. - """ - if isinstance(string, builtin_str): - out = string - else: - if is_py2: - out = string.encode(encoding) - else: - out = string.decode(encoding) - - return out - - -def unicode_is_ascii(u_string): - """Determine if unicode string only contains ASCII characters. - - :param str u_string: unicode string to check. Must be unicode - and not Python 2 `str`. - :rtype: bool - """ - assert isinstance(u_string, str) - try: - u_string.encode('ascii') - return True - except UnicodeEncodeError: - return False diff --git a/pype/vendor/requests/adapters.py b/pype/vendor/requests/adapters.py deleted file mode 100644 index fa4d9b3cc9..0000000000 --- a/pype/vendor/requests/adapters.py +++ /dev/null @@ -1,533 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.adapters -~~~~~~~~~~~~~~~~~ - -This module contains the transport adapters that Requests uses to define -and maintain connections. 
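- -The central class here is HTTPAdapter, which maps each outgoing request -onto a pooled urllib3 connection.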
-""" - -import os.path -import socket - -from urllib3.poolmanager import PoolManager, proxy_from_url -from urllib3.response import HTTPResponse -from urllib3.util import parse_url -from urllib3.util import Timeout as TimeoutSauce -from urllib3.util.retry import Retry -from urllib3.exceptions import ClosedPoolError -from urllib3.exceptions import ConnectTimeoutError -from urllib3.exceptions import HTTPError as _HTTPError -from urllib3.exceptions import MaxRetryError -from urllib3.exceptions import NewConnectionError -from urllib3.exceptions import ProxyError as _ProxyError -from urllib3.exceptions import ProtocolError -from urllib3.exceptions import ReadTimeoutError -from urllib3.exceptions import SSLError as _SSLError -from urllib3.exceptions import ResponseError -from urllib3.exceptions import LocationValueError - -from .models import Response -from .compat import urlparse, basestring -from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, - get_encoding_from_headers, prepend_scheme_if_needed, - get_auth_from_url, urldefragauth, select_proxy) -from .structures import CaseInsensitiveDict -from .cookies import extract_cookies_to_jar -from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError, InvalidSchema, InvalidProxyURL, - InvalidURL) -from .auth import _basic_auth_str - -try: - from urllib3.contrib.socks import SOCKSProxyManager -except ImportError: - def SOCKSProxyManager(*args, **kwargs): - raise InvalidSchema("Missing dependencies for SOCKS support.") - -DEFAULT_POOLBLOCK = False -DEFAULT_POOLSIZE = 10 -DEFAULT_RETRIES = 0 -DEFAULT_POOL_TIMEOUT = None - - -class BaseAdapter(object): - """The Base Transport Adapter""" - - def __init__(self): - super(BaseAdapter, self).__init__() - - def send(self, request, stream=False, timeout=None, verify=True, - cert=None, proxies=None): - """Sends PreparedRequest object. Returns Response object. - - :param request: The :class:`PreparedRequest ` being sent. - :param stream: (optional) Whether to stream the request content. - :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) ` tuple. - :type timeout: float or tuple - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. - """ - raise NotImplementedError - - def close(self): - """Cleans up adapter specific items.""" - raise NotImplementedError - - -class HTTPAdapter(BaseAdapter): - """The built-in HTTP Adapter for urllib3. - - Provides a general-case interface for Requests sessions to contact HTTP and - HTTPS urls by implementing the Transport Adapter interface. This class will - usually be created by the :class:`Session ` class under the - covers. - - :param pool_connections: The number of urllib3 connection pools to cache. - :param pool_maxsize: The maximum number of connections to save in the pool. - :param max_retries: The maximum number of retries each connection - should attempt. Note, this applies only to failed DNS lookups, socket - connections and connection timeouts, never to requests where data has - made it to the server. By default, Requests does not retry failed - connections. 
If you need granular control over the conditions under - which we retry a request, import urllib3's ``Retry`` class and pass - that instead. - :param pool_block: Whether the connection pool should block for connections. - - Usage:: - - >>> import requests - >>> s = requests.Session() - >>> a = requests.adapters.HTTPAdapter(max_retries=3) - >>> s.mount('http://', a) - """ - __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', - '_pool_block'] - - def __init__(self, pool_connections=DEFAULT_POOLSIZE, - pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, - pool_block=DEFAULT_POOLBLOCK): - if max_retries == DEFAULT_RETRIES: - self.max_retries = Retry(0, read=False) - else: - self.max_retries = Retry.from_int(max_retries) - self.config = {} - self.proxy_manager = {} - - super(HTTPAdapter, self).__init__() - - self._pool_connections = pool_connections - self._pool_maxsize = pool_maxsize - self._pool_block = pool_block - - self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) - - def __getstate__(self): - return {attr: getattr(self, attr, None) for attr in self.__attrs__} - - def __setstate__(self, state): - # Can't handle by adding 'proxy_manager' to self.__attrs__ because - # self.poolmanager uses a lambda function, which isn't pickleable. - self.proxy_manager = {} - self.config = {} - - for attr, value in state.items(): - setattr(self, attr, value) - - self.init_poolmanager(self._pool_connections, self._pool_maxsize, - block=self._pool_block) - - def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): - """Initializes a urllib3 PoolManager. - - This method should not be called from user code, and is only - exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param connections: The number of urllib3 connection pools to cache. - :param maxsize: The maximum number of connections to save in the pool. - :param block: Block when no free connections are available. - :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. - """ - # save these values for pickling - self._pool_connections = connections - self._pool_maxsize = maxsize - self._pool_block = block - - self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, - block=block, strict=True, **pool_kwargs) - - def proxy_manager_for(self, proxy, **proxy_kwargs): - """Return urllib3 ProxyManager for the given proxy. - - This method should not be called from user code, and is only - exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param proxy: The proxy to return a urllib3 ProxyManager for. - :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. - :returns: ProxyManager - :rtype: urllib3.ProxyManager - """ - if proxy in self.proxy_manager: - manager = self.proxy_manager[proxy] - elif proxy.lower().startswith('socks'): - username, password = get_auth_from_url(proxy) - manager = self.proxy_manager[proxy] = SOCKSProxyManager( - proxy, - username=username, - password=password, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block, - **proxy_kwargs - ) - else: - proxy_headers = self.proxy_headers(proxy) - manager = self.proxy_manager[proxy] = proxy_from_url( - proxy, - proxy_headers=proxy_headers, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block, - **proxy_kwargs) - - return manager - - def cert_verify(self, conn, url, verify, cert): - """Verify a SSL certificate. 
This method should not be called from user - code, and is only exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param conn: The urllib3 connection object associated with the cert. - :param url: The requested URL. - :param verify: Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use - :param cert: The SSL certificate to verify. - """ - if url.lower().startswith('https') and verify: - - cert_loc = None - - # Allow self-specified cert location. - if verify is not True: - cert_loc = verify - - if not cert_loc: - cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) - - if not cert_loc or not os.path.exists(cert_loc): - raise IOError("Could not find a suitable TLS CA certificate bundle, " - "invalid path: {}".format(cert_loc)) - - conn.cert_reqs = 'CERT_REQUIRED' - - if not os.path.isdir(cert_loc): - conn.ca_certs = cert_loc - else: - conn.ca_cert_dir = cert_loc - else: - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None - conn.ca_cert_dir = None - - if cert: - if not isinstance(cert, basestring): - conn.cert_file = cert[0] - conn.key_file = cert[1] - else: - conn.cert_file = cert - conn.key_file = None - if conn.cert_file and not os.path.exists(conn.cert_file): - raise IOError("Could not find the TLS certificate file, " - "invalid path: {}".format(conn.cert_file)) - if conn.key_file and not os.path.exists(conn.key_file): - raise IOError("Could not find the TLS key file, " - "invalid path: {}".format(conn.key_file)) - - def build_response(self, req, resp): - """Builds a :class:`Response ` object from a urllib3 - response. This should not be called from user code, and is only exposed - for use when subclassing the - :class:`HTTPAdapter ` - - :param req: The :class:`PreparedRequest ` used to generate the response. - :param resp: The urllib3 response object. - :rtype: requests.Response - """ - response = Response() - - # Fallback to None if there's no status_code, for whatever reason. - response.status_code = getattr(resp, 'status', None) - - # Make headers case-insensitive. - response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) - - # Set encoding. - response.encoding = get_encoding_from_headers(response.headers) - response.raw = resp - response.reason = response.raw.reason - - if isinstance(req.url, bytes): - response.url = req.url.decode('utf-8') - else: - response.url = req.url - - # Add new cookies from the server. - extract_cookies_to_jar(response.cookies, req, resp) - - # Give the Response some context. - response.request = req - response.connection = self - - return response - - def get_connection(self, url, proxies=None): - """Returns a urllib3 connection for the given URL. This should not be - called from user code, and is only exposed for use when subclassing the - :class:`HTTPAdapter `. - - :param url: The URL to connect to. - :param proxies: (optional) A Requests-style dictionary of proxies used on this request. - :rtype: urllib3.ConnectionPool - """ - proxy = select_proxy(url, proxies) - - if proxy: - proxy = prepend_scheme_if_needed(proxy, 'http') - proxy_url = parse_url(proxy) - if not proxy_url.host: - raise InvalidProxyURL("Please check proxy URL. 
It is malformed" - " and could be missing the host.") - proxy_manager = self.proxy_manager_for(proxy) - conn = proxy_manager.connection_from_url(url) - else: - # Only scheme should be lower case - parsed = urlparse(url) - url = parsed.geturl() - conn = self.poolmanager.connection_from_url(url) - - return conn - - def close(self): - """Disposes of any internal state. - - Currently, this closes the PoolManager and any active ProxyManager, - which closes any pooled connections. - """ - self.poolmanager.clear() - for proxy in self.proxy_manager.values(): - proxy.clear() - - def request_url(self, request, proxies): - """Obtain the url to use when making the final request. - - If the message is being sent through a HTTP proxy, the full URL has to - be used. Otherwise, we should only use the path portion of the URL. - - This should not be called from user code, and is only exposed for use - when subclassing the - :class:`HTTPAdapter `. - - :param request: The :class:`PreparedRequest ` being sent. - :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. - :rtype: str - """ - proxy = select_proxy(request.url, proxies) - scheme = urlparse(request.url).scheme - - is_proxied_http_request = (proxy and scheme != 'https') - using_socks_proxy = False - if proxy: - proxy_scheme = urlparse(proxy).scheme.lower() - using_socks_proxy = proxy_scheme.startswith('socks') - - url = request.path_url - if is_proxied_http_request and not using_socks_proxy: - url = urldefragauth(request.url) - - return url - - def add_headers(self, request, **kwargs): - """Add any headers needed by the connection. As of v2.0 this does - nothing by default, but is left for overriding by users that subclass - the :class:`HTTPAdapter `. - - This should not be called from user code, and is only exposed for use - when subclassing the - :class:`HTTPAdapter `. - - :param request: The :class:`PreparedRequest ` to add headers to. - :param kwargs: The keyword arguments from the call to send(). - """ - pass - - def proxy_headers(self, proxy): - """Returns a dictionary of the headers to add to any request sent - through a proxy. This works with urllib3 magic to ensure that they are - correctly sent to the proxy, rather than in a tunnelled request if - CONNECT is being used. - - This should not be called from user code, and is only exposed for use - when subclassing the - :class:`HTTPAdapter `. - - :param proxy: The url of the proxy being used for this request. - :rtype: dict - """ - headers = {} - username, password = get_auth_from_url(proxy) - - if username: - headers['Proxy-Authorization'] = _basic_auth_str(username, - password) - - return headers - - def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): - """Sends PreparedRequest object. Returns Response object. - - :param request: The :class:`PreparedRequest ` being sent. - :param stream: (optional) Whether to stream the request content. - :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) ` tuple. - :type timeout: float or tuple or urllib3 Timeout object - :param verify: (optional) Either a boolean, in which case it controls whether - we verify the server's TLS certificate, or a string, in which case it - must be a path to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. 
- :rtype: requests.Response - """ - - try: - conn = self.get_connection(request.url, proxies) - except LocationValueError as e: - raise InvalidURL(e, request=request) - - self.cert_verify(conn, request.url, verify, cert) - url = self.request_url(request, proxies) - self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) - - chunked = not (request.body is None or 'Content-Length' in request.headers) - - if isinstance(timeout, tuple): - try: - connect, read = timeout - timeout = TimeoutSauce(connect=connect, read=read) - except ValueError as e: - # this may raise a string formatting error. - err = ("Invalid timeout {}. Pass a (connect, read) " - "timeout tuple, or a single float to set " - "both timeouts to the same value".format(timeout)) - raise ValueError(err) - elif isinstance(timeout, TimeoutSauce): - pass - else: - timeout = TimeoutSauce(connect=timeout, read=timeout) - - try: - if not chunked: - resp = conn.urlopen( - method=request.method, - url=url, - body=request.body, - headers=request.headers, - redirect=False, - assert_same_host=False, - preload_content=False, - decode_content=False, - retries=self.max_retries, - timeout=timeout - ) - - # Send the request. - else: - if hasattr(conn, 'proxy_pool'): - conn = conn.proxy_pool - - low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) - - try: - low_conn.putrequest(request.method, - url, - skip_accept_encoding=True) - - for header, value in request.headers.items(): - low_conn.putheader(header, value) - - low_conn.endheaders() - - for i in request.body: - low_conn.send(hex(len(i))[2:].encode('utf-8')) - low_conn.send(b'\r\n') - low_conn.send(i) - low_conn.send(b'\r\n') - low_conn.send(b'0\r\n\r\n') - - # Receive the response from the server - try: - # For Python 2.7, use buffering of HTTP responses - r = low_conn.getresponse(buffering=True) - except TypeError: - # For compatibility with Python 3.3+ - r = low_conn.getresponse() - - resp = HTTPResponse.from_httplib( - r, - pool=conn, - connection=low_conn, - preload_content=False, - decode_content=False - ) - except: - # If we hit any problems here, clean up the connection. - # Then, reraise so that we can handle the actual exception. - low_conn.close() - raise - - except (ProtocolError, socket.error) as err: - raise ConnectionError(err, request=request) - - except MaxRetryError as e: - if isinstance(e.reason, ConnectTimeoutError): - # TODO: Remove this in 3.0.0: see #2811 - if not isinstance(e.reason, NewConnectionError): - raise ConnectTimeout(e, request=request) - - if isinstance(e.reason, ResponseError): - raise RetryError(e, request=request) - - if isinstance(e.reason, _ProxyError): - raise ProxyError(e, request=request) - - if isinstance(e.reason, _SSLError): - # This branch is for urllib3 v1.22 and later. 
- raise SSLError(e, request=request) - - raise ConnectionError(e, request=request) - - except ClosedPoolError as e: - raise ConnectionError(e, request=request) - - except _ProxyError as e: - raise ProxyError(e) - - except (_SSLError, _HTTPError) as e: - if isinstance(e, _SSLError): - # This branch is for urllib3 versions earlier than v1.22 - raise SSLError(e, request=request) - elif isinstance(e, ReadTimeoutError): - raise ReadTimeout(e, request=request) - else: - raise - - return self.build_response(request, resp) diff --git a/pype/vendor/requests/api.py b/pype/vendor/requests/api.py deleted file mode 100644 index abada96d46..0000000000 --- a/pype/vendor/requests/api.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.api -~~~~~~~~~~~~ - -This module implements the Requests API. - -:copyright: (c) 2012 by Kenneth Reitz. -:license: Apache2, see LICENSE for more details. -""" - -from . import sessions - - -def request(method, url, **kwargs): - """Constructs and sends a :class:`Request `. - - :param method: method for the new :class:`Request` object. - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. - :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. - :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. - ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` - or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string - defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers - to add for the file. - :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) How many seconds to wait for the server to send data - before giving up, as a float, or a :ref:`(connect timeout, read - timeout) ` tuple. - :type timeout: float or tuple - :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. - :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. - :param stream: (optional) if ``False``, the response content will be immediately downloaded. - :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. - :return: :class:`Response ` object - :rtype: requests.Response - - Usage:: - - >>> import requests - >>> req = requests.request('GET', 'https://httpbin.org/get') - - """ - - # By using the 'with' statement we are sure the session is closed, thus we - # avoid leaving sockets open which can trigger a ResourceWarning in some - # cases, and look like a memory leak in others. 
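- # The trade-off is that nothing is pooled across calls made through - # these helpers; callers that want keep-alive and connection reuse - # should create a requests.Session of their own.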
- with sessions.Session() as session: - return session.request(method=method, url=url, **kwargs) - - -def get(url, params=None, **kwargs): - r"""Sends a GET request. - - :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', True) - return request('get', url, params=params, **kwargs) - - -def options(url, **kwargs): - r"""Sends an OPTIONS request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', True) - return request('options', url, **kwargs) - - -def head(url, **kwargs): - r"""Sends a HEAD request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - kwargs.setdefault('allow_redirects', False) - return request('head', url, **kwargs) - - -def post(url, data=None, json=None, **kwargs): - r"""Sends a POST request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('post', url, data=data, json=json, **kwargs) - - -def put(url, data=None, **kwargs): - r"""Sends a PUT request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('put', url, data=data, **kwargs) - - -def patch(url, data=None, **kwargs): - r"""Sends a PATCH request. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('patch', url, data=data, **kwargs) - - -def delete(url, **kwargs): - r"""Sends a DELETE request. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response ` object - :rtype: requests.Response - """ - - return request('delete', url, **kwargs) diff --git a/pype/vendor/requests/auth.py b/pype/vendor/requests/auth.py deleted file mode 100644 index bdde51c7fd..0000000000 --- a/pype/vendor/requests/auth.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.auth -~~~~~~~~~~~~~ - -This module contains the authentication handlers for Requests. 
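-(HTTPBasicAuth, HTTPProxyAuth and HTTPDigestAuth, all deriving from -AuthBase below).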
-""" - -import os -import re -import time -import hashlib -import threading -import warnings - -from base64 import b64encode - -from .compat import urlparse, str, basestring -from .cookies import extract_cookies_to_jar -from ._internal_utils import to_native_string -from .utils import parse_dict_header - -CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' -CONTENT_TYPE_MULTI_PART = 'multipart/form-data' - - -def _basic_auth_str(username, password): - """Returns a Basic Auth string.""" - - # "I want us to put a big-ol' comment on top of it that - # says that this behaviour is dumb but we need to preserve - # it because people are relying on it." - # - Lukasa - # - # These are here solely to maintain backwards compatibility - # for things like ints. This will be removed in 3.0.0. - if not isinstance(username, basestring): - warnings.warn( - "Non-string usernames will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(username), - category=DeprecationWarning, - ) - username = str(username) - - if not isinstance(password, basestring): - warnings.warn( - "Non-string passwords will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(password), - category=DeprecationWarning, - ) - password = str(password) - # -- End Removal -- - - if isinstance(username, str): - username = username.encode('latin1') - - if isinstance(password, str): - password = password.encode('latin1') - - authstr = 'Basic ' + to_native_string( - b64encode(b':'.join((username, password))).strip() - ) - - return authstr - - -class AuthBase(object): - """Base class that all auth implementations derive from""" - - def __call__(self, r): - raise NotImplementedError('Auth hooks must be callable.') - - -class HTTPBasicAuth(AuthBase): - """Attaches HTTP Basic Authentication to the given Request object.""" - - def __init__(self, username, password): - self.username = username - self.password = password - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other - - def __call__(self, r): - r.headers['Authorization'] = _basic_auth_str(self.username, self.password) - return r - - -class HTTPProxyAuth(HTTPBasicAuth): - """Attaches HTTP Proxy Authentication to a given Request object.""" - - def __call__(self, r): - r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) - return r - - -class HTTPDigestAuth(AuthBase): - """Attaches HTTP Digest Authentication to the given Request object.""" - - def __init__(self, username, password): - self.username = username - self.password = password - # Keep state in per-thread local storage - self._thread_local = threading.local() - - def init_per_thread_state(self): - # Ensure state is initialized just once per-thread - if not hasattr(self._thread_local, 'init'): - self._thread_local.init = True - self._thread_local.last_nonce = '' - self._thread_local.nonce_count = 0 - self._thread_local.chal = {} - self._thread_local.pos = None - self._thread_local.num_401_calls = None - - def build_digest_header(self, method, url): - """ - :rtype: str - """ - - realm = self._thread_local.chal['realm'] - nonce = self._thread_local.chal['nonce'] - qop = 
self._thread_local.chal.get('qop') - algorithm = self._thread_local.chal.get('algorithm') - opaque = self._thread_local.chal.get('opaque') - hash_utf8 = None - - if algorithm is None: - _algorithm = 'MD5' - else: - _algorithm = algorithm.upper() - # lambdas assume digest modules are imported at the top level - if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': - def md5_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.md5(x).hexdigest() - hash_utf8 = md5_utf8 - elif _algorithm == 'SHA': - def sha_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha1(x).hexdigest() - hash_utf8 = sha_utf8 - elif _algorithm == 'SHA-256': - def sha256_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha256(x).hexdigest() - hash_utf8 = sha256_utf8 - elif _algorithm == 'SHA-512': - def sha512_utf8(x): - if isinstance(x, str): - x = x.encode('utf-8') - return hashlib.sha512(x).hexdigest() - hash_utf8 = sha512_utf8 - - KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) - - if hash_utf8 is None: - return None - - # XXX not implemented yet - entdig = None - p_parsed = urlparse(url) - #: path is request-uri defined in RFC 2616 which should not be empty - path = p_parsed.path or "/" - if p_parsed.query: - path += '?' + p_parsed.query - - A1 = '%s:%s:%s' % (self.username, realm, self.password) - A2 = '%s:%s' % (method, path) - - HA1 = hash_utf8(A1) - HA2 = hash_utf8(A2) - - if nonce == self._thread_local.last_nonce: - self._thread_local.nonce_count += 1 - else: - self._thread_local.nonce_count = 1 - ncvalue = '%08x' % self._thread_local.nonce_count - s = str(self._thread_local.nonce_count).encode('utf-8') - s += nonce.encode('utf-8') - s += time.ctime().encode('utf-8') - s += os.urandom(8) - - cnonce = (hashlib.sha1(s).hexdigest()[:16]) - if _algorithm == 'MD5-SESS': - HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) - - if not qop: - respdig = KD(HA1, "%s:%s" % (nonce, HA2)) - elif qop == 'auth' or 'auth' in qop.split(','): - noncebit = "%s:%s:%s:%s:%s" % ( - nonce, ncvalue, cnonce, 'auth', HA2 - ) - respdig = KD(HA1, noncebit) - else: - # XXX handle auth-int. - return None - - self._thread_local.last_nonce = nonce - - # XXX should the partial digests be encoded too? - base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ - 'response="%s"' % (self.username, realm, nonce, path, respdig) - if opaque: - base += ', opaque="%s"' % opaque - if algorithm: - base += ', algorithm="%s"' % algorithm - if entdig: - base += ', digest="%s"' % entdig - if qop: - base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) - - return 'Digest %s' % (base) - - def handle_redirect(self, r, **kwargs): - """Reset num_401_calls counter on redirects.""" - if r.is_redirect: - self._thread_local.num_401_calls = 1 - - def handle_401(self, r, **kwargs): - """ - Takes the given response and tries digest-auth, if needed. - - :rtype: requests.Response - """ - - # If response is not 4xx, do not auth - # See https://github.com/requests/requests/issues/3772 - if not 400 <= r.status_code < 500: - self._thread_local.num_401_calls = 1 - return r - - if self._thread_local.pos is not None: - # Rewind the file position indicator of the body to where - # it was to resend the request. 
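- # (the position was captured in __call__ via r.body.tell() before - # the request was first sent)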
- r.request.body.seek(self._thread_local.pos) - s_auth = r.headers.get('www-authenticate', '') - - if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: - - self._thread_local.num_401_calls += 1 - pat = re.compile(r'digest ', flags=re.IGNORECASE) - self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) - - # Consume content and release the original connection - # to allow our new request to reuse the same one. - r.content - r.close() - prep = r.request.copy() - extract_cookies_to_jar(prep._cookies, r.request, r.raw) - prep.prepare_cookies(prep._cookies) - - prep.headers['Authorization'] = self.build_digest_header( - prep.method, prep.url) - _r = r.connection.send(prep, **kwargs) - _r.history.append(r) - _r.request = prep - - return _r - - self._thread_local.num_401_calls = 1 - return r - - def __call__(self, r): - # Initialize per-thread state, if needed - self.init_per_thread_state() - # If we have a saved nonce, skip the 401 - if self._thread_local.last_nonce: - r.headers['Authorization'] = self.build_digest_header(r.method, r.url) - try: - self._thread_local.pos = r.body.tell() - except AttributeError: - # In the case of HTTPDigestAuth being reused and the body of - # the previous request was a file-like object, pos has the - # file position of the previous body. Ensure it's set to - # None. - self._thread_local.pos = None - r.register_hook('response', self.handle_401) - r.register_hook('response', self.handle_redirect) - self._thread_local.num_401_calls = 1 - - return r - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other diff --git a/pype/vendor/requests/certs.py b/pype/vendor/requests/certs.py deleted file mode 100644 index d1a378d787..0000000000 --- a/pype/vendor/requests/certs.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -""" -requests.certs -~~~~~~~~~~~~~~ - -This module returns the preferred default CA certificate bundle. There is -only one — the one from the certifi package. - -If you are packaging Requests, e.g., for a Linux distribution or a managed -environment, you can change the definition of where() to return a separately -packaged CA bundle. -""" -from certifi import where - -if __name__ == '__main__': - print(where()) diff --git a/pype/vendor/requests/compat.py b/pype/vendor/requests/compat.py deleted file mode 100644 index c44b35efb9..0000000000 --- a/pype/vendor/requests/compat.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.compat -~~~~~~~~~~~~~~~ - -This module handles import compatibility issues between Python 2 and -Python 3. -""" - -import chardet - -import sys - -# ------- -# Pythons -# ------- - -# Syntax sugar. -_ver = sys.version_info - -#: Python 2.x? -is_py2 = (_ver[0] == 2) - -#: Python 3.x? 
-is_py3 = (_ver[0] == 3) - -try: - import simplejson as json -except ImportError: - import json - -# --------- -# Specifics -# --------- - -if is_py2: - from urllib import ( - quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, - proxy_bypass, proxy_bypass_environment, getproxies_environment) - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag - from urllib2 import parse_http_list - import cookielib - from Cookie import Morsel - from StringIO import StringIO - from collections import Callable, Mapping, MutableMapping, OrderedDict - - - builtin_str = str - bytes = str - str = unicode - basestring = basestring - numeric_types = (int, long, float) - integer_types = (int, long) - -elif is_py3: - from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag - from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment - from http import cookiejar as cookielib - from http.cookies import Morsel - from io import StringIO - from collections import OrderedDict - from collections.abc import Callable, Mapping, MutableMapping - - builtin_str = str - str = str - bytes = bytes - basestring = (str, bytes) - numeric_types = (int, float) - integer_types = (int,) diff --git a/pype/vendor/requests/cookies.py b/pype/vendor/requests/cookies.py deleted file mode 100644 index 56fccd9c25..0000000000 --- a/pype/vendor/requests/cookies.py +++ /dev/null @@ -1,549 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.cookies -~~~~~~~~~~~~~~~~ - -Compatibility code to be able to use `cookielib.CookieJar` with requests. - -requests.utils imports from here, so be careful with imports. -""" - -import copy -import time -import calendar - -from ._internal_utils import to_native_string -from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping - -try: - import threading -except ImportError: - import dummy_threading as threading - - -class MockRequest(object): - """Wraps a `requests.Request` to mimic a `urllib2.Request`. - - The code in `cookielib.CookieJar` expects this interface in order to correctly - manage cookie policies, i.e., determine whether a cookie can be set, given the - domains of the request and the cookie. - - The original request object is read-only. The client is responsible for collecting - the new headers via `get_new_headers()` and interpreting them appropriately. You - probably want `get_cookie_header`, defined below. 
- """ - - def __init__(self, request): - self._r = request - self._new_headers = {} - self.type = urlparse(self._r.url).scheme - - def get_type(self): - return self.type - - def get_host(self): - return urlparse(self._r.url).netloc - - def get_origin_req_host(self): - return self.get_host() - - def get_full_url(self): - # Only return the response's URL if the user hadn't set the Host - # header - if not self._r.headers.get('Host'): - return self._r.url - # If they did set it, retrieve it and reconstruct the expected domain - host = to_native_string(self._r.headers['Host'], encoding='utf-8') - parsed = urlparse(self._r.url) - # Reconstruct the URL as we expect it - return urlunparse([ - parsed.scheme, host, parsed.path, parsed.params, parsed.query, - parsed.fragment - ]) - - def is_unverifiable(self): - return True - - def has_header(self, name): - return name in self._r.headers or name in self._new_headers - - def get_header(self, name, default=None): - return self._r.headers.get(name, self._new_headers.get(name, default)) - - def add_header(self, key, val): - """cookielib has no legitimate use for this method; add it back if you find one.""" - raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") - - def add_unredirected_header(self, name, value): - self._new_headers[name] = value - - def get_new_headers(self): - return self._new_headers - - @property - def unverifiable(self): - return self.is_unverifiable() - - @property - def origin_req_host(self): - return self.get_origin_req_host() - - @property - def host(self): - return self.get_host() - - -class MockResponse(object): - """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. - - ...what? Basically, expose the parsed HTTP headers from the server response - the way `cookielib` expects to see them. - """ - - def __init__(self, headers): - """Make a MockResponse for `cookielib` to read. - - :param headers: a httplib.HTTPMessage or analogous carrying the headers - """ - self._headers = headers - - def info(self): - return self._headers - - def getheaders(self, name): - self._headers.getheaders(name) - - -def extract_cookies_to_jar(jar, request, response): - """Extract the cookies from the response into a CookieJar. - - :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) - :param request: our own requests.Request object - :param response: urllib3.HTTPResponse object - """ - if not (hasattr(response, '_original_response') and - response._original_response): - return - # the _original_response field is the wrapped httplib.HTTPResponse object, - req = MockRequest(request) - # pull out the HTTPMessage with the headers and put it in the mock: - res = MockResponse(response._original_response.msg) - jar.extract_cookies(res, req) - - -def get_cookie_header(jar, request): - """ - Produce an appropriate Cookie header string to be sent with `request`, or None. - - :rtype: str - """ - r = MockRequest(request) - jar.add_cookie_header(r) - return r.get_new_headers().get('Cookie') - - -def remove_cookie_by_name(cookiejar, name, domain=None, path=None): - """Unsets a cookie by name, by default over all domains and paths. - - Wraps CookieJar.clear(), is O(n). 
- """ - clearables = [] - for cookie in cookiejar: - if cookie.name != name: - continue - if domain is not None and domain != cookie.domain: - continue - if path is not None and path != cookie.path: - continue - clearables.append((cookie.domain, cookie.path, cookie.name)) - - for domain, path, name in clearables: - cookiejar.clear(domain, path, name) - - -class CookieConflictError(RuntimeError): - """There are two cookies that meet the criteria specified in the cookie jar. - Use .get and .set and include domain and path args in order to be more specific. - """ - - -class RequestsCookieJar(cookielib.CookieJar, MutableMapping): - """Compatibility class; is a cookielib.CookieJar, but exposes a dict - interface. - - This is the CookieJar we create by default for requests and sessions that - don't specify one, since some clients may expect response.cookies and - session.cookies to support dict operations. - - Requests does not use the dict interface internally; it's just for - compatibility with external client code. All requests code should work - out of the box with externally provided instances of ``CookieJar``, e.g. - ``LWPCookieJar`` and ``FileCookieJar``. - - Unlike a regular CookieJar, this class is pickleable. - - .. warning:: dictionary operations that are normally O(1) may be O(n). - """ - - def get(self, name, default=None, domain=None, path=None): - """Dict-like get() that also supports optional domain and path args in - order to resolve naming collisions from using one cookie jar over - multiple domains. - - .. warning:: operation is O(n), not O(1). - """ - try: - return self._find_no_duplicates(name, domain, path) - except KeyError: - return default - - def set(self, name, value, **kwargs): - """Dict-like set() that also supports optional domain and path args in - order to resolve naming collisions from using one cookie jar over - multiple domains. - """ - # support client code that unsets cookies by assignment of a None value: - if value is None: - remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) - return - - if isinstance(value, Morsel): - c = morsel_to_cookie(value) - else: - c = create_cookie(name, value, **kwargs) - self.set_cookie(c) - return c - - def iterkeys(self): - """Dict-like iterkeys() that returns an iterator of names of cookies - from the jar. - - .. seealso:: itervalues() and iteritems(). - """ - for cookie in iter(self): - yield cookie.name - - def keys(self): - """Dict-like keys() that returns a list of names of cookies from the - jar. - - .. seealso:: values() and items(). - """ - return list(self.iterkeys()) - - def itervalues(self): - """Dict-like itervalues() that returns an iterator of values of cookies - from the jar. - - .. seealso:: iterkeys() and iteritems(). - """ - for cookie in iter(self): - yield cookie.value - - def values(self): - """Dict-like values() that returns a list of values of cookies from the - jar. - - .. seealso:: keys() and items(). - """ - return list(self.itervalues()) - - def iteritems(self): - """Dict-like iteritems() that returns an iterator of name-value tuples - from the jar. - - .. seealso:: iterkeys() and itervalues(). - """ - for cookie in iter(self): - yield cookie.name, cookie.value - - def items(self): - """Dict-like items() that returns a list of name-value tuples from the - jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a - vanilla python dict of key value pairs. - - .. seealso:: keys() and values(). 
- """ - return list(self.iteritems()) - - def list_domains(self): - """Utility method to list all the domains in the jar.""" - domains = [] - for cookie in iter(self): - if cookie.domain not in domains: - domains.append(cookie.domain) - return domains - - def list_paths(self): - """Utility method to list all the paths in the jar.""" - paths = [] - for cookie in iter(self): - if cookie.path not in paths: - paths.append(cookie.path) - return paths - - def multiple_domains(self): - """Returns True if there are multiple domains in the jar. - Returns False otherwise. - - :rtype: bool - """ - domains = [] - for cookie in iter(self): - if cookie.domain is not None and cookie.domain in domains: - return True - domains.append(cookie.domain) - return False # there is only one domain in jar - - def get_dict(self, domain=None, path=None): - """Takes as an argument an optional domain and path and returns a plain - old Python dict of name-value pairs of cookies that meet the - requirements. - - :rtype: dict - """ - dictionary = {} - for cookie in iter(self): - if ( - (domain is None or cookie.domain == domain) and - (path is None or cookie.path == path) - ): - dictionary[cookie.name] = cookie.value - return dictionary - - def __contains__(self, name): - try: - return super(RequestsCookieJar, self).__contains__(name) - except CookieConflictError: - return True - - def __getitem__(self, name): - """Dict-like __getitem__() for compatibility with client code. Throws - exception if there are more than one cookie with name. In that case, - use the more explicit get() method instead. - - .. warning:: operation is O(n), not O(1). - """ - return self._find_no_duplicates(name) - - def __setitem__(self, name, value): - """Dict-like __setitem__ for compatibility with client code. Throws - exception if there is already a cookie of that name in the jar. In that - case, use the more explicit set() method instead. - """ - self.set(name, value) - - def __delitem__(self, name): - """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s - ``remove_cookie_by_name()``. - """ - remove_cookie_by_name(self, name) - - def set_cookie(self, cookie, *args, **kwargs): - if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): - cookie.value = cookie.value.replace('\\"', '') - return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) - - def update(self, other): - """Updates this jar with cookies from another CookieJar or dict-like""" - if isinstance(other, cookielib.CookieJar): - for cookie in other: - self.set_cookie(copy.copy(cookie)) - else: - super(RequestsCookieJar, self).update(other) - - def _find(self, name, domain=None, path=None): - """Requests uses this method internally to get cookie values. - - If there are conflicting cookies, _find arbitrarily chooses one. - See _find_no_duplicates if you want an exception thrown if there are - conflicting cookies. - - :param name: a string containing name of cookie - :param domain: (optional) string containing domain of cookie - :param path: (optional) string containing path of cookie - :return: cookie.value - """ - for cookie in iter(self): - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - return cookie.value - - raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) - - def _find_no_duplicates(self, name, domain=None, path=None): - """Both ``__get_item__`` and ``get`` call this function: it's never - used elsewhere in Requests. 
- - :param name: a string containing name of cookie - :param domain: (optional) string containing domain of cookie - :param path: (optional) string containing path of cookie - :raises KeyError: if cookie is not found - :raises CookieConflictError: if there are multiple cookies - that match name and optionally domain and path - :return: cookie.value - """ - toReturn = None - for cookie in iter(self): - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - if toReturn is not None: # if there are multiple cookies that meet passed in criteria - raise CookieConflictError('There are multiple cookies with name, %r' % (name)) - toReturn = cookie.value # we will eventually return this as long as no cookie conflict - - if toReturn: - return toReturn - raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) - - def __getstate__(self): - """Unlike a normal CookieJar, this class is pickleable.""" - state = self.__dict__.copy() - # remove the unpickleable RLock object - state.pop('_cookies_lock') - return state - - def __setstate__(self, state): - """Unlike a normal CookieJar, this class is pickleable.""" - self.__dict__.update(state) - if '_cookies_lock' not in self.__dict__: - self._cookies_lock = threading.RLock() - - def copy(self): - """Return a copy of this RequestsCookieJar.""" - new_cj = RequestsCookieJar() - new_cj.set_policy(self.get_policy()) - new_cj.update(self) - return new_cj - - def get_policy(self): - """Return the CookiePolicy instance used.""" - return self._policy - - -def _copy_cookie_jar(jar): - if jar is None: - return None - - if hasattr(jar, 'copy'): - # We're dealing with an instance of RequestsCookieJar - return jar.copy() - # We're dealing with a generic CookieJar instance - new_jar = copy.copy(jar) - new_jar.clear() - for cookie in jar: - new_jar.set_cookie(copy.copy(cookie)) - return new_jar - - -def create_cookie(name, value, **kwargs): - """Make a cookie from underspecified parameters. - - By default, the pair of `name` and `value` will be set for the domain '' - and sent on every request (this is sometimes called a "supercookie"). 
- """ - result = { - 'version': 0, - 'name': name, - 'value': value, - 'port': None, - 'domain': '', - 'path': '/', - 'secure': False, - 'expires': None, - 'discard': True, - 'comment': None, - 'comment_url': None, - 'rest': {'HttpOnly': None}, - 'rfc2109': False, - } - - badargs = set(kwargs) - set(result) - if badargs: - err = 'create_cookie() got unexpected keyword arguments: %s' - raise TypeError(err % list(badargs)) - - result.update(kwargs) - result['port_specified'] = bool(result['port']) - result['domain_specified'] = bool(result['domain']) - result['domain_initial_dot'] = result['domain'].startswith('.') - result['path_specified'] = bool(result['path']) - - return cookielib.Cookie(**result) - - -def morsel_to_cookie(morsel): - """Convert a Morsel object into a Cookie containing the one k/v pair.""" - - expires = None - if morsel['max-age']: - try: - expires = int(time.time() + int(morsel['max-age'])) - except ValueError: - raise TypeError('max-age: %s must be integer' % morsel['max-age']) - elif morsel['expires']: - time_template = '%a, %d-%b-%Y %H:%M:%S GMT' - expires = calendar.timegm( - time.strptime(morsel['expires'], time_template) - ) - return create_cookie( - comment=morsel['comment'], - comment_url=bool(morsel['comment']), - discard=False, - domain=morsel['domain'], - expires=expires, - name=morsel.key, - path=morsel['path'], - port=None, - rest={'HttpOnly': morsel['httponly']}, - rfc2109=False, - secure=bool(morsel['secure']), - value=morsel.value, - version=morsel['version'] or 0, - ) - - -def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): - """Returns a CookieJar from a key/value dictionary. - - :param cookie_dict: Dict of key/values to insert into CookieJar. - :param cookiejar: (optional) A cookiejar to add the cookies to. - :param overwrite: (optional) If False, will not replace cookies - already in the jar with new ones. - :rtype: CookieJar - """ - if cookiejar is None: - cookiejar = RequestsCookieJar() - - if cookie_dict is not None: - names_from_jar = [cookie.name for cookie in cookiejar] - for name in cookie_dict: - if overwrite or (name not in names_from_jar): - cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) - - return cookiejar - - -def merge_cookies(cookiejar, cookies): - """Add cookies to cookiejar and returns a merged CookieJar. - - :param cookiejar: CookieJar object to add the cookies to. - :param cookies: Dictionary or CookieJar object to be added. - :rtype: CookieJar - """ - if not isinstance(cookiejar, cookielib.CookieJar): - raise ValueError('You can only merge into CookieJar') - - if isinstance(cookies, dict): - cookiejar = cookiejar_from_dict( - cookies, cookiejar=cookiejar, overwrite=False) - elif isinstance(cookies, cookielib.CookieJar): - try: - cookiejar.update(cookies) - except AttributeError: - for cookie_in_jar in cookies: - cookiejar.set_cookie(cookie_in_jar) - - return cookiejar diff --git a/pype/vendor/requests/exceptions.py b/pype/vendor/requests/exceptions.py deleted file mode 100644 index a80cad80f1..0000000000 --- a/pype/vendor/requests/exceptions.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.exceptions -~~~~~~~~~~~~~~~~~~~ - -This module contains the set of Requests' exceptions. -""" -from urllib3.exceptions import HTTPError as BaseHTTPError - - -class RequestException(IOError): - """There was an ambiguous exception that occurred while handling your - request. 
- """ - - def __init__(self, *args, **kwargs): - """Initialize RequestException with `request` and `response` objects.""" - response = kwargs.pop('response', None) - self.response = response - self.request = kwargs.pop('request', None) - if (response is not None and not self.request and - hasattr(response, 'request')): - self.request = self.response.request - super(RequestException, self).__init__(*args, **kwargs) - - -class HTTPError(RequestException): - """An HTTP error occurred.""" - - -class ConnectionError(RequestException): - """A Connection error occurred.""" - - -class ProxyError(ConnectionError): - """A proxy error occurred.""" - - -class SSLError(ConnectionError): - """An SSL error occurred.""" - - -class Timeout(RequestException): - """The request timed out. - - Catching this error will catch both - :exc:`~requests.exceptions.ConnectTimeout` and - :exc:`~requests.exceptions.ReadTimeout` errors. - """ - - -class ConnectTimeout(ConnectionError, Timeout): - """The request timed out while trying to connect to the remote server. - - Requests that produced this error are safe to retry. - """ - - -class ReadTimeout(Timeout): - """The server did not send any data in the allotted amount of time.""" - - -class URLRequired(RequestException): - """A valid URL is required to make a request.""" - - -class TooManyRedirects(RequestException): - """Too many redirects.""" - - -class MissingSchema(RequestException, ValueError): - """The URL schema (e.g. http or https) is missing.""" - - -class InvalidSchema(RequestException, ValueError): - """See defaults.py for valid schemas.""" - - -class InvalidURL(RequestException, ValueError): - """The URL provided was somehow invalid.""" - - -class InvalidHeader(RequestException, ValueError): - """The header value provided was somehow invalid.""" - - -class InvalidProxyURL(InvalidURL): - """The proxy URL provided is invalid.""" - - -class ChunkedEncodingError(RequestException): - """The server declared chunked encoding but sent an invalid chunk.""" - - -class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content""" - - -class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed""" - - -class RetryError(RequestException): - """Custom retries logic failed""" - - -class UnrewindableBodyError(RequestException): - """Requests encountered an error when trying to rewind a body""" - -# Warnings - - -class RequestsWarning(Warning): - """Base warning for Requests.""" - pass - - -class FileModeWarning(RequestsWarning, DeprecationWarning): - """A file was opened in text mode, but Requests determined its binary length.""" - pass - - -class RequestsDependencyWarning(RequestsWarning): - """An imported dependency doesn't match the expected version range.""" - pass diff --git a/pype/vendor/requests/help.py b/pype/vendor/requests/help.py deleted file mode 100644 index e53d35ef6d..0000000000 --- a/pype/vendor/requests/help.py +++ /dev/null @@ -1,119 +0,0 @@ -"""Module containing bug report helper(s).""" -from __future__ import print_function - -import json -import platform -import sys -import ssl - -import idna -import urllib3 -import chardet - -from . import __version__ as requests_version - -try: - from urllib3.contrib import pyopenssl -except ImportError: - pyopenssl = None - OpenSSL = None - cryptography = None -else: - import OpenSSL - import cryptography - - -def _implementation(): - """Return a dict with the Python implementation and version. 
- - Provide both the name and the version of the Python implementation - currently running. For example, on CPython 2.7.5 it will return - {'name': 'CPython', 'version': '2.7.5'}. - - This function works best on CPython and PyPy: in particular, it probably - doesn't work for Jython or IronPython. Future investigation should be done - to work out the correct shape of the code for those platforms. - """ - implementation = platform.python_implementation() - - if implementation == 'CPython': - implementation_version = platform.python_version() - elif implementation == 'PyPy': - implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, - sys.pypy_version_info.minor, - sys.pypy_version_info.micro) - if sys.pypy_version_info.releaselevel != 'final': - implementation_version = ''.join([ - implementation_version, sys.pypy_version_info.releaselevel - ]) - elif implementation == 'Jython': - implementation_version = platform.python_version() # Complete Guess - elif implementation == 'IronPython': - implementation_version = platform.python_version() # Complete Guess - else: - implementation_version = 'Unknown' - - return {'name': implementation, 'version': implementation_version} - - -def info(): - """Generate information for a bug report.""" - try: - platform_info = { - 'system': platform.system(), - 'release': platform.release(), - } - except IOError: - platform_info = { - 'system': 'Unknown', - 'release': 'Unknown', - } - - implementation_info = _implementation() - urllib3_info = {'version': urllib3.__version__} - chardet_info = {'version': chardet.__version__} - - pyopenssl_info = { - 'version': None, - 'openssl_version': '', - } - if OpenSSL: - pyopenssl_info = { - 'version': OpenSSL.__version__, - 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, - } - cryptography_info = { - 'version': getattr(cryptography, '__version__', ''), - } - idna_info = { - 'version': getattr(idna, '__version__', ''), - } - - system_ssl = ssl.OPENSSL_VERSION_NUMBER - system_ssl_info = { - 'version': '%x' % system_ssl if system_ssl is not None else '' - } - - return { - 'platform': platform_info, - 'implementation': implementation_info, - 'system_ssl': system_ssl_info, - 'using_pyopenssl': pyopenssl is not None, - 'pyOpenSSL': pyopenssl_info, - 'urllib3': urllib3_info, - 'chardet': chardet_info, - 'cryptography': cryptography_info, - 'idna': idna_info, - 'requests': { - 'version': requests_version, - }, - } - - -def main(): - """Pretty-print the bug information as JSON.""" - print(json.dumps(info(), sort_keys=True, indent=2)) - - -if __name__ == '__main__': - main() diff --git a/pype/vendor/requests/hooks.py b/pype/vendor/requests/hooks.py deleted file mode 100644 index 7a51f212c8..0000000000 --- a/pype/vendor/requests/hooks.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.hooks -~~~~~~~~~~~~~~ - -This module provides the capabilities for the Requests hooks system. - -Available hooks: - -``response``: - The response generated from a Request. 
-""" -HOOKS = ['response'] - - -def default_hooks(): - return {event: [] for event in HOOKS} - -# TODO: response is the only one - - -def dispatch_hook(key, hooks, hook_data, **kwargs): - """Dispatches a hook dictionary on a given piece of data.""" - hooks = hooks or {} - hooks = hooks.get(key) - if hooks: - if hasattr(hooks, '__call__'): - hooks = [hooks] - for hook in hooks: - _hook_data = hook(hook_data, **kwargs) - if _hook_data is not None: - hook_data = _hook_data - return hook_data diff --git a/pype/vendor/requests/models.py b/pype/vendor/requests/models.py deleted file mode 100644 index 62dcd0b7c8..0000000000 --- a/pype/vendor/requests/models.py +++ /dev/null @@ -1,953 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.models -~~~~~~~~~~~~~~~ - -This module contains the primary objects that power Requests. -""" - -import datetime -import sys - -# Import encoding now, to avoid implicit import later. -# Implicit import within threads may cause LookupError when standard library is in a ZIP, -# such as in Embedded Python. See https://github.com/requests/requests/issues/3578. -import encodings.idna - -from urllib3.fields import RequestField -from urllib3.filepost import encode_multipart_formdata -from urllib3.util import parse_url -from urllib3.exceptions import ( - DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) - -from io import UnsupportedOperation -from .hooks import default_hooks -from .structures import CaseInsensitiveDict - -from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar -from .exceptions import ( - HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, - ContentDecodingError, ConnectionError, StreamConsumedError) -from ._internal_utils import to_native_string, unicode_is_ascii -from .utils import ( - guess_filename, get_auth_from_url, requote_uri, - stream_decode_response_unicode, to_key_val_list, parse_header_links, - iter_slices, guess_json_utf, super_len, check_header_validity) -from .compat import ( - Callable, Mapping, - cookielib, urlunparse, urlsplit, urlencode, str, bytes, - is_py2, chardet, builtin_str, basestring) -from .compat import json as complexjson -from .status_codes import codes - -#: The set of HTTP status codes that indicate an automatically -#: processable redirect. -REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_redirect, # 307 - codes.permanent_redirect, # 308 -) - -DEFAULT_REDIRECT_LIMIT = 30 -CONTENT_CHUNK_SIZE = 10 * 1024 -ITER_CHUNK_SIZE = 512 - - -class RequestEncodingMixin(object): - @property - def path_url(self): - """Build the path URL to use.""" - - url = [] - - p = urlsplit(self.url) - - path = p.path - if not path: - path = '/' - - url.append(path) - - query = p.query - if query: - url.append('?') - url.append(query) - - return ''.join(url) - - @staticmethod - def _encode_params(data): - """Encode parameters in a piece of data. - - Will successfully encode parameters when passed as a dict or a list of - 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary - if parameters are supplied as a dict. 
- """ - - if isinstance(data, (str, bytes)): - return data - elif hasattr(data, 'read'): - return data - elif hasattr(data, '__iter__'): - result = [] - for k, vs in to_key_val_list(data): - if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): - vs = [vs] - for v in vs: - if v is not None: - result.append( - (k.encode('utf-8') if isinstance(k, str) else k, - v.encode('utf-8') if isinstance(v, str) else v)) - return urlencode(result, doseq=True) - else: - return data - - @staticmethod - def _encode_files(files, data): - """Build the body for a multipart/form-data request. - - Will successfully encode files when passed as a dict or a list of - tuples. Order is retained if data is a list of tuples but arbitrary - if parameters are supplied as a dict. - The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) - or 4-tuples (filename, fileobj, contentype, custom_headers). - """ - if (not files): - raise ValueError("Files must be provided.") - elif isinstance(data, basestring): - raise ValueError("Data must not be a string.") - - new_fields = [] - fields = to_key_val_list(data or {}) - files = to_key_val_list(files or {}) - - for field, val in fields: - if isinstance(val, basestring) or not hasattr(val, '__iter__'): - val = [val] - for v in val: - if v is not None: - # Don't call str() on bytestrings: in Py3 it all goes wrong. - if not isinstance(v, bytes): - v = str(v) - - new_fields.append( - (field.decode('utf-8') if isinstance(field, bytes) else field, - v.encode('utf-8') if isinstance(v, str) else v)) - - for (k, v) in files: - # support for explicit filename - ft = None - fh = None - if isinstance(v, (tuple, list)): - if len(v) == 2: - fn, fp = v - elif len(v) == 3: - fn, fp, ft = v - else: - fn, fp, ft, fh = v - else: - fn = guess_filename(v) or k - fp = v - - if isinstance(fp, (str, bytes, bytearray)): - fdata = fp - elif hasattr(fp, 'read'): - fdata = fp.read() - elif fp is None: - continue - else: - fdata = fp - - rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) - rf.make_multipart(content_type=ft) - new_fields.append(rf) - - body, content_type = encode_multipart_formdata(new_fields) - - return body, content_type - - -class RequestHooksMixin(object): - def register_hook(self, event, hook): - """Properly register a hook.""" - - if event not in self.hooks: - raise ValueError('Unsupported event specified, with event name "%s"' % (event)) - - if isinstance(hook, Callable): - self.hooks[event].append(hook) - elif hasattr(hook, '__iter__'): - self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) - - def deregister_hook(self, event, hook): - """Deregister a previously registered hook. - Returns True if the hook existed, False if not. - """ - - try: - self.hooks[event].remove(hook) - return True - except ValueError: - return False - - -class Request(RequestHooksMixin): - """A user-created :class:`Request ` object. - - Used to prepare a :class:`PreparedRequest `, which is sent to the server. - - :param method: HTTP method to use. - :param url: URL to send. - :param headers: dictionary of headers to send. - :param files: dictionary of {filename: fileobject} files to multipart upload. - :param data: the body to attach to the request. If a dictionary or - list of tuples ``[(key, value)]`` is provided, form-encoding will - take place. - :param json: json for the body to attach to the request (if files or data is not specified). - :param params: URL parameters to append to the URL. 
If a dictionary or - list of tuples ``[(key, value)]`` is provided, form-encoding will - take place. - :param auth: Auth handler or (user, pass) tuple. - :param cookies: dictionary or CookieJar of cookies to attach to this request. - :param hooks: dictionary of callback hooks, for internal usage. - - Usage:: - - >>> import requests - >>> req = requests.Request('GET', 'https://httpbin.org/get') - >>> req.prepare() - - """ - - def __init__(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): - - # Default empty dicts for dict params. - data = [] if data is None else data - files = [] if files is None else files - headers = {} if headers is None else headers - params = {} if params is None else params - hooks = {} if hooks is None else hooks - - self.hooks = default_hooks() - for (k, v) in list(hooks.items()): - self.register_hook(event=k, hook=v) - - self.method = method - self.url = url - self.headers = headers - self.files = files - self.data = data - self.json = json - self.params = params - self.auth = auth - self.cookies = cookies - - def __repr__(self): - return '' % (self.method) - - def prepare(self): - """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" - p = PreparedRequest() - p.prepare( - method=self.method, - url=self.url, - headers=self.headers, - files=self.files, - data=self.data, - json=self.json, - params=self.params, - auth=self.auth, - cookies=self.cookies, - hooks=self.hooks, - ) - return p - - -class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): - """The fully mutable :class:`PreparedRequest ` object, - containing the exact bytes that will be sent to the server. - - Generated from either a :class:`Request ` object or manually. - - Usage:: - - >>> import requests - >>> req = requests.Request('GET', 'https://httpbin.org/get') - >>> r = req.prepare() - - - >>> s = requests.Session() - >>> s.send(r) - - """ - - def __init__(self): - #: HTTP verb to send to the server. - self.method = None - #: HTTP URL to send the request to. - self.url = None - #: dictionary of HTTP headers. - self.headers = None - # The `CookieJar` used to create the Cookie header will be stored here - # after prepare_cookies is called - self._cookies = None - #: request body to send to the server. - self.body = None - #: dictionary of callback hooks, for internal usage. - self.hooks = default_hooks() - #: integer denoting starting position of a readable file-like body. - self._body_position = None - - def prepare(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): - """Prepares the entire request with the given parameters.""" - - self.prepare_method(method) - self.prepare_url(url, params) - self.prepare_headers(headers) - self.prepare_cookies(cookies) - self.prepare_body(data, files, json) - self.prepare_auth(auth, url) - - # Note that prepare_auth must be last to enable authentication schemes - # such as OAuth to work on a fully prepared request. - - # This MUST go after prepare_auth. 
Authenticators could add a hook - self.prepare_hooks(hooks) - - def __repr__(self): - return '' % (self.method) - - def copy(self): - p = PreparedRequest() - p.method = self.method - p.url = self.url - p.headers = self.headers.copy() if self.headers is not None else None - p._cookies = _copy_cookie_jar(self._cookies) - p.body = self.body - p.hooks = self.hooks - p._body_position = self._body_position - return p - - def prepare_method(self, method): - """Prepares the given HTTP method.""" - self.method = method - if self.method is not None: - self.method = to_native_string(self.method.upper()) - - @staticmethod - def _get_idna_encoded_host(host): - import idna - - try: - host = idna.encode(host, uts46=True).decode('utf-8') - except idna.IDNAError: - raise UnicodeError - return host - - def prepare_url(self, url, params): - """Prepares the given HTTP URL.""" - #: Accept objects that have string representations. - #: We're unable to blindly call unicode/str functions - #: as this will include the bytestring indicator (b'') - #: on python 3.x. - #: https://github.com/requests/requests/pull/2238 - if isinstance(url, bytes): - url = url.decode('utf8') - else: - url = unicode(url) if is_py2 else str(url) - - # Remove leading whitespaces from url - url = url.lstrip() - - # Don't do any URL preparation for non-HTTP schemes like `mailto`, - # `data` etc to work around exceptions from `url_parse`, which - # handles RFC 3986 only. - if ':' in url and not url.lower().startswith('http'): - self.url = url - return - - # Support for unicode domain names and paths. - try: - scheme, auth, host, port, path, query, fragment = parse_url(url) - except LocationParseError as e: - raise InvalidURL(*e.args) - - if not scheme: - error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") - error = error.format(to_native_string(url, 'utf8')) - - raise MissingSchema(error) - - if not host: - raise InvalidURL("Invalid URL %r: No host supplied" % url) - - # In general, we want to try IDNA encoding the hostname if the string contains - # non-ASCII characters. This allows users to automatically get the correct IDNA - # behaviour. For strings containing only ASCII characters, we need to also verify - # it doesn't start with a wildcard (*), before allowing the unencoded hostname. - if not unicode_is_ascii(host): - try: - host = self._get_idna_encoded_host(host) - except UnicodeError: - raise InvalidURL('URL has an invalid label.') - elif host.startswith(u'*'): - raise InvalidURL('URL has an invalid label.') - - # Carefully reconstruct the network location - netloc = auth or '' - if netloc: - netloc += '@' - netloc += host - if port: - netloc += ':' + str(port) - - # Bare domains aren't valid URLs. 
- if not path: - path = '/' - - if is_py2: - if isinstance(scheme, str): - scheme = scheme.encode('utf-8') - if isinstance(netloc, str): - netloc = netloc.encode('utf-8') - if isinstance(path, str): - path = path.encode('utf-8') - if isinstance(query, str): - query = query.encode('utf-8') - if isinstance(fragment, str): - fragment = fragment.encode('utf-8') - - if isinstance(params, (str, bytes)): - params = to_native_string(params) - - enc_params = self._encode_params(params) - if enc_params: - if query: - query = '%s&%s' % (query, enc_params) - else: - query = enc_params - - url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) - self.url = url - - def prepare_headers(self, headers): - """Prepares the given HTTP headers.""" - - self.headers = CaseInsensitiveDict() - if headers: - for header in headers.items(): - # Raise exception on invalid header value. - check_header_validity(header) - name, value = header - self.headers[to_native_string(name)] = value - - def prepare_body(self, data, files, json=None): - """Prepares the given HTTP body data.""" - - # Check if file, fo, generator, iterator. - # If not, run through normal process. - - # Nottin' on you. - body = None - content_type = None - - if not data and json is not None: - # urllib3 requires a bytes-like body. Python 2's json.dumps - # provides this natively, but Python 3 gives a Unicode string. - content_type = 'application/json' - body = complexjson.dumps(json) - if not isinstance(body, bytes): - body = body.encode('utf-8') - - is_stream = all([ - hasattr(data, '__iter__'), - not isinstance(data, (basestring, list, tuple, Mapping)) - ]) - - try: - length = super_len(data) - except (TypeError, AttributeError, UnsupportedOperation): - length = None - - if is_stream: - body = data - - if getattr(body, 'tell', None) is not None: - # Record the current file position before reading. - # This will allow us to rewind a file in the event - # of a redirect. - try: - self._body_position = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body - self._body_position = object() - - if files: - raise NotImplementedError('Streamed bodies and files are mutually exclusive.') - - if length: - self.headers['Content-Length'] = builtin_str(length) - else: - self.headers['Transfer-Encoding'] = 'chunked' - else: - # Multi-part file uploads. - if files: - (body, content_type) = self._encode_files(files, data) - else: - if data: - body = self._encode_params(data) - if isinstance(data, basestring) or hasattr(data, 'read'): - content_type = None - else: - content_type = 'application/x-www-form-urlencoded' - - self.prepare_content_length(body) - - # Add content-type if it wasn't explicitly provided. - if content_type and ('content-type' not in self.headers): - self.headers['Content-Type'] = content_type - - self.body = body - - def prepare_content_length(self, body): - """Prepare Content-Length header based on request method and body""" - if body is not None: - length = super_len(body) - if length: - # If length exists, set it. Otherwise, we fallback - # to Transfer-Encoding: chunked. - self.headers['Content-Length'] = builtin_str(length) - elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: - # Set Content-Length to 0 for methods that can have a body - # but don't provide one. (i.e. 
not GET or HEAD) - self.headers['Content-Length'] = '0' - - def prepare_auth(self, auth, url=''): - """Prepares the given HTTP auth data.""" - - # If no Auth is explicitly provided, extract it from the URL first. - if auth is None: - url_auth = get_auth_from_url(self.url) - auth = url_auth if any(url_auth) else None - - if auth: - if isinstance(auth, tuple) and len(auth) == 2: - # special-case basic HTTP auth - auth = HTTPBasicAuth(*auth) - - # Allow auth to make its changes. - r = auth(self) - - # Update self to reflect the auth changes. - self.__dict__.update(r.__dict__) - - # Recompute Content-Length - self.prepare_content_length(self.body) - - def prepare_cookies(self, cookies): - """Prepares the given HTTP cookie data. - - This function eventually generates a ``Cookie`` header from the - given cookies using cookielib. Due to cookielib's design, the header - will not be regenerated if it already exists, meaning this function - can only be called once for the life of the - :class:`PreparedRequest ` object. Any subsequent calls - to ``prepare_cookies`` will have no actual effect, unless the "Cookie" - header is removed beforehand. - """ - if isinstance(cookies, cookielib.CookieJar): - self._cookies = cookies - else: - self._cookies = cookiejar_from_dict(cookies) - - cookie_header = get_cookie_header(self._cookies, self) - if cookie_header is not None: - self.headers['Cookie'] = cookie_header - - def prepare_hooks(self, hooks): - """Prepares the given hooks.""" - # hooks can be passed as None to the prepare method and to this - # method. To prevent iterating over None, simply use an empty list - # if hooks is False-y - hooks = hooks or [] - for event in hooks: - self.register_hook(event, hooks[event]) - - -class Response(object): - """The :class:`Response ` object, which contains a - server's response to an HTTP request. - """ - - __attrs__ = [ - '_content', 'status_code', 'headers', 'url', 'history', - 'encoding', 'reason', 'cookies', 'elapsed', 'request' - ] - - def __init__(self): - self._content = False - self._content_consumed = False - self._next = None - - #: Integer Code of responded HTTP Status, e.g. 404 or 200. - self.status_code = None - - #: Case-insensitive Dictionary of Response Headers. - #: For example, ``headers['content-encoding']`` will return the - #: value of a ``'Content-Encoding'`` response header. - self.headers = CaseInsensitiveDict() - - #: File-like object representation of response (for advanced usage). - #: Use of ``raw`` requires that ``stream=True`` be set on the request. - # This requirement does not apply for use internally to Requests. - self.raw = None - - #: Final URL location of Response. - self.url = None - - #: Encoding to decode with when accessing r.text. - self.encoding = None - - #: A list of :class:`Response ` objects from - #: the history of the Request. Any redirect responses will end - #: up here. The list is sorted from the oldest to the most recent request. - self.history = [] - - #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". - self.reason = None - - #: A CookieJar of Cookies the server sent back. - self.cookies = cookiejar_from_dict({}) - - #: The amount of time elapsed between sending the request - #: and the arrival of the response (as a timedelta). - #: This property specifically measures the time taken between sending - #: the first byte of the request and finishing parsing the headers. It - #: is therefore unaffected by consuming the response content or the - #: value of the ``stream`` keyword argument. 
- self.elapsed = datetime.timedelta(0) - - #: The :class:`PreparedRequest ` object to which this - #: is a response. - self.request = None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - def __getstate__(self): - # Consume everything; accessing the content attribute makes - # sure the content has been fully read. - if not self._content_consumed: - self.content - - return {attr: getattr(self, attr, None) for attr in self.__attrs__} - - def __setstate__(self, state): - for name, value in state.items(): - setattr(self, name, value) - - # pickled objects do not have .raw - setattr(self, '_content_consumed', True) - setattr(self, 'raw', None) - - def __repr__(self): - return '' % (self.status_code) - - def __bool__(self): - """Returns True if :attr:`status_code` is less than 400. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ - return self.ok - - def __nonzero__(self): - """Returns True if :attr:`status_code` is less than 400. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ - return self.ok - - def __iter__(self): - """Allows you to use a response as an iterator.""" - return self.iter_content(128) - - @property - def ok(self): - """Returns True if :attr:`status_code` is less than 400, False if not. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ - try: - self.raise_for_status() - except HTTPError: - return False - return True - - @property - def is_redirect(self): - """True if this Response is a well-formed HTTP redirect that could have - been processed automatically (by :meth:`Session.resolve_redirects`). - """ - return ('location' in self.headers and self.status_code in REDIRECT_STATI) - - @property - def is_permanent_redirect(self): - """True if this Response one of the permanent versions of redirect.""" - return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) - - @property - def next(self): - """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" - return self._next - - @property - def apparent_encoding(self): - """The apparent encoding, provided by the chardet library.""" - return chardet.detect(self.content)['encoding'] - - def iter_content(self, chunk_size=1, decode_unicode=False): - """Iterates over the response data. When stream=True is set on the - request, this avoids reading the content at once into memory for - large responses. The chunk size is the number of bytes it should - read into memory. This is not necessarily the length of each item - returned as decoding can take place. - - chunk_size must be of type int or None. A value of None will - function differently depending on the value of `stream`. - stream=True will read data as it arrives in whatever size the - chunks are received. If stream=False, data is returned as - a single chunk. 
- - If decode_unicode is True, content will be decoded using the best - available encoding based on the response. - """ - - def generate(): - # Special case for urllib3. - if hasattr(self.raw, 'stream'): - try: - for chunk in self.raw.stream(chunk_size, decode_content=True): - yield chunk - except ProtocolError as e: - raise ChunkedEncodingError(e) - except DecodeError as e: - raise ContentDecodingError(e) - except ReadTimeoutError as e: - raise ConnectionError(e) - else: - # Standard file-like object. - while True: - chunk = self.raw.read(chunk_size) - if not chunk: - break - yield chunk - - self._content_consumed = True - - if self._content_consumed and isinstance(self._content, bool): - raise StreamConsumedError() - elif chunk_size is not None and not isinstance(chunk_size, int): - raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) - # simulate reading small chunks of the content - reused_chunks = iter_slices(self._content, chunk_size) - - stream_chunks = generate() - - chunks = reused_chunks if self._content_consumed else stream_chunks - - if decode_unicode: - chunks = stream_decode_response_unicode(chunks, self) - - return chunks - - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): - """Iterates over the response data, one line at a time. When - stream=True is set on the request, this avoids reading the - content at once into memory for large responses. - - .. note:: This method is not reentrant safe. - """ - - pending = None - - for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): - - if pending is not None: - chunk = pending + chunk - - if delimiter: - lines = chunk.split(delimiter) - else: - lines = chunk.splitlines() - - if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: - pending = lines.pop() - else: - pending = None - - for line in lines: - yield line - - if pending is not None: - yield pending - - @property - def content(self): - """Content of the response, in bytes.""" - - if self._content is False: - # Read the contents. - if self._content_consumed: - raise RuntimeError( - 'The content for this response was already consumed') - - if self.status_code == 0 or self.raw is None: - self._content = None - else: - self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' - - self._content_consumed = True - # don't need to release the connection; that's been handled by urllib3 - # since we exhausted the data. - return self._content - - @property - def text(self): - """Content of the response, in unicode. - - If Response.encoding is None, encoding will be guessed using - ``chardet``. - - The encoding of the response content is determined based solely on HTTP - headers, following RFC 2616 to the letter. If you can take advantage of - non-HTTP knowledge to make a better guess at the encoding, you should - set ``r.encoding`` appropriately before accessing this property. - """ - - # Try charset from content-type - content = None - encoding = self.encoding - - if not self.content: - return str('') - - # Fallback to auto-detected encoding. - if self.encoding is None: - encoding = self.apparent_encoding - - # Decode unicode from given encoding. - try: - content = str(self.content, encoding, errors='replace') - except (LookupError, TypeError): - # A LookupError is raised if the encoding was not found which could - # indicate a misspelling or similar mistake. - # - # A TypeError can be raised if encoding is None - # - # So we try blindly encoding. 
- content = str(self.content, errors='replace') - - return content - - def json(self, **kwargs): - r"""Returns the json-encoded content of a response, if any. - - :param \*\*kwargs: Optional arguments that ``json.loads`` takes. - :raises ValueError: If the response body does not contain valid json. - """ - - if not self.encoding and self.content and len(self.content) > 3: - # No encoding set. JSON RFC 4627 section 3 states we should expect - # UTF-8, -16 or -32. Detect which one to use; If the detection or - # decoding fails, fall back to `self.text` (using chardet to make - # a best guess). - encoding = guess_json_utf(self.content) - if encoding is not None: - try: - return complexjson.loads( - self.content.decode(encoding), **kwargs - ) - except UnicodeDecodeError: - # Wrong UTF codec detected; usually because it's not UTF-8 - # but some other 8-bit codec. This is an RFC violation, - # and the server didn't bother to tell us what codec *was* - # used. - pass - return complexjson.loads(self.text, **kwargs) - - @property - def links(self): - """Returns the parsed header links of the response, if any.""" - - header = self.headers.get('link') - - # l = MultiDict() - l = {} - - if header: - links = parse_header_links(header) - - for link in links: - key = link.get('rel') or link.get('url') - l[key] = link - - return l - - def raise_for_status(self): - """Raises stored :class:`HTTPError`, if one occurred.""" - - http_error_msg = '' - if isinstance(self.reason, bytes): - # We attempt to decode utf-8 first because some servers - # choose to localize their reason strings. If the string - # isn't utf-8, we fall back to iso-8859-1 for all other - # encodings. (See PR #3538) - try: - reason = self.reason.decode('utf-8') - except UnicodeDecodeError: - reason = self.reason.decode('iso-8859-1') - else: - reason = self.reason - - if 400 <= self.status_code < 500: - http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) - - elif 500 <= self.status_code < 600: - http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) - - if http_error_msg: - raise HTTPError(http_error_msg, response=self) - - def close(self): - """Releases the connection back to the pool. Once this method has been - called the underlying ``raw`` object must not be accessed again. - - *Note: Should not normally need to be called explicitly.* - """ - if not self._content_consumed: - self.raw.close() - - release_conn = getattr(self.raw, 'release_conn', None) - if release_conn is not None: - release_conn() diff --git a/pype/vendor/requests/packages.py b/pype/vendor/requests/packages.py deleted file mode 100644 index 7232fe0ff7..0000000000 --- a/pype/vendor/requests/packages.py +++ /dev/null @@ -1,14 +0,0 @@ -import sys - -# This code exists for backwards compatibility reasons. -# I don't like it either. Just look the other way. :) - -for package in ('urllib3', 'idna', 'chardet'): - locals()[package] = __import__(package) - # This traversal is apparently necessary such that the identities are - # preserved (requests.packages.urllib3.* is urllib3.*) - for mod in list(sys.modules): - if mod == package or mod.startswith(package + '.'): - sys.modules['requests.packages.' + mod] = sys.modules[mod] - -# Kinda cool, though, right? 
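(Editorial aside, not part of the diff: the deleted requests/packages.py above works by aliasing already-imported modules in sys.modules so that both dotted names resolve to the very same module objects, which is the "identities are preserved" property its comment mentions and the packages/__init__.py docstring below motivates. A minimal, hypothetical sketch of the same trick, using the stdlib json package in place of urllib3/chardet; the alias_package helper and the "vendor" prefix are illustrative and not part of requests:

import sys

def alias_package(real_name, alias_prefix):
    # Import the real package so it, and any submodules it imports
    # eagerly, are registered in sys.modules.
    __import__(real_name)
    for mod_name in list(sys.modules):
        if mod_name == real_name or mod_name.startswith(real_name + "."):
            # Bind the alias to the *same* module object, so identity
            # checks hold for the package and every submodule.
            sys.modules[alias_prefix + "." + mod_name] = sys.modules[mod_name]

alias_package("json", "vendor")

import json
assert sys.modules["vendor.json"] is json
assert sys.modules["vendor.json.decoder"] is json.decoder

Because the alias points at the same object rather than a copy or a re-import, isinstance checks and module-level state keep working under both names. Note the sketch only populates the sys.modules dict; requests additionally ships a real requests/packages.py module so that an actual `import requests.packages.urllib3` statement resolves.)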
diff --git a/pype/vendor/requests/packages/__init__.py b/pype/vendor/requests/packages/__init__.py deleted file mode 100644 index 971c2ad024..0000000000 --- a/pype/vendor/requests/packages/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -''' -Debian and other distributions "unbundle" requests' vendored dependencies, and -rewrite all imports to use the global versions of ``urllib3`` and ``chardet``. -The problem with this is that not only requests itself imports those -dependencies, but third-party code outside of the distros' control too. - -In reaction to these problems, the distro maintainers replaced -``requests.packages`` with a magical "stub module" that imports the correct -modules. The implementations were varying in quality and all had severe -problems. For example, a symlink (or hardlink) that links the correct modules -into place introduces problems regarding object identity, since you now have -two modules in `sys.modules` with the same API, but different identities:: - - requests.packages.urllib3 is not urllib3 - -With version ``2.5.2``, requests started to maintain its own stub, so that -distro-specific breakage would be reduced to a minimum, even though the whole -issue is not requests' fault in the first place. See -https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull -request. -''' - -from __future__ import absolute_import -import sys - -try: - from . import urllib3 -except ImportError: - import urllib3 - sys.modules['%s.urllib3' % __name__] = urllib3 - -try: - from . import chardet -except ImportError: - import chardet - sys.modules['%s.chardet' % __name__] = chardet diff --git a/pype/vendor/requests/packages/chardet/__init__.py b/pype/vendor/requests/packages/chardet/__init__.py deleted file mode 100644 index 0f9f820ef6..0000000000 --- a/pype/vendor/requests/packages/chardet/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -from .compat import PY2, PY3 -from .universaldetector import UniversalDetector -from .version import __version__, VERSION - - -def detect(byte_str): - """ - Detect the encoding of the given byte string. - - :param byte_str: The byte sequence to examine. 
- :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{0}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - detector = UniversalDetector() - detector.feed(byte_str) - return detector.close() diff --git a/pype/vendor/requests/packages/chardet/big5freq.py b/pype/vendor/requests/packages/chardet/big5freq.py deleted file mode 100644 index 38f32517aa..0000000000 --- a/pype/vendor/requests/packages/chardet/big5freq.py +++ /dev/null @@ -1,386 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Big5 frequency table -# by Taiwan's Mandarin Promotion Council -# -# -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -#Char to FreqOrder table -BIG5_TABLE_SIZE = 5376 - -BIG5_CHAR_TO_FREQ_ORDER = ( - 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 -3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 -1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 - 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 -3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 -4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 -5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 - 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 - 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 - 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 -2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 -1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 -3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 - 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 -1426, 428, 727,1285,1015, 800, 106, 623, 
303,1281,5022,2128,2359, 347,3815, 221, # 240 -3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 -2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 - 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 -3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 -1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 -5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 - 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 -5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 -1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 - 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 - 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 -3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 -3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 - 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 -2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 -2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 - 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 - 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 -3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 -1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 -1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 -1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 -2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 - 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 -4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 -1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 -5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 -2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 - 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 - 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 - 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 - 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 -5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 - 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 -1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 - 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 - 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 -5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 -1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 - 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 
-3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 -4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 -3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 - 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 - 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 -1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 -4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 -3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 -3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 -2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 -5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 -3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 -5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 -1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 -2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 -1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 - 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 -1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 -4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 -3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 - 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 - 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 - 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 -2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 -5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 -1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 -2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 -1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 -1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 -5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 -5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 -5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 -3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 -4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 -4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 -2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 -5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 -3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 - 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 -5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 
-5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 -1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 -2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 -3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 -4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 -5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 -3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 -4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 -1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 -1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 -4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 -1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 - 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 -1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 -1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 -3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 - 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 -5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 -2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 -1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 -1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 -5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 - 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 -4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 - 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 -2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 - 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 -1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 -1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 - 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 -4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 -4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 -1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 -3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 -5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 -5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 -1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 -2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 -1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 -3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 
-2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 -3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 -2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 -4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 -4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 -3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 - 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 -3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 - 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 -3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 -4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 -3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 -1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 -5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 - 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 -5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 -1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 - 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 -4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 -4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 - 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 -2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 -2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 -3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 -1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 -4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 -2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 -1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 -1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 -2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 -3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 -1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 -5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 -1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 -4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 -1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 - 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 -1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 -4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 -4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 
-2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 -1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 -4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 - 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 -5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 -2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 -3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 -4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 - 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 -5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 -5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 -1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 -4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 -4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 -2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 -3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 -3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 -2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 -1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 -4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 -3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 -3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 -2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 -4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 -5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 -3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 -2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 -3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 -1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 -2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 -3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 -4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 -2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 -2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 -5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 -1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 -2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 -1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 -3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 -4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 
-2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 -3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 -3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 -2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 -4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 -2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 -3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 -4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 -5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 -3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 - 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 -1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 -4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 -1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 -4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 -5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 - 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 -5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 -5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 -2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 -3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 -2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 -2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 - 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 -1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 -4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 -3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 -3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 - 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 -2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 - 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 -2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 -4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 -1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 -4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 -1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 -3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 - 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 -3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 -5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 
4080 -5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 -3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 -3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 -1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 -2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 -5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 -1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 -1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 -3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 - 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 -1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 -4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 -5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 -2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 -3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 - 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 -1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 -2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 -2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 -5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 -5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 -5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 -2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 -2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 -1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 -4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 -3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 -3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 -4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 -4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 -2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 -2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 -5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 -4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 -5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 -4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 - 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 - 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 -1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 -3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 
120, # 4720 -4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 -1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 -5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 -2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 -2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 -3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 -5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 -1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 -3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 -5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 -1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 -5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 -2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 -3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 -2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 -3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 -3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 -3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 -4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 - 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 -2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 -4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 -3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 -5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 -1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 -5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 - 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 -1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 - 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 -4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 -1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 -4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 -1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 - 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 -3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 -4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 -5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 - 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 -3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 - 
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 -2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 -) - diff --git a/pype/vendor/requests/packages/chardet/big5prober.py b/pype/vendor/requests/packages/chardet/big5prober.py deleted file mode 100644 index 98f9970122..0000000000 --- a/pype/vendor/requests/packages/chardet/big5prober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import Big5DistributionAnalysis -from .mbcssm import BIG5_SM_MODEL - - -class Big5Prober(MultiByteCharSetProber): - def __init__(self): - super(Big5Prober, self).__init__() - self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) - self.distribution_analyzer = Big5DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "Big5" - - @property - def language(self): - return "Chinese" diff --git a/pype/vendor/requests/packages/chardet/chardetect.py b/pype/vendor/requests/packages/chardet/chardetect.py deleted file mode 100644 index ffe892f25d..0000000000 --- a/pype/vendor/requests/packages/chardet/chardetect.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" - -from __future__ import absolute_import, print_function, unicode_literals - -import argparse -import sys -from io import open - -from chardet import __version__ -from chardet.universaldetector import UniversalDetector - - -def description_of(lines, name='stdin'): - """ - Return a string describing the probable encoding of a file or - list of strings. - - :param lines: The lines to get the encoding of. 
- :type lines: Iterable of bytes - :param name: Name of file or collection of lines - :type name: str - """ - u = UniversalDetector() - for line in lines: - u.feed(line) - u.close() - result = u.result - if result['encoding']: - return '{0}: {1} with confidence {2}'.format(name, result['encoding'], - result['confidence']) - else: - return '{0}: no result'.format(name) - - -def main(argv=None): - ''' - Handles command line arguments and gets things started. - - :param argv: List of arguments, as if specified on the command-line. - If None, ``sys.argv[1:]`` is used instead. - :type argv: list of str - ''' - # Get command line arguments - parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, - conflict_handler='resolve') - parser.add_argument('input', - help='File whose encoding we would like to determine.', - type=argparse.FileType('rb'), nargs='*', - default=[sys.stdin]) - parser.add_argument('--version', action='version', - version='%(prog)s {0}'.format(__version__)) - args = parser.parse_args(argv) - - for f in args.input: - if f.isatty(): - print("You are running chardetect interactively. Press " + - "CTRL-D twice at the start of a blank line to signal the " + - "end of your input. If you want help, run chardetect " + - "--help\n", file=sys.stderr) - print(description_of(f, f.name)) - - -if __name__ == '__main__': - main() diff --git a/pype/vendor/requests/packages/chardet/chardistribution.py b/pype/vendor/requests/packages/chardet/chardistribution.py deleted file mode 100644 index c0395f4a45..0000000000 --- a/pype/vendor/requests/packages/chardet/chardistribution.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
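The `description_of` helper removed above captures the core detection loop: construct a `UniversalDetector`, `feed()` it bytes, `close()`, then read the `result` dict. A minimal sketch of the same pattern against the standalone `chardet` package (assuming `chardet` is importable from the environment now that the vendored copy is gone; `detect_file_encoding` is an illustrative name, not part of the library)::

    from chardet.universaldetector import UniversalDetector

    def detect_file_encoding(path):
        """Feed a file to UniversalDetector in chunks and return its verdict."""
        detector = UniversalDetector()
        with open(path, "rb") as handle:
            for chunk in iter(lambda: handle.read(4096), b""):
                detector.feed(chunk)
                if detector.done:  # already confident (e.g. a BOM was seen)
                    break
        detector.close()
        # e.g. {'encoding': 'utf-8', 'confidence': 0.99, 'language': ''}
        return detector.result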
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, - EUCTW_TYPICAL_DISTRIBUTION_RATIO) -from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, - EUCKR_TYPICAL_DISTRIBUTION_RATIO) -from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, - GB2312_TYPICAL_DISTRIBUTION_RATIO) -from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, - BIG5_TYPICAL_DISTRIBUTION_RATIO) -from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, - JIS_TYPICAL_DISTRIBUTION_RATIO) - - -class CharDistributionAnalysis(object): - ENOUGH_DATA_THRESHOLD = 1024 - SURE_YES = 0.99 - SURE_NO = 0.01 - MINIMUM_DATA_THRESHOLD = 3 - - def __init__(self): - # Mapping table to get frequency order from char order (get from - # GetOrder()) - self._char_to_freq_order = None - self._table_size = None # Size of above table - # This is a constant value which varies from language to language, - # used in calculating confidence. See - # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html - # for further detail. - self.typical_distribution_ratio = None - self._done = None - self._total_chars = None - self._freq_chars = None - self.reset() - - def reset(self): - """reset analyser, clear any state""" - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - self._total_chars = 0 # Total characters encountered - # The number of characters whose frequency order is less than 512 - self._freq_chars = 0 - - def feed(self, char, char_len): - """feed a character with known length""" - if char_len == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(char) - else: - order = -1 - if order >= 0: - self._total_chars += 1 - # order is valid - if order < self._table_size: - if 512 > self._char_to_freq_order[order]: - self._freq_chars += 1 - - def get_confidence(self): - """return confidence based on existing data""" - # if we didn't receive any character in our consideration range, - # return negative answer - if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: - return self.SURE_NO - - if self._total_chars != self._freq_chars: - r = (self._freq_chars / ((self._total_chars - self._freq_chars) - * self.typical_distribution_ratio)) - if r < self.SURE_YES: - return r - - # normalize confidence (we don't want to be 100% sure) - return self.SURE_YES - - def got_enough_data(self): - # It is not necessary to receive all data to draw conclusion. - # For charset detection, certain amount of data is enough - return self._total_chars > self.ENOUGH_DATA_THRESHOLD - - def get_order(self, byte_str): - # We do not handle characters based on the original encoding string, - # but convert this encoding string to a number, here called order. - # This allows multiple encodings of a language to share one frequency - # table. 
- return -1 - - -class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCTWDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER - self._table_size = EUCTW_TABLE_SIZE - self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-TW encoding, we are interested - # first byte range: 0xc4 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = byte_str[0] - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 - else: - return -1 - - -class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCKRDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER - self._table_size = EUCKR_TABLE_SIZE - self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-KR encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = byte_str[0] - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 - else: - return -1 - - -class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(GB2312DistributionAnalysis, self).__init__() - self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER - self._table_size = GB2312_TABLE_SIZE - self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for GB2312 encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0xB0) and (second_char >= 0xA1): - return 94 * (first_char - 0xB0) + second_char - 0xA1 - else: - return -1 - - -class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(Big5DistributionAnalysis, self).__init__() - self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER - self._table_size = BIG5_TABLE_SIZE - self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for big5 encoding, we are interested - # first byte range: 0xa4 -- 0xfe - # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: - return -1 - - -class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(SJISDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for sjis encoding, we are interested - # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe - # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe - # no validation needed here. 
State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0x81) and (first_char <= 0x9F): - order = 188 * (first_char - 0x81) - elif (first_char >= 0xE0) and (first_char <= 0xEF): - order = 188 * (first_char - 0xE0 + 31) - else: - return -1 - order = order + second_char - 0x40 - if second_char > 0x7F: - order = -1 - return order - - -class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCJPDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-JP encoding, we are interested - # first byte range: 0xa0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - char = byte_str[0] - if char >= 0xA0: - return 94 * (char - 0xA1) + byte_str[1] - 0xa1 - else: - return -1 diff --git a/pype/vendor/requests/packages/chardet/charsetgroupprober.py b/pype/vendor/requests/packages/chardet/charsetgroupprober.py deleted file mode 100644 index 8b3738efd8..0000000000 --- a/pype/vendor/requests/packages/chardet/charsetgroupprober.py +++ /dev/null @@ -1,106 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
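Each `get_order` override above turns a multi-byte character into an index into its language's frequency table with plain byte arithmetic. A worked sketch of the EUC-KR case, restating the formula from `EUCKRDistributionAnalysis.get_order` standalone (the helper name and sample byte pairs are illustrative)::

    def euckr_order(byte_pair):
        """Frequency-table index for a 2-byte EUC-KR character, or -1.

        First byte 0xB0-0xFE selects the row, second byte 0xA1-0xFE the
        column; rows are 94 code points wide, hence 94 * row + column.
        """
        first, second = byte_pair[0], byte_pair[1]
        if first >= 0xB0:
            return 94 * (first - 0xB0) + second - 0xA1
        return -1

    assert euckr_order(b"\xb0\xa1") == 0    # first valid code point
    assert euckr_order(b"\xb1\xa2") == 95   # next row, next column
    assert euckr_order(b"\x41\x42") == -1   # ASCII pair: out of range

Orders below 512 are what `feed()` counts as frequent characters when the distribution analyser computes its confidence.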
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState -from .charsetprober import CharSetProber - - -class CharSetGroupProber(CharSetProber): - def __init__(self, lang_filter=None): - super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) - self._active_num = 0 - self.probers = [] - self._best_guess_prober = None - - def reset(self): - super(CharSetGroupProber, self).reset() - self._active_num = 0 - for prober in self.probers: - if prober: - prober.reset() - prober.active = True - self._active_num += 1 - self._best_guess_prober = None - - @property - def charset_name(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.charset_name - - @property - def language(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.language - - def feed(self, byte_str): - for prober in self.probers: - if not prober: - continue - if not prober.active: - continue - state = prober.feed(byte_str) - if not state: - continue - if state == ProbingState.FOUND_IT: - self._best_guess_prober = prober - return self.state - elif state == ProbingState.NOT_ME: - prober.active = False - self._active_num -= 1 - if self._active_num <= 0: - self._state = ProbingState.NOT_ME - return self.state - return self.state - - def get_confidence(self): - state = self.state - if state == ProbingState.FOUND_IT: - return 0.99 - elif state == ProbingState.NOT_ME: - return 0.01 - best_conf = 0.0 - self._best_guess_prober = None - for prober in self.probers: - if not prober: - continue - if not prober.active: - self.logger.debug('%s not active', prober.charset_name) - continue - conf = prober.get_confidence() - self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) - if best_conf < conf: - best_conf = conf - self._best_guess_prober = prober - if not self._best_guess_prober: - return 0.0 - return best_conf diff --git a/pype/vendor/requests/packages/chardet/charsetprober.py b/pype/vendor/requests/packages/chardet/charsetprober.py deleted file mode 100644 index eac4e59865..0000000000 --- a/pype/vendor/requests/packages/chardet/charsetprober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
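`CharSetGroupProber.get_confidence` above simply surveys its children and keeps the active prober with the highest confidence. A reduced sketch of that selection step, with the child probers stubbed out as hypothetical dicts rather than real prober instances::

    def best_guess(probers):
        """Pick the active prober with the highest confidence."""
        best_conf, best_prober = 0.0, None
        for prober in probers:
            if prober is None or not prober["active"]:
                continue  # disqualified (NOT_ME) probers are skipped
            if best_conf < prober["confidence"]:
                best_conf, best_prober = prober["confidence"], prober
        return best_prober

    probers = [
        {"name": "Big5", "active": True, "confidence": 0.42},
        {"name": "EUC-KR", "active": False, "confidence": 0.90},  # NOT_ME
        {"name": "GB2312", "active": True, "confidence": 0.71},
    ]
    assert best_guess(probers)["name"] == "GB2312"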
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging -import re - -from .enums import ProbingState - - -class CharSetProber(object): - - SHORTCUT_THRESHOLD = 0.95 - - def __init__(self, lang_filter=None): - self._state = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - - def reset(self): - self._state = ProbingState.DETECTING - - @property - def charset_name(self): - return None - - def feed(self, buf): - pass - - @property - def state(self): - return self._state - - def get_confidence(self): - return 0.0 - - @staticmethod - def filter_high_byte_only(buf): - buf = re.sub(b'([\x00-\x7F])+', b' ', buf) - return buf - - @staticmethod - def filter_international_words(buf): - """ - We define three types of bytes: - alphabet: english alphabets [a-zA-Z] - international: international characters [\x80-\xFF] - marker: everything else [^a-zA-Z\x80-\xFF] - - The input buffer can be thought to contain a series of words delimited - by markers. This function works to filter all words that contain at - least one international character. All contiguous sequences of markers - are replaced by a single space ascii character. - - This filter applies to all scripts which do not use English characters. - """ - filtered = bytearray() - - # This regex expression filters out only words that have at-least one - # international character. The word may include one marker character at - # the end. - words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', - buf) - - for word in words: - filtered.extend(word[:-1]) - - # If the last character in the word is a marker, replace it with a - # space as markers shouldn't affect our analysis (they are used - # similarly across all languages and may thus have similar - # frequencies). - last_char = word[-1:] - if not last_char.isalpha() and last_char < b'\x80': - last_char = b' ' - filtered.extend(last_char) - - return filtered - - @staticmethod - def filter_with_english_letters(buf): - """ - Returns a copy of ``buf`` that retains only the sequences of English - alphabet and high byte characters that are not between <> characters. - Also retains English alphabet and high byte characters immediately - before occurrences of >. - - This filter can be applied to all scripts which contain both English - characters and extended ASCII characters, but is currently only used by - ``Latin1Prober``. - """ - filtered = bytearray() - in_tag = False - prev = 0 - - for curr in range(len(buf)): - # Slice here to get bytes instead of an int with Python 3 - buf_char = buf[curr:curr + 1] - # Check if we're coming out of or entering an HTML tag - if buf_char == b'>': - in_tag = False - elif buf_char == b'<': - in_tag = True - - # If current character is not extended-ASCII and not alphabetic... - if buf_char < b'\x80' and not buf_char.isalpha(): - # ...and we're not in a tag - if curr > prev and not in_tag: - # Keep everything after last non-extended-ASCII, - # non-alphabetic character - filtered.extend(buf[prev:curr]) - # Output a space to delimit stretch we kept - filtered.extend(b' ') - prev = curr + 1 - - # If we're not in a tag... 
- if not in_tag: - # Keep everything after last non-extended-ASCII, non-alphabetic - # character - filtered.extend(buf[prev:]) - - return filtered diff --git a/pype/vendor/requests/packages/chardet/cli/__init__.py b/pype/vendor/requests/packages/chardet/cli/__init__.py deleted file mode 100644 index 8b13789179..0000000000 --- a/pype/vendor/requests/packages/chardet/cli/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pype/vendor/requests/packages/chardet/cli/chardetect.py b/pype/vendor/requests/packages/chardet/cli/chardetect.py deleted file mode 100644 index f0a4cc5d79..0000000000 --- a/pype/vendor/requests/packages/chardet/cli/chardetect.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" - -from __future__ import absolute_import, print_function, unicode_literals - -import argparse -import sys - -from chardet import __version__ -from chardet.compat import PY2 -from chardet.universaldetector import UniversalDetector - - -def description_of(lines, name='stdin'): - """ - Return a string describing the probable encoding of a file or - list of strings. - - :param lines: The lines to get the encoding of. - :type lines: Iterable of bytes - :param name: Name of file or collection of lines - :type name: str - """ - u = UniversalDetector() - for line in lines: - line = bytearray(line) - u.feed(line) - # shortcut out of the loop to save reading further - particularly useful if we read a BOM. - if u.done: - break - u.close() - result = u.result - if PY2: - name = name.decode(sys.getfilesystemencoding(), 'ignore') - if result['encoding']: - return '{0}: {1} with confidence {2}'.format(name, result['encoding'], - result['confidence']) - else: - return '{0}: no result'.format(name) - - -def main(argv=None): - """ - Handles command line arguments and gets things started. - - :param argv: List of arguments, as if specified on the command-line. - If None, ``sys.argv[1:]`` is used instead. - :type argv: list of str - """ - # Get command line arguments - parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings") - parser.add_argument('input', - help='File whose encoding we would like to determine. \ - (default: stdin)', - type=argparse.FileType('rb'), nargs='*', - default=[sys.stdin if PY2 else sys.stdin.buffer]) - parser.add_argument('--version', action='version', - version='%(prog)s {0}'.format(__version__)) - args = parser.parse_args(argv) - - for f in args.input: - if f.isatty(): - print("You are running chardetect interactively. Press " + - "CTRL-D twice at the start of a blank line to signal the " + - "end of your input. If you want help, run chardetect " + - "--help\n", file=sys.stderr) - print(description_of(f, f.name)) - - -if __name__ == '__main__': - main() diff --git a/pype/vendor/requests/packages/chardet/codingstatemachine.py b/pype/vendor/requests/packages/chardet/codingstatemachine.py deleted file mode 100644 index 68fba44f14..0000000000 --- a/pype/vendor/requests/packages/chardet/codingstatemachine.py +++ /dev/null @@ -1,88 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. 
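`filter_international_words`, deleted above, preprocesses a buffer so that only words containing at least one high byte survive, with any trailing marker byte collapsed to a single space. A self-contained mirror of that filter (rewritten here for illustration, not imported from the deleted module) together with a traced example::

    import re

    def keep_international_words(buf):
        """Keep words with at least one byte >= 0x80; a trailing marker
        byte (neither an ASCII letter nor a high byte) becomes a space."""
        filtered = bytearray()
        words = re.findall(
            b"[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?", buf)
        for word in words:
            filtered.extend(word[:-1])
            last_char = word[-1:]
            if not last_char.isalpha() and last_char < b"\x80":
                last_char = b" "
            filtered.extend(last_char)
        return bytes(filtered)

    # "abc" and "def" are pure ASCII and get dropped; the comma after the
    # high-byte word is rewritten to a single space.
    assert keep_international_words(b"abc caf\xe9, def") == b"caf\xe9 "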
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging - -from .enums import MachineState - - -class CodingStateMachine(object): - """ - A state machine to verify a byte sequence for a particular encoding. For - each byte the detector receives, it will feed that byte to every active - state machine available, one byte at a time. The state machine changes its - state based on its previous state and the byte it receives. There are 3 - states in a state machine that are of interest to an auto-detector: - - START state: This is the state to start with, or a legal byte sequence - (i.e. a valid code point) for character has been identified. - - ME state: This indicates that the state machine identified a byte sequence - that is specific to the charset it is designed for and that - there is no other possible encoding which can contain this byte - sequence. This will to lead to an immediate positive answer for - the detector. - - ERROR state: This indicates the state machine identified an illegal byte - sequence for that encoding. This will lead to an immediate - negative answer for this encoding. Detector will exclude this - encoding from consideration from here on. 
- """ - def __init__(self, sm): - self._model = sm - self._curr_byte_pos = 0 - self._curr_char_len = 0 - self._curr_state = None - self.logger = logging.getLogger(__name__) - self.reset() - - def reset(self): - self._curr_state = MachineState.START - - def next_state(self, c): - # for each byte we get its class - # if it is first byte, we also get byte length - byte_class = self._model['class_table'][c] - if self._curr_state == MachineState.START: - self._curr_byte_pos = 0 - self._curr_char_len = self._model['char_len_table'][byte_class] - # from byte's class and state_table, we get its next state - curr_state = (self._curr_state * self._model['class_factor'] - + byte_class) - self._curr_state = self._model['state_table'][curr_state] - self._curr_byte_pos += 1 - return self._curr_state - - def get_current_charlen(self): - return self._curr_char_len - - def get_coding_state_machine(self): - return self._model['name'] - - @property - def language(self): - return self._model['language'] diff --git a/pype/vendor/requests/packages/chardet/compat.py b/pype/vendor/requests/packages/chardet/compat.py deleted file mode 100644 index ddd74687c0..0000000000 --- a/pype/vendor/requests/packages/chardet/compat.py +++ /dev/null @@ -1,34 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Dan Blanchard -# Ian Cordasco -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - PY2 = True - PY3 = False - base_str = (str, unicode) - text_type = unicode -else: - PY2 = False - PY3 = True - base_str = (bytes, str) - text_type = str diff --git a/pype/vendor/requests/packages/chardet/constants.py b/pype/vendor/requests/packages/chardet/constants.py deleted file mode 100644 index e4d148b3c5..0000000000 --- a/pype/vendor/requests/packages/chardet/constants.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -_debug = 0 - -eDetecting = 0 -eFoundIt = 1 -eNotMe = 2 - -eStart = 0 -eError = 1 -eItsMe = 2 - -SHORTCUT_THRESHOLD = 0.95 diff --git a/pype/vendor/requests/packages/chardet/cp949prober.py b/pype/vendor/requests/packages/chardet/cp949prober.py deleted file mode 100644 index efd793abca..0000000000 --- a/pype/vendor/requests/packages/chardet/cp949prober.py +++ /dev/null @@ -1,49 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .chardistribution import EUCKRDistributionAnalysis -from .codingstatemachine import CodingStateMachine -from .mbcharsetprober import MultiByteCharSetProber -from .mbcssm import CP949_SM_MODEL - - -class CP949Prober(MultiByteCharSetProber): - def __init__(self): - super(CP949Prober, self).__init__() - self.coding_sm = CodingStateMachine(CP949_SM_MODEL) - # NOTE: CP949 is a superset of EUC-KR, so the distribution should be - # not different. - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "CP949" - - @property - def language(self): - return "Korean" diff --git a/pype/vendor/requests/packages/chardet/enums.py b/pype/vendor/requests/packages/chardet/enums.py deleted file mode 100644 index 0451207225..0000000000 --- a/pype/vendor/requests/packages/chardet/enums.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -All of the Enums that are used throughout the chardet package. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - - -class InputState(object): - """ - This enum represents the different states a universal detector can be in. - """ - PURE_ASCII = 0 - ESC_ASCII = 1 - HIGH_BYTE = 2 - - -class LanguageFilter(object): - """ - This enum represents the different language filters we can apply to a - ``UniversalDetector``. 
- """ - CHINESE_SIMPLIFIED = 0x01 - CHINESE_TRADITIONAL = 0x02 - JAPANESE = 0x04 - KOREAN = 0x08 - NON_CJK = 0x10 - ALL = 0x1F - CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL - CJK = CHINESE | JAPANESE | KOREAN - - -class ProbingState(object): - """ - This enum represents the different states a prober can be in. - """ - DETECTING = 0 - FOUND_IT = 1 - NOT_ME = 2 - - -class MachineState(object): - """ - This enum represents the different states a state machine can be in. - """ - START = 0 - ERROR = 1 - ITS_ME = 2 - - -class SequenceLikelihood(object): - """ - This enum represents the likelihood of a character following the previous one. - """ - NEGATIVE = 0 - UNLIKELY = 1 - LIKELY = 2 - POSITIVE = 3 - - @classmethod - def get_num_categories(cls): - """:returns: The number of likelihood categories in the enum.""" - return 4 - - -class CharacterCategory(object): - """ - This enum represents the different categories language models for - ``SingleByteCharsetProber`` put characters into. - - Anything less than CONTROL is considered a letter. - """ - UNDEFINED = 255 - LINE_BREAK = 254 - SYMBOL = 253 - DIGIT = 252 - CONTROL = 251 diff --git a/pype/vendor/requests/packages/chardet/escprober.py b/pype/vendor/requests/packages/chardet/escprober.py deleted file mode 100644 index c70493f2b1..0000000000 --- a/pype/vendor/requests/packages/chardet/escprober.py +++ /dev/null @@ -1,101 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .enums import LanguageFilter, ProbingState, MachineState -from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, - ISO2022KR_SM_MODEL) - - -class EscCharSetProber(CharSetProber): - """ - This CharSetProber uses a "code scheme" approach for detecting encodings, - whereby easily recognizable escape or shift sequences are relied on to - identify these encodings. 
- """ - - def __init__(self, lang_filter=None): - super(EscCharSetProber, self).__init__(lang_filter=lang_filter) - self.coding_sm = [] - if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: - self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) - self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) - if self.lang_filter & LanguageFilter.JAPANESE: - self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) - if self.lang_filter & LanguageFilter.KOREAN: - self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) - self.active_sm_count = None - self._detected_charset = None - self._detected_language = None - self._state = None - self.reset() - - def reset(self): - super(EscCharSetProber, self).reset() - for coding_sm in self.coding_sm: - if not coding_sm: - continue - coding_sm.active = True - coding_sm.reset() - self.active_sm_count = len(self.coding_sm) - self._detected_charset = None - self._detected_language = None - - @property - def charset_name(self): - return self._detected_charset - - @property - def language(self): - return self._detected_language - - def get_confidence(self): - if self._detected_charset: - return 0.99 - else: - return 0.00 - - def feed(self, byte_str): - for c in byte_str: - for coding_sm in self.coding_sm: - if not coding_sm or not coding_sm.active: - continue - coding_state = coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - coding_sm.active = False - self.active_sm_count -= 1 - if self.active_sm_count <= 0: - self._state = ProbingState.NOT_ME - return self.state - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - self._detected_charset = coding_sm.get_coding_state_machine() - self._detected_language = coding_sm.language - return self.state - - return self.state diff --git a/pype/vendor/requests/packages/chardet/escsm.py b/pype/vendor/requests/packages/chardet/escsm.py deleted file mode 100644 index 0069523a04..0000000000 --- a/pype/vendor/requests/packages/chardet/escsm.py +++ /dev/null @@ -1,246 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -HZ_CLS = ( -1,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,0,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,4,0,5,2,0, # 78 - 7f -1,1,1,1,1,1,1,1, # 80 - 87 -1,1,1,1,1,1,1,1, # 88 - 8f -1,1,1,1,1,1,1,1, # 90 - 97 -1,1,1,1,1,1,1,1, # 98 - 9f -1,1,1,1,1,1,1,1, # a0 - a7 -1,1,1,1,1,1,1,1, # a8 - af -1,1,1,1,1,1,1,1, # b0 - b7 -1,1,1,1,1,1,1,1, # b8 - bf -1,1,1,1,1,1,1,1, # c0 - c7 -1,1,1,1,1,1,1,1, # c8 - cf -1,1,1,1,1,1,1,1, # d0 - d7 -1,1,1,1,1,1,1,1, # d8 - df -1,1,1,1,1,1,1,1, # e0 - e7 -1,1,1,1,1,1,1,1, # e8 - ef -1,1,1,1,1,1,1,1, # f0 - f7 -1,1,1,1,1,1,1,1, # f8 - ff -) - -HZ_ST = ( -MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 - 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f - 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 - 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f -) - -HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -HZ_SM_MODEL = {'class_table': HZ_CLS, - 'class_factor': 6, - 'state_table': HZ_ST, - 'char_len_table': HZ_CHAR_LEN_TABLE, - 'name': "HZ-GB-2312", - 'language': 'Chinese'} - -ISO2022CN_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,3,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,4,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022CN_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f 
-MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 - 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f -) - -ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, - 'class_factor': 9, - 'state_table': ISO2022CN_ST, - 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, - 'name': "ISO-2022-CN", - 'language': 'Chinese'} - -ISO2022JP_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,2,2, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,7,0,0,0, # 20 - 27 -3,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -6,0,4,0,8,0,0,0, # 40 - 47 -0,9,5,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022JP_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 -) - 
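# ---- editor's note (illustrative sketch, not part of the deleted file) ----
# How these *_SM_MODEL dicts are consumed: CodingStateMachine.next_state()
# (deleted near the top of this diff) maps each input byte to a class through
# 'class_table', then indexes the packed 'state_table' with
# (current_state * 'class_factor' + byte_class). A minimal replay of that
# lookup, omitting the byte-position/char-length bookkeeping; 'run_sm' is an
# illustrative name, not from the codebase:

START, ERROR, ITS_ME = 0, 1, 2  # MachineState values from enums.py above

def run_sm(model, data):
    """Feed bytes through a chardet-style coding state machine."""
    state = START
    for byte in data:
        byte_class = model['class_table'][byte]
        state = model['state_table'][state * model['class_factor'] + byte_class]
        if state in (ERROR, ITS_ME):
            break  # definite verdict: wrong encoding, or positively identified
    return state

# Hand-traced example against the tables above: run_sm(ISO2022JP_SM_MODEL,
# b'\x1b$B') walks byte classes 1 -> 7 -> 4 and reaches ITS_ME once the
# ISO-2022-JP escape sequence "ESC $ B" is complete. EscCharSetProber.feed()
# earlier in this diff runs exactly this loop once per still-active model.
# ---------------------------------------------------------------------------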
-ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, - 'class_factor': 10, - 'state_table': ISO2022JP_ST, - 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, - 'name': "ISO-2022-JP", - 'language': 'Japanese'} - -ISO2022KR_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,3,0,0,0, # 20 - 27 -0,4,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,5,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022KR_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 -) - -ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, - 'class_factor': 6, - 'state_table': ISO2022KR_ST, - 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, - 'name': "ISO-2022-KR", - 'language': 'Korean'} - - diff --git a/pype/vendor/requests/packages/chardet/eucjpprober.py b/pype/vendor/requests/packages/chardet/eucjpprober.py deleted file mode 100644 index 20ce8f7d15..0000000000 --- a/pype/vendor/requests/packages/chardet/eucjpprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState, MachineState -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCJPDistributionAnalysis -from .jpcntx import EUCJPContextAnalysis -from .mbcssm import EUCJP_SM_MODEL - - -class EUCJPProber(MultiByteCharSetProber): - def __init__(self): - super(EUCJPProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) - self.distribution_analyzer = EUCJPDistributionAnalysis() - self.context_analyzer = EUCJPContextAnalysis() - self.reset() - - def reset(self): - super(EUCJPProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return "EUC-JP" - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char, char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/pype/vendor/requests/packages/chardet/euckrfreq.py b/pype/vendor/requests/packages/chardet/euckrfreq.py deleted file mode 100644 index b68078cb96..0000000000 --- a/pype/vendor/requests/packages/chardet/euckrfreq.py +++ /dev/null @@ -1,195 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology - -# 128 --> 0.79 -# 256 --> 0.92 -# 512 --> 0.986 -# 1024 --> 0.99944 -# 2048 --> 0.99999 -# -# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 -# Random Distribution Ration = 512 / (2350-512) = 0.279. -# -# Typical Distribution Ratio - -EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 - -EUCKR_TABLE_SIZE = 2352 - -# Char to FreqOrder table , -EUCKR_CHAR_TO_FREQ_ORDER = ( - 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, -1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, -1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, - 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, - 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, - 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, -1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, - 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, - 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, -1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, -1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, -1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, -1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, -1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, - 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, -1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, -1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, -1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, -1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, - 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, -1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, - 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, - 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, -1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, - 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, -1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, - 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, - 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, -1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, -1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, -1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, -1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, - 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, -1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, - 887, 640, 482, 174,1925, 69,1162, 
728,1428, 910,1926,1278,1218,1279, 386, 870, - 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, -1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, -1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, -1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, -1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, -1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, -1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, - 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, - 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, - 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, -1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, - 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, -1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, - 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, - 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, -2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, - 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, - 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, -2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, -2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, -2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, - 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, - 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, -2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, - 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, -1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, -2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, -1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, -2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, -2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, -1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, - 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, -2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, -2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, - 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, - 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, -2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, -1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, -2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, -2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, -2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, -2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, -2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, -2194, 613, 
424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, -1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, -2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, -2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, -2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, -2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, -2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, -1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, -1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, -2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, -1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, -2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, -1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, - 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, -2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, - 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, -2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, - 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, -2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, -2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, - 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, -2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, -1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, - 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, -1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, -2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, -1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, -2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, - 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, -2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, -1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, -2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, -1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, -2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, -1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, - 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, -2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, -2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, - 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, - 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, -1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, -1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, - 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, -2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 
997, -2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, - 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, - 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, - 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, -2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, - 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, - 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, -2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, -2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, - 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, -2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, -1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, - 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, -2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, -2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, -2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, - 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, - 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, - 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, -2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, -2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, -2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, -1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, -2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, - 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 -) - diff --git a/pype/vendor/requests/packages/chardet/euckrprober.py b/pype/vendor/requests/packages/chardet/euckrprober.py deleted file mode 100644 index 345a060d02..0000000000 --- a/pype/vendor/requests/packages/chardet/euckrprober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
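# ---- editor's note (illustrative sketch, not part of the deleted file) ----
# The EUCKR_CHAR_TO_FREQ_ORDER table that closes above ranks code points by
# corpus frequency; per its header comment, roughly 512 of the 2352 slots
# already cover ~99% of real Korean text. A hedged sketch of how a
# distribution analyser could turn those counts into a confidence score,
# following the ratio arithmetic in that header comment (the actual
# chardistribution.py logic is deleted elsewhere in this diff; the function
# name and the 0.99/0.01 bounds here are assumptions):

def distribution_confidence(freq_chars, total_chars,
                            typical_ratio=EUCKR_TYPICAL_DISTRIBUTION_RATIO):
    # freq_chars: chars whose frequency order fell inside the "common" band
    # total_chars: all multi-byte chars observed so far
    if total_chars <= 0:
        return 0.01
    if total_chars == freq_chars:
        return 0.99
    r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
    return min(r, 0.99)
# ---------------------------------------------------------------------------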
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCKRDistributionAnalysis -from .mbcssm import EUCKR_SM_MODEL - - -class EUCKRProber(MultiByteCharSetProber): - def __init__(self): - super(EUCKRProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-KR" - - @property - def language(self): - return "Korean" diff --git a/pype/vendor/requests/packages/chardet/euctwfreq.py b/pype/vendor/requests/packages/chardet/euctwfreq.py deleted file mode 100644 index ed7a995a3a..0000000000 --- a/pype/vendor/requests/packages/chardet/euctwfreq.py +++ /dev/null @@ -1,387 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# EUCTW frequency table -# Converted from big5 work -# by Taiwan's Mandarin Promotion Council -# - -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -# Char to FreqOrder table , -EUCTW_TABLE_SIZE = 5376 - -EUCTW_CHAR_TO_FREQ_ORDER = ( - 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 -3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 -1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 - 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 -3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 -4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 -7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 - 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 - 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 - 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 -2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 -1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 -3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 - 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 -3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 -2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 - 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 -3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 -1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 -7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 - 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 -7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 -1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 - 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 - 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 -3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 -3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 - 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 -2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 -2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 - 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 - 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 -3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 -1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 -1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 -1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 -2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 - 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 -4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 -1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 -7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 -2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 - 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 - 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 - 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 - 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 -7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 - 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 -1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 - 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 - 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 -7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 -1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 - 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 -3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 -4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 -3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 - 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 - 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 -1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 -4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 -3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 -3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 -2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 -7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 -3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 -7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 -1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 -2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 -1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 - 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 -1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 -4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 -3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 - 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 - 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 - 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 -2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 -7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 -1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 -2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 -1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 -1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 -7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 -7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 -7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 -3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 -4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 -1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 -7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 -2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 -7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 -3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 -3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 -7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 -2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 -7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 - 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 -4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 -2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 -7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 -3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 -2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 -2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 - 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 -2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 -1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 -1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 -2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 -1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 -7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 -7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 -2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 -4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 -1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 -7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 - 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 -4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 - 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 -2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 - 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 -1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 -1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 - 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 -3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 -3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 -1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 -3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 -7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 -7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 -1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 -2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 -1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 -3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 -2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 -3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 -2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 -4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 -4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 -3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 - 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 -3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 - 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 -3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 -3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 -3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 -1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 -7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 - 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 -7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 -1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 - 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 -4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 -3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 - 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 -2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 -2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 -3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 -1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 -4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 -2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 -1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 -1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 -2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 -3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 -1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 -7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 -1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 -4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 -1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 - 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 -1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 -3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 -3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 -2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 -1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 -4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 - 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 -7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 -2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 -3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 -4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 - 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 -7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 -7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 -1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 -4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 -3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 -2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 -3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 -2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 -1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 -4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 -3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 -3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 -2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 -4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 -7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 -3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 -2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 -3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 -1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 -2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 -3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 -4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 -2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 -2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 -7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 -1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 -2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 -1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 -3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 -4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 -2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 -3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 -3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 -2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 -4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 -2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 -3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 -4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 -7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 -3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 - 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 -1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 -4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 -1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 -4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 -7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 - 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 -7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 -2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 -1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 -1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 -3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 - 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 - 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 - 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 -3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 -2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 - 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 -7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 -1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 -3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 -7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 -1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 -7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 -4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 -1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 -2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 -2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 -4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 - 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 - 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 -3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 -3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 -1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 -2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 -7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 -1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 -1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 -3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 - 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 -1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 -4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 -7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 -2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 -3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 - 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 -1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 -2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 -2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 -7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 -7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 -7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 -2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 -2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 -1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 -4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 -3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 -3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 -4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 -4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 -2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 -2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 -7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 -4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 -7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 -2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 -1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 -3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 -4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 -2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 - 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 -2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 -1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 -2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 -2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 -4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 -7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 -1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 -3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 -7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 -1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 -8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 -2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 -8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 -2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 -2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 -8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 -8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 -8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 - 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 -8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 -4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 -3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 -8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 -1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 -8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 - 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 -1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 - 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 -4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 -1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 -4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 -1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 - 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 -3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 -4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 -8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 - 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 -3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 - 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 -2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 -) - diff --git a/pype/vendor/requests/packages/chardet/euctwprober.py b/pype/vendor/requests/packages/chardet/euctwprober.py deleted file mode 100644 index 35669cc4dd..0000000000 --- a/pype/vendor/requests/packages/chardet/euctwprober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCTWDistributionAnalysis
-from .mbcssm import EUCTW_SM_MODEL
-
-class EUCTWProber(MultiByteCharSetProber):
-    def __init__(self):
-        super(EUCTWProber, self).__init__()
-        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
-        self.distribution_analyzer = EUCTWDistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "EUC-TW"
-
-    @property
-    def language(self):
-        return "Taiwan"
diff --git a/pype/vendor/requests/packages/chardet/gb2312freq.py b/pype/vendor/requests/packages/chardet/gb2312freq.py
deleted file mode 100644
index 697837bd9a..0000000000
--- a/pype/vendor/requests/packages/chardet/gb2312freq.py
+++ /dev/null
@@ -1,283 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# GB2312 most frequently used character table -# -# Char to FreqOrder table , from hz6763 - -# 512 --> 0.79 -- 0.79 -# 1024 --> 0.92 -- 0.13 -# 2048 --> 0.98 -- 0.06 -# 6768 --> 1.00 -- 0.02 -# -# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 -# Random Distribution Ration = 512 / (3755 - 512) = 0.157 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR - -GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 - -GB2312_TABLE_SIZE = 3760 - -GB2312_CHAR_TO_FREQ_ORDER = ( -1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, -2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, -2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, - 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, -1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, -1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, - 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, -1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, -2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, -3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, - 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, -1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, - 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, -2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, - 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, -2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, -1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, -3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, - 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, -1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, - 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, -2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, -1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, -3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, -1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, -2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, -1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, - 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, -3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, -3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, - 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, -3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, - 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, -1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, -3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, -2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, -1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, - 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, -1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, -4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, - 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, -3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, -3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, - 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, -1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, -2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, -1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, -1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, - 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, -3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, -3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, -4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, - 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, -3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, -1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, -1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, -4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, - 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, - 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, -3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, -1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, - 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, -1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, -2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, - 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, - 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, - 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, -3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, -4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, -3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, - 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, -2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, -2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, -2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, - 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, -2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, - 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, - 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, - 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, -3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, -2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, -2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, -1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, - 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, -2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, - 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, - 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, -1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, -1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, - 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, - 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, -1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, -2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, -3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, -2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, -2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, -2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, -3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, -1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, -1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, -2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, -1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, -3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, -1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, -1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, -3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, - 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, -2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, -1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, -4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, -1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, -1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, -3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, -1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, - 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, - 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, -1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, - 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, -1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, -1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, - 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, -3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, -4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, -3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, -2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, -2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, -1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, -3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, -2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, -1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, -1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, - 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, -2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, -2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, -3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, -4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, -3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, - 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, -3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, -2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, -1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, - 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, - 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, -3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, -4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, -2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, -1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, -1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, - 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, -1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, -3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, - 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, - 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, -1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, - 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, -1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, - 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, -2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, - 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, -2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, -2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, -1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, -1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, -2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, - 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, -1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, -1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, -2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, -2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, -3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, -1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, -4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, - 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, - 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, -3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, -1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, - 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, -3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, -1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, -4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, -1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, -2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, -1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, - 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, -1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, -3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, - 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, -2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, - 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, -1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, -1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, -1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, -3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, -2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, -3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, -3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, -3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, - 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, -2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, - 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, -2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, - 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, -1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, - 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, - 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, -1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, -3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, -3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, -1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, -1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, -3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, -2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, -2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, -1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, -3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, - 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, -4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, -1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, -2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, -3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, -3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, -1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, - 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, - 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, -2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, - 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, -1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, - 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, -1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, -1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, -1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, -1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, -1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, - 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, - 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 -) - diff --git a/pype/vendor/requests/packages/chardet/gb2312prober.py b/pype/vendor/requests/packages/chardet/gb2312prober.py deleted file mode 100644 index 8446d2dd95..0000000000 --- a/pype/vendor/requests/packages/chardet/gb2312prober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import GB2312DistributionAnalysis
-from .mbcssm import GB2312_SM_MODEL
-
-class GB2312Prober(MultiByteCharSetProber):
-    def __init__(self):
-        super(GB2312Prober, self).__init__()
-        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
-        self.distribution_analyzer = GB2312DistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "GB2312"
-
-    @property
-    def language(self):
-        return "Chinese"
diff --git a/pype/vendor/requests/packages/chardet/hebrewprober.py b/pype/vendor/requests/packages/chardet/hebrewprober.py
deleted file mode 100644
index b0e1bf4926..0000000000
--- a/pype/vendor/requests/packages/chardet/hebrewprober.py
+++ /dev/null
@@ -1,292 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Shy Shalom
-# Portions created by the Initial Developer are Copyright (C) 2005
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import ProbingState
-
-# This prober doesn't actually recognize a language or a charset.
-# It is a helper prober for the use of the Hebrew model probers
-
-### General ideas of the Hebrew charset recognition ###
-#
-# Four main charsets exist in Hebrew:
-#    "ISO-8859-8" - Visual Hebrew
-#    "windows-1255" - Logical Hebrew
-#    "ISO-8859-8-I" - Logical Hebrew
-#    "x-mac-hebrew" - ?? Logical Hebrew ??
-#
-# Both "ISO" charsets use a completely identical set of code points, whereas
-# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
-# these code points. windows-1255 defines additional characters in the range
-# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
-# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
-# x-mac-hebrew defines similar additional code points but with a different
-# mapping.
-#
-# As far as an average Hebrew text with no diacritics is concerned, all four
-# charsets are identical with respect to code points. Meaning that for the
-# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
-# (including final letters).
-#
-# The dominant difference between these charsets is their directionality.
-# "Visual" directionality means that the text is ordered as if the renderer is
-# not aware of a BIDI rendering algorithm. The renderer sees the text and
-# draws it from left to right. The text itself when ordered naturally is read
-# backwards. A buffer of Visual Hebrew generally looks like so:
-# "[last word of first line spelled backwards] [whole line ordered backwards
-# and spelled backwards] [first word of first line spelled backwards]
-# [end of line] [last word of second line] ... etc."
-# Adding punctuation marks, numbers and English text to visual text is
-# naturally also "visual" and from left to right.
-#
-# "Logical" directionality means the text is ordered "naturally" according to
-# the order it is read. It is the responsibility of the renderer to display
-# the text from right to left. A BIDI algorithm is used to place general
-# punctuation marks, numbers and English text in the text.
-#
-# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
-# what little evidence I could find, it seems that its general directionality
-# is Logical.
-#
-# To sum up all of the above, the Hebrew probing mechanism knows about two
-# charsets:
-#    Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
-#       backwards while line order is natural. For charset recognition
-#       purposes the line order is unimportant (In fact, for this
-#       implementation, even word order is unimportant).
-#    Logical Hebrew - "windows-1255" - normal, naturally ordered text.
-#
-# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
-#    specifically identified.
-# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
-#    that contains special punctuation marks or diacritics is displayed with
-#    some unconverted characters showing as question marks. This problem might
-#    be corrected using another model prober for x-mac-hebrew. Due to the fact
-#    that x-mac-hebrew texts are so rare, writing another model prober isn't
-#    worth the effort and performance hit.
-#
-#### The Prober ####
-#
-# The prober is divided between two SBCharSetProbers and a HebrewProber,
-# all of which are managed, created, fed data, inquired and deleted by the
-# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
-# fact some kind of Hebrew, Logical or Visual. The final decision between the
-# two is made by the HebrewProber by combining final-letter scores with the
-# scores of the two SBCharSetProbers to produce a final answer.
-#
-# The SBCSGroupProber is responsible for stripping the original text of HTML
-# tags, English characters, numbers, low-ASCII punctuation characters, spaces
-# and new lines. It reduces any sequence of such characters to a single space.
-# The buffer fed to each prober in the SBCS group prober is pure text in
-# high-ASCII.
-# The two SBCharSetProbers (model probers) share the same language model:
-# Win1255Model.
-# The first SBCharSetProber uses the model normally as any other
-# SBCharSetProber does, to recognize windows-1255, upon which this model was
-# built. The second SBCharSetProber is told to make the pair-of-letter
-# lookup in the language model backwards. This in practice exactly simulates
-# a visual Hebrew model using the windows-1255 logical Hebrew model.
-#
-# The HebrewProber does not use any language model. All it does is look for
-# final-letter evidence suggesting the text is either logical Hebrew or
-# visual Hebrew. Disjointed from the model probers, the results of the
-# HebrewProber alone are meaningless. HebrewProber always returns 0.00 as
-# confidence since it never identifies a charset by itself. Instead, the
-# pointer to the HebrewProber is passed to the model probers as a helper
-# "Name Prober".
-# When the Group prober receives a positive identification from any prober,
-# it asks for the name of the charset identified. If the prober queried is a
-# Hebrew model prober, the model prober forwards the call to the
-# HebrewProber to make the final decision. In the HebrewProber, the
-# decision is made according to the final-letter scores maintained and both
-# model probers' scores. The answer is returned in the form of the name of
-# the charset identified, either "windows-1255" or "ISO-8859-8".
-
-class HebrewProber(CharSetProber):
-    # windows-1255 / ISO-8859-8 code points of interest
-    FINAL_KAF = 0xea
-    NORMAL_KAF = 0xeb
-    FINAL_MEM = 0xed
-    NORMAL_MEM = 0xee
-    FINAL_NUN = 0xef
-    NORMAL_NUN = 0xf0
-    FINAL_PE = 0xf3
-    NORMAL_PE = 0xf4
-    FINAL_TSADI = 0xf5
-    NORMAL_TSADI = 0xf6
-
-    # Minimum Visual vs Logical final letter score difference.
-    # If the difference is below this, don't rely solely on the final letter
-    # score distance.
-    MIN_FINAL_CHAR_DISTANCE = 5
-
-    # Minimum Visual vs Logical model score difference.
-    # If the difference is below this, don't rely at all on the model score
-    # distance.
-    MIN_MODEL_DISTANCE = 0.01
-
-    VISUAL_HEBREW_NAME = "ISO-8859-8"
-    LOGICAL_HEBREW_NAME = "windows-1255"
-
-    def __init__(self):
-        super(HebrewProber, self).__init__()
-        self._final_char_logical_score = None
-        self._final_char_visual_score = None
-        self._prev = None
-        self._before_prev = None
-        self._logical_prober = None
-        self._visual_prober = None
-        self.reset()
-
-    def reset(self):
-        self._final_char_logical_score = 0
-        self._final_char_visual_score = 0
-        # The last two characters seen in the previous buffer;
-        # self._prev and self._before_prev are initialized to space in order
-        # to simulate a word delimiter at the beginning of the data
-        self._prev = ' '
-        self._before_prev = ' '
-        # These probers are owned by the group prober.
-
-    def set_model_probers(self, logicalProber, visualProber):
-        self._logical_prober = logicalProber
-        self._visual_prober = visualProber
-
-    def is_final(self, c):
-        return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
-                     self.FINAL_PE, self.FINAL_TSADI]
-
-    def is_non_final(self, c):
-        # The normal Tsadi is not a good Non-Final letter due to words like
-        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
-        # apostrophe is converted to a space in FilterWithoutEnglishLetters
-        # causing the Non-Final tsadi to appear at an end of a word even
-        # though this is not the case in the original text.
-        # The letters Pe and Kaf rarely display a related behavior of not being
-        # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
-        # for example legally end with a Non-Final Pe or Kaf. However, the
-        # benefit of these letters as Non-Final letters outweighs the damage
-        # since these words are quite rare.
-        return c in [self.NORMAL_KAF, self.NORMAL_MEM,
-                     self.NORMAL_NUN, self.NORMAL_PE]
-
-    def feed(self, byte_str):
-        # Final letter analysis for logical-visual decision.
-        # Look for evidence that the received buffer is either logical Hebrew
-        # or visual Hebrew.
-        # The following cases are checked:
-        # 1) A word longer than 1 letter, ending with a final letter. This is
-        #    an indication that the text is laid out "naturally" since the
-        #    final letter really appears at the end. +1 for logical score.
-        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
-        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
-        #    should not end with the Non-Final form of that letter. Exceptions
-        #    to this rule are mentioned above in is_non_final(). This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        # 3) A word longer than 1 letter, starting with a final letter. Final
-        #    letters should not appear at the beginning of a word. This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        #
-        # The visual score and logical score are accumulated throughout the
-        # text and are finally checked against each other in the charset_name
-        # property.
-        # No checking for final letters in the middle of words is done since
-        # that case is not an indication for either Logical or Visual text.
-        #
-        # We automatically filter out all 7-bit characters (replace them with
-        # spaces) so the word boundary detection works properly. [MAP]
-
-        if self.state == ProbingState.NOT_ME:
-            # Both model probers say it's not them. No reason to continue.
-            return ProbingState.NOT_ME
-
-        byte_str = self.filter_high_byte_only(byte_str)
-
-        for cur in byte_str:
-            if cur == ' ':
-                # We stand on a space - a word just ended
-                if self._before_prev != ' ':
-                    # next-to-last char was not a space so self._prev is not a
-                    # 1 letter word
-                    if self.is_final(self._prev):
-                        # case (1) [-2:not space][-1:final letter][cur:space]
-                        self._final_char_logical_score += 1
-                    elif self.is_non_final(self._prev):
-                        # case (2) [-2:not space][-1:Non-Final letter]
-                        # [cur:space]
-                        self._final_char_visual_score += 1
-            else:
-                # Not standing on a space
-                if ((self._before_prev == ' ') and
-                        (self.is_final(self._prev)) and (cur != ' ')):
-                    # case (3) [-2:space][-1:final letter][cur:not space]
-                    self._final_char_visual_score += 1
-            self._before_prev = self._prev
-            self._prev = cur
-
-        # Forever detecting, till the end or until both model probers return
-        # ProbingState.NOT_ME (handled above)
-        return ProbingState.DETECTING
-
-    @property
-    def charset_name(self):
-        # Make the decision: is it Logical or Visual?
-        # If the final letter score distance is dominant enough, rely on it.
-        finalsub = self._final_char_logical_score - self._final_char_visual_score
-        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
-            return self.LOGICAL_HEBREW_NAME
-        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
-            return self.VISUAL_HEBREW_NAME
-
-        # It's not dominant enough, try to rely on the model scores instead.
-        modelsub = (self._logical_prober.get_confidence() -
-                    self._visual_prober.get_confidence())
-        if modelsub > self.MIN_MODEL_DISTANCE:
-            return self.LOGICAL_HEBREW_NAME
-        if modelsub < -self.MIN_MODEL_DISTANCE:
-            return self.VISUAL_HEBREW_NAME
-
-        # Still no good, back to final letter distance, maybe it'll save the
-        # day.
-        if finalsub < 0.0:
-            return self.VISUAL_HEBREW_NAME
-
-        # (finalsub > 0 - Logical) or (don't know what to do) default to
-        # Logical.
-        return self.LOGICAL_HEBREW_NAME
-
-    @property
-    def language(self):
-        return 'Hebrew'
-
-    @property
-    def state(self):
-        # Remain active as long as any of the model probers are active.
-        if (self._logical_prober.state == ProbingState.NOT_ME) and \
-           (self._visual_prober.state == ProbingState.NOT_ME):
-            return ProbingState.NOT_ME
-        return ProbingState.DETECTING
diff --git a/pype/vendor/requests/packages/chardet/jisfreq.py b/pype/vendor/requests/packages/chardet/jisfreq.py
deleted file mode 100644
index 83fc082b54..0000000000
--- a/pype/vendor/requests/packages/chardet/jisfreq.py
+++ /dev/null
@@ -1,325 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M text materials, including literature and computer
-# technology
-#
-# Japanese frequency table, applied to both S-JIS and EUC-JP
-# They are sorted in order.
- -# 128 --> 0.77094 -# 256 --> 0.85710 -# 512 --> 0.92635 -# 1024 --> 0.97130 -# 2048 --> 0.99431 -# -# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 -# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 -# -# Typical Distribution Ratio, 25% of IDR - -JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 - -# Char to FreqOrder table , -JIS_TABLE_SIZE = 4368 - -JIS_CHAR_TO_FREQ_ORDER = ( - 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 -3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 -1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 -2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 -2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 -5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 -1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 -5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 -5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 -5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 -5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 -5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 -5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 -1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 -1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 -1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 -2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 -3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 -3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 - 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 - 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 -1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 - 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 -5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 - 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 - 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 - 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 - 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 - 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 -5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 -5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 -5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 -4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 -5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 -5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 -5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 -5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
-5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 -5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 -5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 -5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 -5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 -3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 -5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 -5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 -5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 -5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 -5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 -5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 -5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 -5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 -5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 -5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 -5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 -5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 -5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 -5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 -5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 -5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 -5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 -5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 -5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 -5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 -5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 -5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 -5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 -5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 -5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 -5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 -5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 -5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 -5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 -5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 -5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 -5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
-5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 -5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 -5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 -5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 -6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 -6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 -6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 -6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 -6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 -6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 -6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 -6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 -4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 - 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 - 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 -1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 -1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 - 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 -3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 -3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 - 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 -3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 -3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 - 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 -2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 - 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 -3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 -1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 - 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 -1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 - 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 -2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 -2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 -2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 -2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 -1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 -1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 -1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 -1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 -2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 -1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 -2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 -1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 -1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 -1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 -1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 -1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 -1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 - 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 - 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 -1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 -2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 -2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 -2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 -3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 -3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 - 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 -3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 -1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 - 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 -2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 -1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 - 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 -3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 -4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 -2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 -1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 -2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 -1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 - 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 - 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 -1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 -2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 -2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 -2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 -3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 -1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 -2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 - 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 - 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 - 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 -1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 -2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 - 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 -1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 -1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 - 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 -1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 -1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 -1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 - 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 -2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 - 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 -2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 -3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 -2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 -1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 -6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 -1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 -2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 -1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 - 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 - 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 -3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 -3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 -1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 -1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 -1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 -1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 - 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 - 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 -2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 - 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 -3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 -2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 - 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 -1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 -2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 - 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 -1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 - 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 -4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 -2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 -1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 - 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 -1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 -2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 - 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 -6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 -1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 -1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 -2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 -3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 - 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 -3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 -1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 - 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 -1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 - 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 -3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 - 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 -2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 - 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 -4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 -2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 -1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 -1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 -1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 - 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 -1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 -3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 -1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 -3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 - 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 - 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 - 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 -2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 -1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 - 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 -1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 - 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 -1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 - 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 - 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 - 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 -1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 -1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 -2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 -4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 - 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 -1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 - 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 -1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 -3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 -1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 -2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 -2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 -1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 -1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 -2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 - 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 -2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 -1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 -1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 -1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 -1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 -3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 -2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 -2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 - 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 -3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 -3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 -1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 -2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 -1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 -2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 -) - - diff --git a/pype/vendor/requests/packages/chardet/jpcntx.py b/pype/vendor/requests/packages/chardet/jpcntx.py deleted file mode 100644 index 20044e4bc8..0000000000 --- a/pype/vendor/requests/packages/chardet/jpcntx.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK 
######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -# This is hiragana 2-char sequence table, the number in each cell represents its frequency category -jp2CharContext = ( -(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), -(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), -(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), -(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), -(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), -(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), -(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
-(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), -(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), -(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), -(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), -(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), -(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), -(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), -(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), -(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), -(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), -(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), -(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), -(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), -(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), -(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), -(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), -(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), -(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), -(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), -(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
-(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), -(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), -(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), -(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), -(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), -(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), -(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), -(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), -(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), -(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), -(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), -(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), -(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), -(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), -(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), -(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), -(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), -(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), -(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), -(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
-(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), -(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), -(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), -(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), -(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), -(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), -(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), -(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), -(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), -(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), -(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), -(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), -(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), -(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), -(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), -(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), -(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), -(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), -(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), -(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
-(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), -(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), -(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), -(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), -(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), -(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), -(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), -) - -class JapaneseContextAnalysis(object): - NUM_OF_CATEGORY = 6 - DONT_KNOW = -1 - ENOUGH_REL_THRESHOLD = 100 - MAX_REL_THRESHOLD = 1000 - MINIMUM_DATA_THRESHOLD = 4 - - def __init__(self): - self._total_rel = None - self._rel_sample = None - self._need_to_skip_char_num = None - self._last_char_order = None - self._done = None - self.reset() - - def reset(self): - self._total_rel = 0 # total sequence received - # category counters, each integer counts sequence in its category - self._rel_sample = [0] * self.NUM_OF_CATEGORY - # if last byte in current buffer is not the last byte of a character, - # we need to know how many bytes to skip in next buffer - self._need_to_skip_char_num = 0 - self._last_char_order = -1 # The order of previous char - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - - def feed(self, byte_str, num_bytes): - if self._done: - return - - # The buffer we got is byte oriented, and a character may span in more than one - # buffers. In case the last one or two byte in last buffer is not - # complete, we record how many byte needed to complete that character - # and skip these bytes here. We can choose to record those bytes as - # well and analyse the character once it is complete, but since a - # character will not make much difference, by simply skipping - # this character will simply our logic and improve performance. 
- i = self._need_to_skip_char_num - while i < num_bytes: - order, char_len = self.get_order(byte_str[i:i + 2]) - i += char_len - if i > num_bytes: - self._need_to_skip_char_num = i - num_bytes - self._last_char_order = -1 - else: - if (order != -1) and (self._last_char_order != -1): - self._total_rel += 1 - if self._total_rel > self.MAX_REL_THRESHOLD: - self._done = True - break - self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 - self._last_char_order = order - - def got_enough_data(self): - return self._total_rel > self.ENOUGH_REL_THRESHOLD - - def get_confidence(self): - # This is just one way to calculate confidence. It works well for me. - if self._total_rel > self.MINIMUM_DATA_THRESHOLD: - return (self._total_rel - self._rel_sample[0]) / self._total_rel - else: - return self.DONT_KNOW - - def get_order(self, byte_str): - return -1, 1 - -class SJISContextAnalysis(JapaneseContextAnalysis): - def __init__(self): - super(SJISContextAnalysis, self).__init__() - self._charset_name = "SHIFT_JIS" - - @property - def charset_name(self): - return self._charset_name - - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): - char_len = 2 - if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): - self._charset_name = "CP932" - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 202) and (0x9F <= second_char <= 0xF1): - return second_char - 0x9F, char_len - - return -1, char_len - -class EUCJPContextAnalysis(JapaneseContextAnalysis): - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): - char_len = 2 - elif first_char == 0x8F: - char_len = 3 - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): - return second_char - 0xA1, char_len - - return -1, char_len - - diff --git a/pype/vendor/requests/packages/chardet/langbulgarianmodel.py b/pype/vendor/requests/packages/chardet/langbulgarianmodel.py deleted file mode 100644 index 2aa4fb2e22..0000000000 --- a/pype/vendor/requests/packages/chardet/langbulgarianmodel.py +++ /dev/null @@ -1,228 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -# this table is modified base on win1251BulgarianCharToOrderMap, so -# only number <64 is sure valid - -Latin5_BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 -210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 - 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 - 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 -) - -win1251BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 -221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 - 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 - 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 96.9392% -# first 1024 sequences:3.0618% -# rest sequences: 0.2992% -# negative sequences: 0.0020% -BulgarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, -3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, -0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, -0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, -0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, -0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, -0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, -2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, -3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, -1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, -3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, -1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, -2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, -2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, -3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, -1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, -2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, -2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, -1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, -2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, -2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, -2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, -1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, -2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, -1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, -3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, -1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, -3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, -1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, -2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, -1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, -2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, -1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, -2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, -1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, -2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, -1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, -0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, -1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, -1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, -1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, -0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, -1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, -1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -) - -Latin5BulgarianModel = { - 'char_to_order_map': Latin5_BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Bulgairan', -} - -Win1251BulgarianModel = { - 'char_to_order_map': win1251BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Bulgarian', -} diff --git a/pype/vendor/requests/packages/chardet/langcyrillicmodel.py b/pype/vendor/requests/packages/chardet/langcyrillicmodel.py deleted file mode 100644 index e5f9a1fd19..0000000000 --- a/pype/vendor/requests/packages/chardet/langcyrillicmodel.py +++ /dev/null @@ -1,333 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# KOI8-R language model -# Character Mapping Table: -KOI8R_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 -223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 -238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 - 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 - 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 - 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 - 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 -) - -win1251_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -) - -latin5_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -macCyrillic_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, -) - -IBM855_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, -206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, - 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, -220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, -230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, - 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, - 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, -250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, -) - -IBM866_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 97.6601% -# first 1024 sequences: 2.3389% -# rest sequences: 0.1237% -# negative sequences: 0.0009% -RussianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, -1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, -1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, -2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, -1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, -3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, -1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, -2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, -1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, -1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, -1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, -1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, -3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, -1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, -2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, -1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, -2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, -1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, -1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, -1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, -3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, -3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, -1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, -1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, -0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, -1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, -1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, -0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, -1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, -2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, -1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, -1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, -2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, -1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, -1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, -1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, -0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, -0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, -0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, -2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, -0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -) - -Koi8rModel = { - 'char_to_order_map': KOI8R_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "KOI8-R", - 'language': 'Russian', -} - -Win1251CyrillicModel = { - 'char_to_order_map': win1251_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Russian', -} - -Latin5CyrillicModel = { - 'char_to_order_map': latin5_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Russian', -} - -MacCyrillicModel = { - 'char_to_order_map': macCyrillic_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "MacCyrillic", - 'language': 'Russian', -} - -Ibm866Model = { - 'char_to_order_map': IBM866_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM866", - 'language': 'Russian', -} - -Ibm855Model = { - 'char_to_order_map': IBM855_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM855", - 'language': 'Russian', -} diff --git a/pype/vendor/requests/packages/chardet/langgreekmodel.py b/pype/vendor/requests/packages/chardet/langgreekmodel.py deleted file mode 100644 index 533222166c..0000000000 --- a/pype/vendor/requests/packages/chardet/langgreekmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin7_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -win1253_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.2851% -# first 1024 sequences:1.7001% -# rest sequences: 0.0359% -# negative sequences: 0.0148% -GreekLangModel = ( -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, -2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, -2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, -2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, -0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, -3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, -2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, -0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, -0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, -0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, -0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, -0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, -0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, -0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, -0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, -0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, -0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, -0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, -0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, -0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, -0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
-0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, -0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, -0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, -0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, -0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin7GreekModel = { - 'char_to_order_map': Latin7_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-7", - 'language': 'Greek', -} - -Win1253GreekModel = { - 'char_to_order_map': win1253_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "windows-1253", - 'language': 'Greek', -} diff --git a/pype/vendor/requests/packages/chardet/langhebrewmodel.py b/pype/vendor/requests/packages/chardet/langhebrewmodel.py deleted file mode 100644 index 58f4c875ec..0000000000 --- a/pype/vendor/requests/packages/chardet/langhebrewmodel.py +++ /dev/null @@ -1,200 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Simon Montagu -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Shoshannah Forbes - original C code (?) -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Windows-1255 language model -# Character Mapping Table: -WIN1255_CHAR_TO_ORDER_MAP = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 - 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 -253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 - 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 -124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, -215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, - 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, -106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, - 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, -238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, - 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, - 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.4004% -# first 1024 sequences: 1.5981% -# rest sequences: 0.087% -# negative sequences: 0.0015% -HEBREW_LANG_MODEL = ( -0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, -3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, -1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, -1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, -1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, -1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, -0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, -0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, -0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, -0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, -0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, -0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, -0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, -0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, -0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, -0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, -0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, -0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, -0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, -1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, -1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, -2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, -0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, -0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, -) - -Win1255HebrewModel = { - 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, - 'precedence_matrix': HEBREW_LANG_MODEL, - 'typical_positive_ratio': 0.984004, - 'keep_english_letter': False, - 'charset_name': "windows-1255", - 'language': 'Hebrew', -} diff --git 
a/pype/vendor/requests/packages/chardet/langhungarianmodel.py b/pype/vendor/requests/packages/chardet/langhungarianmodel.py deleted file mode 100644 index bb7c095e1e..0000000000 --- a/pype/vendor/requests/packages/chardet/langhungarianmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin2_HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, -175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, - 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, - 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, -245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -win1250HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, -177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, 
- 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, - 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, -245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 94.7368% -# first 1024 sequences:5.2623% -# rest sequences: 0.8894% -# negative sequences: 0.0009% -HungarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, -3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, -0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, -1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, -1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, -3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, -2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, -2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, -2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, -2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, -1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, -1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, -3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, -1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, -1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, -2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, -2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, -2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, -3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, -1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, -1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, -1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, -2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, -1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, -2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
-2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, -1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, -1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, -0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, -2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, -2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, -1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, -1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, -2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, -2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, -2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, -1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, -0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -) - -Latin2HungarianModel = { - 'char_to_order_map': Latin2_HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-2", - 'language': 'Hungarian', -} - -Win1250HungarianModel = { - 'char_to_order_map': win1250HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "windows-1250", - 'language': 'Hungarian', -} diff --git a/pype/vendor/requests/packages/chardet/langthaimodel.py b/pype/vendor/requests/packages/chardet/langthaimodel.py deleted file mode 100644 index 15f94c2df0..0000000000 --- a/pype/vendor/requests/packages/chardet/langthaimodel.py +++ /dev/null @@ -1,199 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# The following result for thai was collected from a limited sample (1M). - -# Character Mapping Table: -TIS620CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 -188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 -253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 - 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 -209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, -223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, -236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, - 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, - 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, - 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, - 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, - 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 92.6386% -# first 1024 sequences:7.3177% -# rest sequences: 1.0230% -# negative sequences: 0.0436% -ThaiLangModel = ( -0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, -0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, -3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, -0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, -3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, -3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, -3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, -3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, -2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, -3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, -1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, -3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, -1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
-0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, -0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, -2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, -0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, -3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, -2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, -3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, -2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, -3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, -3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, -3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, -3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, -1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, -0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, -0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, -3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, -3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, -1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, -3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, -3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, -0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, -0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, -1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, -1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, -3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, -0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, -3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, -0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, -0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, -0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, -0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, -0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, -0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, -0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, -0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, -3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, -2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, -0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, -3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, -1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, -1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -TIS620ThaiModel = { - 'char_to_order_map': TIS620CharToOrderMap, - 'precedence_matrix': ThaiLangModel, - 'typical_positive_ratio': 0.926386, - 'keep_english_letter': False, - 'charset_name': "TIS-620", - 'language': 'Thai', -} diff --git a/pype/vendor/requests/packages/chardet/langturkishmodel.py b/pype/vendor/requests/packages/chardet/langturkishmodel.py deleted file mode 100644 index a427a45739..0000000000 --- a/pype/vendor/requests/packages/chardet/langturkishmodel.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Özgür Baskın - Turkish Language Model -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin5_TurkishCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, - 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, -255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, - 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, -180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, -164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, -150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, - 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, -124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, - 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, - 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, - 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, -) - -TurkishLangModel = ( -3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, -3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, -3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, -3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, -3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
-3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, -2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, -3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, -1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, -3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, -3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, -2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, -2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, -3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, -0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, -3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, -3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, -0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, -1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, -3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, -1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, -3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, -0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, -3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, -1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, -1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, -2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, -2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, -3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, -1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, -0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, -3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, 
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, -0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, -3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, -1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, -2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, -0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, -3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, -0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, -0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, -3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, -0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, -0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, -3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, -0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, -3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, -0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, -0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, -3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, -0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, -3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, -0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, -0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, -0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, -0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, -0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, -0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, -1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, -0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, -0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, -3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, -0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, -2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, -2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, -0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 
-2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, -0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin5TurkishModel = { - 'char_to_order_map': Latin5_TurkishCharToOrderMap, - 'precedence_matrix': TurkishLangModel, - 'typical_positive_ratio': 0.970290, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-9", - 'language': 'Turkish', -} diff --git a/pype/vendor/requests/packages/chardet/latin1prober.py b/pype/vendor/requests/packages/chardet/latin1prober.py deleted file mode 100644 index 7d1e8c20fb..0000000000 --- a/pype/vendor/requests/packages/chardet/latin1prober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
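Note on the single-byte language models removed above (langthaimodel.py, langturkishmodel.py): each model pairs a 256-entry char_to_order_map with a flattened 64x64 precedence_matrix of bigram likelihood categories (0-3). A minimal sketch of how a prober consumes such a model — the model dict is assumed to have the same shape as the deleted Latin5TurkishModel/TIS620ThaiModel:

    SAMPLE_SIZE = 64  # only the 64 most frequent characters index the matrix

    def bigram_likelihood_counts(byte_str, model):
        # Tally bigram likelihood categories (0 = negative .. 3 = positive).
        order_map = model['char_to_order_map']
        matrix = model['precedence_matrix']
        counters = [0, 0, 0, 0]
        last_order = 255  # sentinel: no previous in-sample character
        for byte in bytearray(byte_str):
            order = order_map[byte]
            if last_order < SAMPLE_SIZE and order < SAMPLE_SIZE:
                counters[matrix[last_order * SAMPLE_SIZE + order]] += 1
            last_order = order
        return counters

The deleted SingleByteCharSetProber (further down in this diff) feeds counters like these into its confidence score.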
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -FREQ_CAT_NUM = 4 - -UDF = 0 # undefined -OTH = 1 # other -ASC = 2 # ascii capital letter -ASS = 3 # ascii small letter -ACV = 4 # accent capital vowel -ACO = 5 # accent capital other -ASV = 6 # accent small vowel -ASO = 7 # accent small other -CLASS_NUM = 8 # total classes - -Latin1_CharToClass = ( - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F - OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 - ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F - OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 - ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F - OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 - OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F - UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 - OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF - ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 - ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF - ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 - ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF - ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 - ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF - ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 - ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF -) - -# 0 : illegal -# 1 : very unlikely -# 2 : normal -# 3 : very likely -Latin1ClassModel = ( -# UDF OTH ASC ASS ACV ACO ASV ASO - 0, 0, 0, 0, 0, 0, 0, 0, # UDF - 0, 3, 3, 3, 3, 3, 3, 3, # OTH - 0, 3, 3, 3, 3, 3, 3, 3, # ASC - 0, 3, 3, 3, 1, 1, 3, 3, # ASS - 0, 3, 3, 3, 1, 2, 1, 2, # ACV - 0, 3, 3, 3, 3, 3, 3, 3, # ACO - 0, 3, 1, 3, 1, 1, 1, 3, # ASV - 0, 3, 1, 3, 1, 1, 3, 3, # ASO -) - - -class Latin1Prober(CharSetProber): - def __init__(self): - super(Latin1Prober, self).__init__() - self._last_char_class = None - self._freq_counter = None - self.reset() - - def reset(self): - self._last_char_class = OTH - self._freq_counter = [0] * FREQ_CAT_NUM - CharSetProber.reset(self) - - @property - def charset_name(self): - return "ISO-8859-1" - - @property - def language(self): - return "" - - def feed(self, byte_str): - byte_str = self.filter_with_english_letters(byte_str) - for c in byte_str: - char_class = Latin1_CharToClass[c] - freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) - + char_class] - if freq == 0: - self._state = ProbingState.NOT_ME - break - self._freq_counter[freq] += 1 - self._last_char_class = char_class - - return self.state - - def get_confidence(self): - if self.state == 
ProbingState.NOT_ME: - return 0.01 - - total = sum(self._freq_counter) - if total < 0.01: - confidence = 0.0 - else: - confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) - / total) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. - confidence = confidence * 0.73 - return confidence diff --git a/pype/vendor/requests/packages/chardet/mbcharsetprober.py b/pype/vendor/requests/packages/chardet/mbcharsetprober.py deleted file mode 100644 index 6256ecfd1e..0000000000 --- a/pype/vendor/requests/packages/chardet/mbcharsetprober.py +++ /dev/null @@ -1,91 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState - - -class MultiByteCharSetProber(CharSetProber): - """ - MultiByteCharSetProber - """ - - def __init__(self, lang_filter=None): - super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) - self.distribution_analyzer = None - self.coding_sm = None - self._last_char = [0, 0] - - def reset(self): - super(MultiByteCharSetProber, self).reset() - if self.coding_sm: - self.coding_sm.reset() - if self.distribution_analyzer: - self.distribution_analyzer.reset() - self._last_char = [0, 0] - - @property - def charset_name(self): - raise NotImplementedError - - @property - def language(self): - raise NotImplementedError - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.distribution_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - 
return self.state - - def get_confidence(self): - return self.distribution_analyzer.get_confidence() diff --git a/pype/vendor/requests/packages/chardet/mbcsgroupprober.py b/pype/vendor/requests/packages/chardet/mbcsgroupprober.py deleted file mode 100644 index 530abe75e0..0000000000 --- a/pype/vendor/requests/packages/chardet/mbcsgroupprober.py +++ /dev/null @@ -1,54 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .utf8prober import UTF8Prober -from .sjisprober import SJISProber -from .eucjpprober import EUCJPProber -from .gb2312prober import GB2312Prober -from .euckrprober import EUCKRProber -from .cp949prober import CP949Prober -from .big5prober import Big5Prober -from .euctwprober import EUCTWProber - - -class MBCSGroupProber(CharSetGroupProber): - def __init__(self, lang_filter=None): - super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) - self.probers = [ - UTF8Prober(), - SJISProber(), - EUCJPProber(), - GB2312Prober(), - EUCKRProber(), - CP949Prober(), - Big5Prober(), - EUCTWProber() - ] - self.reset() diff --git a/pype/vendor/requests/packages/chardet/mbcssm.py b/pype/vendor/requests/packages/chardet/mbcssm.py deleted file mode 100644 index 8360d0f284..0000000000 --- a/pype/vendor/requests/packages/chardet/mbcssm.py +++ /dev/null @@ -1,572 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
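The MBCSGroupProber removed above illustrates chardet's group-prober pattern: fan each chunk of bytes out to several child probers and report the most confident one. A simplified sketch — the real CharSetGroupProber (not shown in this diff) also tracks per-prober active flags and shortcut states, so this is an illustration, not the vendored code:

    class TinyGroupProber:
        def __init__(self, probers):
            self.probers = probers

        def feed(self, byte_str):
            # Every child sees every chunk; each keeps its own state.
            for prober in self.probers:
                prober.feed(byte_str)

        def best(self):
            # (prober, confidence) of the most confident child.
            return max(
                ((p, p.get_confidence()) for p in self.probers),
                key=lambda pair: pair[1],
            )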
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -# BIG5 - -BIG5_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 4,4,4,4,4,4,4,4, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 4,3,3,3,3,3,3,3, # a0 - a7 - 3,3,3,3,3,3,3,3, # a8 - af - 3,3,3,3,3,3,3,3, # b0 - b7 - 3,3,3,3,3,3,3,3, # b8 - bf - 3,3,3,3,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -BIG5_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 -) - -BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) - -BIG5_SM_MODEL = {'class_table': BIG5_CLS, - 'class_factor': 5, - 'state_table': BIG5_ST, - 'char_len_table': BIG5_CHAR_LEN_TABLE, - 'name': 'Big5'} - -# CP949 - -CP949_CLS = ( - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f - 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f - 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f - 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f - 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f - 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f - 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f - 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f - 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af - 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf - 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff -) - -CP949_ST = ( -#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME - 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 -) - -CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) - -CP949_SM_MODEL = {'class_table': CP949_CLS, - 'class_factor': 10, - 'state_table': CP949_ST, - 'char_len_table': CP949_CHAR_LEN_TABLE, - 'name': 'CP949'} - -# EUC-JP - -EUCJP_CLS = ( - 4,4,4,4,4,4,4,4, # 00 - 07 - 4,4,4,4,4,4,5,5, # 08 - 0f - 4,4,4,4,4,4,4,4, # 10 - 17 - 4,4,4,5,4,4,4,4, # 18 - 1f - 4,4,4,4,4,4,4,4, # 20 - 27 - 4,4,4,4,4,4,4,4, # 28 - 2f - 4,4,4,4,4,4,4,4, # 30 - 37 - 4,4,4,4,4,4,4,4, # 38 - 3f - 4,4,4,4,4,4,4,4, # 40 - 47 - 4,4,4,4,4,4,4,4, # 48 - 4f - 4,4,4,4,4,4,4,4, # 50 - 57 - 4,4,4,4,4,4,4,4, # 58 - 5f - 4,4,4,4,4,4,4,4, # 60 - 67 - 4,4,4,4,4,4,4,4, # 68 - 6f - 4,4,4,4,4,4,4,4, # 70 - 77 - 4,4,4,4,4,4,4,4, # 78 - 7f - 5,5,5,5,5,5,5,5, # 80 - 87 - 5,5,5,5,5,5,1,3, # 88 - 8f - 5,5,5,5,5,5,5,5, # 90 - 97 - 5,5,5,5,5,5,5,5, # 98 - 9f - 5,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,0,5 # f8 - ff -) - -EUCJP_ST = ( - 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f - 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 -) - -EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) - -EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, - 'class_factor': 6, - 'state_table': EUCJP_ST, - 'char_len_table': EUCJP_CHAR_LEN_TABLE, - 'name': 'EUC-JP'} - -# EUC-KR - -EUCKR_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,3,3,3, # 
a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,3,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 2,2,2,2,2,2,2,2, # e0 - e7 - 2,2,2,2,2,2,2,2, # e8 - ef - 2,2,2,2,2,2,2,2, # f0 - f7 - 2,2,2,2,2,2,2,0 # f8 - ff -) - -EUCKR_ST = ( - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f -) - -EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) - -EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, - 'class_factor': 4, - 'state_table': EUCKR_ST, - 'char_len_table': EUCKR_CHAR_LEN_TABLE, - 'name': 'EUC-KR'} - -# EUC-TW - -EUCTW_CLS = ( - 2,2,2,2,2,2,2,2, # 00 - 07 - 2,2,2,2,2,2,0,0, # 08 - 0f - 2,2,2,2,2,2,2,2, # 10 - 17 - 2,2,2,0,2,2,2,2, # 18 - 1f - 2,2,2,2,2,2,2,2, # 20 - 27 - 2,2,2,2,2,2,2,2, # 28 - 2f - 2,2,2,2,2,2,2,2, # 30 - 37 - 2,2,2,2,2,2,2,2, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,2, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,6,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,3,4,4,4,4,4,4, # a0 - a7 - 5,5,1,1,1,1,1,1, # a8 - af - 1,1,1,1,1,1,1,1, # b0 - b7 - 1,1,1,1,1,1,1,1, # b8 - bf - 1,1,3,1,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -EUCTW_ST = ( - MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 - MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 - MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) - -EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, - 'class_factor': 7, - 'state_table': EUCTW_ST, - 'char_len_table': EUCTW_CHAR_LEN_TABLE, - 'name': 'x-euc-tw'} - -# GB2312 - -GB2312_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 3,3,3,3,3,3,3,3, # 30 - 37 - 3,3,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,4, # 78 - 7f - 5,6,6,6,6,6,6,6, # 80 - 87 - 6,6,6,6,6,6,6,6, # 88 - 8f - 6,6,6,6,6,6,6,6, # 90 - 97 - 6,6,6,6,6,6,6,6, # 98 - 9f - 6,6,6,6,6,6,6,6, # a0 - a7 - 
6,6,6,6,6,6,6,6, # a8 - af - 6,6,6,6,6,6,6,6, # b0 - b7 - 6,6,6,6,6,6,6,6, # b8 - bf - 6,6,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 6,6,6,6,6,6,6,6, # e0 - e7 - 6,6,6,6,6,6,6,6, # e8 - ef - 6,6,6,6,6,6,6,6, # f0 - f7 - 6,6,6,6,6,6,6,0 # f8 - ff -) - -GB2312_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 - 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -# To be accurate, the length of class 6 can be either 2 or 4. -# But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validating -# each code range there as well. So it is safe to set it to be -# 2 here. -GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) - -GB2312_SM_MODEL = {'class_table': GB2312_CLS, - 'class_factor': 7, - 'state_table': GB2312_ST, - 'char_len_table': GB2312_CHAR_LEN_TABLE, - 'name': 'GB2312'} - -# Shift_JIS - -SJIS_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,2,2,3, # 80 - 87 - 3,3,3,3,3,3,3,3, # 88 - 8f - 3,3,3,3,3,3,3,3, # 90 - 97 - 3,3,3,3,3,3,3,3, # 98 - 9f - #0xa0 is illegal in sjis encoding, but some pages does - #contain such byte. We need to be more error forgiven. 
- 2,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,4,4,4, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,0,0,0) # f8 - ff - - -SJIS_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 -) - -SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) - -SJIS_SM_MODEL = {'class_table': SJIS_CLS, - 'class_factor': 6, - 'state_table': SJIS_ST, - 'char_len_table': SJIS_CHAR_LEN_TABLE, - 'name': 'Shift_JIS'} - -# UCS2-BE - -UCS2BE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2BE_ST = ( - 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 - 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f - 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 - 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f - 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) - -UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, - 'class_factor': 6, - 'state_table': UCS2BE_ST, - 'char_len_table': UCS2BE_CHAR_LEN_TABLE, - 'name': 'UTF-16BE'} - -# UCS2-LE - -UCS2LE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 
0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2LE_ST = ( - 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f - 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 - 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) - -UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, - 'class_factor': 6, - 'state_table': UCS2LE_ST, - 'char_len_table': UCS2LE_CHAR_LEN_TABLE, - 'name': 'UTF-16LE'} - -# UTF-8 - -UTF8_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 2,2,2,2,3,3,3,3, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 5,5,5,5,5,5,5,5, # a0 - a7 - 5,5,5,5,5,5,5,5, # a8 - af - 5,5,5,5,5,5,5,5, # b0 - b7 - 5,5,5,5,5,5,5,5, # b8 - bf - 0,0,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 7,8,8,8,8,8,8,8, # e0 - e7 - 8,8,8,8,8,9,8,8, # e8 - ef - 10,11,11,11,11,11,11,11, # f0 - f7 - 12,13,13,13,14,15,0,0 # f8 - ff -) - -UTF8_ST = ( - MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 - 9, 11, 8, 7, 6, 5, 4, 3,#08-0f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f - MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f - MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f - MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f - MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af - MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf -) - -UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) - -UTF8_SM_MODEL = {'class_table': UTF8_CLS, - 'class_factor': 16, - 'state_table': UTF8_ST, - 'char_len_table': UTF8_CHAR_LEN_TABLE, - 'name': 'UTF-8'} diff --git a/pype/vendor/requests/packages/chardet/sbcharsetprober.py b/pype/vendor/requests/packages/chardet/sbcharsetprober.py deleted file mode 100644 index 0adb51de5a..0000000000 --- a/pype/vendor/requests/packages/chardet/sbcharsetprober.py +++ /dev/null @@ -1,132 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
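The *_SM_MODEL dicts deleted above (mbcssm.py) are data for a table-driven state machine: class_table maps each byte to a class, state_table is a flattened (state x class) transition table whose row width is class_factor, and char_len_table records the expected byte length of a character that starts with a given class. A sketch of the interpreter, assuming the MachineState constants START=0, ERROR=1, ITS_ME=2 used by chardet's CodingStateMachine:

    START, ERROR, ITS_ME = 0, 1, 2  # assumed MachineState values

    class TinyCodingStateMachine:
        def __init__(self, sm_model):
            self._model = sm_model
            self._curr_state = START
            self._curr_char_len = 0

        def next_state(self, byte):
            byte_class = self._model['class_table'][byte]
            if self._curr_state == START:
                # A new multi-byte character begins here.
                self._curr_char_len = self._model['char_len_table'][byte_class]
            # Flattened 2-D lookup: rows of width class_factor.
            index = self._curr_state * self._model['class_factor'] + byte_class
            self._curr_state = self._model['state_table'][index]
            return self._curr_state

Feeding bytes one at a time yields ERROR (the encoding is ruled out), ITS_ME (an unambiguous match), or an intermediate state while a character is still incomplete.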
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import CharacterCategory, ProbingState, SequenceLikelihood - - -class SingleByteCharSetProber(CharSetProber): - SAMPLE_SIZE = 64 - SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 - POSITIVE_SHORTCUT_THRESHOLD = 0.95 - NEGATIVE_SHORTCUT_THRESHOLD = 0.05 - - def __init__(self, model, reversed=False, name_prober=None): - super(SingleByteCharSetProber, self).__init__() - self._model = model - # TRUE if we need to reverse every pair in the model lookup - self._reversed = reversed - # Optional auxiliary prober for name decision - self._name_prober = name_prober - self._last_order = None - self._seq_counters = None - self._total_seqs = None - self._total_char = None - self._freq_char = None - self.reset() - - def reset(self): - super(SingleByteCharSetProber, self).reset() - # char order of last character - self._last_order = 255 - self._seq_counters = [0] * SequenceLikelihood.get_num_categories() - self._total_seqs = 0 - self._total_char = 0 - # characters that fall in our sampling range - self._freq_char = 0 - - @property - def charset_name(self): - if self._name_prober: - return self._name_prober.charset_name - else: - return self._model['charset_name'] - - @property - def language(self): - if self._name_prober: - return self._name_prober.language - else: - return self._model.get('language') - - def feed(self, byte_str): - if not self._model['keep_english_letter']: - byte_str = self.filter_international_words(byte_str) - if not byte_str: - return self.state - char_to_order_map = self._model['char_to_order_map'] - for i, c in enumerate(byte_str): - # XXX: Order is in range 1-64, so one would think we want 0-63 here, - # but that leads to 27 more test failures than before. - order = char_to_order_map[c] - # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but - # CharacterCategory.SYMBOL is actually 253, so we use CONTROL - # to make it closer to the original intent. The only difference - # is whether or not we count digits and control characters for - # _total_char purposes. 
- if order < CharacterCategory.CONTROL: - self._total_char += 1 - if order < self.SAMPLE_SIZE: - self._freq_char += 1 - if self._last_order < self.SAMPLE_SIZE: - self._total_seqs += 1 - if not self._reversed: - i = (self._last_order * self.SAMPLE_SIZE) + order - model = self._model['precedence_matrix'][i] - else: # reverse the order of the letters in the lookup - i = (order * self.SAMPLE_SIZE) + self._last_order - model = self._model['precedence_matrix'][i] - self._seq_counters[model] += 1 - self._last_order = order - - charset_name = self._model['charset_name'] - if self.state == ProbingState.DETECTING: - if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: - confidence = self.get_confidence() - if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, we have a winner', - charset_name, confidence) - self._state = ProbingState.FOUND_IT - elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', charset_name, - confidence, - self.NEGATIVE_SHORTCUT_THRESHOLD) - self._state = ProbingState.NOT_ME - - return self.state - - def get_confidence(self): - r = 0.01 - if self._total_seqs > 0: - r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / - self._total_seqs / self._model['typical_positive_ratio']) - r = r * self._freq_char / self._total_char - if r >= 1.0: - r = 0.99 - return r diff --git a/pype/vendor/requests/packages/chardet/sbcsgroupprober.py b/pype/vendor/requests/packages/chardet/sbcsgroupprober.py deleted file mode 100644 index 98e95dc1a3..0000000000 --- a/pype/vendor/requests/packages/chardet/sbcsgroupprober.py +++ /dev/null @@ -1,73 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
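The get_confidence removed just above combines two ratios: how often the positive bigram category occurred, relative to what is typical for the language, damped by the share of in-sample characters. The same computation as a plain function (POSITIVE is assumed to be SequenceLikelihood.POSITIVE == 3):

    def single_byte_confidence(seq_counters, total_seqs, freq_char, total_char,
                               typical_positive_ratio):
        if total_seqs <= 0 or total_char <= 0:
            return 0.01  # the prober's floor value
        r = seq_counters[3] / total_seqs / typical_positive_ratio
        r = r * freq_char / total_char
        return 0.99 if r >= 1.0 else r

typical_positive_ratio is the per-model constant visible in the deleted model dicts (e.g. 0.970290 for Latin5TurkishModel, 0.926386 for TIS620ThaiModel).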
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .sbcharsetprober import SingleByteCharSetProber -from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, - Latin5CyrillicModel, MacCyrillicModel, - Ibm866Model, Ibm855Model) -from .langgreekmodel import Latin7GreekModel, Win1253GreekModel -from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel -from .langthaimodel import TIS620ThaiModel -from .langhebrewmodel import Win1255HebrewModel -from .hebrewprober import HebrewProber -from .langturkishmodel import Latin5TurkishModel - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - super(SBCSGroupProber, self).__init__() - self.probers = [ - SingleByteCharSetProber(Win1251CyrillicModel), - SingleByteCharSetProber(Koi8rModel), - SingleByteCharSetProber(Latin5CyrillicModel), - SingleByteCharSetProber(MacCyrillicModel), - SingleByteCharSetProber(Ibm866Model), - SingleByteCharSetProber(Ibm855Model), - SingleByteCharSetProber(Latin7GreekModel), - SingleByteCharSetProber(Win1253GreekModel), - SingleByteCharSetProber(Latin5BulgarianModel), - SingleByteCharSetProber(Win1251BulgarianModel), - # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) - # after we retrain model. - # SingleByteCharSetProber(Latin2HungarianModel), - # SingleByteCharSetProber(Win1250HungarianModel), - SingleByteCharSetProber(TIS620ThaiModel), - SingleByteCharSetProber(Latin5TurkishModel), - ] - hebrew_prober = HebrewProber() - logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrew_prober) - visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrew_prober) - hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) - self.probers.extend([hebrew_prober, logical_hebrew_prober, - visual_hebrew_prober]) - - self.reset() diff --git a/pype/vendor/requests/packages/chardet/sjisprober.py b/pype/vendor/requests/packages/chardet/sjisprober.py deleted file mode 100644 index 9e29623bdc..0000000000 --- a/pype/vendor/requests/packages/chardet/sjisprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
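Worth noting in the SBCSGroupProber wiring above: a single Win1255HebrewModel serves both logical and visual Hebrew. Visual Hebrew is stored in reversed character order, so one SingleByteCharSetProber is constructed with reversed=True (flipping every bigram lookup) and a HebrewProber arbitrates between the two. A usage sketch — these modules are being removed from the vendored requests copy here, but the standalone chardet package (as of 3.x) keeps the same layout:

    from chardet.sbcsgroupprober import SBCSGroupProber

    prober = SBCSGroupProber()
    prober.feed('Merhaba dünya'.encode('iso-8859-9'))
    print(prober.charset_name, prober.get_confidence())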
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import SJISDistributionAnalysis -from .jpcntx import SJISContextAnalysis -from .mbcssm import SJIS_SM_MODEL -from .enums import ProbingState, MachineState - - -class SJISProber(MultiByteCharSetProber): - def __init__(self): - super(SJISProber, self).__init__() - self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) - self.distribution_analyzer = SJISDistributionAnalysis() - self.context_analyzer = SJISContextAnalysis() - self.reset() - - def reset(self): - super(SJISProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return self.context_analyzer.charset_name - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char[2 - char_len:], - char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 - - char_len], char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/pype/vendor/requests/packages/chardet/universaldetector.py b/pype/vendor/requests/packages/chardet/universaldetector.py deleted file mode 100644 index b3c057e848..0000000000 --- a/pype/vendor/requests/packages/chardet/universaldetector.py +++ /dev/null @@ -1,278 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### -""" -Module containing the UniversalDetector detector class, which is the primary -class a user of ``chardet`` should use. - -:author: Mark Pilgrim (initial port to Python) -:author: Shy Shalom (original C code) -:author: Dan Blanchard (major refactoring for 3.0) -:author: Ian Cordasco -""" - - -import codecs -import logging -import re - -from .enums import InputState, LanguageFilter, ProbingState -from .escprober import EscCharSetProber -from .latin1prober import Latin1Prober -from .mbcsgroupprober import MBCSGroupProber -from .sbcsgroupprober import SBCSGroupProber - - -class UniversalDetector(object): - """ - The ``UniversalDetector`` class underlies the ``chardet.detect`` function - and coordinates all of the different charset probers. - - To get a ``dict`` containing an encoding and its confidence, you can simply - run: - - .. code:: - - u = UniversalDetector() - u.feed(some_bytes) - u.close() - detected = u.result - - """ - - MINIMUM_THRESHOLD = 0.20 - HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') - ESC_DETECTOR = re.compile(b'(\033|~{)') - WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') - ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', - 'iso-8859-2': 'Windows-1250', - 'iso-8859-5': 'Windows-1251', - 'iso-8859-6': 'Windows-1256', - 'iso-8859-7': 'Windows-1253', - 'iso-8859-8': 'Windows-1255', - 'iso-8859-9': 'Windows-1254', - 'iso-8859-13': 'Windows-1257'} - - def __init__(self, lang_filter=LanguageFilter.ALL): - self._esc_charset_prober = None - self._charset_probers = [] - self.result = None - self.done = None - self._got_data = None - self._input_state = None - self._last_char = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - self._has_win_bytes = None - self.reset() - - def reset(self): - """ - Reset the UniversalDetector and all of its probers back to their - initial states. This is called by ``__init__``, so you only need to - call this directly in between analyses of different documents. - """ - self.result = {'encoding': None, 'confidence': 0.0, 'language': None} - self.done = False - self._got_data = False - self._has_win_bytes = False - self._input_state = InputState.PURE_ASCII - self._last_char = b'' - if self._esc_charset_prober: - self._esc_charset_prober.reset() - for prober in self._charset_probers: - prober.reset() - - def feed(self, byte_str): - """ - Takes a chunk of a document and feeds it through all of the relevant - charset probers. - - After calling ``feed``, you can check the value of the ``done`` - attribute to see if you need to continue feeding the - ``UniversalDetector`` more data, or if it has made a prediction - (in the ``result`` attribute). - - .. note:: - You should always call ``close`` when you're done feeding in your - document if ``done`` is not already ``True``. 
- """ - if self.done: - return - - if not len(byte_str): - return - - if not isinstance(byte_str, bytearray): - byte_str = bytearray(byte_str) - - # First check for known BOMs, since these are guaranteed to be correct - if not self._got_data: - # If the data starts with BOM, we know it is UTF - if byte_str.startswith(codecs.BOM_UTF8): - # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8-SIG", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_UTF32_LE, - codecs.BOM_UTF32_BE)): - # FF FE 00 00 UTF-32, little-endian BOM - # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\xFE\xFF\x00\x00'): - # FE FF 00 00 UCS-4, unusual octet order BOM (3412) - self.result = {'encoding': "X-ISO-10646-UCS-4-3412", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\x00\x00\xFF\xFE'): - # 00 00 FF FE UCS-4, unusual octet order BOM (2143) - self.result = {'encoding': "X-ISO-10646-UCS-4-2143", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): - # FF FE UTF-16, little endian BOM - # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", - 'confidence': 1.0, - 'language': ''} - - self._got_data = True - if self.result['encoding'] is not None: - self.done = True - return - - # If none of those matched and we've only see ASCII so far, check - # for high bytes and escape sequences - if self._input_state == InputState.PURE_ASCII: - if self.HIGH_BYTE_DETECTOR.search(byte_str): - self._input_state = InputState.HIGH_BYTE - elif self._input_state == InputState.PURE_ASCII and \ - self.ESC_DETECTOR.search(self._last_char + byte_str): - self._input_state = InputState.ESC_ASCII - - self._last_char = byte_str[-1:] - - # If we've seen escape sequences, use the EscCharSetProber, which - # uses a simple state machine to check for known escape sequences in - # HZ and ISO-2022 encodings, since those are the only encodings that - # use such sequences. - if self._input_state == InputState.ESC_ASCII: - if not self._esc_charset_prober: - self._esc_charset_prober = EscCharSetProber(self.lang_filter) - if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': - self._esc_charset_prober.charset_name, - 'confidence': - self._esc_charset_prober.get_confidence(), - 'language': - self._esc_charset_prober.language} - self.done = True - # If we've seen high bytes (i.e., those with values greater than 127), - # we need to do more complicated checks using all our multi-byte and - # single-byte probers that are left. The single-byte probers - # use character bigram distributions to determine the encoding, whereas - # the multi-byte probers use a combination of character unigram and - # bigram distributions. 
- elif self._input_state == InputState.HIGH_BYTE: - if not self._charset_probers: - self._charset_probers = [MBCSGroupProber(self.lang_filter)] - # If we're checking non-CJK encodings, use single-byte prober - if self.lang_filter & LanguageFilter.NON_CJK: - self._charset_probers.append(SBCSGroupProber()) - self._charset_probers.append(Latin1Prober()) - for prober in self._charset_probers: - if prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': prober.charset_name, - 'confidence': prober.get_confidence(), - 'language': prober.language} - self.done = True - break - if self.WIN_BYTE_DETECTOR.search(byte_str): - self._has_win_bytes = True - - def close(self): - """ - Stop analyzing the current document and come up with a final - prediction. - - :returns: The ``result`` attribute, a ``dict`` with the keys - `encoding`, `confidence`, and `language`. - """ - # Don't bother with checks if we're already done - if self.done: - return self.result - self.done = True - - if not self._got_data: - self.logger.debug('no data received!') - - # Default to ASCII if it is all we've seen so far - elif self._input_state == InputState.PURE_ASCII: - self.result = {'encoding': 'ascii', - 'confidence': 1.0, - 'language': ''} - - # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD - elif self._input_state == InputState.HIGH_BYTE: - prober_confidence = None - max_prober_confidence = 0.0 - max_prober = None - for prober in self._charset_probers: - if not prober: - continue - prober_confidence = prober.get_confidence() - if prober_confidence > max_prober_confidence: - max_prober_confidence = prober_confidence - max_prober = prober - if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): - charset_name = max_prober.charset_name - lower_charset_name = max_prober.charset_name.lower() - confidence = max_prober.get_confidence() - # Use Windows encoding name instead of ISO-8859 if we saw any - # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if self._has_win_bytes: - charset_name = self.ISO_WIN_MAP.get(lower_charset_name, - charset_name) - self.result = {'encoding': charset_name, - 'confidence': confidence, - 'language': max_prober.language} - - # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() == logging.DEBUG: - if self.result['encoding'] is None: - self.logger.debug('no probers hit minimum threshold') - for prober in self._charset_probers[0].probers: - if not prober: - continue - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - return self.result diff --git a/pype/vendor/requests/packages/chardet/utf8prober.py b/pype/vendor/requests/packages/chardet/utf8prober.py deleted file mode 100644 index 6c3196cc2d..0000000000 --- a/pype/vendor/requests/packages/chardet/utf8prober.py +++ /dev/null @@ -1,82 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
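The UniversalDetector docstring and close() logic removed above describe the full feed/close/result cycle. A minimal usage sketch of that API, assuming the pip-installed chardet package that replaces this vendored copy (the input file name is hypothetical):

from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open("some_file.bin", "rb") as handle:          # hypothetical input file
    for chunk in iter(lambda: handle.read(4096), b""):
        detector.feed(chunk)
        if detector.done:    # a prober already met its confidence threshold
            break
detector.close()             # required to finalize if done is still False
print(detector.result)       # {'encoding': ..., 'confidence': ..., 'language': ...}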
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState -from .codingstatemachine import CodingStateMachine -from .mbcssm import UTF8_SM_MODEL - - - -class UTF8Prober(CharSetProber): - ONE_CHAR_PROB = 0.5 - - def __init__(self): - super(UTF8Prober, self).__init__() - self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) - self._num_mb_chars = None - self.reset() - - def reset(self): - super(UTF8Prober, self).reset() - self.coding_sm.reset() - self._num_mb_chars = 0 - - @property - def charset_name(self): - return "utf-8" - - @property - def language(self): - return "" - - def feed(self, byte_str): - for c in byte_str: - coding_state = self.coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - if self.coding_sm.get_current_charlen() >= 2: - self._num_mb_chars += 1 - - if self.state == ProbingState.DETECTING: - if self.get_confidence() > self.SHORTCUT_THRESHOLD: - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - unlike = 0.99 - if self._num_mb_chars < 6: - unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars - return 1.0 - unlike - else: - return unlike diff --git a/pype/vendor/requests/packages/chardet/version.py b/pype/vendor/requests/packages/chardet/version.py deleted file mode 100644 index f29337ca4a..0000000000 --- a/pype/vendor/requests/packages/chardet/version.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. 
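The UTF8Prober.get_confidence formula removed just above is easy to tabulate: each observed multi-byte sequence halves the residual doubt (ONE_CHAR_PROB = 0.5), so confidence climbs steeply and then plateaus at 0.99. A self-contained restatement, showing why five multi-byte characters already clear the shortcut threshold that feed() checks (0.95 in chardet's CharSetProber base class):

ONE_CHAR_PROB = 0.5          # as in the deleted prober
SHORTCUT_THRESHOLD = 0.95    # assumed from chardet's CharSetProber

def utf8_confidence(num_mb_chars):
    unlike = 0.99
    if num_mb_chars < 6:
        return 1.0 - unlike * ONE_CHAR_PROB ** num_mb_chars
    return unlike            # saturates once enough evidence accumulated

for n in range(7):
    print(n, round(utf8_confidence(n), 4), utf8_confidence(n) > SHORTCUT_THRESHOLD)
# n=5 yields ~0.9691, the first value above the shortcut threshold,
# so feed() can report FOUND_IT long before the end of input.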
- -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - -__version__ = "3.0.2" -VERSION = __version__.split('.') diff --git a/pype/vendor/requests/packages/idna/__init__.py b/pype/vendor/requests/packages/idna/__init__.py deleted file mode 100644 index bb67a43fa4..0000000000 --- a/pype/vendor/requests/packages/idna/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .core import * diff --git a/pype/vendor/requests/packages/idna/codec.py b/pype/vendor/requests/packages/idna/codec.py deleted file mode 100644 index 98c65ead14..0000000000 --- a/pype/vendor/requests/packages/idna/codec.py +++ /dev/null @@ -1,118 +0,0 @@ -from .core import encode, decode, alabel, ulabel, IDNAError -import codecs -import re - -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -class Codec(codecs.Codec): - - def encode(self, data, errors='strict'): - - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return "", 0 - - return encode(data), len(data) - - def decode(self, data, errors='strict'): - - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return u"", 0 - - return decode(data), len(data) - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data, errors, final): - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return ("", 0) - - labels = _unicode_dots_re.split(data) - trailing_dot = u'' - if labels: - if not labels[-1]: - trailing_dot = '.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = '.' - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result, size) - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data, errors, final): - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return (u"", 0) - - # IDNA allows decoding to operate on Unicode strings, too. - if isinstance(data, unicode): - labels = _unicode_dots_re.split(data) - else: - # Must be ASCII string - data = str(data) - unicode(data, "ascii") - labels = data.split(".") - - trailing_dot = u'' - if labels: - if not labels[-1]: - trailing_dot = u'.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = u'.' 
- - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result = u".".join(result) + trailing_dot - size += len(trailing_dot) - return (result, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - -class StreamReader(Codec, codecs.StreamReader): - pass - -def getregentry(): - return codecs.CodecInfo( - name='idna', - encode=Codec().encode, - decode=Codec().decode, - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) diff --git a/pype/vendor/requests/packages/idna/compat.py b/pype/vendor/requests/packages/idna/compat.py deleted file mode 100644 index 4d47f336db..0000000000 --- a/pype/vendor/requests/packages/idna/compat.py +++ /dev/null @@ -1,12 +0,0 @@ -from .core import * -from .codec import * - -def ToASCII(label): - return encode(label) - -def ToUnicode(label): - return decode(label) - -def nameprep(s): - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") - diff --git a/pype/vendor/requests/packages/idna/core.py b/pype/vendor/requests/packages/idna/core.py deleted file mode 100644 index b55b664568..0000000000 --- a/pype/vendor/requests/packages/idna/core.py +++ /dev/null @@ -1,387 +0,0 @@ -from . import idnadata -import bisect -import unicodedata -import re -import sys -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b'xn--' -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -if sys.version_info[0] == 3: - unicode = str - unichr = chr - -class IDNAError(UnicodeError): - """ Base exception for all IDNA-encoding related problems """ - pass - - -class IDNABidiError(IDNAError): - """ Exception when bidirectional requirements are not satisfied """ - pass - - -class InvalidCodepoint(IDNAError): - """ Exception when a disallowed or unallocated codepoint is used """ - pass - - -class InvalidCodepointContext(IDNAError): - """ Exception when the codepoint is not valid in the context it is used """ - pass - - -def _combining_class(cp): - return unicodedata.combining(unichr(cp)) - -def _is_script(cp, script): - return intranges_contain(ord(cp), idnadata.scripts[script]) - -def _punycode(s): - return s.encode('punycode') - -def _unot(s): - return 'U+{0:04X}'.format(s) - - -def valid_label_length(label): - - if len(label) > 63: - return False - return True - - -def valid_string_length(label, trailing_dot): - - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label, check_ltr=False): - - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == '': - # String likely comes from a newer version of Unicode - raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) - if direction in ['R', 'AL', 'AN']: - bidi_label = True - break - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in ['R', 'AL']: - rtl = True - elif direction == 'L': - rtl = False - else: - raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) - - valid_ending = False - number_type = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if not direction in ['R', 'AL', 
'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) - # Bidi rule 3 - if direction in ['R', 'AL', 'EN', 'AN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - # Bidi rule 4 - if direction in ['AN', 'EN']: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError('Can not mix numeral types in a right-to-left label') - else: - # Bidi rule 5 - if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) - # Bidi rule 6 - if direction in ['L', 'EN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - - if not valid_ending: - raise IDNABidiError('Label ends with illegal codepoint directionality') - - return True - - -def check_initial_combiner(label): - - if unicodedata.category(label[0])[0] == 'M': - raise IDNAError('Label begins with an illegal combining character') - return True - - -def check_hyphen_ok(label): - - if label[2:4] == '--': - raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') - if label[0] == '-' or label[-1] == '-': - raise IDNAError('Label must not start or end with a hyphen') - return True - - -def check_nfc(label): - - if unicodedata.normalize('NFC', label) != label: - raise IDNAError('Label must be in Normalization Form C') - - -def valid_contextj(label, pos): - - cp_value = ord(label[pos]) - - if cp_value == 0x200c: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos-1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('L'), ord('D')]: - ok = True - break - - if not ok: - return False - - ok = False - for i in range(pos+1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('R'), ord('D')]: - ok = True - break - return ok - - if cp_value == 0x200d: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - - return False - - -def valid_contexto(label, pos, exception=False): - - cp_value = ord(label[pos]) - - if cp_value == 0x00b7: - if 0 < pos < len(label)-1: - if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label)-1 and len(label) > 1: - return _is_script(label[pos + 1], 'Greek') - return False - - elif cp_value == 0x05f3 or cp_value == 0x05f4: - if pos > 0: - return _is_script(label[pos - 1], 'Hebrew') - return False - - elif cp_value == 0x30fb: - for cp in label: - if cp == u'\u30fb': - continue - if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): - return True - return False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6f0 <= ord(cp) <= 0x06f9: - return False - return True - - elif 0x6f0 <= cp_value <= 0x6f9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - -def check_label(label): - - if isinstance(label, (bytes, bytearray)): - label = label.decode('utf-8') - if len(label) == 0: - raise IDNAError('Empty Label') - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for (pos, cp) in enumerate(label): 
- cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): - if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): - if not valid_contexto(label, pos): - raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) - else: - raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) - - check_bidi(label) - - -def alabel(label): - - try: - label = label.encode('ascii') - try: - ulabel(label) - except IDNAError: - raise IDNAError('The label {0} is not a valid A-label'.format(label)) - if not valid_label_length(label): - raise IDNAError('Label too long') - return label - except UnicodeEncodeError: - pass - - if not label: - raise IDNAError('No Input') - - label = unicode(label) - check_label(label) - label = _punycode(label) - label = _alabel_prefix + label - - if not valid_label_length(label): - raise IDNAError('Label too long') - - return label - - -def ulabel(label): - - if not isinstance(label, (bytes, bytearray)): - try: - label = label.encode('ascii') - except UnicodeEncodeError: - check_label(label) - return label - - label = label.lower() - if label.startswith(_alabel_prefix): - label = label[len(_alabel_prefix):] - else: - check_label(label) - return label.decode('ascii') - - label = label.decode('punycode') - check_label(label) - return label - - -def uts46_remap(domain, std3_rules=True, transitional=False): - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - output = u"" - try: - for pos, char in enumerate(domain): - code_point = ord(char) - uts46row = uts46data[code_point if code_point < 256 else - bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement = uts46row[2] if len(uts46row) == 3 else None - if (status == "V" or - (status == "D" and not transitional) or - (status == "3" and std3_rules and replacement is None)): - output += char - elif replacement is not None and (status == "M" or - (status == "3" and std3_rules) or - (status == "D" and transitional)): - output += replacement - elif status != "I": - raise IndexError() - return unicodedata.normalize("NFC", output) - except IndexError: - raise InvalidCodepoint( - "Codepoint {0} not allowed at position {1} in {2}".format( - _unot(code_point), pos + 1, repr(domain))) - - -def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split('.') - else: - labels = _unicode_dots_re.split(s) - while labels and not labels[0]: - del labels[0] - if not labels: - raise IDNAError('Empty domain') - if labels[-1] == '': - del labels[-1] - trailing_dot = True - for label in labels: - result.append(alabel(label)) - if trailing_dot: - result.append(b'') - s = b'.'.join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError('Domain too long') - return s - - -def decode(s, strict=False, uts46=False, std3_rules=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") - if uts46: - s = 
uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - labels = s.split(u'.') - while labels and not labels[0]: - del labels[0] - if not labels: - raise IDNAError('Empty domain') - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - result.append(ulabel(label)) - if trailing_dot: - result.append(u'') - return u'.'.join(result) diff --git a/pype/vendor/requests/packages/idna/idnadata.py b/pype/vendor/requests/packages/idna/idnadata.py deleted file mode 100644 index 2ff30fee29..0000000000 --- a/pype/vendor/requests/packages/idna/idnadata.py +++ /dev/null @@ -1,1584 +0,0 @@ -# This file is automatically generated by build-idnadata.py - -scripts = { - 'Greek': ( - 0x37000000374, - 0x37500000378, - 0x37a0000037e, - 0x38400000385, - 0x38600000387, - 0x3880000038b, - 0x38c0000038d, - 0x38e000003a2, - 0x3a3000003e2, - 0x3f000000400, - 0x1d2600001d2b, - 0x1d5d00001d62, - 0x1d6600001d6b, - 0x1dbf00001dc0, - 0x1f0000001f16, - 0x1f1800001f1e, - 0x1f2000001f46, - 0x1f4800001f4e, - 0x1f5000001f58, - 0x1f5900001f5a, - 0x1f5b00001f5c, - 0x1f5d00001f5e, - 0x1f5f00001f7e, - 0x1f8000001fb5, - 0x1fb600001fc5, - 0x1fc600001fd4, - 0x1fd600001fdc, - 0x1fdd00001ff0, - 0x1ff200001ff5, - 0x1ff600001fff, - 0x212600002127, - 0x101400001018b, - 0x1d2000001d246, - ), - 'Han': ( - 0x2e8000002e9a, - 0x2e9b00002ef4, - 0x2f0000002fd6, - 0x300500003006, - 0x300700003008, - 0x30210000302a, - 0x30380000303c, - 0x340000004db6, - 0x4e0000009fcd, - 0xf9000000fa6e, - 0xfa700000fada, - 0x200000002a6d7, - 0x2a7000002b735, - 0x2b7400002b81e, - 0x2f8000002fa1e, - ), - 'Hebrew': ( - 0x591000005c8, - 0x5d0000005eb, - 0x5f0000005f5, - 0xfb1d0000fb37, - 0xfb380000fb3d, - 0xfb3e0000fb3f, - 0xfb400000fb42, - 0xfb430000fb45, - 0xfb460000fb50, - ), - 'Hiragana': ( - 0x304100003097, - 0x309d000030a0, - 0x1b0010001b002, - 0x1f2000001f201, - ), - 'Katakana': ( - 0x30a1000030fb, - 0x30fd00003100, - 0x31f000003200, - 0x32d0000032ff, - 0x330000003358, - 0xff660000ff70, - 0xff710000ff9e, - 0x1b0000001b001, - ), -} -joining_types = { - 0x600: 85, - 0x601: 85, - 0x602: 85, - 0x603: 85, - 0x604: 85, - 0x608: 85, - 0x60b: 85, - 0x620: 68, - 0x621: 85, - 0x622: 82, - 0x623: 82, - 0x624: 82, - 0x625: 82, - 0x626: 68, - 0x627: 82, - 0x628: 68, - 0x629: 82, - 0x62a: 68, - 0x62b: 68, - 0x62c: 68, - 0x62d: 68, - 0x62e: 68, - 0x62f: 82, - 0x630: 82, - 0x631: 82, - 0x632: 82, - 0x633: 68, - 0x634: 68, - 0x635: 68, - 0x636: 68, - 0x637: 68, - 0x638: 68, - 0x639: 68, - 0x63a: 68, - 0x63b: 68, - 0x63c: 68, - 0x63d: 68, - 0x63e: 68, - 0x63f: 68, - 0x640: 67, - 0x641: 68, - 0x642: 68, - 0x643: 68, - 0x644: 68, - 0x645: 68, - 0x646: 68, - 0x647: 68, - 0x648: 82, - 0x649: 68, - 0x64a: 68, - 0x66e: 68, - 0x66f: 68, - 0x671: 82, - 0x672: 82, - 0x673: 82, - 0x674: 85, - 0x675: 82, - 0x676: 82, - 0x677: 82, - 0x678: 68, - 0x679: 68, - 0x67a: 68, - 0x67b: 68, - 0x67c: 68, - 0x67d: 68, - 0x67e: 68, - 0x67f: 68, - 0x680: 68, - 0x681: 68, - 0x682: 68, - 0x683: 68, - 0x684: 68, - 0x685: 68, - 0x686: 68, - 0x687: 68, - 0x688: 82, - 0x689: 82, - 0x68a: 82, - 0x68b: 82, - 0x68c: 82, - 0x68d: 82, - 0x68e: 82, - 0x68f: 82, - 0x690: 82, - 0x691: 82, - 0x692: 82, - 0x693: 82, - 0x694: 82, - 0x695: 82, - 0x696: 82, - 0x697: 82, - 0x698: 82, - 0x699: 82, - 0x69a: 68, - 0x69b: 68, - 0x69c: 68, - 0x69d: 68, - 0x69e: 68, - 0x69f: 68, - 0x6a0: 68, - 0x6a1: 68, - 0x6a2: 68, - 0x6a3: 68, - 0x6a4: 68, - 0x6a5: 68, - 0x6a6: 68, - 0x6a7: 68, - 0x6a8: 68, - 0x6a9: 68, - 0x6aa: 68, - 
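For orientation: the joining_types table being deleted here stores each codepoint's Unicode joining type as an ord() code (68 == ord('D'), 82 == ord('R'), 85 == ord('U'), 67 == ord('C')), which is why valid_contextj above compares entries against ord('T'), ord('L'), and ord('D'). The public entry points removed with core.py are encode() and decode(); a short sketch against the pip-installed idna distribution that replaces this vendored copy (example domains are the ones from the idna README):

import idna

# encode() runs check_label() on each label (NFC, hyphen, bidi and
# CONTEXTJ/CONTEXTO rules above), then applies the "xn--" + punycode
# A-label transform.
print(idna.encode(u"ドメイン.テスト"))            # b'xn--eckwd4c7c.xn--zckzah'
print(idna.decode(b"xn--eckwd4c7c.xn--zckzah"))   # 'ドメイン.テスト'

# uts46=True first folds input through uts46_remap(), so mixed case and
# compatibility characters are mapped before the strict IDNA 2008 checks.
print(idna.encode(u"Königsgäßchen", uts46=True))  # b'xn--knigsgchen-b4a3dun'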
0x6ab: 68, - 0x6ac: 68, - 0x6ad: 68, - 0x6ae: 68, - 0x6af: 68, - 0x6b0: 68, - 0x6b1: 68, - 0x6b2: 68, - 0x6b3: 68, - 0x6b4: 68, - 0x6b5: 68, - 0x6b6: 68, - 0x6b7: 68, - 0x6b8: 68, - 0x6b9: 68, - 0x6ba: 68, - 0x6bb: 68, - 0x6bc: 68, - 0x6bd: 68, - 0x6be: 68, - 0x6bf: 68, - 0x6c0: 82, - 0x6c1: 68, - 0x6c2: 68, - 0x6c3: 82, - 0x6c4: 82, - 0x6c5: 82, - 0x6c6: 82, - 0x6c7: 82, - 0x6c8: 82, - 0x6c9: 82, - 0x6ca: 82, - 0x6cb: 82, - 0x6cc: 68, - 0x6cd: 82, - 0x6ce: 68, - 0x6cf: 82, - 0x6d0: 68, - 0x6d1: 68, - 0x6d2: 82, - 0x6d3: 82, - 0x6d5: 82, - 0x6dd: 85, - 0x6ee: 82, - 0x6ef: 82, - 0x6fa: 68, - 0x6fb: 68, - 0x6fc: 68, - 0x6ff: 68, - 0x710: 82, - 0x712: 68, - 0x713: 68, - 0x714: 68, - 0x715: 82, - 0x716: 82, - 0x717: 82, - 0x718: 82, - 0x719: 82, - 0x71a: 68, - 0x71b: 68, - 0x71c: 68, - 0x71d: 68, - 0x71e: 82, - 0x71f: 68, - 0x720: 68, - 0x721: 68, - 0x722: 68, - 0x723: 68, - 0x724: 68, - 0x725: 68, - 0x726: 68, - 0x727: 68, - 0x728: 82, - 0x729: 68, - 0x72a: 82, - 0x72b: 68, - 0x72c: 82, - 0x72d: 68, - 0x72e: 68, - 0x72f: 82, - 0x74d: 82, - 0x74e: 68, - 0x74f: 68, - 0x750: 68, - 0x751: 68, - 0x752: 68, - 0x753: 68, - 0x754: 68, - 0x755: 68, - 0x756: 68, - 0x757: 68, - 0x758: 68, - 0x759: 82, - 0x75a: 82, - 0x75b: 82, - 0x75c: 68, - 0x75d: 68, - 0x75e: 68, - 0x75f: 68, - 0x760: 68, - 0x761: 68, - 0x762: 68, - 0x763: 68, - 0x764: 68, - 0x765: 68, - 0x766: 68, - 0x767: 68, - 0x768: 68, - 0x769: 68, - 0x76a: 68, - 0x76b: 82, - 0x76c: 82, - 0x76d: 68, - 0x76e: 68, - 0x76f: 68, - 0x770: 68, - 0x771: 82, - 0x772: 68, - 0x773: 82, - 0x774: 82, - 0x775: 68, - 0x776: 68, - 0x777: 68, - 0x778: 82, - 0x779: 82, - 0x77a: 68, - 0x77b: 68, - 0x77c: 68, - 0x77d: 68, - 0x77e: 68, - 0x77f: 68, - 0x7ca: 68, - 0x7cb: 68, - 0x7cc: 68, - 0x7cd: 68, - 0x7ce: 68, - 0x7cf: 68, - 0x7d0: 68, - 0x7d1: 68, - 0x7d2: 68, - 0x7d3: 68, - 0x7d4: 68, - 0x7d5: 68, - 0x7d6: 68, - 0x7d7: 68, - 0x7d8: 68, - 0x7d9: 68, - 0x7da: 68, - 0x7db: 68, - 0x7dc: 68, - 0x7dd: 68, - 0x7de: 68, - 0x7df: 68, - 0x7e0: 68, - 0x7e1: 68, - 0x7e2: 68, - 0x7e3: 68, - 0x7e4: 68, - 0x7e5: 68, - 0x7e6: 68, - 0x7e7: 68, - 0x7e8: 68, - 0x7e9: 68, - 0x7ea: 68, - 0x7fa: 67, - 0x840: 82, - 0x841: 68, - 0x842: 68, - 0x843: 68, - 0x844: 68, - 0x845: 68, - 0x846: 82, - 0x847: 68, - 0x848: 68, - 0x849: 82, - 0x84a: 68, - 0x84b: 68, - 0x84c: 68, - 0x84d: 68, - 0x84e: 68, - 0x84f: 82, - 0x850: 68, - 0x851: 68, - 0x852: 68, - 0x853: 68, - 0x854: 82, - 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, - 0x8a0: 68, - 0x8a2: 68, - 0x8a3: 68, - 0x8a4: 68, - 0x8a5: 68, - 0x8a6: 68, - 0x8a7: 68, - 0x8a8: 68, - 0x8a9: 68, - 0x8aa: 82, - 0x8ab: 82, - 0x8ac: 82, - 0x1806: 85, - 0x1807: 68, - 0x180a: 67, - 0x180e: 85, - 0x1820: 68, - 0x1821: 68, - 0x1822: 68, - 0x1823: 68, - 0x1824: 68, - 0x1825: 68, - 0x1826: 68, - 0x1827: 68, - 0x1828: 68, - 0x1829: 68, - 0x182a: 68, - 0x182b: 68, - 0x182c: 68, - 0x182d: 68, - 0x182e: 68, - 0x182f: 68, - 0x1830: 68, - 0x1831: 68, - 0x1832: 68, - 0x1833: 68, - 0x1834: 68, - 0x1835: 68, - 0x1836: 68, - 0x1837: 68, - 0x1838: 68, - 0x1839: 68, - 0x183a: 68, - 0x183b: 68, - 0x183c: 68, - 0x183d: 68, - 0x183e: 68, - 0x183f: 68, - 0x1840: 68, - 0x1841: 68, - 0x1842: 68, - 0x1843: 68, - 0x1844: 68, - 0x1845: 68, - 0x1846: 68, - 0x1847: 68, - 0x1848: 68, - 0x1849: 68, - 0x184a: 68, - 0x184b: 68, - 0x184c: 68, - 0x184d: 68, - 0x184e: 68, - 0x184f: 68, - 0x1850: 68, - 0x1851: 68, - 0x1852: 68, - 0x1853: 68, - 0x1854: 68, - 0x1855: 68, - 0x1856: 68, - 0x1857: 68, - 0x1858: 68, - 0x1859: 68, - 0x185a: 68, - 0x185b: 68, - 0x185c: 68, - 0x185d: 68, - 
0x185e: 68, - 0x185f: 68, - 0x1860: 68, - 0x1861: 68, - 0x1862: 68, - 0x1863: 68, - 0x1864: 68, - 0x1865: 68, - 0x1866: 68, - 0x1867: 68, - 0x1868: 68, - 0x1869: 68, - 0x186a: 68, - 0x186b: 68, - 0x186c: 68, - 0x186d: 68, - 0x186e: 68, - 0x186f: 68, - 0x1870: 68, - 0x1871: 68, - 0x1872: 68, - 0x1873: 68, - 0x1874: 68, - 0x1875: 68, - 0x1876: 68, - 0x1877: 68, - 0x1880: 85, - 0x1881: 85, - 0x1882: 85, - 0x1883: 85, - 0x1884: 85, - 0x1885: 85, - 0x1886: 85, - 0x1887: 68, - 0x1888: 68, - 0x1889: 68, - 0x188a: 68, - 0x188b: 68, - 0x188c: 68, - 0x188d: 68, - 0x188e: 68, - 0x188f: 68, - 0x1890: 68, - 0x1891: 68, - 0x1892: 68, - 0x1893: 68, - 0x1894: 68, - 0x1895: 68, - 0x1896: 68, - 0x1897: 68, - 0x1898: 68, - 0x1899: 68, - 0x189a: 68, - 0x189b: 68, - 0x189c: 68, - 0x189d: 68, - 0x189e: 68, - 0x189f: 68, - 0x18a0: 68, - 0x18a1: 68, - 0x18a2: 68, - 0x18a3: 68, - 0x18a4: 68, - 0x18a5: 68, - 0x18a6: 68, - 0x18a7: 68, - 0x18a8: 68, - 0x18aa: 68, - 0x200c: 85, - 0x200d: 67, - 0x2066: 85, - 0x2067: 85, - 0x2068: 85, - 0x2069: 85, - 0xa840: 68, - 0xa841: 68, - 0xa842: 68, - 0xa843: 68, - 0xa844: 68, - 0xa845: 68, - 0xa846: 68, - 0xa847: 68, - 0xa848: 68, - 0xa849: 68, - 0xa84a: 68, - 0xa84b: 68, - 0xa84c: 68, - 0xa84d: 68, - 0xa84e: 68, - 0xa84f: 68, - 0xa850: 68, - 0xa851: 68, - 0xa852: 68, - 0xa853: 68, - 0xa854: 68, - 0xa855: 68, - 0xa856: 68, - 0xa857: 68, - 0xa858: 68, - 0xa859: 68, - 0xa85a: 68, - 0xa85b: 68, - 0xa85c: 68, - 0xa85d: 68, - 0xa85e: 68, - 0xa85f: 68, - 0xa860: 68, - 0xa861: 68, - 0xa862: 68, - 0xa863: 68, - 0xa864: 68, - 0xa865: 68, - 0xa866: 68, - 0xa867: 68, - 0xa868: 68, - 0xa869: 68, - 0xa86a: 68, - 0xa86b: 68, - 0xa86c: 68, - 0xa86d: 68, - 0xa86e: 68, - 0xa86f: 68, - 0xa870: 68, - 0xa871: 68, - 0xa872: 76, - 0xa873: 85, -} -codepoint_classes = { - 'PVALID': ( - 0x2d0000002e, - 0x300000003a, - 0x610000007b, - 0xdf000000f7, - 0xf800000100, - 0x10100000102, - 0x10300000104, - 0x10500000106, - 0x10700000108, - 0x1090000010a, - 0x10b0000010c, - 0x10d0000010e, - 0x10f00000110, - 0x11100000112, - 0x11300000114, - 0x11500000116, - 0x11700000118, - 0x1190000011a, - 0x11b0000011c, - 0x11d0000011e, - 0x11f00000120, - 0x12100000122, - 0x12300000124, - 0x12500000126, - 0x12700000128, - 0x1290000012a, - 0x12b0000012c, - 0x12d0000012e, - 0x12f00000130, - 0x13100000132, - 0x13500000136, - 0x13700000139, - 0x13a0000013b, - 0x13c0000013d, - 0x13e0000013f, - 0x14200000143, - 0x14400000145, - 0x14600000147, - 0x14800000149, - 0x14b0000014c, - 0x14d0000014e, - 0x14f00000150, - 0x15100000152, - 0x15300000154, - 0x15500000156, - 0x15700000158, - 0x1590000015a, - 0x15b0000015c, - 0x15d0000015e, - 0x15f00000160, - 0x16100000162, - 0x16300000164, - 0x16500000166, - 0x16700000168, - 0x1690000016a, - 0x16b0000016c, - 0x16d0000016e, - 0x16f00000170, - 0x17100000172, - 0x17300000174, - 0x17500000176, - 0x17700000178, - 0x17a0000017b, - 0x17c0000017d, - 0x17e0000017f, - 0x18000000181, - 0x18300000184, - 0x18500000186, - 0x18800000189, - 0x18c0000018e, - 0x19200000193, - 0x19500000196, - 0x1990000019c, - 0x19e0000019f, - 0x1a1000001a2, - 0x1a3000001a4, - 0x1a5000001a6, - 0x1a8000001a9, - 0x1aa000001ac, - 0x1ad000001ae, - 0x1b0000001b1, - 0x1b4000001b5, - 0x1b6000001b7, - 0x1b9000001bc, - 0x1bd000001c4, - 0x1ce000001cf, - 0x1d0000001d1, - 0x1d2000001d3, - 0x1d4000001d5, - 0x1d6000001d7, - 0x1d8000001d9, - 0x1da000001db, - 0x1dc000001de, - 0x1df000001e0, - 0x1e1000001e2, - 0x1e3000001e4, - 0x1e5000001e6, - 0x1e7000001e8, - 0x1e9000001ea, - 0x1eb000001ec, - 0x1ed000001ee, - 0x1ef000001f1, - 0x1f5000001f6, - 
0x1f9000001fa, - 0x1fb000001fc, - 0x1fd000001fe, - 0x1ff00000200, - 0x20100000202, - 0x20300000204, - 0x20500000206, - 0x20700000208, - 0x2090000020a, - 0x20b0000020c, - 0x20d0000020e, - 0x20f00000210, - 0x21100000212, - 0x21300000214, - 0x21500000216, - 0x21700000218, - 0x2190000021a, - 0x21b0000021c, - 0x21d0000021e, - 0x21f00000220, - 0x22100000222, - 0x22300000224, - 0x22500000226, - 0x22700000228, - 0x2290000022a, - 0x22b0000022c, - 0x22d0000022e, - 0x22f00000230, - 0x23100000232, - 0x2330000023a, - 0x23c0000023d, - 0x23f00000241, - 0x24200000243, - 0x24700000248, - 0x2490000024a, - 0x24b0000024c, - 0x24d0000024e, - 0x24f000002b0, - 0x2b9000002c2, - 0x2c6000002d2, - 0x2ec000002ed, - 0x2ee000002ef, - 0x30000000340, - 0x34200000343, - 0x3460000034f, - 0x35000000370, - 0x37100000372, - 0x37300000374, - 0x37700000378, - 0x37b0000037e, - 0x39000000391, - 0x3ac000003cf, - 0x3d7000003d8, - 0x3d9000003da, - 0x3db000003dc, - 0x3dd000003de, - 0x3df000003e0, - 0x3e1000003e2, - 0x3e3000003e4, - 0x3e5000003e6, - 0x3e7000003e8, - 0x3e9000003ea, - 0x3eb000003ec, - 0x3ed000003ee, - 0x3ef000003f0, - 0x3f3000003f4, - 0x3f8000003f9, - 0x3fb000003fd, - 0x43000000460, - 0x46100000462, - 0x46300000464, - 0x46500000466, - 0x46700000468, - 0x4690000046a, - 0x46b0000046c, - 0x46d0000046e, - 0x46f00000470, - 0x47100000472, - 0x47300000474, - 0x47500000476, - 0x47700000478, - 0x4790000047a, - 0x47b0000047c, - 0x47d0000047e, - 0x47f00000480, - 0x48100000482, - 0x48300000488, - 0x48b0000048c, - 0x48d0000048e, - 0x48f00000490, - 0x49100000492, - 0x49300000494, - 0x49500000496, - 0x49700000498, - 0x4990000049a, - 0x49b0000049c, - 0x49d0000049e, - 0x49f000004a0, - 0x4a1000004a2, - 0x4a3000004a4, - 0x4a5000004a6, - 0x4a7000004a8, - 0x4a9000004aa, - 0x4ab000004ac, - 0x4ad000004ae, - 0x4af000004b0, - 0x4b1000004b2, - 0x4b3000004b4, - 0x4b5000004b6, - 0x4b7000004b8, - 0x4b9000004ba, - 0x4bb000004bc, - 0x4bd000004be, - 0x4bf000004c0, - 0x4c2000004c3, - 0x4c4000004c5, - 0x4c6000004c7, - 0x4c8000004c9, - 0x4ca000004cb, - 0x4cc000004cd, - 0x4ce000004d0, - 0x4d1000004d2, - 0x4d3000004d4, - 0x4d5000004d6, - 0x4d7000004d8, - 0x4d9000004da, - 0x4db000004dc, - 0x4dd000004de, - 0x4df000004e0, - 0x4e1000004e2, - 0x4e3000004e4, - 0x4e5000004e6, - 0x4e7000004e8, - 0x4e9000004ea, - 0x4eb000004ec, - 0x4ed000004ee, - 0x4ef000004f0, - 0x4f1000004f2, - 0x4f3000004f4, - 0x4f5000004f6, - 0x4f7000004f8, - 0x4f9000004fa, - 0x4fb000004fc, - 0x4fd000004fe, - 0x4ff00000500, - 0x50100000502, - 0x50300000504, - 0x50500000506, - 0x50700000508, - 0x5090000050a, - 0x50b0000050c, - 0x50d0000050e, - 0x50f00000510, - 0x51100000512, - 0x51300000514, - 0x51500000516, - 0x51700000518, - 0x5190000051a, - 0x51b0000051c, - 0x51d0000051e, - 0x51f00000520, - 0x52100000522, - 0x52300000524, - 0x52500000526, - 0x52700000528, - 0x5590000055a, - 0x56100000587, - 0x591000005be, - 0x5bf000005c0, - 0x5c1000005c3, - 0x5c4000005c6, - 0x5c7000005c8, - 0x5d0000005eb, - 0x5f0000005f3, - 0x6100000061b, - 0x62000000640, - 0x64100000660, - 0x66e00000675, - 0x679000006d4, - 0x6d5000006dd, - 0x6df000006e9, - 0x6ea000006f0, - 0x6fa00000700, - 0x7100000074b, - 0x74d000007b2, - 0x7c0000007f6, - 0x8000000082e, - 0x8400000085c, - 0x8a0000008a1, - 0x8a2000008ad, - 0x8e4000008ff, - 0x90000000958, - 0x96000000964, - 0x96600000970, - 0x97100000978, - 0x97900000980, - 0x98100000984, - 0x9850000098d, - 0x98f00000991, - 0x993000009a9, - 0x9aa000009b1, - 0x9b2000009b3, - 0x9b6000009ba, - 0x9bc000009c5, - 0x9c7000009c9, - 0x9cb000009cf, - 0x9d7000009d8, - 0x9e0000009e4, - 0x9e6000009f2, - 
0xa0100000a04, - 0xa0500000a0b, - 0xa0f00000a11, - 0xa1300000a29, - 0xa2a00000a31, - 0xa3200000a33, - 0xa3500000a36, - 0xa3800000a3a, - 0xa3c00000a3d, - 0xa3e00000a43, - 0xa4700000a49, - 0xa4b00000a4e, - 0xa5100000a52, - 0xa5c00000a5d, - 0xa6600000a76, - 0xa8100000a84, - 0xa8500000a8e, - 0xa8f00000a92, - 0xa9300000aa9, - 0xaaa00000ab1, - 0xab200000ab4, - 0xab500000aba, - 0xabc00000ac6, - 0xac700000aca, - 0xacb00000ace, - 0xad000000ad1, - 0xae000000ae4, - 0xae600000af0, - 0xb0100000b04, - 0xb0500000b0d, - 0xb0f00000b11, - 0xb1300000b29, - 0xb2a00000b31, - 0xb3200000b34, - 0xb3500000b3a, - 0xb3c00000b45, - 0xb4700000b49, - 0xb4b00000b4e, - 0xb5600000b58, - 0xb5f00000b64, - 0xb6600000b70, - 0xb7100000b72, - 0xb8200000b84, - 0xb8500000b8b, - 0xb8e00000b91, - 0xb9200000b96, - 0xb9900000b9b, - 0xb9c00000b9d, - 0xb9e00000ba0, - 0xba300000ba5, - 0xba800000bab, - 0xbae00000bba, - 0xbbe00000bc3, - 0xbc600000bc9, - 0xbca00000bce, - 0xbd000000bd1, - 0xbd700000bd8, - 0xbe600000bf0, - 0xc0100000c04, - 0xc0500000c0d, - 0xc0e00000c11, - 0xc1200000c29, - 0xc2a00000c34, - 0xc3500000c3a, - 0xc3d00000c45, - 0xc4600000c49, - 0xc4a00000c4e, - 0xc5500000c57, - 0xc5800000c5a, - 0xc6000000c64, - 0xc6600000c70, - 0xc8200000c84, - 0xc8500000c8d, - 0xc8e00000c91, - 0xc9200000ca9, - 0xcaa00000cb4, - 0xcb500000cba, - 0xcbc00000cc5, - 0xcc600000cc9, - 0xcca00000cce, - 0xcd500000cd7, - 0xcde00000cdf, - 0xce000000ce4, - 0xce600000cf0, - 0xcf100000cf3, - 0xd0200000d04, - 0xd0500000d0d, - 0xd0e00000d11, - 0xd1200000d3b, - 0xd3d00000d45, - 0xd4600000d49, - 0xd4a00000d4f, - 0xd5700000d58, - 0xd6000000d64, - 0xd6600000d70, - 0xd7a00000d80, - 0xd8200000d84, - 0xd8500000d97, - 0xd9a00000db2, - 0xdb300000dbc, - 0xdbd00000dbe, - 0xdc000000dc7, - 0xdca00000dcb, - 0xdcf00000dd5, - 0xdd600000dd7, - 0xdd800000de0, - 0xdf200000df4, - 0xe0100000e33, - 0xe3400000e3b, - 0xe4000000e4f, - 0xe5000000e5a, - 0xe8100000e83, - 0xe8400000e85, - 0xe8700000e89, - 0xe8a00000e8b, - 0xe8d00000e8e, - 0xe9400000e98, - 0xe9900000ea0, - 0xea100000ea4, - 0xea500000ea6, - 0xea700000ea8, - 0xeaa00000eac, - 0xead00000eb3, - 0xeb400000eba, - 0xebb00000ebe, - 0xec000000ec5, - 0xec600000ec7, - 0xec800000ece, - 0xed000000eda, - 0xede00000ee0, - 0xf0000000f01, - 0xf0b00000f0c, - 0xf1800000f1a, - 0xf2000000f2a, - 0xf3500000f36, - 0xf3700000f38, - 0xf3900000f3a, - 0xf3e00000f43, - 0xf4400000f48, - 0xf4900000f4d, - 0xf4e00000f52, - 0xf5300000f57, - 0xf5800000f5c, - 0xf5d00000f69, - 0xf6a00000f6d, - 0xf7100000f73, - 0xf7400000f75, - 0xf7a00000f81, - 0xf8200000f85, - 0xf8600000f93, - 0xf9400000f98, - 0xf9900000f9d, - 0xf9e00000fa2, - 0xfa300000fa7, - 0xfa800000fac, - 0xfad00000fb9, - 0xfba00000fbd, - 0xfc600000fc7, - 0x10000000104a, - 0x10500000109e, - 0x10d0000010fb, - 0x10fd00001100, - 0x120000001249, - 0x124a0000124e, - 0x125000001257, - 0x125800001259, - 0x125a0000125e, - 0x126000001289, - 0x128a0000128e, - 0x1290000012b1, - 0x12b2000012b6, - 0x12b8000012bf, - 0x12c0000012c1, - 0x12c2000012c6, - 0x12c8000012d7, - 0x12d800001311, - 0x131200001316, - 0x13180000135b, - 0x135d00001360, - 0x138000001390, - 0x13a0000013f5, - 0x14010000166d, - 0x166f00001680, - 0x16810000169b, - 0x16a0000016eb, - 0x17000000170d, - 0x170e00001715, - 0x172000001735, - 0x174000001754, - 0x17600000176d, - 0x176e00001771, - 0x177200001774, - 0x1780000017b4, - 0x17b6000017d4, - 0x17d7000017d8, - 0x17dc000017de, - 0x17e0000017ea, - 0x18100000181a, - 0x182000001878, - 0x1880000018ab, - 0x18b0000018f6, - 0x19000000191d, - 0x19200000192c, - 0x19300000193c, - 0x19460000196e, - 0x197000001975, - 
0x1980000019ac, - 0x19b0000019ca, - 0x19d0000019da, - 0x1a0000001a1c, - 0x1a2000001a5f, - 0x1a6000001a7d, - 0x1a7f00001a8a, - 0x1a9000001a9a, - 0x1aa700001aa8, - 0x1b0000001b4c, - 0x1b5000001b5a, - 0x1b6b00001b74, - 0x1b8000001bf4, - 0x1c0000001c38, - 0x1c4000001c4a, - 0x1c4d00001c7e, - 0x1cd000001cd3, - 0x1cd400001cf7, - 0x1d0000001d2c, - 0x1d2f00001d30, - 0x1d3b00001d3c, - 0x1d4e00001d4f, - 0x1d6b00001d78, - 0x1d7900001d9b, - 0x1dc000001de7, - 0x1dfc00001e00, - 0x1e0100001e02, - 0x1e0300001e04, - 0x1e0500001e06, - 0x1e0700001e08, - 0x1e0900001e0a, - 0x1e0b00001e0c, - 0x1e0d00001e0e, - 0x1e0f00001e10, - 0x1e1100001e12, - 0x1e1300001e14, - 0x1e1500001e16, - 0x1e1700001e18, - 0x1e1900001e1a, - 0x1e1b00001e1c, - 0x1e1d00001e1e, - 0x1e1f00001e20, - 0x1e2100001e22, - 0x1e2300001e24, - 0x1e2500001e26, - 0x1e2700001e28, - 0x1e2900001e2a, - 0x1e2b00001e2c, - 0x1e2d00001e2e, - 0x1e2f00001e30, - 0x1e3100001e32, - 0x1e3300001e34, - 0x1e3500001e36, - 0x1e3700001e38, - 0x1e3900001e3a, - 0x1e3b00001e3c, - 0x1e3d00001e3e, - 0x1e3f00001e40, - 0x1e4100001e42, - 0x1e4300001e44, - 0x1e4500001e46, - 0x1e4700001e48, - 0x1e4900001e4a, - 0x1e4b00001e4c, - 0x1e4d00001e4e, - 0x1e4f00001e50, - 0x1e5100001e52, - 0x1e5300001e54, - 0x1e5500001e56, - 0x1e5700001e58, - 0x1e5900001e5a, - 0x1e5b00001e5c, - 0x1e5d00001e5e, - 0x1e5f00001e60, - 0x1e6100001e62, - 0x1e6300001e64, - 0x1e6500001e66, - 0x1e6700001e68, - 0x1e6900001e6a, - 0x1e6b00001e6c, - 0x1e6d00001e6e, - 0x1e6f00001e70, - 0x1e7100001e72, - 0x1e7300001e74, - 0x1e7500001e76, - 0x1e7700001e78, - 0x1e7900001e7a, - 0x1e7b00001e7c, - 0x1e7d00001e7e, - 0x1e7f00001e80, - 0x1e8100001e82, - 0x1e8300001e84, - 0x1e8500001e86, - 0x1e8700001e88, - 0x1e8900001e8a, - 0x1e8b00001e8c, - 0x1e8d00001e8e, - 0x1e8f00001e90, - 0x1e9100001e92, - 0x1e9300001e94, - 0x1e9500001e9a, - 0x1e9c00001e9e, - 0x1e9f00001ea0, - 0x1ea100001ea2, - 0x1ea300001ea4, - 0x1ea500001ea6, - 0x1ea700001ea8, - 0x1ea900001eaa, - 0x1eab00001eac, - 0x1ead00001eae, - 0x1eaf00001eb0, - 0x1eb100001eb2, - 0x1eb300001eb4, - 0x1eb500001eb6, - 0x1eb700001eb8, - 0x1eb900001eba, - 0x1ebb00001ebc, - 0x1ebd00001ebe, - 0x1ebf00001ec0, - 0x1ec100001ec2, - 0x1ec300001ec4, - 0x1ec500001ec6, - 0x1ec700001ec8, - 0x1ec900001eca, - 0x1ecb00001ecc, - 0x1ecd00001ece, - 0x1ecf00001ed0, - 0x1ed100001ed2, - 0x1ed300001ed4, - 0x1ed500001ed6, - 0x1ed700001ed8, - 0x1ed900001eda, - 0x1edb00001edc, - 0x1edd00001ede, - 0x1edf00001ee0, - 0x1ee100001ee2, - 0x1ee300001ee4, - 0x1ee500001ee6, - 0x1ee700001ee8, - 0x1ee900001eea, - 0x1eeb00001eec, - 0x1eed00001eee, - 0x1eef00001ef0, - 0x1ef100001ef2, - 0x1ef300001ef4, - 0x1ef500001ef6, - 0x1ef700001ef8, - 0x1ef900001efa, - 0x1efb00001efc, - 0x1efd00001efe, - 0x1eff00001f08, - 0x1f1000001f16, - 0x1f2000001f28, - 0x1f3000001f38, - 0x1f4000001f46, - 0x1f5000001f58, - 0x1f6000001f68, - 0x1f7000001f71, - 0x1f7200001f73, - 0x1f7400001f75, - 0x1f7600001f77, - 0x1f7800001f79, - 0x1f7a00001f7b, - 0x1f7c00001f7d, - 0x1fb000001fb2, - 0x1fb600001fb7, - 0x1fc600001fc7, - 0x1fd000001fd3, - 0x1fd600001fd8, - 0x1fe000001fe3, - 0x1fe400001fe8, - 0x1ff600001ff7, - 0x214e0000214f, - 0x218400002185, - 0x2c3000002c5f, - 0x2c6100002c62, - 0x2c6500002c67, - 0x2c6800002c69, - 0x2c6a00002c6b, - 0x2c6c00002c6d, - 0x2c7100002c72, - 0x2c7300002c75, - 0x2c7600002c7c, - 0x2c8100002c82, - 0x2c8300002c84, - 0x2c8500002c86, - 0x2c8700002c88, - 0x2c8900002c8a, - 0x2c8b00002c8c, - 0x2c8d00002c8e, - 0x2c8f00002c90, - 0x2c9100002c92, - 0x2c9300002c94, - 0x2c9500002c96, - 0x2c9700002c98, - 0x2c9900002c9a, - 0x2c9b00002c9c, - 
0x2c9d00002c9e, - 0x2c9f00002ca0, - 0x2ca100002ca2, - 0x2ca300002ca4, - 0x2ca500002ca6, - 0x2ca700002ca8, - 0x2ca900002caa, - 0x2cab00002cac, - 0x2cad00002cae, - 0x2caf00002cb0, - 0x2cb100002cb2, - 0x2cb300002cb4, - 0x2cb500002cb6, - 0x2cb700002cb8, - 0x2cb900002cba, - 0x2cbb00002cbc, - 0x2cbd00002cbe, - 0x2cbf00002cc0, - 0x2cc100002cc2, - 0x2cc300002cc4, - 0x2cc500002cc6, - 0x2cc700002cc8, - 0x2cc900002cca, - 0x2ccb00002ccc, - 0x2ccd00002cce, - 0x2ccf00002cd0, - 0x2cd100002cd2, - 0x2cd300002cd4, - 0x2cd500002cd6, - 0x2cd700002cd8, - 0x2cd900002cda, - 0x2cdb00002cdc, - 0x2cdd00002cde, - 0x2cdf00002ce0, - 0x2ce100002ce2, - 0x2ce300002ce5, - 0x2cec00002ced, - 0x2cee00002cf2, - 0x2cf300002cf4, - 0x2d0000002d26, - 0x2d2700002d28, - 0x2d2d00002d2e, - 0x2d3000002d68, - 0x2d7f00002d97, - 0x2da000002da7, - 0x2da800002daf, - 0x2db000002db7, - 0x2db800002dbf, - 0x2dc000002dc7, - 0x2dc800002dcf, - 0x2dd000002dd7, - 0x2dd800002ddf, - 0x2de000002e00, - 0x2e2f00002e30, - 0x300500003008, - 0x302a0000302e, - 0x303c0000303d, - 0x304100003097, - 0x30990000309b, - 0x309d0000309f, - 0x30a1000030fb, - 0x30fc000030ff, - 0x31050000312e, - 0x31a0000031bb, - 0x31f000003200, - 0x340000004db6, - 0x4e0000009fcd, - 0xa0000000a48d, - 0xa4d00000a4fe, - 0xa5000000a60d, - 0xa6100000a62c, - 0xa6410000a642, - 0xa6430000a644, - 0xa6450000a646, - 0xa6470000a648, - 0xa6490000a64a, - 0xa64b0000a64c, - 0xa64d0000a64e, - 0xa64f0000a650, - 0xa6510000a652, - 0xa6530000a654, - 0xa6550000a656, - 0xa6570000a658, - 0xa6590000a65a, - 0xa65b0000a65c, - 0xa65d0000a65e, - 0xa65f0000a660, - 0xa6610000a662, - 0xa6630000a664, - 0xa6650000a666, - 0xa6670000a668, - 0xa6690000a66a, - 0xa66b0000a66c, - 0xa66d0000a670, - 0xa6740000a67e, - 0xa67f0000a680, - 0xa6810000a682, - 0xa6830000a684, - 0xa6850000a686, - 0xa6870000a688, - 0xa6890000a68a, - 0xa68b0000a68c, - 0xa68d0000a68e, - 0xa68f0000a690, - 0xa6910000a692, - 0xa6930000a694, - 0xa6950000a696, - 0xa6970000a698, - 0xa69f0000a6e6, - 0xa6f00000a6f2, - 0xa7170000a720, - 0xa7230000a724, - 0xa7250000a726, - 0xa7270000a728, - 0xa7290000a72a, - 0xa72b0000a72c, - 0xa72d0000a72e, - 0xa72f0000a732, - 0xa7330000a734, - 0xa7350000a736, - 0xa7370000a738, - 0xa7390000a73a, - 0xa73b0000a73c, - 0xa73d0000a73e, - 0xa73f0000a740, - 0xa7410000a742, - 0xa7430000a744, - 0xa7450000a746, - 0xa7470000a748, - 0xa7490000a74a, - 0xa74b0000a74c, - 0xa74d0000a74e, - 0xa74f0000a750, - 0xa7510000a752, - 0xa7530000a754, - 0xa7550000a756, - 0xa7570000a758, - 0xa7590000a75a, - 0xa75b0000a75c, - 0xa75d0000a75e, - 0xa75f0000a760, - 0xa7610000a762, - 0xa7630000a764, - 0xa7650000a766, - 0xa7670000a768, - 0xa7690000a76a, - 0xa76b0000a76c, - 0xa76d0000a76e, - 0xa76f0000a770, - 0xa7710000a779, - 0xa77a0000a77b, - 0xa77c0000a77d, - 0xa77f0000a780, - 0xa7810000a782, - 0xa7830000a784, - 0xa7850000a786, - 0xa7870000a789, - 0xa78c0000a78d, - 0xa78e0000a78f, - 0xa7910000a792, - 0xa7930000a794, - 0xa7a10000a7a2, - 0xa7a30000a7a4, - 0xa7a50000a7a6, - 0xa7a70000a7a8, - 0xa7a90000a7aa, - 0xa7fa0000a828, - 0xa8400000a874, - 0xa8800000a8c5, - 0xa8d00000a8da, - 0xa8e00000a8f8, - 0xa8fb0000a8fc, - 0xa9000000a92e, - 0xa9300000a954, - 0xa9800000a9c1, - 0xa9cf0000a9da, - 0xaa000000aa37, - 0xaa400000aa4e, - 0xaa500000aa5a, - 0xaa600000aa77, - 0xaa7a0000aa7c, - 0xaa800000aac3, - 0xaadb0000aade, - 0xaae00000aaf0, - 0xaaf20000aaf7, - 0xab010000ab07, - 0xab090000ab0f, - 0xab110000ab17, - 0xab200000ab27, - 0xab280000ab2f, - 0xabc00000abeb, - 0xabec0000abee, - 0xabf00000abfa, - 0xac000000d7a4, - 0xfa0e0000fa10, - 0xfa110000fa12, - 0xfa130000fa15, - 
0xfa1f0000fa20, - 0xfa210000fa22, - 0xfa230000fa25, - 0xfa270000fa2a, - 0xfb1e0000fb1f, - 0xfe200000fe27, - 0xfe730000fe74, - 0x100000001000c, - 0x1000d00010027, - 0x100280001003b, - 0x1003c0001003e, - 0x1003f0001004e, - 0x100500001005e, - 0x10080000100fb, - 0x101fd000101fe, - 0x102800001029d, - 0x102a0000102d1, - 0x103000001031f, - 0x1033000010341, - 0x103420001034a, - 0x103800001039e, - 0x103a0000103c4, - 0x103c8000103d0, - 0x104280001049e, - 0x104a0000104aa, - 0x1080000010806, - 0x1080800010809, - 0x1080a00010836, - 0x1083700010839, - 0x1083c0001083d, - 0x1083f00010856, - 0x1090000010916, - 0x109200001093a, - 0x10980000109b8, - 0x109be000109c0, - 0x10a0000010a04, - 0x10a0500010a07, - 0x10a0c00010a14, - 0x10a1500010a18, - 0x10a1900010a34, - 0x10a3800010a3b, - 0x10a3f00010a40, - 0x10a6000010a7d, - 0x10b0000010b36, - 0x10b4000010b56, - 0x10b6000010b73, - 0x10c0000010c49, - 0x1100000011047, - 0x1106600011070, - 0x11080000110bb, - 0x110d0000110e9, - 0x110f0000110fa, - 0x1110000011135, - 0x1113600011140, - 0x11180000111c5, - 0x111d0000111da, - 0x11680000116b8, - 0x116c0000116ca, - 0x120000001236f, - 0x130000001342f, - 0x1680000016a39, - 0x16f0000016f45, - 0x16f5000016f7f, - 0x16f8f00016fa0, - 0x1b0000001b002, - 0x200000002a6d7, - 0x2a7000002b735, - 0x2b7400002b81e, - ), - 'CONTEXTJ': ( - 0x200c0000200e, - ), - 'CONTEXTO': ( - 0xb7000000b8, - 0x37500000376, - 0x5f3000005f5, - 0x6600000066a, - 0x6f0000006fa, - 0x30fb000030fc, - ), -} diff --git a/pype/vendor/requests/packages/idna/intranges.py b/pype/vendor/requests/packages/idna/intranges.py deleted file mode 100644 index fa8a735662..0000000000 --- a/pype/vendor/requests/packages/idna/intranges.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Given a list of integers, made up of (hopefully) a small number of long runs -of consecutive integers, compute a representation of the form -((start1, end1), (start2, end2) ...). Then answer the question "was x present -in the original list?" in time O(log(# runs)). -""" - -import bisect - -def intranges_from_list(list_): - """Represent a list of integers as a sequence of ranges: - ((start_0, end_0), (start_1, end_1), ...), such that the original - integers are exactly those x such that start_i <= x < end_i for some i. - - Ranges are encoded as single integers (start << 32 | end), not as tuples. 
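The encoding this docstring describes is worth a tiny standalone demo before the implementation that follows: runs of consecutive integers are packed into single ints as (start << 32) | end, and membership testing becomes a bisect over the sorted encoded ranges. A sketch mirroring intranges_contain below:

import bisect

def encode_range(start, end):
    return (start << 32) | end

def decode_range(r):
    return r >> 32, r & ((1 << 32) - 1)

# Two runs, '0'..'9' and 'a'..'z', encoded and kept sorted for bisect.
ranges = tuple(sorted([encode_range(0x30, 0x3A), encode_range(0x61, 0x7B)]))

def contains(cp, ranges):
    pos = bisect.bisect_left(ranges, encode_range(cp, 0))
    if pos > 0:                        # cp may fall inside the previous run
        left, right = decode_range(ranges[pos - 1])
        if left <= cp < right:
            return True
    if pos < len(ranges):              # or sit exactly on a run's start
        left, _ = decode_range(ranges[pos])
        return left == cp
    return False

print(contains(ord("q"), ranges), contains(ord("-"), ranges))  # True False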
- """ - - sorted_list = sorted(list_) - ranges = [] - last_write = -1 - for i in range(len(sorted_list)): - if i+1 < len(sorted_list): - if sorted_list[i] == sorted_list[i+1]-1: - continue - current_range = sorted_list[last_write+1:i+1] - ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) - last_write = i - - return tuple(ranges) - -def _encode_range(start, end): - return (start << 32) | end - -def _decode_range(r): - return (r >> 32), (r & ((1 << 32) - 1)) - - -def intranges_contain(int_, ranges): - """Determine if `int_` falls into one of the ranges in `ranges`.""" - tuple_ = _encode_range(int_, 0) - pos = bisect.bisect_left(ranges, tuple_) - # we could be immediately ahead of a tuple (start, end) - # with start < int_ <= end - if pos > 0: - left, right = _decode_range(ranges[pos-1]) - if left <= int_ < right: - return True - # or we could be immediately behind a tuple (int_, end) - if pos < len(ranges): - left, _ = _decode_range(ranges[pos]) - if left == int_: - return True - return False diff --git a/pype/vendor/requests/packages/idna/uts46data.py b/pype/vendor/requests/packages/idna/uts46data.py deleted file mode 100644 index 48da8408f2..0000000000 --- a/pype/vendor/requests/packages/idna/uts46data.py +++ /dev/null @@ -1,7633 +0,0 @@ -# This file is automatically generated by tools/build-uts46data.py -# vim: set fileencoding=utf-8 : - -"""IDNA Mapping Table from UTS46.""" - - -def _seg_0(): - return [ - (0x0, '3'), - (0x1, '3'), - (0x2, '3'), - (0x3, '3'), - (0x4, '3'), - (0x5, '3'), - (0x6, '3'), - (0x7, '3'), - (0x8, '3'), - (0x9, '3'), - (0xA, '3'), - (0xB, '3'), - (0xC, '3'), - (0xD, '3'), - (0xE, '3'), - (0xF, '3'), - (0x10, '3'), - (0x11, '3'), - (0x12, '3'), - (0x13, '3'), - (0x14, '3'), - (0x15, '3'), - (0x16, '3'), - (0x17, '3'), - (0x18, '3'), - (0x19, '3'), - (0x1A, '3'), - (0x1B, '3'), - (0x1C, '3'), - (0x1D, '3'), - (0x1E, '3'), - (0x1F, '3'), - (0x20, '3'), - (0x21, '3'), - (0x22, '3'), - (0x23, '3'), - (0x24, '3'), - (0x25, '3'), - (0x26, '3'), - (0x27, '3'), - (0x28, '3'), - (0x29, '3'), - (0x2A, '3'), - (0x2B, '3'), - (0x2C, '3'), - (0x2D, 'V'), - (0x2E, 'V'), - (0x2F, '3'), - (0x30, 'V'), - (0x31, 'V'), - (0x32, 'V'), - (0x33, 'V'), - (0x34, 'V'), - (0x35, 'V'), - (0x36, 'V'), - (0x37, 'V'), - (0x38, 'V'), - (0x39, 'V'), - (0x3A, '3'), - (0x3B, '3'), - (0x3C, '3'), - (0x3D, '3'), - (0x3E, '3'), - (0x3F, '3'), - (0x40, '3'), - (0x41, 'M', u'a'), - (0x42, 'M', u'b'), - (0x43, 'M', u'c'), - (0x44, 'M', u'd'), - (0x45, 'M', u'e'), - (0x46, 'M', u'f'), - (0x47, 'M', u'g'), - (0x48, 'M', u'h'), - (0x49, 'M', u'i'), - (0x4A, 'M', u'j'), - (0x4B, 'M', u'k'), - (0x4C, 'M', u'l'), - (0x4D, 'M', u'm'), - (0x4E, 'M', u'n'), - (0x4F, 'M', u'o'), - (0x50, 'M', u'p'), - (0x51, 'M', u'q'), - (0x52, 'M', u'r'), - (0x53, 'M', u's'), - (0x54, 'M', u't'), - (0x55, 'M', u'u'), - (0x56, 'M', u'v'), - (0x57, 'M', u'w'), - (0x58, 'M', u'x'), - (0x59, 'M', u'y'), - (0x5A, 'M', u'z'), - (0x5B, '3'), - (0x5C, '3'), - (0x5D, '3'), - (0x5E, '3'), - (0x5F, '3'), - (0x60, '3'), - (0x61, 'V'), - (0x62, 'V'), - (0x63, 'V'), - ] - -def _seg_1(): - return [ - (0x64, 'V'), - (0x65, 'V'), - (0x66, 'V'), - (0x67, 'V'), - (0x68, 'V'), - (0x69, 'V'), - (0x6A, 'V'), - (0x6B, 'V'), - (0x6C, 'V'), - (0x6D, 'V'), - (0x6E, 'V'), - (0x6F, 'V'), - (0x70, 'V'), - (0x71, 'V'), - (0x72, 'V'), - (0x73, 'V'), - (0x74, 'V'), - (0x75, 'V'), - (0x76, 'V'), - (0x77, 'V'), - (0x78, 'V'), - (0x79, 'V'), - (0x7A, 'V'), - (0x7B, '3'), - (0x7C, '3'), - (0x7D, '3'), - (0x7E, '3'), - (0x7F, '3'), - (0x80, 
[Several thousand consecutive deleted lines are elided here for readability. They are the body of an auto-generated Unicode mapping table — by all appearances the idna package's uts46data module, removed along with the rest of the vendored third-party code this diff drops. The elided span covers the tail of _seg_1() through most of _seg_33(). Each _seg_N() returns a list of (start_codepoint, status[, mapping]) tuples, where a row governs every code point from its start up to the next row's start. The status codes visible in the data are 'V' (valid), 'M' (mapped to the given replacement), 'D' (deviation), 'I' (ignored), 'X' (disallowed) and '3' (disallowed under STD3, optionally with a replacement). No hand-written logic is lost in this hunk; the table is machine-generated from the Unicode IDNA/UTS #46 data files.]
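Since the deleted table is easier to review with its access pattern in mind, here is a minimal, self-contained sketch of how rows of this shape are typically consumed. It is illustrative only: the example rows are copied from the hunk above, but the helper names (STATUS_NAMES, remap_char) are hypothetical and are not part of the removed module — the shipped idna package performs an equivalent bisect-based lookup in its own code.

    import bisect

    # Each row is (start_codepoint, status[, mapping]); a row applies to
    # every code point from its start up to the next row's start.
    # Rows below are taken verbatim from the deleted table.
    TABLE = [
        (0x1E00, 'M', u'\u1e01'),  # capital A with ring below -> lowercase
        (0x1E01, 'V'),             # valid as-is
        (0x200B, 'I'),             # zero width space -> ignored (dropped)
        (0x200C, 'D', u''),        # zero width non-joiner -> deviation
        (0x200E, 'X'),             # left-to-right mark -> disallowed
    ]
    _STARTS = [row[0] for row in TABLE]

    STATUS_NAMES = {
        'V': 'valid', 'M': 'mapped', 'D': 'deviation',
        'I': 'ignored', 'X': 'disallowed', '3': 'disallowed_STD3',
    }

    def remap_char(ch):
        """Return (status_name, replacement_text) for one character.

        Sketch only: assumes ord(ch) >= TABLE[0][0], i.e. the character
        is covered by this toy excerpt of the table.
        """
        idx = bisect.bisect_right(_STARTS, ord(ch)) - 1
        row = TABLE[idx]
        status = row[1]
        if status in ('M', '3') and len(row) > 2:
            return STATUS_NAMES[status], row[2]  # substitute the mapping
        if status == 'I':
            return STATUS_NAMES[status], u''     # drop the character
        return STATUS_NAMES[status], ch

    print(remap_char(u'\u1e00'))  # ('mapped', u'\u1e01')
    print(remap_char(u'\u200b'))  # ('ignored', u'')

The range-start encoding is why the generated file is so large yet cheap to query: one sorted list, one binary search per character, no per-code-point storage.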
'M', u'24点'), - (0x3371, 'M', u'hpa'), - (0x3372, 'M', u'da'), - (0x3373, 'M', u'au'), - (0x3374, 'M', u'bar'), - (0x3375, 'M', u'ov'), - (0x3376, 'M', u'pc'), - (0x3377, 'M', u'dm'), - (0x3378, 'M', u'dm2'), - (0x3379, 'M', u'dm3'), - (0x337A, 'M', u'iu'), - (0x337B, 'M', u'平成'), - (0x337C, 'M', u'昭和'), - (0x337D, 'M', u'大正'), - (0x337E, 'M', u'明治'), - (0x337F, 'M', u'株式会社'), - (0x3380, 'M', u'pa'), - (0x3381, 'M', u'na'), - (0x3382, 'M', u'μa'), - (0x3383, 'M', u'ma'), - (0x3384, 'M', u'ka'), - (0x3385, 'M', u'kb'), - (0x3386, 'M', u'mb'), - (0x3387, 'M', u'gb'), - (0x3388, 'M', u'cal'), - (0x3389, 'M', u'kcal'), - (0x338A, 'M', u'pf'), - (0x338B, 'M', u'nf'), - (0x338C, 'M', u'μf'), - (0x338D, 'M', u'μg'), - (0x338E, 'M', u'mg'), - (0x338F, 'M', u'kg'), - (0x3390, 'M', u'hz'), - (0x3391, 'M', u'khz'), - (0x3392, 'M', u'mhz'), - (0x3393, 'M', u'ghz'), - (0x3394, 'M', u'thz'), - (0x3395, 'M', u'μl'), - (0x3396, 'M', u'ml'), - (0x3397, 'M', u'dl'), - (0x3398, 'M', u'kl'), - (0x3399, 'M', u'fm'), - (0x339A, 'M', u'nm'), - (0x339B, 'M', u'μm'), - ] - -def _seg_34(): - return [ - (0x339C, 'M', u'mm'), - (0x339D, 'M', u'cm'), - (0x339E, 'M', u'km'), - (0x339F, 'M', u'mm2'), - (0x33A0, 'M', u'cm2'), - (0x33A1, 'M', u'm2'), - (0x33A2, 'M', u'km2'), - (0x33A3, 'M', u'mm3'), - (0x33A4, 'M', u'cm3'), - (0x33A5, 'M', u'm3'), - (0x33A6, 'M', u'km3'), - (0x33A7, 'M', u'm∕s'), - (0x33A8, 'M', u'm∕s2'), - (0x33A9, 'M', u'pa'), - (0x33AA, 'M', u'kpa'), - (0x33AB, 'M', u'mpa'), - (0x33AC, 'M', u'gpa'), - (0x33AD, 'M', u'rad'), - (0x33AE, 'M', u'rad∕s'), - (0x33AF, 'M', u'rad∕s2'), - (0x33B0, 'M', u'ps'), - (0x33B1, 'M', u'ns'), - (0x33B2, 'M', u'μs'), - (0x33B3, 'M', u'ms'), - (0x33B4, 'M', u'pv'), - (0x33B5, 'M', u'nv'), - (0x33B6, 'M', u'μv'), - (0x33B7, 'M', u'mv'), - (0x33B8, 'M', u'kv'), - (0x33B9, 'M', u'mv'), - (0x33BA, 'M', u'pw'), - (0x33BB, 'M', u'nw'), - (0x33BC, 'M', u'μw'), - (0x33BD, 'M', u'mw'), - (0x33BE, 'M', u'kw'), - (0x33BF, 'M', u'mw'), - (0x33C0, 'M', u'kω'), - (0x33C1, 'M', u'mω'), - (0x33C2, 'X'), - (0x33C3, 'M', u'bq'), - (0x33C4, 'M', u'cc'), - (0x33C5, 'M', u'cd'), - (0x33C6, 'M', u'c∕kg'), - (0x33C7, 'X'), - (0x33C8, 'M', u'db'), - (0x33C9, 'M', u'gy'), - (0x33CA, 'M', u'ha'), - (0x33CB, 'M', u'hp'), - (0x33CC, 'M', u'in'), - (0x33CD, 'M', u'kk'), - (0x33CE, 'M', u'km'), - (0x33CF, 'M', u'kt'), - (0x33D0, 'M', u'lm'), - (0x33D1, 'M', u'ln'), - (0x33D2, 'M', u'log'), - (0x33D3, 'M', u'lx'), - (0x33D4, 'M', u'mb'), - (0x33D5, 'M', u'mil'), - (0x33D6, 'M', u'mol'), - (0x33D7, 'M', u'ph'), - (0x33D8, 'X'), - (0x33D9, 'M', u'ppm'), - (0x33DA, 'M', u'pr'), - (0x33DB, 'M', u'sr'), - (0x33DC, 'M', u'sv'), - (0x33DD, 'M', u'wb'), - (0x33DE, 'M', u'v∕m'), - (0x33DF, 'M', u'a∕m'), - (0x33E0, 'M', u'1日'), - (0x33E1, 'M', u'2日'), - (0x33E2, 'M', u'3日'), - (0x33E3, 'M', u'4日'), - (0x33E4, 'M', u'5日'), - (0x33E5, 'M', u'6日'), - (0x33E6, 'M', u'7日'), - (0x33E7, 'M', u'8日'), - (0x33E8, 'M', u'9日'), - (0x33E9, 'M', u'10日'), - (0x33EA, 'M', u'11日'), - (0x33EB, 'M', u'12日'), - (0x33EC, 'M', u'13日'), - (0x33ED, 'M', u'14日'), - (0x33EE, 'M', u'15日'), - (0x33EF, 'M', u'16日'), - (0x33F0, 'M', u'17日'), - (0x33F1, 'M', u'18日'), - (0x33F2, 'M', u'19日'), - (0x33F3, 'M', u'20日'), - (0x33F4, 'M', u'21日'), - (0x33F5, 'M', u'22日'), - (0x33F6, 'M', u'23日'), - (0x33F7, 'M', u'24日'), - (0x33F8, 'M', u'25日'), - (0x33F9, 'M', u'26日'), - (0x33FA, 'M', u'27日'), - (0x33FB, 'M', u'28日'), - (0x33FC, 'M', u'29日'), - (0x33FD, 'M', u'30日'), - (0x33FE, 'M', u'31日'), - (0x33FF, 'M', u'gal'), - ] - -def _seg_35(): - return [ 
- (0x3400, 'V'), - (0x4DB6, 'X'), - (0x4DC0, 'V'), - (0x9FCD, 'X'), - (0xA000, 'V'), - (0xA48D, 'X'), - (0xA490, 'V'), - (0xA4C7, 'X'), - (0xA4D0, 'V'), - (0xA62C, 'X'), - (0xA640, 'M', u'ꙁ'), - (0xA641, 'V'), - (0xA642, 'M', u'ꙃ'), - (0xA643, 'V'), - (0xA644, 'M', u'ꙅ'), - (0xA645, 'V'), - (0xA646, 'M', u'ꙇ'), - (0xA647, 'V'), - (0xA648, 'M', u'ꙉ'), - (0xA649, 'V'), - (0xA64A, 'M', u'ꙋ'), - (0xA64B, 'V'), - (0xA64C, 'M', u'ꙍ'), - (0xA64D, 'V'), - (0xA64E, 'M', u'ꙏ'), - (0xA64F, 'V'), - (0xA650, 'M', u'ꙑ'), - (0xA651, 'V'), - (0xA652, 'M', u'ꙓ'), - (0xA653, 'V'), - (0xA654, 'M', u'ꙕ'), - (0xA655, 'V'), - (0xA656, 'M', u'ꙗ'), - (0xA657, 'V'), - (0xA658, 'M', u'ꙙ'), - (0xA659, 'V'), - (0xA65A, 'M', u'ꙛ'), - (0xA65B, 'V'), - (0xA65C, 'M', u'ꙝ'), - (0xA65D, 'V'), - (0xA65E, 'M', u'ꙟ'), - (0xA65F, 'V'), - (0xA660, 'M', u'ꙡ'), - (0xA661, 'V'), - (0xA662, 'M', u'ꙣ'), - (0xA663, 'V'), - (0xA664, 'M', u'ꙥ'), - (0xA665, 'V'), - (0xA666, 'M', u'ꙧ'), - (0xA667, 'V'), - (0xA668, 'M', u'ꙩ'), - (0xA669, 'V'), - (0xA66A, 'M', u'ꙫ'), - (0xA66B, 'V'), - (0xA66C, 'M', u'ꙭ'), - (0xA66D, 'V'), - (0xA680, 'M', u'ꚁ'), - (0xA681, 'V'), - (0xA682, 'M', u'ꚃ'), - (0xA683, 'V'), - (0xA684, 'M', u'ꚅ'), - (0xA685, 'V'), - (0xA686, 'M', u'ꚇ'), - (0xA687, 'V'), - (0xA688, 'M', u'ꚉ'), - (0xA689, 'V'), - (0xA68A, 'M', u'ꚋ'), - (0xA68B, 'V'), - (0xA68C, 'M', u'ꚍ'), - (0xA68D, 'V'), - (0xA68E, 'M', u'ꚏ'), - (0xA68F, 'V'), - (0xA690, 'M', u'ꚑ'), - (0xA691, 'V'), - (0xA692, 'M', u'ꚓ'), - (0xA693, 'V'), - (0xA694, 'M', u'ꚕ'), - (0xA695, 'V'), - (0xA696, 'M', u'ꚗ'), - (0xA697, 'V'), - (0xA698, 'X'), - (0xA69F, 'V'), - (0xA6F8, 'X'), - (0xA700, 'V'), - (0xA722, 'M', u'ꜣ'), - (0xA723, 'V'), - (0xA724, 'M', u'ꜥ'), - (0xA725, 'V'), - (0xA726, 'M', u'ꜧ'), - (0xA727, 'V'), - (0xA728, 'M', u'ꜩ'), - (0xA729, 'V'), - (0xA72A, 'M', u'ꜫ'), - (0xA72B, 'V'), - (0xA72C, 'M', u'ꜭ'), - (0xA72D, 'V'), - (0xA72E, 'M', u'ꜯ'), - (0xA72F, 'V'), - (0xA732, 'M', u'ꜳ'), - (0xA733, 'V'), - ] - -def _seg_36(): - return [ - (0xA734, 'M', u'ꜵ'), - (0xA735, 'V'), - (0xA736, 'M', u'ꜷ'), - (0xA737, 'V'), - (0xA738, 'M', u'ꜹ'), - (0xA739, 'V'), - (0xA73A, 'M', u'ꜻ'), - (0xA73B, 'V'), - (0xA73C, 'M', u'ꜽ'), - (0xA73D, 'V'), - (0xA73E, 'M', u'ꜿ'), - (0xA73F, 'V'), - (0xA740, 'M', u'ꝁ'), - (0xA741, 'V'), - (0xA742, 'M', u'ꝃ'), - (0xA743, 'V'), - (0xA744, 'M', u'ꝅ'), - (0xA745, 'V'), - (0xA746, 'M', u'ꝇ'), - (0xA747, 'V'), - (0xA748, 'M', u'ꝉ'), - (0xA749, 'V'), - (0xA74A, 'M', u'ꝋ'), - (0xA74B, 'V'), - (0xA74C, 'M', u'ꝍ'), - (0xA74D, 'V'), - (0xA74E, 'M', u'ꝏ'), - (0xA74F, 'V'), - (0xA750, 'M', u'ꝑ'), - (0xA751, 'V'), - (0xA752, 'M', u'ꝓ'), - (0xA753, 'V'), - (0xA754, 'M', u'ꝕ'), - (0xA755, 'V'), - (0xA756, 'M', u'ꝗ'), - (0xA757, 'V'), - (0xA758, 'M', u'ꝙ'), - (0xA759, 'V'), - (0xA75A, 'M', u'ꝛ'), - (0xA75B, 'V'), - (0xA75C, 'M', u'ꝝ'), - (0xA75D, 'V'), - (0xA75E, 'M', u'ꝟ'), - (0xA75F, 'V'), - (0xA760, 'M', u'ꝡ'), - (0xA761, 'V'), - (0xA762, 'M', u'ꝣ'), - (0xA763, 'V'), - (0xA764, 'M', u'ꝥ'), - (0xA765, 'V'), - (0xA766, 'M', u'ꝧ'), - (0xA767, 'V'), - (0xA768, 'M', u'ꝩ'), - (0xA769, 'V'), - (0xA76A, 'M', u'ꝫ'), - (0xA76B, 'V'), - (0xA76C, 'M', u'ꝭ'), - (0xA76D, 'V'), - (0xA76E, 'M', u'ꝯ'), - (0xA76F, 'V'), - (0xA770, 'M', u'ꝯ'), - (0xA771, 'V'), - (0xA779, 'M', u'ꝺ'), - (0xA77A, 'V'), - (0xA77B, 'M', u'ꝼ'), - (0xA77C, 'V'), - (0xA77D, 'M', u'ᵹ'), - (0xA77E, 'M', u'ꝿ'), - (0xA77F, 'V'), - (0xA780, 'M', u'ꞁ'), - (0xA781, 'V'), - (0xA782, 'M', u'ꞃ'), - (0xA783, 'V'), - (0xA784, 'M', u'ꞅ'), - (0xA785, 'V'), - (0xA786, 'M', u'ꞇ'), - (0xA787, 'V'), - (0xA78B, 'M', 
u'ꞌ'), - (0xA78C, 'V'), - (0xA78D, 'M', u'ɥ'), - (0xA78E, 'V'), - (0xA78F, 'X'), - (0xA790, 'M', u'ꞑ'), - (0xA791, 'V'), - (0xA792, 'M', u'ꞓ'), - (0xA793, 'V'), - (0xA794, 'X'), - (0xA7A0, 'M', u'ꞡ'), - (0xA7A1, 'V'), - (0xA7A2, 'M', u'ꞣ'), - (0xA7A3, 'V'), - (0xA7A4, 'M', u'ꞥ'), - (0xA7A5, 'V'), - (0xA7A6, 'M', u'ꞧ'), - (0xA7A7, 'V'), - (0xA7A8, 'M', u'ꞩ'), - (0xA7A9, 'V'), - (0xA7AA, 'M', u'ɦ'), - (0xA7AB, 'X'), - (0xA7F8, 'M', u'ħ'), - ] - -def _seg_37(): - return [ - (0xA7F9, 'M', u'œ'), - (0xA7FA, 'V'), - (0xA82C, 'X'), - (0xA830, 'V'), - (0xA83A, 'X'), - (0xA840, 'V'), - (0xA878, 'X'), - (0xA880, 'V'), - (0xA8C5, 'X'), - (0xA8CE, 'V'), - (0xA8DA, 'X'), - (0xA8E0, 'V'), - (0xA8FC, 'X'), - (0xA900, 'V'), - (0xA954, 'X'), - (0xA95F, 'V'), - (0xA97D, 'X'), - (0xA980, 'V'), - (0xA9CE, 'X'), - (0xA9CF, 'V'), - (0xA9DA, 'X'), - (0xA9DE, 'V'), - (0xA9E0, 'X'), - (0xAA00, 'V'), - (0xAA37, 'X'), - (0xAA40, 'V'), - (0xAA4E, 'X'), - (0xAA50, 'V'), - (0xAA5A, 'X'), - (0xAA5C, 'V'), - (0xAA7C, 'X'), - (0xAA80, 'V'), - (0xAAC3, 'X'), - (0xAADB, 'V'), - (0xAAF7, 'X'), - (0xAB01, 'V'), - (0xAB07, 'X'), - (0xAB09, 'V'), - (0xAB0F, 'X'), - (0xAB11, 'V'), - (0xAB17, 'X'), - (0xAB20, 'V'), - (0xAB27, 'X'), - (0xAB28, 'V'), - (0xAB2F, 'X'), - (0xABC0, 'V'), - (0xABEE, 'X'), - (0xABF0, 'V'), - (0xABFA, 'X'), - (0xAC00, 'V'), - (0xD7A4, 'X'), - (0xD7B0, 'V'), - (0xD7C7, 'X'), - (0xD7CB, 'V'), - (0xD7FC, 'X'), - (0xF900, 'M', u'豈'), - (0xF901, 'M', u'更'), - (0xF902, 'M', u'車'), - (0xF903, 'M', u'賈'), - (0xF904, 'M', u'滑'), - (0xF905, 'M', u'串'), - (0xF906, 'M', u'句'), - (0xF907, 'M', u'龜'), - (0xF909, 'M', u'契'), - (0xF90A, 'M', u'金'), - (0xF90B, 'M', u'喇'), - (0xF90C, 'M', u'奈'), - (0xF90D, 'M', u'懶'), - (0xF90E, 'M', u'癩'), - (0xF90F, 'M', u'羅'), - (0xF910, 'M', u'蘿'), - (0xF911, 'M', u'螺'), - (0xF912, 'M', u'裸'), - (0xF913, 'M', u'邏'), - (0xF914, 'M', u'樂'), - (0xF915, 'M', u'洛'), - (0xF916, 'M', u'烙'), - (0xF917, 'M', u'珞'), - (0xF918, 'M', u'落'), - (0xF919, 'M', u'酪'), - (0xF91A, 'M', u'駱'), - (0xF91B, 'M', u'亂'), - (0xF91C, 'M', u'卵'), - (0xF91D, 'M', u'欄'), - (0xF91E, 'M', u'爛'), - (0xF91F, 'M', u'蘭'), - (0xF920, 'M', u'鸞'), - (0xF921, 'M', u'嵐'), - (0xF922, 'M', u'濫'), - (0xF923, 'M', u'藍'), - (0xF924, 'M', u'襤'), - (0xF925, 'M', u'拉'), - (0xF926, 'M', u'臘'), - (0xF927, 'M', u'蠟'), - (0xF928, 'M', u'廊'), - (0xF929, 'M', u'朗'), - (0xF92A, 'M', u'浪'), - (0xF92B, 'M', u'狼'), - (0xF92C, 'M', u'郎'), - (0xF92D, 'M', u'來'), - ] - -def _seg_38(): - return [ - (0xF92E, 'M', u'冷'), - (0xF92F, 'M', u'勞'), - (0xF930, 'M', u'擄'), - (0xF931, 'M', u'櫓'), - (0xF932, 'M', u'爐'), - (0xF933, 'M', u'盧'), - (0xF934, 'M', u'老'), - (0xF935, 'M', u'蘆'), - (0xF936, 'M', u'虜'), - (0xF937, 'M', u'路'), - (0xF938, 'M', u'露'), - (0xF939, 'M', u'魯'), - (0xF93A, 'M', u'鷺'), - (0xF93B, 'M', u'碌'), - (0xF93C, 'M', u'祿'), - (0xF93D, 'M', u'綠'), - (0xF93E, 'M', u'菉'), - (0xF93F, 'M', u'錄'), - (0xF940, 'M', u'鹿'), - (0xF941, 'M', u'論'), - (0xF942, 'M', u'壟'), - (0xF943, 'M', u'弄'), - (0xF944, 'M', u'籠'), - (0xF945, 'M', u'聾'), - (0xF946, 'M', u'牢'), - (0xF947, 'M', u'磊'), - (0xF948, 'M', u'賂'), - (0xF949, 'M', u'雷'), - (0xF94A, 'M', u'壘'), - (0xF94B, 'M', u'屢'), - (0xF94C, 'M', u'樓'), - (0xF94D, 'M', u'淚'), - (0xF94E, 'M', u'漏'), - (0xF94F, 'M', u'累'), - (0xF950, 'M', u'縷'), - (0xF951, 'M', u'陋'), - (0xF952, 'M', u'勒'), - (0xF953, 'M', u'肋'), - (0xF954, 'M', u'凜'), - (0xF955, 'M', u'凌'), - (0xF956, 'M', u'稜'), - (0xF957, 'M', u'綾'), - (0xF958, 'M', u'菱'), - (0xF959, 'M', u'陵'), - (0xF95A, 'M', u'讀'), - (0xF95B, 'M', u'拏'), - (0xF95C, 
'M', u'樂'), - (0xF95D, 'M', u'諾'), - (0xF95E, 'M', u'丹'), - (0xF95F, 'M', u'寧'), - (0xF960, 'M', u'怒'), - (0xF961, 'M', u'率'), - (0xF962, 'M', u'異'), - (0xF963, 'M', u'北'), - (0xF964, 'M', u'磻'), - (0xF965, 'M', u'便'), - (0xF966, 'M', u'復'), - (0xF967, 'M', u'不'), - (0xF968, 'M', u'泌'), - (0xF969, 'M', u'數'), - (0xF96A, 'M', u'索'), - (0xF96B, 'M', u'參'), - (0xF96C, 'M', u'塞'), - (0xF96D, 'M', u'省'), - (0xF96E, 'M', u'葉'), - (0xF96F, 'M', u'說'), - (0xF970, 'M', u'殺'), - (0xF971, 'M', u'辰'), - (0xF972, 'M', u'沈'), - (0xF973, 'M', u'拾'), - (0xF974, 'M', u'若'), - (0xF975, 'M', u'掠'), - (0xF976, 'M', u'略'), - (0xF977, 'M', u'亮'), - (0xF978, 'M', u'兩'), - (0xF979, 'M', u'凉'), - (0xF97A, 'M', u'梁'), - (0xF97B, 'M', u'糧'), - (0xF97C, 'M', u'良'), - (0xF97D, 'M', u'諒'), - (0xF97E, 'M', u'量'), - (0xF97F, 'M', u'勵'), - (0xF980, 'M', u'呂'), - (0xF981, 'M', u'女'), - (0xF982, 'M', u'廬'), - (0xF983, 'M', u'旅'), - (0xF984, 'M', u'濾'), - (0xF985, 'M', u'礪'), - (0xF986, 'M', u'閭'), - (0xF987, 'M', u'驪'), - (0xF988, 'M', u'麗'), - (0xF989, 'M', u'黎'), - (0xF98A, 'M', u'力'), - (0xF98B, 'M', u'曆'), - (0xF98C, 'M', u'歷'), - (0xF98D, 'M', u'轢'), - (0xF98E, 'M', u'年'), - (0xF98F, 'M', u'憐'), - (0xF990, 'M', u'戀'), - (0xF991, 'M', u'撚'), - ] - -def _seg_39(): - return [ - (0xF992, 'M', u'漣'), - (0xF993, 'M', u'煉'), - (0xF994, 'M', u'璉'), - (0xF995, 'M', u'秊'), - (0xF996, 'M', u'練'), - (0xF997, 'M', u'聯'), - (0xF998, 'M', u'輦'), - (0xF999, 'M', u'蓮'), - (0xF99A, 'M', u'連'), - (0xF99B, 'M', u'鍊'), - (0xF99C, 'M', u'列'), - (0xF99D, 'M', u'劣'), - (0xF99E, 'M', u'咽'), - (0xF99F, 'M', u'烈'), - (0xF9A0, 'M', u'裂'), - (0xF9A1, 'M', u'說'), - (0xF9A2, 'M', u'廉'), - (0xF9A3, 'M', u'念'), - (0xF9A4, 'M', u'捻'), - (0xF9A5, 'M', u'殮'), - (0xF9A6, 'M', u'簾'), - (0xF9A7, 'M', u'獵'), - (0xF9A8, 'M', u'令'), - (0xF9A9, 'M', u'囹'), - (0xF9AA, 'M', u'寧'), - (0xF9AB, 'M', u'嶺'), - (0xF9AC, 'M', u'怜'), - (0xF9AD, 'M', u'玲'), - (0xF9AE, 'M', u'瑩'), - (0xF9AF, 'M', u'羚'), - (0xF9B0, 'M', u'聆'), - (0xF9B1, 'M', u'鈴'), - (0xF9B2, 'M', u'零'), - (0xF9B3, 'M', u'靈'), - (0xF9B4, 'M', u'領'), - (0xF9B5, 'M', u'例'), - (0xF9B6, 'M', u'禮'), - (0xF9B7, 'M', u'醴'), - (0xF9B8, 'M', u'隸'), - (0xF9B9, 'M', u'惡'), - (0xF9BA, 'M', u'了'), - (0xF9BB, 'M', u'僚'), - (0xF9BC, 'M', u'寮'), - (0xF9BD, 'M', u'尿'), - (0xF9BE, 'M', u'料'), - (0xF9BF, 'M', u'樂'), - (0xF9C0, 'M', u'燎'), - (0xF9C1, 'M', u'療'), - (0xF9C2, 'M', u'蓼'), - (0xF9C3, 'M', u'遼'), - (0xF9C4, 'M', u'龍'), - (0xF9C5, 'M', u'暈'), - (0xF9C6, 'M', u'阮'), - (0xF9C7, 'M', u'劉'), - (0xF9C8, 'M', u'杻'), - (0xF9C9, 'M', u'柳'), - (0xF9CA, 'M', u'流'), - (0xF9CB, 'M', u'溜'), - (0xF9CC, 'M', u'琉'), - (0xF9CD, 'M', u'留'), - (0xF9CE, 'M', u'硫'), - (0xF9CF, 'M', u'紐'), - (0xF9D0, 'M', u'類'), - (0xF9D1, 'M', u'六'), - (0xF9D2, 'M', u'戮'), - (0xF9D3, 'M', u'陸'), - (0xF9D4, 'M', u'倫'), - (0xF9D5, 'M', u'崙'), - (0xF9D6, 'M', u'淪'), - (0xF9D7, 'M', u'輪'), - (0xF9D8, 'M', u'律'), - (0xF9D9, 'M', u'慄'), - (0xF9DA, 'M', u'栗'), - (0xF9DB, 'M', u'率'), - (0xF9DC, 'M', u'隆'), - (0xF9DD, 'M', u'利'), - (0xF9DE, 'M', u'吏'), - (0xF9DF, 'M', u'履'), - (0xF9E0, 'M', u'易'), - (0xF9E1, 'M', u'李'), - (0xF9E2, 'M', u'梨'), - (0xF9E3, 'M', u'泥'), - (0xF9E4, 'M', u'理'), - (0xF9E5, 'M', u'痢'), - (0xF9E6, 'M', u'罹'), - (0xF9E7, 'M', u'裏'), - (0xF9E8, 'M', u'裡'), - (0xF9E9, 'M', u'里'), - (0xF9EA, 'M', u'離'), - (0xF9EB, 'M', u'匿'), - (0xF9EC, 'M', u'溺'), - (0xF9ED, 'M', u'吝'), - (0xF9EE, 'M', u'燐'), - (0xF9EF, 'M', u'璘'), - (0xF9F0, 'M', u'藺'), - (0xF9F1, 'M', u'隣'), - (0xF9F2, 'M', u'鱗'), - (0xF9F3, 'M', u'麟'), - (0xF9F4, 'M', u'林'), - (0xF9F5, 
'M', u'淋'), - ] - -def _seg_40(): - return [ - (0xF9F6, 'M', u'臨'), - (0xF9F7, 'M', u'立'), - (0xF9F8, 'M', u'笠'), - (0xF9F9, 'M', u'粒'), - (0xF9FA, 'M', u'狀'), - (0xF9FB, 'M', u'炙'), - (0xF9FC, 'M', u'識'), - (0xF9FD, 'M', u'什'), - (0xF9FE, 'M', u'茶'), - (0xF9FF, 'M', u'刺'), - (0xFA00, 'M', u'切'), - (0xFA01, 'M', u'度'), - (0xFA02, 'M', u'拓'), - (0xFA03, 'M', u'糖'), - (0xFA04, 'M', u'宅'), - (0xFA05, 'M', u'洞'), - (0xFA06, 'M', u'暴'), - (0xFA07, 'M', u'輻'), - (0xFA08, 'M', u'行'), - (0xFA09, 'M', u'降'), - (0xFA0A, 'M', u'見'), - (0xFA0B, 'M', u'廓'), - (0xFA0C, 'M', u'兀'), - (0xFA0D, 'M', u'嗀'), - (0xFA0E, 'V'), - (0xFA10, 'M', u'塚'), - (0xFA11, 'V'), - (0xFA12, 'M', u'晴'), - (0xFA13, 'V'), - (0xFA15, 'M', u'凞'), - (0xFA16, 'M', u'猪'), - (0xFA17, 'M', u'益'), - (0xFA18, 'M', u'礼'), - (0xFA19, 'M', u'神'), - (0xFA1A, 'M', u'祥'), - (0xFA1B, 'M', u'福'), - (0xFA1C, 'M', u'靖'), - (0xFA1D, 'M', u'精'), - (0xFA1E, 'M', u'羽'), - (0xFA1F, 'V'), - (0xFA20, 'M', u'蘒'), - (0xFA21, 'V'), - (0xFA22, 'M', u'諸'), - (0xFA23, 'V'), - (0xFA25, 'M', u'逸'), - (0xFA26, 'M', u'都'), - (0xFA27, 'V'), - (0xFA2A, 'M', u'飯'), - (0xFA2B, 'M', u'飼'), - (0xFA2C, 'M', u'館'), - (0xFA2D, 'M', u'鶴'), - (0xFA2E, 'M', u'郞'), - (0xFA2F, 'M', u'隷'), - (0xFA30, 'M', u'侮'), - (0xFA31, 'M', u'僧'), - (0xFA32, 'M', u'免'), - (0xFA33, 'M', u'勉'), - (0xFA34, 'M', u'勤'), - (0xFA35, 'M', u'卑'), - (0xFA36, 'M', u'喝'), - (0xFA37, 'M', u'嘆'), - (0xFA38, 'M', u'器'), - (0xFA39, 'M', u'塀'), - (0xFA3A, 'M', u'墨'), - (0xFA3B, 'M', u'層'), - (0xFA3C, 'M', u'屮'), - (0xFA3D, 'M', u'悔'), - (0xFA3E, 'M', u'慨'), - (0xFA3F, 'M', u'憎'), - (0xFA40, 'M', u'懲'), - (0xFA41, 'M', u'敏'), - (0xFA42, 'M', u'既'), - (0xFA43, 'M', u'暑'), - (0xFA44, 'M', u'梅'), - (0xFA45, 'M', u'海'), - (0xFA46, 'M', u'渚'), - (0xFA47, 'M', u'漢'), - (0xFA48, 'M', u'煮'), - (0xFA49, 'M', u'爫'), - (0xFA4A, 'M', u'琢'), - (0xFA4B, 'M', u'碑'), - (0xFA4C, 'M', u'社'), - (0xFA4D, 'M', u'祉'), - (0xFA4E, 'M', u'祈'), - (0xFA4F, 'M', u'祐'), - (0xFA50, 'M', u'祖'), - (0xFA51, 'M', u'祝'), - (0xFA52, 'M', u'禍'), - (0xFA53, 'M', u'禎'), - (0xFA54, 'M', u'穀'), - (0xFA55, 'M', u'突'), - (0xFA56, 'M', u'節'), - (0xFA57, 'M', u'練'), - (0xFA58, 'M', u'縉'), - (0xFA59, 'M', u'繁'), - (0xFA5A, 'M', u'署'), - (0xFA5B, 'M', u'者'), - (0xFA5C, 'M', u'臭'), - (0xFA5D, 'M', u'艹'), - (0xFA5F, 'M', u'著'), - ] - -def _seg_41(): - return [ - (0xFA60, 'M', u'褐'), - (0xFA61, 'M', u'視'), - (0xFA62, 'M', u'謁'), - (0xFA63, 'M', u'謹'), - (0xFA64, 'M', u'賓'), - (0xFA65, 'M', u'贈'), - (0xFA66, 'M', u'辶'), - (0xFA67, 'M', u'逸'), - (0xFA68, 'M', u'難'), - (0xFA69, 'M', u'響'), - (0xFA6A, 'M', u'頻'), - (0xFA6B, 'M', u'恵'), - (0xFA6C, 'M', u'𤋮'), - (0xFA6D, 'M', u'舘'), - (0xFA6E, 'X'), - (0xFA70, 'M', u'並'), - (0xFA71, 'M', u'况'), - (0xFA72, 'M', u'全'), - (0xFA73, 'M', u'侀'), - (0xFA74, 'M', u'充'), - (0xFA75, 'M', u'冀'), - (0xFA76, 'M', u'勇'), - (0xFA77, 'M', u'勺'), - (0xFA78, 'M', u'喝'), - (0xFA79, 'M', u'啕'), - (0xFA7A, 'M', u'喙'), - (0xFA7B, 'M', u'嗢'), - (0xFA7C, 'M', u'塚'), - (0xFA7D, 'M', u'墳'), - (0xFA7E, 'M', u'奄'), - (0xFA7F, 'M', u'奔'), - (0xFA80, 'M', u'婢'), - (0xFA81, 'M', u'嬨'), - (0xFA82, 'M', u'廒'), - (0xFA83, 'M', u'廙'), - (0xFA84, 'M', u'彩'), - (0xFA85, 'M', u'徭'), - (0xFA86, 'M', u'惘'), - (0xFA87, 'M', u'慎'), - (0xFA88, 'M', u'愈'), - (0xFA89, 'M', u'憎'), - (0xFA8A, 'M', u'慠'), - (0xFA8B, 'M', u'懲'), - (0xFA8C, 'M', u'戴'), - (0xFA8D, 'M', u'揄'), - (0xFA8E, 'M', u'搜'), - (0xFA8F, 'M', u'摒'), - (0xFA90, 'M', u'敖'), - (0xFA91, 'M', u'晴'), - (0xFA92, 'M', u'朗'), - (0xFA93, 'M', u'望'), - (0xFA94, 'M', u'杖'), - (0xFA95, 'M', u'歹'), - 
(0xFA96, 'M', u'殺'), - (0xFA97, 'M', u'流'), - (0xFA98, 'M', u'滛'), - (0xFA99, 'M', u'滋'), - (0xFA9A, 'M', u'漢'), - (0xFA9B, 'M', u'瀞'), - (0xFA9C, 'M', u'煮'), - (0xFA9D, 'M', u'瞧'), - (0xFA9E, 'M', u'爵'), - (0xFA9F, 'M', u'犯'), - (0xFAA0, 'M', u'猪'), - (0xFAA1, 'M', u'瑱'), - (0xFAA2, 'M', u'甆'), - (0xFAA3, 'M', u'画'), - (0xFAA4, 'M', u'瘝'), - (0xFAA5, 'M', u'瘟'), - (0xFAA6, 'M', u'益'), - (0xFAA7, 'M', u'盛'), - (0xFAA8, 'M', u'直'), - (0xFAA9, 'M', u'睊'), - (0xFAAA, 'M', u'着'), - (0xFAAB, 'M', u'磌'), - (0xFAAC, 'M', u'窱'), - (0xFAAD, 'M', u'節'), - (0xFAAE, 'M', u'类'), - (0xFAAF, 'M', u'絛'), - (0xFAB0, 'M', u'練'), - (0xFAB1, 'M', u'缾'), - (0xFAB2, 'M', u'者'), - (0xFAB3, 'M', u'荒'), - (0xFAB4, 'M', u'華'), - (0xFAB5, 'M', u'蝹'), - (0xFAB6, 'M', u'襁'), - (0xFAB7, 'M', u'覆'), - (0xFAB8, 'M', u'視'), - (0xFAB9, 'M', u'調'), - (0xFABA, 'M', u'諸'), - (0xFABB, 'M', u'請'), - (0xFABC, 'M', u'謁'), - (0xFABD, 'M', u'諾'), - (0xFABE, 'M', u'諭'), - (0xFABF, 'M', u'謹'), - (0xFAC0, 'M', u'變'), - (0xFAC1, 'M', u'贈'), - (0xFAC2, 'M', u'輸'), - (0xFAC3, 'M', u'遲'), - (0xFAC4, 'M', u'醙'), - ] - -def _seg_42(): - return [ - (0xFAC5, 'M', u'鉶'), - (0xFAC6, 'M', u'陼'), - (0xFAC7, 'M', u'難'), - (0xFAC8, 'M', u'靖'), - (0xFAC9, 'M', u'韛'), - (0xFACA, 'M', u'響'), - (0xFACB, 'M', u'頋'), - (0xFACC, 'M', u'頻'), - (0xFACD, 'M', u'鬒'), - (0xFACE, 'M', u'龜'), - (0xFACF, 'M', u'𢡊'), - (0xFAD0, 'M', u'𢡄'), - (0xFAD1, 'M', u'𣏕'), - (0xFAD2, 'M', u'㮝'), - (0xFAD3, 'M', u'䀘'), - (0xFAD4, 'M', u'䀹'), - (0xFAD5, 'M', u'𥉉'), - (0xFAD6, 'M', u'𥳐'), - (0xFAD7, 'M', u'𧻓'), - (0xFAD8, 'M', u'齃'), - (0xFAD9, 'M', u'龎'), - (0xFADA, 'X'), - (0xFB00, 'M', u'ff'), - (0xFB01, 'M', u'fi'), - (0xFB02, 'M', u'fl'), - (0xFB03, 'M', u'ffi'), - (0xFB04, 'M', u'ffl'), - (0xFB05, 'M', u'st'), - (0xFB07, 'X'), - (0xFB13, 'M', u'մն'), - (0xFB14, 'M', u'մե'), - (0xFB15, 'M', u'մի'), - (0xFB16, 'M', u'վն'), - (0xFB17, 'M', u'մխ'), - (0xFB18, 'X'), - (0xFB1D, 'M', u'יִ'), - (0xFB1E, 'V'), - (0xFB1F, 'M', u'ײַ'), - (0xFB20, 'M', u'ע'), - (0xFB21, 'M', u'א'), - (0xFB22, 'M', u'ד'), - (0xFB23, 'M', u'ה'), - (0xFB24, 'M', u'כ'), - (0xFB25, 'M', u'ל'), - (0xFB26, 'M', u'ם'), - (0xFB27, 'M', u'ר'), - (0xFB28, 'M', u'ת'), - (0xFB29, '3', u'+'), - (0xFB2A, 'M', u'שׁ'), - (0xFB2B, 'M', u'שׂ'), - (0xFB2C, 'M', u'שּׁ'), - (0xFB2D, 'M', u'שּׂ'), - (0xFB2E, 'M', u'אַ'), - (0xFB2F, 'M', u'אָ'), - (0xFB30, 'M', u'אּ'), - (0xFB31, 'M', u'בּ'), - (0xFB32, 'M', u'גּ'), - (0xFB33, 'M', u'דּ'), - (0xFB34, 'M', u'הּ'), - (0xFB35, 'M', u'וּ'), - (0xFB36, 'M', u'זּ'), - (0xFB37, 'X'), - (0xFB38, 'M', u'טּ'), - (0xFB39, 'M', u'יּ'), - (0xFB3A, 'M', u'ךּ'), - (0xFB3B, 'M', u'כּ'), - (0xFB3C, 'M', u'לּ'), - (0xFB3D, 'X'), - (0xFB3E, 'M', u'מּ'), - (0xFB3F, 'X'), - (0xFB40, 'M', u'נּ'), - (0xFB41, 'M', u'סּ'), - (0xFB42, 'X'), - (0xFB43, 'M', u'ףּ'), - (0xFB44, 'M', u'פּ'), - (0xFB45, 'X'), - (0xFB46, 'M', u'צּ'), - (0xFB47, 'M', u'קּ'), - (0xFB48, 'M', u'רּ'), - (0xFB49, 'M', u'שּ'), - (0xFB4A, 'M', u'תּ'), - (0xFB4B, 'M', u'וֹ'), - (0xFB4C, 'M', u'בֿ'), - (0xFB4D, 'M', u'כֿ'), - (0xFB4E, 'M', u'פֿ'), - (0xFB4F, 'M', u'אל'), - (0xFB50, 'M', u'ٱ'), - (0xFB52, 'M', u'ٻ'), - (0xFB56, 'M', u'پ'), - (0xFB5A, 'M', u'ڀ'), - (0xFB5E, 'M', u'ٺ'), - (0xFB62, 'M', u'ٿ'), - (0xFB66, 'M', u'ٹ'), - (0xFB6A, 'M', u'ڤ'), - (0xFB6E, 'M', u'ڦ'), - (0xFB72, 'M', u'ڄ'), - (0xFB76, 'M', u'ڃ'), - (0xFB7A, 'M', u'چ'), - (0xFB7E, 'M', u'ڇ'), - (0xFB82, 'M', u'ڍ'), - ] - -def _seg_43(): - return [ - (0xFB84, 'M', u'ڌ'), - (0xFB86, 'M', u'ڎ'), - (0xFB88, 'M', u'ڈ'), - (0xFB8A, 'M', u'ژ'), - (0xFB8C, 'M', 
u'ڑ'), - (0xFB8E, 'M', u'ک'), - (0xFB92, 'M', u'گ'), - (0xFB96, 'M', u'ڳ'), - (0xFB9A, 'M', u'ڱ'), - (0xFB9E, 'M', u'ں'), - (0xFBA0, 'M', u'ڻ'), - (0xFBA4, 'M', u'ۀ'), - (0xFBA6, 'M', u'ہ'), - (0xFBAA, 'M', u'ھ'), - (0xFBAE, 'M', u'ے'), - (0xFBB0, 'M', u'ۓ'), - (0xFBB2, 'V'), - (0xFBC2, 'X'), - (0xFBD3, 'M', u'ڭ'), - (0xFBD7, 'M', u'ۇ'), - (0xFBD9, 'M', u'ۆ'), - (0xFBDB, 'M', u'ۈ'), - (0xFBDD, 'M', u'ۇٴ'), - (0xFBDE, 'M', u'ۋ'), - (0xFBE0, 'M', u'ۅ'), - (0xFBE2, 'M', u'ۉ'), - (0xFBE4, 'M', u'ې'), - (0xFBE8, 'M', u'ى'), - (0xFBEA, 'M', u'ئا'), - (0xFBEC, 'M', u'ئە'), - (0xFBEE, 'M', u'ئو'), - (0xFBF0, 'M', u'ئۇ'), - (0xFBF2, 'M', u'ئۆ'), - (0xFBF4, 'M', u'ئۈ'), - (0xFBF6, 'M', u'ئې'), - (0xFBF9, 'M', u'ئى'), - (0xFBFC, 'M', u'ی'), - (0xFC00, 'M', u'ئج'), - (0xFC01, 'M', u'ئح'), - (0xFC02, 'M', u'ئم'), - (0xFC03, 'M', u'ئى'), - (0xFC04, 'M', u'ئي'), - (0xFC05, 'M', u'بج'), - (0xFC06, 'M', u'بح'), - (0xFC07, 'M', u'بخ'), - (0xFC08, 'M', u'بم'), - (0xFC09, 'M', u'بى'), - (0xFC0A, 'M', u'بي'), - (0xFC0B, 'M', u'تج'), - (0xFC0C, 'M', u'تح'), - (0xFC0D, 'M', u'تخ'), - (0xFC0E, 'M', u'تم'), - (0xFC0F, 'M', u'تى'), - (0xFC10, 'M', u'تي'), - (0xFC11, 'M', u'ثج'), - (0xFC12, 'M', u'ثم'), - (0xFC13, 'M', u'ثى'), - (0xFC14, 'M', u'ثي'), - (0xFC15, 'M', u'جح'), - (0xFC16, 'M', u'جم'), - (0xFC17, 'M', u'حج'), - (0xFC18, 'M', u'حم'), - (0xFC19, 'M', u'خج'), - (0xFC1A, 'M', u'خح'), - (0xFC1B, 'M', u'خم'), - (0xFC1C, 'M', u'سج'), - (0xFC1D, 'M', u'سح'), - (0xFC1E, 'M', u'سخ'), - (0xFC1F, 'M', u'سم'), - (0xFC20, 'M', u'صح'), - (0xFC21, 'M', u'صم'), - (0xFC22, 'M', u'ضج'), - (0xFC23, 'M', u'ضح'), - (0xFC24, 'M', u'ضخ'), - (0xFC25, 'M', u'ضم'), - (0xFC26, 'M', u'طح'), - (0xFC27, 'M', u'طم'), - (0xFC28, 'M', u'ظم'), - (0xFC29, 'M', u'عج'), - (0xFC2A, 'M', u'عم'), - (0xFC2B, 'M', u'غج'), - (0xFC2C, 'M', u'غم'), - (0xFC2D, 'M', u'فج'), - (0xFC2E, 'M', u'فح'), - (0xFC2F, 'M', u'فخ'), - (0xFC30, 'M', u'فم'), - (0xFC31, 'M', u'فى'), - (0xFC32, 'M', u'في'), - (0xFC33, 'M', u'قح'), - (0xFC34, 'M', u'قم'), - (0xFC35, 'M', u'قى'), - (0xFC36, 'M', u'قي'), - (0xFC37, 'M', u'كا'), - (0xFC38, 'M', u'كج'), - (0xFC39, 'M', u'كح'), - (0xFC3A, 'M', u'كخ'), - (0xFC3B, 'M', u'كل'), - (0xFC3C, 'M', u'كم'), - (0xFC3D, 'M', u'كى'), - (0xFC3E, 'M', u'كي'), - ] - -def _seg_44(): - return [ - (0xFC3F, 'M', u'لج'), - (0xFC40, 'M', u'لح'), - (0xFC41, 'M', u'لخ'), - (0xFC42, 'M', u'لم'), - (0xFC43, 'M', u'لى'), - (0xFC44, 'M', u'لي'), - (0xFC45, 'M', u'مج'), - (0xFC46, 'M', u'مح'), - (0xFC47, 'M', u'مخ'), - (0xFC48, 'M', u'مم'), - (0xFC49, 'M', u'مى'), - (0xFC4A, 'M', u'مي'), - (0xFC4B, 'M', u'نج'), - (0xFC4C, 'M', u'نح'), - (0xFC4D, 'M', u'نخ'), - (0xFC4E, 'M', u'نم'), - (0xFC4F, 'M', u'نى'), - (0xFC50, 'M', u'ني'), - (0xFC51, 'M', u'هج'), - (0xFC52, 'M', u'هم'), - (0xFC53, 'M', u'هى'), - (0xFC54, 'M', u'هي'), - (0xFC55, 'M', u'يج'), - (0xFC56, 'M', u'يح'), - (0xFC57, 'M', u'يخ'), - (0xFC58, 'M', u'يم'), - (0xFC59, 'M', u'يى'), - (0xFC5A, 'M', u'يي'), - (0xFC5B, 'M', u'ذٰ'), - (0xFC5C, 'M', u'رٰ'), - (0xFC5D, 'M', u'ىٰ'), - (0xFC5E, '3', u' ٌّ'), - (0xFC5F, '3', u' ٍّ'), - (0xFC60, '3', u' َّ'), - (0xFC61, '3', u' ُّ'), - (0xFC62, '3', u' ِّ'), - (0xFC63, '3', u' ّٰ'), - (0xFC64, 'M', u'ئر'), - (0xFC65, 'M', u'ئز'), - (0xFC66, 'M', u'ئم'), - (0xFC67, 'M', u'ئن'), - (0xFC68, 'M', u'ئى'), - (0xFC69, 'M', u'ئي'), - (0xFC6A, 'M', u'بر'), - (0xFC6B, 'M', u'بز'), - (0xFC6C, 'M', u'بم'), - (0xFC6D, 'M', u'بن'), - (0xFC6E, 'M', u'بى'), - (0xFC6F, 'M', u'بي'), - (0xFC70, 'M', u'تر'), - (0xFC71, 'M', u'تز'), - (0xFC72, 'M', u'تم'), - (0xFC73, 'M', 
u'تن'), - (0xFC74, 'M', u'تى'), - (0xFC75, 'M', u'تي'), - (0xFC76, 'M', u'ثر'), - (0xFC77, 'M', u'ثز'), - (0xFC78, 'M', u'ثم'), - (0xFC79, 'M', u'ثن'), - (0xFC7A, 'M', u'ثى'), - (0xFC7B, 'M', u'ثي'), - (0xFC7C, 'M', u'فى'), - (0xFC7D, 'M', u'في'), - (0xFC7E, 'M', u'قى'), - (0xFC7F, 'M', u'قي'), - (0xFC80, 'M', u'كا'), - (0xFC81, 'M', u'كل'), - (0xFC82, 'M', u'كم'), - (0xFC83, 'M', u'كى'), - (0xFC84, 'M', u'كي'), - (0xFC85, 'M', u'لم'), - (0xFC86, 'M', u'لى'), - (0xFC87, 'M', u'لي'), - (0xFC88, 'M', u'ما'), - (0xFC89, 'M', u'مم'), - (0xFC8A, 'M', u'نر'), - (0xFC8B, 'M', u'نز'), - (0xFC8C, 'M', u'نم'), - (0xFC8D, 'M', u'نن'), - (0xFC8E, 'M', u'نى'), - (0xFC8F, 'M', u'ني'), - (0xFC90, 'M', u'ىٰ'), - (0xFC91, 'M', u'ير'), - (0xFC92, 'M', u'يز'), - (0xFC93, 'M', u'يم'), - (0xFC94, 'M', u'ين'), - (0xFC95, 'M', u'يى'), - (0xFC96, 'M', u'يي'), - (0xFC97, 'M', u'ئج'), - (0xFC98, 'M', u'ئح'), - (0xFC99, 'M', u'ئخ'), - (0xFC9A, 'M', u'ئم'), - (0xFC9B, 'M', u'ئه'), - (0xFC9C, 'M', u'بج'), - (0xFC9D, 'M', u'بح'), - (0xFC9E, 'M', u'بخ'), - (0xFC9F, 'M', u'بم'), - (0xFCA0, 'M', u'به'), - (0xFCA1, 'M', u'تج'), - (0xFCA2, 'M', u'تح'), - ] - -def _seg_45(): - return [ - (0xFCA3, 'M', u'تخ'), - (0xFCA4, 'M', u'تم'), - (0xFCA5, 'M', u'ته'), - (0xFCA6, 'M', u'ثم'), - (0xFCA7, 'M', u'جح'), - (0xFCA8, 'M', u'جم'), - (0xFCA9, 'M', u'حج'), - (0xFCAA, 'M', u'حم'), - (0xFCAB, 'M', u'خج'), - (0xFCAC, 'M', u'خم'), - (0xFCAD, 'M', u'سج'), - (0xFCAE, 'M', u'سح'), - (0xFCAF, 'M', u'سخ'), - (0xFCB0, 'M', u'سم'), - (0xFCB1, 'M', u'صح'), - (0xFCB2, 'M', u'صخ'), - (0xFCB3, 'M', u'صم'), - (0xFCB4, 'M', u'ضج'), - (0xFCB5, 'M', u'ضح'), - (0xFCB6, 'M', u'ضخ'), - (0xFCB7, 'M', u'ضم'), - (0xFCB8, 'M', u'طح'), - (0xFCB9, 'M', u'ظم'), - (0xFCBA, 'M', u'عج'), - (0xFCBB, 'M', u'عم'), - (0xFCBC, 'M', u'غج'), - (0xFCBD, 'M', u'غم'), - (0xFCBE, 'M', u'فج'), - (0xFCBF, 'M', u'فح'), - (0xFCC0, 'M', u'فخ'), - (0xFCC1, 'M', u'فم'), - (0xFCC2, 'M', u'قح'), - (0xFCC3, 'M', u'قم'), - (0xFCC4, 'M', u'كج'), - (0xFCC5, 'M', u'كح'), - (0xFCC6, 'M', u'كخ'), - (0xFCC7, 'M', u'كل'), - (0xFCC8, 'M', u'كم'), - (0xFCC9, 'M', u'لج'), - (0xFCCA, 'M', u'لح'), - (0xFCCB, 'M', u'لخ'), - (0xFCCC, 'M', u'لم'), - (0xFCCD, 'M', u'له'), - (0xFCCE, 'M', u'مج'), - (0xFCCF, 'M', u'مح'), - (0xFCD0, 'M', u'مخ'), - (0xFCD1, 'M', u'مم'), - (0xFCD2, 'M', u'نج'), - (0xFCD3, 'M', u'نح'), - (0xFCD4, 'M', u'نخ'), - (0xFCD5, 'M', u'نم'), - (0xFCD6, 'M', u'نه'), - (0xFCD7, 'M', u'هج'), - (0xFCD8, 'M', u'هم'), - (0xFCD9, 'M', u'هٰ'), - (0xFCDA, 'M', u'يج'), - (0xFCDB, 'M', u'يح'), - (0xFCDC, 'M', u'يخ'), - (0xFCDD, 'M', u'يم'), - (0xFCDE, 'M', u'يه'), - (0xFCDF, 'M', u'ئم'), - (0xFCE0, 'M', u'ئه'), - (0xFCE1, 'M', u'بم'), - (0xFCE2, 'M', u'به'), - (0xFCE3, 'M', u'تم'), - (0xFCE4, 'M', u'ته'), - (0xFCE5, 'M', u'ثم'), - (0xFCE6, 'M', u'ثه'), - (0xFCE7, 'M', u'سم'), - (0xFCE8, 'M', u'سه'), - (0xFCE9, 'M', u'شم'), - (0xFCEA, 'M', u'شه'), - (0xFCEB, 'M', u'كل'), - (0xFCEC, 'M', u'كم'), - (0xFCED, 'M', u'لم'), - (0xFCEE, 'M', u'نم'), - (0xFCEF, 'M', u'نه'), - (0xFCF0, 'M', u'يم'), - (0xFCF1, 'M', u'يه'), - (0xFCF2, 'M', u'ـَّ'), - (0xFCF3, 'M', u'ـُّ'), - (0xFCF4, 'M', u'ـِّ'), - (0xFCF5, 'M', u'طى'), - (0xFCF6, 'M', u'طي'), - (0xFCF7, 'M', u'عى'), - (0xFCF8, 'M', u'عي'), - (0xFCF9, 'M', u'غى'), - (0xFCFA, 'M', u'غي'), - (0xFCFB, 'M', u'سى'), - (0xFCFC, 'M', u'سي'), - (0xFCFD, 'M', u'شى'), - (0xFCFE, 'M', u'شي'), - (0xFCFF, 'M', u'حى'), - (0xFD00, 'M', u'حي'), - (0xFD01, 'M', u'جى'), - (0xFD02, 'M', u'جي'), - (0xFD03, 'M', u'خى'), - (0xFD04, 'M', u'خي'), - (0xFD05, 'M', u'صى'), - 
(0xFD06, 'M', u'صي'), - ] - -def _seg_46(): - return [ - (0xFD07, 'M', u'ضى'), - (0xFD08, 'M', u'ضي'), - (0xFD09, 'M', u'شج'), - (0xFD0A, 'M', u'شح'), - (0xFD0B, 'M', u'شخ'), - (0xFD0C, 'M', u'شم'), - (0xFD0D, 'M', u'شر'), - (0xFD0E, 'M', u'سر'), - (0xFD0F, 'M', u'صر'), - (0xFD10, 'M', u'ضر'), - (0xFD11, 'M', u'طى'), - (0xFD12, 'M', u'طي'), - (0xFD13, 'M', u'عى'), - (0xFD14, 'M', u'عي'), - (0xFD15, 'M', u'غى'), - (0xFD16, 'M', u'غي'), - (0xFD17, 'M', u'سى'), - (0xFD18, 'M', u'سي'), - (0xFD19, 'M', u'شى'), - (0xFD1A, 'M', u'شي'), - (0xFD1B, 'M', u'حى'), - (0xFD1C, 'M', u'حي'), - (0xFD1D, 'M', u'جى'), - (0xFD1E, 'M', u'جي'), - (0xFD1F, 'M', u'خى'), - (0xFD20, 'M', u'خي'), - (0xFD21, 'M', u'صى'), - (0xFD22, 'M', u'صي'), - (0xFD23, 'M', u'ضى'), - (0xFD24, 'M', u'ضي'), - (0xFD25, 'M', u'شج'), - (0xFD26, 'M', u'شح'), - (0xFD27, 'M', u'شخ'), - (0xFD28, 'M', u'شم'), - (0xFD29, 'M', u'شر'), - (0xFD2A, 'M', u'سر'), - (0xFD2B, 'M', u'صر'), - (0xFD2C, 'M', u'ضر'), - (0xFD2D, 'M', u'شج'), - (0xFD2E, 'M', u'شح'), - (0xFD2F, 'M', u'شخ'), - (0xFD30, 'M', u'شم'), - (0xFD31, 'M', u'سه'), - (0xFD32, 'M', u'شه'), - (0xFD33, 'M', u'طم'), - (0xFD34, 'M', u'سج'), - (0xFD35, 'M', u'سح'), - (0xFD36, 'M', u'سخ'), - (0xFD37, 'M', u'شج'), - (0xFD38, 'M', u'شح'), - (0xFD39, 'M', u'شخ'), - (0xFD3A, 'M', u'طم'), - (0xFD3B, 'M', u'ظم'), - (0xFD3C, 'M', u'اً'), - (0xFD3E, 'V'), - (0xFD40, 'X'), - (0xFD50, 'M', u'تجم'), - (0xFD51, 'M', u'تحج'), - (0xFD53, 'M', u'تحم'), - (0xFD54, 'M', u'تخم'), - (0xFD55, 'M', u'تمج'), - (0xFD56, 'M', u'تمح'), - (0xFD57, 'M', u'تمخ'), - (0xFD58, 'M', u'جمح'), - (0xFD5A, 'M', u'حمي'), - (0xFD5B, 'M', u'حمى'), - (0xFD5C, 'M', u'سحج'), - (0xFD5D, 'M', u'سجح'), - (0xFD5E, 'M', u'سجى'), - (0xFD5F, 'M', u'سمح'), - (0xFD61, 'M', u'سمج'), - (0xFD62, 'M', u'سمم'), - (0xFD64, 'M', u'صحح'), - (0xFD66, 'M', u'صمم'), - (0xFD67, 'M', u'شحم'), - (0xFD69, 'M', u'شجي'), - (0xFD6A, 'M', u'شمخ'), - (0xFD6C, 'M', u'شمم'), - (0xFD6E, 'M', u'ضحى'), - (0xFD6F, 'M', u'ضخم'), - (0xFD71, 'M', u'طمح'), - (0xFD73, 'M', u'طمم'), - (0xFD74, 'M', u'طمي'), - (0xFD75, 'M', u'عجم'), - (0xFD76, 'M', u'عمم'), - (0xFD78, 'M', u'عمى'), - (0xFD79, 'M', u'غمم'), - (0xFD7A, 'M', u'غمي'), - (0xFD7B, 'M', u'غمى'), - (0xFD7C, 'M', u'فخم'), - (0xFD7E, 'M', u'قمح'), - (0xFD7F, 'M', u'قمم'), - (0xFD80, 'M', u'لحم'), - (0xFD81, 'M', u'لحي'), - (0xFD82, 'M', u'لحى'), - (0xFD83, 'M', u'لجج'), - (0xFD85, 'M', u'لخم'), - (0xFD87, 'M', u'لمح'), - (0xFD89, 'M', u'محج'), - (0xFD8A, 'M', u'محم'), - ] - -def _seg_47(): - return [ - (0xFD8B, 'M', u'محي'), - (0xFD8C, 'M', u'مجح'), - (0xFD8D, 'M', u'مجم'), - (0xFD8E, 'M', u'مخج'), - (0xFD8F, 'M', u'مخم'), - (0xFD90, 'X'), - (0xFD92, 'M', u'مجخ'), - (0xFD93, 'M', u'همج'), - (0xFD94, 'M', u'همم'), - (0xFD95, 'M', u'نحم'), - (0xFD96, 'M', u'نحى'), - (0xFD97, 'M', u'نجم'), - (0xFD99, 'M', u'نجى'), - (0xFD9A, 'M', u'نمي'), - (0xFD9B, 'M', u'نمى'), - (0xFD9C, 'M', u'يمم'), - (0xFD9E, 'M', u'بخي'), - (0xFD9F, 'M', u'تجي'), - (0xFDA0, 'M', u'تجى'), - (0xFDA1, 'M', u'تخي'), - (0xFDA2, 'M', u'تخى'), - (0xFDA3, 'M', u'تمي'), - (0xFDA4, 'M', u'تمى'), - (0xFDA5, 'M', u'جمي'), - (0xFDA6, 'M', u'جحى'), - (0xFDA7, 'M', u'جمى'), - (0xFDA8, 'M', u'سخى'), - (0xFDA9, 'M', u'صحي'), - (0xFDAA, 'M', u'شحي'), - (0xFDAB, 'M', u'ضحي'), - (0xFDAC, 'M', u'لجي'), - (0xFDAD, 'M', u'لمي'), - (0xFDAE, 'M', u'يحي'), - (0xFDAF, 'M', u'يجي'), - (0xFDB0, 'M', u'يمي'), - (0xFDB1, 'M', u'ممي'), - (0xFDB2, 'M', u'قمي'), - (0xFDB3, 'M', u'نحي'), - (0xFDB4, 'M', u'قمح'), - (0xFDB5, 'M', u'لحم'), - (0xFDB6, 'M', u'عمي'), - (0xFDB7, 'M', 
u'كمي'), - (0xFDB8, 'M', u'نجح'), - (0xFDB9, 'M', u'مخي'), - (0xFDBA, 'M', u'لجم'), - (0xFDBB, 'M', u'كمم'), - (0xFDBC, 'M', u'لجم'), - (0xFDBD, 'M', u'نجح'), - (0xFDBE, 'M', u'جحي'), - (0xFDBF, 'M', u'حجي'), - (0xFDC0, 'M', u'مجي'), - (0xFDC1, 'M', u'فمي'), - (0xFDC2, 'M', u'بحي'), - (0xFDC3, 'M', u'كمم'), - (0xFDC4, 'M', u'عجم'), - (0xFDC5, 'M', u'صمم'), - (0xFDC6, 'M', u'سخي'), - (0xFDC7, 'M', u'نجي'), - (0xFDC8, 'X'), - (0xFDF0, 'M', u'صلے'), - (0xFDF1, 'M', u'قلے'), - (0xFDF2, 'M', u'الله'), - (0xFDF3, 'M', u'اكبر'), - (0xFDF4, 'M', u'محمد'), - (0xFDF5, 'M', u'صلعم'), - (0xFDF6, 'M', u'رسول'), - (0xFDF7, 'M', u'عليه'), - (0xFDF8, 'M', u'وسلم'), - (0xFDF9, 'M', u'صلى'), - (0xFDFA, '3', u'صلى الله عليه وسلم'), - (0xFDFB, '3', u'جل جلاله'), - (0xFDFC, 'M', u'ریال'), - (0xFDFD, 'V'), - (0xFDFE, 'X'), - (0xFE00, 'I'), - (0xFE10, '3', u','), - (0xFE11, 'M', u'、'), - (0xFE12, 'X'), - (0xFE13, '3', u':'), - (0xFE14, '3', u';'), - (0xFE15, '3', u'!'), - (0xFE16, '3', u'?'), - (0xFE17, 'M', u'〖'), - (0xFE18, 'M', u'〗'), - (0xFE19, 'X'), - (0xFE20, 'V'), - (0xFE27, 'X'), - (0xFE31, 'M', u'—'), - (0xFE32, 'M', u'–'), - (0xFE33, '3', u'_'), - (0xFE35, '3', u'('), - (0xFE36, '3', u')'), - (0xFE37, '3', u'{'), - (0xFE38, '3', u'}'), - (0xFE39, 'M', u'〔'), - (0xFE3A, 'M', u'〕'), - (0xFE3B, 'M', u'【'), - (0xFE3C, 'M', u'】'), - (0xFE3D, 'M', u'《'), - (0xFE3E, 'M', u'》'), - ] - -def _seg_48(): - return [ - (0xFE3F, 'M', u'〈'), - (0xFE40, 'M', u'〉'), - (0xFE41, 'M', u'「'), - (0xFE42, 'M', u'」'), - (0xFE43, 'M', u'『'), - (0xFE44, 'M', u'』'), - (0xFE45, 'V'), - (0xFE47, '3', u'['), - (0xFE48, '3', u']'), - (0xFE49, '3', u' ̅'), - (0xFE4D, '3', u'_'), - (0xFE50, '3', u','), - (0xFE51, 'M', u'、'), - (0xFE52, 'X'), - (0xFE54, '3', u';'), - (0xFE55, '3', u':'), - (0xFE56, '3', u'?'), - (0xFE57, '3', u'!'), - (0xFE58, 'M', u'—'), - (0xFE59, '3', u'('), - (0xFE5A, '3', u')'), - (0xFE5B, '3', u'{'), - (0xFE5C, '3', u'}'), - (0xFE5D, 'M', u'〔'), - (0xFE5E, 'M', u'〕'), - (0xFE5F, '3', u'#'), - (0xFE60, '3', u'&'), - (0xFE61, '3', u'*'), - (0xFE62, '3', u'+'), - (0xFE63, 'M', u'-'), - (0xFE64, '3', u'<'), - (0xFE65, '3', u'>'), - (0xFE66, '3', u'='), - (0xFE67, 'X'), - (0xFE68, '3', u'\\'), - (0xFE69, '3', u'$'), - (0xFE6A, '3', u'%'), - (0xFE6B, '3', u'@'), - (0xFE6C, 'X'), - (0xFE70, '3', u' ً'), - (0xFE71, 'M', u'ـً'), - (0xFE72, '3', u' ٌ'), - (0xFE73, 'V'), - (0xFE74, '3', u' ٍ'), - (0xFE75, 'X'), - (0xFE76, '3', u' َ'), - (0xFE77, 'M', u'ـَ'), - (0xFE78, '3', u' ُ'), - (0xFE79, 'M', u'ـُ'), - (0xFE7A, '3', u' ِ'), - (0xFE7B, 'M', u'ـِ'), - (0xFE7C, '3', u' ّ'), - (0xFE7D, 'M', u'ـّ'), - (0xFE7E, '3', u' ْ'), - (0xFE7F, 'M', u'ـْ'), - (0xFE80, 'M', u'ء'), - (0xFE81, 'M', u'آ'), - (0xFE83, 'M', u'أ'), - (0xFE85, 'M', u'ؤ'), - (0xFE87, 'M', u'إ'), - (0xFE89, 'M', u'ئ'), - (0xFE8D, 'M', u'ا'), - (0xFE8F, 'M', u'ب'), - (0xFE93, 'M', u'ة'), - (0xFE95, 'M', u'ت'), - (0xFE99, 'M', u'ث'), - (0xFE9D, 'M', u'ج'), - (0xFEA1, 'M', u'ح'), - (0xFEA5, 'M', u'خ'), - (0xFEA9, 'M', u'د'), - (0xFEAB, 'M', u'ذ'), - (0xFEAD, 'M', u'ر'), - (0xFEAF, 'M', u'ز'), - (0xFEB1, 'M', u'س'), - (0xFEB5, 'M', u'ش'), - (0xFEB9, 'M', u'ص'), - (0xFEBD, 'M', u'ض'), - (0xFEC1, 'M', u'ط'), - (0xFEC5, 'M', u'ظ'), - (0xFEC9, 'M', u'ع'), - (0xFECD, 'M', u'غ'), - (0xFED1, 'M', u'ف'), - (0xFED5, 'M', u'ق'), - (0xFED9, 'M', u'ك'), - (0xFEDD, 'M', u'ل'), - (0xFEE1, 'M', u'م'), - (0xFEE5, 'M', u'ن'), - (0xFEE9, 'M', u'ه'), - (0xFEED, 'M', u'و'), - (0xFEEF, 'M', u'ى'), - (0xFEF1, 'M', u'ي'), - (0xFEF5, 'M', u'لآ'), - (0xFEF7, 'M', u'لأ'), - (0xFEF9, 'M', 
u'لإ'), - (0xFEFB, 'M', u'لا'), - (0xFEFD, 'X'), - (0xFEFF, 'I'), - (0xFF00, 'X'), - (0xFF01, '3', u'!'), - (0xFF02, '3', u'"'), - ] - -def _seg_49(): - return [ - (0xFF03, '3', u'#'), - (0xFF04, '3', u'$'), - (0xFF05, '3', u'%'), - (0xFF06, '3', u'&'), - (0xFF07, '3', u'\''), - (0xFF08, '3', u'('), - (0xFF09, '3', u')'), - (0xFF0A, '3', u'*'), - (0xFF0B, '3', u'+'), - (0xFF0C, '3', u','), - (0xFF0D, 'M', u'-'), - (0xFF0E, 'M', u'.'), - (0xFF0F, '3', u'/'), - (0xFF10, 'M', u'0'), - (0xFF11, 'M', u'1'), - (0xFF12, 'M', u'2'), - (0xFF13, 'M', u'3'), - (0xFF14, 'M', u'4'), - (0xFF15, 'M', u'5'), - (0xFF16, 'M', u'6'), - (0xFF17, 'M', u'7'), - (0xFF18, 'M', u'8'), - (0xFF19, 'M', u'9'), - (0xFF1A, '3', u':'), - (0xFF1B, '3', u';'), - (0xFF1C, '3', u'<'), - (0xFF1D, '3', u'='), - (0xFF1E, '3', u'>'), - (0xFF1F, '3', u'?'), - (0xFF20, '3', u'@'), - (0xFF21, 'M', u'a'), - (0xFF22, 'M', u'b'), - (0xFF23, 'M', u'c'), - (0xFF24, 'M', u'd'), - (0xFF25, 'M', u'e'), - (0xFF26, 'M', u'f'), - (0xFF27, 'M', u'g'), - (0xFF28, 'M', u'h'), - (0xFF29, 'M', u'i'), - (0xFF2A, 'M', u'j'), - (0xFF2B, 'M', u'k'), - (0xFF2C, 'M', u'l'), - (0xFF2D, 'M', u'm'), - (0xFF2E, 'M', u'n'), - (0xFF2F, 'M', u'o'), - (0xFF30, 'M', u'p'), - (0xFF31, 'M', u'q'), - (0xFF32, 'M', u'r'), - (0xFF33, 'M', u's'), - (0xFF34, 'M', u't'), - (0xFF35, 'M', u'u'), - (0xFF36, 'M', u'v'), - (0xFF37, 'M', u'w'), - (0xFF38, 'M', u'x'), - (0xFF39, 'M', u'y'), - (0xFF3A, 'M', u'z'), - (0xFF3B, '3', u'['), - (0xFF3C, '3', u'\\'), - (0xFF3D, '3', u']'), - (0xFF3E, '3', u'^'), - (0xFF3F, '3', u'_'), - (0xFF40, '3', u'`'), - (0xFF41, 'M', u'a'), - (0xFF42, 'M', u'b'), - (0xFF43, 'M', u'c'), - (0xFF44, 'M', u'd'), - (0xFF45, 'M', u'e'), - (0xFF46, 'M', u'f'), - (0xFF47, 'M', u'g'), - (0xFF48, 'M', u'h'), - (0xFF49, 'M', u'i'), - (0xFF4A, 'M', u'j'), - (0xFF4B, 'M', u'k'), - (0xFF4C, 'M', u'l'), - (0xFF4D, 'M', u'm'), - (0xFF4E, 'M', u'n'), - (0xFF4F, 'M', u'o'), - (0xFF50, 'M', u'p'), - (0xFF51, 'M', u'q'), - (0xFF52, 'M', u'r'), - (0xFF53, 'M', u's'), - (0xFF54, 'M', u't'), - (0xFF55, 'M', u'u'), - (0xFF56, 'M', u'v'), - (0xFF57, 'M', u'w'), - (0xFF58, 'M', u'x'), - (0xFF59, 'M', u'y'), - (0xFF5A, 'M', u'z'), - (0xFF5B, '3', u'{'), - (0xFF5C, '3', u'|'), - (0xFF5D, '3', u'}'), - (0xFF5E, '3', u'~'), - (0xFF5F, 'M', u'⦅'), - (0xFF60, 'M', u'⦆'), - (0xFF61, 'M', u'.'), - (0xFF62, 'M', u'「'), - (0xFF63, 'M', u'」'), - (0xFF64, 'M', u'、'), - (0xFF65, 'M', u'・'), - (0xFF66, 'M', u'ヲ'), - ] - -def _seg_50(): - return [ - (0xFF67, 'M', u'ァ'), - (0xFF68, 'M', u'ィ'), - (0xFF69, 'M', u'ゥ'), - (0xFF6A, 'M', u'ェ'), - (0xFF6B, 'M', u'ォ'), - (0xFF6C, 'M', u'ャ'), - (0xFF6D, 'M', u'ュ'), - (0xFF6E, 'M', u'ョ'), - (0xFF6F, 'M', u'ッ'), - (0xFF70, 'M', u'ー'), - (0xFF71, 'M', u'ア'), - (0xFF72, 'M', u'イ'), - (0xFF73, 'M', u'ウ'), - (0xFF74, 'M', u'エ'), - (0xFF75, 'M', u'オ'), - (0xFF76, 'M', u'カ'), - (0xFF77, 'M', u'キ'), - (0xFF78, 'M', u'ク'), - (0xFF79, 'M', u'ケ'), - (0xFF7A, 'M', u'コ'), - (0xFF7B, 'M', u'サ'), - (0xFF7C, 'M', u'シ'), - (0xFF7D, 'M', u'ス'), - (0xFF7E, 'M', u'セ'), - (0xFF7F, 'M', u'ソ'), - (0xFF80, 'M', u'タ'), - (0xFF81, 'M', u'チ'), - (0xFF82, 'M', u'ツ'), - (0xFF83, 'M', u'テ'), - (0xFF84, 'M', u'ト'), - (0xFF85, 'M', u'ナ'), - (0xFF86, 'M', u'ニ'), - (0xFF87, 'M', u'ヌ'), - (0xFF88, 'M', u'ネ'), - (0xFF89, 'M', u'ノ'), - (0xFF8A, 'M', u'ハ'), - (0xFF8B, 'M', u'ヒ'), - (0xFF8C, 'M', u'フ'), - (0xFF8D, 'M', u'ヘ'), - (0xFF8E, 'M', u'ホ'), - (0xFF8F, 'M', u'マ'), - (0xFF90, 'M', u'ミ'), - (0xFF91, 'M', u'ム'), - (0xFF92, 'M', u'メ'), - (0xFF93, 'M', u'モ'), - (0xFF94, 'M', u'ヤ'), 
- (0xFF95, 'M', u'ユ'), - (0xFF96, 'M', u'ヨ'), - (0xFF97, 'M', u'ラ'), - (0xFF98, 'M', u'リ'), - (0xFF99, 'M', u'ル'), - (0xFF9A, 'M', u'レ'), - (0xFF9B, 'M', u'ロ'), - (0xFF9C, 'M', u'ワ'), - (0xFF9D, 'M', u'ン'), - (0xFF9E, 'M', u'゙'), - (0xFF9F, 'M', u'゚'), - (0xFFA0, 'X'), - (0xFFA1, 'M', u'ᄀ'), - (0xFFA2, 'M', u'ᄁ'), - (0xFFA3, 'M', u'ᆪ'), - (0xFFA4, 'M', u'ᄂ'), - (0xFFA5, 'M', u'ᆬ'), - (0xFFA6, 'M', u'ᆭ'), - (0xFFA7, 'M', u'ᄃ'), - (0xFFA8, 'M', u'ᄄ'), - (0xFFA9, 'M', u'ᄅ'), - (0xFFAA, 'M', u'ᆰ'), - (0xFFAB, 'M', u'ᆱ'), - (0xFFAC, 'M', u'ᆲ'), - (0xFFAD, 'M', u'ᆳ'), - (0xFFAE, 'M', u'ᆴ'), - (0xFFAF, 'M', u'ᆵ'), - (0xFFB0, 'M', u'ᄚ'), - (0xFFB1, 'M', u'ᄆ'), - (0xFFB2, 'M', u'ᄇ'), - (0xFFB3, 'M', u'ᄈ'), - (0xFFB4, 'M', u'ᄡ'), - (0xFFB5, 'M', u'ᄉ'), - (0xFFB6, 'M', u'ᄊ'), - (0xFFB7, 'M', u'ᄋ'), - (0xFFB8, 'M', u'ᄌ'), - (0xFFB9, 'M', u'ᄍ'), - (0xFFBA, 'M', u'ᄎ'), - (0xFFBB, 'M', u'ᄏ'), - (0xFFBC, 'M', u'ᄐ'), - (0xFFBD, 'M', u'ᄑ'), - (0xFFBE, 'M', u'ᄒ'), - (0xFFBF, 'X'), - (0xFFC2, 'M', u'ᅡ'), - (0xFFC3, 'M', u'ᅢ'), - (0xFFC4, 'M', u'ᅣ'), - (0xFFC5, 'M', u'ᅤ'), - (0xFFC6, 'M', u'ᅥ'), - (0xFFC7, 'M', u'ᅦ'), - (0xFFC8, 'X'), - (0xFFCA, 'M', u'ᅧ'), - (0xFFCB, 'M', u'ᅨ'), - (0xFFCC, 'M', u'ᅩ'), - (0xFFCD, 'M', u'ᅪ'), - ] - -def _seg_51(): - return [ - (0xFFCE, 'M', u'ᅫ'), - (0xFFCF, 'M', u'ᅬ'), - (0xFFD0, 'X'), - (0xFFD2, 'M', u'ᅭ'), - (0xFFD3, 'M', u'ᅮ'), - (0xFFD4, 'M', u'ᅯ'), - (0xFFD5, 'M', u'ᅰ'), - (0xFFD6, 'M', u'ᅱ'), - (0xFFD7, 'M', u'ᅲ'), - (0xFFD8, 'X'), - (0xFFDA, 'M', u'ᅳ'), - (0xFFDB, 'M', u'ᅴ'), - (0xFFDC, 'M', u'ᅵ'), - (0xFFDD, 'X'), - (0xFFE0, 'M', u'¢'), - (0xFFE1, 'M', u'£'), - (0xFFE2, 'M', u'¬'), - (0xFFE3, '3', u' ̄'), - (0xFFE4, 'M', u'¦'), - (0xFFE5, 'M', u'¥'), - (0xFFE6, 'M', u'₩'), - (0xFFE7, 'X'), - (0xFFE8, 'M', u'│'), - (0xFFE9, 'M', u'←'), - (0xFFEA, 'M', u'↑'), - (0xFFEB, 'M', u'→'), - (0xFFEC, 'M', u'↓'), - (0xFFED, 'M', u'■'), - (0xFFEE, 'M', u'○'), - (0xFFEF, 'X'), - (0x10000, 'V'), - (0x1000C, 'X'), - (0x1000D, 'V'), - (0x10027, 'X'), - (0x10028, 'V'), - (0x1003B, 'X'), - (0x1003C, 'V'), - (0x1003E, 'X'), - (0x1003F, 'V'), - (0x1004E, 'X'), - (0x10050, 'V'), - (0x1005E, 'X'), - (0x10080, 'V'), - (0x100FB, 'X'), - (0x10100, 'V'), - (0x10103, 'X'), - (0x10107, 'V'), - (0x10134, 'X'), - (0x10137, 'V'), - (0x1018B, 'X'), - (0x10190, 'V'), - (0x1019C, 'X'), - (0x101D0, 'V'), - (0x101FE, 'X'), - (0x10280, 'V'), - (0x1029D, 'X'), - (0x102A0, 'V'), - (0x102D1, 'X'), - (0x10300, 'V'), - (0x1031F, 'X'), - (0x10320, 'V'), - (0x10324, 'X'), - (0x10330, 'V'), - (0x1034B, 'X'), - (0x10380, 'V'), - (0x1039E, 'X'), - (0x1039F, 'V'), - (0x103C4, 'X'), - (0x103C8, 'V'), - (0x103D6, 'X'), - (0x10400, 'M', u'𐐨'), - (0x10401, 'M', u'𐐩'), - (0x10402, 'M', u'𐐪'), - (0x10403, 'M', u'𐐫'), - (0x10404, 'M', u'𐐬'), - (0x10405, 'M', u'𐐭'), - (0x10406, 'M', u'𐐮'), - (0x10407, 'M', u'𐐯'), - (0x10408, 'M', u'𐐰'), - (0x10409, 'M', u'𐐱'), - (0x1040A, 'M', u'𐐲'), - (0x1040B, 'M', u'𐐳'), - (0x1040C, 'M', u'𐐴'), - (0x1040D, 'M', u'𐐵'), - (0x1040E, 'M', u'𐐶'), - (0x1040F, 'M', u'𐐷'), - (0x10410, 'M', u'𐐸'), - (0x10411, 'M', u'𐐹'), - (0x10412, 'M', u'𐐺'), - (0x10413, 'M', u'𐐻'), - (0x10414, 'M', u'𐐼'), - (0x10415, 'M', u'𐐽'), - (0x10416, 'M', u'𐐾'), - (0x10417, 'M', u'𐐿'), - (0x10418, 'M', u'𐑀'), - (0x10419, 'M', u'𐑁'), - (0x1041A, 'M', u'𐑂'), - (0x1041B, 'M', u'𐑃'), - (0x1041C, 'M', u'𐑄'), - (0x1041D, 'M', u'𐑅'), - ] - -def _seg_52(): - return [ - (0x1041E, 'M', u'𐑆'), - (0x1041F, 'M', u'𐑇'), - (0x10420, 'M', u'𐑈'), - (0x10421, 'M', u'𐑉'), - (0x10422, 'M', u'𐑊'), - (0x10423, 'M', u'𐑋'), - (0x10424, 'M', 
u'𐑌'), - (0x10425, 'M', u'𐑍'), - (0x10426, 'M', u'𐑎'), - (0x10427, 'M', u'𐑏'), - (0x10428, 'V'), - (0x1049E, 'X'), - (0x104A0, 'V'), - (0x104AA, 'X'), - (0x10800, 'V'), - (0x10806, 'X'), - (0x10808, 'V'), - (0x10809, 'X'), - (0x1080A, 'V'), - (0x10836, 'X'), - (0x10837, 'V'), - (0x10839, 'X'), - (0x1083C, 'V'), - (0x1083D, 'X'), - (0x1083F, 'V'), - (0x10856, 'X'), - (0x10857, 'V'), - (0x10860, 'X'), - (0x10900, 'V'), - (0x1091C, 'X'), - (0x1091F, 'V'), - (0x1093A, 'X'), - (0x1093F, 'V'), - (0x10940, 'X'), - (0x10980, 'V'), - (0x109B8, 'X'), - (0x109BE, 'V'), - (0x109C0, 'X'), - (0x10A00, 'V'), - (0x10A04, 'X'), - (0x10A05, 'V'), - (0x10A07, 'X'), - (0x10A0C, 'V'), - (0x10A14, 'X'), - (0x10A15, 'V'), - (0x10A18, 'X'), - (0x10A19, 'V'), - (0x10A34, 'X'), - (0x10A38, 'V'), - (0x10A3B, 'X'), - (0x10A3F, 'V'), - (0x10A48, 'X'), - (0x10A50, 'V'), - (0x10A59, 'X'), - (0x10A60, 'V'), - (0x10A80, 'X'), - (0x10B00, 'V'), - (0x10B36, 'X'), - (0x10B39, 'V'), - (0x10B56, 'X'), - (0x10B58, 'V'), - (0x10B73, 'X'), - (0x10B78, 'V'), - (0x10B80, 'X'), - (0x10C00, 'V'), - (0x10C49, 'X'), - (0x10E60, 'V'), - (0x10E7F, 'X'), - (0x11000, 'V'), - (0x1104E, 'X'), - (0x11052, 'V'), - (0x11070, 'X'), - (0x11080, 'V'), - (0x110BD, 'X'), - (0x110BE, 'V'), - (0x110C2, 'X'), - (0x110D0, 'V'), - (0x110E9, 'X'), - (0x110F0, 'V'), - (0x110FA, 'X'), - (0x11100, 'V'), - (0x11135, 'X'), - (0x11136, 'V'), - (0x11144, 'X'), - (0x11180, 'V'), - (0x111C9, 'X'), - (0x111D0, 'V'), - (0x111DA, 'X'), - (0x11680, 'V'), - (0x116B8, 'X'), - (0x116C0, 'V'), - (0x116CA, 'X'), - (0x12000, 'V'), - (0x1236F, 'X'), - (0x12400, 'V'), - (0x12463, 'X'), - (0x12470, 'V'), - (0x12474, 'X'), - (0x13000, 'V'), - (0x1342F, 'X'), - ] - -def _seg_53(): - return [ - (0x16800, 'V'), - (0x16A39, 'X'), - (0x16F00, 'V'), - (0x16F45, 'X'), - (0x16F50, 'V'), - (0x16F7F, 'X'), - (0x16F8F, 'V'), - (0x16FA0, 'X'), - (0x1B000, 'V'), - (0x1B002, 'X'), - (0x1D000, 'V'), - (0x1D0F6, 'X'), - (0x1D100, 'V'), - (0x1D127, 'X'), - (0x1D129, 'V'), - (0x1D15E, 'M', u'𝅗𝅥'), - (0x1D15F, 'M', u'𝅘𝅥'), - (0x1D160, 'M', u'𝅘𝅥𝅮'), - (0x1D161, 'M', u'𝅘𝅥𝅯'), - (0x1D162, 'M', u'𝅘𝅥𝅰'), - (0x1D163, 'M', u'𝅘𝅥𝅱'), - (0x1D164, 'M', u'𝅘𝅥𝅲'), - (0x1D165, 'V'), - (0x1D173, 'X'), - (0x1D17B, 'V'), - (0x1D1BB, 'M', u'𝆹𝅥'), - (0x1D1BC, 'M', u'𝆺𝅥'), - (0x1D1BD, 'M', u'𝆹𝅥𝅮'), - (0x1D1BE, 'M', u'𝆺𝅥𝅮'), - (0x1D1BF, 'M', u'𝆹𝅥𝅯'), - (0x1D1C0, 'M', u'𝆺𝅥𝅯'), - (0x1D1C1, 'V'), - (0x1D1DE, 'X'), - (0x1D200, 'V'), - (0x1D246, 'X'), - (0x1D300, 'V'), - (0x1D357, 'X'), - (0x1D360, 'V'), - (0x1D372, 'X'), - (0x1D400, 'M', u'a'), - (0x1D401, 'M', u'b'), - (0x1D402, 'M', u'c'), - (0x1D403, 'M', u'd'), - (0x1D404, 'M', u'e'), - (0x1D405, 'M', u'f'), - (0x1D406, 'M', u'g'), - (0x1D407, 'M', u'h'), - (0x1D408, 'M', u'i'), - (0x1D409, 'M', u'j'), - (0x1D40A, 'M', u'k'), - (0x1D40B, 'M', u'l'), - (0x1D40C, 'M', u'm'), - (0x1D40D, 'M', u'n'), - (0x1D40E, 'M', u'o'), - (0x1D40F, 'M', u'p'), - (0x1D410, 'M', u'q'), - (0x1D411, 'M', u'r'), - (0x1D412, 'M', u's'), - (0x1D413, 'M', u't'), - (0x1D414, 'M', u'u'), - (0x1D415, 'M', u'v'), - (0x1D416, 'M', u'w'), - (0x1D417, 'M', u'x'), - (0x1D418, 'M', u'y'), - (0x1D419, 'M', u'z'), - (0x1D41A, 'M', u'a'), - (0x1D41B, 'M', u'b'), - (0x1D41C, 'M', u'c'), - (0x1D41D, 'M', u'd'), - (0x1D41E, 'M', u'e'), - (0x1D41F, 'M', u'f'), - (0x1D420, 'M', u'g'), - (0x1D421, 'M', u'h'), - (0x1D422, 'M', u'i'), - (0x1D423, 'M', u'j'), - (0x1D424, 'M', u'k'), - (0x1D425, 'M', u'l'), - (0x1D426, 'M', u'm'), - (0x1D427, 'M', u'n'), - (0x1D428, 'M', u'o'), - (0x1D429, 'M', u'p'), - (0x1D42A, 
'M', u'q'), - (0x1D42B, 'M', u'r'), - (0x1D42C, 'M', u's'), - (0x1D42D, 'M', u't'), - (0x1D42E, 'M', u'u'), - (0x1D42F, 'M', u'v'), - (0x1D430, 'M', u'w'), - (0x1D431, 'M', u'x'), - (0x1D432, 'M', u'y'), - (0x1D433, 'M', u'z'), - (0x1D434, 'M', u'a'), - (0x1D435, 'M', u'b'), - (0x1D436, 'M', u'c'), - (0x1D437, 'M', u'd'), - (0x1D438, 'M', u'e'), - (0x1D439, 'M', u'f'), - (0x1D43A, 'M', u'g'), - (0x1D43B, 'M', u'h'), - (0x1D43C, 'M', u'i'), - ] - -def _seg_54(): - return [ - (0x1D43D, 'M', u'j'), - (0x1D43E, 'M', u'k'), - (0x1D43F, 'M', u'l'), - (0x1D440, 'M', u'm'), - (0x1D441, 'M', u'n'), - (0x1D442, 'M', u'o'), - (0x1D443, 'M', u'p'), - (0x1D444, 'M', u'q'), - (0x1D445, 'M', u'r'), - (0x1D446, 'M', u's'), - (0x1D447, 'M', u't'), - (0x1D448, 'M', u'u'), - (0x1D449, 'M', u'v'), - (0x1D44A, 'M', u'w'), - (0x1D44B, 'M', u'x'), - (0x1D44C, 'M', u'y'), - (0x1D44D, 'M', u'z'), - (0x1D44E, 'M', u'a'), - (0x1D44F, 'M', u'b'), - (0x1D450, 'M', u'c'), - (0x1D451, 'M', u'd'), - (0x1D452, 'M', u'e'), - (0x1D453, 'M', u'f'), - (0x1D454, 'M', u'g'), - (0x1D455, 'X'), - (0x1D456, 'M', u'i'), - (0x1D457, 'M', u'j'), - (0x1D458, 'M', u'k'), - (0x1D459, 'M', u'l'), - (0x1D45A, 'M', u'm'), - (0x1D45B, 'M', u'n'), - (0x1D45C, 'M', u'o'), - (0x1D45D, 'M', u'p'), - (0x1D45E, 'M', u'q'), - (0x1D45F, 'M', u'r'), - (0x1D460, 'M', u's'), - (0x1D461, 'M', u't'), - (0x1D462, 'M', u'u'), - (0x1D463, 'M', u'v'), - (0x1D464, 'M', u'w'), - (0x1D465, 'M', u'x'), - (0x1D466, 'M', u'y'), - (0x1D467, 'M', u'z'), - (0x1D468, 'M', u'a'), - (0x1D469, 'M', u'b'), - (0x1D46A, 'M', u'c'), - (0x1D46B, 'M', u'd'), - (0x1D46C, 'M', u'e'), - (0x1D46D, 'M', u'f'), - (0x1D46E, 'M', u'g'), - (0x1D46F, 'M', u'h'), - (0x1D470, 'M', u'i'), - (0x1D471, 'M', u'j'), - (0x1D472, 'M', u'k'), - (0x1D473, 'M', u'l'), - (0x1D474, 'M', u'm'), - (0x1D475, 'M', u'n'), - (0x1D476, 'M', u'o'), - (0x1D477, 'M', u'p'), - (0x1D478, 'M', u'q'), - (0x1D479, 'M', u'r'), - (0x1D47A, 'M', u's'), - (0x1D47B, 'M', u't'), - (0x1D47C, 'M', u'u'), - (0x1D47D, 'M', u'v'), - (0x1D47E, 'M', u'w'), - (0x1D47F, 'M', u'x'), - (0x1D480, 'M', u'y'), - (0x1D481, 'M', u'z'), - (0x1D482, 'M', u'a'), - (0x1D483, 'M', u'b'), - (0x1D484, 'M', u'c'), - (0x1D485, 'M', u'd'), - (0x1D486, 'M', u'e'), - (0x1D487, 'M', u'f'), - (0x1D488, 'M', u'g'), - (0x1D489, 'M', u'h'), - (0x1D48A, 'M', u'i'), - (0x1D48B, 'M', u'j'), - (0x1D48C, 'M', u'k'), - (0x1D48D, 'M', u'l'), - (0x1D48E, 'M', u'm'), - (0x1D48F, 'M', u'n'), - (0x1D490, 'M', u'o'), - (0x1D491, 'M', u'p'), - (0x1D492, 'M', u'q'), - (0x1D493, 'M', u'r'), - (0x1D494, 'M', u's'), - (0x1D495, 'M', u't'), - (0x1D496, 'M', u'u'), - (0x1D497, 'M', u'v'), - (0x1D498, 'M', u'w'), - (0x1D499, 'M', u'x'), - (0x1D49A, 'M', u'y'), - (0x1D49B, 'M', u'z'), - (0x1D49C, 'M', u'a'), - (0x1D49D, 'X'), - (0x1D49E, 'M', u'c'), - (0x1D49F, 'M', u'd'), - (0x1D4A0, 'X'), - ] - -def _seg_55(): - return [ - (0x1D4A2, 'M', u'g'), - (0x1D4A3, 'X'), - (0x1D4A5, 'M', u'j'), - (0x1D4A6, 'M', u'k'), - (0x1D4A7, 'X'), - (0x1D4A9, 'M', u'n'), - (0x1D4AA, 'M', u'o'), - (0x1D4AB, 'M', u'p'), - (0x1D4AC, 'M', u'q'), - (0x1D4AD, 'X'), - (0x1D4AE, 'M', u's'), - (0x1D4AF, 'M', u't'), - (0x1D4B0, 'M', u'u'), - (0x1D4B1, 'M', u'v'), - (0x1D4B2, 'M', u'w'), - (0x1D4B3, 'M', u'x'), - (0x1D4B4, 'M', u'y'), - (0x1D4B5, 'M', u'z'), - (0x1D4B6, 'M', u'a'), - (0x1D4B7, 'M', u'b'), - (0x1D4B8, 'M', u'c'), - (0x1D4B9, 'M', u'd'), - (0x1D4BA, 'X'), - (0x1D4BB, 'M', u'f'), - (0x1D4BC, 'X'), - (0x1D4BD, 'M', u'h'), - (0x1D4BE, 'M', u'i'), - (0x1D4BF, 'M', u'j'), - (0x1D4C0, 'M', 
u'k'), - (0x1D4C1, 'M', u'l'), - (0x1D4C2, 'M', u'm'), - (0x1D4C3, 'M', u'n'), - (0x1D4C4, 'X'), - (0x1D4C5, 'M', u'p'), - (0x1D4C6, 'M', u'q'), - (0x1D4C7, 'M', u'r'), - (0x1D4C8, 'M', u's'), - (0x1D4C9, 'M', u't'), - (0x1D4CA, 'M', u'u'), - (0x1D4CB, 'M', u'v'), - (0x1D4CC, 'M', u'w'), - (0x1D4CD, 'M', u'x'), - (0x1D4CE, 'M', u'y'), - (0x1D4CF, 'M', u'z'), - (0x1D4D0, 'M', u'a'), - (0x1D4D1, 'M', u'b'), - (0x1D4D2, 'M', u'c'), - (0x1D4D3, 'M', u'd'), - (0x1D4D4, 'M', u'e'), - (0x1D4D5, 'M', u'f'), - (0x1D4D6, 'M', u'g'), - (0x1D4D7, 'M', u'h'), - (0x1D4D8, 'M', u'i'), - (0x1D4D9, 'M', u'j'), - (0x1D4DA, 'M', u'k'), - (0x1D4DB, 'M', u'l'), - (0x1D4DC, 'M', u'm'), - (0x1D4DD, 'M', u'n'), - (0x1D4DE, 'M', u'o'), - (0x1D4DF, 'M', u'p'), - (0x1D4E0, 'M', u'q'), - (0x1D4E1, 'M', u'r'), - (0x1D4E2, 'M', u's'), - (0x1D4E3, 'M', u't'), - (0x1D4E4, 'M', u'u'), - (0x1D4E5, 'M', u'v'), - (0x1D4E6, 'M', u'w'), - (0x1D4E7, 'M', u'x'), - (0x1D4E8, 'M', u'y'), - (0x1D4E9, 'M', u'z'), - (0x1D4EA, 'M', u'a'), - (0x1D4EB, 'M', u'b'), - (0x1D4EC, 'M', u'c'), - (0x1D4ED, 'M', u'd'), - (0x1D4EE, 'M', u'e'), - (0x1D4EF, 'M', u'f'), - (0x1D4F0, 'M', u'g'), - (0x1D4F1, 'M', u'h'), - (0x1D4F2, 'M', u'i'), - (0x1D4F3, 'M', u'j'), - (0x1D4F4, 'M', u'k'), - (0x1D4F5, 'M', u'l'), - (0x1D4F6, 'M', u'm'), - (0x1D4F7, 'M', u'n'), - (0x1D4F8, 'M', u'o'), - (0x1D4F9, 'M', u'p'), - (0x1D4FA, 'M', u'q'), - (0x1D4FB, 'M', u'r'), - (0x1D4FC, 'M', u's'), - (0x1D4FD, 'M', u't'), - (0x1D4FE, 'M', u'u'), - (0x1D4FF, 'M', u'v'), - (0x1D500, 'M', u'w'), - (0x1D501, 'M', u'x'), - (0x1D502, 'M', u'y'), - (0x1D503, 'M', u'z'), - (0x1D504, 'M', u'a'), - (0x1D505, 'M', u'b'), - (0x1D506, 'X'), - (0x1D507, 'M', u'd'), - ] - -def _seg_56(): - return [ - (0x1D508, 'M', u'e'), - (0x1D509, 'M', u'f'), - (0x1D50A, 'M', u'g'), - (0x1D50B, 'X'), - (0x1D50D, 'M', u'j'), - (0x1D50E, 'M', u'k'), - (0x1D50F, 'M', u'l'), - (0x1D510, 'M', u'm'), - (0x1D511, 'M', u'n'), - (0x1D512, 'M', u'o'), - (0x1D513, 'M', u'p'), - (0x1D514, 'M', u'q'), - (0x1D515, 'X'), - (0x1D516, 'M', u's'), - (0x1D517, 'M', u't'), - (0x1D518, 'M', u'u'), - (0x1D519, 'M', u'v'), - (0x1D51A, 'M', u'w'), - (0x1D51B, 'M', u'x'), - (0x1D51C, 'M', u'y'), - (0x1D51D, 'X'), - (0x1D51E, 'M', u'a'), - (0x1D51F, 'M', u'b'), - (0x1D520, 'M', u'c'), - (0x1D521, 'M', u'd'), - (0x1D522, 'M', u'e'), - (0x1D523, 'M', u'f'), - (0x1D524, 'M', u'g'), - (0x1D525, 'M', u'h'), - (0x1D526, 'M', u'i'), - (0x1D527, 'M', u'j'), - (0x1D528, 'M', u'k'), - (0x1D529, 'M', u'l'), - (0x1D52A, 'M', u'm'), - (0x1D52B, 'M', u'n'), - (0x1D52C, 'M', u'o'), - (0x1D52D, 'M', u'p'), - (0x1D52E, 'M', u'q'), - (0x1D52F, 'M', u'r'), - (0x1D530, 'M', u's'), - (0x1D531, 'M', u't'), - (0x1D532, 'M', u'u'), - (0x1D533, 'M', u'v'), - (0x1D534, 'M', u'w'), - (0x1D535, 'M', u'x'), - (0x1D536, 'M', u'y'), - (0x1D537, 'M', u'z'), - (0x1D538, 'M', u'a'), - (0x1D539, 'M', u'b'), - (0x1D53A, 'X'), - (0x1D53B, 'M', u'd'), - (0x1D53C, 'M', u'e'), - (0x1D53D, 'M', u'f'), - (0x1D53E, 'M', u'g'), - (0x1D53F, 'X'), - (0x1D540, 'M', u'i'), - (0x1D541, 'M', u'j'), - (0x1D542, 'M', u'k'), - (0x1D543, 'M', u'l'), - (0x1D544, 'M', u'm'), - (0x1D545, 'X'), - (0x1D546, 'M', u'o'), - (0x1D547, 'X'), - (0x1D54A, 'M', u's'), - (0x1D54B, 'M', u't'), - (0x1D54C, 'M', u'u'), - (0x1D54D, 'M', u'v'), - (0x1D54E, 'M', u'w'), - (0x1D54F, 'M', u'x'), - (0x1D550, 'M', u'y'), - (0x1D551, 'X'), - (0x1D552, 'M', u'a'), - (0x1D553, 'M', u'b'), - (0x1D554, 'M', u'c'), - (0x1D555, 'M', u'd'), - (0x1D556, 'M', u'e'), - (0x1D557, 'M', u'f'), - (0x1D558, 'M', 
u'g'), - (0x1D559, 'M', u'h'), - (0x1D55A, 'M', u'i'), - (0x1D55B, 'M', u'j'), - (0x1D55C, 'M', u'k'), - (0x1D55D, 'M', u'l'), - (0x1D55E, 'M', u'm'), - (0x1D55F, 'M', u'n'), - (0x1D560, 'M', u'o'), - (0x1D561, 'M', u'p'), - (0x1D562, 'M', u'q'), - (0x1D563, 'M', u'r'), - (0x1D564, 'M', u's'), - (0x1D565, 'M', u't'), - (0x1D566, 'M', u'u'), - (0x1D567, 'M', u'v'), - (0x1D568, 'M', u'w'), - (0x1D569, 'M', u'x'), - (0x1D56A, 'M', u'y'), - (0x1D56B, 'M', u'z'), - (0x1D56C, 'M', u'a'), - (0x1D56D, 'M', u'b'), - (0x1D56E, 'M', u'c'), - ] - -def _seg_57(): - return [ - (0x1D56F, 'M', u'd'), - (0x1D570, 'M', u'e'), - (0x1D571, 'M', u'f'), - (0x1D572, 'M', u'g'), - (0x1D573, 'M', u'h'), - (0x1D574, 'M', u'i'), - (0x1D575, 'M', u'j'), - (0x1D576, 'M', u'k'), - (0x1D577, 'M', u'l'), - (0x1D578, 'M', u'm'), - (0x1D579, 'M', u'n'), - (0x1D57A, 'M', u'o'), - (0x1D57B, 'M', u'p'), - (0x1D57C, 'M', u'q'), - (0x1D57D, 'M', u'r'), - (0x1D57E, 'M', u's'), - (0x1D57F, 'M', u't'), - (0x1D580, 'M', u'u'), - (0x1D581, 'M', u'v'), - (0x1D582, 'M', u'w'), - (0x1D583, 'M', u'x'), - (0x1D584, 'M', u'y'), - (0x1D585, 'M', u'z'), - (0x1D586, 'M', u'a'), - (0x1D587, 'M', u'b'), - (0x1D588, 'M', u'c'), - (0x1D589, 'M', u'd'), - (0x1D58A, 'M', u'e'), - (0x1D58B, 'M', u'f'), - (0x1D58C, 'M', u'g'), - (0x1D58D, 'M', u'h'), - (0x1D58E, 'M', u'i'), - (0x1D58F, 'M', u'j'), - (0x1D590, 'M', u'k'), - (0x1D591, 'M', u'l'), - (0x1D592, 'M', u'm'), - (0x1D593, 'M', u'n'), - (0x1D594, 'M', u'o'), - (0x1D595, 'M', u'p'), - (0x1D596, 'M', u'q'), - (0x1D597, 'M', u'r'), - (0x1D598, 'M', u's'), - (0x1D599, 'M', u't'), - (0x1D59A, 'M', u'u'), - (0x1D59B, 'M', u'v'), - (0x1D59C, 'M', u'w'), - (0x1D59D, 'M', u'x'), - (0x1D59E, 'M', u'y'), - (0x1D59F, 'M', u'z'), - (0x1D5A0, 'M', u'a'), - (0x1D5A1, 'M', u'b'), - (0x1D5A2, 'M', u'c'), - (0x1D5A3, 'M', u'd'), - (0x1D5A4, 'M', u'e'), - (0x1D5A5, 'M', u'f'), - (0x1D5A6, 'M', u'g'), - (0x1D5A7, 'M', u'h'), - (0x1D5A8, 'M', u'i'), - (0x1D5A9, 'M', u'j'), - (0x1D5AA, 'M', u'k'), - (0x1D5AB, 'M', u'l'), - (0x1D5AC, 'M', u'm'), - (0x1D5AD, 'M', u'n'), - (0x1D5AE, 'M', u'o'), - (0x1D5AF, 'M', u'p'), - (0x1D5B0, 'M', u'q'), - (0x1D5B1, 'M', u'r'), - (0x1D5B2, 'M', u's'), - (0x1D5B3, 'M', u't'), - (0x1D5B4, 'M', u'u'), - (0x1D5B5, 'M', u'v'), - (0x1D5B6, 'M', u'w'), - (0x1D5B7, 'M', u'x'), - (0x1D5B8, 'M', u'y'), - (0x1D5B9, 'M', u'z'), - (0x1D5BA, 'M', u'a'), - (0x1D5BB, 'M', u'b'), - (0x1D5BC, 'M', u'c'), - (0x1D5BD, 'M', u'd'), - (0x1D5BE, 'M', u'e'), - (0x1D5BF, 'M', u'f'), - (0x1D5C0, 'M', u'g'), - (0x1D5C1, 'M', u'h'), - (0x1D5C2, 'M', u'i'), - (0x1D5C3, 'M', u'j'), - (0x1D5C4, 'M', u'k'), - (0x1D5C5, 'M', u'l'), - (0x1D5C6, 'M', u'm'), - (0x1D5C7, 'M', u'n'), - (0x1D5C8, 'M', u'o'), - (0x1D5C9, 'M', u'p'), - (0x1D5CA, 'M', u'q'), - (0x1D5CB, 'M', u'r'), - (0x1D5CC, 'M', u's'), - (0x1D5CD, 'M', u't'), - (0x1D5CE, 'M', u'u'), - (0x1D5CF, 'M', u'v'), - (0x1D5D0, 'M', u'w'), - (0x1D5D1, 'M', u'x'), - (0x1D5D2, 'M', u'y'), - ] - -def _seg_58(): - return [ - (0x1D5D3, 'M', u'z'), - (0x1D5D4, 'M', u'a'), - (0x1D5D5, 'M', u'b'), - (0x1D5D6, 'M', u'c'), - (0x1D5D7, 'M', u'd'), - (0x1D5D8, 'M', u'e'), - (0x1D5D9, 'M', u'f'), - (0x1D5DA, 'M', u'g'), - (0x1D5DB, 'M', u'h'), - (0x1D5DC, 'M', u'i'), - (0x1D5DD, 'M', u'j'), - (0x1D5DE, 'M', u'k'), - (0x1D5DF, 'M', u'l'), - (0x1D5E0, 'M', u'm'), - (0x1D5E1, 'M', u'n'), - (0x1D5E2, 'M', u'o'), - (0x1D5E3, 'M', u'p'), - (0x1D5E4, 'M', u'q'), - (0x1D5E5, 'M', u'r'), - (0x1D5E6, 'M', u's'), - (0x1D5E7, 'M', u't'), - (0x1D5E8, 'M', u'u'), - (0x1D5E9, 'M', u'v'), - 
(0x1D5EA, 'M', u'w'), - (0x1D5EB, 'M', u'x'), - (0x1D5EC, 'M', u'y'), - (0x1D5ED, 'M', u'z'), - (0x1D5EE, 'M', u'a'), - (0x1D5EF, 'M', u'b'), - (0x1D5F0, 'M', u'c'), - (0x1D5F1, 'M', u'd'), - (0x1D5F2, 'M', u'e'), - (0x1D5F3, 'M', u'f'), - (0x1D5F4, 'M', u'g'), - (0x1D5F5, 'M', u'h'), - (0x1D5F6, 'M', u'i'), - (0x1D5F7, 'M', u'j'), - (0x1D5F8, 'M', u'k'), - (0x1D5F9, 'M', u'l'), - (0x1D5FA, 'M', u'm'), - (0x1D5FB, 'M', u'n'), - (0x1D5FC, 'M', u'o'), - (0x1D5FD, 'M', u'p'), - (0x1D5FE, 'M', u'q'), - (0x1D5FF, 'M', u'r'), - (0x1D600, 'M', u's'), - (0x1D601, 'M', u't'), - (0x1D602, 'M', u'u'), - (0x1D603, 'M', u'v'), - (0x1D604, 'M', u'w'), - (0x1D605, 'M', u'x'), - (0x1D606, 'M', u'y'), - (0x1D607, 'M', u'z'), - (0x1D608, 'M', u'a'), - (0x1D609, 'M', u'b'), - (0x1D60A, 'M', u'c'), - (0x1D60B, 'M', u'd'), - (0x1D60C, 'M', u'e'), - (0x1D60D, 'M', u'f'), - (0x1D60E, 'M', u'g'), - (0x1D60F, 'M', u'h'), - (0x1D610, 'M', u'i'), - (0x1D611, 'M', u'j'), - (0x1D612, 'M', u'k'), - (0x1D613, 'M', u'l'), - (0x1D614, 'M', u'm'), - (0x1D615, 'M', u'n'), - (0x1D616, 'M', u'o'), - (0x1D617, 'M', u'p'), - (0x1D618, 'M', u'q'), - (0x1D619, 'M', u'r'), - (0x1D61A, 'M', u's'), - (0x1D61B, 'M', u't'), - (0x1D61C, 'M', u'u'), - (0x1D61D, 'M', u'v'), - (0x1D61E, 'M', u'w'), - (0x1D61F, 'M', u'x'), - (0x1D620, 'M', u'y'), - (0x1D621, 'M', u'z'), - (0x1D622, 'M', u'a'), - (0x1D623, 'M', u'b'), - (0x1D624, 'M', u'c'), - (0x1D625, 'M', u'd'), - (0x1D626, 'M', u'e'), - (0x1D627, 'M', u'f'), - (0x1D628, 'M', u'g'), - (0x1D629, 'M', u'h'), - (0x1D62A, 'M', u'i'), - (0x1D62B, 'M', u'j'), - (0x1D62C, 'M', u'k'), - (0x1D62D, 'M', u'l'), - (0x1D62E, 'M', u'm'), - (0x1D62F, 'M', u'n'), - (0x1D630, 'M', u'o'), - (0x1D631, 'M', u'p'), - (0x1D632, 'M', u'q'), - (0x1D633, 'M', u'r'), - (0x1D634, 'M', u's'), - (0x1D635, 'M', u't'), - (0x1D636, 'M', u'u'), - ] - -def _seg_59(): - return [ - (0x1D637, 'M', u'v'), - (0x1D638, 'M', u'w'), - (0x1D639, 'M', u'x'), - (0x1D63A, 'M', u'y'), - (0x1D63B, 'M', u'z'), - (0x1D63C, 'M', u'a'), - (0x1D63D, 'M', u'b'), - (0x1D63E, 'M', u'c'), - (0x1D63F, 'M', u'd'), - (0x1D640, 'M', u'e'), - (0x1D641, 'M', u'f'), - (0x1D642, 'M', u'g'), - (0x1D643, 'M', u'h'), - (0x1D644, 'M', u'i'), - (0x1D645, 'M', u'j'), - (0x1D646, 'M', u'k'), - (0x1D647, 'M', u'l'), - (0x1D648, 'M', u'm'), - (0x1D649, 'M', u'n'), - (0x1D64A, 'M', u'o'), - (0x1D64B, 'M', u'p'), - (0x1D64C, 'M', u'q'), - (0x1D64D, 'M', u'r'), - (0x1D64E, 'M', u's'), - (0x1D64F, 'M', u't'), - (0x1D650, 'M', u'u'), - (0x1D651, 'M', u'v'), - (0x1D652, 'M', u'w'), - (0x1D653, 'M', u'x'), - (0x1D654, 'M', u'y'), - (0x1D655, 'M', u'z'), - (0x1D656, 'M', u'a'), - (0x1D657, 'M', u'b'), - (0x1D658, 'M', u'c'), - (0x1D659, 'M', u'd'), - (0x1D65A, 'M', u'e'), - (0x1D65B, 'M', u'f'), - (0x1D65C, 'M', u'g'), - (0x1D65D, 'M', u'h'), - (0x1D65E, 'M', u'i'), - (0x1D65F, 'M', u'j'), - (0x1D660, 'M', u'k'), - (0x1D661, 'M', u'l'), - (0x1D662, 'M', u'm'), - (0x1D663, 'M', u'n'), - (0x1D664, 'M', u'o'), - (0x1D665, 'M', u'p'), - (0x1D666, 'M', u'q'), - (0x1D667, 'M', u'r'), - (0x1D668, 'M', u's'), - (0x1D669, 'M', u't'), - (0x1D66A, 'M', u'u'), - (0x1D66B, 'M', u'v'), - (0x1D66C, 'M', u'w'), - (0x1D66D, 'M', u'x'), - (0x1D66E, 'M', u'y'), - (0x1D66F, 'M', u'z'), - (0x1D670, 'M', u'a'), - (0x1D671, 'M', u'b'), - (0x1D672, 'M', u'c'), - (0x1D673, 'M', u'd'), - (0x1D674, 'M', u'e'), - (0x1D675, 'M', u'f'), - (0x1D676, 'M', u'g'), - (0x1D677, 'M', u'h'), - (0x1D678, 'M', u'i'), - (0x1D679, 'M', u'j'), - (0x1D67A, 'M', u'k'), - (0x1D67B, 'M', u'l'), - (0x1D67C, 'M', 
u'm'), - (0x1D67D, 'M', u'n'), - (0x1D67E, 'M', u'o'), - (0x1D67F, 'M', u'p'), - (0x1D680, 'M', u'q'), - (0x1D681, 'M', u'r'), - (0x1D682, 'M', u's'), - (0x1D683, 'M', u't'), - (0x1D684, 'M', u'u'), - (0x1D685, 'M', u'v'), - (0x1D686, 'M', u'w'), - (0x1D687, 'M', u'x'), - (0x1D688, 'M', u'y'), - (0x1D689, 'M', u'z'), - (0x1D68A, 'M', u'a'), - (0x1D68B, 'M', u'b'), - (0x1D68C, 'M', u'c'), - (0x1D68D, 'M', u'd'), - (0x1D68E, 'M', u'e'), - (0x1D68F, 'M', u'f'), - (0x1D690, 'M', u'g'), - (0x1D691, 'M', u'h'), - (0x1D692, 'M', u'i'), - (0x1D693, 'M', u'j'), - (0x1D694, 'M', u'k'), - (0x1D695, 'M', u'l'), - (0x1D696, 'M', u'm'), - (0x1D697, 'M', u'n'), - (0x1D698, 'M', u'o'), - (0x1D699, 'M', u'p'), - (0x1D69A, 'M', u'q'), - ] - -def _seg_60(): - return [ - (0x1D69B, 'M', u'r'), - (0x1D69C, 'M', u's'), - (0x1D69D, 'M', u't'), - (0x1D69E, 'M', u'u'), - (0x1D69F, 'M', u'v'), - (0x1D6A0, 'M', u'w'), - (0x1D6A1, 'M', u'x'), - (0x1D6A2, 'M', u'y'), - (0x1D6A3, 'M', u'z'), - (0x1D6A4, 'M', u'ı'), - (0x1D6A5, 'M', u'ȷ'), - (0x1D6A6, 'X'), - (0x1D6A8, 'M', u'α'), - (0x1D6A9, 'M', u'β'), - (0x1D6AA, 'M', u'γ'), - (0x1D6AB, 'M', u'δ'), - (0x1D6AC, 'M', u'ε'), - (0x1D6AD, 'M', u'ζ'), - (0x1D6AE, 'M', u'η'), - (0x1D6AF, 'M', u'θ'), - (0x1D6B0, 'M', u'ι'), - (0x1D6B1, 'M', u'κ'), - (0x1D6B2, 'M', u'λ'), - (0x1D6B3, 'M', u'μ'), - (0x1D6B4, 'M', u'ν'), - (0x1D6B5, 'M', u'ξ'), - (0x1D6B6, 'M', u'ο'), - (0x1D6B7, 'M', u'π'), - (0x1D6B8, 'M', u'ρ'), - (0x1D6B9, 'M', u'θ'), - (0x1D6BA, 'M', u'σ'), - (0x1D6BB, 'M', u'τ'), - (0x1D6BC, 'M', u'υ'), - (0x1D6BD, 'M', u'φ'), - (0x1D6BE, 'M', u'χ'), - (0x1D6BF, 'M', u'ψ'), - (0x1D6C0, 'M', u'ω'), - (0x1D6C1, 'M', u'∇'), - (0x1D6C2, 'M', u'α'), - (0x1D6C3, 'M', u'β'), - (0x1D6C4, 'M', u'γ'), - (0x1D6C5, 'M', u'δ'), - (0x1D6C6, 'M', u'ε'), - (0x1D6C7, 'M', u'ζ'), - (0x1D6C8, 'M', u'η'), - (0x1D6C9, 'M', u'θ'), - (0x1D6CA, 'M', u'ι'), - (0x1D6CB, 'M', u'κ'), - (0x1D6CC, 'M', u'λ'), - (0x1D6CD, 'M', u'μ'), - (0x1D6CE, 'M', u'ν'), - (0x1D6CF, 'M', u'ξ'), - (0x1D6D0, 'M', u'ο'), - (0x1D6D1, 'M', u'π'), - (0x1D6D2, 'M', u'ρ'), - (0x1D6D3, 'M', u'σ'), - (0x1D6D5, 'M', u'τ'), - (0x1D6D6, 'M', u'υ'), - (0x1D6D7, 'M', u'φ'), - (0x1D6D8, 'M', u'χ'), - (0x1D6D9, 'M', u'ψ'), - (0x1D6DA, 'M', u'ω'), - (0x1D6DB, 'M', u'∂'), - (0x1D6DC, 'M', u'ε'), - (0x1D6DD, 'M', u'θ'), - (0x1D6DE, 'M', u'κ'), - (0x1D6DF, 'M', u'φ'), - (0x1D6E0, 'M', u'ρ'), - (0x1D6E1, 'M', u'π'), - (0x1D6E2, 'M', u'α'), - (0x1D6E3, 'M', u'β'), - (0x1D6E4, 'M', u'γ'), - (0x1D6E5, 'M', u'δ'), - (0x1D6E6, 'M', u'ε'), - (0x1D6E7, 'M', u'ζ'), - (0x1D6E8, 'M', u'η'), - (0x1D6E9, 'M', u'θ'), - (0x1D6EA, 'M', u'ι'), - (0x1D6EB, 'M', u'κ'), - (0x1D6EC, 'M', u'λ'), - (0x1D6ED, 'M', u'μ'), - (0x1D6EE, 'M', u'ν'), - (0x1D6EF, 'M', u'ξ'), - (0x1D6F0, 'M', u'ο'), - (0x1D6F1, 'M', u'π'), - (0x1D6F2, 'M', u'ρ'), - (0x1D6F3, 'M', u'θ'), - (0x1D6F4, 'M', u'σ'), - (0x1D6F5, 'M', u'τ'), - (0x1D6F6, 'M', u'υ'), - (0x1D6F7, 'M', u'φ'), - (0x1D6F8, 'M', u'χ'), - (0x1D6F9, 'M', u'ψ'), - (0x1D6FA, 'M', u'ω'), - (0x1D6FB, 'M', u'∇'), - (0x1D6FC, 'M', u'α'), - (0x1D6FD, 'M', u'β'), - (0x1D6FE, 'M', u'γ'), - (0x1D6FF, 'M', u'δ'), - (0x1D700, 'M', u'ε'), - ] - -def _seg_61(): - return [ - (0x1D701, 'M', u'ζ'), - (0x1D702, 'M', u'η'), - (0x1D703, 'M', u'θ'), - (0x1D704, 'M', u'ι'), - (0x1D705, 'M', u'κ'), - (0x1D706, 'M', u'λ'), - (0x1D707, 'M', u'μ'), - (0x1D708, 'M', u'ν'), - (0x1D709, 'M', u'ξ'), - (0x1D70A, 'M', u'ο'), - (0x1D70B, 'M', u'π'), - (0x1D70C, 'M', u'ρ'), - (0x1D70D, 'M', u'σ'), - (0x1D70F, 'M', u'τ'), - (0x1D710, 'M', u'υ'), - 
(0x1D711, 'M', u'φ'), - (0x1D712, 'M', u'χ'), - (0x1D713, 'M', u'ψ'), - (0x1D714, 'M', u'ω'), - (0x1D715, 'M', u'∂'), - (0x1D716, 'M', u'ε'), - (0x1D717, 'M', u'θ'), - (0x1D718, 'M', u'κ'), - (0x1D719, 'M', u'φ'), - (0x1D71A, 'M', u'ρ'), - (0x1D71B, 'M', u'π'), - (0x1D71C, 'M', u'α'), - (0x1D71D, 'M', u'β'), - (0x1D71E, 'M', u'γ'), - (0x1D71F, 'M', u'δ'), - (0x1D720, 'M', u'ε'), - (0x1D721, 'M', u'ζ'), - (0x1D722, 'M', u'η'), - (0x1D723, 'M', u'θ'), - (0x1D724, 'M', u'ι'), - (0x1D725, 'M', u'κ'), - (0x1D726, 'M', u'λ'), - (0x1D727, 'M', u'μ'), - (0x1D728, 'M', u'ν'), - (0x1D729, 'M', u'ξ'), - (0x1D72A, 'M', u'ο'), - (0x1D72B, 'M', u'π'), - (0x1D72C, 'M', u'ρ'), - (0x1D72D, 'M', u'θ'), - (0x1D72E, 'M', u'σ'), - (0x1D72F, 'M', u'τ'), - (0x1D730, 'M', u'υ'), - (0x1D731, 'M', u'φ'), - (0x1D732, 'M', u'χ'), - (0x1D733, 'M', u'ψ'), - (0x1D734, 'M', u'ω'), - (0x1D735, 'M', u'∇'), - (0x1D736, 'M', u'α'), - (0x1D737, 'M', u'β'), - (0x1D738, 'M', u'γ'), - (0x1D739, 'M', u'δ'), - (0x1D73A, 'M', u'ε'), - (0x1D73B, 'M', u'ζ'), - (0x1D73C, 'M', u'η'), - (0x1D73D, 'M', u'θ'), - (0x1D73E, 'M', u'ι'), - (0x1D73F, 'M', u'κ'), - (0x1D740, 'M', u'λ'), - (0x1D741, 'M', u'μ'), - (0x1D742, 'M', u'ν'), - (0x1D743, 'M', u'ξ'), - (0x1D744, 'M', u'ο'), - (0x1D745, 'M', u'π'), - (0x1D746, 'M', u'ρ'), - (0x1D747, 'M', u'σ'), - (0x1D749, 'M', u'τ'), - (0x1D74A, 'M', u'υ'), - (0x1D74B, 'M', u'φ'), - (0x1D74C, 'M', u'χ'), - (0x1D74D, 'M', u'ψ'), - (0x1D74E, 'M', u'ω'), - (0x1D74F, 'M', u'∂'), - (0x1D750, 'M', u'ε'), - (0x1D751, 'M', u'θ'), - (0x1D752, 'M', u'κ'), - (0x1D753, 'M', u'φ'), - (0x1D754, 'M', u'ρ'), - (0x1D755, 'M', u'π'), - (0x1D756, 'M', u'α'), - (0x1D757, 'M', u'β'), - (0x1D758, 'M', u'γ'), - (0x1D759, 'M', u'δ'), - (0x1D75A, 'M', u'ε'), - (0x1D75B, 'M', u'ζ'), - (0x1D75C, 'M', u'η'), - (0x1D75D, 'M', u'θ'), - (0x1D75E, 'M', u'ι'), - (0x1D75F, 'M', u'κ'), - (0x1D760, 'M', u'λ'), - (0x1D761, 'M', u'μ'), - (0x1D762, 'M', u'ν'), - (0x1D763, 'M', u'ξ'), - (0x1D764, 'M', u'ο'), - (0x1D765, 'M', u'π'), - (0x1D766, 'M', u'ρ'), - ] - -def _seg_62(): - return [ - (0x1D767, 'M', u'θ'), - (0x1D768, 'M', u'σ'), - (0x1D769, 'M', u'τ'), - (0x1D76A, 'M', u'υ'), - (0x1D76B, 'M', u'φ'), - (0x1D76C, 'M', u'χ'), - (0x1D76D, 'M', u'ψ'), - (0x1D76E, 'M', u'ω'), - (0x1D76F, 'M', u'∇'), - (0x1D770, 'M', u'α'), - (0x1D771, 'M', u'β'), - (0x1D772, 'M', u'γ'), - (0x1D773, 'M', u'δ'), - (0x1D774, 'M', u'ε'), - (0x1D775, 'M', u'ζ'), - (0x1D776, 'M', u'η'), - (0x1D777, 'M', u'θ'), - (0x1D778, 'M', u'ι'), - (0x1D779, 'M', u'κ'), - (0x1D77A, 'M', u'λ'), - (0x1D77B, 'M', u'μ'), - (0x1D77C, 'M', u'ν'), - (0x1D77D, 'M', u'ξ'), - (0x1D77E, 'M', u'ο'), - (0x1D77F, 'M', u'π'), - (0x1D780, 'M', u'ρ'), - (0x1D781, 'M', u'σ'), - (0x1D783, 'M', u'τ'), - (0x1D784, 'M', u'υ'), - (0x1D785, 'M', u'φ'), - (0x1D786, 'M', u'χ'), - (0x1D787, 'M', u'ψ'), - (0x1D788, 'M', u'ω'), - (0x1D789, 'M', u'∂'), - (0x1D78A, 'M', u'ε'), - (0x1D78B, 'M', u'θ'), - (0x1D78C, 'M', u'κ'), - (0x1D78D, 'M', u'φ'), - (0x1D78E, 'M', u'ρ'), - (0x1D78F, 'M', u'π'), - (0x1D790, 'M', u'α'), - (0x1D791, 'M', u'β'), - (0x1D792, 'M', u'γ'), - (0x1D793, 'M', u'δ'), - (0x1D794, 'M', u'ε'), - (0x1D795, 'M', u'ζ'), - (0x1D796, 'M', u'η'), - (0x1D797, 'M', u'θ'), - (0x1D798, 'M', u'ι'), - (0x1D799, 'M', u'κ'), - (0x1D79A, 'M', u'λ'), - (0x1D79B, 'M', u'μ'), - (0x1D79C, 'M', u'ν'), - (0x1D79D, 'M', u'ξ'), - (0x1D79E, 'M', u'ο'), - (0x1D79F, 'M', u'π'), - (0x1D7A0, 'M', u'ρ'), - (0x1D7A1, 'M', u'θ'), - (0x1D7A2, 'M', u'σ'), - (0x1D7A3, 'M', u'τ'), - (0x1D7A4, 'M', u'υ'), - (0x1D7A5, 'M', 
u'φ'), - (0x1D7A6, 'M', u'χ'), - (0x1D7A7, 'M', u'ψ'), - (0x1D7A8, 'M', u'ω'), - (0x1D7A9, 'M', u'∇'), - (0x1D7AA, 'M', u'α'), - (0x1D7AB, 'M', u'β'), - (0x1D7AC, 'M', u'γ'), - (0x1D7AD, 'M', u'δ'), - (0x1D7AE, 'M', u'ε'), - (0x1D7AF, 'M', u'ζ'), - (0x1D7B0, 'M', u'η'), - (0x1D7B1, 'M', u'θ'), - (0x1D7B2, 'M', u'ι'), - (0x1D7B3, 'M', u'κ'), - (0x1D7B4, 'M', u'λ'), - (0x1D7B5, 'M', u'μ'), - (0x1D7B6, 'M', u'ν'), - (0x1D7B7, 'M', u'ξ'), - (0x1D7B8, 'M', u'ο'), - (0x1D7B9, 'M', u'π'), - (0x1D7BA, 'M', u'ρ'), - (0x1D7BB, 'M', u'σ'), - (0x1D7BD, 'M', u'τ'), - (0x1D7BE, 'M', u'υ'), - (0x1D7BF, 'M', u'φ'), - (0x1D7C0, 'M', u'χ'), - (0x1D7C1, 'M', u'ψ'), - (0x1D7C2, 'M', u'ω'), - (0x1D7C3, 'M', u'∂'), - (0x1D7C4, 'M', u'ε'), - (0x1D7C5, 'M', u'θ'), - (0x1D7C6, 'M', u'κ'), - (0x1D7C7, 'M', u'φ'), - (0x1D7C8, 'M', u'ρ'), - (0x1D7C9, 'M', u'π'), - (0x1D7CA, 'M', u'ϝ'), - (0x1D7CC, 'X'), - (0x1D7CE, 'M', u'0'), - ] - -def _seg_63(): - return [ - (0x1D7CF, 'M', u'1'), - (0x1D7D0, 'M', u'2'), - (0x1D7D1, 'M', u'3'), - (0x1D7D2, 'M', u'4'), - (0x1D7D3, 'M', u'5'), - (0x1D7D4, 'M', u'6'), - (0x1D7D5, 'M', u'7'), - (0x1D7D6, 'M', u'8'), - (0x1D7D7, 'M', u'9'), - (0x1D7D8, 'M', u'0'), - (0x1D7D9, 'M', u'1'), - (0x1D7DA, 'M', u'2'), - (0x1D7DB, 'M', u'3'), - (0x1D7DC, 'M', u'4'), - (0x1D7DD, 'M', u'5'), - (0x1D7DE, 'M', u'6'), - (0x1D7DF, 'M', u'7'), - (0x1D7E0, 'M', u'8'), - (0x1D7E1, 'M', u'9'), - (0x1D7E2, 'M', u'0'), - (0x1D7E3, 'M', u'1'), - (0x1D7E4, 'M', u'2'), - (0x1D7E5, 'M', u'3'), - (0x1D7E6, 'M', u'4'), - (0x1D7E7, 'M', u'5'), - (0x1D7E8, 'M', u'6'), - (0x1D7E9, 'M', u'7'), - (0x1D7EA, 'M', u'8'), - (0x1D7EB, 'M', u'9'), - (0x1D7EC, 'M', u'0'), - (0x1D7ED, 'M', u'1'), - (0x1D7EE, 'M', u'2'), - (0x1D7EF, 'M', u'3'), - (0x1D7F0, 'M', u'4'), - (0x1D7F1, 'M', u'5'), - (0x1D7F2, 'M', u'6'), - (0x1D7F3, 'M', u'7'), - (0x1D7F4, 'M', u'8'), - (0x1D7F5, 'M', u'9'), - (0x1D7F6, 'M', u'0'), - (0x1D7F7, 'M', u'1'), - (0x1D7F8, 'M', u'2'), - (0x1D7F9, 'M', u'3'), - (0x1D7FA, 'M', u'4'), - (0x1D7FB, 'M', u'5'), - (0x1D7FC, 'M', u'6'), - (0x1D7FD, 'M', u'7'), - (0x1D7FE, 'M', u'8'), - (0x1D7FF, 'M', u'9'), - (0x1D800, 'X'), - (0x1EE00, 'M', u'ا'), - (0x1EE01, 'M', u'ب'), - (0x1EE02, 'M', u'ج'), - (0x1EE03, 'M', u'د'), - (0x1EE04, 'X'), - (0x1EE05, 'M', u'و'), - (0x1EE06, 'M', u'ز'), - (0x1EE07, 'M', u'ح'), - (0x1EE08, 'M', u'ط'), - (0x1EE09, 'M', u'ي'), - (0x1EE0A, 'M', u'ك'), - (0x1EE0B, 'M', u'ل'), - (0x1EE0C, 'M', u'م'), - (0x1EE0D, 'M', u'ن'), - (0x1EE0E, 'M', u'س'), - (0x1EE0F, 'M', u'ع'), - (0x1EE10, 'M', u'ف'), - (0x1EE11, 'M', u'ص'), - (0x1EE12, 'M', u'ق'), - (0x1EE13, 'M', u'ر'), - (0x1EE14, 'M', u'ش'), - (0x1EE15, 'M', u'ت'), - (0x1EE16, 'M', u'ث'), - (0x1EE17, 'M', u'خ'), - (0x1EE18, 'M', u'ذ'), - (0x1EE19, 'M', u'ض'), - (0x1EE1A, 'M', u'ظ'), - (0x1EE1B, 'M', u'غ'), - (0x1EE1C, 'M', u'ٮ'), - (0x1EE1D, 'M', u'ں'), - (0x1EE1E, 'M', u'ڡ'), - (0x1EE1F, 'M', u'ٯ'), - (0x1EE20, 'X'), - (0x1EE21, 'M', u'ب'), - (0x1EE22, 'M', u'ج'), - (0x1EE23, 'X'), - (0x1EE24, 'M', u'ه'), - (0x1EE25, 'X'), - (0x1EE27, 'M', u'ح'), - (0x1EE28, 'X'), - (0x1EE29, 'M', u'ي'), - (0x1EE2A, 'M', u'ك'), - (0x1EE2B, 'M', u'ل'), - (0x1EE2C, 'M', u'م'), - (0x1EE2D, 'M', u'ن'), - (0x1EE2E, 'M', u'س'), - (0x1EE2F, 'M', u'ع'), - (0x1EE30, 'M', u'ف'), - (0x1EE31, 'M', u'ص'), - (0x1EE32, 'M', u'ق'), - ] - -def _seg_64(): - return [ - (0x1EE33, 'X'), - (0x1EE34, 'M', u'ش'), - (0x1EE35, 'M', u'ت'), - (0x1EE36, 'M', u'ث'), - (0x1EE37, 'M', u'خ'), - (0x1EE38, 'X'), - (0x1EE39, 'M', u'ض'), - (0x1EE3A, 'X'), - (0x1EE3B, 'M', u'غ'), - (0x1EE3C, 
'X'), - (0x1EE42, 'M', u'ج'), - (0x1EE43, 'X'), - (0x1EE47, 'M', u'ح'), - (0x1EE48, 'X'), - (0x1EE49, 'M', u'ي'), - (0x1EE4A, 'X'), - (0x1EE4B, 'M', u'ل'), - (0x1EE4C, 'X'), - (0x1EE4D, 'M', u'ن'), - (0x1EE4E, 'M', u'س'), - (0x1EE4F, 'M', u'ع'), - (0x1EE50, 'X'), - (0x1EE51, 'M', u'ص'), - (0x1EE52, 'M', u'ق'), - (0x1EE53, 'X'), - (0x1EE54, 'M', u'ش'), - (0x1EE55, 'X'), - (0x1EE57, 'M', u'خ'), - (0x1EE58, 'X'), - (0x1EE59, 'M', u'ض'), - (0x1EE5A, 'X'), - (0x1EE5B, 'M', u'غ'), - (0x1EE5C, 'X'), - (0x1EE5D, 'M', u'ں'), - (0x1EE5E, 'X'), - (0x1EE5F, 'M', u'ٯ'), - (0x1EE60, 'X'), - (0x1EE61, 'M', u'ب'), - (0x1EE62, 'M', u'ج'), - (0x1EE63, 'X'), - (0x1EE64, 'M', u'ه'), - (0x1EE65, 'X'), - (0x1EE67, 'M', u'ح'), - (0x1EE68, 'M', u'ط'), - (0x1EE69, 'M', u'ي'), - (0x1EE6A, 'M', u'ك'), - (0x1EE6B, 'X'), - (0x1EE6C, 'M', u'م'), - (0x1EE6D, 'M', u'ن'), - (0x1EE6E, 'M', u'س'), - (0x1EE6F, 'M', u'ع'), - (0x1EE70, 'M', u'ف'), - (0x1EE71, 'M', u'ص'), - (0x1EE72, 'M', u'ق'), - (0x1EE73, 'X'), - (0x1EE74, 'M', u'ش'), - (0x1EE75, 'M', u'ت'), - (0x1EE76, 'M', u'ث'), - (0x1EE77, 'M', u'خ'), - (0x1EE78, 'X'), - (0x1EE79, 'M', u'ض'), - (0x1EE7A, 'M', u'ظ'), - (0x1EE7B, 'M', u'غ'), - (0x1EE7C, 'M', u'ٮ'), - (0x1EE7D, 'X'), - (0x1EE7E, 'M', u'ڡ'), - (0x1EE7F, 'X'), - (0x1EE80, 'M', u'ا'), - (0x1EE81, 'M', u'ب'), - (0x1EE82, 'M', u'ج'), - (0x1EE83, 'M', u'د'), - (0x1EE84, 'M', u'ه'), - (0x1EE85, 'M', u'و'), - (0x1EE86, 'M', u'ز'), - (0x1EE87, 'M', u'ح'), - (0x1EE88, 'M', u'ط'), - (0x1EE89, 'M', u'ي'), - (0x1EE8A, 'X'), - (0x1EE8B, 'M', u'ل'), - (0x1EE8C, 'M', u'م'), - (0x1EE8D, 'M', u'ن'), - (0x1EE8E, 'M', u'س'), - (0x1EE8F, 'M', u'ع'), - (0x1EE90, 'M', u'ف'), - (0x1EE91, 'M', u'ص'), - (0x1EE92, 'M', u'ق'), - (0x1EE93, 'M', u'ر'), - (0x1EE94, 'M', u'ش'), - (0x1EE95, 'M', u'ت'), - (0x1EE96, 'M', u'ث'), - (0x1EE97, 'M', u'خ'), - (0x1EE98, 'M', u'ذ'), - (0x1EE99, 'M', u'ض'), - (0x1EE9A, 'M', u'ظ'), - (0x1EE9B, 'M', u'غ'), - (0x1EE9C, 'X'), - (0x1EEA1, 'M', u'ب'), - (0x1EEA2, 'M', u'ج'), - (0x1EEA3, 'M', u'د'), - (0x1EEA4, 'X'), - ] - -def _seg_65(): - return [ - (0x1EEA5, 'M', u'و'), - (0x1EEA6, 'M', u'ز'), - (0x1EEA7, 'M', u'ح'), - (0x1EEA8, 'M', u'ط'), - (0x1EEA9, 'M', u'ي'), - (0x1EEAA, 'X'), - (0x1EEAB, 'M', u'ل'), - (0x1EEAC, 'M', u'م'), - (0x1EEAD, 'M', u'ن'), - (0x1EEAE, 'M', u'س'), - (0x1EEAF, 'M', u'ع'), - (0x1EEB0, 'M', u'ف'), - (0x1EEB1, 'M', u'ص'), - (0x1EEB2, 'M', u'ق'), - (0x1EEB3, 'M', u'ر'), - (0x1EEB4, 'M', u'ش'), - (0x1EEB5, 'M', u'ت'), - (0x1EEB6, 'M', u'ث'), - (0x1EEB7, 'M', u'خ'), - (0x1EEB8, 'M', u'ذ'), - (0x1EEB9, 'M', u'ض'), - (0x1EEBA, 'M', u'ظ'), - (0x1EEBB, 'M', u'غ'), - (0x1EEBC, 'X'), - (0x1EEF0, 'V'), - (0x1EEF2, 'X'), - (0x1F000, 'V'), - (0x1F02C, 'X'), - (0x1F030, 'V'), - (0x1F094, 'X'), - (0x1F0A0, 'V'), - (0x1F0AF, 'X'), - (0x1F0B1, 'V'), - (0x1F0BF, 'X'), - (0x1F0C1, 'V'), - (0x1F0D0, 'X'), - (0x1F0D1, 'V'), - (0x1F0E0, 'X'), - (0x1F101, '3', u'0,'), - (0x1F102, '3', u'1,'), - (0x1F103, '3', u'2,'), - (0x1F104, '3', u'3,'), - (0x1F105, '3', u'4,'), - (0x1F106, '3', u'5,'), - (0x1F107, '3', u'6,'), - (0x1F108, '3', u'7,'), - (0x1F109, '3', u'8,'), - (0x1F10A, '3', u'9,'), - (0x1F10B, 'X'), - (0x1F110, '3', u'(a)'), - (0x1F111, '3', u'(b)'), - (0x1F112, '3', u'(c)'), - (0x1F113, '3', u'(d)'), - (0x1F114, '3', u'(e)'), - (0x1F115, '3', u'(f)'), - (0x1F116, '3', u'(g)'), - (0x1F117, '3', u'(h)'), - (0x1F118, '3', u'(i)'), - (0x1F119, '3', u'(j)'), - (0x1F11A, '3', u'(k)'), - (0x1F11B, '3', u'(l)'), - (0x1F11C, '3', u'(m)'), - (0x1F11D, '3', u'(n)'), - (0x1F11E, '3', u'(o)'), - (0x1F11F, 
'3', u'(p)'), - (0x1F120, '3', u'(q)'), - (0x1F121, '3', u'(r)'), - (0x1F122, '3', u'(s)'), - (0x1F123, '3', u'(t)'), - (0x1F124, '3', u'(u)'), - (0x1F125, '3', u'(v)'), - (0x1F126, '3', u'(w)'), - (0x1F127, '3', u'(x)'), - (0x1F128, '3', u'(y)'), - (0x1F129, '3', u'(z)'), - (0x1F12A, 'M', u'〔s〕'), - (0x1F12B, 'M', u'c'), - (0x1F12C, 'M', u'r'), - (0x1F12D, 'M', u'cd'), - (0x1F12E, 'M', u'wz'), - (0x1F12F, 'X'), - (0x1F130, 'M', u'a'), - (0x1F131, 'M', u'b'), - (0x1F132, 'M', u'c'), - (0x1F133, 'M', u'd'), - (0x1F134, 'M', u'e'), - (0x1F135, 'M', u'f'), - (0x1F136, 'M', u'g'), - (0x1F137, 'M', u'h'), - (0x1F138, 'M', u'i'), - (0x1F139, 'M', u'j'), - (0x1F13A, 'M', u'k'), - (0x1F13B, 'M', u'l'), - (0x1F13C, 'M', u'm'), - (0x1F13D, 'M', u'n'), - (0x1F13E, 'M', u'o'), - (0x1F13F, 'M', u'p'), - (0x1F140, 'M', u'q'), - (0x1F141, 'M', u'r'), - (0x1F142, 'M', u's'), - ] - -def _seg_66(): - return [ - (0x1F143, 'M', u't'), - (0x1F144, 'M', u'u'), - (0x1F145, 'M', u'v'), - (0x1F146, 'M', u'w'), - (0x1F147, 'M', u'x'), - (0x1F148, 'M', u'y'), - (0x1F149, 'M', u'z'), - (0x1F14A, 'M', u'hv'), - (0x1F14B, 'M', u'mv'), - (0x1F14C, 'M', u'sd'), - (0x1F14D, 'M', u'ss'), - (0x1F14E, 'M', u'ppv'), - (0x1F14F, 'M', u'wc'), - (0x1F150, 'V'), - (0x1F16A, 'M', u'mc'), - (0x1F16B, 'M', u'md'), - (0x1F16C, 'X'), - (0x1F170, 'V'), - (0x1F190, 'M', u'dj'), - (0x1F191, 'V'), - (0x1F19B, 'X'), - (0x1F1E6, 'V'), - (0x1F200, 'M', u'ほか'), - (0x1F201, 'M', u'ココ'), - (0x1F202, 'M', u'サ'), - (0x1F203, 'X'), - (0x1F210, 'M', u'手'), - (0x1F211, 'M', u'字'), - (0x1F212, 'M', u'双'), - (0x1F213, 'M', u'デ'), - (0x1F214, 'M', u'二'), - (0x1F215, 'M', u'多'), - (0x1F216, 'M', u'解'), - (0x1F217, 'M', u'天'), - (0x1F218, 'M', u'交'), - (0x1F219, 'M', u'映'), - (0x1F21A, 'M', u'無'), - (0x1F21B, 'M', u'料'), - (0x1F21C, 'M', u'前'), - (0x1F21D, 'M', u'後'), - (0x1F21E, 'M', u'再'), - (0x1F21F, 'M', u'新'), - (0x1F220, 'M', u'初'), - (0x1F221, 'M', u'終'), - (0x1F222, 'M', u'生'), - (0x1F223, 'M', u'販'), - (0x1F224, 'M', u'声'), - (0x1F225, 'M', u'吹'), - (0x1F226, 'M', u'演'), - (0x1F227, 'M', u'投'), - (0x1F228, 'M', u'捕'), - (0x1F229, 'M', u'一'), - (0x1F22A, 'M', u'三'), - (0x1F22B, 'M', u'遊'), - (0x1F22C, 'M', u'左'), - (0x1F22D, 'M', u'中'), - (0x1F22E, 'M', u'右'), - (0x1F22F, 'M', u'指'), - (0x1F230, 'M', u'走'), - (0x1F231, 'M', u'打'), - (0x1F232, 'M', u'禁'), - (0x1F233, 'M', u'空'), - (0x1F234, 'M', u'合'), - (0x1F235, 'M', u'満'), - (0x1F236, 'M', u'有'), - (0x1F237, 'M', u'月'), - (0x1F238, 'M', u'申'), - (0x1F239, 'M', u'割'), - (0x1F23A, 'M', u'営'), - (0x1F23B, 'X'), - (0x1F240, 'M', u'〔本〕'), - (0x1F241, 'M', u'〔三〕'), - (0x1F242, 'M', u'〔二〕'), - (0x1F243, 'M', u'〔安〕'), - (0x1F244, 'M', u'〔点〕'), - (0x1F245, 'M', u'〔打〕'), - (0x1F246, 'M', u'〔盗〕'), - (0x1F247, 'M', u'〔勝〕'), - (0x1F248, 'M', u'〔敗〕'), - (0x1F249, 'X'), - (0x1F250, 'M', u'得'), - (0x1F251, 'M', u'可'), - (0x1F252, 'X'), - (0x1F300, 'V'), - (0x1F321, 'X'), - (0x1F330, 'V'), - (0x1F336, 'X'), - (0x1F337, 'V'), - (0x1F37D, 'X'), - (0x1F380, 'V'), - (0x1F394, 'X'), - (0x1F3A0, 'V'), - (0x1F3C5, 'X'), - (0x1F3C6, 'V'), - (0x1F3CB, 'X'), - (0x1F3E0, 'V'), - (0x1F3F1, 'X'), - (0x1F400, 'V'), - (0x1F43F, 'X'), - (0x1F440, 'V'), - ] - -def _seg_67(): - return [ - (0x1F441, 'X'), - (0x1F442, 'V'), - (0x1F4F8, 'X'), - (0x1F4F9, 'V'), - (0x1F4FD, 'X'), - (0x1F500, 'V'), - (0x1F53E, 'X'), - (0x1F540, 'V'), - (0x1F544, 'X'), - (0x1F550, 'V'), - (0x1F568, 'X'), - (0x1F5FB, 'V'), - (0x1F641, 'X'), - (0x1F645, 'V'), - (0x1F650, 'X'), - (0x1F680, 'V'), - (0x1F6C6, 'X'), - (0x1F700, 'V'), - (0x1F774, 'X'), - 
(0x20000, 'V'), - (0x2A6D7, 'X'), - (0x2A700, 'V'), - (0x2B735, 'X'), - (0x2B740, 'V'), - (0x2B81E, 'X'), - (0x2F800, 'M', u'丽'), - (0x2F801, 'M', u'丸'), - (0x2F802, 'M', u'乁'), - (0x2F803, 'M', u'𠄢'), - (0x2F804, 'M', u'你'), - (0x2F805, 'M', u'侮'), - (0x2F806, 'M', u'侻'), - (0x2F807, 'M', u'倂'), - (0x2F808, 'M', u'偺'), - (0x2F809, 'M', u'備'), - (0x2F80A, 'M', u'僧'), - (0x2F80B, 'M', u'像'), - (0x2F80C, 'M', u'㒞'), - (0x2F80D, 'M', u'𠘺'), - (0x2F80E, 'M', u'免'), - (0x2F80F, 'M', u'兔'), - (0x2F810, 'M', u'兤'), - (0x2F811, 'M', u'具'), - (0x2F812, 'M', u'𠔜'), - (0x2F813, 'M', u'㒹'), - (0x2F814, 'M', u'內'), - (0x2F815, 'M', u'再'), - (0x2F816, 'M', u'𠕋'), - (0x2F817, 'M', u'冗'), - (0x2F818, 'M', u'冤'), - (0x2F819, 'M', u'仌'), - (0x2F81A, 'M', u'冬'), - (0x2F81B, 'M', u'况'), - (0x2F81C, 'M', u'𩇟'), - (0x2F81D, 'M', u'凵'), - (0x2F81E, 'M', u'刃'), - (0x2F81F, 'M', u'㓟'), - (0x2F820, 'M', u'刻'), - (0x2F821, 'M', u'剆'), - (0x2F822, 'M', u'割'), - (0x2F823, 'M', u'剷'), - (0x2F824, 'M', u'㔕'), - (0x2F825, 'M', u'勇'), - (0x2F826, 'M', u'勉'), - (0x2F827, 'M', u'勤'), - (0x2F828, 'M', u'勺'), - (0x2F829, 'M', u'包'), - (0x2F82A, 'M', u'匆'), - (0x2F82B, 'M', u'北'), - (0x2F82C, 'M', u'卉'), - (0x2F82D, 'M', u'卑'), - (0x2F82E, 'M', u'博'), - (0x2F82F, 'M', u'即'), - (0x2F830, 'M', u'卽'), - (0x2F831, 'M', u'卿'), - (0x2F834, 'M', u'𠨬'), - (0x2F835, 'M', u'灰'), - (0x2F836, 'M', u'及'), - (0x2F837, 'M', u'叟'), - (0x2F838, 'M', u'𠭣'), - (0x2F839, 'M', u'叫'), - (0x2F83A, 'M', u'叱'), - (0x2F83B, 'M', u'吆'), - (0x2F83C, 'M', u'咞'), - (0x2F83D, 'M', u'吸'), - (0x2F83E, 'M', u'呈'), - (0x2F83F, 'M', u'周'), - (0x2F840, 'M', u'咢'), - (0x2F841, 'M', u'哶'), - (0x2F842, 'M', u'唐'), - (0x2F843, 'M', u'啓'), - (0x2F844, 'M', u'啣'), - (0x2F845, 'M', u'善'), - (0x2F847, 'M', u'喙'), - (0x2F848, 'M', u'喫'), - (0x2F849, 'M', u'喳'), - (0x2F84A, 'M', u'嗂'), - (0x2F84B, 'M', u'圖'), - (0x2F84C, 'M', u'嘆'), - (0x2F84D, 'M', u'圗'), - ] - -def _seg_68(): - return [ - (0x2F84E, 'M', u'噑'), - (0x2F84F, 'M', u'噴'), - (0x2F850, 'M', u'切'), - (0x2F851, 'M', u'壮'), - (0x2F852, 'M', u'城'), - (0x2F853, 'M', u'埴'), - (0x2F854, 'M', u'堍'), - (0x2F855, 'M', u'型'), - (0x2F856, 'M', u'堲'), - (0x2F857, 'M', u'報'), - (0x2F858, 'M', u'墬'), - (0x2F859, 'M', u'𡓤'), - (0x2F85A, 'M', u'売'), - (0x2F85B, 'M', u'壷'), - (0x2F85C, 'M', u'夆'), - (0x2F85D, 'M', u'多'), - (0x2F85E, 'M', u'夢'), - (0x2F85F, 'M', u'奢'), - (0x2F860, 'M', u'𡚨'), - (0x2F861, 'M', u'𡛪'), - (0x2F862, 'M', u'姬'), - (0x2F863, 'M', u'娛'), - (0x2F864, 'M', u'娧'), - (0x2F865, 'M', u'姘'), - (0x2F866, 'M', u'婦'), - (0x2F867, 'M', u'㛮'), - (0x2F868, 'X'), - (0x2F869, 'M', u'嬈'), - (0x2F86A, 'M', u'嬾'), - (0x2F86C, 'M', u'𡧈'), - (0x2F86D, 'M', u'寃'), - (0x2F86E, 'M', u'寘'), - (0x2F86F, 'M', u'寧'), - (0x2F870, 'M', u'寳'), - (0x2F871, 'M', u'𡬘'), - (0x2F872, 'M', u'寿'), - (0x2F873, 'M', u'将'), - (0x2F874, 'X'), - (0x2F875, 'M', u'尢'), - (0x2F876, 'M', u'㞁'), - (0x2F877, 'M', u'屠'), - (0x2F878, 'M', u'屮'), - (0x2F879, 'M', u'峀'), - (0x2F87A, 'M', u'岍'), - (0x2F87B, 'M', u'𡷤'), - (0x2F87C, 'M', u'嵃'), - (0x2F87D, 'M', u'𡷦'), - (0x2F87E, 'M', u'嵮'), - (0x2F87F, 'M', u'嵫'), - (0x2F880, 'M', u'嵼'), - (0x2F881, 'M', u'巡'), - (0x2F882, 'M', u'巢'), - (0x2F883, 'M', u'㠯'), - (0x2F884, 'M', u'巽'), - (0x2F885, 'M', u'帨'), - (0x2F886, 'M', u'帽'), - (0x2F887, 'M', u'幩'), - (0x2F888, 'M', u'㡢'), - (0x2F889, 'M', u'𢆃'), - (0x2F88A, 'M', u'㡼'), - (0x2F88B, 'M', u'庰'), - (0x2F88C, 'M', u'庳'), - (0x2F88D, 'M', u'庶'), - (0x2F88E, 'M', u'廊'), - (0x2F88F, 'M', u'𪎒'), - (0x2F890, 'M', u'廾'), - (0x2F891, 'M', u'𢌱'), - (0x2F893, 'M', 
u'舁'), - (0x2F894, 'M', u'弢'), - (0x2F896, 'M', u'㣇'), - (0x2F897, 'M', u'𣊸'), - (0x2F898, 'M', u'𦇚'), - (0x2F899, 'M', u'形'), - (0x2F89A, 'M', u'彫'), - (0x2F89B, 'M', u'㣣'), - (0x2F89C, 'M', u'徚'), - (0x2F89D, 'M', u'忍'), - (0x2F89E, 'M', u'志'), - (0x2F89F, 'M', u'忹'), - (0x2F8A0, 'M', u'悁'), - (0x2F8A1, 'M', u'㤺'), - (0x2F8A2, 'M', u'㤜'), - (0x2F8A3, 'M', u'悔'), - (0x2F8A4, 'M', u'𢛔'), - (0x2F8A5, 'M', u'惇'), - (0x2F8A6, 'M', u'慈'), - (0x2F8A7, 'M', u'慌'), - (0x2F8A8, 'M', u'慎'), - (0x2F8A9, 'M', u'慌'), - (0x2F8AA, 'M', u'慺'), - (0x2F8AB, 'M', u'憎'), - (0x2F8AC, 'M', u'憲'), - (0x2F8AD, 'M', u'憤'), - (0x2F8AE, 'M', u'憯'), - (0x2F8AF, 'M', u'懞'), - (0x2F8B0, 'M', u'懲'), - (0x2F8B1, 'M', u'懶'), - (0x2F8B2, 'M', u'成'), - (0x2F8B3, 'M', u'戛'), - (0x2F8B4, 'M', u'扝'), - ] - -def _seg_69(): - return [ - (0x2F8B5, 'M', u'抱'), - (0x2F8B6, 'M', u'拔'), - (0x2F8B7, 'M', u'捐'), - (0x2F8B8, 'M', u'𢬌'), - (0x2F8B9, 'M', u'挽'), - (0x2F8BA, 'M', u'拼'), - (0x2F8BB, 'M', u'捨'), - (0x2F8BC, 'M', u'掃'), - (0x2F8BD, 'M', u'揤'), - (0x2F8BE, 'M', u'𢯱'), - (0x2F8BF, 'M', u'搢'), - (0x2F8C0, 'M', u'揅'), - (0x2F8C1, 'M', u'掩'), - (0x2F8C2, 'M', u'㨮'), - (0x2F8C3, 'M', u'摩'), - (0x2F8C4, 'M', u'摾'), - (0x2F8C5, 'M', u'撝'), - (0x2F8C6, 'M', u'摷'), - (0x2F8C7, 'M', u'㩬'), - (0x2F8C8, 'M', u'敏'), - (0x2F8C9, 'M', u'敬'), - (0x2F8CA, 'M', u'𣀊'), - (0x2F8CB, 'M', u'旣'), - (0x2F8CC, 'M', u'書'), - (0x2F8CD, 'M', u'晉'), - (0x2F8CE, 'M', u'㬙'), - (0x2F8CF, 'M', u'暑'), - (0x2F8D0, 'M', u'㬈'), - (0x2F8D1, 'M', u'㫤'), - (0x2F8D2, 'M', u'冒'), - (0x2F8D3, 'M', u'冕'), - (0x2F8D4, 'M', u'最'), - (0x2F8D5, 'M', u'暜'), - (0x2F8D6, 'M', u'肭'), - (0x2F8D7, 'M', u'䏙'), - (0x2F8D8, 'M', u'朗'), - (0x2F8D9, 'M', u'望'), - (0x2F8DA, 'M', u'朡'), - (0x2F8DB, 'M', u'杞'), - (0x2F8DC, 'M', u'杓'), - (0x2F8DD, 'M', u'𣏃'), - (0x2F8DE, 'M', u'㭉'), - (0x2F8DF, 'M', u'柺'), - (0x2F8E0, 'M', u'枅'), - (0x2F8E1, 'M', u'桒'), - (0x2F8E2, 'M', u'梅'), - (0x2F8E3, 'M', u'𣑭'), - (0x2F8E4, 'M', u'梎'), - (0x2F8E5, 'M', u'栟'), - (0x2F8E6, 'M', u'椔'), - (0x2F8E7, 'M', u'㮝'), - (0x2F8E8, 'M', u'楂'), - (0x2F8E9, 'M', u'榣'), - (0x2F8EA, 'M', u'槪'), - (0x2F8EB, 'M', u'檨'), - (0x2F8EC, 'M', u'𣚣'), - (0x2F8ED, 'M', u'櫛'), - (0x2F8EE, 'M', u'㰘'), - (0x2F8EF, 'M', u'次'), - (0x2F8F0, 'M', u'𣢧'), - (0x2F8F1, 'M', u'歔'), - (0x2F8F2, 'M', u'㱎'), - (0x2F8F3, 'M', u'歲'), - (0x2F8F4, 'M', u'殟'), - (0x2F8F5, 'M', u'殺'), - (0x2F8F6, 'M', u'殻'), - (0x2F8F7, 'M', u'𣪍'), - (0x2F8F8, 'M', u'𡴋'), - (0x2F8F9, 'M', u'𣫺'), - (0x2F8FA, 'M', u'汎'), - (0x2F8FB, 'M', u'𣲼'), - (0x2F8FC, 'M', u'沿'), - (0x2F8FD, 'M', u'泍'), - (0x2F8FE, 'M', u'汧'), - (0x2F8FF, 'M', u'洖'), - (0x2F900, 'M', u'派'), - (0x2F901, 'M', u'海'), - (0x2F902, 'M', u'流'), - (0x2F903, 'M', u'浩'), - (0x2F904, 'M', u'浸'), - (0x2F905, 'M', u'涅'), - (0x2F906, 'M', u'𣴞'), - (0x2F907, 'M', u'洴'), - (0x2F908, 'M', u'港'), - (0x2F909, 'M', u'湮'), - (0x2F90A, 'M', u'㴳'), - (0x2F90B, 'M', u'滋'), - (0x2F90C, 'M', u'滇'), - (0x2F90D, 'M', u'𣻑'), - (0x2F90E, 'M', u'淹'), - (0x2F90F, 'M', u'潮'), - (0x2F910, 'M', u'𣽞'), - (0x2F911, 'M', u'𣾎'), - (0x2F912, 'M', u'濆'), - (0x2F913, 'M', u'瀹'), - (0x2F914, 'M', u'瀞'), - (0x2F915, 'M', u'瀛'), - (0x2F916, 'M', u'㶖'), - (0x2F917, 'M', u'灊'), - (0x2F918, 'M', u'災'), - ] - -def _seg_70(): - return [ - (0x2F919, 'M', u'灷'), - (0x2F91A, 'M', u'炭'), - (0x2F91B, 'M', u'𠔥'), - (0x2F91C, 'M', u'煅'), - (0x2F91D, 'M', u'𤉣'), - (0x2F91E, 'M', u'熜'), - (0x2F91F, 'X'), - (0x2F920, 'M', u'爨'), - (0x2F921, 'M', u'爵'), - (0x2F922, 'M', u'牐'), - (0x2F923, 'M', u'𤘈'), - (0x2F924, 'M', u'犀'), - (0x2F925, 'M', u'犕'), - 
(0x2F926, 'M', u'𤜵'), - (0x2F927, 'M', u'𤠔'), - (0x2F928, 'M', u'獺'), - (0x2F929, 'M', u'王'), - (0x2F92A, 'M', u'㺬'), - (0x2F92B, 'M', u'玥'), - (0x2F92C, 'M', u'㺸'), - (0x2F92E, 'M', u'瑇'), - (0x2F92F, 'M', u'瑜'), - (0x2F930, 'M', u'瑱'), - (0x2F931, 'M', u'璅'), - (0x2F932, 'M', u'瓊'), - (0x2F933, 'M', u'㼛'), - (0x2F934, 'M', u'甤'), - (0x2F935, 'M', u'𤰶'), - (0x2F936, 'M', u'甾'), - (0x2F937, 'M', u'𤲒'), - (0x2F938, 'M', u'異'), - (0x2F939, 'M', u'𢆟'), - (0x2F93A, 'M', u'瘐'), - (0x2F93B, 'M', u'𤾡'), - (0x2F93C, 'M', u'𤾸'), - (0x2F93D, 'M', u'𥁄'), - (0x2F93E, 'M', u'㿼'), - (0x2F93F, 'M', u'䀈'), - (0x2F940, 'M', u'直'), - (0x2F941, 'M', u'𥃳'), - (0x2F942, 'M', u'𥃲'), - (0x2F943, 'M', u'𥄙'), - (0x2F944, 'M', u'𥄳'), - (0x2F945, 'M', u'眞'), - (0x2F946, 'M', u'真'), - (0x2F948, 'M', u'睊'), - (0x2F949, 'M', u'䀹'), - (0x2F94A, 'M', u'瞋'), - (0x2F94B, 'M', u'䁆'), - (0x2F94C, 'M', u'䂖'), - (0x2F94D, 'M', u'𥐝'), - (0x2F94E, 'M', u'硎'), - (0x2F94F, 'M', u'碌'), - (0x2F950, 'M', u'磌'), - (0x2F951, 'M', u'䃣'), - (0x2F952, 'M', u'𥘦'), - (0x2F953, 'M', u'祖'), - (0x2F954, 'M', u'𥚚'), - (0x2F955, 'M', u'𥛅'), - (0x2F956, 'M', u'福'), - (0x2F957, 'M', u'秫'), - (0x2F958, 'M', u'䄯'), - (0x2F959, 'M', u'穀'), - (0x2F95A, 'M', u'穊'), - (0x2F95B, 'M', u'穏'), - (0x2F95C, 'M', u'𥥼'), - (0x2F95D, 'M', u'𥪧'), - (0x2F95F, 'X'), - (0x2F960, 'M', u'䈂'), - (0x2F961, 'M', u'𥮫'), - (0x2F962, 'M', u'篆'), - (0x2F963, 'M', u'築'), - (0x2F964, 'M', u'䈧'), - (0x2F965, 'M', u'𥲀'), - (0x2F966, 'M', u'糒'), - (0x2F967, 'M', u'䊠'), - (0x2F968, 'M', u'糨'), - (0x2F969, 'M', u'糣'), - (0x2F96A, 'M', u'紀'), - (0x2F96B, 'M', u'𥾆'), - (0x2F96C, 'M', u'絣'), - (0x2F96D, 'M', u'䌁'), - (0x2F96E, 'M', u'緇'), - (0x2F96F, 'M', u'縂'), - (0x2F970, 'M', u'繅'), - (0x2F971, 'M', u'䌴'), - (0x2F972, 'M', u'𦈨'), - (0x2F973, 'M', u'𦉇'), - (0x2F974, 'M', u'䍙'), - (0x2F975, 'M', u'𦋙'), - (0x2F976, 'M', u'罺'), - (0x2F977, 'M', u'𦌾'), - (0x2F978, 'M', u'羕'), - (0x2F979, 'M', u'翺'), - (0x2F97A, 'M', u'者'), - (0x2F97B, 'M', u'𦓚'), - (0x2F97C, 'M', u'𦔣'), - (0x2F97D, 'M', u'聠'), - (0x2F97E, 'M', u'𦖨'), - (0x2F97F, 'M', u'聰'), - ] - -def _seg_71(): - return [ - (0x2F980, 'M', u'𣍟'), - (0x2F981, 'M', u'䏕'), - (0x2F982, 'M', u'育'), - (0x2F983, 'M', u'脃'), - (0x2F984, 'M', u'䐋'), - (0x2F985, 'M', u'脾'), - (0x2F986, 'M', u'媵'), - (0x2F987, 'M', u'𦞧'), - (0x2F988, 'M', u'𦞵'), - (0x2F989, 'M', u'𣎓'), - (0x2F98A, 'M', u'𣎜'), - (0x2F98B, 'M', u'舁'), - (0x2F98C, 'M', u'舄'), - (0x2F98D, 'M', u'辞'), - (0x2F98E, 'M', u'䑫'), - (0x2F98F, 'M', u'芑'), - (0x2F990, 'M', u'芋'), - (0x2F991, 'M', u'芝'), - (0x2F992, 'M', u'劳'), - (0x2F993, 'M', u'花'), - (0x2F994, 'M', u'芳'), - (0x2F995, 'M', u'芽'), - (0x2F996, 'M', u'苦'), - (0x2F997, 'M', u'𦬼'), - (0x2F998, 'M', u'若'), - (0x2F999, 'M', u'茝'), - (0x2F99A, 'M', u'荣'), - (0x2F99B, 'M', u'莭'), - (0x2F99C, 'M', u'茣'), - (0x2F99D, 'M', u'莽'), - (0x2F99E, 'M', u'菧'), - (0x2F99F, 'M', u'著'), - (0x2F9A0, 'M', u'荓'), - (0x2F9A1, 'M', u'菊'), - (0x2F9A2, 'M', u'菌'), - (0x2F9A3, 'M', u'菜'), - (0x2F9A4, 'M', u'𦰶'), - (0x2F9A5, 'M', u'𦵫'), - (0x2F9A6, 'M', u'𦳕'), - (0x2F9A7, 'M', u'䔫'), - (0x2F9A8, 'M', u'蓱'), - (0x2F9A9, 'M', u'蓳'), - (0x2F9AA, 'M', u'蔖'), - (0x2F9AB, 'M', u'𧏊'), - (0x2F9AC, 'M', u'蕤'), - (0x2F9AD, 'M', u'𦼬'), - (0x2F9AE, 'M', u'䕝'), - (0x2F9AF, 'M', u'䕡'), - (0x2F9B0, 'M', u'𦾱'), - (0x2F9B1, 'M', u'𧃒'), - (0x2F9B2, 'M', u'䕫'), - (0x2F9B3, 'M', u'虐'), - (0x2F9B4, 'M', u'虜'), - (0x2F9B5, 'M', u'虧'), - (0x2F9B6, 'M', u'虩'), - (0x2F9B7, 'M', u'蚩'), - (0x2F9B8, 'M', u'蚈'), - (0x2F9B9, 'M', u'蜎'), - (0x2F9BA, 'M', u'蛢'), - (0x2F9BB, 'M', u'蝹'), - 
(0x2F9BC, 'M', u'蜨'), - (0x2F9BD, 'M', u'蝫'), - (0x2F9BE, 'M', u'螆'), - (0x2F9BF, 'X'), - (0x2F9C0, 'M', u'蟡'), - (0x2F9C1, 'M', u'蠁'), - (0x2F9C2, 'M', u'䗹'), - (0x2F9C3, 'M', u'衠'), - (0x2F9C4, 'M', u'衣'), - (0x2F9C5, 'M', u'𧙧'), - (0x2F9C6, 'M', u'裗'), - (0x2F9C7, 'M', u'裞'), - (0x2F9C8, 'M', u'䘵'), - (0x2F9C9, 'M', u'裺'), - (0x2F9CA, 'M', u'㒻'), - (0x2F9CB, 'M', u'𧢮'), - (0x2F9CC, 'M', u'𧥦'), - (0x2F9CD, 'M', u'䚾'), - (0x2F9CE, 'M', u'䛇'), - (0x2F9CF, 'M', u'誠'), - (0x2F9D0, 'M', u'諭'), - (0x2F9D1, 'M', u'變'), - (0x2F9D2, 'M', u'豕'), - (0x2F9D3, 'M', u'𧲨'), - (0x2F9D4, 'M', u'貫'), - (0x2F9D5, 'M', u'賁'), - (0x2F9D6, 'M', u'贛'), - (0x2F9D7, 'M', u'起'), - (0x2F9D8, 'M', u'𧼯'), - (0x2F9D9, 'M', u'𠠄'), - (0x2F9DA, 'M', u'跋'), - (0x2F9DB, 'M', u'趼'), - (0x2F9DC, 'M', u'跰'), - (0x2F9DD, 'M', u'𠣞'), - (0x2F9DE, 'M', u'軔'), - (0x2F9DF, 'M', u'輸'), - (0x2F9E0, 'M', u'𨗒'), - (0x2F9E1, 'M', u'𨗭'), - (0x2F9E2, 'M', u'邔'), - (0x2F9E3, 'M', u'郱'), - ] - -def _seg_72(): - return [ - (0x2F9E4, 'M', u'鄑'), - (0x2F9E5, 'M', u'𨜮'), - (0x2F9E6, 'M', u'鄛'), - (0x2F9E7, 'M', u'鈸'), - (0x2F9E8, 'M', u'鋗'), - (0x2F9E9, 'M', u'鋘'), - (0x2F9EA, 'M', u'鉼'), - (0x2F9EB, 'M', u'鏹'), - (0x2F9EC, 'M', u'鐕'), - (0x2F9ED, 'M', u'𨯺'), - (0x2F9EE, 'M', u'開'), - (0x2F9EF, 'M', u'䦕'), - (0x2F9F0, 'M', u'閷'), - (0x2F9F1, 'M', u'𨵷'), - (0x2F9F2, 'M', u'䧦'), - (0x2F9F3, 'M', u'雃'), - (0x2F9F4, 'M', u'嶲'), - (0x2F9F5, 'M', u'霣'), - (0x2F9F6, 'M', u'𩅅'), - (0x2F9F7, 'M', u'𩈚'), - (0x2F9F8, 'M', u'䩮'), - (0x2F9F9, 'M', u'䩶'), - (0x2F9FA, 'M', u'韠'), - (0x2F9FB, 'M', u'𩐊'), - (0x2F9FC, 'M', u'䪲'), - (0x2F9FD, 'M', u'𩒖'), - (0x2F9FE, 'M', u'頋'), - (0x2FA00, 'M', u'頩'), - (0x2FA01, 'M', u'𩖶'), - (0x2FA02, 'M', u'飢'), - (0x2FA03, 'M', u'䬳'), - (0x2FA04, 'M', u'餩'), - (0x2FA05, 'M', u'馧'), - (0x2FA06, 'M', u'駂'), - (0x2FA07, 'M', u'駾'), - (0x2FA08, 'M', u'䯎'), - (0x2FA09, 'M', u'𩬰'), - (0x2FA0A, 'M', u'鬒'), - (0x2FA0B, 'M', u'鱀'), - (0x2FA0C, 'M', u'鳽'), - (0x2FA0D, 'M', u'䳎'), - (0x2FA0E, 'M', u'䳭'), - (0x2FA0F, 'M', u'鵧'), - (0x2FA10, 'M', u'𪃎'), - (0x2FA11, 'M', u'䳸'), - (0x2FA12, 'M', u'𪄅'), - (0x2FA13, 'M', u'𪈎'), - (0x2FA14, 'M', u'𪊑'), - (0x2FA15, 'M', u'麻'), - (0x2FA16, 'M', u'䵖'), - (0x2FA17, 'M', u'黹'), - (0x2FA18, 'M', u'黾'), - (0x2FA19, 'M', u'鼅'), - (0x2FA1A, 'M', u'鼏'), - (0x2FA1B, 'M', u'鼖'), - (0x2FA1C, 'M', u'鼻'), - (0x2FA1D, 'M', u'𪘀'), - (0x2FA1E, 'X'), - (0xE0100, 'I'), - (0xE01F0, 'X'), - ] - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() -) diff --git a/pype/vendor/requests/packages/urllib3/__init__.py 
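The uts46data.py file deleted above stores the UTS #46 mapping table as sorted tuples of (range start codepoint, status, optional replacement), split across `_seg_*` functions to keep each function small. Roughly how such a table is queried — a minimal sketch over a three-row toy table, not the real data and not idna's actual lookup code (`lookup` and `toy_table` are illustrative names):

from bisect import bisect_right

# Toy stand-in for the uts46data tuple above: sorted range starts with
# a status per range. 'M' = mapped to a replacement, 'V' = valid,
# 'X' = disallowed (the statuses used throughout the table).
toy_table = (
    (0x1D7CE, 'M', u'0'),   # MATHEMATICAL BOLD DIGIT ZERO -> '0'
    (0x1D7CF, 'M', u'1'),
    (0x1D800, 'X'),          # start of a disallowed range
)

def lookup(code_point, table):
    """Return the row governing code_point: the last row whose
    range start is <= code_point."""
    starts = [row[0] for row in table]
    idx = bisect_right(starts, code_point) - 1
    if idx < 0:
        raise ValueError('code point below table start')
    return table[idx]

print(lookup(0x1D7CE, toy_table))   # ('M', mapped to u'0')
print(lookup(0x1D9FF, toy_table))   # falls into the 'X' range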
b/pype/vendor/requests/packages/urllib3/__init__.py deleted file mode 100644 index 26493ecb9b..0000000000 --- a/pype/vendor/requests/packages/urllib3/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ -""" -urllib3 - Thread-safe connection pooling and re-using. -""" - -from __future__ import absolute_import -import warnings - -from .connectionpool import ( - HTTPConnectionPool, - HTTPSConnectionPool, - connection_from_url -) - -from . import exceptions -from .filepost import encode_multipart_formdata -from .poolmanager import PoolManager, ProxyManager, proxy_from_url -from .response import HTTPResponse -from .util.request import make_headers -from .util.url import get_host -from .util.timeout import Timeout -from .util.retry import Retry - - -# Set default logging handler to avoid "No handler found" warnings. -import logging -try: # Python 2.7+ - from logging import NullHandler -except ImportError: - class NullHandler(logging.Handler): - def emit(self, record): - pass - -__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' -__license__ = 'MIT' -__version__ = '1.21.1' - -__all__ = ( - 'HTTPConnectionPool', - 'HTTPSConnectionPool', - 'PoolManager', - 'ProxyManager', - 'HTTPResponse', - 'Retry', - 'Timeout', - 'add_stderr_logger', - 'connection_from_url', - 'disable_warnings', - 'encode_multipart_formdata', - 'get_host', - 'make_headers', - 'proxy_from_url', -) - -logging.getLogger(__name__).addHandler(NullHandler()) - - -def add_stderr_logger(level=logging.DEBUG): - """ - Helper for quickly adding a StreamHandler to the logger. Useful for - debugging. - - Returns the handler after adding it. - """ - # This method needs to be in this __init__.py to get the __name__ correct - # even if urllib3 is vendored within another package. - logger = logging.getLogger(__name__) - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) - logger.addHandler(handler) - logger.setLevel(level) - logger.debug('Added a stderr logging handler to logger: %s', __name__) - return handler - - -# ... Clean up. -del NullHandler - - -# All warning filters *must* be appended unless you're really certain that they -# shouldn't be: otherwise, it's very hard for users to use most Python -# mechanisms to silence them. -# SecurityWarning's always go off by default. -warnings.simplefilter('always', exceptions.SecurityWarning, append=True) -# SubjectAltNameWarning's should go off once per host -warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) -# InsecurePlatformWarning's don't vary between requests, so we keep it default. -warnings.simplefilter('default', exceptions.InsecurePlatformWarning, - append=True) -# SNIMissingWarnings should go off only once. -warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) - - -def disable_warnings(category=exceptions.HTTPWarning): - """ - Helper for quickly disabling all urllib3 warnings. 
- """ - warnings.simplefilter('ignore', category) diff --git a/pype/vendor/requests/packages/urllib3/_collections.py b/pype/vendor/requests/packages/urllib3/_collections.py deleted file mode 100644 index 4849ddecd9..0000000000 --- a/pype/vendor/requests/packages/urllib3/_collections.py +++ /dev/null @@ -1,314 +0,0 @@ -from __future__ import absolute_import -from collections import Mapping, MutableMapping -try: - from threading import RLock -except ImportError: # Platform-specific: No threads available - class RLock: - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - pass - - -try: # Python 2.7+ - from collections import OrderedDict -except ImportError: - from .packages.ordered_dict import OrderedDict -from .packages.six import iterkeys, itervalues, PY3 - - -__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] - - -_Null = object() - - -class RecentlyUsedContainer(MutableMapping): - """ - Provides a thread-safe dict-like container which maintains up to - ``maxsize`` keys while throwing away the least-recently-used keys beyond - ``maxsize``. - - :param maxsize: - Maximum number of recent elements to retain. - - :param dispose_func: - Every time an item is evicted from the container, - ``dispose_func(value)`` is called. Callback which will get called - """ - - ContainerCls = OrderedDict - - def __init__(self, maxsize=10, dispose_func=None): - self._maxsize = maxsize - self.dispose_func = dispose_func - - self._container = self.ContainerCls() - self.lock = RLock() - - def __getitem__(self, key): - # Re-insert the item, moving it to the end of the eviction line. - with self.lock: - item = self._container.pop(key) - self._container[key] = item - return item - - def __setitem__(self, key, value): - evicted_value = _Null - with self.lock: - # Possibly evict the existing value of 'key' - evicted_value = self._container.get(key, _Null) - self._container[key] = value - - # If we didn't evict an existing value, we might have to evict the - # least recently used item from the beginning of the container. - if len(self._container) > self._maxsize: - _key, evicted_value = self._container.popitem(last=False) - - if self.dispose_func and evicted_value is not _Null: - self.dispose_func(evicted_value) - - def __delitem__(self, key): - with self.lock: - value = self._container.pop(key) - - if self.dispose_func: - self.dispose_func(value) - - def __len__(self): - with self.lock: - return len(self._container) - - def __iter__(self): - raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') - - def clear(self): - with self.lock: - # Copy pointers to all values, then wipe the mapping - values = list(itervalues(self._container)) - self._container.clear() - - if self.dispose_func: - for value in values: - self.dispose_func(value) - - def keys(self): - with self.lock: - return list(iterkeys(self._container)) - - -class HTTPHeaderDict(MutableMapping): - """ - :param headers: - An iterable of field-value pairs. Must not contain multiple field names - when compared case-insensitively. - - :param kwargs: - Additional field-value pairs to pass in to ``dict.update``. - - A ``dict`` like container for storing HTTP Headers. - - Field names are stored and compared case-insensitively in compliance with - RFC 7230. Iteration provides the first case-sensitive key seen for each - case-insensitive pair. - - Using ``__setitem__`` syntax overwrites fields that compare equal - case-insensitively in order to maintain ``dict``'s api. 
For fields that - compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` - in a loop. - - If multiple fields that are equal case-insensitively are passed to the - constructor or ``.update``, the behavior is undefined and some will be - lost. - - >>> headers = HTTPHeaderDict() - >>> headers.add('Set-Cookie', 'foo=bar') - >>> headers.add('set-cookie', 'baz=quxx') - >>> headers['content-length'] = '7' - >>> headers['SET-cookie'] - 'foo=bar, baz=quxx' - >>> headers['Content-Length'] - '7' - """ - - def __init__(self, headers=None, **kwargs): - super(HTTPHeaderDict, self).__init__() - self._container = OrderedDict() - if headers is not None: - if isinstance(headers, HTTPHeaderDict): - self._copy_from(headers) - else: - self.extend(headers) - if kwargs: - self.extend(kwargs) - - def __setitem__(self, key, val): - self._container[key.lower()] = [key, val] - return self._container[key.lower()] - - def __getitem__(self, key): - val = self._container[key.lower()] - return ', '.join(val[1:]) - - def __delitem__(self, key): - del self._container[key.lower()] - - def __contains__(self, key): - return key.lower() in self._container - - def __eq__(self, other): - if not isinstance(other, Mapping) and not hasattr(other, 'keys'): - return False - if not isinstance(other, type(self)): - other = type(self)(other) - return (dict((k.lower(), v) for k, v in self.itermerged()) == - dict((k.lower(), v) for k, v in other.itermerged())) - - def __ne__(self, other): - return not self.__eq__(other) - - if not PY3: # Python 2 - iterkeys = MutableMapping.iterkeys - itervalues = MutableMapping.itervalues - - __marker = object() - - def __len__(self): - return len(self._container) - - def __iter__(self): - # Only provide the originally cased names - for vals in self._container.values(): - yield vals[0] - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - # Using the MutableMapping function directly fails due to the private marker. - # Using ordinary dict.pop would expose the internal structures. - # So let's reinvent the wheel. - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def discard(self, key): - try: - del self[key] - except KeyError: - pass - - def add(self, key, val): - """Adds a (name, value) pair, doesn't overwrite the value if it already - exists. - - >>> headers = HTTPHeaderDict(foo='bar') - >>> headers.add('Foo', 'baz') - >>> headers['foo'] - 'bar, baz' - """ - key_lower = key.lower() - new_vals = [key, val] - # Keep the common case aka no item present as fast as possible - vals = self._container.setdefault(key_lower, new_vals) - if new_vals is not vals: - vals.append(val) - - def extend(self, *args, **kwargs): - """Generic import function for any type of header-like object. 
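The pop() method deleted below relies on a private `__marker = object()` sentinel rather than None as the "no default given" flag, so callers can still pass `default=None` explicitly. A self-contained sketch of that sentinel idiom (`_MISSING` and `pop_header` are illustrative names, not urllib3 API):

_MISSING = object()

def pop_header(mapping, key, default=_MISSING):
    try:
        value = mapping[key]
    except KeyError:
        if default is _MISSING:
            raise               # no default supplied: propagate KeyError
        return default          # an explicit default, even default=None
    del mapping[key]
    return value

headers = {'content-length': '7'}
assert pop_header(headers, 'content-length') == '7'
assert pop_header(headers, 'x-missing', None) is None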
- Adapted version of MutableMapping.update in order to insert items - with self.add instead of self.__setitem__ - """ - if len(args) > 1: - raise TypeError("extend() takes at most 1 positional " - "arguments ({0} given)".format(len(args))) - other = args[0] if len(args) >= 1 else () - - if isinstance(other, HTTPHeaderDict): - for key, val in other.iteritems(): - self.add(key, val) - elif isinstance(other, Mapping): - for key in other: - self.add(key, other[key]) - elif hasattr(other, "keys"): - for key in other.keys(): - self.add(key, other[key]) - else: - for key, value in other: - self.add(key, value) - - for key, value in kwargs.items(): - self.add(key, value) - - def getlist(self, key): - """Returns a list of all the values for the named field. Returns an - empty list if the key doesn't exist.""" - try: - vals = self._container[key.lower()] - except KeyError: - return [] - else: - return vals[1:] - - # Backwards compatibility for httplib - getheaders = getlist - getallmatchingheaders = getlist - iget = getlist - - def __repr__(self): - return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) - - def _copy_from(self, other): - for key in other: - val = other.getlist(key) - if isinstance(val, list): - # Don't need to convert tuples - val = list(val) - self._container[key.lower()] = [key] + val - - def copy(self): - clone = type(self)() - clone._copy_from(self) - return clone - - def iteritems(self): - """Iterate over all header lines, including duplicate ones.""" - for key in self: - vals = self._container[key.lower()] - for val in vals[1:]: - yield vals[0], val - - def itermerged(self): - """Iterate over all headers, merging duplicate ones together.""" - for key in self: - val = self._container[key.lower()] - yield val[0], ', '.join(val[1:]) - - def items(self): - return list(self.iteritems()) - - @classmethod - def from_httplib(cls, message): # Python 2 - """Read headers from a Python 2 httplib message object.""" - # python2.7 does not expose a proper API for exporting multiheaders - # efficiently. This function re-reads raw lines from the message - # object and extracts the multiheaders properly. - headers = [] - - for line in message.headers: - if line.startswith((' ', '\t')): - key, value = headers[-1] - headers[-1] = (key, value + '\r\n' + line.rstrip()) - continue - - key, value = line.split(':', 1) - headers.append((key, value.strip())) - - return cls(headers) diff --git a/pype/vendor/requests/packages/urllib3/connection.py b/pype/vendor/requests/packages/urllib3/connection.py deleted file mode 100644 index c0d8329985..0000000000 --- a/pype/vendor/requests/packages/urllib3/connection.py +++ /dev/null @@ -1,373 +0,0 @@ -from __future__ import absolute_import -import datetime -import logging -import os -import sys -import socket -from socket import error as SocketError, timeout as SocketTimeout -import warnings -from .packages import six -from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection -from .packages.six.moves.http_client import HTTPException # noqa: F401 - -try: # Compiled with SSL? - import ssl - BaseSSLError = ssl.SSLError -except (ImportError, AttributeError): # Platform-specific: No SSL. - ssl = None - - class BaseSSLError(BaseException): - pass - - -try: # Python 3: - # Not a no-op, we're adding this to the namespace so it can be imported. 
- ConnectionError = ConnectionError -except NameError: # Python 2: - class ConnectionError(Exception): - pass - - -from .exceptions import ( - NewConnectionError, - ConnectTimeoutError, - SubjectAltNameWarning, - SystemTimeWarning, -) -from .packages.ssl_match_hostname import match_hostname, CertificateError - -from .util.ssl_ import ( - resolve_cert_reqs, - resolve_ssl_version, - assert_fingerprint, - create_urllib3_context, - ssl_wrap_socket -) - - -from .util import connection - -from ._collections import HTTPHeaderDict - -log = logging.getLogger(__name__) - -port_by_scheme = { - 'http': 80, - 'https': 443, -} - -# When updating RECENT_DATE, move it to -# within two years of the current date, and no -# earlier than 6 months ago. -RECENT_DATE = datetime.date(2016, 1, 1) - - -class DummyConnection(object): - """Used to detect a failed ConnectionCls import.""" - pass - - -class HTTPConnection(_HTTPConnection, object): - """ - Based on httplib.HTTPConnection but provides an extra constructor - backwards-compatibility layer between older and newer Pythons. - - Additional keyword parameters are used to configure attributes of the connection. - Accepted parameters include: - - - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - - ``source_address``: Set the source address for the current connection. - - .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x - - - ``socket_options``: Set specific options on the underlying socket. If not specified, then - defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling - Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. - - For example, if you wish to enable TCP Keep Alive in addition to the defaults, - you might pass:: - - HTTPConnection.default_socket_options + [ - (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), - ] - - Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). - """ - - default_port = port_by_scheme['http'] - - #: Disable Nagle's algorithm by default. - #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` - default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] - - #: Whether this connection verifies the host's certificate. - is_verified = False - - def __init__(self, *args, **kw): - if six.PY3: # Python 3 - kw.pop('strict', None) - - # Pre-set source_address in case we have an older Python like 2.6. - self.source_address = kw.get('source_address') - - if sys.version_info < (2, 7): # Python 2.6 - # _HTTPConnection on Python 2.6 will balk at this keyword arg, but - # not newer versions. We can still use it when creating a - # connection though, so we pop it *after* we have saved it as - # self.source_address. - kw.pop('source_address', None) - - #: The socket options provided by the user. If no options are - #: provided, we use the default options. - self.socket_options = kw.pop('socket_options', self.default_socket_options) - - # Superclass also sets self.source_address in Python 2.7+. - _HTTPConnection.__init__(self, *args, **kw) - - def _new_conn(self): - """ Establish a socket connection and set nodelay settings on it. - - :return: New socket connection. 
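The HTTPConnection docstring above shows extending default_socket_options with SO_KEEPALIVE. A runnable sketch of applying such an option list to a raw socket — the option tuples mirror the docstring, while `apply_socket_options` is our own illustrative helper, not urllib3 API:

import socket

# Same shape as the docstring's example: the default (Nagle disabled
# via TCP_NODELAY) plus TCP keep-alive on top.
DEFAULT_OPTS = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
opts = DEFAULT_OPTS + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]

def apply_socket_options(sock, options):
    for level, optname, value in options:
        sock.setsockopt(level, optname, value)

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
apply_socket_options(sock, opts)
sock.close()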
- """ - extra_kw = {} - if self.source_address: - extra_kw['source_address'] = self.source_address - - if self.socket_options: - extra_kw['socket_options'] = self.socket_options - - try: - conn = connection.create_connection( - (self.host, self.port), self.timeout, **extra_kw) - - except SocketTimeout as e: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - except SocketError as e: - raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) - - return conn - - def _prepare_conn(self, conn): - self.sock = conn - # the _tunnel_host attribute was added in python 2.6.3 (via - # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do - # not have them. - if getattr(self, '_tunnel_host', None): - # TODO: Fix tunnel so it doesn't depend on self.sock state. - self._tunnel() - # Mark this connection as not reusable - self.auto_open = 0 - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - def request_chunked(self, method, url, body=None, headers=None): - """ - Alternative to the common request method, which sends the - body with chunked encoding and not as one block - """ - headers = HTTPHeaderDict(headers if headers is not None else {}) - skip_accept_encoding = 'accept-encoding' in headers - skip_host = 'host' in headers - self.putrequest( - method, - url, - skip_accept_encoding=skip_accept_encoding, - skip_host=skip_host - ) - for header, value in headers.items(): - self.putheader(header, value) - if 'transfer-encoding' not in headers: - self.putheader('Transfer-Encoding', 'chunked') - self.endheaders() - - if body is not None: - stringish_types = six.string_types + (six.binary_type,) - if isinstance(body, stringish_types): - body = (body,) - for chunk in body: - if not chunk: - continue - if not isinstance(chunk, six.binary_type): - chunk = chunk.encode('utf8') - len_str = hex(len(chunk))[2:] - self.send(len_str.encode('utf-8')) - self.send(b'\r\n') - self.send(chunk) - self.send(b'\r\n') - - # After the if clause, to always have a closed body - self.send(b'0\r\n\r\n') - - -class HTTPSConnection(HTTPConnection): - default_port = port_by_scheme['https'] - - ssl_version = None - - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, **kw): - - HTTPConnection.__init__(self, host, port, strict=strict, - timeout=timeout, **kw) - - self.key_file = key_file - self.cert_file = cert_file - self.ssl_context = ssl_context - - # Required property for Google AppEngine 1.9.0 which otherwise causes - # HTTPS requests to go out as HTTP. (See Issue #356) - self._protocol = 'https' - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - if self.ssl_context is None: - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(None), - cert_reqs=resolve_cert_reqs(None), - ) - - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - ssl_context=self.ssl_context, - ) - - -class VerifiedHTTPSConnection(HTTPSConnection): - """ - Based on httplib.HTTPSConnection but wraps the socket with - SSL certification. 
- """ - cert_reqs = None - ca_certs = None - ca_cert_dir = None - ssl_version = None - assert_fingerprint = None - - def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None): - """ - This method should only be called once, before the connection is used. - """ - # If cert_reqs is not provided, we can try to guess. If the user gave - # us a cert database, we assume they want to use it: otherwise, if - # they gave us an SSL Context object we should use whatever is set for - # it. - if cert_reqs is None: - if ca_certs or ca_cert_dir: - cert_reqs = 'CERT_REQUIRED' - elif self.ssl_context is not None: - cert_reqs = self.ssl_context.verify_mode - - self.key_file = key_file - self.cert_file = cert_file - self.cert_reqs = cert_reqs - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - self.ca_certs = ca_certs and os.path.expanduser(ca_certs) - self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) - - def connect(self): - # Add certificate verification - conn = self._new_conn() - - hostname = self.host - if getattr(self, '_tunnel_host', None): - # _tunnel_host was added in Python 2.6.3 - # (See: http://hg.python.org/cpython/rev/0f57b30a152f) - - self.sock = conn - # Calls self._set_hostport(), so self.host is - # self._tunnel_host below. - self._tunnel() - # Mark this connection as not reusable - self.auto_open = 0 - - # Override the host with the one we're requesting data from. - hostname = self._tunnel_host - - is_time_off = datetime.date.today() < RECENT_DATE - if is_time_off: - warnings.warn(( - 'System time is way off (before {0}). This will probably ' - 'lead to SSL verification errors').format(RECENT_DATE), - SystemTimeWarning - ) - - # Wrap socket using verification with the root certs in - # trusted_root_certs - if self.ssl_context is None: - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(self.ssl_version), - cert_reqs=resolve_cert_reqs(self.cert_reqs), - ) - - context = self.ssl_context - context.verify_mode = resolve_cert_reqs(self.cert_reqs) - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - server_hostname=hostname, - ssl_context=context) - - if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif context.verify_mode != ssl.CERT_NONE \ - and not getattr(context, 'check_hostname', False) \ - and self.assert_hostname is not False: - # While urllib3 attempts to always turn off hostname matching from - # the TLS library, this cannot always be done. So we check whether - # the TLS Library still thinks it's matching hostnames. - cert = self.sock.getpeercert() - if not cert.get('subjectAltName', ()): - warnings.warn(( - 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' - '`commonName` for now. This feature is being removed by major browsers and ' - 'deprecated by RFC 2818. 
(See https://github.com/shazow/urllib3/issues/497 ' - 'for details.)'.format(hostname)), - SubjectAltNameWarning - ) - _match_hostname(cert, self.assert_hostname or hostname) - - self.is_verified = ( - context.verify_mode == ssl.CERT_REQUIRED or - self.assert_fingerprint is not None - ) - - -def _match_hostname(cert, asserted_hostname): - try: - match_hostname(cert, asserted_hostname) - except CertificateError as e: - log.error( - 'Certificate did not match expected hostname: %s. ' - 'Certificate: %s', asserted_hostname, cert - ) - # Add cert to exception and reraise so client code can inspect - # the cert when catching the exception, if they want to - e._peer_cert = cert - raise - - -if ssl: - # Make a copy for testing. - UnverifiedHTTPSConnection = HTTPSConnection - HTTPSConnection = VerifiedHTTPSConnection -else: - HTTPSConnection = DummyConnection diff --git a/pype/vendor/requests/packages/urllib3/connectionpool.py b/pype/vendor/requests/packages/urllib3/connectionpool.py deleted file mode 100644 index b4f1166a69..0000000000 --- a/pype/vendor/requests/packages/urllib3/connectionpool.py +++ /dev/null @@ -1,899 +0,0 @@ -from __future__ import absolute_import -import errno -import logging -import sys -import warnings - -from socket import error as SocketError, timeout as SocketTimeout -import socket - - -from .exceptions import ( - ClosedPoolError, - ProtocolError, - EmptyPoolError, - HeaderParsingError, - HostChangedError, - LocationValueError, - MaxRetryError, - ProxyError, - ReadTimeoutError, - SSLError, - TimeoutError, - InsecureRequestWarning, - NewConnectionError, -) -from .packages.ssl_match_hostname import CertificateError -from .packages import six -from .packages.six.moves import queue -from .connection import ( - port_by_scheme, - DummyConnection, - HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, - HTTPException, BaseSSLError, -) -from .request import RequestMethods -from .response import HTTPResponse - -from .util.connection import is_connection_dropped -from .util.request import set_file_position -from .util.response import assert_header_parsing -from .util.retry import Retry -from .util.timeout import Timeout -from .util.url import get_host, Url - - -if six.PY2: - # Queue is imported for side effects on MS Windows - import Queue as _unused_module_Queue # noqa: F401 - -xrange = six.moves.xrange - -log = logging.getLogger(__name__) - -_Default = object() - - -# Pool objects -class ConnectionPool(object): - """ - Base class for all connection pools, such as - :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. - """ - - scheme = None - QueueCls = queue.LifoQueue - - def __init__(self, host, port=None): - if not host: - raise LocationValueError("No host specified.") - - self.host = _ipv6_host(host).lower() - self.port = port - - def __str__(self): - return '%s(host=%r, port=%r)' % (type(self).__name__, - self.host, self.port) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - # Return False to re-raise any potential exceptions - return False - - def close(self): - """ - Close all pooled connections and disable the pool. - """ - pass - - -# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 -_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) - - -class HTTPConnectionPool(ConnectionPool, RequestMethods): - """ - Thread-safe connection pool for one host. - - :param host: - Host used for this HTTP Connection (e.g. 
"localhost"), passed into - :class:`httplib.HTTPConnection`. - - :param port: - Port used for this HTTP Connection (None is equivalent to 80), passed - into :class:`httplib.HTTPConnection`. - - :param strict: - Causes BadStatusLine to be raised if the status line can't be parsed - as a valid HTTP/1.0 or 1.1 status line, passed into - :class:`httplib.HTTPConnection`. - - .. note:: - Only works in Python 2. This parameter is ignored in Python 3. - - :param timeout: - Socket timeout in seconds for each individual connection. This can - be a float or integer, which sets the timeout for the HTTP request, - or an instance of :class:`urllib3.util.Timeout` which gives you more - fine-grained control over request timeouts. After the constructor has - been parsed, this is always a `urllib3.util.Timeout` object. - - :param maxsize: - Number of connections to save that can be reused. More than 1 is useful - in multithreaded situations. If ``block`` is set to False, more - connections will be created but they will not be saved once they've - been used. - - :param block: - If set to True, no more than ``maxsize`` connections will be used at - a time. When no free connections are available, the call will block - until a connection has been released. This is a useful side effect for - particular multithreaded situations where one does not want to use more - than maxsize connections per host to prevent flooding. - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - - :param retries: - Retry configuration to use by default with requests in this pool. - - :param _proxy: - Parsed proxy URL, should not be used directly, instead, see - :class:`urllib3.connectionpool.ProxyManager`" - - :param _proxy_headers: - A dictionary with proxy headers, should not be used directly, - instead, see :class:`urllib3.connectionpool.ProxyManager`" - - :param \\**conn_kw: - Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, - :class:`urllib3.connection.HTTPSConnection` instances. - """ - - scheme = 'http' - ConnectionCls = HTTPConnection - ResponseCls = HTTPResponse - - def __init__(self, host, port=None, strict=False, - timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, retries=None, - _proxy=None, _proxy_headers=None, - **conn_kw): - ConnectionPool.__init__(self, host, port) - RequestMethods.__init__(self, headers) - - self.strict = strict - - if not isinstance(timeout, Timeout): - timeout = Timeout.from_float(timeout) - - if retries is None: - retries = Retry.DEFAULT - - self.timeout = timeout - self.retries = retries - - self.pool = self.QueueCls(maxsize) - self.block = block - - self.proxy = _proxy - self.proxy_headers = _proxy_headers or {} - - # Fill the queue up so that doing get() on it will block properly - for _ in xrange(maxsize): - self.pool.put(None) - - # These are mostly for testing and debugging purposes. - self.num_connections = 0 - self.num_requests = 0 - self.conn_kw = conn_kw - - if self.proxy: - # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. - # We cannot know if the user has added default socket options, so we cannot replace the - # list. - self.conn_kw.setdefault('socket_options', []) - - def _new_conn(self): - """ - Return a fresh :class:`HTTPConnection`. 
- """ - self.num_connections += 1 - log.debug("Starting new HTTP connection (%d): %s", - self.num_connections, self.host) - - conn = self.ConnectionCls(host=self.host, port=self.port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) - return conn - - def _get_conn(self, timeout=None): - """ - Get a connection. Will return a pooled connection if one is available. - - If no connections are available and :prop:`.block` is ``False``, then a - fresh connection is returned. - - :param timeout: - Seconds to wait before giving up and raising - :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and - :prop:`.block` is ``True``. - """ - conn = None - try: - conn = self.pool.get(block=self.block, timeout=timeout) - - except AttributeError: # self.pool is None - raise ClosedPoolError(self, "Pool is closed.") - - except queue.Empty: - if self.block: - raise EmptyPoolError(self, - "Pool reached maximum size and no more " - "connections are allowed.") - pass # Oh well, we'll create a new connection then - - # If this is a persistent connection, check if it got disconnected - if conn and is_connection_dropped(conn): - log.debug("Resetting dropped connection: %s", self.host) - conn.close() - if getattr(conn, 'auto_open', 1) == 0: - # This is a proxied connection that has been mutated by - # httplib._tunnel() and cannot be reused (since it would - # attempt to bypass the proxy) - conn = None - - return conn or self._new_conn() - - def _put_conn(self, conn): - """ - Put a connection back into the pool. - - :param conn: - Connection object for the current host and port as returned by - :meth:`._new_conn` or :meth:`._get_conn`. - - If the pool is already full, the connection is closed and discarded - because we exceeded maxsize. If connections are discarded frequently, - then maxsize should be increased. - - If the pool is closed, then the connection will be closed and discarded. - """ - try: - self.pool.put(conn, block=False) - return # Everything is dandy, done. - except AttributeError: - # self.pool is None. - pass - except queue.Full: - # This should never happen if self.block == True - log.warning( - "Connection pool is full, discarding connection: %s", - self.host) - - # Connection never got put back into the pool, close it. - if conn: - conn.close() - - def _validate_conn(self, conn): - """ - Called right before a request is made, after the socket is created. - """ - pass - - def _prepare_proxy(self, conn): - # Nothing to do for HTTP connections. - pass - - def _get_timeout(self, timeout): - """ Helper that always returns a :class:`urllib3.util.Timeout` """ - if timeout is _Default: - return self.timeout.clone() - - if isinstance(timeout, Timeout): - return timeout.clone() - else: - # User passed us an int/float. This is for backwards compatibility, - # can be removed later - return Timeout.from_float(timeout) - - def _raise_timeout(self, err, url, timeout_value): - """Is the error actually a timeout? Will raise a ReadTimeout or pass""" - - if isinstance(err, SocketTimeout): - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - # See the above comment about EAGAIN in Python 3. In Python 2 we have - # to specifically catch it and throw the timeout error - if hasattr(err, 'errno') and err.errno in _blocking_errnos: - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - # Catch possible read timeouts thrown as SSL errors. If not the - # case, rethrow the original. 
We need to do this because of: - # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - def _make_request(self, conn, method, url, timeout=_Default, chunked=False, - **httplib_request_kw): - """ - Perform a request on a given urllib connection object taken from our - pool. - - :param conn: - a connection from one of our connection pools - - :param timeout: - Socket timeout in seconds for the request. This can be a - float or integer, which will set the same timeout value for - the socket connect and the socket read, or an instance of - :class:`urllib3.util.Timeout`, which gives you more fine-grained - control over your timeouts. - """ - self.num_requests += 1 - - timeout_obj = self._get_timeout(timeout) - timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout - - # Trigger any extra validation we need to do. - try: - self._validate_conn(conn) - except (SocketTimeout, BaseSSLError) as e: - # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. - self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) - raise - - # conn.request() calls httplib.*.request, not the method in - # urllib3.request. It also calls makefile (recv) on the socket. - if chunked: - conn.request_chunked(method, url, **httplib_request_kw) - else: - conn.request(method, url, **httplib_request_kw) - - # Reset the timeout for the recv() on the socket - read_timeout = timeout_obj.read_timeout - - # App Engine doesn't have a sock attr - if getattr(conn, 'sock', None): - # In Python 3 socket.py will catch EAGAIN and return None when you - # try and read into the file pointer created by http.client, which - # instead raises a BadStatusLine exception. Instead of catching - # the exception and assuming all BadStatusLine exceptions are read - # timeouts, check for a zero timeout before making the request. - if read_timeout == 0: - raise ReadTimeoutError( - self, url, "Read timed out. (read timeout=%s)" % read_timeout) - if read_timeout is Timeout.DEFAULT_TIMEOUT: - conn.sock.settimeout(socket.getdefaulttimeout()) - else: # None or a value - conn.sock.settimeout(read_timeout) - - # Receive the response from the server - try: - try: # Python 2.7, use buffering of HTTP responses - httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 2.6 and older, Python 3 - try: - httplib_response = conn.getresponse() - except Exception as e: - # Remove the TypeError from the exception chain in Python 3; - # otherwise it looks like a programming error was the cause. - six.raise_from(e, None) - except (SocketTimeout, BaseSSLError, SocketError) as e: - self._raise_timeout(err=e, url=url, timeout_value=read_timeout) - raise - - # AppEngine doesn't have a version attr. - http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') - log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, - method, url, http_version, httplib_response.status, - httplib_response.length) - - try: - assert_header_parsing(httplib_response.msg) - except HeaderParsingError as hpe: # Platform-specific: Python 3 - log.warning( - 'Failed to parse headers (url=%s): %s', - self._absolute_url(url), hpe, exc_info=True) - - return httplib_response - - def _absolute_url(self, path): - return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url - - def close(self): - """ - Close all pooled connections and disable the pool. 
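# [Editor's sketch, not part of this changeset: the timeout semantics that
# _get_timeout/_raise_timeout above implement. Assumes a top-level urllib3
# install; host and path are placeholders.]
import urllib3
from urllib3.util.timeout import Timeout

pool = urllib3.HTTPConnectionPool(
    'example.org',
    timeout=Timeout(connect=1.0, read=4.0),  # split connect/read budgets
)
try:
    pool.urlopen('GET', '/slow')             # a bare float would set both at once
except urllib3.exceptions.ReadTimeoutError as exc:
    print('read timed out:', exc)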
- """ - # Disable access to the pool - old_pool, self.pool = self.pool, None - - try: - while True: - conn = old_pool.get(block=False) - if conn: - conn.close() - - except queue.Empty: - pass # Done. - - def is_same_host(self, url): - """ - Check if the given ``url`` is a member of the same host as this - connection pool. - """ - if url.startswith('/'): - return True - - # TODO: Add optional support for socket.gethostbyname checking. - scheme, host, port = get_host(url) - - host = _ipv6_host(host).lower() - - # Use explicit default port for comparison when none is given - if self.port and not port: - port = port_by_scheme.get(scheme) - elif not self.port and port == port_by_scheme.get(scheme): - port = None - - return (scheme, host, port) == (self.scheme, self.host, self.port) - - def urlopen(self, method, url, body=None, headers=None, retries=None, - redirect=True, assert_same_host=True, timeout=_Default, - pool_timeout=None, release_conn=None, chunked=False, - body_pos=None, **response_kw): - """ - Get a connection from the pool and perform an HTTP request. This is the - lowest level call for making a request, so you'll need to specify all - the raw details. - - .. note:: - - More commonly, it's appropriate to use a convenience method provided - by :class:`.RequestMethods`, such as :meth:`request`. - - .. note:: - - `release_conn` will only behave as expected if - `preload_content=False` because we want to make - `preload_content=False` the default behaviour someday soon without - breaking backwards compatibility. - - :param method: - HTTP request method (such as GET, POST, PUT, etc.) - - :param body: - Data to send in the request body (useful for creating - POST requests, see HTTPConnectionPool.post_url for - more convenience). - - :param headers: - Dictionary of custom headers to send, such as User-Agent, - If-None-Match, etc. If None, pool headers are used. If provided, - these headers completely replace any pool-specific headers. - - :param retries: - Configure the number of retries to allow before raising a - :class:`~urllib3.exceptions.MaxRetryError` exception. - - Pass ``None`` to retry until you receive a response. Pass a - :class:`~urllib3.util.retry.Retry` object for fine-grained control - over different types of retries. - Pass an integer number to retry connection errors that many times, - but no other types of errors. Pass zero to never retry. - - If ``False``, then retries are disabled and any exception is raised - immediately. Also, instead of raising a MaxRetryError on redirects, - the redirect response will be returned. - - :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. - - :param redirect: - If True, automatically handle redirects (status codes 301, 302, - 303, 307, 308). Each redirect counts as a retry. Disabling retries - will disable redirect, too. - - :param assert_same_host: - If ``True``, will make sure that the host of the pool requests is - consistent else will raise HostChangedError. When False, you can - use the pool on an HTTP proxy and request foreign hosts. - - :param timeout: - If specified, overrides the default timeout for this one - request. It may be a float (in seconds) or an instance of - :class:`urllib3.util.Timeout`. - - :param pool_timeout: - If set and the pool is set to block=True, then this method will - block for ``pool_timeout`` seconds and raise EmptyPoolError if no - connection is available within the time period. 
- - :param release_conn: - If False, then the urlopen call will not release the connection - back into the pool once a response is received (but will release if - you read the entire contents of the response such as when - `preload_content=True`). This is useful if you're not preloading - the response's content immediately. You will need to call - ``r.release_conn()`` on the response ``r`` to return the connection - back into the pool. If None, it takes the value of - ``response_kw.get('preload_content', True)``. - - :param chunked: - If True, urllib3 will send the body using chunked transfer - encoding. Otherwise, urllib3 will send the body using the standard - content-length form. Defaults to False. - - :param int body_pos: - Position to seek to in file-like body in the event of a retry or - redirect. Typically this won't need to be set because urllib3 will - auto-populate the value when needed. - - :param \\**response_kw: - Additional parameters are passed to - :meth:`urllib3.response.HTTPResponse.from_httplib` - """ - if headers is None: - headers = self.headers - - if not isinstance(retries, Retry): - retries = Retry.from_int(retries, redirect=redirect, default=self.retries) - - if release_conn is None: - release_conn = response_kw.get('preload_content', True) - - # Check host - if assert_same_host and not self.is_same_host(url): - raise HostChangedError(self, url, retries) - - conn = None - - # Track whether `conn` needs to be released before - # returning/raising/recursing. Update this variable if necessary, and - # leave `release_conn` constant throughout the function. That way, if - # the function recurses, the original value of `release_conn` will be - # passed down into the recursive call, and its value will be respected. - # - # See issue #651 [1] for details. - # - # [1] - release_this_conn = release_conn - - # Merge the proxy headers. Only do this in HTTP. We have to copy the - # headers dict so we can safely change it without those changes being - # reflected in anyone else's copy. - if self.scheme == 'http': - headers = headers.copy() - headers.update(self.proxy_headers) - - # Must keep the exception bound to a separate variable or else Python 3 - # complains about UnboundLocalError. - err = None - - # Keep track of whether we cleanly exited the except block. This - # ensures we do proper cleanup in finally. - clean_exit = False - - # Rewind body position, if needed. Record current position - # for future rewinds in the event of a redirect/retry. - body_pos = set_file_position(body, body_pos) - - try: - # Request a connection from the queue. - timeout_obj = self._get_timeout(timeout) - conn = self._get_conn(timeout=pool_timeout) - - conn.timeout = timeout_obj.connect_timeout - - is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) - if is_new_proxy_conn: - self._prepare_proxy(conn) - - # Make the request on the httplib connection object. - httplib_response = self._make_request(conn, method, url, - timeout=timeout_obj, - body=body, headers=headers, - chunked=chunked) - - # If we're going to release the connection in ``finally:``, then - # the response doesn't need to know about the connection. Otherwise - # it will also try to release it and we'll have a double-release - # mess. 
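# [Editor's sketch, not part of this changeset: the release_conn behaviour
# documented above. With preload_content=False the caller streams the body
# and must return the connection to the pool explicitly. Placeholder host
# and path.]
import urllib3

pool = urllib3.HTTPConnectionPool('example.org')
r = pool.urlopen('GET', '/big-file', preload_content=False)
try:
    for chunk in r.stream(1024):   # consume the body incrementally
        pass
finally:
    r.release_conn()               # hand the connection back for reuse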
- response_conn = conn if not release_conn else None - - # Pass method to Response for length checking - response_kw['request_method'] = method - - # Import httplib's response into our own wrapper object - response = self.ResponseCls.from_httplib(httplib_response, - pool=self, - connection=response_conn, - retries=retries, - **response_kw) - - # Everything went great! - clean_exit = True - - except queue.Empty: - # Timed out by queue. - raise EmptyPoolError(self, "No pool connections are available.") - - except (BaseSSLError, CertificateError) as e: - # Close the connection. If a connection is reused on which there - # was a Certificate error, the next request will certainly raise - # another Certificate error. - clean_exit = False - raise SSLError(e) - - except SSLError: - # Treat SSLError separately from BaseSSLError to preserve - # traceback. - clean_exit = False - raise - - except (TimeoutError, HTTPException, SocketError, ProtocolError) as e: - # Discard the connection for these exceptions. It will be - # be replaced during the next _get_conn() call. - clean_exit = False - - if isinstance(e, (SocketError, NewConnectionError)) and self.proxy: - e = ProxyError('Cannot connect to proxy.', e) - elif isinstance(e, (SocketError, HTTPException)): - e = ProtocolError('Connection aborted.', e) - - retries = retries.increment(method, url, error=e, _pool=self, - _stacktrace=sys.exc_info()[2]) - retries.sleep() - - # Keep track of the error for the retry warning. - err = e - - finally: - if not clean_exit: - # We hit some kind of exception, handled or otherwise. We need - # to throw the connection away unless explicitly told not to. - # Close the connection, set the variable to None, and make sure - # we put the None back in the pool to avoid leaking it. - conn = conn and conn.close() - release_this_conn = True - - if release_this_conn: - # Put the connection back to be reused. If the connection is - # expired then it will be None, which will get replaced with a - # fresh connection during _get_conn. - self._put_conn(conn) - - if not conn: - # Try again - log.warning("Retrying (%r) after connection " - "broken by '%r': %s", retries, err, url) - return self.urlopen(method, url, body, headers, retries, - redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - # Handle redirect? - redirect_location = redirect and response.get_redirect_location() - if redirect_location: - if response.status == 303: - method = 'GET' - - try: - retries = retries.increment(method, url, response=response, _pool=self) - except MaxRetryError: - if retries.raise_on_redirect: - # Release the connection for this response, since we're not - # returning it to be released manually. - response.release_conn() - raise - return response - - retries.sleep_for_retry(response) - log.debug("Redirecting %s -> %s", url, redirect_location) - return self.urlopen( - method, redirect_location, body, headers, - retries=retries, redirect=redirect, - assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - # Check if we should retry the HTTP response. 
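# [Editor's sketch, not part of this changeset: the redirect handling
# implemented just above. Each followed redirect decrements the Retry
# budget; raise_on_redirect=False returns the last response instead of
# raising MaxRetryError. Placeholder host and path.]
import urllib3
from urllib3.util.retry import Retry

pool = urllib3.HTTPConnectionPool('example.org')
r = pool.urlopen(
    'GET', '/redirect-chain',
    retries=Retry(redirect=2, raise_on_redirect=False),
)
print(r.status, r.get_redirect_location())
# Passing retries=False instead disables retrying entirely, and redirect
# responses are returned as-is rather than followed.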
- has_retry_after = bool(response.getheader('Retry-After')) - if retries.is_retry(method, response.status, has_retry_after): - try: - retries = retries.increment(method, url, response=response, _pool=self) - except MaxRetryError: - if retries.raise_on_status: - # Release the connection for this response, since we're not - # returning it to be released manually. - response.release_conn() - raise - return response - retries.sleep(response) - log.debug("Retry: %s", url) - return self.urlopen( - method, url, body, headers, - retries=retries, redirect=redirect, - assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, - body_pos=body_pos, **response_kw) - - return response - - -class HTTPSConnectionPool(HTTPConnectionPool): - """ - Same as :class:`.HTTPConnectionPool`, but HTTPS. - - When Python is compiled with the :mod:`ssl` module, then - :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, - instead of :class:`.HTTPSConnection`. - - :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, - ``assert_hostname`` and ``host`` in this order to verify connections. - If ``assert_hostname`` is False, no verification is done. - - The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, - ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is - available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade - the connection socket into an SSL socket. - """ - - scheme = 'https' - ConnectionCls = HTTPSConnection - - def __init__(self, host, port=None, - strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, - block=False, headers=None, retries=None, - _proxy=None, _proxy_headers=None, - key_file=None, cert_file=None, cert_reqs=None, - ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None, **conn_kw): - - HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, retries, _proxy, _proxy_headers, - **conn_kw) - - if ca_certs and cert_reqs is None: - cert_reqs = 'CERT_REQUIRED' - - self.key_file = key_file - self.cert_file = cert_file - self.cert_reqs = cert_reqs - self.ca_certs = ca_certs - self.ca_cert_dir = ca_cert_dir - self.ssl_version = ssl_version - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - - def _prepare_conn(self, conn): - """ - Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` - and establish the tunnel if proxy is used. - """ - - if isinstance(conn, VerifiedHTTPSConnection): - conn.set_cert(key_file=self.key_file, - cert_file=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - assert_hostname=self.assert_hostname, - assert_fingerprint=self.assert_fingerprint) - conn.ssl_version = self.ssl_version - return conn - - def _prepare_proxy(self, conn): - """ - Establish tunnel connection early, because otherwise httplib - would improperly set Host: header to proxy's IP:port. - """ - # Python 2.7+ - try: - set_tunnel = conn.set_tunnel - except AttributeError: # Platform-specific: Python 2.6 - set_tunnel = conn._set_tunnel - - if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older - set_tunnel(self.host, self.port) - else: - set_tunnel(self.host, self.port, self.proxy_headers) - - conn.connect() - - def _new_conn(self): - """ - Return a fresh :class:`httplib.HTTPSConnection`. 
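# [Editor's sketch, not part of this changeset: wiring the certificate
# options documented in the HTTPSConnectionPool docstring above. Assumes
# urllib3 and certifi are installed; the host is a placeholder.]
import certifi
import urllib3

https_pool = urllib3.HTTPSConnectionPool(
    'example.org', port=443,
    cert_reqs='CERT_REQUIRED',   # fail the handshake on an untrusted cert
    ca_certs=certifi.where(),    # CA bundle fed to ssl_wrap_socket
)
r = https_pool.urlopen('GET', '/')
print(r.status)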
- """ - self.num_connections += 1 - log.debug("Starting new HTTPS connection (%d): %s", - self.num_connections, self.host) - - if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - raise SSLError("Can't connect to HTTPS URL because the SSL " - "module is not available.") - - actual_host = self.host - actual_port = self.port - if self.proxy is not None: - actual_host = self.proxy.host - actual_port = self.proxy.port - - conn = self.ConnectionCls(host=actual_host, port=actual_port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) - - return self._prepare_conn(conn) - - def _validate_conn(self, conn): - """ - Called right before a request is made, after the socket is created. - """ - super(HTTPSConnectionPool, self)._validate_conn(conn) - - # Force connect early to allow us to validate the connection. - if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` - conn.connect() - - if not conn.is_verified: - warnings.warn(( - 'Unverified HTTPS request is being made. ' - 'Adding certificate verification is strongly advised. See: ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings'), - InsecureRequestWarning) - - -def connection_from_url(url, **kw): - """ - Given a url, return an :class:`.ConnectionPool` instance of its host. - - This is a shortcut for not having to parse out the scheme, host, and port - of the url before creating an :class:`.ConnectionPool` instance. - - :param url: - Absolute URL string that must include the scheme. Port is optional. - - :param \\**kw: - Passes additional parameters to the constructor of the appropriate - :class:`.ConnectionPool`. Useful for specifying things like - timeout, maxsize, headers, etc. - - Example:: - - >>> conn = connection_from_url('http://google.com/') - >>> r = conn.request('GET', '/') - """ - scheme, host, port = get_host(url) - port = port or port_by_scheme.get(scheme, 80) - if scheme == 'https': - return HTTPSConnectionPool(host, port=port, **kw) - else: - return HTTPConnectionPool(host, port=port, **kw) - - -def _ipv6_host(host): - """ - Process IPv6 address literals - """ - - # httplib doesn't like it when we include brackets in IPv6 addresses - # Specifically, if we include brackets but also pass the port then - # httplib crazily doubles up the square brackets on the Host header. - # Instead, we need to make sure we never pass ``None`` as the port. - # However, for backward compatibility reasons we can't actually - # *assert* that. 
See http://bugs.python.org/issue28539 - # - # Also if an IPv6 address literal has a zone identifier, the - # percent sign might be URIencoded, convert it back into ASCII - if host.startswith('[') and host.endswith(']'): - host = host.replace('%25', '%').strip('[]') - return host diff --git a/pype/vendor/requests/packages/urllib3/contrib/__init__.py b/pype/vendor/requests/packages/urllib3/contrib/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/requests/packages/urllib3/contrib/_securetransport/__init__.py b/pype/vendor/requests/packages/urllib3/contrib/_securetransport/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/requests/packages/urllib3/contrib/_securetransport/bindings.py b/pype/vendor/requests/packages/urllib3/contrib/_securetransport/bindings.py deleted file mode 100644 index e26b840861..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/_securetransport/bindings.py +++ /dev/null @@ -1,590 +0,0 @@ -""" -This module uses ctypes to bind a whole bunch of functions and constants from -SecureTransport. The goal here is to provide the low-level API to -SecureTransport. These are essentially the C-level functions and constants, and -they're pretty gross to work with. - -This code is a bastardised version of the code found in Will Bond's oscrypto -library. An enormous debt is owed to him for blazing this trail for us. For -that reason, this code should be considered to be covered both by urllib3's -license and by oscrypto's: - - Copyright (c) 2015-2016 Will Bond - - Permission is hereby granted, free of charge, to any person obtaining a - copy of this software and associated documentation files (the "Software"), - to deal in the Software without restriction, including without limitation - the rights to use, copy, modify, merge, publish, distribute, sublicense, - and/or sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. 
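# [Editor's sketch, not part of this changeset: the ctypes binding pattern
# this module applies to Security.framework, demonstrated against libm so
# it runs on any POSIX system. Declaring argtypes/restype is what makes
# the foreign calls type-checked and correctly converted.]
import ctypes
from ctypes.util import find_library

libm_path = find_library('m') or find_library('c')
if not libm_path:
    raise ImportError('The library m could not be found')
libm = ctypes.CDLL(libm_path)

libm.cos.argtypes = [ctypes.c_double]   # declare the C signature...
libm.cos.restype = ctypes.c_double      # ...so ctypes marshals values
print(libm.cos(0.0))                    # -> 1.0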
-""" -from __future__ import absolute_import - -import platform -from ctypes.util import find_library -from ctypes import ( - c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, - c_bool -) -from ctypes import CDLL, POINTER, CFUNCTYPE - - -security_path = find_library('Security') -if not security_path: - raise ImportError('The library Security could not be found') - - -core_foundation_path = find_library('CoreFoundation') -if not core_foundation_path: - raise ImportError('The library CoreFoundation could not be found') - - -version = platform.mac_ver()[0] -version_info = tuple(map(int, version.split('.'))) -if version_info < (10, 8): - raise OSError( - 'Only OS X 10.8 and newer are supported, not %s.%s' % ( - version_info[0], version_info[1] - ) - ) - -Security = CDLL(security_path, use_errno=True) -CoreFoundation = CDLL(core_foundation_path, use_errno=True) - -Boolean = c_bool -CFIndex = c_long -CFStringEncoding = c_uint32 -CFData = c_void_p -CFString = c_void_p -CFArray = c_void_p -CFMutableArray = c_void_p -CFDictionary = c_void_p -CFError = c_void_p -CFType = c_void_p -CFTypeID = c_ulong - -CFTypeRef = POINTER(CFType) -CFAllocatorRef = c_void_p - -OSStatus = c_int32 - -CFDataRef = POINTER(CFData) -CFStringRef = POINTER(CFString) -CFArrayRef = POINTER(CFArray) -CFMutableArrayRef = POINTER(CFMutableArray) -CFDictionaryRef = POINTER(CFDictionary) -CFArrayCallBacks = c_void_p -CFDictionaryKeyCallBacks = c_void_p -CFDictionaryValueCallBacks = c_void_p - -SecCertificateRef = POINTER(c_void_p) -SecExternalFormat = c_uint32 -SecExternalItemType = c_uint32 -SecIdentityRef = POINTER(c_void_p) -SecItemImportExportFlags = c_uint32 -SecItemImportExportKeyParameters = c_void_p -SecKeychainRef = POINTER(c_void_p) -SSLProtocol = c_uint32 -SSLCipherSuite = c_uint32 -SSLContextRef = POINTER(c_void_p) -SecTrustRef = POINTER(c_void_p) -SSLConnectionRef = c_uint32 -SecTrustResultType = c_uint32 -SecTrustOptionFlags = c_uint32 -SSLProtocolSide = c_uint32 -SSLConnectionType = c_uint32 -SSLSessionOption = c_uint32 - - -try: - Security.SecItemImport.argtypes = [ - CFDataRef, - CFStringRef, - POINTER(SecExternalFormat), - POINTER(SecExternalItemType), - SecItemImportExportFlags, - POINTER(SecItemImportExportKeyParameters), - SecKeychainRef, - POINTER(CFArrayRef), - ] - Security.SecItemImport.restype = OSStatus - - Security.SecCertificateGetTypeID.argtypes = [] - Security.SecCertificateGetTypeID.restype = CFTypeID - - Security.SecIdentityGetTypeID.argtypes = [] - Security.SecIdentityGetTypeID.restype = CFTypeID - - Security.SecKeyGetTypeID.argtypes = [] - Security.SecKeyGetTypeID.restype = CFTypeID - - Security.SecCertificateCreateWithData.argtypes = [ - CFAllocatorRef, - CFDataRef - ] - Security.SecCertificateCreateWithData.restype = SecCertificateRef - - Security.SecCertificateCopyData.argtypes = [ - SecCertificateRef - ] - Security.SecCertificateCopyData.restype = CFDataRef - - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] - Security.SecCopyErrorMessageString.restype = CFStringRef - - Security.SecIdentityCreateWithCertificate.argtypes = [ - CFTypeRef, - SecCertificateRef, - POINTER(SecIdentityRef) - ] - Security.SecIdentityCreateWithCertificate.restype = OSStatus - - Security.SecKeychainCreate.argtypes = [ - c_char_p, - c_uint32, - c_void_p, - Boolean, - c_void_p, - POINTER(SecKeychainRef) - ] - Security.SecKeychainCreate.restype = OSStatus - - Security.SecKeychainDelete.argtypes = [ - SecKeychainRef - ] - Security.SecKeychainDelete.restype = OSStatus - - 
Security.SecPKCS12Import.argtypes = [ - CFDataRef, - CFDictionaryRef, - POINTER(CFArrayRef) - ] - Security.SecPKCS12Import.restype = OSStatus - - SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) - SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) - - Security.SSLSetIOFuncs.argtypes = [ - SSLContextRef, - SSLReadFunc, - SSLWriteFunc - ] - Security.SSLSetIOFuncs.restype = OSStatus - - Security.SSLSetPeerID.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] - Security.SSLSetPeerID.restype = OSStatus - - Security.SSLSetCertificate.argtypes = [ - SSLContextRef, - CFArrayRef - ] - Security.SSLSetCertificate.restype = OSStatus - - Security.SSLSetCertificateAuthorities.argtypes = [ - SSLContextRef, - CFTypeRef, - Boolean - ] - Security.SSLSetCertificateAuthorities.restype = OSStatus - - Security.SSLSetConnection.argtypes = [ - SSLContextRef, - SSLConnectionRef - ] - Security.SSLSetConnection.restype = OSStatus - - Security.SSLSetPeerDomainName.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] - Security.SSLSetPeerDomainName.restype = OSStatus - - Security.SSLHandshake.argtypes = [ - SSLContextRef - ] - Security.SSLHandshake.restype = OSStatus - - Security.SSLRead.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] - Security.SSLRead.restype = OSStatus - - Security.SSLWrite.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] - Security.SSLWrite.restype = OSStatus - - Security.SSLClose.argtypes = [ - SSLContextRef - ] - Security.SSLClose.restype = OSStatus - - Security.SSLGetNumberSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(c_size_t) - ] - Security.SSLGetNumberSupportedCiphers.restype = OSStatus - - Security.SSLGetSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - POINTER(c_size_t) - ] - Security.SSLGetSupportedCiphers.restype = OSStatus - - Security.SSLSetEnabledCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - c_size_t - ] - Security.SSLSetEnabledCiphers.restype = OSStatus - - Security.SSLGetNumberEnabledCiphers.argtype = [ - SSLContextRef, - POINTER(c_size_t) - ] - Security.SSLGetNumberEnabledCiphers.restype = OSStatus - - Security.SSLGetEnabledCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - POINTER(c_size_t) - ] - Security.SSLGetEnabledCiphers.restype = OSStatus - - Security.SSLGetNegotiatedCipher.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite) - ] - Security.SSLGetNegotiatedCipher.restype = OSStatus - - Security.SSLGetNegotiatedProtocolVersion.argtypes = [ - SSLContextRef, - POINTER(SSLProtocol) - ] - Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus - - Security.SSLCopyPeerTrust.argtypes = [ - SSLContextRef, - POINTER(SecTrustRef) - ] - Security.SSLCopyPeerTrust.restype = OSStatus - - Security.SecTrustSetAnchorCertificates.argtypes = [ - SecTrustRef, - CFArrayRef - ] - Security.SecTrustSetAnchorCertificates.restype = OSStatus - - Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ - SecTrustRef, - Boolean - ] - Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus - - Security.SecTrustEvaluate.argtypes = [ - SecTrustRef, - POINTER(SecTrustResultType) - ] - Security.SecTrustEvaluate.restype = OSStatus - - Security.SecTrustGetCertificateCount.argtypes = [ - SecTrustRef - ] - Security.SecTrustGetCertificateCount.restype = CFIndex - - Security.SecTrustGetCertificateAtIndex.argtypes = [ - SecTrustRef, - CFIndex - ] - 
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef - - Security.SSLCreateContext.argtypes = [ - CFAllocatorRef, - SSLProtocolSide, - SSLConnectionType - ] - Security.SSLCreateContext.restype = SSLContextRef - - Security.SSLSetSessionOption.argtypes = [ - SSLContextRef, - SSLSessionOption, - Boolean - ] - Security.SSLSetSessionOption.restype = OSStatus - - Security.SSLSetProtocolVersionMin.argtypes = [ - SSLContextRef, - SSLProtocol - ] - Security.SSLSetProtocolVersionMin.restype = OSStatus - - Security.SSLSetProtocolVersionMax.argtypes = [ - SSLContextRef, - SSLProtocol - ] - Security.SSLSetProtocolVersionMax.restype = OSStatus - - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] - Security.SecCopyErrorMessageString.restype = CFStringRef - - Security.SSLReadFunc = SSLReadFunc - Security.SSLWriteFunc = SSLWriteFunc - Security.SSLContextRef = SSLContextRef - Security.SSLProtocol = SSLProtocol - Security.SSLCipherSuite = SSLCipherSuite - Security.SecIdentityRef = SecIdentityRef - Security.SecKeychainRef = SecKeychainRef - Security.SecTrustRef = SecTrustRef - Security.SecTrustResultType = SecTrustResultType - Security.SecExternalFormat = SecExternalFormat - Security.OSStatus = OSStatus - - Security.kSecImportExportPassphrase = CFStringRef.in_dll( - Security, 'kSecImportExportPassphrase' - ) - Security.kSecImportItemIdentity = CFStringRef.in_dll( - Security, 'kSecImportItemIdentity' - ) - - # CoreFoundation time! - CoreFoundation.CFRetain.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFRetain.restype = CFTypeRef - - CoreFoundation.CFRelease.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFRelease.restype = None - - CoreFoundation.CFGetTypeID.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFGetTypeID.restype = CFTypeID - - CoreFoundation.CFStringCreateWithCString.argtypes = [ - CFAllocatorRef, - c_char_p, - CFStringEncoding - ] - CoreFoundation.CFStringCreateWithCString.restype = CFStringRef - - CoreFoundation.CFStringGetCStringPtr.argtypes = [ - CFStringRef, - CFStringEncoding - ] - CoreFoundation.CFStringGetCStringPtr.restype = c_char_p - - CoreFoundation.CFStringGetCString.argtypes = [ - CFStringRef, - c_char_p, - CFIndex, - CFStringEncoding - ] - CoreFoundation.CFStringGetCString.restype = c_bool - - CoreFoundation.CFDataCreate.argtypes = [ - CFAllocatorRef, - c_char_p, - CFIndex - ] - CoreFoundation.CFDataCreate.restype = CFDataRef - - CoreFoundation.CFDataGetLength.argtypes = [ - CFDataRef - ] - CoreFoundation.CFDataGetLength.restype = CFIndex - - CoreFoundation.CFDataGetBytePtr.argtypes = [ - CFDataRef - ] - CoreFoundation.CFDataGetBytePtr.restype = c_void_p - - CoreFoundation.CFDictionaryCreate.argtypes = [ - CFAllocatorRef, - POINTER(CFTypeRef), - POINTER(CFTypeRef), - CFIndex, - CFDictionaryKeyCallBacks, - CFDictionaryValueCallBacks - ] - CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef - - CoreFoundation.CFDictionaryGetValue.argtypes = [ - CFDictionaryRef, - CFTypeRef - ] - CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef - - CoreFoundation.CFArrayCreate.argtypes = [ - CFAllocatorRef, - POINTER(CFTypeRef), - CFIndex, - CFArrayCallBacks, - ] - CoreFoundation.CFArrayCreate.restype = CFArrayRef - - CoreFoundation.CFArrayCreateMutable.argtypes = [ - CFAllocatorRef, - CFIndex, - CFArrayCallBacks - ] - CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef - - CoreFoundation.CFArrayAppendValue.argtypes = [ - CFMutableArrayRef, - c_void_p - ] - CoreFoundation.CFArrayAppendValue.restype = None - - 
CoreFoundation.CFArrayGetCount.argtypes = [ - CFArrayRef - ] - CoreFoundation.CFArrayGetCount.restype = CFIndex - - CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ - CFArrayRef, - CFIndex - ] - CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p - - CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( - CoreFoundation, 'kCFAllocatorDefault' - ) - CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') - CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' - ) - CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryValueCallBacks' - ) - - CoreFoundation.CFTypeRef = CFTypeRef - CoreFoundation.CFArrayRef = CFArrayRef - CoreFoundation.CFStringRef = CFStringRef - CoreFoundation.CFDictionaryRef = CFDictionaryRef - -except (AttributeError): - raise ImportError('Error initializing ctypes') - - -class CFConst(object): - """ - A class object that acts as essentially a namespace for CoreFoundation - constants. - """ - kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) - - -class SecurityConst(object): - """ - A class object that acts as essentially a namespace for Security constants. - """ - kSSLSessionOptionBreakOnServerAuth = 0 - - kSSLProtocol2 = 1 - kSSLProtocol3 = 2 - kTLSProtocol1 = 4 - kTLSProtocol11 = 7 - kTLSProtocol12 = 8 - - kSSLClientSide = 1 - kSSLStreamType = 0 - - kSecFormatPEMSequence = 10 - - kSecTrustResultInvalid = 0 - kSecTrustResultProceed = 1 - # This gap is present on purpose: this was kSecTrustResultConfirm, which - # is deprecated. - kSecTrustResultDeny = 3 - kSecTrustResultUnspecified = 4 - kSecTrustResultRecoverableTrustFailure = 5 - kSecTrustResultFatalTrustFailure = 6 - kSecTrustResultOtherError = 7 - - errSSLProtocol = -9800 - errSSLWouldBlock = -9803 - errSSLClosedGraceful = -9805 - errSSLClosedNoNotify = -9816 - errSSLClosedAbort = -9806 - - errSSLXCertChainInvalid = -9807 - errSSLCrypto = -9809 - errSSLInternal = -9810 - errSSLCertExpired = -9814 - errSSLCertNotYetValid = -9815 - errSSLUnknownRootCert = -9812 - errSSLNoRootCert = -9813 - errSSLHostNameMismatch = -9843 - errSSLPeerHandshakeFail = -9824 - errSSLPeerUserCancelled = -9839 - errSSLWeakPeerEphemeralDHKey = -9850 - errSSLServerAuthCompleted = -9841 - errSSLRecordOverflow = -9847 - - errSecVerifyFailed = -67808 - errSecNoTrustSettings = -25263 - errSecItemNotFound = -25300 - errSecInvalidTrustSettings = -25262 - - # Cipher suites. We only pick the ones our default cipher string allows. 
- TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C - TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 - TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B - TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 - TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 - TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 - TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A - TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 - TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 - TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 - TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D - TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C - TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D - TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C - TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 - TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F diff --git a/pype/vendor/requests/packages/urllib3/contrib/_securetransport/low_level.py b/pype/vendor/requests/packages/urllib3/contrib/_securetransport/low_level.py deleted file mode 100644 index 5e3494bce6..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/_securetransport/low_level.py +++ /dev/null @@ -1,343 +0,0 @@ -""" -Low-level helpers for the SecureTransport bindings. - -These are Python functions that are not directly related to the high-level APIs -but are necessary to get them to work. They include a whole bunch of low-level -CoreFoundation messing about and memory management. The concerns in this module -are almost entirely about trying to avoid memory leaks and providing -appropriate and useful assistance to the higher-level code. -""" -import base64 -import ctypes -import itertools -import re -import os -import ssl -import tempfile - -from .bindings import Security, CoreFoundation, CFConst - - -# This regular expression is used to grab PEM data out of a PEM bundle. -_PEM_CERTS_RE = re.compile( - b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL -) - - -def _cf_data_from_bytes(bytestring): - """ - Given a bytestring, create a CFData object from it. This CFData object must - be CFReleased by the caller. - """ - return CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) - ) - - -def _cf_dictionary_from_tuples(tuples): - """ - Given a list of Python tuples, create an associated CFDictionary. - """ - dictionary_size = len(tuples) - - # We need to get the dictionary keys and values out in the same order. - keys = (t[0] for t in tuples) - values = (t[1] for t in tuples) - cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) - cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) - - return CoreFoundation.CFDictionaryCreate( - CoreFoundation.kCFAllocatorDefault, - cf_keys, - cf_values, - dictionary_size, - CoreFoundation.kCFTypeDictionaryKeyCallBacks, - CoreFoundation.kCFTypeDictionaryValueCallBacks, - ) - - -def _cf_string_to_unicode(value): - """ - Creates a Unicode string from a CFString object. 
Used entirely for error - reporting. - - Yes, it annoys me quite a lot that this function is this complex. - """ - value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) - - string = CoreFoundation.CFStringGetCStringPtr( - value_as_void_p, - CFConst.kCFStringEncodingUTF8 - ) - if string is None: - buffer = ctypes.create_string_buffer(1024) - result = CoreFoundation.CFStringGetCString( - value_as_void_p, - buffer, - 1024, - CFConst.kCFStringEncodingUTF8 - ) - if not result: - raise OSError('Error copying C string from CFStringRef') - string = buffer.value - if string is not None: - string = string.decode('utf-8') - return string - - -def _assert_no_error(error, exception_class=None): - """ - Checks the return code and throws an exception if there is an error to - report - """ - if error == 0: - return - - cf_error_string = Security.SecCopyErrorMessageString(error, None) - output = _cf_string_to_unicode(cf_error_string) - CoreFoundation.CFRelease(cf_error_string) - - if output is None or output == u'': - output = u'OSStatus %s' % error - - if exception_class is None: - exception_class = ssl.SSLError - - raise exception_class(output) - - -def _cert_array_from_pem(pem_bundle): - """ - Given a bundle of certs in PEM format, turns them into a CFArray of certs - that can be used to validate a cert chain. - """ - der_certs = [ - base64.b64decode(match.group(1)) - for match in _PEM_CERTS_RE.finditer(pem_bundle) - ] - if not der_certs: - raise ssl.SSLError("No root certificates specified") - - cert_array = CoreFoundation.CFArrayCreateMutable( - CoreFoundation.kCFAllocatorDefault, - 0, - ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) - ) - if not cert_array: - raise ssl.SSLError("Unable to allocate memory!") - - try: - for der_bytes in der_certs: - certdata = _cf_data_from_bytes(der_bytes) - if not certdata: - raise ssl.SSLError("Unable to allocate memory!") - cert = Security.SecCertificateCreateWithData( - CoreFoundation.kCFAllocatorDefault, certdata - ) - CoreFoundation.CFRelease(certdata) - if not cert: - raise ssl.SSLError("Unable to build cert object!") - - CoreFoundation.CFArrayAppendValue(cert_array, cert) - CoreFoundation.CFRelease(cert) - except Exception: - # We need to free the array before the exception bubbles further. - # We only want to do that if an error occurs: otherwise, the caller - # should free. - CoreFoundation.CFRelease(cert_array) - - return cert_array - - -def _is_cert(item): - """ - Returns True if a given CFTypeRef is a certificate. - """ - expected = Security.SecCertificateGetTypeID() - return CoreFoundation.CFGetTypeID(item) == expected - - -def _is_identity(item): - """ - Returns True if a given CFTypeRef is an identity. - """ - expected = Security.SecIdentityGetTypeID() - return CoreFoundation.CFGetTypeID(item) == expected - - -def _temporary_keychain(): - """ - This function creates a temporary Mac keychain that we can use to work with - credentials. This keychain uses a one-time password and a temporary file to - store the data. We expect to have one keychain per socket. The returned - SecKeychainRef must be freed by the caller, including calling - SecKeychainDelete. - - Returns a tuple of the SecKeychainRef and the path to the temporary - directory that contains it. - """ - # Unfortunately, SecKeychainCreate requires a path to a keychain. This - # means we cannot use mkstemp to use a generic temporary file. Instead, - # we're going to create a temporary directory and a filename to use there. 
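# [Editor's sketch, not part of this changeset: the PEM-to-DER step that
# _cert_array_from_pem above performs before handing certificates to
# CoreFoundation. ``pem_bundle`` must be bytes, matching _PEM_CERTS_RE.]
import base64
import re

_PEM_RE = re.compile(
    b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
)

def pem_to_der(pem_bundle):
    """Return the DER bytes of every certificate in a PEM bundle."""
    return [
        base64.b64decode(match.group(1))
        for match in _PEM_RE.finditer(pem_bundle)
    ]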
- # This filename will be 8 random bytes expanded into base64. We also need - # some random bytes to password-protect the keychain we're creating, so we - # ask for 40 random bytes. - random_bytes = os.urandom(40) - filename = base64.b64encode(random_bytes[:8]).decode('utf-8') - password = base64.b64encode(random_bytes[8:]) # Must be valid UTF-8 - tempdirectory = tempfile.mkdtemp() - - keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') - - # We now want to create the keychain itself. - keychain = Security.SecKeychainRef() - status = Security.SecKeychainCreate( - keychain_path, - len(password), - password, - False, - None, - ctypes.byref(keychain) - ) - _assert_no_error(status) - - # Having created the keychain, we want to pass it off to the caller. - return keychain, tempdirectory - - -def _load_items_from_file(keychain, path): - """ - Given a single file, loads all the trust objects from it into arrays and - the keychain. - Returns a tuple of lists: the first list is a list of identities, the - second a list of certs. - """ - certificates = [] - identities = [] - result_array = None - - with open(path, 'rb') as f: - raw_filedata = f.read() - - try: - filedata = CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, - raw_filedata, - len(raw_filedata) - ) - result_array = CoreFoundation.CFArrayRef() - result = Security.SecItemImport( - filedata, # cert data - None, # Filename, leaving it out for now - None, # What the type of the file is, we don't care - None, # what's in the file, we don't care - 0, # import flags - None, # key params, can include passphrase in the future - keychain, # The keychain to insert into - ctypes.byref(result_array) # Results - ) - _assert_no_error(result) - - # A CFArray is not very useful to us as an intermediary - # representation, so we are going to extract the objects we want - # and then free the array. We don't need to keep hold of keys: the - # keychain already has them! - result_count = CoreFoundation.CFArrayGetCount(result_array) - for index in range(result_count): - item = CoreFoundation.CFArrayGetValueAtIndex( - result_array, index - ) - item = ctypes.cast(item, CoreFoundation.CFTypeRef) - - if _is_cert(item): - CoreFoundation.CFRetain(item) - certificates.append(item) - elif _is_identity(item): - CoreFoundation.CFRetain(item) - identities.append(item) - finally: - if result_array: - CoreFoundation.CFRelease(result_array) - - CoreFoundation.CFRelease(filedata) - - return (identities, certificates) - - -def _load_client_cert_chain(keychain, *paths): - """ - Load certificates and maybe keys from a number of files. Has the end goal - of returning a CFArray containing one SecIdentityRef, and then zero or more - SecCertificateRef objects, suitable for use as a client certificate trust - chain. - """ - # Ok, the strategy. - # - # This relies on knowing that macOS will not give you a SecIdentityRef - # unless you have imported a key into a keychain. This is a somewhat - # artificial limitation of macOS (for example, it doesn't necessarily - # affect iOS), but there is nothing inside Security.framework that lets you - # get a SecIdentityRef without having a key in a keychain. - # - # So the policy here is we take all the files and iterate them in order. - # Each one will use SecItemImport to have one or more objects loaded from - # it. We will also point at a keychain that macOS can use to work with the - # private key. - # - # Once we have all the objects, we'll check what we actually have. 
If we - # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, - # we'll take the first certificate (which we assume to be our leaf) and - # ask the keychain to give us a SecIdentityRef with that cert's associated - # key. - # - # We'll then return a CFArray containing the trust chain: one - # SecIdentityRef and then zero-or-more SecCertificateRef objects. The - # responsibility for freeing this CFArray will be with the caller. This - # CFArray must remain alive for the entire connection, so in practice it - # will be stored with a single SSLSocket, along with the reference to the - # keychain. - certificates = [] - identities = [] - - # Filter out bad paths. - paths = (path for path in paths if path) - - try: - for file_path in paths: - new_identities, new_certs = _load_items_from_file( - keychain, file_path - ) - identities.extend(new_identities) - certificates.extend(new_certs) - - # Ok, we have everything. The question is: do we have an identity? If - # not, we want to grab one from the first cert we have. - if not identities: - new_identity = Security.SecIdentityRef() - status = Security.SecIdentityCreateWithCertificate( - keychain, - certificates[0], - ctypes.byref(new_identity) - ) - _assert_no_error(status) - identities.append(new_identity) - - # We now want to release the original certificate, as we no longer - # need it. - CoreFoundation.CFRelease(certificates.pop(0)) - - # We now need to build a new CFArray that holds the trust chain. - trust_chain = CoreFoundation.CFArrayCreateMutable( - CoreFoundation.kCFAllocatorDefault, - 0, - ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), - ) - for item in itertools.chain(identities, certificates): - # ArrayAppendValue does a CFRetain on the item. That's fine, - # because the finally block will release our other refs to them. - CoreFoundation.CFArrayAppendValue(trust_chain, item) - - return trust_chain - finally: - for obj in itertools.chain(identities, certificates): - CoreFoundation.CFRelease(obj) diff --git a/pype/vendor/requests/packages/urllib3/contrib/appengine.py b/pype/vendor/requests/packages/urllib3/contrib/appengine.py deleted file mode 100644 index 814b0222d9..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/appengine.py +++ /dev/null @@ -1,296 +0,0 @@ -""" -This module provides a pool manager that uses Google App Engine's -`URLFetch Service `_. - -Example usage:: - - from urllib3 import PoolManager - from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox - - if is_appengine_sandbox(): - # AppEngineManager uses AppEngine's URLFetch API behind the scenes - http = AppEngineManager() - else: - # PoolManager uses a socket-level API behind the scenes - http = PoolManager() - - r = http.request('GET', 'https://google.com/') - -There are `limitations `_ to the URLFetch service and it may not be -the best choice for your application. There are three options for using -urllib3 on Google App Engine: - -1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is - cost-effective in many circumstances as long as your usage is within the - limitations. -2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. - Sockets also have `limitations and restrictions - `_ and have a lower free quota than URLFetch. - To use sockets, be sure to specify the following in your ``app.yaml``:: - - env_variables: - GAE_USE_SOCKETS_HTTPLIB : 'true' - -3. 
If you are using `App Engine Flexible -`_, you can use the standard -:class:`PoolManager` without any configuration or special environment variables. -""" - -from __future__ import absolute_import -import logging -import os -import warnings -from ..packages.six.moves.urllib.parse import urljoin - -from ..exceptions import ( - HTTPError, - HTTPWarning, - MaxRetryError, - ProtocolError, - TimeoutError, - SSLError -) - -from ..packages.six import BytesIO -from ..request import RequestMethods -from ..response import HTTPResponse -from ..util.timeout import Timeout -from ..util.retry import Retry - -try: - from google.appengine.api import urlfetch -except ImportError: - urlfetch = None - - -log = logging.getLogger(__name__) - - -class AppEnginePlatformWarning(HTTPWarning): - pass - - -class AppEnginePlatformError(HTTPError): - pass - - -class AppEngineManager(RequestMethods): - """ - Connection manager for Google App Engine sandbox applications. - - This manager uses the URLFetch service directly instead of using the - emulated httplib, and is subject to URLFetch limitations as described in - the App Engine documentation `here - `_. - - Notably it will raise an :class:`AppEnginePlatformError` if: - * URLFetch is not available. - * If you attempt to use this on App Engine Flexible, as full socket - support is available. - * If a request size is more than 10 megabytes. - * If a response size is more than 32 megabtyes. - * If you use an unsupported request method such as OPTIONS. - - Beyond those cases, it will raise normal urllib3 errors. - """ - - def __init__(self, headers=None, retries=None, validate_certificate=True, - urlfetch_retries=True): - if not urlfetch: - raise AppEnginePlatformError( - "URLFetch is not available in this environment.") - - if is_prod_appengine_mvms(): - raise AppEnginePlatformError( - "Use normal urllib3.PoolManager instead of AppEngineManager" - "on Managed VMs, as using URLFetch is not necessary in " - "this environment.") - - warnings.warn( - "urllib3 is using URLFetch on Google App Engine sandbox instead " - "of sockets. 
To use sockets directly instead of URLFetch see " - "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", - AppEnginePlatformWarning) - - RequestMethods.__init__(self, headers) - self.validate_certificate = validate_certificate - self.urlfetch_retries = urlfetch_retries - - self.retries = retries or Retry.DEFAULT - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - # Return False to re-raise any potential exceptions - return False - - def urlopen(self, method, url, body=None, headers=None, - retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, - **response_kw): - - retries = self._get_retries(retries, redirect) - - try: - follow_redirects = ( - redirect and - retries.redirect != 0 and - retries.total) - response = urlfetch.fetch( - url, - payload=body, - method=method, - headers=headers or {}, - allow_truncated=False, - follow_redirects=self.urlfetch_retries and follow_redirects, - deadline=self._get_absolute_timeout(timeout), - validate_certificate=self.validate_certificate, - ) - except urlfetch.DeadlineExceededError as e: - raise TimeoutError(self, e) - - except urlfetch.InvalidURLError as e: - if 'too large' in str(e): - raise AppEnginePlatformError( - "URLFetch request too large, URLFetch only " - "supports requests up to 10mb in size.", e) - raise ProtocolError(e) - - except urlfetch.DownloadError as e: - if 'Too many redirects' in str(e): - raise MaxRetryError(self, url, reason=e) - raise ProtocolError(e) - - except urlfetch.ResponseTooLargeError as e: - raise AppEnginePlatformError( - "URLFetch response too large, URLFetch only supports" - "responses up to 32mb in size.", e) - - except urlfetch.SSLCertificateError as e: - raise SSLError(e) - - except urlfetch.InvalidMethodError as e: - raise AppEnginePlatformError( - "URLFetch does not support method: %s" % method, e) - - http_response = self._urlfetch_response_to_http_response( - response, retries=retries, **response_kw) - - # Handle redirect? - redirect_location = redirect and http_response.get_redirect_location() - if redirect_location: - # Check for redirect response - if (self.urlfetch_retries and retries.raise_on_redirect): - raise MaxRetryError(self, url, "too many redirects") - else: - if http_response.status == 303: - method = 'GET' - - try: - retries = retries.increment(method, url, response=http_response, _pool=self) - except MaxRetryError: - if retries.raise_on_redirect: - raise MaxRetryError(self, url, "too many redirects") - return http_response - - retries.sleep_for_retry(http_response) - log.debug("Redirecting %s -> %s", url, redirect_location) - redirect_url = urljoin(url, redirect_location) - return self.urlopen( - method, redirect_url, body, headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) - - # Check if we should retry the HTTP response. - has_retry_after = bool(http_response.getheader('Retry-After')) - if retries.is_retry(method, http_response.status, has_retry_after): - retries = retries.increment( - method, url, response=http_response, _pool=self) - log.debug("Retry: %s", url) - retries.sleep(http_response) - return self.urlopen( - method, url, - body=body, headers=headers, - retries=retries, redirect=redirect, - timeout=timeout, **response_kw) - - return http_response - - def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): - - if is_prod_appengine(): - # Production GAE handles deflate encoding automatically, but does - # not remove the encoding header. 
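
The redirect branch in ``urlopen`` above is the subtle part: with
``urlfetch_retries`` enabled, URLFetch follows redirects itself and urllib3
fails fast once ``raise_on_redirect`` applies; with it disabled, urllib3
replays each hop through the recursive call, charging it against the retry
budget. A sketch of the manual-redirect configuration (host is a
placeholder)::

    from urllib3.contrib.appengine import AppEngineManager
    from urllib3.util.retry import Retry

    http = AppEngineManager(urlfetch_retries=False)
    # Each redirect hop consumes one unit of the total budget.
    r = http.urlopen('GET', 'http://example.com/old-path',
                     retries=Retry(total=5))
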
- content_encoding = urlfetch_resp.headers.get('content-encoding') - - if content_encoding == 'deflate': - del urlfetch_resp.headers['content-encoding'] - - transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') - # We have a full response's content, - # so let's make sure we don't report ourselves as chunked data. - if transfer_encoding == 'chunked': - encodings = transfer_encoding.split(",") - encodings.remove('chunked') - urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) - - return HTTPResponse( - # In order for decoding to work, we must present the content as - # a file-like object. - body=BytesIO(urlfetch_resp.content), - headers=urlfetch_resp.headers, - status=urlfetch_resp.status_code, - **response_kw - ) - - def _get_absolute_timeout(self, timeout): - if timeout is Timeout.DEFAULT_TIMEOUT: - return None # Defer to URLFetch's default. - if isinstance(timeout, Timeout): - if timeout._read is not None or timeout._connect is not None: - warnings.warn( - "URLFetch does not support granular timeout settings, " - "reverting to total or default URLFetch timeout.", - AppEnginePlatformWarning) - return timeout.total - return timeout - - def _get_retries(self, retries, redirect): - if not isinstance(retries, Retry): - retries = Retry.from_int( - retries, redirect=redirect, default=self.retries) - - if retries.connect or retries.read or retries.redirect: - warnings.warn( - "URLFetch only supports total retries and does not " - "recognize connect, read, or redirect retry parameters.", - AppEnginePlatformWarning) - - return retries - - -def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) - - -def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() - - -def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) - - -def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) - - -def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' diff --git a/pype/vendor/requests/packages/urllib3/contrib/ntlmpool.py b/pype/vendor/requests/packages/urllib3/contrib/ntlmpool.py deleted file mode 100644 index 642e99ed2d..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/ntlmpool.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -NTLM authenticating pool, contributed by erikcederstran - -Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 -""" -from __future__ import absolute_import - -from logging import getLogger -from ntlm import ntlm - -from .. import HTTPSConnectionPool -from ..packages.six.moves.http_client import HTTPSConnection - - -log = getLogger(__name__) - - -class NTLMConnectionPool(HTTPSConnectionPool): - """ - Implements an NTLM authentication version of an urllib3 connection pool - """ - - scheme = 'https' - - def __init__(self, user, pw, authurl, *args, **kwargs): - """ - authurl is a random URL on the server that is protected by NTLM. - user is the Windows user, probably in the DOMAIN\\username format. - pw is the password for the user. - """ - super(NTLMConnectionPool, self).__init__(*args, **kwargs) - self.authurl = authurl - self.rawuser = user - user_parts = user.split('\\', 1) - self.domain = user_parts[0].upper() - self.user = user_parts[1] - self.pw = pw - - def _new_conn(self): - # Performs the NTLM handshake that secures the connection. 
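
Constructing the pool mirrors ``HTTPSConnectionPool``: the NTLM-specific
positional arguments (``user``, ``pw``, ``authurl``) come first and the usual
pool arguments follow. A sketch, assuming a standalone urllib3 install, with
placeholder host and credentials::

    from urllib3.contrib.ntlmpool import NTLMConnectionPool

    pool = NTLMConnectionPool(
        'EXAMPLE\\jdoe',    # DOMAIN\username, split on the first backslash
        'secret',           # password
        '/protected/',      # any URL on the server that is NTLM-protected
        host='intranet.example.com',
        port=443,
    )
    r = pool.urlopen('GET', '/protected/report')
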
The socket - # must be kept open while requests are performed. - self.num_connections += 1 - log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', - self.num_connections, self.host, self.authurl) - - headers = {} - headers['Connection'] = 'Keep-Alive' - req_header = 'Authorization' - resp_header = 'www-authenticate' - - conn = HTTPSConnection(host=self.host, port=self.port) - - # Send negotiation message - headers[req_header] = ( - 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) - res = conn.getresponse() - reshdr = dict(res.getheaders()) - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', reshdr) - log.debug('Response data: %s [...]', res.read(100)) - - # Remove the reference to the socket, so that it can not be closed by - # the response object (we want to keep the socket open) - res.fp = None - - # Server should respond with a challenge message - auth_header_values = reshdr[resp_header].split(', ') - auth_header_value = None - for s in auth_header_values: - if s[:5] == 'NTLM ': - auth_header_value = s[5:] - if auth_header_value is None: - raise Exception('Unexpected %s response header: %s' % - (resp_header, reshdr[resp_header])) - - # Send authentication message - ServerChallenge, NegotiateFlags = \ - ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) - auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, - self.user, - self.domain, - self.pw, - NegotiateFlags) - headers[req_header] = 'NTLM %s' % auth_msg - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) - res = conn.getresponse() - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', dict(res.getheaders())) - log.debug('Response data: %s [...]', res.read()[:100]) - if res.status != 200: - if res.status == 401: - raise Exception('Server rejected request: wrong ' - 'username or password') - raise Exception('Wrong server response: %s %s' % - (res.status, res.reason)) - - res.fp = None - log.debug('Connection established') - return conn - - def urlopen(self, method, url, body=None, headers=None, retries=3, - redirect=True, assert_same_host=True): - if headers is None: - headers = {} - headers['Connection'] = 'Keep-Alive' - return super(NTLMConnectionPool, self).urlopen(method, url, body, - headers, retries, - redirect, - assert_same_host) diff --git a/pype/vendor/requests/packages/urllib3/contrib/pyopenssl.py b/pype/vendor/requests/packages/urllib3/contrib/pyopenssl.py deleted file mode 100644 index c5da28b7f7..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/pyopenssl.py +++ /dev/null @@ -1,452 +0,0 @@ -""" -SSL with SNI_-support for Python 2. Follow these instructions if you would -like to verify SSL certificates in Python 2. Note, the default libraries do -*not* do certificate checking; you need to do additional work to validate -certificates yourself. - -This needs the following packages installed: - -* pyOpenSSL (tested with 16.0.0) -* cryptography (minimum 1.3.4, from pyopenssl) -* idna (minimum 2.0, from cryptography) - -However, pyopenssl depends on cryptography, which depends on idna, so while we -use all three directly here we end up having relatively few packages required. 
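
The ``_new_conn`` implementation above is the classic three-message NTLM
exchange carried over one kept-alive connection. Distilled to just the
message construction (``ntlm`` is the same module the pool imports; the
challenge value is a placeholder for what the server returns)::

    from ntlm import ntlm

    negotiate = ntlm.create_NTLM_NEGOTIATE_MESSAGE('EXAMPLE\\jdoe')
    # ... send as 'Authorization: NTLM <negotiate>', then read the
    # 'www-authenticate: NTLM <challenge>' reply ...
    challenge_b64 = '...'  # placeholder: base64 challenge from the server
    server_challenge, flags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(challenge_b64)
    authenticate = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
        server_challenge, 'jdoe', 'EXAMPLE', 'secret', flags
    )
    # ... send as 'Authorization: NTLM <authenticate>' on the same socket ...
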
- -You can install them with the following command: - - pip install pyopenssl cryptography idna - -To activate certificate checking, call -:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code -before you begin making HTTP requests. This can be done in a ``sitecustomize`` -module, or at any other time before your application begins using ``urllib3``, -like this:: - - try: - import urllib3.contrib.pyopenssl - urllib3.contrib.pyopenssl.inject_into_urllib3() - except ImportError: - pass - -Now you can use :mod:`urllib3` as you normally would, and it will support SNI -when the required modules are installed. - -Activating this module also has the positive side effect of disabling SSL/TLS -compression in Python 2 (see `CRIME attack`_). - -If you want to configure the default list of supported cipher suites, you can -set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. - -.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication -.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) -""" -from __future__ import absolute_import - -import OpenSSL.SSL -from cryptography import x509 -from cryptography.hazmat.backends.openssl import backend as openssl_backend -from cryptography.hazmat.backends.openssl.x509 import _Certificate - -from socket import timeout, error as SocketError -from io import BytesIO - -try: # Platform-specific: Python 2 - from socket import _fileobject -except ImportError: # Platform-specific: Python 3 - _fileobject = None - from ..packages.backports.makefile import backport_makefile - -import logging -import ssl -import six -import sys - -from .. import util - -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] - -# SNI always works. -HAS_SNI = True - -# Map from urllib3 to PyOpenSSL compatible parameter-values. -_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, - ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, -} - -if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): - _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD - -if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): - _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD - -try: - _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) -except AttributeError: - pass - -_stdlib_to_openssl_verify = { - ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, - ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: - OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, -} -_openssl_to_stdlib_verify = dict( - (v, k) for k, v in _stdlib_to_openssl_verify.items() -) - -# OpenSSL will only write 16K at a time -SSL_WRITE_BLOCKSIZE = 16384 - -orig_util_HAS_SNI = util.HAS_SNI -orig_util_SSLContext = util.ssl_.SSLContext - - -log = logging.getLogger(__name__) - - -def inject_into_urllib3(): - 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' - - _validate_dependencies_met() - - util.ssl_.SSLContext = PyOpenSSLContext - util.HAS_SNI = HAS_SNI - util.ssl_.HAS_SNI = HAS_SNI - util.IS_PYOPENSSL = True - util.ssl_.IS_PYOPENSSL = True - - -def extract_from_urllib3(): - 'Undo monkey-patching by :func:`inject_into_urllib3`.' - - util.ssl_.SSLContext = orig_util_SSLContext - util.HAS_SNI = orig_util_HAS_SNI - util.ssl_.HAS_SNI = orig_util_HAS_SNI - util.IS_PYOPENSSL = False - util.ssl_.IS_PYOPENSSL = False - - -def _validate_dependencies_met(): - """ - Verifies that PyOpenSSL's package-level dependencies have been met. 
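
``inject_into_urllib3`` swaps the module-level ``SSLContext`` reference that
the rest of urllib3 consults, and ``extract_from_urllib3`` restores the saved
originals, so activation is fully reversible. A usage sketch against a
standalone urllib3 install::

    import urllib3
    import urllib3.contrib.pyopenssl

    urllib3.contrib.pyopenssl.inject_into_urllib3()
    try:
        http = urllib3.PoolManager()
        r = http.request('GET', 'https://example.com/')
    finally:
        urllib3.contrib.pyopenssl.extract_from_urllib3()
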
- Throws `ImportError` if they are not met. - """ - # Method added in `cryptography==1.1`; not available in older versions - from cryptography.x509.extensions import Extensions - if getattr(Extensions, "get_extension_for_class", None) is None: - raise ImportError("'cryptography' module missing required functionality. " - "Try upgrading to v1.3.4 or newer.") - - # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 - # attribute is only present on those versions. - from OpenSSL.crypto import X509 - x509 = X509() - if getattr(x509, "_x509", None) is None: - raise ImportError("'pyOpenSSL' module missing required functionality. " - "Try upgrading to v0.14 or newer.") - - -def _dnsname_to_stdlib(name): - """ - Converts a dNSName SubjectAlternativeName field to the form used by the - standard library on the given Python version. - - Cryptography produces a dNSName as a unicode string that was idna-decoded - from ASCII bytes. We need to idna-encode that string to get it back, and - then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib - uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). - """ - def idna_encode(name): - """ - Borrowed wholesale from the Python Cryptography Project. It turns out - that we can't just safely call `idna.encode`: it can explode for - wildcard names. This avoids that problem. - """ - import idna - - for prefix in [u'*.', u'.']: - if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) - return idna.encode(name) - - name = idna_encode(name) - if sys.version_info >= (3, 0): - name = name.decode('utf-8') - return name - - -def get_subj_alt_name(peer_cert): - """ - Given an PyOpenSSL certificate, provides all the subject alternative names. - """ - # Pass the cert to cryptography, which has much better APIs for this. - # This is technically using private APIs, but should work across all - # relevant versions until PyOpenSSL gets something proper for this. - cert = _Certificate(openssl_backend, peer_cert._x509) - - # We want to find the SAN extension. Ask Cryptography to locate it (it's - # faster than looping in Python) - try: - ext = cert.extensions.get_extension_for_class( - x509.SubjectAlternativeName - ).value - except x509.ExtensionNotFound: - # No such extension, return the empty list. - return [] - except (x509.DuplicateExtension, x509.UnsupportedExtension, - x509.UnsupportedGeneralNameType, UnicodeError) as e: - # A problem has been found with the quality of the certificate. Assume - # no SAN field is present. - log.warning( - "A problem was encountered with the certificate that prevented " - "urllib3 from finding the SubjectAlternativeName field. This can " - "affect certificate validation. The error was %s", - e, - ) - return [] - - # We want to return dNSName and iPAddress fields. We need to cast the IPs - # back to strings because the match_hostname function wants them as - # strings. - # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 - # decoded. This is pretty frustrating, but that's what the standard library - # does with certificates, and so we need to attempt to do the same. - names = [ - ('DNS', _dnsname_to_stdlib(name)) - for name in ext.get_values_for_type(x509.DNSName) - ] - names.extend( - ('IP Address', str(name)) - for name in ext.get_values_for_type(x509.IPAddress) - ) - - return names - - -class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class. 
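
``get_subj_alt_name`` above is what feeds hostname matching: it returns the
same ``(field, value)`` pairs the standard library exposes under
``getpeercert()['subjectAltName']``. A sketch of the expected shape, assuming
``cert`` is a PyOpenSSL ``X509`` taken from a live connection (the values
shown are illustrative)::

    from urllib3.contrib.pyopenssl import get_subj_alt_name

    sans = get_subj_alt_name(cert)
    # e.g. [('DNS', 'example.com'), ('DNS', 'www.example.com'),
    #       ('IP Address', '93.184.216.34')]
    dns_names = [value for field, value in sans if field == 'DNS']
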
- - Note: _makefile_refs, _drop() and _reuse() are needed for the garbage - collector of pypy. - ''' - - def __init__(self, connection, socket, suppress_ragged_eofs=True): - self.connection = connection - self.socket = socket - self.suppress_ragged_eofs = suppress_ragged_eofs - self._makefile_refs = 0 - self._closed = False - - def fileno(self): - return self.socket.fileno() - - # Copy-pasted from Python 3.5 source code - def _decref_socketios(self): - if self._makefile_refs > 0: - self._makefile_refs -= 1 - if self._closed: - self.close() - - def recv(self, *args, **kwargs): - try: - data = self.connection.recv(*args, **kwargs) - except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return b'' - else: - raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: - if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return b'' - else: - raise - except OpenSSL.SSL.WantReadError: - rd = util.wait_for_read(self.socket, self.socket.gettimeout()) - if not rd: - raise timeout('The read operation timed out') - else: - return self.recv(*args, **kwargs) - else: - return data - - def recv_into(self, *args, **kwargs): - try: - return self.connection.recv_into(*args, **kwargs) - except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return 0 - else: - raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: - if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return 0 - else: - raise - except OpenSSL.SSL.WantReadError: - rd = util.wait_for_read(self.socket, self.socket.gettimeout()) - if not rd: - raise timeout('The read operation timed out') - else: - return self.recv_into(*args, **kwargs) - - def settimeout(self, timeout): - return self.socket.settimeout(timeout) - - def _send_until_done(self, data): - while True: - try: - return self.connection.send(data) - except OpenSSL.SSL.WantWriteError: - wr = util.wait_for_write(self.socket, self.socket.gettimeout()) - if not wr: - raise timeout() - continue - except OpenSSL.SSL.SysCallError as e: - raise SocketError(str(e)) - - def sendall(self, data): - total_sent = 0 - while total_sent < len(data): - sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) - total_sent += sent - - def shutdown(self): - # FIXME rethrow compatible exceptions should we ever use this - self.connection.shutdown() - - def close(self): - if self._makefile_refs < 1: - try: - self._closed = True - return self.connection.close() - except OpenSSL.SSL.Error: - return - else: - self._makefile_refs -= 1 - - def getpeercert(self, binary_form=False): - x509 = self.connection.get_peer_certificate() - - if not x509: - return x509 - - if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) - - return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - ), - 'subjectAltName': get_subj_alt_name(x509) - } - - def _reuse(self): - self._makefile_refs += 1 - - def _drop(self): - if self._makefile_refs < 1: - self.close() - else: - self._makefile_refs -= 1 - - -if _fileobject: # Platform-specific: Python 2 - def makefile(self, mode, bufsize=-1): - self._makefile_refs += 1 - return _fileobject(self, mode, bufsize, close=True) -else: # Platform-specific: Python 3 - makefile = backport_makefile - -WrappedSocket.makefile = makefile - - -class PyOpenSSLContext(object): - """ - I am a wrapper class for the PyOpenSSL ``Context`` object. 
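
Every blocking read in the wrapper above follows the same recipe: try the
OpenSSL call, and on ``WantReadError`` wait until the underlying socket is
readable or the timeout lapses, then retry. The pattern, pulled out into a
standalone helper (a sketch; ``conn`` is an ``OpenSSL.SSL.Connection`` and
``sock`` its underlying socket)::

    import OpenSSL.SSL
    from socket import timeout
    from urllib3 import util

    def blocking_recv(conn, sock, bufsiz=8192):
        # Loop until OpenSSL hands back data or the socket times out.
        while True:
            try:
                return conn.recv(bufsiz)
            except OpenSSL.SSL.WantReadError:
                if not util.wait_for_read(sock, sock.gettimeout()):
                    raise timeout('The read operation timed out')
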
I am responsible - for translating the interface of the standard library ``SSLContext`` object - to calls into PyOpenSSL. - """ - def __init__(self, protocol): - self.protocol = _openssl_versions[protocol] - self._ctx = OpenSSL.SSL.Context(self.protocol) - self._options = 0 - self.check_hostname = False - - @property - def options(self): - return self._options - - @options.setter - def options(self, value): - self._options = value - self._ctx.set_options(value) - - @property - def verify_mode(self): - return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] - - @verify_mode.setter - def verify_mode(self, value): - self._ctx.set_verify( - _stdlib_to_openssl_verify[value], - _verify_callback - ) - - def set_default_verify_paths(self): - self._ctx.set_default_verify_paths() - - def set_ciphers(self, ciphers): - if isinstance(ciphers, six.text_type): - ciphers = ciphers.encode('utf-8') - self._ctx.set_cipher_list(ciphers) - - def load_verify_locations(self, cafile=None, capath=None, cadata=None): - if cafile is not None: - cafile = cafile.encode('utf-8') - if capath is not None: - capath = capath.encode('utf-8') - self._ctx.load_verify_locations(cafile, capath) - if cadata is not None: - self._ctx.load_verify_locations(BytesIO(cadata)) - - def load_cert_chain(self, certfile, keyfile=None, password=None): - self._ctx.use_certificate_file(certfile) - if password is not None: - self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) - self._ctx.use_privatekey_file(keyfile or certfile) - - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): - cnx = OpenSSL.SSL.Connection(self._ctx, sock) - - if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 - server_hostname = server_hostname.encode('utf-8') - - if server_hostname is not None: - cnx.set_tlsext_host_name(server_hostname) - - cnx.set_connect_state() - - while True: - try: - cnx.do_handshake() - except OpenSSL.SSL.WantReadError: - rd = util.wait_for_read(sock, sock.gettimeout()) - if not rd: - raise timeout('select timed out') - continue - except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake: %r' % e) - break - - return WrappedSocket(cnx, sock) - - -def _verify_callback(cnx, x509, err_no, err_depth, return_code): - return err_no == 0 diff --git a/pype/vendor/requests/packages/urllib3/contrib/securetransport.py b/pype/vendor/requests/packages/urllib3/contrib/securetransport.py deleted file mode 100644 index 72b23ab1c4..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/securetransport.py +++ /dev/null @@ -1,807 +0,0 @@ -""" -SecureTranport support for urllib3 via ctypes. - -This makes platform-native TLS available to urllib3 users on macOS without the -use of a compiler. This is an important feature because the Python Package -Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL -that ships with macOS is not capable of doing TLSv1.2. The only way to resolve -this is to give macOS users an alternative solution to the problem, and that -solution is to use SecureTransport. - -We use ctypes here because this solution must not require a compiler. That's -because pip is not allowed to require a compiler either. - -This is not intended to be a seriously long-term solution to this problem. -The hope is that PEP 543 will eventually solve this issue for us, at which -point we can retire this contrib module. 
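
Although normally installed behind the scenes by ``inject_into_urllib3``,
``PyOpenSSLContext`` above can also be driven directly, like an
``ssl.SSLContext``. A minimal sketch against a standalone urllib3 install
(CA bundle path and host are placeholders)::

    import socket
    import ssl
    from urllib3.contrib.pyopenssl import PyOpenSSLContext

    ctx = PyOpenSSLContext(ssl.PROTOCOL_SSLv23)
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.load_verify_locations(cafile='/etc/ssl/certs/ca-certificates.crt')

    sock = socket.create_connection(('example.com', 443))
    tls = ctx.wrap_socket(sock, server_hostname='example.com')
    print(tls.getpeercert()['subjectAltName'][:3])
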
But in the short term, we need to -solve the impending tire fire that is Python on Mac without this kind of -contrib module. So...here we are. - -To use this module, simply import and inject it:: - - import urllib3.contrib.securetransport - urllib3.contrib.securetransport.inject_into_urllib3() - -Happy TLSing! -""" -from __future__ import absolute_import - -import contextlib -import ctypes -import errno -import os.path -import shutil -import socket -import ssl -import threading -import weakref - -from .. import util -from ._securetransport.bindings import ( - Security, SecurityConst, CoreFoundation -) -from ._securetransport.low_level import ( - _assert_no_error, _cert_array_from_pem, _temporary_keychain, - _load_client_cert_chain -) - -try: # Platform-specific: Python 2 - from socket import _fileobject -except ImportError: # Platform-specific: Python 3 - _fileobject = None - from ..packages.backports.makefile import backport_makefile - -try: - memoryview(b'') -except NameError: - raise ImportError("SecureTransport only works on Pythons with memoryview") - -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] - -# SNI always works -HAS_SNI = True - -orig_util_HAS_SNI = util.HAS_SNI -orig_util_SSLContext = util.ssl_.SSLContext - -# This dictionary is used by the read callback to obtain a handle to the -# calling wrapped socket. This is a pretty silly approach, but for now it'll -# do. I feel like I should be able to smuggle a handle to the wrapped socket -# directly in the SSLConnectionRef, but for now this approach will work I -# guess. -# -# We need to lock around this structure for inserts, but we don't do it for -# reads/writes in the callbacks. The reasoning here goes as follows: -# -# 1. It is not possible to call into the callbacks before the dictionary is -# populated, so once in the callback the id must be in the dictionary. -# 2. The callbacks don't mutate the dictionary, they only read from it, and -# so cannot conflict with any of the insertions. -# -# This is good: if we had to lock in the callbacks we'd drastically slow down -# the performance of this code. -_connection_refs = weakref.WeakValueDictionary() -_connection_ref_lock = threading.Lock() - -# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over -# for no better reason than we need *a* limit, and this one is right there. -SSL_WRITE_BLOCKSIZE = 16384 - -# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to -# individual cipher suites. We need to do this becuase this is how -# SecureTransport wants them. 
-CIPHER_SUITES = [ - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, -] - -# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of -# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. -_protocol_to_min_max = { - ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), -} - -if hasattr(ssl, "PROTOCOL_SSLv2"): - _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( - SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 - ) -if hasattr(ssl, "PROTOCOL_SSLv3"): - _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( - SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 - ) -if hasattr(ssl, "PROTOCOL_TLSv1"): - _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( - SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 - ) -if hasattr(ssl, "PROTOCOL_TLSv1_1"): - _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( - SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 - ) -if hasattr(ssl, "PROTOCOL_TLSv1_2"): - _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( - SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 - ) -if hasattr(ssl, "PROTOCOL_TLS"): - _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] - - -def inject_into_urllib3(): - """ - Monkey-patch urllib3 with SecureTransport-backed SSL-support. - """ - util.ssl_.SSLContext = SecureTransportContext - util.HAS_SNI = HAS_SNI - util.ssl_.HAS_SNI = HAS_SNI - util.IS_SECURETRANSPORT = True - util.ssl_.IS_SECURETRANSPORT = True - - -def extract_from_urllib3(): - """ - Undo monkey-patching by :func:`inject_into_urllib3`. - """ - util.ssl_.SSLContext = orig_util_SSLContext - util.HAS_SNI = orig_util_HAS_SNI - util.ssl_.HAS_SNI = orig_util_HAS_SNI - util.IS_SECURETRANSPORT = False - util.ssl_.IS_SECURETRANSPORT = False - - -def _read_callback(connection_id, data_buffer, data_length_pointer): - """ - SecureTransport read callback. This is called by ST to request that data - be returned from the socket. 
- """ - wrapped_socket = None - try: - wrapped_socket = _connection_refs.get(connection_id) - if wrapped_socket is None: - return SecurityConst.errSSLInternal - base_socket = wrapped_socket.socket - - requested_length = data_length_pointer[0] - - timeout = wrapped_socket.gettimeout() - error = None - read_count = 0 - buffer = (ctypes.c_char * requested_length).from_address(data_buffer) - buffer_view = memoryview(buffer) - - try: - while read_count < requested_length: - if timeout is None or timeout >= 0: - readables = util.wait_for_read([base_socket], timeout) - if not readables: - raise socket.error(errno.EAGAIN, 'timed out') - - # We need to tell ctypes that we have a buffer that can be - # written to. Upsettingly, we do that like this: - chunk_size = base_socket.recv_into( - buffer_view[read_count:requested_length] - ) - read_count += chunk_size - if not chunk_size: - if not read_count: - return SecurityConst.errSSLClosedGraceful - break - except (socket.error) as e: - error = e.errno - - if error is not None and error != errno.EAGAIN: - if error == errno.ECONNRESET: - return SecurityConst.errSSLClosedAbort - raise - - data_length_pointer[0] = read_count - - if read_count != requested_length: - return SecurityConst.errSSLWouldBlock - - return 0 - except Exception as e: - if wrapped_socket is not None: - wrapped_socket._exception = e - return SecurityConst.errSSLInternal - - -def _write_callback(connection_id, data_buffer, data_length_pointer): - """ - SecureTransport write callback. This is called by ST to request that data - actually be sent on the network. - """ - wrapped_socket = None - try: - wrapped_socket = _connection_refs.get(connection_id) - if wrapped_socket is None: - return SecurityConst.errSSLInternal - base_socket = wrapped_socket.socket - - bytes_to_write = data_length_pointer[0] - data = ctypes.string_at(data_buffer, bytes_to_write) - - timeout = wrapped_socket.gettimeout() - error = None - sent = 0 - - try: - while sent < bytes_to_write: - if timeout is None or timeout >= 0: - writables = util.wait_for_write([base_socket], timeout) - if not writables: - raise socket.error(errno.EAGAIN, 'timed out') - chunk_sent = base_socket.send(data) - sent += chunk_sent - - # This has some needless copying here, but I'm not sure there's - # much value in optimising this data path. - data = data[chunk_sent:] - except (socket.error) as e: - error = e.errno - - if error is not None and error != errno.EAGAIN: - if error == errno.ECONNRESET: - return SecurityConst.errSSLClosedAbort - raise - - data_length_pointer[0] = sent - if sent != bytes_to_write: - return SecurityConst.errSSLWouldBlock - - return 0 - except Exception as e: - if wrapped_socket is not None: - wrapped_socket._exception = e - return SecurityConst.errSSLInternal - - -# We need to keep these two objects references alive: if they get GC'd while -# in use then SecureTransport could attempt to call a function that is in freed -# memory. That would be...uh...bad. Yeah, that's the word. Bad. -_read_callback_pointer = Security.SSLReadFunc(_read_callback) -_write_callback_pointer = Security.SSLWriteFunc(_write_callback) - - -class WrappedSocket(object): - """ - API-compatibility wrapper for Python's OpenSSL wrapped socket object. - - Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage - collector of PyPy. 
- """ - def __init__(self, socket): - self.socket = socket - self.context = None - self._makefile_refs = 0 - self._closed = False - self._exception = None - self._keychain = None - self._keychain_dir = None - self._client_cert_chain = None - - # We save off the previously-configured timeout and then set it to - # zero. This is done because we use select and friends to handle the - # timeouts, but if we leave the timeout set on the lower socket then - # Python will "kindly" call select on that socket again for us. Avoid - # that by forcing the timeout to zero. - self._timeout = self.socket.gettimeout() - self.socket.settimeout(0) - - @contextlib.contextmanager - def _raise_on_error(self): - """ - A context manager that can be used to wrap calls that do I/O from - SecureTransport. If any of the I/O callbacks hit an exception, this - context manager will correctly propagate the exception after the fact. - This avoids silently swallowing those exceptions. - - It also correctly forces the socket closed. - """ - self._exception = None - - # We explicitly don't catch around this yield because in the unlikely - # event that an exception was hit in the block we don't want to swallow - # it. - yield - if self._exception is not None: - exception, self._exception = self._exception, None - self.close() - raise exception - - def _set_ciphers(self): - """ - Sets up the allowed ciphers. By default this matches the set in - util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done - custom and doesn't allow changing at this time, mostly because parsing - OpenSSL cipher strings is going to be a freaking nightmare. - """ - ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES) - result = Security.SSLSetEnabledCiphers( - self.context, ciphers, len(CIPHER_SUITES) - ) - _assert_no_error(result) - - def _custom_validate(self, verify, trust_bundle): - """ - Called when we have set custom validation. We do this in two cases: - first, when cert validation is entirely disabled; and second, when - using a custom trust DB. - """ - # If we disabled cert validation, just say: cool. - if not verify: - return - - # We want data in memory, so load it up. - if os.path.isfile(trust_bundle): - with open(trust_bundle, 'rb') as f: - trust_bundle = f.read() - - cert_array = None - trust = Security.SecTrustRef() - - try: - # Get a CFArray that contains the certs we want. - cert_array = _cert_array_from_pem(trust_bundle) - - # Ok, now the hard part. We want to get the SecTrustRef that ST has - # created for this connection, shove our CAs into it, tell ST to - # ignore everything else it knows, and then ask if it can build a - # chain. This is a buuuunch of code. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) - _assert_no_error(result) - if not trust: - raise ssl.SSLError("Failed to copy trust reference") - - result = Security.SecTrustSetAnchorCertificates(trust, cert_array) - _assert_no_error(result) - - result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) - _assert_no_error(result) - - trust_result = Security.SecTrustResultType() - result = Security.SecTrustEvaluate( - trust, ctypes.byref(trust_result) - ) - _assert_no_error(result) - finally: - if trust: - CoreFoundation.CFRelease(trust) - - if cert_array is None: - CoreFoundation.CFRelease(cert_array) - - # Ok, now we can look at what the result was. 
- successes = ( - SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed - ) - if trust_result.value not in successes: - raise ssl.SSLError( - "certificate verify failed, error code: %d" % - trust_result.value - ) - - def handshake(self, - server_hostname, - verify, - trust_bundle, - min_version, - max_version, - client_cert, - client_key, - client_key_passphrase): - """ - Actually performs the TLS handshake. This is run automatically by - wrapped socket, and shouldn't be needed in user code. - """ - # First, we do the initial bits of connection setup. We need to create - # a context, set its I/O funcs, and set the connection reference. - self.context = Security.SSLCreateContext( - None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType - ) - result = Security.SSLSetIOFuncs( - self.context, _read_callback_pointer, _write_callback_pointer - ) - _assert_no_error(result) - - # Here we need to compute the handle to use. We do this by taking the - # id of self modulo 2**31 - 1. If this is already in the dictionary, we - # just keep incrementing by one until we find a free space. - with _connection_ref_lock: - handle = id(self) % 2147483647 - while handle in _connection_refs: - handle = (handle + 1) % 2147483647 - _connection_refs[handle] = self - - result = Security.SSLSetConnection(self.context, handle) - _assert_no_error(result) - - # If we have a server hostname, we should set that too. - if server_hostname: - if not isinstance(server_hostname, bytes): - server_hostname = server_hostname.encode('utf-8') - - result = Security.SSLSetPeerDomainName( - self.context, server_hostname, len(server_hostname) - ) - _assert_no_error(result) - - # Setup the ciphers. - self._set_ciphers() - - # Set the minimum and maximum TLS versions. - result = Security.SSLSetProtocolVersionMin(self.context, min_version) - _assert_no_error(result) - result = Security.SSLSetProtocolVersionMax(self.context, max_version) - _assert_no_error(result) - - # If there's a trust DB, we need to use it. We do that by telling - # SecureTransport to break on server auth. We also do that if we don't - # want to validate the certs at all: we just won't actually do any - # authing in that case. - if not verify or trust_bundle is not None: - result = Security.SSLSetSessionOption( - self.context, - SecurityConst.kSSLSessionOptionBreakOnServerAuth, - True - ) - _assert_no_error(result) - - # If there's a client cert, we need to use it. - if client_cert: - self._keychain, self._keychain_dir = _temporary_keychain() - self._client_cert_chain = _load_client_cert_chain( - self._keychain, client_cert, client_key - ) - result = Security.SSLSetCertificate( - self.context, self._client_cert_chain - ) - _assert_no_error(result) - - while True: - with self._raise_on_error(): - result = Security.SSLHandshake(self.context) - - if result == SecurityConst.errSSLWouldBlock: - raise socket.timeout("handshake timed out") - elif result == SecurityConst.errSSLServerAuthCompleted: - self._custom_validate(verify, trust_bundle) - continue - else: - _assert_no_error(result) - break - - def fileno(self): - return self.socket.fileno() - - # Copy-pasted from Python 3.5 source code - def _decref_socketios(self): - if self._makefile_refs > 0: - self._makefile_refs -= 1 - if self._closed: - self.close() - - def recv(self, bufsiz): - buffer = ctypes.create_string_buffer(bufsiz) - bytes_read = self.recv_into(buffer, bufsiz) - data = buffer[:bytes_read] - return data - - def recv_into(self, buffer, nbytes=None): - # Read short on EOF. 
- if self._closed: - return 0 - - if nbytes is None: - nbytes = len(buffer) - - buffer = (ctypes.c_char * nbytes).from_buffer(buffer) - processed_bytes = ctypes.c_size_t(0) - - with self._raise_on_error(): - result = Security.SSLRead( - self.context, buffer, nbytes, ctypes.byref(processed_bytes) - ) - - # There are some result codes that we want to treat as "not always - # errors". Specifically, those are errSSLWouldBlock, - # errSSLClosedGraceful, and errSSLClosedNoNotify. - if (result == SecurityConst.errSSLWouldBlock): - # If we didn't process any bytes, then this was just a time out. - # However, we can get errSSLWouldBlock in situations when we *did* - # read some data, and in those cases we should just read "short" - # and return. - if processed_bytes.value == 0: - # Timed out, no data read. - raise socket.timeout("recv timed out") - elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): - # The remote peer has closed this connection. We should do so as - # well. Note that we don't actually return here because in - # principle this could actually be fired along with return data. - # It's unlikely though. - self.close() - else: - _assert_no_error(result) - - # Ok, we read and probably succeeded. We should return whatever data - # was actually read. - return processed_bytes.value - - def settimeout(self, timeout): - self._timeout = timeout - - def gettimeout(self): - return self._timeout - - def send(self, data): - processed_bytes = ctypes.c_size_t(0) - - with self._raise_on_error(): - result = Security.SSLWrite( - self.context, data, len(data), ctypes.byref(processed_bytes) - ) - - if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: - # Timed out - raise socket.timeout("send timed out") - else: - _assert_no_error(result) - - # We sent, and probably succeeded. Tell them how much we sent. - return processed_bytes.value - - def sendall(self, data): - total_sent = 0 - while total_sent < len(data): - sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) - total_sent += sent - - def shutdown(self): - with self._raise_on_error(): - Security.SSLClose(self.context) - - def close(self): - # TODO: should I do clean shutdown here? Do I have to? - if self._makefile_refs < 1: - self._closed = True - if self.context: - CoreFoundation.CFRelease(self.context) - self.context = None - if self._client_cert_chain: - CoreFoundation.CFRelease(self._client_cert_chain) - self._client_cert_chain = None - if self._keychain: - Security.SecKeychainDelete(self._keychain) - CoreFoundation.CFRelease(self._keychain) - shutil.rmtree(self._keychain_dir) - self._keychain = self._keychain_dir = None - return self.socket.close() - else: - self._makefile_refs -= 1 - - def getpeercert(self, binary_form=False): - # Urgh, annoying. - # - # Here's how we do this: - # - # 1. Call SSLCopyPeerTrust to get hold of the trust object for this - # connection. - # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. - # 3. To get the CN, call SecCertificateCopyCommonName and process that - # string so that it's of the appropriate type. - # 4. To get the SAN, we need to do something a bit more complex: - # a. Call SecCertificateCopyValues to get the data, requesting - # kSecOIDSubjectAltName. - # b. Mess about with this dictionary to try to get the SANs out. - # - # This is gross. Really gross. It's going to be a few hundred LoC extra - # just to repeat something that SecureTransport can *already do*. 
So my - # operating assumption at this time is that what we want to do is - # instead to just flag to urllib3 that it shouldn't do its own hostname - # validation when using SecureTransport. - if not binary_form: - raise ValueError( - "SecureTransport only supports dumping binary certs" - ) - trust = Security.SecTrustRef() - certdata = None - der_bytes = None - - try: - # Grab the trust store. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) - _assert_no_error(result) - if not trust: - # Probably we haven't done the handshake yet. No biggie. - return None - - cert_count = Security.SecTrustGetCertificateCount(trust) - if not cert_count: - # Also a case that might happen if we haven't handshaked. - # Handshook? Handshaken? - return None - - leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) - assert leaf - - # Ok, now we want the DER bytes. - certdata = Security.SecCertificateCopyData(leaf) - assert certdata - - data_length = CoreFoundation.CFDataGetLength(certdata) - data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) - der_bytes = ctypes.string_at(data_buffer, data_length) - finally: - if certdata: - CoreFoundation.CFRelease(certdata) - if trust: - CoreFoundation.CFRelease(trust) - - return der_bytes - - def _reuse(self): - self._makefile_refs += 1 - - def _drop(self): - if self._makefile_refs < 1: - self.close() - else: - self._makefile_refs -= 1 - - -if _fileobject: # Platform-specific: Python 2 - def makefile(self, mode, bufsize=-1): - self._makefile_refs += 1 - return _fileobject(self, mode, bufsize, close=True) -else: # Platform-specific: Python 3 - def makefile(self, mode="r", buffering=None, *args, **kwargs): - # We disable buffering with SecureTransport because it conflicts with - # the buffering that ST does internally (see issue #1153 for more). - buffering = 0 - return backport_makefile(self, mode, buffering, *args, **kwargs) - -WrappedSocket.makefile = makefile - - -class SecureTransportContext(object): - """ - I am a wrapper class for the SecureTransport library, to translate the - interface of the standard library ``SSLContext`` object to calls into - SecureTransport. - """ - def __init__(self, protocol): - self._min_version, self._max_version = _protocol_to_min_max[protocol] - self._options = 0 - self._verify = False - self._trust_bundle = None - self._client_cert = None - self._client_key = None - self._client_key_passphrase = None - - @property - def check_hostname(self): - """ - SecureTransport cannot have its hostname checking disabled. For more, - see the comment on getpeercert() in this file. - """ - return True - - @check_hostname.setter - def check_hostname(self, value): - """ - SecureTransport cannot have its hostname checking disabled. For more, - see the comment on getpeercert() in this file. - """ - pass - - @property - def options(self): - # TODO: Well, crap. - # - # So this is the bit of the code that is the most likely to cause us - # trouble. Essentially we need to enumerate all of the SSL options that - # users might want to use and try to see if we can sensibly translate - # them, or whether we should just ignore them. - return self._options - - @options.setter - def options(self, value): - # TODO: Update in line with above. 
- self._options = value - - @property - def verify_mode(self): - return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE - - @verify_mode.setter - def verify_mode(self, value): - self._verify = True if value == ssl.CERT_REQUIRED else False - - def set_default_verify_paths(self): - # So, this has to do something a bit weird. Specifically, what it does - # is nothing. - # - # This means that, if we had previously had load_verify_locations - # called, this does not undo that. We need to do that because it turns - # out that the rest of the urllib3 code will attempt to load the - # default verify paths if it hasn't been told about any paths, even if - # the context itself was sometime earlier. We resolve that by just - # ignoring it. - pass - - def load_default_certs(self): - return self.set_default_verify_paths() - - def set_ciphers(self, ciphers): - # For now, we just require the default cipher string. - if ciphers != util.ssl_.DEFAULT_CIPHERS: - raise ValueError( - "SecureTransport doesn't support custom cipher strings" - ) - - def load_verify_locations(self, cafile=None, capath=None, cadata=None): - # OK, we only really support cadata and cafile. - if capath is not None: - raise ValueError( - "SecureTransport does not support cert directories" - ) - - self._trust_bundle = cafile or cadata - - def load_cert_chain(self, certfile, keyfile=None, password=None): - self._client_cert = certfile - self._client_key = keyfile - self._client_cert_passphrase = password - - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): - # So, what do we do here? Firstly, we assert some properties. This is a - # stripped down shim, so there is some functionality we don't support. - # See PEP 543 for the real deal. - assert not server_side - assert do_handshake_on_connect - assert suppress_ragged_eofs - - # Ok, we're good to go. Now we want to create the wrapped socket object - # and store it in the appropriate place. - wrapped_socket = WrappedSocket(sock) - - # Now we can handshake - wrapped_socket.handshake( - server_hostname, self._verify, self._trust_bundle, - self._min_version, self._max_version, self._client_cert, - self._client_key, self._client_key_passphrase - ) - return wrapped_socket diff --git a/pype/vendor/requests/packages/urllib3/contrib/socks.py b/pype/vendor/requests/packages/urllib3/contrib/socks.py deleted file mode 100644 index 39e92fde19..0000000000 --- a/pype/vendor/requests/packages/urllib3/contrib/socks.py +++ /dev/null @@ -1,188 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module contains provisional support for SOCKS proxies from within -urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and -SOCKS5. To enable its functionality, either install PySocks or install this -module with the ``socks`` extra. - -The SOCKS implementation supports the full range of urllib3 features. It also -supports the following SOCKS features: - -- SOCKS4 -- SOCKS4a -- SOCKS5 -- Usernames and passwords for the SOCKS proxy - -Known Limitations: - -- Currently PySocks does not support contacting remote websites via literal - IPv6 addresses. Any such connection attempt will fail. You must use a domain - name. -- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any - such connection attempt will fail. 
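
Like its PyOpenSSL counterpart, ``SecureTransportContext`` above can be
exercised directly; note that only the binary form of the peer certificate is
retrievable (see the ``getpeercert`` comment earlier). A sketch against a
standalone urllib3 install, with placeholder host and CA bundle path::

    import socket
    import ssl
    from urllib3.contrib.securetransport import SecureTransportContext

    ctx = SecureTransportContext(ssl.PROTOCOL_SSLv23)
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.load_verify_locations(cafile='/usr/local/etc/openssl/cert.pem')

    sock = socket.create_connection(('example.com', 443))
    tls = ctx.wrap_socket(sock, server_hostname='example.com')
    der = tls.getpeercert(binary_form=True)   # DER bytes, the only form offered
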
-""" -from __future__ import absolute_import - -try: - import socks -except ImportError: - import warnings - from ..exceptions import DependencyWarning - - warnings.warn(( - 'SOCKS support in urllib3 requires the installation of optional ' - 'dependencies: specifically, PySocks. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' - ), - DependencyWarning - ) - raise - -from socket import error as SocketError, timeout as SocketTimeout - -from ..connection import ( - HTTPConnection, HTTPSConnection -) -from ..connectionpool import ( - HTTPConnectionPool, HTTPSConnectionPool -) -from ..exceptions import ConnectTimeoutError, NewConnectionError -from ..poolmanager import PoolManager -from ..util.url import parse_url - -try: - import ssl -except ImportError: - ssl = None - - -class SOCKSConnection(HTTPConnection): - """ - A plain-text HTTP connection that connects via a SOCKS proxy. - """ - def __init__(self, *args, **kwargs): - self._socks_options = kwargs.pop('_socks_options') - super(SOCKSConnection, self).__init__(*args, **kwargs) - - def _new_conn(self): - """ - Establish a new connection via the SOCKS proxy. - """ - extra_kw = {} - if self.source_address: - extra_kw['source_address'] = self.source_address - - if self.socket_options: - extra_kw['socket_options'] = self.socket_options - - try: - conn = socks.create_connection( - (self.host, self.port), - proxy_type=self._socks_options['socks_version'], - proxy_addr=self._socks_options['proxy_host'], - proxy_port=self._socks_options['proxy_port'], - proxy_username=self._socks_options['username'], - proxy_password=self._socks_options['password'], - proxy_rdns=self._socks_options['rdns'], - timeout=self.timeout, - **extra_kw - ) - - except SocketTimeout as e: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - except socks.ProxyError as e: - # This is fragile as hell, but it seems to be the only way to raise - # useful errors here. - if e.socket_err: - error = e.socket_err - if isinstance(error, SocketTimeout): - raise ConnectTimeoutError( - self, - "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout) - ) - else: - raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % error - ) - else: - raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % e - ) - - except SocketError as e: # Defensive: PySocks should catch all these. - raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) - - return conn - - -# We don't need to duplicate the Verified/Unverified distinction from -# urllib3/connection.py here because the HTTPSConnection will already have been -# correctly set to either the Verified or Unverified form by that module. This -# means the SOCKSHTTPSConnection will automatically be the correct type. -class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): - pass - - -class SOCKSHTTPConnectionPool(HTTPConnectionPool): - ConnectionCls = SOCKSConnection - - -class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): - ConnectionCls = SOCKSHTTPSConnection - - -class SOCKSProxyManager(PoolManager): - """ - A version of the urllib3 ProxyManager that routes connections via the - defined SOCKS proxy. 
- """ - pool_classes_by_scheme = { - 'http': SOCKSHTTPConnectionPool, - 'https': SOCKSHTTPSConnectionPool, - } - - def __init__(self, proxy_url, username=None, password=None, - num_pools=10, headers=None, **connection_pool_kw): - parsed = parse_url(proxy_url) - - if parsed.scheme == 'socks5': - socks_version = socks.PROXY_TYPE_SOCKS5 - rdns = False - elif parsed.scheme == 'socks5h': - socks_version = socks.PROXY_TYPE_SOCKS5 - rdns = True - elif parsed.scheme == 'socks4': - socks_version = socks.PROXY_TYPE_SOCKS4 - rdns = False - elif parsed.scheme == 'socks4a': - socks_version = socks.PROXY_TYPE_SOCKS4 - rdns = True - else: - raise ValueError( - "Unable to determine SOCKS version from %s" % proxy_url - ) - - self.proxy_url = proxy_url - - socks_options = { - 'socks_version': socks_version, - 'proxy_host': parsed.host, - 'proxy_port': parsed.port, - 'username': username, - 'password': password, - 'rdns': rdns - } - connection_pool_kw['_socks_options'] = socks_options - - super(SOCKSProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw - ) - - self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/pype/vendor/requests/packages/urllib3/exceptions.py b/pype/vendor/requests/packages/urllib3/exceptions.py deleted file mode 100644 index 6c4be58106..0000000000 --- a/pype/vendor/requests/packages/urllib3/exceptions.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import absolute_import -from .packages.six.moves.http_client import ( - IncompleteRead as httplib_IncompleteRead -) -# Base Exceptions - - -class HTTPError(Exception): - "Base exception used by this module." - pass - - -class HTTPWarning(Warning): - "Base warning used by this module." - pass - - -class PoolError(HTTPError): - "Base exception for errors caused within a pool." - def __init__(self, pool, message): - self.pool = pool - HTTPError.__init__(self, "%s: %s" % (pool, message)) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, None) - - -class RequestError(PoolError): - "Base exception for PoolErrors that have associated URLs." - def __init__(self, pool, url, message): - self.url = url - PoolError.__init__(self, pool, message) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, self.url, None) - - -class SSLError(HTTPError): - "Raised when SSL certificate fails in an HTTPS connection." - pass - - -class ProxyError(HTTPError): - "Raised when the connection to a proxy fails." - pass - - -class DecodeError(HTTPError): - "Raised when automatic decoding based on Content-Type fails." - pass - - -class ProtocolError(HTTPError): - "Raised when something unexpected happens mid-request/response." - pass - - -#: Renamed to ProtocolError but aliased for backwards compatibility. -ConnectionError = ProtocolError - - -# Leaf Exceptions - -class MaxRetryError(RequestError): - """Raised when the maximum number of retries is exceeded. - - :param pool: The connection pool - :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` - :param string url: The requested Url - :param exceptions.Exception reason: The underlying error - - """ - - def __init__(self, pool, url, reason=None): - self.reason = reason - - message = "Max retries exceeded with url: %s (Caused by %r)" % ( - url, reason) - - RequestError.__init__(self, pool, url, message) - - -class HostChangedError(RequestError): - "Raised when an existing pool gets a request for a foreign host." 
- - def __init__(self, pool, url, retries=3): - message = "Tried to open a foreign host with url: %s" % url - RequestError.__init__(self, pool, url, message) - self.retries = retries - - -class TimeoutStateError(HTTPError): - """ Raised when passing an invalid state to a timeout """ - pass - - -class TimeoutError(HTTPError): - """ Raised when a socket timeout error occurs. - - Catching this error will catch both :exc:`ReadTimeoutErrors - <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. - """ - pass - - -class ReadTimeoutError(TimeoutError, RequestError): - "Raised when a socket timeout occurs while receiving data from a server" - pass - - -# This timeout error does not have a URL attached and needs to inherit from the -# base HTTPError -class ConnectTimeoutError(TimeoutError): - "Raised when a socket timeout occurs while connecting to a server" - pass - - -class NewConnectionError(ConnectTimeoutError, PoolError): - "Raised when we fail to establish a new connection. Usually ECONNREFUSED." - pass - - -class EmptyPoolError(PoolError): - "Raised when a pool runs out of connections and no more are allowed." - pass - - -class ClosedPoolError(PoolError): - "Raised when a request enters a pool after the pool has been closed." - pass - - -class LocationValueError(ValueError, HTTPError): - "Raised when there is something wrong with a given URL input." - pass - - -class LocationParseError(LocationValueError): - "Raised when get_host or similar fails to parse the URL input." - - def __init__(self, location): - message = "Failed to parse: %s" % location - HTTPError.__init__(self, message) - - self.location = location - - -class ResponseError(HTTPError): - "Used as a container for an error reason supplied in a MaxRetryError." - GENERIC_ERROR = 'too many error responses' - SPECIFIC_ERROR = 'too many {status_code} error responses' - - -class SecurityWarning(HTTPWarning): - "Warned when performing security reducing actions" - pass - - -class SubjectAltNameWarning(SecurityWarning): - "Warned when connecting to a host with a certificate missing a SAN." - pass - - -class InsecureRequestWarning(SecurityWarning): - "Warned when making an unverified HTTPS request." - pass - - -class SystemTimeWarning(SecurityWarning): - "Warned when system time is suspected to be wrong" - pass - - -class InsecurePlatformWarning(SecurityWarning): - "Warned when certain SSL configuration is not available on a platform." - pass - - -class SNIMissingWarning(HTTPWarning): - "Warned when making an HTTPS request without SNI available." - pass - - -class DependencyWarning(HTTPWarning): - """ - Warned when an attempt is made to import a module with missing optional - dependencies. - """ - pass - - -class ResponseNotChunked(ProtocolError, ValueError): - "Response needs to be chunked in order to read it as chunks." - pass - - -class BodyNotHttplibCompatible(HTTPError): - """ - Body should be httplib.HTTPResponse like (have an fp attribute which - returns raw chunks) for read_chunked(). - """ - pass - - -class IncompleteRead(HTTPError, httplib_IncompleteRead): - """ - Response length doesn't match expected Content-Length - - Subclass of http_client.IncompleteRead to allow int value - for `partial` to avoid creating large objects on streamed - reads. - """ - def __init__(self, partial, expected): - super(IncompleteRead, self).__init__(partial, expected) - - def __repr__(self): - return ('IncompleteRead(%i bytes read, ' - '%i more expected)' % (self.partial, self.expected)) - - -class InvalidHeader(HTTPError): - "The header provided was somehow invalid."
- pass - - -class ProxySchemeUnknown(AssertionError, ValueError): - "ProxyManager does not support the supplied scheme" - # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. - - def __init__(self, scheme): - message = "Not supported proxy scheme %s" % scheme - super(ProxySchemeUnknown, self).__init__(message) - - -class HeaderParsingError(HTTPError): - "Raised by assert_header_parsing, but we convert it to a log.warning statement." - def __init__(self, defects, unparsed_data): - message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) - super(HeaderParsingError, self).__init__(message) - - -class UnrewindableBodyError(HTTPError): - "urllib3 encountered an error when trying to rewind a body" - pass diff --git a/pype/vendor/requests/packages/urllib3/fields.py b/pype/vendor/requests/packages/urllib3/fields.py deleted file mode 100644 index 19b0ae0c88..0000000000 --- a/pype/vendor/requests/packages/urllib3/fields.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import absolute_import -import email.utils -import mimetypes - -from .packages import six - - -def guess_content_type(filename, default='application/octet-stream'): - """ - Guess the "Content-Type" of a file. - - :param filename: - The filename to guess the "Content-Type" of using :mod:`mimetypes`. - :param default: - If no "Content-Type" can be guessed, default to `default`. - """ - if filename: - return mimetypes.guess_type(filename)[0] or default - return default - - -def format_header_param(name, value): - """ - Helper function to format and quote a single header parameter. - - Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2231, as - suggested by RFC 2388 Section 4.4. - - :param name: - The name of the parameter, a string expected to be ASCII only. - :param value: - The value of the parameter, provided as a unicode string. - """ - if not any(ch in value for ch in '"\\\r\n'): - result = '%s="%s"' % (name, value) - try: - result.encode('ascii') - except (UnicodeEncodeError, UnicodeDecodeError): - pass - else: - return result - if not six.PY3 and isinstance(value, six.text_type): # Python 2: - value = value.encode('utf-8') - value = email.utils.encode_rfc2231(value, 'utf-8') - value = '%s*=%s' % (name, value) - return value - - -class RequestField(object): - """ - A data container for request body parameters. - - :param name: - The name of this request field. - :param data: - The data/value body. - :param filename: - An optional filename of the request field. - :param headers: - An optional dict-like object of headers to initially use for the field. - """ - def __init__(self, name, data, filename=None, headers=None): - self._name = name - self._filename = filename - self.data = data - self.headers = {} - if headers: - self.headers = dict(headers) - - @classmethod - def from_tuples(cls, fieldname, value): - """ - A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. - - Supports constructing :class:`~urllib3.fields.RequestField` from - parameter of key/value strings AND key/filetuple. A filetuple is a - (filename, data, MIME type) tuple where the MIME type is optional. - For example:: - - 'foo': 'bar', - 'fakefile': ('foofile.txt', 'contents of foofile'), - 'realfile': ('barfile.txt', open('realfile').read()), - 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), - 'nonamefile': 'contents of nonamefile field', - - Field names and filenames must be unicode. 
- """ - if isinstance(value, tuple): - if len(value) == 3: - filename, data, content_type = value - else: - filename, data = value - content_type = guess_content_type(filename) - else: - filename = None - content_type = None - data = value - - request_param = cls(fieldname, data, filename=filename) - request_param.make_multipart(content_type=content_type) - - return request_param - - def _render_part(self, name, value): - """ - Overridable helper function to format a single header parameter. - - :param name: - The name of the parameter, a string expected to be ASCII only. - :param value: - The value of the parameter, provided as a unicode string. - """ - return format_header_param(name, value) - - def _render_parts(self, header_parts): - """ - Helper function to format and quote a single header. - - Useful for single headers that are composed of multiple items. E.g., - 'Content-Disposition' fields. - - :param header_parts: - A sequence of (k, v) typles or a :class:`dict` of (k, v) to format - as `k1="v1"; k2="v2"; ...`. - """ - parts = [] - iterable = header_parts - if isinstance(header_parts, dict): - iterable = header_parts.items() - - for name, value in iterable: - if value is not None: - parts.append(self._render_part(name, value)) - - return '; '.join(parts) - - def render_headers(self): - """ - Renders the headers for this request field. - """ - lines = [] - - sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] - for sort_key in sort_keys: - if self.headers.get(sort_key, False): - lines.append('%s: %s' % (sort_key, self.headers[sort_key])) - - for header_name, header_value in self.headers.items(): - if header_name not in sort_keys: - if header_value: - lines.append('%s: %s' % (header_name, header_value)) - - lines.append('\r\n') - return '\r\n'.join(lines) - - def make_multipart(self, content_disposition=None, content_type=None, - content_location=None): - """ - Makes this request field into a multipart request field. - - This method overrides "Content-Disposition", "Content-Type" and - "Content-Location" headers to the request parameter. - - :param content_type: - The 'Content-Type' of the request body. - :param content_location: - The 'Content-Location' of the request body. - - """ - self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join([ - '', self._render_parts( - (('name', self._name), ('filename', self._filename)) - ) - ]) - self.headers['Content-Type'] = content_type - self.headers['Content-Location'] = content_location diff --git a/pype/vendor/requests/packages/urllib3/filepost.py b/pype/vendor/requests/packages/urllib3/filepost.py deleted file mode 100644 index cd11cee464..0000000000 --- a/pype/vendor/requests/packages/urllib3/filepost.py +++ /dev/null @@ -1,94 +0,0 @@ -from __future__ import absolute_import -import codecs - -from uuid import uuid4 -from io import BytesIO - -from .packages import six -from .packages.six import b -from .fields import RequestField - -writer = codecs.lookup('utf-8')[3] - - -def choose_boundary(): - """ - Our embarrassingly-simple replacement for mimetools.choose_boundary. - """ - return uuid4().hex - - -def iter_field_objects(fields): - """ - Iterate over fields. - - Supports list of (k, v) tuples and dicts, and lists of - :class:`~urllib3.fields.RequestField`. 
- - """ - if isinstance(fields, dict): - i = six.iteritems(fields) - else: - i = iter(fields) - - for field in i: - if isinstance(field, RequestField): - yield field - else: - yield RequestField.from_tuples(*field) - - -def iter_fields(fields): - """ - .. deprecated:: 1.6 - - Iterate over fields. - - The addition of :class:`~urllib3.fields.RequestField` makes this function - obsolete. Instead, use :func:`iter_field_objects`, which returns - :class:`~urllib3.fields.RequestField` objects. - - Supports list of (k, v) tuples and dicts. - """ - if isinstance(fields, dict): - return ((k, v) for k, v in six.iteritems(fields)) - - return ((k, v) for k, v in fields) - - -def encode_multipart_formdata(fields, boundary=None): - """ - Encode a dictionary of ``fields`` using the multipart/form-data MIME format. - - :param fields: - Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). - - :param boundary: - If not specified, then a random boundary will be generated using - :func:`mimetools.choose_boundary`. - """ - body = BytesIO() - if boundary is None: - boundary = choose_boundary() - - for field in iter_field_objects(fields): - body.write(b('--%s\r\n' % (boundary))) - - writer(body).write(field.render_headers()) - data = field.data - - if isinstance(data, int): - data = str(data) # Backwards compatibility - - if isinstance(data, six.text_type): - writer(body).write(data) - else: - body.write(data) - - body.write(b'\r\n') - - body.write(b('--%s--\r\n' % (boundary))) - - content_type = str('multipart/form-data; boundary=%s' % boundary) - - return body.getvalue(), content_type diff --git a/pype/vendor/requests/packages/urllib3/packages/__init__.py b/pype/vendor/requests/packages/urllib3/packages/__init__.py deleted file mode 100644 index 170e974c15..0000000000 --- a/pype/vendor/requests/packages/urllib3/packages/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import absolute_import - -from . import ssl_match_hostname - -__all__ = ('ssl_match_hostname', ) diff --git a/pype/vendor/requests/packages/urllib3/packages/backports/__init__.py b/pype/vendor/requests/packages/urllib3/packages/backports/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/requests/packages/urllib3/packages/backports/makefile.py b/pype/vendor/requests/packages/urllib3/packages/backports/makefile.py deleted file mode 100644 index 75b80dcf84..0000000000 --- a/pype/vendor/requests/packages/urllib3/packages/backports/makefile.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -backports.makefile -~~~~~~~~~~~~~~~~~~ - -Backports the Python 3 ``socket.makefile`` method for use with anything that -wants to create a "fake" socket object. -""" -import io - -from socket import SocketIO - - -def backport_makefile(self, mode="r", buffering=None, encoding=None, - errors=None, newline=None): - """ - Backport of ``socket.makefile`` from Python 3.5. 
- """ - if not set(mode) <= set(["r", "w", "b"]): - raise ValueError( - "invalid mode %r (only r, w, b allowed)" % (mode,) - ) - writing = "w" in mode - reading = "r" in mode or not writing - assert reading or writing - binary = "b" in mode - rawmode = "" - if reading: - rawmode += "r" - if writing: - rawmode += "w" - raw = SocketIO(self, rawmode) - self._makefile_refs += 1 - if buffering is None: - buffering = -1 - if buffering < 0: - buffering = io.DEFAULT_BUFFER_SIZE - if buffering == 0: - if not binary: - raise ValueError("unbuffered streams must be binary") - return raw - if reading and writing: - buffer = io.BufferedRWPair(raw, raw, buffering) - elif reading: - buffer = io.BufferedReader(raw, buffering) - else: - assert writing - buffer = io.BufferedWriter(raw, buffering) - if binary: - return buffer - text = io.TextIOWrapper(buffer, encoding, errors, newline) - text.mode = mode - return text diff --git a/pype/vendor/requests/packages/urllib3/packages/ordered_dict.py b/pype/vendor/requests/packages/urllib3/packages/ordered_dict.py deleted file mode 100644 index 4479363cc4..0000000000 --- a/pype/vendor/requests/packages/urllib3/packages/ordered_dict.py +++ /dev/null @@ -1,259 +0,0 @@ -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. -# Copyright 2009 Raymond Hettinger, released under the MIT License. -# http://code.activestate.com/recipes/576693/ -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. - - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. 
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) diff --git a/pype/vendor/requests/packages/urllib3/packages/six.py b/pype/vendor/requests/packages/urllib3/packages/six.py deleted file mode 100644 index 190c0239cd..0000000000 --- a/pype/vendor/requests/packages/urllib3/packages/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson <benjamin@python.org>" -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader.
It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - 
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = 
_importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack 
- del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. 
-__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/pype/vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/pype/vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py deleted file mode 100644 index d6594eb264..0000000000 --- a/pype/vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys - -try: - # Our match_hostname function is the same as 3.5's, so we only want to - # import the match_hostname function if it's at least that good. - if sys.version_info < (3, 5): - raise ImportError("Fallback to vendored code") - - from ssl import CertificateError, match_hostname -except ImportError: - try: - # Backport of the function from a pypi module - from backports.ssl_match_hostname import CertificateError, match_hostname - except ImportError: - # Our vendored copy - from ._implementation import CertificateError, match_hostname - -# Not needed, but documenting what we provide. -__all__ = ('CertificateError', 'match_hostname') diff --git a/pype/vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/pype/vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py deleted file mode 100644 index 1fd42f38ae..0000000000 --- a/pype/vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py +++ /dev/null @@ -1,157 +0,0 @@ -"""The match_hostname() function from Python 3.3.3, essential when using SSL.""" - -# Note: This file is under the PSF license as the code comes from the python -# stdlib. http://docs.python.org/3/license.html - -import re -import sys - -# ipaddress has been backported to 2.6+ in pypi. If it is installed on the -# system, use it to handle IPAddress ServerAltnames (this was added in -# python-3.5) otherwise only do DNS matching. This allows -# backports.ssl_match_hostname to continue to be used all the way back to -# python-2.4. -try: - import ipaddress -except ImportError: - ipaddress = None - -__version__ = '3.5.0.1' - - -class CertificateError(ValueError): - pass - - -def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - # Ported from python3-syntax: - # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') - leftmost = parts[0] - remainder = parts[1:] - - wildcards = leftmost.count('*') - if wildcards > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. 
A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. - # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - -def _to_unicode(obj): - if isinstance(obj, str) and sys.version_info < (3,): - obj = unicode(obj, encoding='ascii', errors='strict') - return obj - -def _ipaddress_match(ipname, host_ip): - """Exact matching of IP addresses. - - RFC 6125 explicitly doesn't define an algorithm for this - (section 1.7.2 - "Out of Scope"). - """ - # OpenSSL may add a trailing newline to a subjectAltName's IP address - # Divergence from upstream: ipaddress can't handle byte str - ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) - return ip == host_ip - - -def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") - try: - # Divergence from upstream: ipaddress can't handle byte str - host_ip = ipaddress.ip_address(_to_unicode(hostname)) - except ValueError: - # Not an IP address (common case) - host_ip = None - except UnicodeError: - # Divergence from upstream: Have to deal with ipaddress not taking - # byte strings. addresses should be all ascii, so we consider it not - # an ipaddress in this case - host_ip = None - except AttributeError: - # Divergence from upstream: Make ipaddress library optional - if ipaddress is None: - host_ip = None - else: - raise - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if host_ip is None and _dnsname_match(value, hostname): - return - dnsnames.append(value) - elif key == 'IP Address': - if host_ip is not None and _ipaddress_match(value, host_ip): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. 
- if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") diff --git a/pype/vendor/requests/packages/urllib3/poolmanager.py b/pype/vendor/requests/packages/urllib3/poolmanager.py deleted file mode 100644 index 4ae91744db..0000000000 --- a/pype/vendor/requests/packages/urllib3/poolmanager.py +++ /dev/null @@ -1,440 +0,0 @@ -from __future__ import absolute_import -import collections -import functools -import logging - -from ._collections import RecentlyUsedContainer -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool -from .connectionpool import port_by_scheme -from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown -from .packages.six.moves.urllib.parse import urljoin -from .request import RequestMethods -from .util.url import parse_url -from .util.retry import Retry - - -__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] - - -log = logging.getLogger(__name__) - -SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', - 'ssl_version', 'ca_cert_dir', 'ssl_context') - -# All known keyword arguments that could be provided to the pool manager, its -# pools, or the underlying connections. This is used to construct a pool key. -_key_fields = ( - 'key_scheme', # str - 'key_host', # str - 'key_port', # int - 'key_timeout', # int or float or Timeout - 'key_retries', # int or Retry - 'key_strict', # bool - 'key_block', # bool - 'key_source_address', # str - 'key_key_file', # str - 'key_cert_file', # str - 'key_cert_reqs', # str - 'key_ca_certs', # str - 'key_ssl_version', # str - 'key_ca_cert_dir', # str - 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext - 'key_maxsize', # int - 'key_headers', # dict - 'key__proxy', # parsed proxy url - 'key__proxy_headers', # dict - 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples - 'key__socks_options', # dict - 'key_assert_hostname', # bool or string - 'key_assert_fingerprint', # str -) - -#: The namedtuple class used to construct keys for the connection pool. -#: All custom key schemes should include the fields in this key at a minimum. -PoolKey = collections.namedtuple('PoolKey', _key_fields) - - -def _default_key_normalizer(key_class, request_context): - """ - Create a pool key out of a request context dictionary. - - According to RFC 3986, both the scheme and host are case-insensitive. - Therefore, this function normalizes both before constructing the pool - key for an HTTPS request. If you wish to change this behaviour, provide - alternate callables to ``key_fn_by_scheme``. - - :param key_class: - The class to use when constructing the key. This should be a namedtuple - with the ``scheme`` and ``host`` keys at a minimum. - :type key_class: namedtuple - :param request_context: - A dictionary-like object that contain the context for a request. - :type request_context: dict - - :return: A namedtuple that can be used as a connection pool key. 
- :rtype: PoolKey - """ - # Since we mutate the dictionary, make a copy first - context = request_context.copy() - context['scheme'] = context['scheme'].lower() - context['host'] = context['host'].lower() - - # These are both dictionaries and need to be transformed into frozensets - for key in ('headers', '_proxy_headers', '_socks_options'): - if key in context and context[key] is not None: - context[key] = frozenset(context[key].items()) - - # The socket_options key may be a list and needs to be transformed into a - # tuple. - socket_opts = context.get('socket_options') - if socket_opts is not None: - context['socket_options'] = tuple(socket_opts) - - # Map the kwargs to the names in the namedtuple - this is necessary since - # namedtuples can't have fields starting with '_'. - for key in list(context.keys()): - context['key_' + key] = context.pop(key) - - # Default to ``None`` for keys missing from the context - for field in key_class._fields: - if field not in context: - context[field] = None - - return key_class(**context) - - -#: A dictionary that maps a scheme to a callable that creates a pool key. -#: This can be used to alter the way pool keys are constructed, if desired. -#: Each PoolManager makes a copy of this dictionary so they can be configured -#: globally here, or individually on the instance. -key_fn_by_scheme = { - 'http': functools.partial(_default_key_normalizer, PoolKey), - 'https': functools.partial(_default_key_normalizer, PoolKey), -} - -pool_classes_by_scheme = { - 'http': HTTPConnectionPool, - 'https': HTTPSConnectionPool, -} - - -class PoolManager(RequestMethods): - """ - Allows for arbitrary requests while transparently keeping track of - necessary connection pools for you. - - :param num_pools: - Number of connection pools to cache before discarding the least - recently used pool. - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - - :param \\**connection_pool_kw: - Additional parameters are used to create fresh - :class:`urllib3.connectionpool.ConnectionPool` instances. - - Example:: - - >>> manager = PoolManager(num_pools=2) - >>> r = manager.request('GET', 'http://google.com/') - >>> r = manager.request('GET', 'http://google.com/mail') - >>> r = manager.request('GET', 'http://yahoo.com/') - >>> len(manager.pools) - 2 - - """ - - proxy = None - - def __init__(self, num_pools=10, headers=None, **connection_pool_kw): - RequestMethods.__init__(self, headers) - self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, - dispose_func=lambda p: p.close()) - - # Locally set the pool classes and keys so other PoolManagers can - # override them. - self.pool_classes_by_scheme = pool_classes_by_scheme - self.key_fn_by_scheme = key_fn_by_scheme.copy() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.clear() - # Return False to re-raise any potential exceptions - return False - - def _new_pool(self, scheme, host, port, request_context=None): - """ - Create a new :class:`ConnectionPool` based on host, port, scheme, and - any additional pool keyword arguments. - - If ``request_context`` is provided, it is provided as keyword arguments - to the pool class used. This method is used to actually create the - connection pools handed out by :meth:`connection_from_url` and - companion methods. It is intended to be overridden for customization. 
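A short sketch of what the removed _default_key_normalizer() produced via key_fn_by_scheme; the context values are illustrative:

    from pype.vendor.requests.packages.urllib3.poolmanager import key_fn_by_scheme

    ctx = {'scheme': 'HTTPS', 'host': 'Example.COM', 'port': 443,
           'headers': {'x-a': '1'}}
    key = key_fn_by_scheme['https'](ctx)
    assert key.key_scheme == 'https'          # scheme lowercased
    assert key.key_host == 'example.com'      # host lowercased
    assert key.key_headers == frozenset({('x-a', '1')})  # dict made hashable
    assert key.key_retries is None            # missing fields default to None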
- """ - pool_cls = self.pool_classes_by_scheme[scheme] - if request_context is None: - request_context = self.connection_pool_kw.copy() - - # Although the context has everything necessary to create the pool, - # this function has historically only used the scheme, host, and port - # in the positional args. When an API change is acceptable these can - # be removed. - for key in ('scheme', 'host', 'port'): - request_context.pop(key, None) - - if scheme == 'http': - for kw in SSL_KEYWORDS: - request_context.pop(kw, None) - - return pool_cls(host, port, **request_context) - - def clear(self): - """ - Empty our store of pools and direct them all to close. - - This will not affect in-flight connections, but they will not be - re-used after completion. - """ - self.pools.clear() - - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): - """ - Get a :class:`ConnectionPool` based on the host, port, and scheme. - - If ``port`` isn't given, it will be derived from the ``scheme`` using - ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is - provided, it is merged with the instance's ``connection_pool_kw`` - variable and used to create the new connection pool, if one is - needed. - """ - - if not host: - raise LocationValueError("No host specified.") - - request_context = self._merge_pool_kwargs(pool_kwargs) - request_context['scheme'] = scheme or 'http' - if not port: - port = port_by_scheme.get(request_context['scheme'].lower(), 80) - request_context['port'] = port - request_context['host'] = host - - return self.connection_from_context(request_context) - - def connection_from_context(self, request_context): - """ - Get a :class:`ConnectionPool` based on the request context. - - ``request_context`` must at least contain the ``scheme`` key and its - value must be a key in ``key_fn_by_scheme`` instance variable. - """ - scheme = request_context['scheme'].lower() - pool_key_constructor = self.key_fn_by_scheme[scheme] - pool_key = pool_key_constructor(request_context) - - return self.connection_from_pool_key(pool_key, request_context=request_context) - - def connection_from_pool_key(self, pool_key, request_context=None): - """ - Get a :class:`ConnectionPool` based on the provided pool key. - - ``pool_key`` should be a namedtuple that only contains immutable - objects. At a minimum it must have the ``scheme``, ``host``, and - ``port`` fields. - """ - with self.pools.lock: - # If the scheme, host, or port doesn't match existing open - # connections, open a new ConnectionPool. - pool = self.pools.get(pool_key) - if pool: - return pool - - # Make a fresh ConnectionPool of the desired type - scheme = request_context['scheme'] - host = request_context['host'] - port = request_context['port'] - pool = self._new_pool(scheme, host, port, request_context=request_context) - self.pools[pool_key] = pool - - return pool - - def connection_from_url(self, url, pool_kwargs=None): - """ - Similar to :func:`urllib3.connectionpool.connection_from_url`. - - If ``pool_kwargs`` is not provided and a new pool needs to be - constructed, ``self.connection_pool_kw`` is used to initialize - the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` - is provided, it is used instead. Note that if a new pool does not - need to be created for the request, the provided ``pool_kwargs`` are - not used. 
- """ - u = parse_url(url) - return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, - pool_kwargs=pool_kwargs) - - def _merge_pool_kwargs(self, override): - """ - Merge a dictionary of override values for self.connection_pool_kw. - - This does not modify self.connection_pool_kw and returns a new dict. - Any keys in the override dictionary with a value of ``None`` are - removed from the merged dictionary. - """ - base_pool_kwargs = self.connection_pool_kw.copy() - if override: - for key, value in override.items(): - if value is None: - try: - del base_pool_kwargs[key] - except KeyError: - pass - else: - base_pool_kwargs[key] = value - return base_pool_kwargs - - def urlopen(self, method, url, redirect=True, **kw): - """ - Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` - with custom cross-host redirect logic and only sends the request-uri - portion of the ``url``. - - The given ``url`` parameter must be absolute, such that an appropriate - :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. - """ - u = parse_url(url) - conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) - - kw['assert_same_host'] = False - kw['redirect'] = False - if 'headers' not in kw: - kw['headers'] = self.headers - - if self.proxy is not None and u.scheme == "http": - response = conn.urlopen(method, url, **kw) - else: - response = conn.urlopen(method, u.request_uri, **kw) - - redirect_location = redirect and response.get_redirect_location() - if not redirect_location: - return response - - # Support relative URLs for redirecting. - redirect_location = urljoin(url, redirect_location) - - # RFC 7231, Section 6.4.4 - if response.status == 303: - method = 'GET' - - retries = kw.get('retries') - if not isinstance(retries, Retry): - retries = Retry.from_int(retries, redirect=redirect) - - try: - retries = retries.increment(method, url, response=response, _pool=conn) - except MaxRetryError: - if retries.raise_on_redirect: - raise - return response - - kw['retries'] = retries - kw['redirect'] = redirect - - log.info("Redirecting %s -> %s", url, redirect_location) - return self.urlopen(method, redirect_location, **kw) - - -class ProxyManager(PoolManager): - """ - Behaves just like :class:`PoolManager`, but sends all requests through - the defined proxy, using the CONNECT method for HTTPS URLs. - - :param proxy_url: - The URL of the proxy to be used. - - :param proxy_headers: - A dictionary contaning headers that will be sent to the proxy. In case - of HTTP they are being sent with each request, while in the - HTTPS/CONNECT case they are sent only once. Could be used for proxy - authentication. 
- - Example: - >>> proxy = urllib3.ProxyManager('http://localhost:3128/') - >>> r1 = proxy.request('GET', 'http://google.com/') - >>> r2 = proxy.request('GET', 'http://httpbin.org/') - >>> len(proxy.pools) - 1 - >>> r3 = proxy.request('GET', 'https://httpbin.org/') - >>> r4 = proxy.request('GET', 'https://twitter.com/') - >>> len(proxy.pools) - 3 - - """ - - def __init__(self, proxy_url, num_pools=10, headers=None, - proxy_headers=None, **connection_pool_kw): - - if isinstance(proxy_url, HTTPConnectionPool): - proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, - proxy_url.port) - proxy = parse_url(proxy_url) - if not proxy.port: - port = port_by_scheme.get(proxy.scheme, 80) - proxy = proxy._replace(port=port) - - if proxy.scheme not in ("http", "https"): - raise ProxySchemeUnknown(proxy.scheme) - - self.proxy = proxy - self.proxy_headers = proxy_headers or {} - - connection_pool_kw['_proxy'] = self.proxy - connection_pool_kw['_proxy_headers'] = self.proxy_headers - - super(ProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw) - - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): - if scheme == "https": - return super(ProxyManager, self).connection_from_host( - host, port, scheme, pool_kwargs=pool_kwargs) - - return super(ProxyManager, self).connection_from_host( - self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) - - def _set_proxy_headers(self, url, headers=None): - """ - Sets headers needed by proxies: specifically, the Accept and Host - headers. Only sets headers not provided by the user. - """ - headers_ = {'Accept': '*/*'} - - netloc = parse_url(url).netloc - if netloc: - headers_['Host'] = netloc - - if headers: - headers_.update(headers) - return headers_ - - def urlopen(self, method, url, redirect=True, **kw): - "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." - u = parse_url(url) - - if u.scheme == "http": - # For proxied HTTPS requests, httplib sets the necessary headers - # on the CONNECT to the proxy. For HTTP, we'll definitely - # need to set 'Host' at the very least. - headers = kw.get('headers', self.headers) - kw['headers'] = self._set_proxy_headers(url, headers) - - return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) - - -def proxy_from_url(url, **kw): - return ProxyManager(proxy_url=url, **kw) diff --git a/pype/vendor/requests/packages/urllib3/request.py b/pype/vendor/requests/packages/urllib3/request.py deleted file mode 100644 index c0fddff042..0000000000 --- a/pype/vendor/requests/packages/urllib3/request.py +++ /dev/null @@ -1,148 +0,0 @@ -from __future__ import absolute_import - -from .filepost import encode_multipart_formdata -from .packages.six.moves.urllib.parse import urlencode - - -__all__ = ['RequestMethods'] - - -class RequestMethods(object): - """ - Convenience mixin for classes who implement a :meth:`urlopen` method, such - as :class:`~urllib3.connectionpool.HTTPConnectionPool` and - :class:`~urllib3.poolmanager.PoolManager`. - - Provides behavior for making common types of HTTP request methods and - decides which type of request field encoding to use. - - Specifically, - - :meth:`.request_encode_url` is for sending requests whose fields are - encoded in the URL (such as GET, HEAD, DELETE). - - :meth:`.request_encode_body` is for sending requests whose fields are - encoded in the *body* of the request using multipart or www-form-urlencoded - (such as for POST, PUT, PATCH). 
- - :meth:`.request` is for making any kind of request, it will look up the - appropriate encoding format and use one of the above two methods to make - the request. - - Initializer parameters: - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - """ - - _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) - - def __init__(self, headers=None): - self.headers = headers or {} - - def urlopen(self, method, url, body=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **kw): # Abstract - raise NotImplemented("Classes extending RequestMethods must implement " - "their own ``urlopen`` method.") - - def request(self, method, url, fields=None, headers=None, **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the appropriate encoding of - ``fields`` based on the ``method`` used. - - This is a convenience method that requires the least amount of manual - effort. It can be used in most situations, while still having the - option to drop down to more specific methods when necessary, such as - :meth:`request_encode_url`, :meth:`request_encode_body`, - or even the lowest level :meth:`urlopen`. - """ - method = method.upper() - - if method in self._encode_url_methods: - return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) - else: - return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) - - def request_encode_url(self, method, url, fields=None, headers=None, - **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the ``fields`` encoded in - the url. This is useful for request methods like GET, HEAD, DELETE, etc. - """ - if headers is None: - headers = self.headers - - extra_kw = {'headers': headers} - extra_kw.update(urlopen_kw) - - if fields: - url += '?' + urlencode(fields) - - return self.urlopen(method, url, **extra_kw) - - def request_encode_body(self, method, url, fields=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the ``fields`` encoded in - the body. This is useful for request methods like POST, PUT, PATCH, etc. - - When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode - the payload with the appropriate content type. Otherwise - :meth:`urllib.urlencode` is used with the - 'application/x-www-form-urlencoded' content type. - - Multipart encoding must be used when posting files, and it's reasonably - safe to use it in other times too. However, it may break request - signing, such as with OAuth. - - Supports an optional ``fields`` parameter of key/value strings AND - key/filetuple. A filetuple is a (filename, data, MIME type) tuple where - the MIME type is optional. For example:: - - fields = { - 'foo': 'bar', - 'fakefile': ('foofile.txt', 'contents of foofile'), - 'realfile': ('barfile.txt', open('realfile').read()), - 'typedfile': ('bazfile.bin', open('bazfile').read(), - 'image/jpeg'), - 'nonamefile': 'contents of nonamefile field', - } - - When uploading a file, providing a filename (the first parameter of the - tuple) is optional but recommended to best mimick behavior of browsers. - - Note that if ``headers`` are supplied, the 'Content-Type' header will - be overwritten because it depends on the dynamic random boundary string - which is used to compose the body of the request. 
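A sketch of the verb-based dispatch described above; host and fields are placeholders:

    from pype.vendor.requests.packages.urllib3.poolmanager import PoolManager

    http = PoolManager()
    # GET/HEAD/DELETE/OPTIONS -> fields are urlencoded into the URL:
    http.request('GET', 'http://example.com/search', fields={'q': 'pype'})
    # other verbs -> fields become the body, multipart by default:
    http.request('POST', 'http://example.com/upload',
                 fields={'report': ('report.txt', 'file contents')})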
The random boundary - string can be explicitly set with the ``multipart_boundary`` parameter. - """ - if headers is None: - headers = self.headers - - extra_kw = {'headers': {}} - - if fields: - if 'body' in urlopen_kw: - raise TypeError( - "request got values for both 'fields' and 'body', can only specify one.") - - if encode_multipart: - body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) - else: - body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' - - extra_kw['body'] = body - extra_kw['headers'] = {'Content-Type': content_type} - - extra_kw['headers'].update(headers) - extra_kw.update(urlopen_kw) - - return self.urlopen(method, url, **extra_kw) diff --git a/pype/vendor/requests/packages/urllib3/response.py b/pype/vendor/requests/packages/urllib3/response.py deleted file mode 100644 index 408d9996a5..0000000000 --- a/pype/vendor/requests/packages/urllib3/response.py +++ /dev/null @@ -1,622 +0,0 @@ -from __future__ import absolute_import -from contextlib import contextmanager -import zlib -import io -import logging -from socket import timeout as SocketTimeout -from socket import error as SocketError - -from ._collections import HTTPHeaderDict -from .exceptions import ( - BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, - ResponseNotChunked, IncompleteRead, InvalidHeader -) -from .packages.six import string_types as basestring, binary_type, PY3 -from .packages.six.moves import http_client as httplib -from .connection import HTTPException, BaseSSLError -from .util.response import is_fp_closed, is_response_to_head - -log = logging.getLogger(__name__) - - -class DeflateDecoder(object): - - def __init__(self): - self._first_try = True - self._data = binary_type() - self._obj = zlib.decompressobj() - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - if not data: - return data - - if not self._first_try: - return self._obj.decompress(data) - - self._data += data - try: - decompressed = self._obj.decompress(data) - if decompressed: - self._first_try = False - self._data = None - return decompressed - except zlib.error: - self._first_try = False - self._obj = zlib.decompressobj(-zlib.MAX_WBITS) - try: - return self.decompress(self._data) - finally: - self._data = None - - -class GzipDecoder(object): - - def __init__(self): - self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - if not data: - return data - return self._obj.decompress(data) - - -def _get_decoder(mode): - if mode == 'gzip': - return GzipDecoder() - - return DeflateDecoder() - - -class HTTPResponse(io.IOBase): - """ - HTTP Response container. - - Backwards-compatible to httplib's HTTPResponse but the response ``body`` is - loaded and decoded on-demand when the ``data`` property is accessed. This - class is also compatible with the Python standard library's :mod:`io` - module, and can hence be treated as a readable object in the context of that - framework. - - Extra parameters for behaviour not present in httplib.HTTPResponse: - - :param preload_content: - If True, the response's body will be preloaded during construction. - - :param decode_content: - If True, attempts to decode specific content-encoding's based on headers - (like 'gzip' and 'deflate') will be skipped and raw data will be used - instead. 
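A minimal sketch of the on-demand body loading described above, using an in-memory file object in place of a real socket:

    import io
    from pype.vendor.requests.packages.urllib3.response import HTTPResponse

    resp = HTTPResponse(body=io.BytesIO(b'hello'), status=200,
                        headers={'Content-Length': '5'},
                        preload_content=False)
    assert resp.data == b'hello'   # first access reads and caches the body
    assert resp.data == b'hello'   # second access hits the cache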
- - :param original_response: - When this HTTPResponse wrapper is generated from an httplib.HTTPResponse - object, it's convenient to include the original for debug purposes. It's - otherwise unused. - - :param retries: - The retries contains the last :class:`~urllib3.util.retry.Retry` that - was used during the request. - - :param enforce_content_length: - Enforce content length checking. Body returned by server must match - value of Content-Length header, if present. Otherwise, raise error. - """ - - CONTENT_DECODERS = ['gzip', 'deflate'] - REDIRECT_STATUSES = [301, 302, 303, 307, 308] - - def __init__(self, body='', headers=None, status=0, version=0, reason=None, - strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None, - retries=None, enforce_content_length=False, request_method=None): - - if isinstance(headers, HTTPHeaderDict): - self.headers = headers - else: - self.headers = HTTPHeaderDict(headers) - self.status = status - self.version = version - self.reason = reason - self.strict = strict - self.decode_content = decode_content - self.retries = retries - self.enforce_content_length = enforce_content_length - - self._decoder = None - self._body = None - self._fp = None - self._original_response = original_response - self._fp_bytes_read = 0 - - if body and isinstance(body, (basestring, binary_type)): - self._body = body - - self._pool = pool - self._connection = connection - - if hasattr(body, 'read'): - self._fp = body - - # Are we using the chunked-style of transfer encoding? - self.chunked = False - self.chunk_left = None - tr_enc = self.headers.get('transfer-encoding', '').lower() - # Don't incur the penalty of creating a list and then discarding it - encodings = (enc.strip() for enc in tr_enc.split(",")) - if "chunked" in encodings: - self.chunked = True - - # Determine length of response - self.length_remaining = self._init_length(request_method) - - # If requested, preload the body. - if preload_content and not self._body: - self._body = self.read(decode_content=decode_content) - - def get_redirect_location(self): - """ - Should we redirect and where to? - - :returns: Truthy redirect location string if we got a redirect status - code and valid location. ``None`` if redirect status and no - location. ``False`` if not a redirect status code. - """ - if self.status in self.REDIRECT_STATUSES: - return self.headers.get('location') - - return False - - def release_conn(self): - if not self._pool or not self._connection: - return - - self._pool._put_conn(self._connection) - self._connection = None - - @property - def data(self): - # For backwords-compat with earlier urllib3 0.4 and earlier. - if self._body: - return self._body - - if self._fp: - return self.read(cache_content=True) - - @property - def connection(self): - return self._connection - - def tell(self): - """ - Obtain the number of bytes pulled over the wire so far. May differ from - the amount of content returned by :meth:``HTTPResponse.read`` if bytes - are encoded on the wire (e.g, compressed). - """ - return self._fp_bytes_read - - def _init_length(self, request_method): - """ - Set initial length value for Response content if available. - """ - length = self.headers.get('content-length') - - if length is not None and self.chunked: - # This Response will fail with an IncompleteRead if it can't be - # received as chunked. This method falls back to attempt reading - # the response before raising an exception. 
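A sketch of the three-way contract of get_redirect_location() above:

    from pype.vendor.requests.packages.urllib3.response import HTTPResponse

    moved = HTTPResponse(status=301, preload_content=False,
                         headers={'Location': 'https://example.com/'})
    ok = HTTPResponse(status=200, preload_content=False)
    bare = HTTPResponse(status=302, preload_content=False)
    assert moved.get_redirect_location() == 'https://example.com/'
    assert ok.get_redirect_location() is False   # not a redirect status
    assert bare.get_redirect_location() is None  # redirect, but no Location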
- log.warning("Received response with both Content-Length and " - "Transfer-Encoding set. This is expressly forbidden " - "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " - "attempting to process response as Transfer-Encoding: " - "chunked.") - return None - - elif length is not None: - try: - # RFC 7230 section 3.3.2 specifies multiple content lengths can - # be sent in a single Content-Length header - # (e.g. Content-Length: 42, 42). This line ensures the values - # are all valid ints and that as long as the `set` length is 1, - # all values are the same. Otherwise, the header is invalid. - lengths = set([int(val) for val in length.split(',')]) - if len(lengths) > 1: - raise InvalidHeader("Content-Length contained multiple " - "unmatching values (%s)" % length) - length = lengths.pop() - except ValueError: - length = None - else: - if length < 0: - length = None - - # Convert status to int for comparison - # In some cases, httplib returns a status of "_UNKNOWN" - try: - status = int(self.status) - except ValueError: - status = 0 - - # Check for responses that shouldn't include a body - if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': - length = 0 - - return length - - def _init_decoder(self): - """ - Set-up the _decoder attribute if necessary. - """ - # Note: content-encoding value should be case-insensitive, per RFC 7230 - # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None and content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) - - def _decode(self, data, decode_content, flush_decoder): - """ - Decode the data passed in and potentially flush the decoder. - """ - try: - if decode_content and self._decoder: - data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - content_encoding = self.headers.get('content-encoding', '').lower() - raise DecodeError( - "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, e) - - if flush_decoder and decode_content: - data += self._flush_decoder() - - return data - - def _flush_decoder(self): - """ - Flushes the decoder. Should only be called if the decoder is actually - being used. - """ - if self._decoder: - buf = self._decoder.decompress(b'') - return buf + self._decoder.flush() - - return b'' - - @contextmanager - def _error_catcher(self): - """ - Catch low-level python exceptions, instead re-raising urllib3 - variants, so that low-level exceptions are not leaked in the - high-level api. - - On exit, release the connection back to the pool. - """ - clean_exit = False - - try: - try: - yield - - except SocketTimeout: - # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but - # there is yet no clean way to get at it from this context. - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except BaseSSLError as e: - # FIXME: Is there a better way to differentiate between SSLErrors? - if 'read operation timed out' not in str(e): # Defensive: - # This shouldn't happen but just in case we're missing an edge - # case, let's avoid swallowing SSL errors. - raise - - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except (HTTPException, SocketError) as e: - # This includes IncompleteRead. - raise ProtocolError('Connection broken: %r' % e, e) - - # If no exception is thrown, we should avoid cleaning up - # unnecessarily. 
- clean_exit = True - finally: - # If we didn't terminate cleanly, we need to throw away our - # connection. - if not clean_exit: - # The response may not be closed but we're not going to use it - # anymore so close it now to ensure that the connection is - # released back to the pool. - if self._original_response: - self._original_response.close() - - # Closing the response may not actually be sufficient to close - # everything, so if we have a hold of the connection close that - # too. - if self._connection: - self._connection.close() - - # If we hold the original response but it's closed now, we should - # return the connection back to the pool. - if self._original_response and self._original_response.isclosed(): - self.release_conn() - - def read(self, amt=None, decode_content=None, cache_content=False): - """ - Similar to :meth:`httplib.HTTPResponse.read`, but with two additional - parameters: ``decode_content`` and ``cache_content``. - - :param amt: - How much of the content to read. If specified, caching is skipped - because it doesn't make sense to cache partial content as the full - response. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - - :param cache_content: - If True, will save the returned data such that the same result is - returned despite of the state of the underlying file object. This - is useful if you want the ``.data`` property to continue working - after having ``.read()`` the file object. (Overridden if ``amt`` is - set.) - """ - self._init_decoder() - if decode_content is None: - decode_content = self.decode_content - - if self._fp is None: - return - - flush_decoder = False - data = None - - with self._error_catcher(): - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() - flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do - # not properly close the connection in all cases. There is - # no harm in redundantly calling close. - self._fp.close() - flush_decoder = True - if self.enforce_content_length and self.length_remaining not in (0, None): - # This is an edge case that httplib failed to cover due - # to concerns of backward compatibility. We're - # addressing it here to make sure IncompleteRead is - # raised during streaming, so all calls with incorrect - # Content-Length are caught. - raise IncompleteRead(self._fp_bytes_read, self.length_remaining) - - if data: - self._fp_bytes_read += len(data) - if self.length_remaining is not None: - self.length_remaining -= len(data) - - data = self._decode(data, decode_content, flush_decoder) - - if cache_content: - self._body = data - - return data - - def stream(self, amt=2**16, decode_content=None): - """ - A generator wrapper for the read() method. A call will block until - ``amt`` bytes have been read from the connection or until the - connection is closed. - - :param amt: - How much of the content to read. The generator will return up to - much data per iteration, but may return less. This is particularly - likely when using compressed data. However, the empty string will - never be returned. 
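A sketch of stream() as documented above: it yields until the underlying file object is exhausted and never yields an empty chunk:

    import io
    from pype.vendor.requests.packages.urllib3.response import HTTPResponse

    resp = HTTPResponse(body=io.BytesIO(b'x' * 70000), status=200,
                        preload_content=False)
    chunks = list(resp.stream(2 ** 16))
    assert [len(c) for c in chunks] == [65536, 4464]
    assert all(chunks)   # empty reads are filtered out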
- - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - if self.chunked and self.supports_chunked_reads(): - for line in self.read_chunked(amt, decode_content=decode_content): - yield line - else: - while not is_fp_closed(self._fp): - data = self.read(amt=amt, decode_content=decode_content) - - if data: - yield data - - @classmethod - def from_httplib(ResponseCls, r, **response_kw): - """ - Given an :class:`httplib.HTTPResponse` instance ``r``, return a - corresponding :class:`urllib3.response.HTTPResponse` object. - - Remaining parameters are passed to the HTTPResponse constructor, along - with ``original_response=r``. - """ - headers = r.msg - - if not isinstance(headers, HTTPHeaderDict): - if PY3: # Python 3 - headers = HTTPHeaderDict(headers.items()) - else: # Python 2 - headers = HTTPHeaderDict.from_httplib(headers) - - # HTTPResponse objects in Python 3 don't have a .strict attribute - strict = getattr(r, 'strict', 0) - resp = ResponseCls(body=r, - headers=headers, - status=r.status, - version=r.version, - reason=r.reason, - strict=strict, - original_response=r, - **response_kw) - return resp - - # Backwards-compatibility methods for httplib.HTTPResponse - def getheaders(self): - return self.headers - - def getheader(self, name, default=None): - return self.headers.get(name, default) - - # Overrides from io.IOBase - def close(self): - if not self.closed: - self._fp.close() - - if self._connection: - self._connection.close() - - @property - def closed(self): - if self._fp is None: - return True - elif hasattr(self._fp, 'isclosed'): - return self._fp.isclosed() - elif hasattr(self._fp, 'closed'): - return self._fp.closed - else: - return True - - def fileno(self): - if self._fp is None: - raise IOError("HTTPResponse has no file to get a fileno from") - elif hasattr(self._fp, "fileno"): - return self._fp.fileno() - else: - raise IOError("The file-like object this HTTPResponse is wrapped " - "around has no file descriptor") - - def flush(self): - if self._fp is not None and hasattr(self._fp, 'flush'): - return self._fp.flush() - - def readable(self): - # This method is required for `io` module compatibility. - return True - - def readinto(self, b): - # This method is required for `io` module compatibility. - temp = self.read(len(b)) - if len(temp) == 0: - return 0 - else: - b[:len(temp)] = temp - return len(temp) - - def supports_chunked_reads(self): - """ - Checks if the underlying file-like object looks like a - httplib.HTTPResponse object. We do this by testing for the fp - attribute. If it is present we assume it returns raw chunks as - processed by read_chunked(). - """ - return hasattr(self._fp, 'fp') - - def _update_chunk_length(self): - # First, we'll figure out length of a chunk and then - # we'll try to read it from socket. - if self.chunk_left is not None: - return - line = self._fp.fp.readline() - line = line.split(b';', 1)[0] - try: - self.chunk_left = int(line, 16) - except ValueError: - # Invalid chunked protocol response, abort. - self.close() - raise httplib.IncompleteRead(line) - - def _handle_chunk(self, amt): - returned_chunk = None - if amt is None: - chunk = self._fp._safe_read(self.chunk_left) - returned_chunk = chunk - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. 
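The chunk framing parsed by _update_chunk_length() above is plain hex with optional extensions; a standalone illustration of that parsing step:

    line = b'1a2b;chunk-extension=ignored\r\n'
    size = int(line.split(b';', 1)[0], 16)
    assert size == 0x1a2b   # 6699 body bytes expected, then a trailing CRLF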
- self.chunk_left = None - elif amt < self.chunk_left: - value = self._fp._safe_read(amt) - self.chunk_left = self.chunk_left - amt - returned_chunk = value - elif amt == self.chunk_left: - value = self._fp._safe_read(amt) - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - returned_chunk = value - else: # amt > self.chunk_left - returned_chunk = self._fp._safe_read(self.chunk_left) - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - return returned_chunk - - def read_chunked(self, amt=None, decode_content=None): - """ - Similar to :meth:`HTTPResponse.read`, but with an additional - parameter: ``decode_content``. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - self._init_decoder() - # FIXME: Rewrite this method and make it a class with a better structured logic. - if not self.chunked: - raise ResponseNotChunked( - "Response is not chunked. " - "Header 'transfer-encoding: chunked' is missing.") - if not self.supports_chunked_reads(): - raise BodyNotHttplibCompatible( - "Body should be httplib.HTTPResponse like. " - "It should have have an fp attribute which returns raw chunks.") - - # Don't bother reading the body of a HEAD request. - if self._original_response and is_response_to_head(self._original_response): - self._original_response.close() - return - - with self._error_catcher(): - while True: - self._update_chunk_length() - if self.chunk_left == 0: - break - chunk = self._handle_chunk(amt) - decoded = self._decode(chunk, decode_content=decode_content, - flush_decoder=False) - if decoded: - yield decoded - - if decode_content: - # On CPython and PyPy, we should never need to flush the - # decoder. However, on Jython we *might* need to, so - # lets defensively do it anyway. - decoded = self._flush_decoder() - if decoded: # Platform-specific: Jython. - yield decoded - - # Chunk content ends with \r\n: discard it. - while True: - line = self._fp.fp.readline() - if not line: - # Some sites may not end with '\r\n'. - break - if line == b'\r\n': - break - - # We read everything; close the "file". - if self._original_response: - self._original_response.close() diff --git a/pype/vendor/requests/packages/urllib3/util/__init__.py b/pype/vendor/requests/packages/urllib3/util/__init__.py deleted file mode 100644 index 2f2770b622..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import absolute_import -# For backwards compatibility, provide imports that used to be here. 
-from .connection import is_connection_dropped -from .request import make_headers -from .response import is_fp_closed -from .ssl_ import ( - SSLContext, - HAS_SNI, - IS_PYOPENSSL, - IS_SECURETRANSPORT, - assert_fingerprint, - resolve_cert_reqs, - resolve_ssl_version, - ssl_wrap_socket, -) -from .timeout import ( - current_time, - Timeout, -) - -from .retry import Retry -from .url import ( - get_host, - parse_url, - split_first, - Url, -) -from .wait import ( - wait_for_read, - wait_for_write -) - -__all__ = ( - 'HAS_SNI', - 'IS_PYOPENSSL', - 'IS_SECURETRANSPORT', - 'SSLContext', - 'Retry', - 'Timeout', - 'Url', - 'assert_fingerprint', - 'current_time', - 'is_connection_dropped', - 'is_fp_closed', - 'get_host', - 'parse_url', - 'make_headers', - 'resolve_cert_reqs', - 'resolve_ssl_version', - 'split_first', - 'ssl_wrap_socket', - 'wait_for_read', - 'wait_for_write' -) diff --git a/pype/vendor/requests/packages/urllib3/util/connection.py b/pype/vendor/requests/packages/urllib3/util/connection.py deleted file mode 100644 index bf699cfd0e..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/connection.py +++ /dev/null @@ -1,130 +0,0 @@ -from __future__ import absolute_import -import socket -from .wait import wait_for_read -from .selectors import HAS_SELECT, SelectorError - - -def is_connection_dropped(conn): # Platform-specific - """ - Returns True if the connection is dropped and should be closed. - - :param conn: - :class:`httplib.HTTPConnection` object. - - Note: For platforms like AppEngine, this will always return ``False`` to - let the platform handle connection recycling transparently for us. - """ - sock = getattr(conn, 'sock', False) - if sock is False: # Platform-specific: AppEngine - return False - if sock is None: # Connection already closed (such as by httplib). - return True - - if not HAS_SELECT: - return False - - try: - return bool(wait_for_read(sock, timeout=0.0)) - except SelectorError: - return True - - -# This function is copied from socket.py in the Python 2.7 standard -# library test suite. Added to its signature is only `socket_options`. -# One additional modification is that we avoid binding to IPv6 servers -# discovered in DNS if the system doesn't have IPv6 functionality. -def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, socket_options=None): - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - An host of '' or port 0 tells the OS to use the default. - """ - - host, port = address - if host.startswith('['): - host = host.strip('[]') - err = None - - # Using the value from allowed_gai_family() in the context of getaddrinfo lets - # us select whether to work with IPv4 DNS records, IPv6 records, or both. - # The original create_connection function always returns all records. 
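A sketch of the replacement create_connection() above; the socket_options list is the main addition over the stdlib version (the target host is a placeholder and this opens a real connection):

    import socket
    from pype.vendor.requests.packages.urllib3.util.connection import create_connection

    sock = create_connection(('example.com', 80), timeout=5.0,
                             socket_options=[(socket.IPPROTO_TCP,
                                              socket.TCP_NODELAY, 1)])
    sock.close()   # options were applied before connect()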
- family = allowed_gai_family() - - for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - sock = None - try: - sock = socket.socket(af, socktype, proto) - - # If provided, set socket level options before connecting. - _set_socket_options(sock, socket_options) - - if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: - sock.settimeout(timeout) - if source_address: - sock.bind(source_address) - sock.connect(sa) - return sock - - except socket.error as e: - err = e - if sock is not None: - sock.close() - sock = None - - if err is not None: - raise err - - raise socket.error("getaddrinfo returns an empty list") - - -def _set_socket_options(sock, options): - if options is None: - return - - for opt in options: - sock.setsockopt(*opt) - - -def allowed_gai_family(): - """This function is designed to work in the context of - getaddrinfo, where family=socket.AF_UNSPEC is the default and - will perform a DNS search for both IPv6 and IPv4 records.""" - - family = socket.AF_INET - if HAS_IPV6: - family = socket.AF_UNSPEC - return family - - -def _has_ipv6(host): - """ Returns True if the system can bind an IPv6 address. """ - sock = None - has_ipv6 = False - - if socket.has_ipv6: - # has_ipv6 returns true if cPython was compiled with IPv6 support. - # It does not tell us if the system has IPv6 support enabled. To - # determine that we must bind to an IPv6 address. - # https://github.com/shazow/urllib3/pull/611 - # https://bugs.python.org/issue658327 - try: - sock = socket.socket(socket.AF_INET6) - sock.bind((host, 0)) - has_ipv6 = True - except Exception: - pass - - if sock: - sock.close() - return has_ipv6 - - -HAS_IPV6 = _has_ipv6('::1') diff --git a/pype/vendor/requests/packages/urllib3/util/request.py b/pype/vendor/requests/packages/urllib3/util/request.py deleted file mode 100644 index 3ddfcd5594..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/request.py +++ /dev/null @@ -1,118 +0,0 @@ -from __future__ import absolute_import -from base64 import b64encode - -from ..packages.six import b, integer_types -from ..exceptions import UnrewindableBodyError - -ACCEPT_ENCODING = 'gzip,deflate' -_FAILEDTELL = object() - - -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None, disable_cache=None): - """ - Shortcuts for generating request headers. - - :param keep_alive: - If ``True``, adds 'connection: keep-alive' header. - - :param accept_encoding: - Can be a boolean, list, or string. - ``True`` translates to 'gzip,deflate'. - List will get joined by comma. - String will be used as provided. - - :param user_agent: - String representing the user-agent you want, such as - "python-urllib3/0.6" - - :param basic_auth: - Colon-separated username:password string for 'authorization: basic ...' - auth header. - - :param proxy_basic_auth: - Colon-separated username:password string for 'proxy-authorization: basic ...' - auth header. - - :param disable_cache: - If ``True``, adds 'cache-control: no-cache' header. 
- - Example:: - - >>> make_headers(keep_alive=True, user_agent="Batman/1.0") - {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} - >>> make_headers(accept_encoding=True) - {'accept-encoding': 'gzip,deflate'} - """ - headers = {} - if accept_encoding: - if isinstance(accept_encoding, str): - pass - elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) - else: - accept_encoding = ACCEPT_ENCODING - headers['accept-encoding'] = accept_encoding - - if user_agent: - headers['user-agent'] = user_agent - - if keep_alive: - headers['connection'] = 'keep-alive' - - if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(b(basic_auth)).decode('utf-8') - - if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(b(proxy_basic_auth)).decode('utf-8') - - if disable_cache: - headers['cache-control'] = 'no-cache' - - return headers - - -def set_file_position(body, pos): - """ - If a position is provided, move file to that point. - Otherwise, we'll attempt to record a position for future use. - """ - if pos is not None: - rewind_body(body, pos) - elif getattr(body, 'tell', None) is not None: - try: - pos = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body. - pos = _FAILEDTELL - - return pos - - -def rewind_body(body, body_pos): - """ - Attempt to rewind body to a certain position. - Primarily used for request redirects and retries. - - :param body: - File-like object that supports seek. - - :param int pos: - Position to seek to in file. - """ - body_seek = getattr(body, 'seek', None) - if body_seek is not None and isinstance(body_pos, integer_types): - try: - body_seek(body_pos) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect/retry.") - elif body_pos is _FAILEDTELL: - raise UnrewindableBodyError("Unable to record file position for rewinding " - "request body during a redirect/retry.") - else: - raise ValueError("body_pos must be of type integer, " - "instead it was %s." % type(body_pos)) diff --git a/pype/vendor/requests/packages/urllib3/util/response.py b/pype/vendor/requests/packages/urllib3/util/response.py deleted file mode 100644 index 67cf730ab0..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/response.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import absolute_import -from ..packages.six.moves import http_client as httplib - -from ..exceptions import HeaderParsingError - - -def is_fp_closed(obj): - """ - Checks whether a given file-like object is closed. - - :param obj: - The file-like object to check. - """ - - try: - # Check `isclosed()` first, in case Python3 doesn't set `closed`. - # GH Issue #928 - return obj.isclosed() - except AttributeError: - pass - - try: - # Check via the official file-like-object way. - return obj.closed - except AttributeError: - pass - - try: - # Check if the object is a container for another file-like object that - # gets released on exhaustion (e.g. HTTPResponse). - return obj.fp is None - except AttributeError: - pass - - raise ValueError("Unable to determine whether fp is closed.") - - -def assert_header_parsing(headers): - """ - Asserts whether all headers have been successfully parsed. - Extracts encountered errors from the result of parsing headers. - - Only works on Python 3. - - :param headers: Headers to verify. - :type headers: `httplib.HTTPMessage`. 
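A sketch of the rewind protocol above, used when a request body must be resent after a redirect or retry:

    import io
    from pype.vendor.requests.packages.urllib3.util.request import (
        set_file_position, rewind_body)

    body = io.BytesIO(b'payload')
    pos = set_file_position(body, None)   # records tell() -> 0
    body.read()                           # body consumed by a first attempt
    rewind_body(body, pos)                # seek back before retrying
    assert body.read() == b'payload'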
- - :raises urllib3.exceptions.HeaderParsingError: - If parsing errors are found. - """ - - # This will fail silently if we pass in the wrong kind of parameter. - # To make debugging easier add an explicit check. - if not isinstance(headers, httplib.HTTPMessage): - raise TypeError('expected httplib.Message, got {0}.'.format( - type(headers))) - - defects = getattr(headers, 'defects', None) - get_payload = getattr(headers, 'get_payload', None) - - unparsed_data = None - if get_payload: # Platform-specific: Python 3. - unparsed_data = get_payload() - - if defects or unparsed_data: - raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) - - -def is_response_to_head(response): - """ - Checks whether the request of a response has been a HEAD-request. - Handles the quirks of AppEngine. - - :param conn: - :type conn: :class:`httplib.HTTPResponse` - """ - # FIXME: Can we do this somehow without accessing private httplib _method? - method = response._method - if isinstance(method, int): # Platform-specific: Appengine - return method == 3 - return method.upper() == 'HEAD' diff --git a/pype/vendor/requests/packages/urllib3/util/retry.py b/pype/vendor/requests/packages/urllib3/util/retry.py deleted file mode 100644 index c603cb4904..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/retry.py +++ /dev/null @@ -1,401 +0,0 @@ -from __future__ import absolute_import -import time -import logging -from collections import namedtuple -from itertools import takewhile -import email -import re - -from ..exceptions import ( - ConnectTimeoutError, - MaxRetryError, - ProtocolError, - ReadTimeoutError, - ResponseError, - InvalidHeader, -) -from ..packages import six - - -log = logging.getLogger(__name__) - -# Data structure for representing the metadata of requests that result in a retry. -RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", - "status", "redirect_location"]) - - -class Retry(object): - """ Retry configuration. - - Each retry attempt will create a new Retry object with updated values, so - they can be safely reused. - - Retries can be defined as a default for a pool:: - - retries = Retry(connect=5, read=2, redirect=5) - http = PoolManager(retries=retries) - response = http.request('GET', 'http://example.com/') - - Or per-request (which overrides the default for the pool):: - - response = http.request('GET', 'http://example.com/', retries=Retry(10)) - - Retries can be disabled by passing ``False``:: - - response = http.request('GET', 'http://example.com/', retries=False) - - Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless - retries are disabled, in which case the causing exception will be raised. - - :param int total: - Total number of retries to allow. Takes precedence over other counts. - - Set to ``None`` to remove this constraint and fall back on other - counts. It's a good idea to set this to some sensibly-high value to - account for unexpected edge cases and avoid infinite retry loops. - - Set to ``0`` to fail on the first retry. - - Set to ``False`` to disable and imply ``raise_on_redirect=False``. - - :param int connect: - How many connection-related errors to retry on. - - These are errors raised before the request is sent to the remote server, - which we assume has not triggered the server to process the request. - - Set to ``0`` to fail on the first retry of this type. - - :param int read: - How many times to retry on read errors. 
- - These errors are raised after the request was sent to the server, so the - request may have side-effects. - - Set to ``0`` to fail on the first retry of this type. - - :param int redirect: - How many redirects to perform. Limit this to avoid infinite redirect - loops. - - A redirect is a HTTP response with a status code 301, 302, 303, 307 or - 308. - - Set to ``0`` to fail on the first retry of this type. - - Set to ``False`` to disable and imply ``raise_on_redirect=False``. - - :param int status: - How many times to retry on bad status codes. - - These are retries made on responses, where status code matches - ``status_forcelist``. - - Set to ``0`` to fail on the first retry of this type. - - :param iterable method_whitelist: - Set of uppercased HTTP method verbs that we should retry on. - - By default, we only retry on methods which are considered to be - idempotent (multiple requests with the same parameters end with the - same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. - - Set to a ``False`` value to retry on any verb. - - :param iterable status_forcelist: - A set of integer HTTP status codes that we should force a retry on. - A retry is initiated if the request method is in ``method_whitelist`` - and the response status code is in ``status_forcelist``. - - By default, this is disabled with ``None``. - - :param float backoff_factor: - A backoff factor to apply between attempts after the second try - (most errors are resolved immediately by a second try without a - delay). urllib3 will sleep for:: - - {backoff factor} * (2 ^ ({number of total retries} - 1)) - - seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep - for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer - than :attr:`Retry.BACKOFF_MAX`. - - By default, backoff is disabled (set to 0). - - :param bool raise_on_redirect: Whether, if the number of redirects is - exhausted, to raise a MaxRetryError, or to return a response with a - response code in the 3xx range. - - :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: - whether we should raise an exception, or return a response, - if status falls in ``status_forcelist`` range and retries have - been exhausted. - - :param tuple history: The history of the request encountered during - each call to :meth:`~Retry.increment`. The list is in the order - the requests occurred. Each list item is of class :class:`RequestHistory`. - - :param bool respect_retry_after_header: - Whether to respect Retry-After header on status codes defined as - :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. - - """ - - DEFAULT_METHOD_WHITELIST = frozenset([ - 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) - - RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - - #: Maximum backoff time. 
- BACKOFF_MAX = 120 - - def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, - backoff_factor=0, raise_on_redirect=True, raise_on_status=True, - history=None, respect_retry_after_header=True): - - self.total = total - self.connect = connect - self.read = read - self.status = status - - if redirect is False or total is False: - redirect = 0 - raise_on_redirect = False - - self.redirect = redirect - self.status_forcelist = status_forcelist or set() - self.method_whitelist = method_whitelist - self.backoff_factor = backoff_factor - self.raise_on_redirect = raise_on_redirect - self.raise_on_status = raise_on_status - self.history = history or tuple() - self.respect_retry_after_header = respect_retry_after_header - - def new(self, **kw): - params = dict( - total=self.total, - connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, - method_whitelist=self.method_whitelist, - status_forcelist=self.status_forcelist, - backoff_factor=self.backoff_factor, - raise_on_redirect=self.raise_on_redirect, - raise_on_status=self.raise_on_status, - history=self.history, - ) - params.update(kw) - return type(self)(**params) - - @classmethod - def from_int(cls, retries, redirect=True, default=None): - """ Backwards-compatibility for the old retries format.""" - if retries is None: - retries = default if default is not None else cls.DEFAULT - - if isinstance(retries, Retry): - return retries - - redirect = bool(redirect) and None - new_retries = cls(retries, redirect=redirect) - log.debug("Converted retries value: %r -> %r", retries, new_retries) - return new_retries - - def get_backoff_time(self): - """ Formula for computing the current backoff - - :rtype: float - """ - # We want to consider only the last consecutive errors sequence (Ignore redirects). - consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, - reversed(self.history)))) - if consecutive_errors_len <= 1: - return 0 - - backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) - return min(self.BACKOFF_MAX, backoff_value) - - def parse_retry_after(self, retry_after): - # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 - if re.match(r"^\s*[0-9]+\s*$", retry_after): - seconds = int(retry_after) - else: - retry_date_tuple = email.utils.parsedate(retry_after) - if retry_date_tuple is None: - raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) - retry_date = time.mktime(retry_date_tuple) - seconds = retry_date - time.time() - - if seconds < 0: - seconds = 0 - - return seconds - - def get_retry_after(self, response): - """ Get the value of Retry-After in seconds. """ - - retry_after = response.getheader("Retry-After") - - if retry_after is None: - return None - - return self.parse_retry_after(retry_after) - - def sleep_for_retry(self, response=None): - retry_after = self.get_retry_after(response) - if retry_after: - time.sleep(retry_after) - return True - - return False - - def _sleep_backoff(self): - backoff = self.get_backoff_time() - if backoff <= 0: - return - time.sleep(backoff) - - def sleep(self, response=None): - """ Sleep between retry attempts. - - This method will respect a server's ``Retry-After`` response header - and sleep the duration of the time requested. If that is not present, it - will use an exponential backoff. By default, the backoff factor is 0 and - this method will return immediately. 
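A sketch of the arithmetic documented above for backoff and Retry-After handling:

    from pype.vendor.requests.packages.urllib3.util.retry import Retry

    r = Retry(backoff_factor=0.1)
    # documented formula: {backoff factor} * (2 ** ({retries} - 1)), capped
    assert [min(Retry.BACKOFF_MAX, 0.1 * 2 ** (n - 1)) for n in (2, 3, 4)] \
        == [0.2, 0.4, 0.8]
    assert r.parse_retry_after('120') == 120   # plain-seconds header form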
- """ - - if response: - slept = self.sleep_for_retry(response) - if slept: - return - - self._sleep_backoff() - - def _is_connection_error(self, err): - """ Errors when we're fairly sure that the server did not receive the - request, so it should be safe to retry. - """ - return isinstance(err, ConnectTimeoutError) - - def _is_read_error(self, err): - """ Errors that occur after the request has been started, so we should - assume that the server began processing it. - """ - return isinstance(err, (ReadTimeoutError, ProtocolError)) - - def _is_method_retryable(self, method): - """ Checks if a given HTTP method should be retried upon, depending if - it is included on the method whitelist. - """ - if self.method_whitelist and method.upper() not in self.method_whitelist: - return False - - return True - - def is_retry(self, method, status_code, has_retry_after=False): - """ Is this method/status code retryable? (Based on whitelists and control - variables such as the number of total retries to allow, whether to - respect the Retry-After header, whether this header is present, and - whether the returned status code is on the list of status codes to - be retried upon on the presence of the aforementioned header) - """ - if not self._is_method_retryable(method): - return False - - if self.status_forcelist and status_code in self.status_forcelist: - return True - - return (self.total and self.respect_retry_after_header and - has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) - - def is_exhausted(self): - """ Are we out of retries? """ - retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) - retry_counts = list(filter(None, retry_counts)) - if not retry_counts: - return False - - return min(retry_counts) < 0 - - def increment(self, method=None, url=None, response=None, error=None, - _pool=None, _stacktrace=None): - """ Return a new Retry object with incremented retry counters. - - :param response: A response object, or None, if the server did not - return a response. - :type response: :class:`~urllib3.response.HTTPResponse` - :param Exception error: An error encountered during the request, or - None if the response was received successfully. - - :return: A new ``Retry`` object. - """ - if self.total is False and error: - # Disabled, indicate to re-raise the error. - raise six.reraise(type(error), error, _stacktrace) - - total = self.total - if total is not None: - total -= 1 - - connect = self.connect - read = self.read - redirect = self.redirect - status_count = self.status - cause = 'unknown' - status = None - redirect_location = None - - if error and self._is_connection_error(error): - # Connect retry? - if connect is False: - raise six.reraise(type(error), error, _stacktrace) - elif connect is not None: - connect -= 1 - - elif error and self._is_read_error(error): - # Read retry? - if read is False or not self._is_method_retryable(method): - raise six.reraise(type(error), error, _stacktrace) - elif read is not None: - read -= 1 - - elif response and response.get_redirect_location(): - # Redirect retry? 
- if redirect is not None: - redirect -= 1 - cause = 'too many redirects' - redirect_location = response.get_redirect_location() - status = response.status - - else: - # Incrementing because of a server error like a 500 in - # status_forcelist and the given method is in the whitelist - cause = ResponseError.GENERIC_ERROR - if response and response.status: - if status_count is not None: - status_count -= 1 - cause = ResponseError.SPECIFIC_ERROR.format( - status_code=response.status) - status = response.status - - history = self.history + (RequestHistory(method, url, error, status, redirect_location),) - - new_retry = self.new( - total=total, - connect=connect, read=read, redirect=redirect, status=status_count, - history=history) - - if new_retry.is_exhausted(): - raise MaxRetryError(_pool, url, error or ResponseError(cause)) - - log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) - - return new_retry - - def __repr__(self): - return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' - 'read={self.read}, redirect={self.redirect}, status={self.status})').format( - cls=type(self), self=self) - - -# For backwards compatibility (equivalent to pre-v1.9): -Retry.DEFAULT = Retry(3) diff --git a/pype/vendor/requests/packages/urllib3/util/selectors.py b/pype/vendor/requests/packages/urllib3/util/selectors.py deleted file mode 100644 index d75cb266bc..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/selectors.py +++ /dev/null @@ -1,581 +0,0 @@ -# Backport of selectors.py from Python 3.5+ to support Python < 3.4 -# Also has the behavior specified in PEP 475 which is to retry syscalls -# in the case of an EINTR error. This module is required because selectors34 -# does not follow this behavior and instead returns that no file descriptor -# events have occurred rather than retry the syscall. The decision to drop -# support for select.devpoll is made to maintain 100% test coverage. - -import errno -import math -import select -import socket -import sys -import time -from collections import namedtuple, Mapping - -try: - monotonic = time.monotonic -except (AttributeError, ImportError): # Python < 3.3 - monotonic = time.time - -EVENT_READ = (1 << 0) -EVENT_WRITE = (1 << 1) - -HAS_SELECT = True # Variable that shows whether the platform has a selector. -_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None. -_DEFAULT_SELECTOR = None - - -class SelectorError(Exception): - def __init__(self, errcode): - super(SelectorError, self).__init__() - self.errno = errcode - - def __repr__(self): - return "<SelectorError errno={0}>".format(self.errno) - - def __str__(self): - return self.__repr__() - - -def _fileobj_to_fd(fileobj): - """ Return a file descriptor from a file object. If - given an integer will simply return that integer back. """ - if isinstance(fileobj, int): - fd = fileobj - else: - try: - fd = int(fileobj.fileno()) - except (AttributeError, TypeError, ValueError): - raise ValueError("Invalid file object: {0!r}".format(fileobj)) - if fd < 0: - raise ValueError("Invalid file descriptor: {0}".format(fd)) - return fd - - -# Determine which function to use to wrap system calls because Python 3.5+ -# already handles the case when system calls are interrupted. -if sys.version_info >= (3, 5): - def _syscall_wrapper(func, _, *args, **kwargs): - """ This is the short-circuit version of the below logic - because in Python 3.5+ all system calls automatically restart - and recalculate their timeouts.
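The `Retry` class deleted above sleeps `backoff_factor * 2**(n - 1)` seconds after the n-th consecutive error, capped at `BACKOFF_MAX`. A minimal standalone sketch of that schedule (the names here are illustrative, not part of the vendored module):

BACKOFF_MAX = 120  # same cap as Retry.BACKOFF_MAX

def backoff_time(backoff_factor, consecutive_errors):
    # Mirrors Retry.get_backoff_time(): no sleep until the second
    # consecutive error, then backoff_factor * 2**(n - 1), capped.
    if consecutive_errors <= 1:
        return 0
    return min(BACKOFF_MAX, backoff_factor * (2 ** (consecutive_errors - 1)))

# backoff_factor=0.5 gives sleeps of 0, 1.0, 2.0, 4.0 and 8.0 seconds.
print([backoff_time(0.5, n) for n in range(1, 6)])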
""" - try: - return func(*args, **kwargs) - except (OSError, IOError, select.error) as e: - errcode = None - if hasattr(e, "errno"): - errcode = e.errno - raise SelectorError(errcode) -else: - def _syscall_wrapper(func, recalc_timeout, *args, **kwargs): - """ Wrapper function for syscalls that could fail due to EINTR. - All functions should be retried if there is time left in the timeout - in accordance with PEP 475. """ - timeout = kwargs.get("timeout", None) - if timeout is None: - expires = None - recalc_timeout = False - else: - timeout = float(timeout) - if timeout < 0.0: # Timeout less than 0 treated as no timeout. - expires = None - else: - expires = monotonic() + timeout - - args = list(args) - if recalc_timeout and "timeout" not in kwargs: - raise ValueError( - "Timeout must be in args or kwargs to be recalculated") - - result = _SYSCALL_SENTINEL - while result is _SYSCALL_SENTINEL: - try: - result = func(*args, **kwargs) - # OSError is thrown by select.select - # IOError is thrown by select.epoll.poll - # select.error is thrown by select.poll.poll - # Aren't we thankful for Python 3.x rework for exceptions? - except (OSError, IOError, select.error) as e: - # select.error wasn't a subclass of OSError in the past. - errcode = None - if hasattr(e, "errno"): - errcode = e.errno - elif hasattr(e, "args"): - errcode = e.args[0] - - # Also test for the Windows equivalent of EINTR. - is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and - errcode == errno.WSAEINTR)) - - if is_interrupt: - if expires is not None: - current_time = monotonic() - if current_time > expires: - raise OSError(errno=errno.ETIMEDOUT) - if recalc_timeout: - if "timeout" in kwargs: - kwargs["timeout"] = expires - current_time - continue - if errcode: - raise SelectorError(errcode) - else: - raise - return result - - -SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) - - -class _SelectorMapping(Mapping): - """ Mapping of file objects to selector keys """ - - def __init__(self, selector): - self._selector = selector - - def __len__(self): - return len(self._selector._fd_to_key) - - def __getitem__(self, fileobj): - try: - fd = self._selector._fileobj_lookup(fileobj) - return self._selector._fd_to_key[fd] - except KeyError: - raise KeyError("{0!r} is not registered.".format(fileobj)) - - def __iter__(self): - return iter(self._selector._fd_to_key) - - -class BaseSelector(object): - """ Abstract Selector class - - A selector supports registering file objects to be monitored - for specific I/O events. - - A file object is a file descriptor or any object with a - `fileno()` method. An arbitrary object can be attached to the - file object which can be used for example to store context info, - a callback, etc. - - A selector can use various implementations (select(), poll(), epoll(), - and kqueue()) depending on the platform. The 'DefaultSelector' class uses - the most efficient implementation for the current platform. - """ - def __init__(self): - # Maps file descriptors to keys. - self._fd_to_key = {} - - # Read-only mapping returned by get_map() - self._map = _SelectorMapping(self) - - def _fileobj_lookup(self, fileobj): - """ Return a file descriptor from a file object. - This wraps _fileobj_to_fd() to do an exhaustive - search in case the object is invalid but we still - have it in our map. Used by unregister() so we can - unregister an object that was previously registered - even if it is closed. 
It is also used by _SelectorMapping - """ - try: - return _fileobj_to_fd(fileobj) - except ValueError: - - # Search through all our mapped keys. - for key in self._fd_to_key.values(): - if key.fileobj is fileobj: - return key.fd - - # Raise ValueError after all. - raise - - def register(self, fileobj, events, data=None): - """ Register a file object for a set of events to monitor. """ - if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): - raise ValueError("Invalid events: {0!r}".format(events)) - - key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) - - if key.fd in self._fd_to_key: - raise KeyError("{0!r} (FD {1}) is already registered" - .format(fileobj, key.fd)) - - self._fd_to_key[key.fd] = key - return key - - def unregister(self, fileobj): - """ Unregister a file object from being monitored. """ - try: - key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - # Getting the fileno of a closed socket on Windows errors with EBADF. - except socket.error as e: # Platform-specific: Windows. - if e.errno != errno.EBADF: - raise - else: - for key in self._fd_to_key.values(): - if key.fileobj is fileobj: - self._fd_to_key.pop(key.fd) - break - else: - raise KeyError("{0!r} is not registered".format(fileobj)) - return key - - def modify(self, fileobj, events, data=None): - """ Change a registered file object monitored events and data. """ - # NOTE: Some subclasses optimize this operation even further. - try: - key = self._fd_to_key[self._fileobj_lookup(fileobj)] - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - if events != key.events: - self.unregister(fileobj) - key = self.register(fileobj, events, data) - - elif data != key.data: - # Use a shortcut to update the data. - key = key._replace(data=data) - self._fd_to_key[key.fd] = key - - return key - - def select(self, timeout=None): - """ Perform the actual selection until some monitored file objects - are ready or the timeout expires. """ - raise NotImplementedError() - - def close(self): - """ Close the selector. This must be called to ensure that all - underlying resources are freed. """ - self._fd_to_key.clear() - self._map = None - - def get_key(self, fileobj): - """ Return the key associated with a registered file object. """ - mapping = self.get_map() - if mapping is None: - raise RuntimeError("Selector is closed") - try: - return mapping[fileobj] - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - def get_map(self): - """ Return a mapping of file objects to selector keys """ - return self._map - - def _key_from_fd(self, fd): - """ Return the key associated to a given file descriptor - Return None if it is not found. """ - try: - return self._fd_to_key[fd] - except KeyError: - return None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - -# Almost all platforms have select.select() -if hasattr(select, "select"): - class SelectSelector(BaseSelector): - """ Select-based selector. 
""" - def __init__(self): - super(SelectSelector, self).__init__() - self._readers = set() - self._writers = set() - - def register(self, fileobj, events, data=None): - key = super(SelectSelector, self).register(fileobj, events, data) - if events & EVENT_READ: - self._readers.add(key.fd) - if events & EVENT_WRITE: - self._writers.add(key.fd) - return key - - def unregister(self, fileobj): - key = super(SelectSelector, self).unregister(fileobj) - self._readers.discard(key.fd) - self._writers.discard(key.fd) - return key - - def _select(self, r, w, timeout=None): - """ Wrapper for select.select because timeout is a positional arg """ - return select.select(r, w, [], timeout) - - def select(self, timeout=None): - # Selecting on empty lists on Windows errors out. - if not len(self._readers) and not len(self._writers): - return [] - - timeout = None if timeout is None else max(timeout, 0.0) - ready = [] - r, w, _ = _syscall_wrapper(self._select, True, self._readers, - self._writers, timeout) - r = set(r) - w = set(w) - for fd in r | w: - events = 0 - if fd in r: - events |= EVENT_READ - if fd in w: - events |= EVENT_WRITE - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - return ready - - -if hasattr(select, "poll"): - class PollSelector(BaseSelector): - """ Poll-based selector """ - def __init__(self): - super(PollSelector, self).__init__() - self._poll = select.poll() - - def register(self, fileobj, events, data=None): - key = super(PollSelector, self).register(fileobj, events, data) - event_mask = 0 - if events & EVENT_READ: - event_mask |= select.POLLIN - if events & EVENT_WRITE: - event_mask |= select.POLLOUT - self._poll.register(key.fd, event_mask) - return key - - def unregister(self, fileobj): - key = super(PollSelector, self).unregister(fileobj) - self._poll.unregister(key.fd) - return key - - def _wrap_poll(self, timeout=None): - """ Wrapper function for select.poll.poll() so that - _syscall_wrapper can work with only seconds. """ - if timeout is not None: - if timeout <= 0: - timeout = 0 - else: - # select.poll.poll() has a resolution of 1 millisecond, - # round away from zero to wait *at least* timeout seconds. - timeout = math.ceil(timeout * 1e3) - - result = self._poll.poll(timeout) - return result - - def select(self, timeout=None): - ready = [] - fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) - for fd, event_mask in fd_events: - events = 0 - if event_mask & ~select.POLLIN: - events |= EVENT_WRITE - if event_mask & ~select.POLLOUT: - events |= EVENT_READ - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - - return ready - - -if hasattr(select, "epoll"): - class EpollSelector(BaseSelector): - """ Epoll-based selector """ - def __init__(self): - super(EpollSelector, self).__init__() - self._epoll = select.epoll() - - def fileno(self): - return self._epoll.fileno() - - def register(self, fileobj, events, data=None): - key = super(EpollSelector, self).register(fileobj, events, data) - events_mask = 0 - if events & EVENT_READ: - events_mask |= select.EPOLLIN - if events & EVENT_WRITE: - events_mask |= select.EPOLLOUT - _syscall_wrapper(self._epoll.register, False, key.fd, events_mask) - return key - - def unregister(self, fileobj): - key = super(EpollSelector, self).unregister(fileobj) - try: - _syscall_wrapper(self._epoll.unregister, False, key.fd) - except SelectorError: - # This can occur when the fd was closed since registry. 
- pass - return key - - def select(self, timeout=None): - if timeout is not None: - if timeout <= 0: - timeout = 0.0 - else: - # select.epoll.poll() has a resolution of 1 millisecond - # but luckily takes seconds so we don't need a wrapper - # like PollSelector. Just for better rounding. - timeout = math.ceil(timeout * 1e3) * 1e-3 - timeout = float(timeout) - else: - timeout = -1.0 # epoll.poll() must have a float. - - # We always want at least 1 to ensure that select can be called - # with no file descriptors registered. Otherwise will fail. - max_events = max(len(self._fd_to_key), 1) - - ready = [] - fd_events = _syscall_wrapper(self._epoll.poll, True, - timeout=timeout, - maxevents=max_events) - for fd, event_mask in fd_events: - events = 0 - if event_mask & ~select.EPOLLIN: - events |= EVENT_WRITE - if event_mask & ~select.EPOLLOUT: - events |= EVENT_READ - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - return ready - - def close(self): - self._epoll.close() - super(EpollSelector, self).close() - - -if hasattr(select, "kqueue"): - class KqueueSelector(BaseSelector): - """ Kqueue / Kevent-based selector """ - def __init__(self): - super(KqueueSelector, self).__init__() - self._kqueue = select.kqueue() - - def fileno(self): - return self._kqueue.fileno() - - def register(self, fileobj, events, data=None): - key = super(KqueueSelector, self).register(fileobj, events, data) - if events & EVENT_READ: - kevent = select.kevent(key.fd, - select.KQ_FILTER_READ, - select.KQ_EV_ADD) - - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - - if events & EVENT_WRITE: - kevent = select.kevent(key.fd, - select.KQ_FILTER_WRITE, - select.KQ_EV_ADD) - - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - - return key - - def unregister(self, fileobj): - key = super(KqueueSelector, self).unregister(fileobj) - if key.events & EVENT_READ: - kevent = select.kevent(key.fd, - select.KQ_FILTER_READ, - select.KQ_EV_DELETE) - try: - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - except SelectorError: - pass - if key.events & EVENT_WRITE: - kevent = select.kevent(key.fd, - select.KQ_FILTER_WRITE, - select.KQ_EV_DELETE) - try: - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - except SelectorError: - pass - - return key - - def select(self, timeout=None): - if timeout is not None: - timeout = max(timeout, 0) - - max_events = len(self._fd_to_key) * 2 - ready_fds = {} - - kevent_list = _syscall_wrapper(self._kqueue.control, True, - None, max_events, timeout) - - for kevent in kevent_list: - fd = kevent.ident - event_mask = kevent.filter - events = 0 - if event_mask == select.KQ_FILTER_READ: - events |= EVENT_READ - if event_mask == select.KQ_FILTER_WRITE: - events |= EVENT_WRITE - - key = self._key_from_fd(fd) - if key: - if key.fd not in ready_fds: - ready_fds[key.fd] = (key, events & key.events) - else: - old_events = ready_fds[key.fd][1] - ready_fds[key.fd] = (key, (events | old_events) & key.events) - - return list(ready_fds.values()) - - def close(self): - self._kqueue.close() - super(KqueueSelector, self).close() - - -if not hasattr(select, 'select'): # Platform-specific: AppEngine - HAS_SELECT = False - - -def _can_allocate(struct): - """ Checks that select structs can be allocated by the underlying - operating system, not just advertised by the select module. We don't - check select() because we'll be hopeful that most platforms that - don't have it available will not advertise it. 
(ie: GAE) """ - try: - # select.poll() objects won't fail until used. - if struct == 'poll': - p = select.poll() - p.poll(0) - - # All others will fail on allocation. - else: - getattr(select, struct)().close() - return True - except (OSError, AttributeError) as e: - return False - - -# Choose the best implementation, roughly: -# kqueue == epoll > poll > select. Devpoll not supported. (See above) -# select() also can't accept a FD > FD_SETSIZE (usually around 1024) -def DefaultSelector(): - """ This function serves as a first call for DefaultSelector to - detect if the select module is being monkey-patched incorrectly - by eventlet, greenlet, and preserve proper behavior. """ - global _DEFAULT_SELECTOR - if _DEFAULT_SELECTOR is None: - if _can_allocate('kqueue'): - _DEFAULT_SELECTOR = KqueueSelector - elif _can_allocate('epoll'): - _DEFAULT_SELECTOR = EpollSelector - elif _can_allocate('poll'): - _DEFAULT_SELECTOR = PollSelector - elif hasattr(select, 'select'): - _DEFAULT_SELECTOR = SelectSelector - else: # Platform-specific: AppEngine - raise ValueError('Platform does not have a selector') - return _DEFAULT_SELECTOR() diff --git a/pype/vendor/requests/packages/urllib3/util/ssl_.py b/pype/vendor/requests/packages/urllib3/util/ssl_.py deleted file mode 100644 index 33d428ed85..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/ssl_.py +++ /dev/null @@ -1,337 +0,0 @@ -from __future__ import absolute_import -import errno -import warnings -import hmac - -from binascii import hexlify, unhexlify -from hashlib import md5, sha1, sha256 - -from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning - - -SSLContext = None -HAS_SNI = False -IS_PYOPENSSL = False -IS_SECURETRANSPORT = False - -# Maps the length of a digest to a possible hash function producing this digest -HASHFUNC_MAP = { - 32: md5, - 40: sha1, - 64: sha256, -} - - -def _const_compare_digest_backport(a, b): - """ - Compare two digests of equal length in constant time. - - The digests must be of type str/bytes. - Returns True if the digests match, and False otherwise. - """ - result = abs(len(a) - len(b)) - for l, r in zip(bytearray(a), bytearray(b)): - result |= l ^ r - return result == 0 - - -_const_compare_digest = getattr(hmac, 'compare_digest', - _const_compare_digest_backport) - - -try: # Test for SSL features - import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 - from ssl import HAS_SNI # Has SNI? -except ImportError: - pass - - -try: - from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION -except ImportError: - OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 - OP_NO_COMPRESSION = 0x20000 - -# A secure default. -# Sources for more information on TLS ciphers: -# -# - https://wiki.mozilla.org/Security/Server_Side_TLS -# - https://www.ssllabs.com/projects/best-practices/index.html -# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ -# -# The general intent is: -# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), -# - prefer ECDHE over DHE for better performance, -# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and -# security, -# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. 
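The preference list explained in the comment above is assembled into `DEFAULT_CIPHERS` just below. A minimal sketch of applying such a cipher string with the stdlib `ssl` module (the protocol and option choices here are illustrative assumptions, not the vendored code itself):

import ssl

# OpenSSL cipher groups in preference order; '!' entries disable a group.
ciphers = ":".join(["ECDH+AESGCM", "DH+AESGCM", "!aNULL", "!eNULL", "!MD5"])

ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)  # negotiate highest shared version
ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3  # disable broken protocols
ctx.set_ciphers(ciphers)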
-DEFAULT_CIPHERS = ':'.join([ - 'ECDH+AESGCM', - 'ECDH+CHACHA20', - 'DH+AESGCM', - 'DH+CHACHA20', - 'ECDH+AES256', - 'DH+AES256', - 'ECDH+AES128', - 'DH+AES', - 'RSA+AESGCM', - 'RSA+AES', - '!aNULL', - '!eNULL', - '!MD5', -]) - -try: - from ssl import SSLContext # Modern SSL? -except ImportError: - import sys - - class SSLContext(object): # Platform-specific: Python 2 & 3.1 - supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or - (3, 2) <= sys.version_info) - - def __init__(self, protocol_version): - self.protocol = protocol_version - # Use default values from a real SSLContext - self.check_hostname = False - self.verify_mode = ssl.CERT_NONE - self.ca_certs = None - self.options = 0 - self.certfile = None - self.keyfile = None - self.ciphers = None - - def load_cert_chain(self, certfile, keyfile): - self.certfile = certfile - self.keyfile = keyfile - - def load_verify_locations(self, cafile=None, capath=None): - self.ca_certs = cafile - - if capath is not None: - raise SSLError("CA directories not supported in older Pythons") - - def set_ciphers(self, cipher_suite): - if not self.supports_set_ciphers: - raise TypeError( - 'Your version of Python does not support setting ' - 'a custom cipher suite. Please upgrade to Python ' - '2.7, 3.2, or later if you need this functionality.' - ) - self.ciphers = cipher_suite - - def wrap_socket(self, socket, server_hostname=None, server_side=False): - warnings.warn( - 'A true SSLContext object is not available. This prevents ' - 'urllib3 from configuring SSL appropriately and may cause ' - 'certain SSL connections to fail. You can upgrade to a newer ' - 'version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - InsecurePlatformWarning - ) - kwargs = { - 'keyfile': self.keyfile, - 'certfile': self.certfile, - 'ca_certs': self.ca_certs, - 'cert_reqs': self.verify_mode, - 'ssl_version': self.protocol, - 'server_side': server_side, - } - if self.supports_set_ciphers: # Platform-specific: Python 2.7+ - return wrap_socket(socket, ciphers=self.ciphers, **kwargs) - else: # Platform-specific: Python 2.6 - return wrap_socket(socket, **kwargs) - - -def assert_fingerprint(cert, fingerprint): - """ - Checks if given fingerprint matches the supplied certificate. - - :param cert: - Certificate as bytes object. - :param fingerprint: - Fingerprint as string of hexdigits, can be interspersed by colons. - """ - - fingerprint = fingerprint.replace(':', '').lower() - digest_length = len(fingerprint) - hashfunc = HASHFUNC_MAP.get(digest_length) - if not hashfunc: - raise SSLError( - 'Fingerprint of invalid length: {0}'.format(fingerprint)) - - # We need encode() here for py32; works on py2 and py33. - fingerprint_bytes = unhexlify(fingerprint.encode()) - - cert_digest = hashfunc(cert).digest() - - if not _const_compare_digest(cert_digest, fingerprint_bytes): - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(fingerprint, hexlify(cert_digest))) - - -def resolve_cert_reqs(candidate): - """ - Resolves the argument to a numeric constant, which can be passed to - the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. - If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbreviation. - (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.) - If it's neither `None` nor a string we assume it is already the numeric - constant which can directly be passed to wrap_socket.
- """ - if candidate is None: - return CERT_NONE - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'CERT_' + candidate) - return res - - return candidate - - -def resolve_ssl_version(candidate): - """ - like resolve_cert_reqs - """ - if candidate is None: - return PROTOCOL_SSLv23 - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) - return res - - return candidate - - -def create_urllib3_context(ssl_version=None, cert_reqs=None, - options=None, ciphers=None): - """All arguments have the same meaning as ``ssl_wrap_socket``. - - By default, this function does a lot of the same work that - ``ssl.create_default_context`` does on Python 3.4+. It: - - - Disables SSLv2, SSLv3, and compression - - Sets a restricted set of server ciphers - - If you wish to enable SSLv3, you can do:: - - from urllib3.util import ssl_ - context = ssl_.create_urllib3_context() - context.options &= ~ssl_.OP_NO_SSLv3 - - You can do the same to enable compression (substituting ``COMPRESSION`` - for ``SSLv3`` in the last line above). - - :param ssl_version: - The desired protocol version to use. This will default to - PROTOCOL_SSLv23 which will negotiate the highest protocol that both - the server and your installation of OpenSSL support. - :param cert_reqs: - Whether to require the certificate verification. This defaults to - ``ssl.CERT_REQUIRED``. - :param options: - Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, - ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. - :param ciphers: - Which cipher suites to allow the server to select. - :returns: - Constructed SSLContext object with specified options - :rtype: SSLContext - """ - context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) - - # Setting the default here, as we may have no ssl module on import - cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs - - if options is None: - options = 0 - # SSLv2 is easily broken and is considered harmful and dangerous - options |= OP_NO_SSLv2 - # SSLv3 has several problems and is now dangerous - options |= OP_NO_SSLv3 - # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ - # (issue #309) - options |= OP_NO_COMPRESSION - - context.options |= options - - if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 - context.set_ciphers(ciphers or DEFAULT_CIPHERS) - - context.verify_mode = cert_reqs - if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 - # We do our own verification, including fingerprints and alternative - # hostnames. So disable it here - context.check_hostname = False - return context - - -def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None, ciphers=None, ssl_context=None, - ca_cert_dir=None): - """ - All arguments except for server_hostname, ssl_context, and ca_cert_dir have - the same meaning as they do when using :func:`ssl.wrap_socket`. - - :param server_hostname: - When SNI is supported, the expected hostname of the certificate - :param ssl_context: - A pre-made :class:`SSLContext` object. If none is provided, one will - be created using :func:`create_urllib3_context`. - :param ciphers: - A string of ciphers we wish the client to support. This is not - supported on Python 2.6 as the ssl module does not support it. 
- :param ca_cert_dir: - A directory containing CA certificates in multiple separate files, as - supported by OpenSSL's -CApath flag or the capath argument to - SSLContext.load_verify_locations(). - """ - context = ssl_context - if context is None: - # Note: This branch of code and all the variables in it are no longer - # used by urllib3 itself. We should consider deprecating and removing - # this code. - context = create_urllib3_context(ssl_version, cert_reqs, - ciphers=ciphers) - - if ca_certs or ca_cert_dir: - try: - context.load_verify_locations(ca_certs, ca_cert_dir) - except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 - raise SSLError(e) - # Py33 raises FileNotFoundError which subclasses OSError - # These are not equivalent unless we check the errno attribute - except OSError as e: # Platform-specific: Python 3.3 and beyond - if e.errno == errno.ENOENT: - raise SSLError(e) - raise - elif getattr(context, 'load_default_certs', None) is not None: - # try to load OS default certs; works well on Windows (require Python3.4+) - context.load_default_certs() - - if certfile: - context.load_cert_chain(certfile, keyfile) - if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI - return context.wrap_socket(sock, server_hostname=server_hostname) - - warnings.warn( - 'An HTTPS request has been made, but the SNI (Subject Name ' - 'Indication) extension to TLS is not available on this platform. ' - 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. You can upgrade to ' - 'a newer version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - SNIMissingWarning - ) - return context.wrap_socket(sock) diff --git a/pype/vendor/requests/packages/urllib3/util/timeout.py b/pype/vendor/requests/packages/urllib3/util/timeout.py deleted file mode 100644 index cec817e6ef..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/timeout.py +++ /dev/null @@ -1,242 +0,0 @@ -from __future__ import absolute_import -# The default socket timeout, used by httplib to indicate that no timeout was -# specified by the user -from socket import _GLOBAL_DEFAULT_TIMEOUT -import time - -from ..exceptions import TimeoutStateError - -# A sentinel value to indicate that no timeout was specified by the user in -# urllib3 -_Default = object() - - -# Use time.monotonic if available. -current_time = getattr(time, "monotonic", time.time) - - -class Timeout(object): - """ Timeout configuration. - - Timeouts can be defined as a default for a pool:: - - timeout = Timeout(connect=2.0, read=7.0) - http = PoolManager(timeout=timeout) - response = http.request('GET', 'http://example.com/') - - Or per-request (which overrides the default for the pool):: - - response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) - - Timeouts can be disabled by setting all the parameters to ``None``:: - - no_timeout = Timeout(connect=None, read=None) - response = http.request('GET', 'http://example.com/, timeout=no_timeout) - - - :param total: - This combines the connect and read timeouts into one; the read timeout - will be set to the time leftover from the connect attempt. In the - event that both a connect timeout and a total are specified, or a read - timeout and a total are specified, the shorter timeout will be applied. - - Defaults to None. 
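As the ``total`` parameter above describes, the connect phase is budgeted `min(connect, total)` and whatever it consumes is deducted from the read budget (compare `connect_timeout` and `read_timeout` further down). A small sketch of that arithmetic, with invented helper names:

def effective_connect(connect, total):
    # Mirrors Timeout.connect_timeout: the shorter of the two budgets.
    if total is None:
        return connect
    if connect is None:
        return total
    return min(connect, total)

def effective_read(read, total, connect_elapsed):
    # Mirrors Timeout.read_timeout: time spent connecting is deducted.
    if total is None:
        return read
    remaining = max(0, total - connect_elapsed)
    return remaining if read is None else max(0, min(read, remaining))

assert effective_connect(2.0, 7.0) == 2.0
assert effective_read(7.0, 7.0, 1.5) == 5.5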
- - :type total: integer, float, or None - - :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. Omitting the parameter will default the connect timeout to - the system default, probably the global default timeout in socket.py. - None will set an infinite timeout for connection attempts. - - :type connect: integer, float, or None - - :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably the global default timeout in socket.py. - None will set an infinite timeout. - - :type read: integer, float, or None - - .. note:: - - Many factors can affect the total amount of time for urllib3 to return - an HTTP response. - - For example, Python's DNS resolver does not obey the timeout specified - on the socket. Other factors that can affect total request time include - high CPU load, high swap, the program running at a low priority level, - or other behaviors. - - In addition, the read and total timeouts only measure the time between - read operations on the socket connecting the client and the server, - not the total amount of time for the request to return a complete - response. For most requests, the timeout is raised because the server - has not sent the first byte in the specified time. This is not always - the case; if a server streams one byte every fifteen seconds, a timeout - of 20 seconds will not trigger, even though the request will take - several minutes to complete. - - If your goal is to cut off any request after a set amount of wall clock - time, consider having a second "watcher" thread to cut off a slow - request. - """ - - #: A sentinel object representing the default timeout value - DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT - - def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') - self._start_connect = None - - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) - - @classmethod - def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid. - - :param value: The timeout value to validate - :param name: The name of the timeout attribute to validate. This is - used to specify in error messages. - :return: The validated and casted version of the given value. - :raises ValueError: If it is a numeric value less than or equal to - zero, or the type is not an integer, float, or None. - """ - if value is _Default: - return cls.DEFAULT_TIMEOUT - - if value is None or value is cls.DEFAULT_TIMEOUT: - return value - - if isinstance(value, bool): - raise ValueError("Timeout cannot be a boolean value. It must " - "be an int, float or None.") - try: - float(value) - except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." % (name, value)) - - try: - if value <= 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than or equal to 0." % (name, value)) - except TypeError: # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None."
% (name, value)) - - return value - - @classmethod - def from_float(cls, timeout): - """ Create a new Timeout from a legacy timeout value. - - The timeout value used by httplib.py sets the same timeout on the - connect(), and recv() socket requests. This creates a :class:`Timeout` - object that sets the individual timeouts to the ``timeout`` value - passed to this function. - - :param timeout: The legacy timeout value. - :type timeout: integer, float, sentinel default object, or None - :return: Timeout object - :rtype: :class:`Timeout` - """ - return Timeout(read=timeout, connect=timeout) - - def clone(self): - """ Create a copy of the timeout object - - Timeout properties are stored per-pool but each request needs a fresh - Timeout object to ensure each one has its own start/stop configured. - - :return: a copy of the timeout object - :rtype: :class:`Timeout` - """ - # We can't use copy.deepcopy because that will also create a new object - # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to - # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) - - def start_connect(self): - """ Start the timeout clock, used during a connect() attempt - - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to start a timer that has been started already. - """ - if self._start_connect is not None: - raise TimeoutStateError("Timeout timer has already been started.") - self._start_connect = current_time() - return self._start_connect - - def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. - - :return: Elapsed time. - :rtype: float - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to get duration for a timer that hasn't been started. - """ - if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") - return current_time() - self._start_connect - - @property - def connect_timeout(self): - """ Get the value to use when setting a connection timeout. - - This will be a positive float or integer, the value None - (never timeout), or the default system timeout. - - :return: Connect timeout. - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - """ - if self.total is None: - return self._connect - - if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: - return self.total - - return min(self._connect, self.total) - - @property - def read_timeout(self): - """ Get the value for the read timeout. - - This assumes some time has elapsed in the connection timeout and - computes the read timeout appropriately. - - If self.total is set, the read timeout is dependent on the amount of - time taken by the connect timeout. If the connection time has not been - established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be - raised. - - :return: Value to use for the read timeout. - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` - has not yet been called on this object. - """ - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): - # In case the connect timeout has not yet been established. 
- if self._start_connect is None: - return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) - elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: - return max(0, self.total - self.get_connect_duration()) - else: - return self._read diff --git a/pype/vendor/requests/packages/urllib3/util/url.py b/pype/vendor/requests/packages/urllib3/util/url.py deleted file mode 100644 index 6b6f9968d7..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/url.py +++ /dev/null @@ -1,230 +0,0 @@ -from __future__ import absolute_import -from collections import namedtuple - -from ..exceptions import LocationParseError - - -url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] - -# We only want to normalize urls with an HTTP(S) scheme. -# urllib3 infers URLs without a scheme (None) to be http. -NORMALIZABLE_SCHEMES = ('http', 'https', None) - - -class Url(namedtuple('Url', url_attrs)): - """ - Datastructure for representing an HTTP URL. Used as a return value for - :func:`parse_url`. Both the scheme and host are normalized as they are - both case-insensitive according to RFC 3986. - """ - __slots__ = () - - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, - query=None, fragment=None): - if path and not path.startswith('/'): - path = '/' + path - if scheme: - scheme = scheme.lower() - if host and scheme in NORMALIZABLE_SCHEMES: - host = host.lower() - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, - query, fragment) - - @property - def hostname(self): - """For backwards-compatibility with urlparse. We're nice like that.""" - return self.host - - @property - def request_uri(self): - """Absolute path including the query string.""" - uri = self.path or '/' - - if self.query is not None: - uri += '?' + self.query - - return uri - - @property - def netloc(self): - """Network location including host and port""" - if self.port: - return '%s:%d' % (self.host, self.port) - return self.host - - @property - def url(self): - """ - Convert self into a url - - This function should more or less round-trip with :func:`.parse_url`. The - returned url may not be exactly the same as the url inputted to - :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls - with a blank port will have : removed). - - Example: :: - - >>> U = parse_url('http://google.com/mail/') - >>> U.url - 'http://google.com/mail/' - >>> Url('http', 'username:password', 'host.com', 80, - ... '/path', 'query', 'fragment').url - 'http://username:password@host.com:80/path?query#fragment' - """ - scheme, auth, host, port, path, query, fragment = self - url = '' - - # We use "is not None" because we want things to happen with empty strings (or 0 port) - if scheme is not None: - url += scheme + '://' - if auth is not None: - url += auth + '@' - if host is not None: - url += host - if port is not None: - url += ':' + str(port) - if path is not None: - url += path - if query is not None: - url += '?' + query - if fragment is not None: - url += '#' + fragment - - return url - - def __str__(self): - return self.url - - -def split_first(s, delims): - """ - Given a string and an iterable of delimiters, split on the first found - delimiter. Return two split parts and the matched delimiter. - - If not found, then the first part is the full input string.
- - Example:: - - >>> split_first('foo/bar?baz', '?/=') - ('foo', 'bar?baz', '/') - >>> split_first('foo/bar?baz', '123') - ('foo/bar?baz', '', None) - - Scales linearly with number of delims. Not ideal for large number of delims. - """ - min_idx = None - min_delim = None - for d in delims: - idx = s.find(d) - if idx < 0: - continue - - if min_idx is None or idx < min_idx: - min_idx = idx - min_delim = d - - if min_idx is None or min_idx < 0: - return s, '', None - - return s[:min_idx], s[min_idx + 1:], min_delim - - -def parse_url(url): - """ - Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is - performed to parse incomplete urls. Fields not provided will be None. - - Partly backwards-compatible with :mod:`urlparse`. - - Example:: - - >>> parse_url('http://google.com/mail/') - Url(scheme='http', host='google.com', port=None, path='/mail/', ...) - >>> parse_url('google.com:80') - Url(scheme=None, host='google.com', port=80, path=None, ...) - >>> parse_url('/foo?bar') - Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) - """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementation does silly things to be optimal - # on CPython. - - if not url: - # Empty - return Url() - - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. No whitespace, no plus or - # minus prefixes, no non-integer digits such as ^2 (superscript). - if not port.isdigit(): - raise LocationParseError(url) - try: - port = int(port) - except ValueError: - raise LocationParseError(url) - else: - # Blank ports are cool, too. (rfc3986#section-3.2.3) - port = None - - elif not host and url: - host = url - - if not path: - return Url(scheme, auth, host, port, path, query, fragment) - - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) - - # Query - if '?' in path: - path, query = path.split('?', 1) - - return Url(scheme, auth, host, port, path, query, fragment) - - -def get_host(url): - """ - Deprecated. Use :func:`parse_url` instead. - """ - p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port diff --git a/pype/vendor/requests/packages/urllib3/util/wait.py b/pype/vendor/requests/packages/urllib3/util/wait.py deleted file mode 100644 index cb396e508c..0000000000 --- a/pype/vendor/requests/packages/urllib3/util/wait.py +++ /dev/null @@ -1,40 +0,0 @@ -from .selectors import ( - HAS_SELECT, - DefaultSelector, - EVENT_READ, - EVENT_WRITE -) - - -def _wait_for_io_events(socks, events, timeout=None): - """ Waits for IO events to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be interacted with immediately.
""" - if not HAS_SELECT: - raise ValueError('Platform does not have a selector') - if not isinstance(socks, list): - # Probably just a single socket. - if hasattr(socks, "fileno"): - socks = [socks] - # Otherwise it might be a non-list iterable. - else: - socks = list(socks) - with DefaultSelector() as selector: - for sock in socks: - selector.register(sock, events) - return [key[0].fileobj for key in - selector.select(timeout) if key[1] & events] - - -def wait_for_read(socks, timeout=None): - """ Waits for reading to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be read from immediately. """ - return _wait_for_io_events(socks, EVENT_READ, timeout) - - -def wait_for_write(socks, timeout=None): - """ Waits for writing to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be written to immediately. """ - return _wait_for_io_events(socks, EVENT_WRITE, timeout) diff --git a/pype/vendor/requests/sessions.py b/pype/vendor/requests/sessions.py deleted file mode 100644 index d73d700fa6..0000000000 --- a/pype/vendor/requests/sessions.py +++ /dev/null @@ -1,770 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.session -~~~~~~~~~~~~~~~~ - -This module provides a Session object to manage and persist settings across -requests (cookies, auth, proxies). -""" -import os -import sys -import time -from datetime import timedelta - -from .auth import _basic_auth_str -from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping -from .cookies import ( - cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) -from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT -from .hooks import default_hooks, dispatch_hook -from ._internal_utils import to_native_string -from .utils import to_key_val_list, default_headers, DEFAULT_PORTS -from .exceptions import ( - TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) - -from .structures import CaseInsensitiveDict -from .adapters import HTTPAdapter - -from .utils import ( - requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, - get_auth_from_url, rewind_body -) - -from .status_codes import codes - -# formerly defined here, reexposed here for backward compatibility -from .models import REDIRECT_STATI - -# Preferred clock, based on which one is more accurate on a given system. -if sys.platform == 'win32': - try: # Python 3.4+ - preferred_clock = time.perf_counter - except AttributeError: # Earlier than Python 3. - preferred_clock = time.clock -else: - preferred_clock = time.time - - -def merge_setting(request_setting, session_setting, dict_class=OrderedDict): - """Determines appropriate setting for a given request, taking into account - the explicit setting on that request, and the setting in the session. If a - setting is a dictionary, they will be merged together using `dict_class` - """ - - if session_setting is None: - return request_setting - - if request_setting is None: - return session_setting - - # Bypass if not a dictionary (e.g. verify) - if not ( - isinstance(session_setting, Mapping) and - isinstance(request_setting, Mapping) - ): - return request_setting - - merged_setting = dict_class(to_key_val_list(session_setting)) - merged_setting.update(to_key_val_list(request_setting)) - - # Remove keys that are set to None. Extract keys first to avoid altering - # the dictionary during iteration. 
- none_keys = [k for (k, v) in merged_setting.items() if v is None] - for key in none_keys: - del merged_setting[key] - - return merged_setting - - -def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): - """Properly merges both requests and session hooks. - - This is necessary because when request_hooks == {'response': []}, the - merge breaks Session hooks entirely. - """ - if session_hooks is None or session_hooks.get('response') == []: - return request_hooks - - if request_hooks is None or request_hooks.get('response') == []: - return session_hooks - - return merge_setting(request_hooks, session_hooks, dict_class) - - -class SessionRedirectMixin(object): - - def get_redirect_target(self, resp): - """Receives a Response. Returns a redirect URI or ``None``""" - # Due to the nature of how requests processes redirects this method will - # be called at least once upon the original response and at least twice - # on each subsequent redirect response (if any). - # If a custom mixin is used to handle this logic, it may be advantageous - # to cache the redirect location onto the response object as a private - # attribute. - if resp.is_redirect: - location = resp.headers['location'] - # Currently the underlying http module on py3 decodes headers - # in latin1, but empirical evidence suggests that latin1 is very - # rarely used with non-ASCII characters in HTTP headers. - # It is more likely to get a UTF8 header rather than latin1. - # This causes incorrect handling of UTF8 encoded location headers. - # To solve this, we re-encode the location in latin1. - if is_py3: - location = location.encode('latin1') - return to_native_string(location, 'utf8') - return None - - def should_strip_auth(self, old_url, new_url): - """Decide whether Authorization header should be removed when redirecting""" - old_parsed = urlparse(old_url) - new_parsed = urlparse(new_url) - if old_parsed.hostname != new_parsed.hostname: - return True - # Special case: allow http -> https redirect when using the standard - # ports. This isn't specified by RFC 7235, but is kept to avoid - # breaking backwards compatibility with older versions of requests - # that allowed any redirects on the same host. - if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) - and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): - return False - - # Handle default port usage corresponding to scheme. - changed_port = old_parsed.port != new_parsed.port - changed_scheme = old_parsed.scheme != new_parsed.scheme - default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) - if (not changed_scheme and old_parsed.port in default_port - and new_parsed.port in default_port): - return False - - # Standard case: root URI must match - return changed_port or changed_scheme - - def resolve_redirects(self, resp, req, stream=False, timeout=None, - verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): - """Receives a Response. Returns a generator of Responses or Requests.""" - - hist = [] # keep track of history - - url = self.get_redirect_target(resp) - previous_fragment = urlparse(req.url).fragment - while url: - prepared_request = req.copy() - - # Update history and keep track of redirects.
- # resp.history must ignore the original request in this loop - hist.append(resp) - resp.history = hist[1:] - - try: - resp.content # Consume socket so it can be released - except (ChunkedEncodingError, ContentDecodingError, RuntimeError): - resp.raw.read(decode_content=False) - - if len(resp.history) >= self.max_redirects: - raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) - - # Release the connection back into the pool. - resp.close() - - # Handle redirection without scheme (see: RFC 1808 Section 4) - if url.startswith('//'): - parsed_rurl = urlparse(resp.url) - url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) - - # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) - parsed = urlparse(url) - if parsed.fragment == '' and previous_fragment: - parsed = parsed._replace(fragment=previous_fragment) - elif parsed.fragment: - previous_fragment = parsed.fragment - url = parsed.geturl() - - # Facilitate relative 'location' headers, as allowed by RFC 7231. - # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') - # Compliant with RFC3986, we percent encode the url. - if not parsed.netloc: - url = urljoin(resp.url, requote_uri(url)) - else: - url = requote_uri(url) - - prepared_request.url = to_native_string(url) - - self.rebuild_method(prepared_request, resp) - - # https://github.com/requests/requests/issues/1084 - if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): - # https://github.com/requests/requests/issues/3490 - purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') - for header in purged_headers: - prepared_request.headers.pop(header, None) - prepared_request.body = None - - headers = prepared_request.headers - try: - del headers['Cookie'] - except KeyError: - pass - - # Extract any cookies sent on the response to the cookiejar - # in the new request. Because we've mutated our copied prepared - # request, use the old one that we haven't yet touched. - extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) - merge_cookies(prepared_request._cookies, self.cookies) - prepared_request.prepare_cookies(prepared_request._cookies) - - # Rebuild auth and proxy information. - proxies = self.rebuild_proxies(prepared_request, proxies) - self.rebuild_auth(prepared_request, resp) - - # A failed tell() sets `_body_position` to `object()`. This non-None - # value ensures `rewindable` will be True, allowing us to raise an - # UnrewindableBodyError, instead of hanging the connection. - rewindable = ( - prepared_request._body_position is not None and - ('Content-Length' in headers or 'Transfer-Encoding' in headers) - ) - - # Attempt to rewind consumed file-like object. - if rewindable: - rewind_body(prepared_request) - - # Override the original request. - req = prepared_request - - if yield_requests: - yield req - else: - - resp = self.send( - req, - stream=stream, - timeout=timeout, - verify=verify, - cert=cert, - proxies=proxies, - allow_redirects=False, - **adapter_kwargs - ) - - extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) - - # extract redirect url, if any, for the next loop - url = self.get_redirect_target(resp) - yield resp - - def rebuild_auth(self, prepared_request, response): - """When being redirected we may want to strip authentication from the - request to avoid leaking credentials. This method intelligently removes - and reapplies authentication where possible to avoid credential loss. 
- """ - headers = prepared_request.headers - url = prepared_request.url - - if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): - # If we get redirected to a new host, we should strip out any - # authentication headers. - del headers['Authorization'] - - # .netrc might have more auth for us on our new host. - new_auth = get_netrc_auth(url) if self.trust_env else None - if new_auth is not None: - prepared_request.prepare_auth(new_auth) - - return - - def rebuild_proxies(self, prepared_request, proxies): - """This method re-evaluates the proxy configuration by considering the - environment variables. If we are redirected to a URL covered by - NO_PROXY, we strip the proxy configuration. Otherwise, we set missing - proxy keys for this URL (in case they were stripped by a previous - redirect). - - This method also replaces the Proxy-Authorization header where - necessary. - - :rtype: dict - """ - proxies = proxies if proxies is not None else {} - headers = prepared_request.headers - url = prepared_request.url - scheme = urlparse(url).scheme - new_proxies = proxies.copy() - no_proxy = proxies.get('no_proxy') - - bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) - if self.trust_env and not bypass_proxy: - environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) - - proxy = environ_proxies.get(scheme, environ_proxies.get('all')) - - if proxy: - new_proxies.setdefault(scheme, proxy) - - if 'Proxy-Authorization' in headers: - del headers['Proxy-Authorization'] - - try: - username, password = get_auth_from_url(new_proxies[scheme]) - except KeyError: - username, password = None, None - - if username and password: - headers['Proxy-Authorization'] = _basic_auth_str(username, password) - - return new_proxies - - def rebuild_method(self, prepared_request, response): - """When being redirected we may want to change the method of the request - based on certain specs or browser behavior. - """ - method = prepared_request.method - - # https://tools.ietf.org/html/rfc7231#section-6.4.4 - if response.status_code == codes.see_other and method != 'HEAD': - method = 'GET' - - # Do what the browsers do, despite standards... - # First, turn 302s into GETs. - if response.status_code == codes.found and method != 'HEAD': - method = 'GET' - - # Second, if a POST is responded to with a 301, turn it into a GET. - # This bizarre behaviour is explained in Issue 1704. - if response.status_code == codes.moved and method == 'POST': - method = 'GET' - - prepared_request.method = method - - -class Session(SessionRedirectMixin): - """A Requests session. - - Provides cookie persistence, connection-pooling, and configuration. - - Basic Usage:: - - >>> import requests - >>> s = requests.Session() - >>> s.get('https://httpbin.org/get') - - - Or as a context manager:: - - >>> with requests.Session() as s: - >>> s.get('https://httpbin.org/get') - - """ - - __attrs__ = [ - 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', - 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', - 'max_redirects', - ] - - def __init__(self): - - #: A case-insensitive dictionary of headers to be sent on each - #: :class:`Request ` sent from this - #: :class:`Session `. - self.headers = default_headers() - - #: Default Authentication tuple or object to attach to - #: :class:`Request `. - self.auth = None - - #: Dictionary mapping protocol or protocol and host to the URL of the proxy - #: (e.g. 
-        #: be used on each :class:`Request <requests.models.Request>`.
-        self.proxies = {}
-
-        #: Event-handling hooks.
-        self.hooks = default_hooks()
-
-        #: Dictionary of querystring data to attach to each
-        #: :class:`Request <requests.models.Request>`. The dictionary values may be lists for
-        #: representing multivalued query parameters.
-        self.params = {}
-
-        #: Stream response content default.
-        self.stream = False
-
-        #: SSL Verification default.
-        self.verify = True
-
-        #: SSL client certificate default, if String, path to ssl client
-        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
-        self.cert = None
-
-        #: Maximum number of redirects allowed. If the request exceeds this
-        #: limit, a :class:`TooManyRedirects` exception is raised.
-        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
-        #: 30.
-        self.max_redirects = DEFAULT_REDIRECT_LIMIT
-
-        #: Trust environment settings for proxy configuration, default
-        #: authentication and similar.
-        self.trust_env = True
-
-        #: A CookieJar containing all currently outstanding cookies set on this
-        #: session. By default it is a
-        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
-        #: may be any other ``cookielib.CookieJar`` compatible object.
-        self.cookies = cookiejar_from_dict({})
-
-        # Default connection adapters.
-        self.adapters = OrderedDict()
-        self.mount('https://', HTTPAdapter())
-        self.mount('http://', HTTPAdapter())
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        self.close()
-
-    def prepare_request(self, request):
-        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
-        transmission and returns it. The :class:`PreparedRequest` has settings
-        merged from the :class:`Request <Request>` instance and those of the
-        :class:`Session`.
-
-        :param request: :class:`Request` instance to prepare with this
-            session's settings.
-        :rtype: requests.PreparedRequest
-        """
-        cookies = request.cookies or {}
-
-        # Bootstrap CookieJar.
-        if not isinstance(cookies, cookielib.CookieJar):
-            cookies = cookiejar_from_dict(cookies)
-
-        # Merge with session cookies
-        merged_cookies = merge_cookies(
-            merge_cookies(RequestsCookieJar(), self.cookies), cookies)
-
-        # Set environment's basic authentication if not explicitly set.
-        auth = request.auth
-        if self.trust_env and not auth and not self.auth:
-            auth = get_netrc_auth(request.url)
-
-        p = PreparedRequest()
-        p.prepare(
-            method=request.method.upper(),
-            url=request.url,
-            files=request.files,
-            data=request.data,
-            json=request.json,
-            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
-            params=merge_setting(request.params, self.params),
-            auth=merge_setting(auth, self.auth),
-            cookies=merged_cookies,
-            hooks=merge_hooks(request.hooks, self.hooks),
-        )
-        return p
-
-    def request(self, method, url,
-            params=None, data=None, headers=None, cookies=None, files=None,
-            auth=None, timeout=None, allow_redirects=True, proxies=None,
-            hooks=None, stream=None, verify=None, cert=None, json=None):
-        """Constructs a :class:`Request <Request>`, prepares it and sends it.
-        Returns :class:`Response <Response>` object.
-
-        :param method: method for the new :class:`Request` object.
-        :param url: URL for the new :class:`Request` object.
-        :param params: (optional) Dictionary or bytes to be sent in the query
-            string for the :class:`Request`.
-        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
-            object to send in the body of the :class:`Request`.
-        :param json: (optional) json to send in the body of the
-            :class:`Request`.
-        :param headers: (optional) Dictionary of HTTP Headers to send with the
-            :class:`Request`.
-        :param cookies: (optional) Dict or CookieJar object to send with the
-            :class:`Request`.
-        :param files: (optional) Dictionary of ``'filename': file-like-objects``
-            for multipart encoding upload.
-        :param auth: (optional) Auth tuple or callable to enable
-            Basic/Digest/Custom HTTP Auth.
-        :param timeout: (optional) How long to wait for the server to send
-            data before giving up, as a float, or a :ref:`(connect timeout,
-            read timeout) <timeouts>` tuple.
-        :type timeout: float or tuple
-        :param allow_redirects: (optional) Set to True by default.
-        :type allow_redirects: bool
-        :param proxies: (optional) Dictionary mapping protocol or protocol and
-            hostname to the URL of the proxy.
-        :param stream: (optional) whether to immediately download the response
-            content. Defaults to ``False``.
-        :param verify: (optional) Either a boolean, in which case it controls whether we verify
-            the server's TLS certificate, or a string, in which case it must be a path
-            to a CA bundle to use. Defaults to ``True``.
-        :param cert: (optional) if String, path to ssl client cert file (.pem).
-            If Tuple, ('cert', 'key') pair.
-        :rtype: requests.Response
-        """
-        # Create the Request.
-        req = Request(
-            method=method.upper(),
-            url=url,
-            headers=headers,
-            files=files,
-            data=data or {},
-            json=json,
-            params=params or {},
-            auth=auth,
-            cookies=cookies,
-            hooks=hooks,
-        )
-        prep = self.prepare_request(req)
-
-        proxies = proxies or {}
-
-        settings = self.merge_environment_settings(
-            prep.url, proxies, stream, verify, cert
-        )
-
-        # Send the request.
-        send_kwargs = {
-            'timeout': timeout,
-            'allow_redirects': allow_redirects,
-        }
-        send_kwargs.update(settings)
-        resp = self.send(prep, **send_kwargs)
-
-        return resp
-
-    def get(self, url, **kwargs):
-        r"""Sends a GET request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        :rtype: requests.Response
-        """
-
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('GET', url, **kwargs)
-
-    def options(self, url, **kwargs):
-        r"""Sends an OPTIONS request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        :rtype: requests.Response
-        """
-
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('OPTIONS', url, **kwargs)
-
-    def head(self, url, **kwargs):
-        r"""Sends a HEAD request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        :rtype: requests.Response
-        """
-
-        kwargs.setdefault('allow_redirects', False)
-        return self.request('HEAD', url, **kwargs)
-
-    def post(self, url, data=None, json=None, **kwargs):
-        r"""Sends a POST request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, list of tuples, bytes, or file-like
-            object to send in the body of the :class:`Request`.
-        :param json: (optional) json to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        :rtype: requests.Response
-        """
-
-        return self.request('POST', url, data=data, json=json, **kwargs)
-
-    def put(self, url, data=None, **kwargs):
-        r"""Sends a PUT request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('PUT', url, data=data, **kwargs) - - def patch(self, url, data=None, **kwargs): - r"""Sends a PATCH request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, list of tuples, bytes, or file-like - object to send in the body of the :class:`Request`. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('PATCH', url, data=data, **kwargs) - - def delete(self, url, **kwargs): - r"""Sends a DELETE request. Returns :class:`Response` object. - - :param url: URL for the new :class:`Request` object. - :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response - """ - - return self.request('DELETE', url, **kwargs) - - def send(self, request, **kwargs): - """Send a given PreparedRequest. - - :rtype: requests.Response - """ - # Set defaults that the hooks can utilize to ensure they always have - # the correct parameters to reproduce the previous request. - kwargs.setdefault('stream', self.stream) - kwargs.setdefault('verify', self.verify) - kwargs.setdefault('cert', self.cert) - kwargs.setdefault('proxies', self.proxies) - - # It's possible that users might accidentally send a Request object. - # Guard against that specific failure case. - if isinstance(request, Request): - raise ValueError('You can only send PreparedRequests.') - - # Set up variables needed for resolve_redirects and dispatching of hooks - allow_redirects = kwargs.pop('allow_redirects', True) - stream = kwargs.get('stream') - hooks = request.hooks - - # Get the appropriate adapter to use - adapter = self.get_adapter(url=request.url) - - # Start time (approximately) of the request - start = preferred_clock() - - # Send the request - r = adapter.send(request, **kwargs) - - # Total elapsed time of the request (approximately) - elapsed = preferred_clock() - start - r.elapsed = timedelta(seconds=elapsed) - - # Response manipulation hooks - r = dispatch_hook('response', hooks, r, **kwargs) - - # Persist cookies - if r.history: - - # If the hooks create history then we want those cookies too - for resp in r.history: - extract_cookies_to_jar(self.cookies, resp.request, resp.raw) - - extract_cookies_to_jar(self.cookies, request, r.raw) - - # Redirect resolving generator. - gen = self.resolve_redirects(r, request, **kwargs) - - # Resolve redirects if allowed. - history = [resp for resp in gen] if allow_redirects else [] - - # Shuffle things around if there's history. - if history: - # Insert the first (original) request at the start - history.insert(0, r) - # Get the last request made - r = history.pop() - r.history = history - - # If redirects aren't being followed, store the response on the Request for Response.next(). - if not allow_redirects: - try: - r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) - except StopIteration: - pass - - if not stream: - r.content - - return r - - def merge_environment_settings(self, url, proxies, stream, verify, cert): - """ - Check the environment and merge it with some settings. - - :rtype: dict - """ - # Gather clues from the surrounding environment. - if self.trust_env: - # Set environment's proxies. 
- no_proxy = proxies.get('no_proxy') if proxies is not None else None - env_proxies = get_environ_proxies(url, no_proxy=no_proxy) - for (k, v) in env_proxies.items(): - proxies.setdefault(k, v) - - # Look for requests environment configuration and be compatible - # with cURL. - if verify is True or verify is None: - verify = (os.environ.get('REQUESTS_CA_BUNDLE') or - os.environ.get('CURL_CA_BUNDLE')) - - # Merge all the kwargs. - proxies = merge_setting(proxies, self.proxies) - stream = merge_setting(stream, self.stream) - verify = merge_setting(verify, self.verify) - cert = merge_setting(cert, self.cert) - - return {'verify': verify, 'proxies': proxies, 'stream': stream, - 'cert': cert} - - def get_adapter(self, url): - """ - Returns the appropriate connection adapter for the given URL. - - :rtype: requests.adapters.BaseAdapter - """ - for (prefix, adapter) in self.adapters.items(): - - if url.lower().startswith(prefix.lower()): - return adapter - - # Nothing matches :-/ - raise InvalidSchema("No connection adapters were found for '%s'" % url) - - def close(self): - """Closes all adapters and as such the session""" - for v in self.adapters.values(): - v.close() - - def mount(self, prefix, adapter): - """Registers a connection adapter to a prefix. - - Adapters are sorted in descending order by prefix length. - """ - self.adapters[prefix] = adapter - keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] - - for key in keys_to_move: - self.adapters[key] = self.adapters.pop(key) - - def __getstate__(self): - state = {attr: getattr(self, attr, None) for attr in self.__attrs__} - return state - - def __setstate__(self, state): - for attr, value in state.items(): - setattr(self, attr, value) - - -def session(): - """ - Returns a :class:`Session` for context-management. - - .. deprecated:: 1.0.0 - - This method has been deprecated since version 1.0.0 and is only kept for - backwards compatibility. New code should use :class:`~requests.sessions.Session` - to create a session. This may be removed at a future date. - - :rtype: Session - """ - return Session() diff --git a/pype/vendor/requests/status_codes.py b/pype/vendor/requests/status_codes.py deleted file mode 100644 index 813e8c4e62..0000000000 --- a/pype/vendor/requests/status_codes.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- - -r""" -The ``codes`` object defines a mapping from common names for HTTP statuses -to their numerical codes, accessible either as attributes or as dictionary -items. - ->>> requests.codes['temporary_redirect'] -307 ->>> requests.codes.teapot -418 ->>> requests.codes['\o/'] -200 - -Some codes have multiple names, and both upper- and lower-case versions of -the names are allowed. For example, ``codes.ok``, ``codes.OK``, and -``codes.okay`` all correspond to the HTTP status code 200. -""" - -from .structures import LookupDict - -_codes = { - - # Informational. - 100: ('continue',), - 101: ('switching_protocols',), - 102: ('processing',), - 103: ('checkpoint',), - 122: ('uri_too_long', 'request_uri_too_long'), - 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), - 201: ('created',), - 202: ('accepted',), - 203: ('non_authoritative_info', 'non_authoritative_information'), - 204: ('no_content',), - 205: ('reset_content', 'reset'), - 206: ('partial_content', 'partial'), - 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), - 208: ('already_reported',), - 226: ('im_used',), - - # Redirection. 
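-    # Each name in this table becomes an attribute on ``codes`` via _init()
-    # below, e.g. codes.found == 302 and codes['temporary_redirect'] == 307.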
- 300: ('multiple_choices',), - 301: ('moved_permanently', 'moved', '\\o-'), - 302: ('found',), - 303: ('see_other', 'other'), - 304: ('not_modified',), - 305: ('use_proxy',), - 306: ('switch_proxy',), - 307: ('temporary_redirect', 'temporary_moved', 'temporary'), - 308: ('permanent_redirect', - 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 - - # Client Error. - 400: ('bad_request', 'bad'), - 401: ('unauthorized',), - 402: ('payment_required', 'payment'), - 403: ('forbidden',), - 404: ('not_found', '-o-'), - 405: ('method_not_allowed', 'not_allowed'), - 406: ('not_acceptable',), - 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), - 408: ('request_timeout', 'timeout'), - 409: ('conflict',), - 410: ('gone',), - 411: ('length_required',), - 412: ('precondition_failed', 'precondition'), - 413: ('request_entity_too_large',), - 414: ('request_uri_too_large',), - 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), - 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), - 417: ('expectation_failed',), - 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), - 421: ('misdirected_request',), - 422: ('unprocessable_entity', 'unprocessable'), - 423: ('locked',), - 424: ('failed_dependency', 'dependency'), - 425: ('unordered_collection', 'unordered'), - 426: ('upgrade_required', 'upgrade'), - 428: ('precondition_required', 'precondition'), - 429: ('too_many_requests', 'too_many'), - 431: ('header_fields_too_large', 'fields_too_large'), - 444: ('no_response', 'none'), - 449: ('retry_with', 'retry'), - 450: ('blocked_by_windows_parental_controls', 'parental_controls'), - 451: ('unavailable_for_legal_reasons', 'legal_reasons'), - 499: ('client_closed_request',), - - # Server Error. - 500: ('internal_server_error', 'server_error', '/o\\', '✗'), - 501: ('not_implemented',), - 502: ('bad_gateway',), - 503: ('service_unavailable', 'unavailable'), - 504: ('gateway_timeout',), - 505: ('http_version_not_supported', 'http_version'), - 506: ('variant_also_negotiates',), - 507: ('insufficient_storage',), - 509: ('bandwidth_limit_exceeded', 'bandwidth'), - 510: ('not_extended',), - 511: ('network_authentication_required', 'network_auth', 'network_authentication'), -} - -codes = LookupDict(name='status_codes') - -def _init(): - for code, titles in _codes.items(): - for title in titles: - setattr(codes, title, code) - if not title.startswith(('\\', '/')): - setattr(codes, title.upper(), code) - - def doc(code): - names = ', '.join('``%s``' % n for n in _codes[code]) - return '* %d: %s' % (code, names) - - global __doc__ - __doc__ = (__doc__ + '\n' + - '\n'.join(doc(code) for code in sorted(_codes)) - if __doc__ is not None else None) - -_init() diff --git a/pype/vendor/requests/structures.py b/pype/vendor/requests/structures.py deleted file mode 100644 index da930e2852..0000000000 --- a/pype/vendor/requests/structures.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -requests.structures -~~~~~~~~~~~~~~~~~~~ - -Data structures that power Requests. -""" - -from .compat import OrderedDict, Mapping, MutableMapping - - -class CaseInsensitiveDict(MutableMapping): - """A case-insensitive ``dict``-like object. - - Implements all methods and operations of - ``MutableMapping`` as well as dict's ``copy``. Also - provides ``lower_items``. - - All keys are expected to be strings. 
The structure remembers the
-    case of the last key to be set, and ``iter(instance)``,
-    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
-    will contain case-sensitive keys. However, querying and contains
-    testing is case insensitive::
-
-        cid = CaseInsensitiveDict()
-        cid['Accept'] = 'application/json'
-        cid['aCCEPT'] == 'application/json'  # True
-        list(cid) == ['Accept']  # True
-
-    For example, ``headers['content-encoding']`` will return the
-    value of a ``'Content-Encoding'`` response header, regardless
-    of how the header name was originally stored.
-
-    If the constructor, ``.update``, or equality comparison
-    operations are given keys that have equal ``.lower()``s, the
-    behavior is undefined.
-    """
-
-    def __init__(self, data=None, **kwargs):
-        self._store = OrderedDict()
-        if data is None:
-            data = {}
-        self.update(data, **kwargs)
-
-    def __setitem__(self, key, value):
-        # Use the lowercased key for lookups, but store the actual
-        # key alongside the value.
-        self._store[key.lower()] = (key, value)
-
-    def __getitem__(self, key):
-        return self._store[key.lower()][1]
-
-    def __delitem__(self, key):
-        del self._store[key.lower()]
-
-    def __iter__(self):
-        return (casedkey for casedkey, mappedvalue in self._store.values())
-
-    def __len__(self):
-        return len(self._store)
-
-    def lower_items(self):
-        """Like iteritems(), but with all lowercase keys."""
-        return (
-            (lowerkey, keyval[1])
-            for (lowerkey, keyval)
-            in self._store.items()
-        )
-
-    def __eq__(self, other):
-        if isinstance(other, Mapping):
-            other = CaseInsensitiveDict(other)
-        else:
-            return NotImplemented
-        # Compare insensitively
-        return dict(self.lower_items()) == dict(other.lower_items())
-
-    # Copy is required
-    def copy(self):
-        return CaseInsensitiveDict(self._store.values())
-
-    def __repr__(self):
-        return str(dict(self.items()))
-
-
-class LookupDict(dict):
-    """Dictionary lookup object."""
-
-    def __init__(self, name=None):
-        self.name = name
-        super(LookupDict, self).__init__()
-
-    def __repr__(self):
-        return '<lookup \'%s\'>' % (self.name)
-
-    def __getitem__(self, key):
-        # We allow fall-through here, so values default to None
-
-        return self.__dict__.get(key, None)
-
-    def get(self, key, default=None):
-        return self.__dict__.get(key, default)
diff --git a/pype/vendor/requests/utils.py b/pype/vendor/requests/utils.py
deleted file mode 100644
index 8170a8d2c4..0000000000
--- a/pype/vendor/requests/utils.py
+++ /dev/null
@@ -1,977 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.utils
-~~~~~~~~~~~~~~
-
-This module provides utility functions that are used within Requests
-that are also useful for external consumption.
-"""
-
-import codecs
-import contextlib
-import io
-import os
-import re
-import socket
-import struct
-import sys
-import tempfile
-import warnings
-import zipfile
-
-from .__version__ import __version__
-from . import certs
-# to_native_string is unused here, but imported here for backwards compatibility
-from ._internal_utils import to_native_string
-from .compat import parse_http_list as _parse_list_header
-from .compat import (
-    quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,
-    proxy_bypass, urlunparse, basestring, integer_types, is_py3,
-    proxy_bypass_environment, getproxies_environment, Mapping)
-from .cookies import cookiejar_from_dict
-from .structures import CaseInsensitiveDict
-from .exceptions import (
-    InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
-
-NETRC_FILES = ('.netrc', '_netrc')
-
-DEFAULT_CA_BUNDLE_PATH = certs.where()
-
-DEFAULT_PORTS = {'http': 80, 'https': 443}
-
-
-if sys.platform == 'win32':
-    # provide a proxy_bypass version on Windows without DNS lookups
-
-    def proxy_bypass_registry(host):
-        try:
-            if is_py3:
-                import winreg
-            else:
-                import _winreg as winreg
-        except ImportError:
-            return False
-
-        try:
-            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
-                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
-            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
-            proxyEnable = int(winreg.QueryValueEx(internetSettings,
-                                                  'ProxyEnable')[0])
-            # ProxyOverride is almost always a string
-            proxyOverride = winreg.QueryValueEx(internetSettings,
-                                                'ProxyOverride')[0]
-        except OSError:
-            return False
-        if not proxyEnable or not proxyOverride:
-            return False
-
-        # make a check value list from the registry entry: replace the
-        # '<local>' string by the localhost entry and the corresponding
-        # canonical entry.
-        proxyOverride = proxyOverride.split(';')
-        # now check if we match one of the registry values.
-        for test in proxyOverride:
-            if test == '<local>':
-                if '.' not in host:
-                    return True
-            test = test.replace(".", r"\.")  # mask dots
-            test = test.replace("*", r".*")  # change glob sequence
-            test = test.replace("?", r".")   # change glob char
-            if re.match(test, host, re.I):
-                return True
-        return False
-
-    def proxy_bypass(host):  # noqa
-        """Return True, if the host should be bypassed.
-
-        Checks proxy settings gathered from the environment, if specified,
-        or the registry.
-        """
-        if getproxies_environment():
-            return proxy_bypass_environment(host)
-        else:
-            return proxy_bypass_registry(host)
-
-
-def dict_to_sequence(d):
-    """Returns an internal sequence dictionary update."""
-
-    if hasattr(d, 'items'):
-        d = d.items()
-
-    return d
-
-
-def super_len(o):
-    total_length = None
-    current_position = 0
-
-    if hasattr(o, '__len__'):
-        total_length = len(o)
-
-    elif hasattr(o, 'len'):
-        total_length = o.len
-
-    elif hasattr(o, 'fileno'):
-        try:
-            fileno = o.fileno()
-        except io.UnsupportedOperation:
-            pass
-        else:
-            total_length = os.fstat(fileno).st_size
-
-            # Having used fstat to determine the file length, we need to
-            # confirm that this file was opened up in binary mode.
-            if 'b' not in o.mode:
-                warnings.warn((
-                    "Requests has determined the content-length for this "
-                    "request using the binary size of the file: however, the "
-                    "file has been opened in text mode (i.e. without the 'b' "
-                    "flag in the mode). This may lead to an incorrect "
-                    "content-length. In Requests 3.0, support will be removed "
-                    "for files in text mode."),
-                    FileModeWarning
-                )
-
-    if hasattr(o, 'tell'):
-        try:
-            current_position = o.tell()
-        except (OSError, IOError):
-            # This can happen in some weird situations, such as when the file
-            # is actually a special file descriptor like stdin.
In this - # instance, we don't know what the length is, so set it to zero and - # let requests chunk it instead. - if total_length is not None: - current_position = total_length - else: - if hasattr(o, 'seek') and total_length is None: - # StringIO and BytesIO have seek but no useable fileno - try: - # seek to end of file - o.seek(0, 2) - total_length = o.tell() - - # seek back to current position to support - # partially read file-like objects - o.seek(current_position or 0) - except (OSError, IOError): - total_length = 0 - - if total_length is None: - total_length = 0 - - return max(0, total_length - current_position) - - -def get_netrc_auth(url, raise_errors=False): - """Returns the Requests tuple auth for a given url from netrc.""" - - try: - from netrc import netrc, NetrcParseError - - netrc_path = None - - for f in NETRC_FILES: - try: - loc = os.path.expanduser('~/{}'.format(f)) - except KeyError: - # os.path.expanduser can fail when $HOME is undefined and - # getpwuid fails. See https://bugs.python.org/issue20164 & - # https://github.com/requests/requests/issues/1846 - return - - if os.path.exists(loc): - netrc_path = loc - break - - # Abort early if there isn't one. - if netrc_path is None: - return - - ri = urlparse(url) - - # Strip port numbers from netloc. This weird `if...encode`` dance is - # used for Python 3.2, which doesn't support unicode literals. - splitstr = b':' - if isinstance(url, str): - splitstr = splitstr.decode('ascii') - host = ri.netloc.split(splitstr)[0] - - try: - _netrc = netrc(netrc_path).authenticators(host) - if _netrc: - # Return with login / password - login_i = (0 if _netrc[0] else 1) - return (_netrc[login_i], _netrc[2]) - except (NetrcParseError, IOError): - # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth unless explicitly asked to raise errors. - if raise_errors: - raise - - # AppEngine hackiness. - except (ImportError, AttributeError): - pass - - -def guess_filename(obj): - """Tries to guess the filename of the given object.""" - name = getattr(obj, 'name', None) - if (name and isinstance(name, basestring) and name[0] != '<' and - name[-1] != '>'): - return os.path.basename(name) - - -def extract_zipped_paths(path): - """Replace nonexistent paths that look like they refer to a member of a zip - archive with the location of an extracted copy of the target, or else - just return the provided path unchanged. - """ - if os.path.exists(path): - # this is already a valid path, no need to do anything further - return path - - # find the first valid part of the provided path and treat that as a zip archive - # assume the rest of the path is the name of a member in the archive - archive, member = os.path.split(path) - while archive and not os.path.exists(archive): - archive, prefix = os.path.split(archive) - member = '/'.join([prefix, member]) - - if not zipfile.is_zipfile(archive): - return path - - zip_file = zipfile.ZipFile(archive) - if member not in zip_file.namelist(): - return path - - # we have a valid zip archive and a valid member of that archive - tmp = tempfile.gettempdir() - extracted_path = os.path.join(tmp, *member.split('/')) - if not os.path.exists(extracted_path): - extracted_path = zip_file.extract(member, path=tmp) - - return extracted_path - - -def from_key_val_list(value): - """Take an object and test to see if it can be represented as a - dictionary. 
Unless it can not be represented as such, return an - OrderedDict, e.g., - - :: - - >>> from_key_val_list([('key', 'val')]) - OrderedDict([('key', 'val')]) - >>> from_key_val_list('string') - ValueError: cannot encode objects that are not 2-tuples - >>> from_key_val_list({'key': 'val'}) - OrderedDict([('key', 'val')]) - - :rtype: OrderedDict - """ - if value is None: - return None - - if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') - - return OrderedDict(value) - - -def to_key_val_list(value): - """Take an object and test to see if it can be represented as a - dictionary. If it can be, return a list of tuples, e.g., - - :: - - >>> to_key_val_list([('key', 'val')]) - [('key', 'val')] - >>> to_key_val_list({'key': 'val'}) - [('key', 'val')] - >>> to_key_val_list('string') - ValueError: cannot encode objects that are not 2-tuples. - - :rtype: list - """ - if value is None: - return None - - if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') - - if isinstance(value, Mapping): - value = value.items() - - return list(value) - - -# From mitsuhiko/werkzeug (used with permission). -def parse_list_header(value): - """Parse lists as described by RFC 2068 Section 2. - - In particular, parse comma-separated lists where the elements of - the list may include quoted-strings. A quoted-string could - contain a comma. A non-quoted string could have quotes in the - middle. Quotes are removed automatically after parsing. - - It basically works like :func:`parse_set_header` just that items - may appear multiple times and case sensitivity is preserved. - - The return value is a standard :class:`list`: - - >>> parse_list_header('token, "quoted value"') - ['token', 'quoted value'] - - To create a header from the :class:`list` again, use the - :func:`dump_header` function. - - :param value: a string with a list header. - :return: :class:`list` - :rtype: list - """ - result = [] - for item in _parse_list_header(value): - if item[:1] == item[-1:] == '"': - item = unquote_header_value(item[1:-1]) - result.append(item) - return result - - -# From mitsuhiko/werkzeug (used with permission). -def parse_dict_header(value): - """Parse lists of key, value pairs as described by RFC 2068 Section 2 and - convert them into a python dict: - - >>> d = parse_dict_header('foo="is a fish", bar="as well"') - >>> type(d) is dict - True - >>> sorted(d.items()) - [('bar', 'as well'), ('foo', 'is a fish')] - - If there is no value for a key it will be `None`: - - >>> parse_dict_header('key_without_value') - {'key_without_value': None} - - To create a header from the :class:`dict` again, use the - :func:`dump_header` function. - - :param value: a string with a dict header. - :return: :class:`dict` - :rtype: dict - """ - result = {} - for item in _parse_list_header(value): - if '=' not in item: - result[item] = None - continue - name, value = item.split('=', 1) - if value[:1] == value[-1:] == '"': - value = unquote_header_value(value[1:-1]) - result[name] = value - return result - - -# From mitsuhiko/werkzeug (used with permission). -def unquote_header_value(value, is_filename=False): - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). - This does not use the real unquoting but what browsers are actually - using for quoting. - - :param value: the header value to unquote. 
-    :rtype: str
-    """
-    if value and value[0] == value[-1] == '"':
-        # this is not the real unquoting, but fixing this so that the
-        # RFC is met will result in bugs with internet explorer and
-        # probably some other browsers as well. IE for example is
-        # uploading files with "C:\foo\bar.txt" as filename
-        value = value[1:-1]
-
-        # if this is a filename and the starting characters look like
-        # a UNC path, then just return the value without quotes. Using the
-        # replace sequence below on a UNC path has the effect of turning
-        # the leading double slash into a single slash and then
-        # _fix_ie_filename() doesn't work correctly. See #458.
-        if not is_filename or value[:2] != '\\\\':
-            return value.replace('\\\\', '\\').replace('\\"', '"')
-    return value
-
-
-def dict_from_cookiejar(cj):
-    """Returns a key/value dictionary from a CookieJar.
-
-    :param cj: CookieJar object to extract cookies from.
-    :rtype: dict
-    """
-
-    cookie_dict = {}
-
-    for cookie in cj:
-        cookie_dict[cookie.name] = cookie.value
-
-    return cookie_dict
-
-
-def add_dict_to_cookiejar(cj, cookie_dict):
-    """Returns a CookieJar from a key/value dictionary.
-
-    :param cj: CookieJar to insert cookies into.
-    :param cookie_dict: Dict of key/values to insert into CookieJar.
-    :rtype: CookieJar
-    """
-
-    return cookiejar_from_dict(cookie_dict, cj)
-
-
-def get_encodings_from_content(content):
-    """Returns encodings from given content string.
-
-    :param content: bytestring to extract encodings from.
-    """
-    warnings.warn((
-        'In requests 3.0, get_encodings_from_content will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
-
-    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
-    pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
-    xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
-
-    return (charset_re.findall(content) +
-            pragma_re.findall(content) +
-            xml_re.findall(content))
-
-
-def _parse_content_type_header(header):
-    """Returns content type and parameters from given header
-
-    :param header: string
-    :return: tuple containing content type and dictionary of
-        parameters
-    """
-
-    tokens = header.split(';')
-    content_type, params = tokens[0].strip(), tokens[1:]
-    params_dict = {}
-    items_to_strip = "\"' "
-
-    for param in params:
-        param = param.strip()
-        if param:
-            key, value = param, True
-            index_of_equals = param.find("=")
-            if index_of_equals != -1:
-                key = param[:index_of_equals].strip(items_to_strip)
-                value = param[index_of_equals + 1:].strip(items_to_strip)
-            params_dict[key.lower()] = value
-    return content_type, params_dict
-
-
-def get_encoding_from_headers(headers):
-    """Returns encodings from given HTTP Header Dict.
-
-    :param headers: dictionary to extract encoding from.
- :rtype: str - """ - - content_type = headers.get('content-type') - - if not content_type: - return None - - content_type, params = _parse_content_type_header(content_type) - - if 'charset' in params: - return params['charset'].strip("'\"") - - if 'text' in content_type: - return 'ISO-8859-1' - - -def stream_decode_response_unicode(iterator, r): - """Stream decodes a iterator.""" - - if r.encoding is None: - for item in iterator: - yield item - return - - decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') - for chunk in iterator: - rv = decoder.decode(chunk) - if rv: - yield rv - rv = decoder.decode(b'', final=True) - if rv: - yield rv - - -def iter_slices(string, slice_length): - """Iterate over slices of a string.""" - pos = 0 - if slice_length is None or slice_length <= 0: - slice_length = len(string) - while pos < len(string): - yield string[pos:pos + slice_length] - pos += slice_length - - -def get_unicode_from_response(r): - """Returns the requested content back in unicode. - - :param r: Response object to get unicode content from. - - Tried: - - 1. charset from content-type - 2. fall back and replace all unicode characters - - :rtype: str - """ - warnings.warn(( - 'In requests 3.0, get_unicode_from_response will be removed. For ' - 'more information, please see the discussion on issue #2266. (This' - ' warning should only appear once.)'), - DeprecationWarning) - - tried_encodings = [] - - # Try charset from content-type - encoding = get_encoding_from_headers(r.headers) - - if encoding: - try: - return str(r.content, encoding) - except UnicodeError: - tried_encodings.append(encoding) - - # Fall back: - try: - return str(r.content, encoding, errors='replace') - except TypeError: - return r.content - - -# The unreserved URI characters (RFC 3986) -UNRESERVED_SET = frozenset( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") - - -def unquote_unreserved(uri): - """Un-escape any percent-escape sequences in a URI that are unreserved - characters. This leaves all reserved, illegal and non-ASCII bytes encoded. - - :rtype: str - """ - parts = uri.split('%') - for i in range(1, len(parts)): - h = parts[i][0:2] - if len(h) == 2 and h.isalnum(): - try: - c = chr(int(h, 16)) - except ValueError: - raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) - - if c in UNRESERVED_SET: - parts[i] = c + parts[i][2:] - else: - parts[i] = '%' + parts[i] - else: - parts[i] = '%' + parts[i] - return ''.join(parts) - - -def requote_uri(uri): - """Re-quote the given URI. - - This function passes the given URI through an unquote/quote cycle to - ensure that it is fully and consistently quoted. - - :rtype: str - """ - safe_with_percent = "!#$%&'()*+,/:;=?@[]~" - safe_without_percent = "!#$&'()*+,/:;=?@[]~" - try: - # Unquote only the unreserved characters - # Then quote only illegal characters (do not quote reserved, - # unreserved, or '%') - return quote(unquote_unreserved(uri), safe=safe_with_percent) - except InvalidURL: - # We couldn't unquote the given URI, so let's try quoting it, but - # there may be unquoted '%'s in the URI. We need to make sure they're - # properly quoted so they do not cause issues elsewhere. 
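-        # For example, requote_uri('http://x/a b%zz') lands here ('%zz' is
-        # not a valid escape), and is re-quoted wholesale to
-        # 'http://x/a%20b%25zz' -- on this path '%' itself gets encoded.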
- return quote(uri, safe=safe_without_percent) - - -def address_in_network(ip, net): - """This function allows you to check if an IP belongs to a network subnet - - Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 - returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 - - :rtype: bool - """ - ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] - netaddr, bits = net.split('/') - netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] - network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask - return (ipaddr & netmask) == (network & netmask) - - -def dotted_netmask(mask): - """Converts mask from /xx format to xxx.xxx.xxx.xxx - - Example: if mask is 24 function returns 255.255.255.0 - - :rtype: str - """ - bits = 0xffffffff ^ (1 << 32 - mask) - 1 - return socket.inet_ntoa(struct.pack('>I', bits)) - - -def is_ipv4_address(string_ip): - """ - :rtype: bool - """ - try: - socket.inet_aton(string_ip) - except socket.error: - return False - return True - - -def is_valid_cidr(string_network): - """ - Very simple check of the cidr format in no_proxy variable. - - :rtype: bool - """ - if string_network.count('/') == 1: - try: - mask = int(string_network.split('/')[1]) - except ValueError: - return False - - if mask < 1 or mask > 32: - return False - - try: - socket.inet_aton(string_network.split('/')[0]) - except socket.error: - return False - else: - return False - return True - - -@contextlib.contextmanager -def set_environ(env_name, value): - """Set the environment variable 'env_name' to 'value' - - Save previous value, yield, and then restore the previous value stored in - the environment variable 'env_name'. - - If 'value' is None, do nothing""" - value_changed = value is not None - if value_changed: - old_value = os.environ.get(env_name) - os.environ[env_name] = value - try: - yield - finally: - if value_changed: - if old_value is None: - del os.environ[env_name] - else: - os.environ[env_name] = old_value - - -def should_bypass_proxies(url, no_proxy): - """ - Returns whether we should bypass proxies or not. - - :rtype: bool - """ - # Prioritize lowercase environment variables over uppercase - # to keep a consistent behaviour with other http projects (curl, wget). - get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) - - # First check whether no_proxy is defined. If it is, check that the URL - # we're getting isn't in the no_proxy list. - no_proxy_arg = no_proxy - if no_proxy is None: - no_proxy = get_proxy('no_proxy') - parsed = urlparse(url) - - if parsed.hostname is None: - # URLs don't always have hostnames, e.g. file:/// urls. - return True - - if no_proxy: - # We need to check whether we match here. We need to see if we match - # the end of the hostname, both with and without the port. - no_proxy = ( - host for host in no_proxy.replace(' ', '').split(',') if host - ) - - if is_ipv4_address(parsed.hostname): - for proxy_ip in no_proxy: - if is_valid_cidr(proxy_ip): - if address_in_network(parsed.hostname, proxy_ip): - return True - elif parsed.hostname == proxy_ip: - # If no_proxy ip was defined in plain IP notation instead of cidr notation & - # matches the IP of the index - return True - else: - host_with_port = parsed.hostname - if parsed.port: - host_with_port += ':{}'.format(parsed.port) - - for host in no_proxy: - if parsed.hostname.endswith(host) or host_with_port.endswith(host): - # The URL does match something in no_proxy, so we don't want - # to apply the proxies on this URL. 
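-                    # e.g. no_proxy='example.com' bypasses both
-                    # 'api.example.com' and 'api.example.com:8080' via this
-                    # suffix check.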
-                    return True
-
-    with set_environ('no_proxy', no_proxy_arg):
-        # parsed.hostname can be `None` in cases such as a file URI.
-        try:
-            bypass = proxy_bypass(parsed.hostname)
-        except (TypeError, socket.gaierror):
-            bypass = False
-
-    if bypass:
-        return True
-
-    return False
-
-
-def get_environ_proxies(url, no_proxy=None):
-    """
-    Return a dict of environment proxies.
-
-    :rtype: dict
-    """
-    if should_bypass_proxies(url, no_proxy=no_proxy):
-        return {}
-    else:
-        return getproxies()
-
-
-def select_proxy(url, proxies):
-    """Select a proxy for the url, if applicable.
-
-    :param url: The url of the request
-    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
-    """
-    proxies = proxies or {}
-    urlparts = urlparse(url)
-    if urlparts.hostname is None:
-        return proxies.get(urlparts.scheme, proxies.get('all'))
-
-    proxy_keys = [
-        urlparts.scheme + '://' + urlparts.hostname,
-        urlparts.scheme,
-        'all://' + urlparts.hostname,
-        'all',
-    ]
-    proxy = None
-    for proxy_key in proxy_keys:
-        if proxy_key in proxies:
-            proxy = proxies[proxy_key]
-            break
-
-    return proxy
-
-
-def default_user_agent(name="python-requests"):
-    """
-    Return a string representing the default user agent.
-
-    :rtype: str
-    """
-    return '%s/%s' % (name, __version__)
-
-
-def default_headers():
-    """
-    :rtype: requests.structures.CaseInsensitiveDict
-    """
-    return CaseInsensitiveDict({
-        'User-Agent': default_user_agent(),
-        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
-        'Accept': '*/*',
-        'Connection': 'keep-alive',
-    })
-
-
-def parse_header_links(value):
-    """Return a list of parsed link headers proxies.
-
-    i.e. Link: <http://.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
-
-    :rtype: list
-    """
-
-    links = []
-
-    replace_chars = ' \'"'
-
-    value = value.strip(replace_chars)
-    if not value:
-        return links
-
-    for val in re.split(', *<', value):
-        try:
-            url, params = val.split(';', 1)
-        except ValueError:
-            url, params = val, ''
-
-        link = {'url': url.strip('<> \'"')}
-
-        for param in params.split(';'):
-            try:
-                key, value = param.split('=')
-            except ValueError:
-                break
-
-            link[key.strip(replace_chars)] = value.strip(replace_chars)
-
-        links.append(link)
-
-    return links
-
-
-# Null bytes; no need to recreate these on each call to guess_json_utf
-_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
-_null2 = _null * 2
-_null3 = _null * 3
-
-
-def guess_json_utf(data):
-    """
-    :rtype: str
-    """
-    # JSON always starts with two ASCII characters, so detection is as
-    # easy as counting the nulls and from their location and count
-    # determine the encoding. Also detect a BOM, if present.
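-    # For instance, b'{"a": 1}' has no NULs in its first four bytes, giving
-    # 'utf-8', while '"x"'.encode('utf-16-le') samples as b'"\x00x\x00'
-    # (NULs in 2nd and 4th place), giving 'utf-16-le'.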
- sample = data[:4] - if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): - return 'utf-32' # BOM included - if sample[:3] == codecs.BOM_UTF8: - return 'utf-8-sig' # BOM included, MS style (discouraged) - if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): - return 'utf-16' # BOM included - nullcount = sample.count(_null) - if nullcount == 0: - return 'utf-8' - if nullcount == 2: - if sample[::2] == _null2: # 1st and 3rd are null - return 'utf-16-be' - if sample[1::2] == _null2: # 2nd and 4th are null - return 'utf-16-le' - # Did not detect 2 valid UTF-16 ascii-range characters - if nullcount == 3: - if sample[:3] == _null3: - return 'utf-32-be' - if sample[1:] == _null3: - return 'utf-32-le' - # Did not detect a valid UTF-32 ascii-range character - return None - - -def prepend_scheme_if_needed(url, new_scheme): - """Given a URL that may or may not have a scheme, prepend the given scheme. - Does not replace a present scheme with the one provided as an argument. - - :rtype: str - """ - scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) - - # urlparse is a finicky beast, and sometimes decides that there isn't a - # netloc present. Assume that it's being over-cautious, and switch netloc - # and path if urlparse decided there was no netloc. - if not netloc: - netloc, path = path, netloc - - return urlunparse((scheme, netloc, path, params, query, fragment)) - - -def get_auth_from_url(url): - """Given a url with authentication components, extract them into a tuple of - username,password. - - :rtype: (str,str) - """ - parsed = urlparse(url) - - try: - auth = (unquote(parsed.username), unquote(parsed.password)) - except (AttributeError, TypeError): - auth = ('', '') - - return auth - - -# Moved outside of function to avoid recompile every call -_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') -_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') - - -def check_header_validity(header): - """Verifies that header value is a string which doesn't contain - leading whitespace or return characters. This prevents unintended - header injection. - - :param header: tuple, in the format (name, value). - """ - name, value = header - - if isinstance(value, bytes): - pat = _CLEAN_HEADER_REGEX_BYTE - else: - pat = _CLEAN_HEADER_REGEX_STR - try: - if not pat.match(value): - raise InvalidHeader("Invalid return character or leading space in header: %s" % name) - except TypeError: - raise InvalidHeader("Value for header {%s: %s} must be of type str or " - "bytes, not %s" % (name, value, type(value))) - - -def urldefragauth(url): - """ - Given a url remove the fragment and the authentication part. - - :rtype: str - """ - scheme, netloc, path, params, query, fragment = urlparse(url) - - # see func:`prepend_scheme_if_needed` - if not netloc: - netloc, path = path, netloc - - netloc = netloc.rsplit('@', 1)[-1] - - return urlunparse((scheme, netloc, path, params, query, '')) - - -def rewind_body(prepared_request): - """Move file pointer back to its recorded starting position - so it can be read again on redirect. 
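-
-    A sketch of the mechanism, assuming an ``io.BytesIO`` body (the recorded
-    position comes from ``tell()`` when the request was prepared)::
-
-        import io
-
-        body = io.BytesIO(b'payload')
-        body.read()   # the first send consumes the stream
-        body.seek(0)  # rewind_body() effectively does this before re-sending
-        assert body.read() == b'payload'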
- """ - body_seek = getattr(prepared_request.body, 'seek', None) - if body_seek is not None and isinstance(prepared_request._body_position, integer_types): - try: - body_seek(prepared_request._body_position) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect.") - else: - raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/pype/vendor/scriptsmenu/__init__.py b/pype/vendor/scriptsmenu/__init__.py deleted file mode 100644 index a881f73533..0000000000 --- a/pype/vendor/scriptsmenu/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .scriptsmenu import ScriptsMenu -from . import version - -__all__ = ["ScriptsMenu"] -__version__ = version.version diff --git a/pype/vendor/scriptsmenu/action.py b/pype/vendor/scriptsmenu/action.py deleted file mode 100644 index 5e68628406..0000000000 --- a/pype/vendor/scriptsmenu/action.py +++ /dev/null @@ -1,208 +0,0 @@ -import os - -from .vendor.Qt import QtWidgets - - -class Action(QtWidgets.QAction): - """Custom Action widget""" - - def __init__(self, parent=None): - - QtWidgets.QAction.__init__(self, parent) - - self._root = None - self._tags = list() - self._command = None - self._sourcetype = None - self._iconfile = None - self._label = None - - self._COMMAND = """import imp -f, filepath, descr = imp.find_module('{module_name}', ['{dirname}']) -module = imp.load_module('{module_name}', f, filepath, descr) -module.{module_name}()""" - - @property - def root(self): - return self._root - - @root.setter - def root(self, value): - self._root = value - - @property - def tags(self): - return self._tags - - @tags.setter - def tags(self, value): - self._tags = value - - @property - def command(self): - return self._command - - @command.setter - def command(self, value): - """ - Store the command in the QAction - - Args: - value (str): the full command which will be executed when clicked - - Return: - None - """ - self._command = value - - @property - def sourcetype(self): - return self._sourcetype - - @sourcetype.setter - def sourcetype(self, value): - """ - Set the command type to get the correct execution of the command given - - Args: - value (str): the name of the command type - - Returns: - None - - """ - self._sourcetype = value - - @property - def iconfile(self): - return self._iconfile - - @iconfile.setter - def iconfile(self, value): - """Store the path to the image file which needs to be displayed - - Args: - value (str): the path to the image - - Returns: - None - """ - self._iconfile = value - - @property - def label(self): - return self._label - - @label.setter - def label(self, value): - """ - Set the abbreviation which will be used as overlay text in the shelf - - Args: - value (str): an abbreviation of the name - - Returns: - None - - """ - self._label = value - - def run_command(self): - """ - Run the command of the instance or copy the command to the active shelf - based on the current modifiers. - - If callbacks have been registered with fouind modifier integer the - function will trigger all callbacks. When a callback function returns a - non zero integer it will not execute the action's command - - """ - - # get the current application and its linked keyboard modifiers - app = QtWidgets.QApplication.instance() - modifiers = app.keyboardModifiers() - - # If the menu has a callback registered for the current modifier - # we run the callback instead of the action itself. 
-        registered = self._root.registered_callbacks
-        callbacks = registered.get(int(modifiers), [])
-        for callback in callbacks:
-            signal = callback(self)
-            if signal != 0:
-                # Exit function on non-zero return code
-                return
-
-        exec(self.process_command())
-
-    def process_command(self):
-        """
-        Check if the command is a file which needs to be launched and if it
-        has a relative path, if so return the full path by expanding
-        environment variables. Wrap any mel command in an executable string
-        for Python and return the string if the source type is
-
-        Add your own source type and required process to ensure callback
-        is stored correctly.
-
-        An example of a process is the sourcetype is MEL
-        (Maya Embedded Language) as Python cannot run it on its own so it
-        needs to be wrapped in a string in which we explicitly import mel and
-        run it as a mel.eval. The string is then parsed to python as
-        exec("command").
-
-        Returns:
-            str: a clean command which can be used
-
-        """
-        if self._sourcetype == "python":
-            return self._command
-
-        if self._sourcetype == "mel":
-            # Escape single quotes
-            conversion = self._command.replace("'", "\\'")
-            return "import maya; maya.mel.eval('{}')".format(conversion)
-
-        if self._sourcetype == "file":
-            if os.path.isabs(self._command):
-                filepath = self._command
-            else:
-                filepath = os.path.normpath(os.path.expandvars(self._command))
-
-            return self._wrap_filepath(filepath)
-
-    def has_tag(self, tag):
-        """Check whether the tag matches with the action's tags.
-
-        A partial match will also return True, for example tag `a` will match
-        correctly with the `ape` tag on the Action.
-
-        Args:
-            tag (str): The tag
-
-        Returns
-            bool: Whether the action is tagged with given tag
-
-        """
-
-        for tagitem in self.tags:
-            if tag not in tagitem:
-                continue
-            return True
-
-        return False
-
-    def _wrap_filepath(self, file_path):
-        """Create a wrapped string for the python command
-
-        Args:
-            file_path (str): the filepath of a script
-
-        Returns:
-            str: the wrapped command
-        """
-
-        dirname = os.path.dirname(r"{}".format(file_path))
-        dirpath = dirname.replace("\\", "/")
-        module_name = os.path.splitext(os.path.basename(file_path))[0]
-
-        return self._COMMAND.format(module_name=module_name, dirname=dirpath)
diff --git a/pype/vendor/scriptsmenu/launchformaya.py b/pype/vendor/scriptsmenu/launchformaya.py
deleted file mode 100644
index 7ad66f0ad2..0000000000
--- a/pype/vendor/scriptsmenu/launchformaya.py
+++ /dev/null
@@ -1,137 +0,0 @@
-import logging
-
-import maya.cmds as cmds
-import maya.mel as mel
-
-import scriptsmenu
-from .vendor.Qt import QtCore, QtWidgets
-
-log = logging.getLogger(__name__)
-
-
-def register_repeat_last(action):
-    """Register the action in repeatLast to ensure the RepeatLast hotkey works
-
-    Args:
-        action (action.Action): Action widget instance
-
-    Returns:
-        int: 0
-
-    """
-    command = action.process_command()
-    command = command.replace("\n", "; ")
-    # Register command to Maya (mel)
-    cmds.repeatLast(addCommand='python("{}")'.format(command),
-                    addCommandLabel=action.label)
-
-    return 0
-
-
-def to_shelf(action):
-    """Copy clicked menu item to the currently active Maya shelf
-    Args:
-        action (action.Action): the action instance which is clicked
-
-    Returns:
-        int: 1
-
-    """
-
-    shelftoplevel = mel.eval("$gShelfTopLevel = $gShelfTopLevel;")
-    current_active_shelf = cmds.tabLayout(shelftoplevel,
-                                          query=True,
-                                          selectTab=True)
-
-    cmds.shelfButton(command=action.process_command(),
-                     sourceType="python",
-                     parent=current_active_shelf,
-                     image=action.iconfile or
"pythonFamily.png", - annotation=action.statusTip(), - imageOverlayLabel=action.label or "") - - return 1 - - -def _maya_main_window(): - """Return Maya's main window""" - for obj in QtWidgets.QApplication.topLevelWidgets(): - if obj.objectName() == 'MayaWindow': - return obj - raise RuntimeError('Could not find MayaWindow instance') - - -def _maya_main_menubar(): - """Retrieve the main menubar of the Maya window""" - mayawindow = _maya_main_window() - menubar = [i for i in mayawindow.children() - if isinstance(i, QtWidgets.QMenuBar)] - - assert len(menubar) == 1, "Error, could not find menu bar!" - - return menubar[0] - - -def find_scripts_menu(title, parent): - """ - Check if the menu exists with the given title in the parent - - Args: - title (str): the title name of the scripts menu - - parent (QtWidgets.QMenuBar): the menubar to check - - Returns: - QtWidgets.QMenu or None - - """ - - menu = None - search = [i for i in parent.children() if - isinstance(i, scriptsmenu.ScriptsMenu) - and i.title() == title] - - if search: - assert len(search) < 2, ("Multiple instances of menu '{}' " - "in menu bar".format(title)) - menu = search[0] - - return menu - - -def main(title="Scripts", parent=None, objectName=None): - """Build the main scripts menu in Maya - - Args: - title (str): name of the menu in the application - - parent (QtWidgets.QtMenuBar): the parent object for the menu - - objectName (str): custom objectName for scripts menu - - Returns: - scriptsmenu.ScriptsMenu instance - - """ - - mayamainbar = parent or _maya_main_menubar() - try: - # check menu already exists - menu = find_scripts_menu(title, mayamainbar) - if not menu: - log.info("Attempting to build menu ...") - object_name = objectName or title.lower() - menu = scriptsmenu.ScriptsMenu(title=title, - parent=mayamainbar, - objectName=object_name) - except Exception as e: - log.error(e) - return - - # Register control + shift callback to add to shelf (maya behavior) - modifiers = QtCore.Qt.ControlModifier | QtCore.Qt.ShiftModifier - menu.register_callback(int(modifiers), to_shelf) - - menu.register_callback(0, register_repeat_last) - - return menu diff --git a/pype/vendor/scriptsmenu/launchfornuke.py b/pype/vendor/scriptsmenu/launchfornuke.py deleted file mode 100644 index 23e4ed1b4d..0000000000 --- a/pype/vendor/scriptsmenu/launchfornuke.py +++ /dev/null @@ -1,36 +0,0 @@ -import scriptsmenu -from .vendor.Qt import QtWidgets - - -def _nuke_main_window(): - """Return Nuke's main window""" - for obj in QtWidgets.QApplication.topLevelWidgets(): - if (obj.inherits('QMainWindow') and - obj.metaObject().className() == 'Foundry::UI::DockMainWindow'): - return obj - raise RuntimeError('Could not find Nuke MainWindow instance') - - -def _nuke_main_menubar(): - """Retrieve the main menubar of the Nuke window""" - nuke_window = _nuke_main_window() - menubar = [i for i in nuke_window.children() - if isinstance(i, QtWidgets.QMenuBar)] - - assert len(menubar) == 1, "Error, could not find menu bar!" 
- return menubar[0] - - -def main(title="Scripts"): - # Register control + shift callback to add to shelf (Nuke behavior) - # modifiers = QtCore.Qt.ControlModifier | QtCore.Qt.ShiftModifier - # menu.register_callback(modifiers, to_shelf) - nuke_main_bar = _nuke_main_menubar() - for nuke_bar in nuke_main_bar.children(): - if isinstance(nuke_bar, scriptsmenu.ScriptsMenu): - if nuke_bar.title() == title: - menu = nuke_bar - return menu - - menu = scriptsmenu.ScriptsMenu(title=title, parent=nuke_main_bar) - return menu \ No newline at end of file diff --git a/pype/vendor/scriptsmenu/scriptsmenu.py b/pype/vendor/scriptsmenu/scriptsmenu.py deleted file mode 100644 index e2b7ff96c7..0000000000 --- a/pype/vendor/scriptsmenu/scriptsmenu.py +++ /dev/null @@ -1,316 +0,0 @@ -import os -import json -import logging -from collections import defaultdict - -from .vendor.Qt import QtWidgets, QtCore -from . import action - -log = logging.getLogger(__name__) - - -class ScriptsMenu(QtWidgets.QMenu): - """A Qt menu that displays a list of searchable actions""" - - updated = QtCore.Signal(QtWidgets.QMenu) - - def __init__(self, *args, **kwargs): - """Initialize Scripts menu - - Args: - title (str): the name of the root menu which will be created - - parent (QtWidgets.QObject) : the QObject to parent the menu to - - Returns: - None - - """ - QtWidgets.QMenu.__init__(self, *args, **kwargs) - - self.searchbar = None - self.update_action = None - - self._script_actions = [] - self._callbacks = defaultdict(list) - - # Automatically add it to the parent menu - parent = kwargs.get("parent", None) - if parent: - parent.addMenu(self) - - objectname = kwargs.get("objectName", "scripts") - title = kwargs.get("title", "Scripts") - self.setObjectName(objectname) - self.setTitle(title) - - # add default items in the menu - self.create_default_items() - - def on_update(self): - self.updated.emit(self) - - @property - def registered_callbacks(self): - return self._callbacks.copy() - - def create_default_items(self): - """Add a search bar to the top of the menu given""" - - # create widget and link function - searchbar = QtWidgets.QLineEdit() - searchbar.setFixedWidth(120) - searchbar.setPlaceholderText("Search ...") - searchbar.textChanged.connect(self._update_search) - self.searchbar = searchbar - - # create widget holder - searchbar_action = QtWidgets.QWidgetAction(self) - - # add widget to widget holder - searchbar_action.setDefaultWidget(self.searchbar) - searchbar_action.setObjectName("Searchbar") - - # add update button and link function - update_action = QtWidgets.QAction(self) - update_action.setObjectName("Update Scripts") - update_action.setText("Update Scripts") - update_action.setVisible(False) - update_action.triggered.connect(self.on_update) - self.update_action = update_action - - # add action to menu - self.addAction(searchbar_action) - self.addAction(update_action) - - # add separator object - separator = self.addSeparator() - separator.setObjectName("separator") - - def add_menu(self, title, parent=None): - """Create a sub menu for a parent widget - - Args: - parent(QtWidgets.QWidget): the object to parent the menu to - - title(str): the title of the menu - - Returns: - QtWidget.QMenu instance - """ - - if not parent: - parent = self - - menu = QtWidgets.QMenu(parent, title) - menu.setTitle(title) - menu.setObjectName(title) - menu.setTearOffEnabled(True) - parent.addMenu(menu) - - return menu - - def add_script(self, parent, title, command, sourcetype, icon=None, - tags=None, label=None, tooltip=None): - 
"""Create an action item which runs a script when clicked - - Args: - parent (QtWidget.QWidget): The widget to parent the item to - - title (str): The text which will be displayed in the menu - - command (str): The command which needs to be run when the item is - clicked. - - sourcetype (str): The type of command, the way the command is - processed is based on the source type. - - icon (str): The file path of an icon to display with the menu item - - tags (list, tuple): Keywords which describe the action - - label (str): A short description of the script which will be displayed - when hovering over the menu item - - tooltip (str): A tip for the user about the usage fo the tool - - Returns: - QtWidget.QAction instance - - """ - - assert tags is None or isinstance(tags, (list, tuple)) - # Ensure tags is a list - tags = list() if tags is None else list(tags) - tags.append(title.lower()) - - assert icon is None or isinstance(icon, str), ( - "Invalid data type for icon, supported : None, string") - - # create new action - script_action = action.Action(parent) - script_action.setText(title) - script_action.setObjectName(title) - script_action.tags = tags - - # link action to root for callback library - script_action.root = self - - # Set up the command - script_action.sourcetype = sourcetype - script_action.command = command - - try: - script_action.process_command() - except RuntimeError as e: - raise RuntimeError("Script action can't be " - "processed: {}".format(e)) - - if icon: - iconfile = os.path.expandvars(icon) - script_action.iconfile = iconfile - script_action_icon = QtWidgets.QIcon(iconfile) - script_action.setIcon(script_action_icon) - - if label: - script_action.label = label - - if tooltip: - script_action.setStatusTip(tooltip) - - script_action.triggered.connect(script_action.run_command) - parent.addAction(script_action) - - # Add to our searchable actions - self._script_actions.append(script_action) - - return script_action - - def build_from_configuration(self, parent, configuration): - """Process the configurations and store the configuration - - This creates all submenus from a configuration.json file. - - When the configuration holds the key `main` all scripts under `main` will - be added to the main menu first before adding the rest - - Args: - parent (ScriptsMenu): script menu instance - configuration (list): A ScriptsMenu configuration list - - Returns: - None - - """ - - for item in configuration: - assert isinstance(item, dict), "Configuration is wrong!" 
- - # skip items which have no `type` key - item_type = item.get('type', None) - if not item_type: - log.warning("Missing 'type' from configuration item") - continue - - # add separator - # Special behavior for separators - if item_type == "separator": - parent.addSeparator() - - # add submenu - # items should hold a collection of submenu items (dict) - elif item_type == "menu": - assert "items" in item, "Menu is missing 'items' key" - menu = self.add_menu(parent=parent, title=item["title"]) - self.build_from_configuration(menu, item["items"]) - - # add script - elif item_type == "action": - # filter out `type` from the item dict - config = {key: value for key, value in - item.items() if key != "type"} - - self.add_script(parent=parent, **config) - - def set_update_visible(self, state): - self.update_action.setVisible(state) - - def clear_menu(self): - """Clear all menu items which are not default - - Returns: - None - - """ - - # TODO: Set up a more robust implementation for this - # Delete all except the first three actions - for _action in self.actions()[3:]: - self.removeAction(_action) - - def register_callback(self, modifiers, callback): - self._callbacks[modifiers].append(callback) - - def _update_search(self, search): - """Hide all the samples which do not match the user's import - - Returns: - None - - """ - - if not search: - for action in self._script_actions: - action.setVisible(True) - else: - for action in self._script_actions: - if not action.has_tag(search.lower()): - action.setVisible(False) - - # Set visibility for all submenus - for action in self.actions(): - if not action.menu(): - continue - - menu = action.menu() - visible = any(action.isVisible() for action in menu.actions()) - action.setVisible(visible) - - -def load_configuration(path): - """Load the configuration from a file - - Read out the JSON file which will dictate the structure of the scripts menu - - Args: - path (str): file path of the .JSON file - - Returns: - dict - - """ - - if not os.path.isfile(path): - raise AttributeError("Given configuration is not " - "a file!\n'{}'".format(path)) - - extension = os.path.splitext(path)[-1] - if extension != ".json": - raise AttributeError("Given configuration file has unsupported " - "file type, provide a .json file") - - # retrieve and store config - with open(path, "r") as f: - configuration = json.load(f) - - return configuration - - -def application(configuration, parent): - import sys - app = QtWidgets.QApplication(sys.argv) - - scriptsmenu = ScriptsMenu(configuration, parent) - scriptsmenu.show() - - sys.exit(app.exec_()) diff --git a/pype/vendor/scriptsmenu/vendor/Qt.py b/pype/vendor/scriptsmenu/vendor/Qt.py deleted file mode 100644 index 633c2a52ca..0000000000 --- a/pype/vendor/scriptsmenu/vendor/Qt.py +++ /dev/null @@ -1,446 +0,0 @@ -"""The MIT License (MIT) - -Copyright (c) 2016 Marcus Ottosson - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - ---- - -Map all bindings to PySide2 - -This module replaces itself with the most desirable binding. - -Project goals: - Qt.py was born in the film and visual effects industry to address - the growing need for the development of software capable of running - with more than one flavour of the Qt bindings for Python - PySide, - PySide2, PyQt4 and PyQt5. - - 1. Build for one, run with all - 2. Explicit is better than implicit - 3. Support co-existence - -Default resolution order: - - PySide2 - - PyQt5 - - PySide - - PyQt4 - -Usage: - >> import sys - >> from Qt import QtWidgets - >> app = QtWidgets.QApplication(sys.argv) - >> button = QtWidgets.QPushButton("Hello World") - >> button.show() - >> app.exec_() - -""" - -import os -import sys -import shutil - -# Flags from environment variables -QT_VERBOSE = bool(os.getenv("QT_VERBOSE")) # Extra output -QT_TESTING = bool(os.getenv("QT_TESTING")) # Extra constraints -QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING") # Override default - -self = sys.modules[__name__] - -# Internal members, may be used externally for debugging -self.__added__ = list() # All members added to QtCompat -self.__remapped__ = list() # Members copied from elsewhere -self.__modified__ = list() # Existing members modified in some way - -# Below members are set dynamically on import relative the original binding. -self.__version__ = "0.6.9" -self.__qt_version__ = "0.0.0" -self.__binding__ = "None" -self.__binding_version__ = "0.0.0" - -self.load_ui = lambda fname: None -self.translate = lambda context, sourceText, disambiguation, n: None -self.setSectionResizeMode = lambda logicalIndex, hide: None - -# All members of this module is directly accessible via QtCompat -# Take care not to access any "private" members; i.e. those with -# a leading underscore. -QtCompat = self - - -def convert(lines): - """Convert compiled .ui file from PySide2 to Qt.py - - Arguments: - lines (list): Each line of of .ui file - - Usage: - >> with open("myui.py") as f: - .. lines = convert(f.readlines()) - - """ - - def parse(line): - line = line.replace("from PySide2 import", "from Qt import") - line = line.replace("QtWidgets.QApplication.translate", - "Qt.QtCompat.translate") - return line - - parsed = list() - for line in lines: - line = parse(line) - parsed.append(line) - - return parsed - - -def _remap(object, name, value, safe=True): - """Prevent accidental assignment of existing members - - Arguments: - object (object): Parent of new attribute - name (str): Name of new attribute - value (object): Value of new attribute - safe (bool): Whether or not to guarantee that - the new attribute was not overwritten. - Can be set to False under condition that - it is superseded by extensive testing. - - """ - - if QT_TESTING and safe: - # Cannot alter original binding. 
- if hasattr(object, name): - raise AttributeError("Cannot override existing name: " - "%s.%s" % (object.__name__, name)) - - # Cannot alter classes of functions - if type(object).__name__ != "module": - raise AttributeError("%s != 'module': Cannot alter " - "anything but modules" % object) - - elif hasattr(object, name): - # Keep track of modifications - self.__modified__.append(name) - - self.__remapped__.append(name) - - setattr(object, name, value) - - -def _add(object, name, value): - """Append to self, accessible via Qt.QtCompat""" - self.__added__.append(name) - setattr(object, name, value) - - -def _pyqt5(): - import PyQt5.Qt - from PyQt5 import QtCore, QtWidgets, uic - - _remap(QtCore, "Signal", QtCore.pyqtSignal) - _remap(QtCore, "Slot", QtCore.pyqtSlot) - _remap(QtCore, "Property", QtCore.pyqtProperty) - - _add(QtCompat, "__binding__", PyQt5.__name__) - _add(QtCompat, "__binding_version__", PyQt5.QtCore.PYQT_VERSION_STR) - _add(QtCompat, "__qt_version__", PyQt5.QtCore.QT_VERSION_STR) - _add(QtCompat, "load_ui", lambda fname: uic.loadUi(fname)) - _add(QtCompat, "translate", QtCore.QCoreApplication.translate) - _add(QtCompat, "setSectionResizeMode", - QtWidgets.QHeaderView.setSectionResizeMode) - - _maintain_backwards_compatibility(PyQt5) - - return PyQt5 - - -def _pyqt4(): - # Attempt to set sip API v2 (must be done prior to importing PyQt4) - import sip - try: - sip.setapi("QString", 2) - sip.setapi("QVariant", 2) - sip.setapi("QDate", 2) - sip.setapi("QDateTime", 2) - sip.setapi("QTextStream", 2) - sip.setapi("QTime", 2) - sip.setapi("QUrl", 2) - except AttributeError: - raise ImportError - # PyQt4 < v4.6 - except ValueError: - # API version already set to v1 - raise ImportError - - import PyQt4.Qt - from PyQt4 import QtCore, QtGui, uic - - - _remap(PyQt4, "QtWidgets", QtGui) - _remap(QtCore, "Signal", QtCore.pyqtSignal) - _remap(QtCore, "Slot", QtCore.pyqtSlot) - _remap(QtCore, "Property", QtCore.pyqtProperty) - _remap(QtCore, "QItemSelection", QtGui.QItemSelection) - _remap(QtCore, "QStringListModel", QtGui.QStringListModel) - _remap(QtCore, "QItemSelectionModel", QtGui.QItemSelectionModel) - _remap(QtCore, "QSortFilterProxyModel", QtGui.QSortFilterProxyModel) - _remap(QtCore, "QAbstractProxyModel", QtGui.QAbstractProxyModel) - - try: - from PyQt4 import QtWebKit - _remap(PyQt4, "QtWebKitWidgets", QtWebKit) - except ImportError: - "QtWebkit is optional in Qt , therefore might not be available" - - _add(QtCompat, "__binding__", PyQt4.__name__) - _add(QtCompat, "__binding_version__", PyQt4.QtCore.PYQT_VERSION_STR) - _add(QtCompat, "__qt_version__", PyQt4.QtCore.QT_VERSION_STR) - _add(QtCompat, "load_ui", lambda fname: uic.loadUi(fname)) - _add(QtCompat, "setSectionResizeMode", QtGui.QHeaderView.setResizeMode) - - # PySide2 differs from Qt4 in that Qt4 has one extra argument - # which is always `None`. The lambda arguments represents the PySide2 - # interface, whereas the arguments passed to `.translate` represent - # those expected of a Qt4 binding. 
- _add(QtCompat, "translate", - lambda context, sourceText, disambiguation, n: - QtCore.QCoreApplication.translate(context, - sourceText, - disambiguation, - QtCore.QCoreApplication.CodecForTr, - n)) - - _maintain_backwards_compatibility(PyQt4) - - return PyQt4 - - -def _pyside2(): - import PySide2 - from PySide2 import QtGui, QtWidgets, QtCore, QtUiTools - - _remap(QtCore, "QStringListModel", QtGui.QStringListModel) - - _add(QtCompat, "__binding__", PySide2.__name__) - _add(QtCompat, "__binding_version__", PySide2.__version__) - _add(QtCompat, "__qt_version__", PySide2.QtCore.qVersion()) - _add(QtCompat, "load_ui", lambda fname: QtUiTools.QUiLoader().load(fname)) - - _add(QtCompat, "setSectionResizeMode", - QtWidgets.QHeaderView.setSectionResizeMode) - - _add(QtCompat, "translate", QtCore.QCoreApplication.translate) - - _maintain_backwards_compatibility(PySide2) - - return PySide2 - - -def _pyside(): - import PySide - from PySide import QtGui, QtCore, QtUiTools - - _remap(PySide, "QtWidgets", QtGui) - _remap(QtCore, "QSortFilterProxyModel", QtGui.QSortFilterProxyModel) - _remap(QtCore, "QStringListModel", QtGui.QStringListModel) - _remap(QtCore, "QItemSelection", QtGui.QItemSelection) - _remap(QtCore, "QItemSelectionModel", QtGui.QItemSelectionModel) - _remap(QtCore, "QAbstractProxyModel", QtGui.QAbstractProxyModel) - - try: - from PySide import QtWebKit - _remap(PySide, "QtWebKitWidgets", QtWebKit) - except ImportError: - "QtWebkit is optional in Qt, therefore might not be available" - - _add(QtCompat, "__binding__", PySide.__name__) - _add(QtCompat, "__binding_version__", PySide.__version__) - _add(QtCompat, "__qt_version__", PySide.QtCore.qVersion()) - _add(QtCompat, "load_ui", lambda fname: QtUiTools.QUiLoader().load(fname)) - _add(QtCompat, "setSectionResizeMode", QtGui.QHeaderView.setResizeMode) - - _add(QtCompat, "translate", - lambda context, sourceText, disambiguation, n: - QtCore.QCoreApplication.translate(context, - sourceText, - disambiguation, - QtCore.QCoreApplication.CodecForTr, - n)) - - _maintain_backwards_compatibility(PySide) - - return PySide - - -def _log(text, verbose): - if verbose: - sys.stdout.write(text + "\n") - - -def cli(args): - """Qt.py command-line interface""" - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--convert", - help="Path to compiled Python module, e.g. 
my_ui.py") - parser.add_argument("--compile", - help="Accept raw .ui file and compile with native " - "PySide2 compiler.") - parser.add_argument("--stdout", - help="Write to stdout instead of file", - action="store_true") - parser.add_argument("--stdin", - help="Read from stdin instead of file", - action="store_true") - - args = parser.parse_args(args) - - if args.stdout: - raise NotImplementedError("--stdout") - - if args.stdin: - raise NotImplementedError("--stdin") - - if args.compile: - raise NotImplementedError("--compile") - - if args.convert: - sys.stdout.write("#\n" - "# WARNING: --convert is an ALPHA feature.\n#\n" - "# See https://github.com/mottosso/Qt.py/pull/132\n" - "# for details.\n" - "#\n") - - # - # ------> Read - # - with open(args.convert) as f: - lines = convert(f.readlines()) - - backup = "%s_backup%s" % os.path.splitext(args.convert) - sys.stdout.write("Creating \"%s\"..\n" % backup) - shutil.copy(args.convert, backup) - - # - # <------ Write - # - with open(args.convert, "w") as f: - f.write("".join(lines)) - - sys.stdout.write("Successfully converted \"%s\"\n" % args.convert) - - -def init(): - """Try loading each binding in turn - - Please note: the entire Qt module is replaced with this code: - sys.modules["Qt"] = binding() - - This means no functions or variables can be called after - this has executed. - - For debugging and testing, this module may be accessed - through `Qt.__shim__`. - - """ - - bindings = (_pyside2, _pyqt5, _pyside, _pyqt4) - - if QT_PREFERRED_BINDING: - # Internal flag (used in installer) - if QT_PREFERRED_BINDING == "None": - self.__wrapper_version__ = self.__version__ - return - - preferred = QT_PREFERRED_BINDING.split(os.pathsep) - available = { - "PySide2": _pyside2, - "PyQt5": _pyqt5, - "PySide": _pyside, - "PyQt4": _pyqt4 - } - - try: - bindings = [available[binding] for binding in preferred] - except KeyError: - raise ImportError( - "Available preferred Qt bindings: " - "\n".join(preferred) - ) - - for binding in bindings: - _log("Trying %s" % binding.__name__, QT_VERBOSE) - - try: - binding = binding() - - except ImportError as e: - _log(" - ImportError(\"%s\")" % e, QT_VERBOSE) - continue - - else: - # Reference to this module - binding.QtCompat = self - binding.__shim__ = self # DEPRECATED - - sys.modules.update({ - __name__: binding, - - # Fix #133, `from Qt.QtWidgets import QPushButton` - __name__ + ".QtWidgets": binding.QtWidgets, - - # Fix #158 `import Qt.QtCore;Qt.QtCore.Signal` - __name__ + ".QtCore": binding.QtCore, - __name__ + ".QtGui": binding.QtGui, - - }) - - return - - # If not binding were found, throw this error - raise ImportError("No Qt binding were found.") - - -def _maintain_backwards_compatibility(binding): - """Add members found in prior versions up till the next major release - - These members are to be considered deprecated. When a new major - release is made, these members are removed. 
- - """ - - for member in ("__binding__", - "__binding_version__", - "__qt_version__", - "__added__", - "__remapped__", - "__modified__", - "convert", - "load_ui", - "translate"): - setattr(binding, member, getattr(self, member)) - self.__added__.append(member) - - setattr(binding, "__wrapper_version__", self.__version__) - self.__added__.append("__wrapper_version__") - - -cli(sys.argv[1:]) if __name__ == "__main__" else init() \ No newline at end of file diff --git a/pype/vendor/scriptsmenu/vendor/__init__.py b/pype/vendor/scriptsmenu/vendor/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/scriptsmenu/version.py b/pype/vendor/scriptsmenu/version.py deleted file mode 100644 index 7e9d9200fb..0000000000 --- a/pype/vendor/scriptsmenu/version.py +++ /dev/null @@ -1,9 +0,0 @@ -VERSION_MAJOR = 1 -VERSION_MINOR = 4 -VERSION_PATCH = 0 - - -version = '{}.{}.{}'.format(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH) -__version__ = version - -__all__ = ['version', '__version__'] diff --git a/pype/vendor/site.py b/pype/vendor/site.py deleted file mode 100644 index 40b00de0a7..0000000000 --- a/pype/vendor/site.py +++ /dev/null @@ -1,74 +0,0 @@ -def __boot(): - import sys - import os - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys, 'path_importer_cache', {}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - - for item in stdpath: - if item == mydir or not item: - continue # skip if current dir. on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - import imp # Avoid import loop in Python 3 - stream, path, descr = imp.find_module('site', [item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site', stream, path, descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys, '__egginsert', 0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d, nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p, np = makepath(item) - - if np == nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - - -if __name__ == 'site': - __boot() - del __boot diff --git a/pype/vendor/six-1.11.0.dist-info/DESCRIPTION.rst b/pype/vendor/six-1.11.0.dist-info/DESCRIPTION.rst deleted file mode 100644 index 09c2c99293..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,27 +0,0 @@ -.. image:: http://img.shields.io/pypi/v/six.svg - :target: https://pypi.python.org/pypi/six - -.. 
image:: https://travis-ci.org/benjaminp/six.svg?branch=master - :target: https://travis-ci.org/benjaminp/six - -.. image:: http://img.shields.io/badge/license-MIT-green.svg - :target: https://github.com/benjaminp/six/blob/master/LICENSE - -Six is a Python 2 and 3 compatibility library. It provides utility functions -for smoothing over the differences between the Python versions with the goal of -writing Python code that is compatible on both Python versions. See the -documentation for more information on what is provided. - -Six supports every Python version since 2.6. It is contained in only one Python -file, so it can be easily copied into your project. (The copyright and license -notice must be retained.) - -Online documentation is at http://six.rtfd.org. - -Bugs can be reported to https://github.com/benjaminp/six. The code can also -be found there. - -For questions about six or porting in general, email the python-porting mailing -list: https://mail.python.org/mailman/listinfo/python-porting - - diff --git a/pype/vendor/six-1.11.0.dist-info/INSTALLER b/pype/vendor/six-1.11.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e38a..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/pype/vendor/six-1.11.0.dist-info/METADATA b/pype/vendor/six-1.11.0.dist-info/METADATA deleted file mode 100644 index 04e93dc60e..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/METADATA +++ /dev/null @@ -1,43 +0,0 @@ -Metadata-Version: 2.0 -Name: six -Version: 1.11.0 -Summary: Python 2 and 3 compatibility utilities -Home-page: http://pypi.python.org/pypi/six/ -Author: Benjamin Peterson -Author-email: benjamin@python.org -License: MIT -Platform: UNKNOWN -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Topic :: Software Development :: Libraries -Classifier: Topic :: Utilities - -.. image:: http://img.shields.io/pypi/v/six.svg - :target: https://pypi.python.org/pypi/six - -.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master - :target: https://travis-ci.org/benjaminp/six - -.. image:: http://img.shields.io/badge/license-MIT-green.svg - :target: https://github.com/benjaminp/six/blob/master/LICENSE - -Six is a Python 2 and 3 compatibility library. It provides utility functions -for smoothing over the differences between the Python versions with the goal of -writing Python code that is compatible on both Python versions. See the -documentation for more information on what is provided. - -Six supports every Python version since 2.6. It is contained in only one Python -file, so it can be easily copied into your project. (The copyright and license -notice must be retained.) - -Online documentation is at http://six.rtfd.org. - -Bugs can be reported to https://github.com/benjaminp/six. The code can also -be found there. 
- -For questions about six or porting in general, email the python-porting mailing -list: https://mail.python.org/mailman/listinfo/python-porting - - diff --git a/pype/vendor/six-1.11.0.dist-info/RECORD b/pype/vendor/six-1.11.0.dist-info/RECORD deleted file mode 100644 index e3619c3478..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -__pycache__/six.cpython-36.pyc,, -six-1.11.0.dist-info/DESCRIPTION.rst,sha256=gPBoq1Ruc1QDWyLeXPlieL3F-XZz1_WXB-5gctCfg-A,1098 -six-1.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -six-1.11.0.dist-info/METADATA,sha256=06nZXaDYN3vnC-pmUjhkECYFH_a--ywvcPIpUdNeH1o,1607 -six-1.11.0.dist-info/RECORD,, -six-1.11.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -six-1.11.0.dist-info/metadata.json,sha256=ac3f4f7MpSHSnZ1SqhHCwsL7FGWMG0gBEb0hhS2eSSM,703 -six-1.11.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 -six.py,sha256=A08MPb-Gi9FfInI3IW7HimXFmEH2T2IPzHgDvdhZPRA,30888 diff --git a/pype/vendor/six-1.11.0.dist-info/WHEEL b/pype/vendor/six-1.11.0.dist-info/WHEEL deleted file mode 100644 index 8b6dd1b5a8..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/pype/vendor/six-1.11.0.dist-info/metadata.json b/pype/vendor/six-1.11.0.dist-info/metadata.json deleted file mode 100644 index 2c7fcea95b..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"contacts": [{"email": "benjamin@python.org", "name": "Benjamin Peterson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://pypi.python.org/pypi/six/"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT", "metadata_version": "2.0", "name": "six", "summary": "Python 2 and 3 compatibility utilities", "test_requires": [{"requires": ["pytest"]}], "version": "1.11.0"} \ No newline at end of file diff --git a/pype/vendor/six-1.11.0.dist-info/top_level.txt b/pype/vendor/six-1.11.0.dist-info/top_level.txt deleted file mode 100644 index ffe2fce498..0000000000 --- a/pype/vendor/six-1.11.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -six diff --git a/pype/vendor/six.py b/pype/vendor/six.py deleted file mode 100644 index 6bf4fd3810..0000000000 --- a/pype/vendor/six.py +++ /dev/null @@ -1,891 +0,0 @@ -# Copyright (c) 2010-2017 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.11.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. 
- - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - 
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." + attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - 
"moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - 
-class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - 
callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, 
_locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from from_value - finally: - value = None -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. 
- class metaclass(type): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/pype/vendor/urllib3/__init__.py b/pype/vendor/urllib3/__init__.py deleted file mode 100644 index 148a9c31a7..0000000000 --- a/pype/vendor/urllib3/__init__.py +++ /dev/null @@ -1,92 +0,0 @@ -""" -urllib3 - Thread-safe connection pooling and re-using. -""" - -from __future__ import absolute_import -import warnings - -from .connectionpool import ( - HTTPConnectionPool, - HTTPSConnectionPool, - connection_from_url -) - -from . import exceptions -from .filepost import encode_multipart_formdata -from .poolmanager import PoolManager, ProxyManager, proxy_from_url -from .response import HTTPResponse -from .util.request import make_headers -from .util.url import get_host -from .util.timeout import Timeout -from .util.retry import Retry - - -# Set default logging handler to avoid "No handler found" warnings. 
-import logging -from logging import NullHandler - -__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' -__license__ = 'MIT' -__version__ = '1.24.1' - -__all__ = ( - 'HTTPConnectionPool', - 'HTTPSConnectionPool', - 'PoolManager', - 'ProxyManager', - 'HTTPResponse', - 'Retry', - 'Timeout', - 'add_stderr_logger', - 'connection_from_url', - 'disable_warnings', - 'encode_multipart_formdata', - 'get_host', - 'make_headers', - 'proxy_from_url', -) - -logging.getLogger(__name__).addHandler(NullHandler()) - - -def add_stderr_logger(level=logging.DEBUG): - """ - Helper for quickly adding a StreamHandler to the logger. Useful for - debugging. - - Returns the handler after adding it. - """ - # This method needs to be in this __init__.py to get the __name__ correct - # even if urllib3 is vendored within another package. - logger = logging.getLogger(__name__) - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) - logger.addHandler(handler) - logger.setLevel(level) - logger.debug('Added a stderr logging handler to logger: %s', __name__) - return handler - - -# ... Clean up. -del NullHandler - - -# All warning filters *must* be appended unless you're really certain that they -# shouldn't be: otherwise, it's very hard for users to use most Python -# mechanisms to silence them. -# SecurityWarning's always go off by default. -warnings.simplefilter('always', exceptions.SecurityWarning, append=True) -# SubjectAltNameWarning's should go off once per host -warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) -# InsecurePlatformWarning's don't vary between requests, so we keep it default. -warnings.simplefilter('default', exceptions.InsecurePlatformWarning, - append=True) -# SNIMissingWarnings should go off only once. -warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) - - -def disable_warnings(category=exceptions.HTTPWarning): - """ - Helper for quickly disabling all urllib3 warnings. - """ - warnings.simplefilter('ignore', category) diff --git a/pype/vendor/urllib3/_collections.py b/pype/vendor/urllib3/_collections.py deleted file mode 100644 index 34f23811c6..0000000000 --- a/pype/vendor/urllib3/_collections.py +++ /dev/null @@ -1,329 +0,0 @@ -from __future__ import absolute_import -try: - from collections.abc import Mapping, MutableMapping -except ImportError: - from collections import Mapping, MutableMapping -try: - from threading import RLock -except ImportError: # Platform-specific: No threads available - class RLock: - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - pass - - -from collections import OrderedDict -from .exceptions import InvalidHeader -from .packages.six import iterkeys, itervalues, PY3 - - -__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] - - -_Null = object() - - -class RecentlyUsedContainer(MutableMapping): - """ - Provides a thread-safe dict-like container which maintains up to - ``maxsize`` keys while throwing away the least-recently-used keys beyond - ``maxsize``. - - :param maxsize: - Maximum number of recent elements to retain. - - :param dispose_func: - Every time an item is evicted from the container, - ``dispose_func(value)`` is called. 
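The eviction behaviour described here is easiest to see in a short sketch, assuming this vendored copy (or upstream urllib3) is importable:

    from urllib3._collections import RecentlyUsedContainer

    cache = RecentlyUsedContainer(
        maxsize=2, dispose_func=lambda value: print("evicted:", value))
    cache["a"] = 1
    cache["b"] = 2
    cache["c"] = 3       # evicts "a", the least recently used key;
                         # prints "evicted: 1"
    print(cache.keys())  # -> ["b", "c"]
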
Callback which will get called - """ - - ContainerCls = OrderedDict - - def __init__(self, maxsize=10, dispose_func=None): - self._maxsize = maxsize - self.dispose_func = dispose_func - - self._container = self.ContainerCls() - self.lock = RLock() - - def __getitem__(self, key): - # Re-insert the item, moving it to the end of the eviction line. - with self.lock: - item = self._container.pop(key) - self._container[key] = item - return item - - def __setitem__(self, key, value): - evicted_value = _Null - with self.lock: - # Possibly evict the existing value of 'key' - evicted_value = self._container.get(key, _Null) - self._container[key] = value - - # If we didn't evict an existing value, we might have to evict the - # least recently used item from the beginning of the container. - if len(self._container) > self._maxsize: - _key, evicted_value = self._container.popitem(last=False) - - if self.dispose_func and evicted_value is not _Null: - self.dispose_func(evicted_value) - - def __delitem__(self, key): - with self.lock: - value = self._container.pop(key) - - if self.dispose_func: - self.dispose_func(value) - - def __len__(self): - with self.lock: - return len(self._container) - - def __iter__(self): - raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') - - def clear(self): - with self.lock: - # Copy pointers to all values, then wipe the mapping - values = list(itervalues(self._container)) - self._container.clear() - - if self.dispose_func: - for value in values: - self.dispose_func(value) - - def keys(self): - with self.lock: - return list(iterkeys(self._container)) - - -class HTTPHeaderDict(MutableMapping): - """ - :param headers: - An iterable of field-value pairs. Must not contain multiple field names - when compared case-insensitively. - - :param kwargs: - Additional field-value pairs to pass in to ``dict.update``. - - A ``dict`` like container for storing HTTP Headers. - - Field names are stored and compared case-insensitively in compliance with - RFC 7230. Iteration provides the first case-sensitive key seen for each - case-insensitive pair. - - Using ``__setitem__`` syntax overwrites fields that compare equal - case-insensitively in order to maintain ``dict``'s api. For fields that - compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` - in a loop. - - If multiple fields that are equal case-insensitively are passed to the - constructor or ``.update``, the behavior is undefined and some will be - lost. 
- - >>> headers = HTTPHeaderDict() - >>> headers.add('Set-Cookie', 'foo=bar') - >>> headers.add('set-cookie', 'baz=quxx') - >>> headers['content-length'] = '7' - >>> headers['SET-cookie'] - 'foo=bar, baz=quxx' - >>> headers['Content-Length'] - '7' - """ - - def __init__(self, headers=None, **kwargs): - super(HTTPHeaderDict, self).__init__() - self._container = OrderedDict() - if headers is not None: - if isinstance(headers, HTTPHeaderDict): - self._copy_from(headers) - else: - self.extend(headers) - if kwargs: - self.extend(kwargs) - - def __setitem__(self, key, val): - self._container[key.lower()] = [key, val] - return self._container[key.lower()] - - def __getitem__(self, key): - val = self._container[key.lower()] - return ', '.join(val[1:]) - - def __delitem__(self, key): - del self._container[key.lower()] - - def __contains__(self, key): - return key.lower() in self._container - - def __eq__(self, other): - if not isinstance(other, Mapping) and not hasattr(other, 'keys'): - return False - if not isinstance(other, type(self)): - other = type(self)(other) - return (dict((k.lower(), v) for k, v in self.itermerged()) == - dict((k.lower(), v) for k, v in other.itermerged())) - - def __ne__(self, other): - return not self.__eq__(other) - - if not PY3: # Python 2 - iterkeys = MutableMapping.iterkeys - itervalues = MutableMapping.itervalues - - __marker = object() - - def __len__(self): - return len(self._container) - - def __iter__(self): - # Only provide the originally cased names - for vals in self._container.values(): - yield vals[0] - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - # Using the MutableMapping function directly fails due to the private marker. - # Using ordinary dict.pop would expose the internal structures. - # So let's reinvent the wheel. - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def discard(self, key): - try: - del self[key] - except KeyError: - pass - - def add(self, key, val): - """Adds a (name, value) pair, doesn't overwrite the value if it already - exists. - - >>> headers = HTTPHeaderDict(foo='bar') - >>> headers.add('Foo', 'baz') - >>> headers['foo'] - 'bar, baz' - """ - key_lower = key.lower() - new_vals = [key, val] - # Keep the common case aka no item present as fast as possible - vals = self._container.setdefault(key_lower, new_vals) - if new_vals is not vals: - vals.append(val) - - def extend(self, *args, **kwargs): - """Generic import function for any type of header-like object. - Adapted version of MutableMapping.update in order to insert items - with self.add instead of self.__setitem__ - """ - if len(args) > 1: - raise TypeError("extend() takes at most 1 positional " - "arguments ({0} given)".format(len(args))) - other = args[0] if len(args) >= 1 else () - - if isinstance(other, HTTPHeaderDict): - for key, val in other.iteritems(): - self.add(key, val) - elif isinstance(other, Mapping): - for key in other: - self.add(key, other[key]) - elif hasattr(other, "keys"): - for key in other.keys(): - self.add(key, other[key]) - else: - for key, value in other: - self.add(key, value) - - for key, value in kwargs.items(): - self.add(key, value) - - def getlist(self, key, default=__marker): - """Returns a list of all the values for the named field. 
Returns an - empty list if the key doesn't exist.""" - try: - vals = self._container[key.lower()] - except KeyError: - if default is self.__marker: - return [] - return default - else: - return vals[1:] - - # Backwards compatibility for httplib - getheaders = getlist - getallmatchingheaders = getlist - iget = getlist - - # Backwards compatibility for http.cookiejar - get_all = getlist - - def __repr__(self): - return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) - - def _copy_from(self, other): - for key in other: - val = other.getlist(key) - if isinstance(val, list): - # Don't need to convert tuples - val = list(val) - self._container[key.lower()] = [key] + val - - def copy(self): - clone = type(self)() - clone._copy_from(self) - return clone - - def iteritems(self): - """Iterate over all header lines, including duplicate ones.""" - for key in self: - vals = self._container[key.lower()] - for val in vals[1:]: - yield vals[0], val - - def itermerged(self): - """Iterate over all headers, merging duplicate ones together.""" - for key in self: - val = self._container[key.lower()] - yield val[0], ', '.join(val[1:]) - - def items(self): - return list(self.iteritems()) - - @classmethod - def from_httplib(cls, message): # Python 2 - """Read headers from a Python 2 httplib message object.""" - # python2.7 does not expose a proper API for exporting multiheaders - # efficiently. This function re-reads raw lines from the message - # object and extracts the multiheaders properly. - obs_fold_continued_leaders = (' ', '\t') - headers = [] - - for line in message.headers: - if line.startswith(obs_fold_continued_leaders): - if not headers: - # We received a header line that starts with OWS as described - # in RFC-7230 S3.2.4. This indicates a multiline header, but - # there exists no previous header to which we can attach it. - raise InvalidHeader( - 'Header continuation with no previous header: %s' % line - ) - else: - key, value = headers[-1] - headers[-1] = (key, value + ' ' + line.strip()) - continue - - key, value = line.split(':', 1) - headers.append((key, value.strip())) - - return cls(headers) diff --git a/pype/vendor/urllib3/connection.py b/pype/vendor/urllib3/connection.py deleted file mode 100644 index 02b36654bd..0000000000 --- a/pype/vendor/urllib3/connection.py +++ /dev/null @@ -1,391 +0,0 @@ -from __future__ import absolute_import -import datetime -import logging -import os -import socket -from socket import error as SocketError, timeout as SocketTimeout -import warnings -from .packages import six -from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection -from .packages.six.moves.http_client import HTTPException # noqa: F401 - -try: # Compiled with SSL? - import ssl - BaseSSLError = ssl.SSLError -except (ImportError, AttributeError): # Platform-specific: No SSL. - ssl = None - - class BaseSSLError(BaseException): - pass - - -try: # Python 3: - # Not a no-op, we're adding this to the namespace so it can be imported. 
- ConnectionError = ConnectionError -except NameError: # Python 2: - class ConnectionError(Exception): - pass - - -from .exceptions import ( - NewConnectionError, - ConnectTimeoutError, - SubjectAltNameWarning, - SystemTimeWarning, -) -from .packages.ssl_match_hostname import match_hostname, CertificateError - -from .util.ssl_ import ( - resolve_cert_reqs, - resolve_ssl_version, - assert_fingerprint, - create_urllib3_context, - ssl_wrap_socket -) - - -from .util import connection - -from ._collections import HTTPHeaderDict - -log = logging.getLogger(__name__) - -port_by_scheme = { - 'http': 80, - 'https': 443, -} - -# When updating RECENT_DATE, move it to within two years of the current date, -# and not less than 6 months ago. -# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or -# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) -RECENT_DATE = datetime.date(2017, 6, 30) - - -class DummyConnection(object): - """Used to detect a failed ConnectionCls import.""" - pass - - -class HTTPConnection(_HTTPConnection, object): - """ - Based on httplib.HTTPConnection but provides an extra constructor - backwards-compatibility layer between older and newer Pythons. - - Additional keyword parameters are used to configure attributes of the connection. - Accepted parameters include: - - - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - - ``source_address``: Set the source address for the current connection. - - ``socket_options``: Set specific options on the underlying socket. If not specified, then - defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling - Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. - - For example, if you wish to enable TCP Keep Alive in addition to the defaults, - you might pass:: - - HTTPConnection.default_socket_options + [ - (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), - ] - - Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). - """ - - default_port = port_by_scheme['http'] - - #: Disable Nagle's algorithm by default. - #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` - default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] - - #: Whether this connection verifies the host's certificate. - is_verified = False - - def __init__(self, *args, **kw): - if six.PY3: # Python 3 - kw.pop('strict', None) - - # Pre-set source_address. - self.source_address = kw.get('source_address') - - #: The socket options provided by the user. If no options are - #: provided, we use the default options. - self.socket_options = kw.pop('socket_options', self.default_socket_options) - - _HTTPConnection.__init__(self, *args, **kw) - - @property - def host(self): - """ - Getter method to remove any trailing dots that indicate the hostname is an FQDN. - - In general, SSL certificates don't include the trailing dot indicating a - fully-qualified domain name, and thus, they don't validate properly when - checked against a domain name that includes the dot. In addition, some - servers may not expect to receive the trailing dot when provided. - - However, the hostname with trailing dot is critical to DNS resolution; doing a - lookup with the trailing dot will properly only resolve the appropriate FQDN, - whereas a lookup without a trailing dot will search the system's search domain - list. 
Thus, it's important to keep the original host around for use only in - those cases where it's appropriate (i.e., when doing DNS lookup to establish the - actual TCP connection across which we're going to send HTTP requests). - """ - return self._dns_host.rstrip('.') - - @host.setter - def host(self, value): - """ - Setter for the `host` property. - - We assume that only urllib3 uses the _dns_host attribute; httplib itself - only uses `host`, and it seems reasonable that other libraries follow suit. - """ - self._dns_host = value - - def _new_conn(self): - """ Establish a socket connection and set nodelay settings on it. - - :return: New socket connection. - """ - extra_kw = {} - if self.source_address: - extra_kw['source_address'] = self.source_address - - if self.socket_options: - extra_kw['socket_options'] = self.socket_options - - try: - conn = connection.create_connection( - (self._dns_host, self.port), self.timeout, **extra_kw) - - except SocketTimeout as e: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - except SocketError as e: - raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) - - return conn - - def _prepare_conn(self, conn): - self.sock = conn - if self._tunnel_host: - # TODO: Fix tunnel so it doesn't depend on self.sock state. - self._tunnel() - # Mark this connection as not reusable - self.auto_open = 0 - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - def request_chunked(self, method, url, body=None, headers=None): - """ - Alternative to the common request method, which sends the - body with chunked encoding and not as one block - """ - headers = HTTPHeaderDict(headers if headers is not None else {}) - skip_accept_encoding = 'accept-encoding' in headers - skip_host = 'host' in headers - self.putrequest( - method, - url, - skip_accept_encoding=skip_accept_encoding, - skip_host=skip_host - ) - for header, value in headers.items(): - self.putheader(header, value) - if 'transfer-encoding' not in headers: - self.putheader('Transfer-Encoding', 'chunked') - self.endheaders() - - if body is not None: - stringish_types = six.string_types + (bytes,) - if isinstance(body, stringish_types): - body = (body,) - for chunk in body: - if not chunk: - continue - if not isinstance(chunk, bytes): - chunk = chunk.encode('utf8') - len_str = hex(len(chunk))[2:] - self.send(len_str.encode('utf-8')) - self.send(b'\r\n') - self.send(chunk) - self.send(b'\r\n') - - # After the if clause, to always have a closed body - self.send(b'0\r\n\r\n') - - -class HTTPSConnection(HTTPConnection): - default_port = port_by_scheme['https'] - - ssl_version = None - - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, server_hostname=None, **kw): - - HTTPConnection.__init__(self, host, port, strict=strict, - timeout=timeout, **kw) - - self.key_file = key_file - self.cert_file = cert_file - self.ssl_context = ssl_context - self.server_hostname = server_hostname - - # Required property for Google AppEngine 1.9.0 which otherwise causes - # HTTPS requests to go out as HTTP. 
(See Issue #356) - self._protocol = 'https' - - def connect(self): - conn = self._new_conn() - self._prepare_conn(conn) - - if self.ssl_context is None: - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(None), - cert_reqs=resolve_cert_reqs(None), - ) - - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - ssl_context=self.ssl_context, - server_hostname=self.server_hostname - ) - - -class VerifiedHTTPSConnection(HTTPSConnection): - """ - Based on httplib.HTTPSConnection but wraps the socket with - SSL certification. - """ - cert_reqs = None - ca_certs = None - ca_cert_dir = None - ssl_version = None - assert_fingerprint = None - - def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None): - """ - This method should only be called once, before the connection is used. - """ - # If cert_reqs is not provided, we can try to guess. If the user gave - # us a cert database, we assume they want to use it: otherwise, if - # they gave us an SSL Context object we should use whatever is set for - # it. - if cert_reqs is None: - if ca_certs or ca_cert_dir: - cert_reqs = 'CERT_REQUIRED' - elif self.ssl_context is not None: - cert_reqs = self.ssl_context.verify_mode - - self.key_file = key_file - self.cert_file = cert_file - self.cert_reqs = cert_reqs - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - self.ca_certs = ca_certs and os.path.expanduser(ca_certs) - self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) - - def connect(self): - # Add certificate verification - conn = self._new_conn() - hostname = self.host - - if self._tunnel_host: - self.sock = conn - # Calls self._set_hostport(), so self.host is - # self._tunnel_host below. - self._tunnel() - # Mark this connection as not reusable - self.auto_open = 0 - - # Override the host with the one we're requesting data from. - hostname = self._tunnel_host - - server_hostname = hostname - if self.server_hostname is not None: - server_hostname = self.server_hostname - - is_time_off = datetime.date.today() < RECENT_DATE - if is_time_off: - warnings.warn(( - 'System time is way off (before {0}). This will probably ' - 'lead to SSL verification errors').format(RECENT_DATE), - SystemTimeWarning - ) - - # Wrap socket using verification with the root certs in - # trusted_root_certs - if self.ssl_context is None: - self.ssl_context = create_urllib3_context( - ssl_version=resolve_ssl_version(self.ssl_version), - cert_reqs=resolve_cert_reqs(self.cert_reqs), - ) - - context = self.ssl_context - context.verify_mode = resolve_cert_reqs(self.cert_reqs) - self.sock = ssl_wrap_socket( - sock=conn, - keyfile=self.key_file, - certfile=self.cert_file, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - server_hostname=server_hostname, - ssl_context=context) - - if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif context.verify_mode != ssl.CERT_NONE \ - and not getattr(context, 'check_hostname', False) \ - and self.assert_hostname is not False: - # While urllib3 attempts to always turn off hostname matching from - # the TLS library, this cannot always be done. So we check whether - # the TLS Library still thinks it's matching hostnames. 
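From the caller's side, the ``assert_fingerprint`` branch above amounts to certificate pinning: hostname checks are skipped and the peer certificate's digest must match exactly. A minimal sketch (host and fingerprint are placeholders; a mismatch raises ``urllib3.exceptions.SSLError``)::

    import urllib3

    # Placeholder digest of the expected server certificate; obtain the
    # real value e.g. from getpeercert(binary_form=True).
    PINNED = 'AA:BB:CC:...'

    pool = urllib3.HTTPSConnectionPool(
        'example.com', 443,
        assert_fingerprint=PINNED,
    )
    response = pool.request('GET', '/')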
- cert = self.sock.getpeercert() - if not cert.get('subjectAltName', ()): - warnings.warn(( - 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' - '`commonName` for now. This feature is being removed by major browsers and ' - 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' - 'for details.)'.format(hostname)), - SubjectAltNameWarning - ) - _match_hostname(cert, self.assert_hostname or server_hostname) - - self.is_verified = ( - context.verify_mode == ssl.CERT_REQUIRED or - self.assert_fingerprint is not None - ) - - -def _match_hostname(cert, asserted_hostname): - try: - match_hostname(cert, asserted_hostname) - except CertificateError as e: - log.error( - 'Certificate did not match expected hostname: %s. ' - 'Certificate: %s', asserted_hostname, cert - ) - # Add cert to exception and reraise so client code can inspect - # the cert when catching the exception, if they want to - e._peer_cert = cert - raise - - -if ssl: - # Make a copy for testing. - UnverifiedHTTPSConnection = HTTPSConnection - HTTPSConnection = VerifiedHTTPSConnection -else: - HTTPSConnection = DummyConnection diff --git a/pype/vendor/urllib3/connectionpool.py b/pype/vendor/urllib3/connectionpool.py deleted file mode 100644 index f7a8f193d1..0000000000 --- a/pype/vendor/urllib3/connectionpool.py +++ /dev/null @@ -1,896 +0,0 @@ -from __future__ import absolute_import -import errno -import logging -import sys -import warnings - -from socket import error as SocketError, timeout as SocketTimeout -import socket - - -from .exceptions import ( - ClosedPoolError, - ProtocolError, - EmptyPoolError, - HeaderParsingError, - HostChangedError, - LocationValueError, - MaxRetryError, - ProxyError, - ReadTimeoutError, - SSLError, - TimeoutError, - InsecureRequestWarning, - NewConnectionError, -) -from .packages.ssl_match_hostname import CertificateError -from .packages import six -from .packages.six.moves import queue -from .connection import ( - port_by_scheme, - DummyConnection, - HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, - HTTPException, BaseSSLError, -) -from .request import RequestMethods -from .response import HTTPResponse - -from .util.connection import is_connection_dropped -from .util.request import set_file_position -from .util.response import assert_header_parsing -from .util.retry import Retry -from .util.timeout import Timeout -from .util.url import get_host, Url, NORMALIZABLE_SCHEMES -from .util.queue import LifoQueue - - -xrange = six.moves.xrange - -log = logging.getLogger(__name__) - -_Default = object() - - -# Pool objects -class ConnectionPool(object): - """ - Base class for all connection pools, such as - :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. - """ - - scheme = None - QueueCls = LifoQueue - - def __init__(self, host, port=None): - if not host: - raise LocationValueError("No host specified.") - - self.host = _ipv6_host(host, self.scheme) - self._proxy_host = host.lower() - self.port = port - - def __str__(self): - return '%s(host=%r, port=%r)' % (type(self).__name__, - self.host, self.port) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - # Return False to re-raise any potential exceptions - return False - - def close(self): - """ - Close all pooled connections and disable the pool. 
- """ - pass - - -# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 -_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} - - -class HTTPConnectionPool(ConnectionPool, RequestMethods): - """ - Thread-safe connection pool for one host. - - :param host: - Host used for this HTTP Connection (e.g. "localhost"), passed into - :class:`httplib.HTTPConnection`. - - :param port: - Port used for this HTTP Connection (None is equivalent to 80), passed - into :class:`httplib.HTTPConnection`. - - :param strict: - Causes BadStatusLine to be raised if the status line can't be parsed - as a valid HTTP/1.0 or 1.1 status line, passed into - :class:`httplib.HTTPConnection`. - - .. note:: - Only works in Python 2. This parameter is ignored in Python 3. - - :param timeout: - Socket timeout in seconds for each individual connection. This can - be a float or integer, which sets the timeout for the HTTP request, - or an instance of :class:`urllib3.util.Timeout` which gives you more - fine-grained control over request timeouts. After the constructor has - been parsed, this is always a `urllib3.util.Timeout` object. - - :param maxsize: - Number of connections to save that can be reused. More than 1 is useful - in multithreaded situations. If ``block`` is set to False, more - connections will be created but they will not be saved once they've - been used. - - :param block: - If set to True, no more than ``maxsize`` connections will be used at - a time. When no free connections are available, the call will block - until a connection has been released. This is a useful side effect for - particular multithreaded situations where one does not want to use more - than maxsize connections per host to prevent flooding. - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - - :param retries: - Retry configuration to use by default with requests in this pool. - - :param _proxy: - Parsed proxy URL, should not be used directly, instead, see - :class:`urllib3.connectionpool.ProxyManager`" - - :param _proxy_headers: - A dictionary with proxy headers, should not be used directly, - instead, see :class:`urllib3.connectionpool.ProxyManager`" - - :param \\**conn_kw: - Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, - :class:`urllib3.connection.HTTPSConnection` instances. - """ - - scheme = 'http' - ConnectionCls = HTTPConnection - ResponseCls = HTTPResponse - - def __init__(self, host, port=None, strict=False, - timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, retries=None, - _proxy=None, _proxy_headers=None, - **conn_kw): - ConnectionPool.__init__(self, host, port) - RequestMethods.__init__(self, headers) - - self.strict = strict - - if not isinstance(timeout, Timeout): - timeout = Timeout.from_float(timeout) - - if retries is None: - retries = Retry.DEFAULT - - self.timeout = timeout - self.retries = retries - - self.pool = self.QueueCls(maxsize) - self.block = block - - self.proxy = _proxy - self.proxy_headers = _proxy_headers or {} - - # Fill the queue up so that doing get() on it will block properly - for _ in xrange(maxsize): - self.pool.put(None) - - # These are mostly for testing and debugging purposes. - self.num_connections = 0 - self.num_requests = 0 - self.conn_kw = conn_kw - - if self.proxy: - # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. - # We cannot know if the user has added default socket options, so we cannot replace the - # list. 
- self.conn_kw.setdefault('socket_options', []) - - def _new_conn(self): - """ - Return a fresh :class:`HTTPConnection`. - """ - self.num_connections += 1 - log.debug("Starting new HTTP connection (%d): %s:%s", - self.num_connections, self.host, self.port or "80") - - conn = self.ConnectionCls(host=self.host, port=self.port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) - return conn - - def _get_conn(self, timeout=None): - """ - Get a connection. Will return a pooled connection if one is available. - - If no connections are available and :prop:`.block` is ``False``, then a - fresh connection is returned. - - :param timeout: - Seconds to wait before giving up and raising - :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and - :prop:`.block` is ``True``. - """ - conn = None - try: - conn = self.pool.get(block=self.block, timeout=timeout) - - except AttributeError: # self.pool is None - raise ClosedPoolError(self, "Pool is closed.") - - except queue.Empty: - if self.block: - raise EmptyPoolError(self, - "Pool reached maximum size and no more " - "connections are allowed.") - pass # Oh well, we'll create a new connection then - - # If this is a persistent connection, check if it got disconnected - if conn and is_connection_dropped(conn): - log.debug("Resetting dropped connection: %s", self.host) - conn.close() - if getattr(conn, 'auto_open', 1) == 0: - # This is a proxied connection that has been mutated by - # httplib._tunnel() and cannot be reused (since it would - # attempt to bypass the proxy) - conn = None - - return conn or self._new_conn() - - def _put_conn(self, conn): - """ - Put a connection back into the pool. - - :param conn: - Connection object for the current host and port as returned by - :meth:`._new_conn` or :meth:`._get_conn`. - - If the pool is already full, the connection is closed and discarded - because we exceeded maxsize. If connections are discarded frequently, - then maxsize should be increased. - - If the pool is closed, then the connection will be closed and discarded. - """ - try: - self.pool.put(conn, block=False) - return # Everything is dandy, done. - except AttributeError: - # self.pool is None. - pass - except queue.Full: - # This should never happen if self.block == True - log.warning( - "Connection pool is full, discarding connection: %s", - self.host) - - # Connection never got put back into the pool, close it. - if conn: - conn.close() - - def _validate_conn(self, conn): - """ - Called right before a request is made, after the socket is created. - """ - pass - - def _prepare_proxy(self, conn): - # Nothing to do for HTTP connections. - pass - - def _get_timeout(self, timeout): - """ Helper that always returns a :class:`urllib3.util.Timeout` """ - if timeout is _Default: - return self.timeout.clone() - - if isinstance(timeout, Timeout): - return timeout.clone() - else: - # User passed us an int/float. This is for backwards compatibility, - # can be removed later - return Timeout.from_float(timeout) - - def _raise_timeout(self, err, url, timeout_value): - """Is the error actually a timeout? Will raise a ReadTimeout or pass""" - - if isinstance(err, SocketTimeout): - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - # See the above comment about EAGAIN in Python 3. In Python 2 we have - # to specifically catch it and throw the timeout error - if hasattr(err, 'errno') and err.errno in _blocking_errnos: - raise ReadTimeoutError(self, url, "Read timed out. 
(read timeout=%s)" % timeout_value) - - # Catch possible read timeouts thrown as SSL errors. If not the - # case, rethrow the original. We need to do this because of: - # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4 - raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) - - def _make_request(self, conn, method, url, timeout=_Default, chunked=False, - **httplib_request_kw): - """ - Perform a request on a given urllib connection object taken from our - pool. - - :param conn: - a connection from one of our connection pools - - :param timeout: - Socket timeout in seconds for the request. This can be a - float or integer, which will set the same timeout value for - the socket connect and the socket read, or an instance of - :class:`urllib3.util.Timeout`, which gives you more fine-grained - control over your timeouts. - """ - self.num_requests += 1 - - timeout_obj = self._get_timeout(timeout) - timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout - - # Trigger any extra validation we need to do. - try: - self._validate_conn(conn) - except (SocketTimeout, BaseSSLError) as e: - # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. - self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) - raise - - # conn.request() calls httplib.*.request, not the method in - # urllib3.request. It also calls makefile (recv) on the socket. - if chunked: - conn.request_chunked(method, url, **httplib_request_kw) - else: - conn.request(method, url, **httplib_request_kw) - - # Reset the timeout for the recv() on the socket - read_timeout = timeout_obj.read_timeout - - # App Engine doesn't have a sock attr - if getattr(conn, 'sock', None): - # In Python 3 socket.py will catch EAGAIN and return None when you - # try and read into the file pointer created by http.client, which - # instead raises a BadStatusLine exception. Instead of catching - # the exception and assuming all BadStatusLine exceptions are read - # timeouts, check for a zero timeout before making the request. - if read_timeout == 0: - raise ReadTimeoutError( - self, url, "Read timed out. (read timeout=%s)" % read_timeout) - if read_timeout is Timeout.DEFAULT_TIMEOUT: - conn.sock.settimeout(socket.getdefaulttimeout()) - else: # None or a value - conn.sock.settimeout(read_timeout) - - # Receive the response from the server - try: - try: # Python 2.7, use buffering of HTTP responses - httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 3 - try: - httplib_response = conn.getresponse() - except Exception as e: - # Remove the TypeError from the exception chain in Python 3; - # otherwise it looks like a programming error was the cause. - six.raise_from(e, None) - except (SocketTimeout, BaseSSLError, SocketError) as e: - self._raise_timeout(err=e, url=url, timeout_value=read_timeout) - raise - - # AppEngine doesn't have a version attr. 
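``_make_request`` above accepts either a plain number or a ``Timeout`` instance; the object form sets the connect and read phases independently. A minimal sketch, with a placeholder host::

    import urllib3
    from urllib3.util.timeout import Timeout

    pool = urllib3.HTTPConnectionPool('example.com')

    # 2 seconds to establish the socket, 7 seconds per read on it.
    try:
        r = pool.request('GET', '/', timeout=Timeout(connect=2.0, read=7.0))
    except urllib3.exceptions.ReadTimeoutError:
        pass  # surfaced by _raise_timeout() above when the read phase expires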
- http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') - log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, - method, url, http_version, httplib_response.status, - httplib_response.length) - - try: - assert_header_parsing(httplib_response.msg) - except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 - log.warning( - 'Failed to parse headers (url=%s): %s', - self._absolute_url(url), hpe, exc_info=True) - - return httplib_response - - def _absolute_url(self, path): - return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url - - def close(self): - """ - Close all pooled connections and disable the pool. - """ - if self.pool is None: - return - # Disable access to the pool - old_pool, self.pool = self.pool, None - - try: - while True: - conn = old_pool.get(block=False) - if conn: - conn.close() - - except queue.Empty: - pass # Done. - - def is_same_host(self, url): - """ - Check if the given ``url`` is a member of the same host as this - connection pool. - """ - if url.startswith('/'): - return True - - # TODO: Add optional support for socket.gethostbyname checking. - scheme, host, port = get_host(url) - - host = _ipv6_host(host, self.scheme) - - # Use explicit default port for comparison when none is given - if self.port and not port: - port = port_by_scheme.get(scheme) - elif not self.port and port == port_by_scheme.get(scheme): - port = None - - return (scheme, host, port) == (self.scheme, self.host, self.port) - - def urlopen(self, method, url, body=None, headers=None, retries=None, - redirect=True, assert_same_host=True, timeout=_Default, - pool_timeout=None, release_conn=None, chunked=False, - body_pos=None, **response_kw): - """ - Get a connection from the pool and perform an HTTP request. This is the - lowest level call for making a request, so you'll need to specify all - the raw details. - - .. note:: - - More commonly, it's appropriate to use a convenience method provided - by :class:`.RequestMethods`, such as :meth:`request`. - - .. note:: - - `release_conn` will only behave as expected if - `preload_content=False` because we want to make - `preload_content=False` the default behaviour someday soon without - breaking backwards compatibility. - - :param method: - HTTP request method (such as GET, POST, PUT, etc.) - - :param body: - Data to send in the request body (useful for creating - POST requests, see HTTPConnectionPool.post_url for - more convenience). - - :param headers: - Dictionary of custom headers to send, such as User-Agent, - If-None-Match, etc. If None, pool headers are used. If provided, - these headers completely replace any pool-specific headers. - - :param retries: - Configure the number of retries to allow before raising a - :class:`~urllib3.exceptions.MaxRetryError` exception. - - Pass ``None`` to retry until you receive a response. Pass a - :class:`~urllib3.util.retry.Retry` object for fine-grained control - over different types of retries. - Pass an integer number to retry connection errors that many times, - but no other types of errors. Pass zero to never retry. - - If ``False``, then retries are disabled and any exception is raised - immediately. Also, instead of raising a MaxRetryError on redirects, - the redirect response will be returned. - - :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. - - :param redirect: - If True, automatically handle redirects (status codes 301, 302, - 303, 307, 308). Each redirect counts as a retry. 
Disabling retries - will disable redirect, too. - - :param assert_same_host: - If ``True``, will make sure that the host of the pool requests is - consistent else will raise HostChangedError. When False, you can - use the pool on an HTTP proxy and request foreign hosts. - - :param timeout: - If specified, overrides the default timeout for this one - request. It may be a float (in seconds) or an instance of - :class:`urllib3.util.Timeout`. - - :param pool_timeout: - If set and the pool is set to block=True, then this method will - block for ``pool_timeout`` seconds and raise EmptyPoolError if no - connection is available within the time period. - - :param release_conn: - If False, then the urlopen call will not release the connection - back into the pool once a response is received (but will release if - you read the entire contents of the response such as when - `preload_content=True`). This is useful if you're not preloading - the response's content immediately. You will need to call - ``r.release_conn()`` on the response ``r`` to return the connection - back into the pool. If None, it takes the value of - ``response_kw.get('preload_content', True)``. - - :param chunked: - If True, urllib3 will send the body using chunked transfer - encoding. Otherwise, urllib3 will send the body using the standard - content-length form. Defaults to False. - - :param int body_pos: - Position to seek to in file-like body in the event of a retry or - redirect. Typically this won't need to be set because urllib3 will - auto-populate the value when needed. - - :param \\**response_kw: - Additional parameters are passed to - :meth:`urllib3.response.HTTPResponse.from_httplib` - """ - if headers is None: - headers = self.headers - - if not isinstance(retries, Retry): - retries = Retry.from_int(retries, redirect=redirect, default=self.retries) - - if release_conn is None: - release_conn = response_kw.get('preload_content', True) - - # Check host - if assert_same_host and not self.is_same_host(url): - raise HostChangedError(self, url, retries) - - conn = None - - # Track whether `conn` needs to be released before - # returning/raising/recursing. Update this variable if necessary, and - # leave `release_conn` constant throughout the function. That way, if - # the function recurses, the original value of `release_conn` will be - # passed down into the recursive call, and its value will be respected. - # - # See issue #651 [1] for details. - # - # [1] - release_this_conn = release_conn - - # Merge the proxy headers. Only do this in HTTP. We have to copy the - # headers dict so we can safely change it without those changes being - # reflected in anyone else's copy. - if self.scheme == 'http': - headers = headers.copy() - headers.update(self.proxy_headers) - - # Must keep the exception bound to a separate variable or else Python 3 - # complains about UnboundLocalError. - err = None - - # Keep track of whether we cleanly exited the except block. This - # ensures we do proper cleanup in finally. - clean_exit = False - - # Rewind body position, if needed. Record current position - # for future rewinds in the event of a redirect/retry. - body_pos = set_file_position(body, body_pos) - - try: - # Request a connection from the queue. 
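The ``retries`` parameter documented above accepts an int, ``False``, or a ``Retry`` object for fine-grained control. A minimal sketch of the object form, with a placeholder host::

    import urllib3
    from urllib3.util.retry import Retry

    pool = urllib3.HTTPConnectionPool('example.com')

    # At most 5 attempts overall and 2 followed redirects; retry the listed
    # statuses with an exponential backoff sleep between attempts.
    retries = Retry(total=5, redirect=2, status_forcelist=[502, 503],
                    backoff_factor=0.2)
    r = pool.urlopen('GET', '/', retries=retries)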
- timeout_obj = self._get_timeout(timeout) - conn = self._get_conn(timeout=pool_timeout) - - conn.timeout = timeout_obj.connect_timeout - - is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) - if is_new_proxy_conn: - self._prepare_proxy(conn) - - # Make the request on the httplib connection object. - httplib_response = self._make_request(conn, method, url, - timeout=timeout_obj, - body=body, headers=headers, - chunked=chunked) - - # If we're going to release the connection in ``finally:``, then - # the response doesn't need to know about the connection. Otherwise - # it will also try to release it and we'll have a double-release - # mess. - response_conn = conn if not release_conn else None - - # Pass method to Response for length checking - response_kw['request_method'] = method - - # Import httplib's response into our own wrapper object - response = self.ResponseCls.from_httplib(httplib_response, - pool=self, - connection=response_conn, - retries=retries, - **response_kw) - - # Everything went great! - clean_exit = True - - except queue.Empty: - # Timed out by queue. - raise EmptyPoolError(self, "No pool connections are available.") - - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError, CertificateError) as e: - # Discard the connection for these exceptions. It will be - # replaced during the next _get_conn() call. - clean_exit = False - if isinstance(e, (BaseSSLError, CertificateError)): - e = SSLError(e) - elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: - e = ProxyError('Cannot connect to proxy.', e) - elif isinstance(e, (SocketError, HTTPException)): - e = ProtocolError('Connection aborted.', e) - - retries = retries.increment(method, url, error=e, _pool=self, - _stacktrace=sys.exc_info()[2]) - retries.sleep() - - # Keep track of the error for the retry warning. - err = e - - finally: - if not clean_exit: - # We hit some kind of exception, handled or otherwise. We need - # to throw the connection away unless explicitly told not to. - # Close the connection, set the variable to None, and make sure - # we put the None back in the pool to avoid leaking it. - conn = conn and conn.close() - release_this_conn = True - - if release_this_conn: - # Put the connection back to be reused. If the connection is - # expired then it will be None, which will get replaced with a - # fresh connection during _get_conn. - self._put_conn(conn) - - if not conn: - # Try again - log.warning("Retrying (%r) after connection " - "broken by '%r': %s", retries, err, url) - return self.urlopen(method, url, body, headers, retries, - redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - def drain_and_release_conn(response): - try: - # discard any remaining response body, the connection will be - # released back to the pool once the entire response is read - response.read() - except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError) as e: - pass - - # Handle redirect? - redirect_location = redirect and response.get_redirect_location() - if redirect_location: - if response.status == 303: - method = 'GET' - - try: - retries = retries.increment(method, url, response=response, _pool=self) - except MaxRetryError: - if retries.raise_on_redirect: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. 
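The ``release_conn``/``preload_content`` interplay described in the docstring above matters most when streaming a response body. A minimal sketch, with a placeholder host::

    import urllib3

    pool = urllib3.HTTPConnectionPool('example.com')

    # preload_content=False implies release_conn=False, so the connection
    # stays checked out until the body is drained or released explicitly.
    r = pool.urlopen('GET', '/', preload_content=False)
    try:
        for chunk in r.stream(1024):
            pass  # process each chunk
    finally:
        r.release_conn()  # return the connection to the pool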
- drain_and_release_conn(response) - raise - return response - - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - - retries.sleep_for_retry(response) - log.debug("Redirecting %s -> %s", url, redirect_location) - return self.urlopen( - method, redirect_location, body, headers, - retries=retries, redirect=redirect, - assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, body_pos=body_pos, - **response_kw) - - # Check if we should retry the HTTP response. - has_retry_after = bool(response.getheader('Retry-After')) - if retries.is_retry(method, response.status, has_retry_after): - try: - retries = retries.increment(method, url, response=response, _pool=self) - except MaxRetryError: - if retries.raise_on_status: - # Drain and release the connection for this response, since - # we're not returning it to be released manually. - drain_and_release_conn(response) - raise - return response - - # drain and return the connection to the pool before recursing - drain_and_release_conn(response) - - retries.sleep(response) - log.debug("Retry: %s", url) - return self.urlopen( - method, url, body, headers, - retries=retries, redirect=redirect, - assert_same_host=assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, - body_pos=body_pos, **response_kw) - - return response - - -class HTTPSConnectionPool(HTTPConnectionPool): - """ - Same as :class:`.HTTPConnectionPool`, but HTTPS. - - When Python is compiled with the :mod:`ssl` module, then - :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, - instead of :class:`.HTTPSConnection`. - - :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, - ``assert_hostname`` and ``host`` in this order to verify connections. - If ``assert_hostname`` is False, no verification is done. - - The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, - ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is - available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade - the connection socket into an SSL socket. - """ - - scheme = 'https' - ConnectionCls = HTTPSConnection - - def __init__(self, host, port=None, - strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, - block=False, headers=None, retries=None, - _proxy=None, _proxy_headers=None, - key_file=None, cert_file=None, cert_reqs=None, - ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None, - ca_cert_dir=None, **conn_kw): - - HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, retries, _proxy, _proxy_headers, - **conn_kw) - - if ca_certs and cert_reqs is None: - cert_reqs = 'CERT_REQUIRED' - - self.key_file = key_file - self.cert_file = cert_file - self.cert_reqs = cert_reqs - self.ca_certs = ca_certs - self.ca_cert_dir = ca_cert_dir - self.ssl_version = ssl_version - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - - def _prepare_conn(self, conn): - """ - Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` - and establish the tunnel if proxy is used. 
- """ - - if isinstance(conn, VerifiedHTTPSConnection): - conn.set_cert(key_file=self.key_file, - cert_file=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ca_cert_dir=self.ca_cert_dir, - assert_hostname=self.assert_hostname, - assert_fingerprint=self.assert_fingerprint) - conn.ssl_version = self.ssl_version - return conn - - def _prepare_proxy(self, conn): - """ - Establish tunnel connection early, because otherwise httplib - would improperly set Host: header to proxy's IP:port. - """ - conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) - conn.connect() - - def _new_conn(self): - """ - Return a fresh :class:`httplib.HTTPSConnection`. - """ - self.num_connections += 1 - log.debug("Starting new HTTPS connection (%d): %s:%s", - self.num_connections, self.host, self.port or "443") - - if not self.ConnectionCls or self.ConnectionCls is DummyConnection: - raise SSLError("Can't connect to HTTPS URL because the SSL " - "module is not available.") - - actual_host = self.host - actual_port = self.port - if self.proxy is not None: - actual_host = self.proxy.host - actual_port = self.proxy.port - - conn = self.ConnectionCls(host=actual_host, port=actual_port, - timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) - - return self._prepare_conn(conn) - - def _validate_conn(self, conn): - """ - Called right before a request is made, after the socket is created. - """ - super(HTTPSConnectionPool, self)._validate_conn(conn) - - # Force connect early to allow us to validate the connection. - if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` - conn.connect() - - if not conn.is_verified: - warnings.warn(( - 'Unverified HTTPS request is being made. ' - 'Adding certificate verification is strongly advised. See: ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings'), - InsecureRequestWarning) - - -def connection_from_url(url, **kw): - """ - Given a url, return an :class:`.ConnectionPool` instance of its host. - - This is a shortcut for not having to parse out the scheme, host, and port - of the url before creating an :class:`.ConnectionPool` instance. - - :param url: - Absolute URL string that must include the scheme. Port is optional. - - :param \\**kw: - Passes additional parameters to the constructor of the appropriate - :class:`.ConnectionPool`. Useful for specifying things like - timeout, maxsize, headers, etc. - - Example:: - - >>> conn = connection_from_url('http://google.com/') - >>> r = conn.request('GET', '/') - """ - scheme, host, port = get_host(url) - port = port or port_by_scheme.get(scheme, 80) - if scheme == 'https': - return HTTPSConnectionPool(host, port=port, **kw) - else: - return HTTPConnectionPool(host, port=port, **kw) - - -def _ipv6_host(host, scheme): - """ - Process IPv6 address literals - """ - - # httplib doesn't like it when we include brackets in IPv6 addresses - # Specifically, if we include brackets but also pass the port then - # httplib crazily doubles up the square brackets on the Host header. - # Instead, we need to make sure we never pass ``None`` as the port. - # However, for backward compatibility reasons we can't actually - # *assert* that. 
See http://bugs.python.org/issue28539 - # - # Also if an IPv6 address literal has a zone identifier, the - # percent sign might be URIencoded, convert it back into ASCII - if host.startswith('[') and host.endswith(']'): - host = host.replace('%25', '%').strip('[]') - if scheme in NORMALIZABLE_SCHEMES: - host = host.lower() - return host diff --git a/pype/vendor/urllib3/contrib/__init__.py b/pype/vendor/urllib3/contrib/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/urllib3/contrib/_appengine_environ.py b/pype/vendor/urllib3/contrib/_appengine_environ.py deleted file mode 100644 index f3e00942cb..0000000000 --- a/pype/vendor/urllib3/contrib/_appengine_environ.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -This module provides means to detect the App Engine environment. -""" - -import os - - -def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) - - -def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() - - -def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) - - -def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) - - -def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' diff --git a/pype/vendor/urllib3/contrib/_securetransport/__init__.py b/pype/vendor/urllib3/contrib/_securetransport/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/urllib3/contrib/_securetransport/bindings.py b/pype/vendor/urllib3/contrib/_securetransport/bindings.py deleted file mode 100644 index bcf41c02b2..0000000000 --- a/pype/vendor/urllib3/contrib/_securetransport/bindings.py +++ /dev/null @@ -1,593 +0,0 @@ -""" -This module uses ctypes to bind a whole bunch of functions and constants from -SecureTransport. The goal here is to provide the low-level API to -SecureTransport. These are essentially the C-level functions and constants, and -they're pretty gross to work with. - -This code is a bastardised version of the code found in Will Bond's oscrypto -library. An enormous debt is owed to him for blazing this trail for us. For -that reason, this code should be considered to be covered both by urllib3's -license and by oscrypto's: - - Copyright (c) 2015-2016 Will Bond - - Permission is hereby granted, free of charge, to any person obtaining a - copy of this software and associated documentation files (the "Software"), - to deal in the Software without restriction, including without limitation - the rights to use, copy, modify, merge, publish, distribute, sublicense, - and/or sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. 
-""" -from __future__ import absolute_import - -import platform -from ctypes.util import find_library -from ctypes import ( - c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, - c_bool -) -from ctypes import CDLL, POINTER, CFUNCTYPE - - -security_path = find_library('Security') -if not security_path: - raise ImportError('The library Security could not be found') - - -core_foundation_path = find_library('CoreFoundation') -if not core_foundation_path: - raise ImportError('The library CoreFoundation could not be found') - - -version = platform.mac_ver()[0] -version_info = tuple(map(int, version.split('.'))) -if version_info < (10, 8): - raise OSError( - 'Only OS X 10.8 and newer are supported, not %s.%s' % ( - version_info[0], version_info[1] - ) - ) - -Security = CDLL(security_path, use_errno=True) -CoreFoundation = CDLL(core_foundation_path, use_errno=True) - -Boolean = c_bool -CFIndex = c_long -CFStringEncoding = c_uint32 -CFData = c_void_p -CFString = c_void_p -CFArray = c_void_p -CFMutableArray = c_void_p -CFDictionary = c_void_p -CFError = c_void_p -CFType = c_void_p -CFTypeID = c_ulong - -CFTypeRef = POINTER(CFType) -CFAllocatorRef = c_void_p - -OSStatus = c_int32 - -CFDataRef = POINTER(CFData) -CFStringRef = POINTER(CFString) -CFArrayRef = POINTER(CFArray) -CFMutableArrayRef = POINTER(CFMutableArray) -CFDictionaryRef = POINTER(CFDictionary) -CFArrayCallBacks = c_void_p -CFDictionaryKeyCallBacks = c_void_p -CFDictionaryValueCallBacks = c_void_p - -SecCertificateRef = POINTER(c_void_p) -SecExternalFormat = c_uint32 -SecExternalItemType = c_uint32 -SecIdentityRef = POINTER(c_void_p) -SecItemImportExportFlags = c_uint32 -SecItemImportExportKeyParameters = c_void_p -SecKeychainRef = POINTER(c_void_p) -SSLProtocol = c_uint32 -SSLCipherSuite = c_uint32 -SSLContextRef = POINTER(c_void_p) -SecTrustRef = POINTER(c_void_p) -SSLConnectionRef = c_uint32 -SecTrustResultType = c_uint32 -SecTrustOptionFlags = c_uint32 -SSLProtocolSide = c_uint32 -SSLConnectionType = c_uint32 -SSLSessionOption = c_uint32 - - -try: - Security.SecItemImport.argtypes = [ - CFDataRef, - CFStringRef, - POINTER(SecExternalFormat), - POINTER(SecExternalItemType), - SecItemImportExportFlags, - POINTER(SecItemImportExportKeyParameters), - SecKeychainRef, - POINTER(CFArrayRef), - ] - Security.SecItemImport.restype = OSStatus - - Security.SecCertificateGetTypeID.argtypes = [] - Security.SecCertificateGetTypeID.restype = CFTypeID - - Security.SecIdentityGetTypeID.argtypes = [] - Security.SecIdentityGetTypeID.restype = CFTypeID - - Security.SecKeyGetTypeID.argtypes = [] - Security.SecKeyGetTypeID.restype = CFTypeID - - Security.SecCertificateCreateWithData.argtypes = [ - CFAllocatorRef, - CFDataRef - ] - Security.SecCertificateCreateWithData.restype = SecCertificateRef - - Security.SecCertificateCopyData.argtypes = [ - SecCertificateRef - ] - Security.SecCertificateCopyData.restype = CFDataRef - - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] - Security.SecCopyErrorMessageString.restype = CFStringRef - - Security.SecIdentityCreateWithCertificate.argtypes = [ - CFTypeRef, - SecCertificateRef, - POINTER(SecIdentityRef) - ] - Security.SecIdentityCreateWithCertificate.restype = OSStatus - - Security.SecKeychainCreate.argtypes = [ - c_char_p, - c_uint32, - c_void_p, - Boolean, - c_void_p, - POINTER(SecKeychainRef) - ] - Security.SecKeychainCreate.restype = OSStatus - - Security.SecKeychainDelete.argtypes = [ - SecKeychainRef - ] - Security.SecKeychainDelete.restype = OSStatus - - 
Security.SecPKCS12Import.argtypes = [ - CFDataRef, - CFDictionaryRef, - POINTER(CFArrayRef) - ] - Security.SecPKCS12Import.restype = OSStatus - - SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) - SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) - - Security.SSLSetIOFuncs.argtypes = [ - SSLContextRef, - SSLReadFunc, - SSLWriteFunc - ] - Security.SSLSetIOFuncs.restype = OSStatus - - Security.SSLSetPeerID.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] - Security.SSLSetPeerID.restype = OSStatus - - Security.SSLSetCertificate.argtypes = [ - SSLContextRef, - CFArrayRef - ] - Security.SSLSetCertificate.restype = OSStatus - - Security.SSLSetCertificateAuthorities.argtypes = [ - SSLContextRef, - CFTypeRef, - Boolean - ] - Security.SSLSetCertificateAuthorities.restype = OSStatus - - Security.SSLSetConnection.argtypes = [ - SSLContextRef, - SSLConnectionRef - ] - Security.SSLSetConnection.restype = OSStatus - - Security.SSLSetPeerDomainName.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t - ] - Security.SSLSetPeerDomainName.restype = OSStatus - - Security.SSLHandshake.argtypes = [ - SSLContextRef - ] - Security.SSLHandshake.restype = OSStatus - - Security.SSLRead.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] - Security.SSLRead.restype = OSStatus - - Security.SSLWrite.argtypes = [ - SSLContextRef, - c_char_p, - c_size_t, - POINTER(c_size_t) - ] - Security.SSLWrite.restype = OSStatus - - Security.SSLClose.argtypes = [ - SSLContextRef - ] - Security.SSLClose.restype = OSStatus - - Security.SSLGetNumberSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(c_size_t) - ] - Security.SSLGetNumberSupportedCiphers.restype = OSStatus - - Security.SSLGetSupportedCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - POINTER(c_size_t) - ] - Security.SSLGetSupportedCiphers.restype = OSStatus - - Security.SSLSetEnabledCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - c_size_t - ] - Security.SSLSetEnabledCiphers.restype = OSStatus - - Security.SSLGetNumberEnabledCiphers.argtype = [ - SSLContextRef, - POINTER(c_size_t) - ] - Security.SSLGetNumberEnabledCiphers.restype = OSStatus - - Security.SSLGetEnabledCiphers.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite), - POINTER(c_size_t) - ] - Security.SSLGetEnabledCiphers.restype = OSStatus - - Security.SSLGetNegotiatedCipher.argtypes = [ - SSLContextRef, - POINTER(SSLCipherSuite) - ] - Security.SSLGetNegotiatedCipher.restype = OSStatus - - Security.SSLGetNegotiatedProtocolVersion.argtypes = [ - SSLContextRef, - POINTER(SSLProtocol) - ] - Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus - - Security.SSLCopyPeerTrust.argtypes = [ - SSLContextRef, - POINTER(SecTrustRef) - ] - Security.SSLCopyPeerTrust.restype = OSStatus - - Security.SecTrustSetAnchorCertificates.argtypes = [ - SecTrustRef, - CFArrayRef - ] - Security.SecTrustSetAnchorCertificates.restype = OSStatus - - Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ - SecTrustRef, - Boolean - ] - Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus - - Security.SecTrustEvaluate.argtypes = [ - SecTrustRef, - POINTER(SecTrustResultType) - ] - Security.SecTrustEvaluate.restype = OSStatus - - Security.SecTrustGetCertificateCount.argtypes = [ - SecTrustRef - ] - Security.SecTrustGetCertificateCount.restype = CFIndex - - Security.SecTrustGetCertificateAtIndex.argtypes = [ - SecTrustRef, - CFIndex - ] - 
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef - - Security.SSLCreateContext.argtypes = [ - CFAllocatorRef, - SSLProtocolSide, - SSLConnectionType - ] - Security.SSLCreateContext.restype = SSLContextRef - - Security.SSLSetSessionOption.argtypes = [ - SSLContextRef, - SSLSessionOption, - Boolean - ] - Security.SSLSetSessionOption.restype = OSStatus - - Security.SSLSetProtocolVersionMin.argtypes = [ - SSLContextRef, - SSLProtocol - ] - Security.SSLSetProtocolVersionMin.restype = OSStatus - - Security.SSLSetProtocolVersionMax.argtypes = [ - SSLContextRef, - SSLProtocol - ] - Security.SSLSetProtocolVersionMax.restype = OSStatus - - Security.SecCopyErrorMessageString.argtypes = [ - OSStatus, - c_void_p - ] - Security.SecCopyErrorMessageString.restype = CFStringRef - - Security.SSLReadFunc = SSLReadFunc - Security.SSLWriteFunc = SSLWriteFunc - Security.SSLContextRef = SSLContextRef - Security.SSLProtocol = SSLProtocol - Security.SSLCipherSuite = SSLCipherSuite - Security.SecIdentityRef = SecIdentityRef - Security.SecKeychainRef = SecKeychainRef - Security.SecTrustRef = SecTrustRef - Security.SecTrustResultType = SecTrustResultType - Security.SecExternalFormat = SecExternalFormat - Security.OSStatus = OSStatus - - Security.kSecImportExportPassphrase = CFStringRef.in_dll( - Security, 'kSecImportExportPassphrase' - ) - Security.kSecImportItemIdentity = CFStringRef.in_dll( - Security, 'kSecImportItemIdentity' - ) - - # CoreFoundation time! - CoreFoundation.CFRetain.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFRetain.restype = CFTypeRef - - CoreFoundation.CFRelease.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFRelease.restype = None - - CoreFoundation.CFGetTypeID.argtypes = [ - CFTypeRef - ] - CoreFoundation.CFGetTypeID.restype = CFTypeID - - CoreFoundation.CFStringCreateWithCString.argtypes = [ - CFAllocatorRef, - c_char_p, - CFStringEncoding - ] - CoreFoundation.CFStringCreateWithCString.restype = CFStringRef - - CoreFoundation.CFStringGetCStringPtr.argtypes = [ - CFStringRef, - CFStringEncoding - ] - CoreFoundation.CFStringGetCStringPtr.restype = c_char_p - - CoreFoundation.CFStringGetCString.argtypes = [ - CFStringRef, - c_char_p, - CFIndex, - CFStringEncoding - ] - CoreFoundation.CFStringGetCString.restype = c_bool - - CoreFoundation.CFDataCreate.argtypes = [ - CFAllocatorRef, - c_char_p, - CFIndex - ] - CoreFoundation.CFDataCreate.restype = CFDataRef - - CoreFoundation.CFDataGetLength.argtypes = [ - CFDataRef - ] - CoreFoundation.CFDataGetLength.restype = CFIndex - - CoreFoundation.CFDataGetBytePtr.argtypes = [ - CFDataRef - ] - CoreFoundation.CFDataGetBytePtr.restype = c_void_p - - CoreFoundation.CFDictionaryCreate.argtypes = [ - CFAllocatorRef, - POINTER(CFTypeRef), - POINTER(CFTypeRef), - CFIndex, - CFDictionaryKeyCallBacks, - CFDictionaryValueCallBacks - ] - CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef - - CoreFoundation.CFDictionaryGetValue.argtypes = [ - CFDictionaryRef, - CFTypeRef - ] - CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef - - CoreFoundation.CFArrayCreate.argtypes = [ - CFAllocatorRef, - POINTER(CFTypeRef), - CFIndex, - CFArrayCallBacks, - ] - CoreFoundation.CFArrayCreate.restype = CFArrayRef - - CoreFoundation.CFArrayCreateMutable.argtypes = [ - CFAllocatorRef, - CFIndex, - CFArrayCallBacks - ] - CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef - - CoreFoundation.CFArrayAppendValue.argtypes = [ - CFMutableArrayRef, - c_void_p - ] - CoreFoundation.CFArrayAppendValue.restype = None - - 
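SSLReadFunc and SSLWriteFunc above are CFUNCTYPE prototypes: they wrap a Python function as a C function pointer so SecureTransport can call back into Python for socket I/O. A minimal sketch of the same mechanism using libc's qsort (the comparator is our illustration, not part of the deleted module)::

    import ctypes
    from ctypes.util import find_library

    libc = ctypes.CDLL(find_library('c'))

    # A C function-pointer type, built the same way as SSLReadFunc above.
    CMPFUNC = ctypes.CFUNCTYPE(ctypes.c_int,
                               ctypes.POINTER(ctypes.c_int),
                               ctypes.POINTER(ctypes.c_int))

    def py_cmp(a, b):
        # qsort hands us pointers; dereference with [0].
        return a[0] - b[0]

    arr = (ctypes.c_int * 4)(3, 1, 4, 1)
    libc.qsort(arr, len(arr), ctypes.sizeof(ctypes.c_int), CMPFUNC(py_cmp))
    assert list(arr) == [1, 1, 3, 4]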
CoreFoundation.CFArrayGetCount.argtypes = [ - CFArrayRef - ] - CoreFoundation.CFArrayGetCount.restype = CFIndex - - CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ - CFArrayRef, - CFIndex - ] - CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p - - CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( - CoreFoundation, 'kCFAllocatorDefault' - ) - CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') - CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' - ) - CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( - CoreFoundation, 'kCFTypeDictionaryValueCallBacks' - ) - - CoreFoundation.CFTypeRef = CFTypeRef - CoreFoundation.CFArrayRef = CFArrayRef - CoreFoundation.CFStringRef = CFStringRef - CoreFoundation.CFDictionaryRef = CFDictionaryRef - -except (AttributeError): - raise ImportError('Error initializing ctypes') - - -class CFConst(object): - """ - A class object that acts as essentially a namespace for CoreFoundation - constants. - """ - kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) - - -class SecurityConst(object): - """ - A class object that acts as essentially a namespace for Security constants. - """ - kSSLSessionOptionBreakOnServerAuth = 0 - - kSSLProtocol2 = 1 - kSSLProtocol3 = 2 - kTLSProtocol1 = 4 - kTLSProtocol11 = 7 - kTLSProtocol12 = 8 - - kSSLClientSide = 1 - kSSLStreamType = 0 - - kSecFormatPEMSequence = 10 - - kSecTrustResultInvalid = 0 - kSecTrustResultProceed = 1 - # This gap is present on purpose: this was kSecTrustResultConfirm, which - # is deprecated. - kSecTrustResultDeny = 3 - kSecTrustResultUnspecified = 4 - kSecTrustResultRecoverableTrustFailure = 5 - kSecTrustResultFatalTrustFailure = 6 - kSecTrustResultOtherError = 7 - - errSSLProtocol = -9800 - errSSLWouldBlock = -9803 - errSSLClosedGraceful = -9805 - errSSLClosedNoNotify = -9816 - errSSLClosedAbort = -9806 - - errSSLXCertChainInvalid = -9807 - errSSLCrypto = -9809 - errSSLInternal = -9810 - errSSLCertExpired = -9814 - errSSLCertNotYetValid = -9815 - errSSLUnknownRootCert = -9812 - errSSLNoRootCert = -9813 - errSSLHostNameMismatch = -9843 - errSSLPeerHandshakeFail = -9824 - errSSLPeerUserCancelled = -9839 - errSSLWeakPeerEphemeralDHKey = -9850 - errSSLServerAuthCompleted = -9841 - errSSLRecordOverflow = -9847 - - errSecVerifyFailed = -67808 - errSecNoTrustSettings = -25263 - errSecItemNotFound = -25300 - errSecInvalidTrustSettings = -25262 - - # Cipher suites. We only pick the ones our default cipher string allows. 
- TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C - TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 - TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B - TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 - TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 - TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 - TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A - TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 - TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 - TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 - TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D - TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C - TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D - TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C - TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 - TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F - TLS_AES_128_GCM_SHA256 = 0x1301 - TLS_AES_256_GCM_SHA384 = 0x1302 - TLS_CHACHA20_POLY1305_SHA256 = 0x1303 diff --git a/pype/vendor/urllib3/contrib/_securetransport/low_level.py b/pype/vendor/urllib3/contrib/_securetransport/low_level.py deleted file mode 100644 index b13cd9e72c..0000000000 --- a/pype/vendor/urllib3/contrib/_securetransport/low_level.py +++ /dev/null @@ -1,346 +0,0 @@ -""" -Low-level helpers for the SecureTransport bindings. - -These are Python functions that are not directly related to the high-level APIs -but are necessary to get them to work. They include a whole bunch of low-level -CoreFoundation messing about and memory management. The concerns in this module -are almost entirely about trying to avoid memory leaks and providing -appropriate and useful assistance to the higher-level code. -""" -import base64 -import ctypes -import itertools -import re -import os -import ssl -import tempfile - -from .bindings import Security, CoreFoundation, CFConst - - -# This regular expression is used to grab PEM data out of a PEM bundle. -_PEM_CERTS_RE = re.compile( - b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL -) - - -def _cf_data_from_bytes(bytestring): - """ - Given a bytestring, create a CFData object from it. This CFData object must - be CFReleased by the caller. - """ - return CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) - ) - - -def _cf_dictionary_from_tuples(tuples): - """ - Given a list of Python tuples, create an associated CFDictionary. - """ - dictionary_size = len(tuples) - - # We need to get the dictionary keys and values out in the same order. 
-    keys = (t[0] for t in tuples)
-    values = (t[1] for t in tuples)
-    cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
-    cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
-
-    return CoreFoundation.CFDictionaryCreate(
-        CoreFoundation.kCFAllocatorDefault,
-        cf_keys,
-        cf_values,
-        dictionary_size,
-        CoreFoundation.kCFTypeDictionaryKeyCallBacks,
-        CoreFoundation.kCFTypeDictionaryValueCallBacks,
-    )
-
-
-def _cf_string_to_unicode(value):
-    """
-    Creates a Unicode string from a CFString object. Used entirely for error
-    reporting.
-
-    Yes, it annoys me quite a lot that this function is this complex.
-    """
-    value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
-
-    string = CoreFoundation.CFStringGetCStringPtr(
-        value_as_void_p,
-        CFConst.kCFStringEncodingUTF8
-    )
-    if string is None:
-        buffer = ctypes.create_string_buffer(1024)
-        result = CoreFoundation.CFStringGetCString(
-            value_as_void_p,
-            buffer,
-            1024,
-            CFConst.kCFStringEncodingUTF8
-        )
-        if not result:
-            raise OSError('Error copying C string from CFStringRef')
-        string = buffer.value
-    if string is not None:
-        string = string.decode('utf-8')
-    return string
-
-
-def _assert_no_error(error, exception_class=None):
-    """
-    Checks the return code and throws an exception if there is an error to
-    report
-    """
-    if error == 0:
-        return
-
-    cf_error_string = Security.SecCopyErrorMessageString(error, None)
-    output = _cf_string_to_unicode(cf_error_string)
-    CoreFoundation.CFRelease(cf_error_string)
-
-    if output is None or output == u'':
-        output = u'OSStatus %s' % error
-
-    if exception_class is None:
-        exception_class = ssl.SSLError
-
-    raise exception_class(output)
-
-
-def _cert_array_from_pem(pem_bundle):
-    """
-    Given a bundle of certs in PEM format, turns them into a CFArray of certs
-    that can be used to validate a cert chain.
-    """
-    # Normalize the PEM bundle's line endings.
-    pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
-
-    der_certs = [
-        base64.b64decode(match.group(1))
-        for match in _PEM_CERTS_RE.finditer(pem_bundle)
-    ]
-    if not der_certs:
-        raise ssl.SSLError("No root certificates specified")
-
-    cert_array = CoreFoundation.CFArrayCreateMutable(
-        CoreFoundation.kCFAllocatorDefault,
-        0,
-        ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks)
-    )
-    if not cert_array:
-        raise ssl.SSLError("Unable to allocate memory!")
-
-    try:
-        for der_bytes in der_certs:
-            certdata = _cf_data_from_bytes(der_bytes)
-            if not certdata:
-                raise ssl.SSLError("Unable to allocate memory!")
-            cert = Security.SecCertificateCreateWithData(
-                CoreFoundation.kCFAllocatorDefault, certdata
-            )
-            CoreFoundation.CFRelease(certdata)
-            if not cert:
-                raise ssl.SSLError("Unable to build cert object!")
-
-            CoreFoundation.CFArrayAppendValue(cert_array, cert)
-            CoreFoundation.CFRelease(cert)
-    except Exception:
-        # We need to free the array before the exception bubbles further.
-        # We only want to do that if an error occurs: otherwise, the caller
-        # should free.
-        CoreFoundation.CFRelease(cert_array)
-        raise
-
-    return cert_array
-
-
-def _is_cert(item):
-    """
-    Returns True if a given CFTypeRef is a certificate.
-    """
-    expected = Security.SecCertificateGetTypeID()
-    return CoreFoundation.CFGetTypeID(item) == expected
-
-
-def _is_identity(item):
-    """
-    Returns True if a given CFTypeRef is an identity.
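_cert_array_from_pem above leans on _PEM_CERTS_RE to pull the base64 DER blobs out of a PEM bundle before handing them to CoreFoundation. The regex half is portable Python and can be sketched standalone (the toy certificate payload is our own)::

    import base64
    import re

    _PEM_CERTS_RE = re.compile(
        b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----",
        re.DOTALL
    )

    pem_bundle = (b"-----BEGIN CERTIFICATE-----\n"
                  b"dG95IGNlcnQgYm9keQ==\n"
                  b"-----END CERTIFICATE-----\n")
    der_certs = [base64.b64decode(m.group(1))
                 for m in _PEM_CERTS_RE.finditer(pem_bundle)]
    assert der_certs == [b'toy cert body']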
- """ - expected = Security.SecIdentityGetTypeID() - return CoreFoundation.CFGetTypeID(item) == expected - - -def _temporary_keychain(): - """ - This function creates a temporary Mac keychain that we can use to work with - credentials. This keychain uses a one-time password and a temporary file to - store the data. We expect to have one keychain per socket. The returned - SecKeychainRef must be freed by the caller, including calling - SecKeychainDelete. - - Returns a tuple of the SecKeychainRef and the path to the temporary - directory that contains it. - """ - # Unfortunately, SecKeychainCreate requires a path to a keychain. This - # means we cannot use mkstemp to use a generic temporary file. Instead, - # we're going to create a temporary directory and a filename to use there. - # This filename will be 8 random bytes expanded into base64. We also need - # some random bytes to password-protect the keychain we're creating, so we - # ask for 40 random bytes. - random_bytes = os.urandom(40) - filename = base64.b16encode(random_bytes[:8]).decode('utf-8') - password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 - tempdirectory = tempfile.mkdtemp() - - keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') - - # We now want to create the keychain itself. - keychain = Security.SecKeychainRef() - status = Security.SecKeychainCreate( - keychain_path, - len(password), - password, - False, - None, - ctypes.byref(keychain) - ) - _assert_no_error(status) - - # Having created the keychain, we want to pass it off to the caller. - return keychain, tempdirectory - - -def _load_items_from_file(keychain, path): - """ - Given a single file, loads all the trust objects from it into arrays and - the keychain. - Returns a tuple of lists: the first list is a list of identities, the - second a list of certs. - """ - certificates = [] - identities = [] - result_array = None - - with open(path, 'rb') as f: - raw_filedata = f.read() - - try: - filedata = CoreFoundation.CFDataCreate( - CoreFoundation.kCFAllocatorDefault, - raw_filedata, - len(raw_filedata) - ) - result_array = CoreFoundation.CFArrayRef() - result = Security.SecItemImport( - filedata, # cert data - None, # Filename, leaving it out for now - None, # What the type of the file is, we don't care - None, # what's in the file, we don't care - 0, # import flags - None, # key params, can include passphrase in the future - keychain, # The keychain to insert into - ctypes.byref(result_array) # Results - ) - _assert_no_error(result) - - # A CFArray is not very useful to us as an intermediary - # representation, so we are going to extract the objects we want - # and then free the array. We don't need to keep hold of keys: the - # keychain already has them! - result_count = CoreFoundation.CFArrayGetCount(result_array) - for index in range(result_count): - item = CoreFoundation.CFArrayGetValueAtIndex( - result_array, index - ) - item = ctypes.cast(item, CoreFoundation.CFTypeRef) - - if _is_cert(item): - CoreFoundation.CFRetain(item) - certificates.append(item) - elif _is_identity(item): - CoreFoundation.CFRetain(item) - identities.append(item) - finally: - if result_array: - CoreFoundation.CFRelease(result_array) - - CoreFoundation.CFRelease(filedata) - - return (identities, certificates) - - -def _load_client_cert_chain(keychain, *paths): - """ - Load certificates and maybe keys from a number of files. 
Has the end goal
-    of returning a CFArray containing one SecIdentityRef, and then zero or more
-    SecCertificateRef objects, suitable for use as a client certificate trust
-    chain.
-    """
-    # Ok, the strategy.
-    #
-    # This relies on knowing that macOS will not give you a SecIdentityRef
-    # unless you have imported a key into a keychain. This is a somewhat
-    # artificial limitation of macOS (for example, it doesn't necessarily
-    # affect iOS), but there is nothing inside Security.framework that lets you
-    # get a SecIdentityRef without having a key in a keychain.
-    #
-    # So the policy here is we take all the files and iterate them in order.
-    # Each one will use SecItemImport to have one or more objects loaded from
-    # it. We will also point at a keychain that macOS can use to work with the
-    # private key.
-    #
-    # Once we have all the objects, we'll check what we actually have. If we
-    # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
-    # we'll take the first certificate (which we assume to be our leaf) and
-    # ask the keychain to give us a SecIdentityRef with that cert's associated
-    # key.
-    #
-    # We'll then return a CFArray containing the trust chain: one
-    # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
-    # responsibility for freeing this CFArray will be with the caller. This
-    # CFArray must remain alive for the entire connection, so in practice it
-    # will be stored with a single SSLSocket, along with the reference to the
-    # keychain.
-    certificates = []
-    identities = []
-
-    # Filter out bad paths.
-    paths = (path for path in paths if path)
-
-    try:
-        for file_path in paths:
-            new_identities, new_certs = _load_items_from_file(
-                keychain, file_path
-            )
-            identities.extend(new_identities)
-            certificates.extend(new_certs)
-
-        # Ok, we have everything. The question is: do we have an identity? If
-        # not, we want to grab one from the first cert we have.
-        if not identities:
-            new_identity = Security.SecIdentityRef()
-            status = Security.SecIdentityCreateWithCertificate(
-                keychain,
-                certificates[0],
-                ctypes.byref(new_identity)
-            )
-            _assert_no_error(status)
-            identities.append(new_identity)
-
-            # We now want to release the original certificate, as we no longer
-            # need it.
-            CoreFoundation.CFRelease(certificates.pop(0))
-
-        # We now need to build a new CFArray that holds the trust chain.
-        trust_chain = CoreFoundation.CFArrayCreateMutable(
-            CoreFoundation.kCFAllocatorDefault,
-            0,
-            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
-        )
-        for item in itertools.chain(identities, certificates):
-            # ArrayAppendValue does a CFRetain on the item. That's fine,
-            # because the finally block will release our other refs to them.
-            CoreFoundation.CFArrayAppendValue(trust_chain, item)
-
-        return trust_chain
-    finally:
-        for obj in itertools.chain(identities, certificates):
-            CoreFoundation.CFRelease(obj)
diff --git a/pype/vendor/urllib3/contrib/appengine.py b/pype/vendor/urllib3/contrib/appengine.py
deleted file mode 100644
index 2952f114df..0000000000
--- a/pype/vendor/urllib3/contrib/appengine.py
+++ /dev/null
@@ -1,289 +0,0 @@
-"""
-This module provides a pool manager that uses Google App Engine's
-URLFetch Service.
-
-Example usage::
-
-    from urllib3 import PoolManager
-    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
-
-    if is_appengine_sandbox():
-        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
-        http = AppEngineManager()
-    else:
-        # PoolManager uses a socket-level API behind the scenes
-        http = PoolManager()
-
-    r = http.request('GET', 'https://google.com/')
-
-There are limitations to the URLFetch service and it may not be
-the best choice for your application. There are three options for using
-urllib3 on Google App Engine:
-
-1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
-   cost-effective in many circumstances as long as your usage is within the
-   limitations.
-2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
-   Sockets also have limitations and restrictions and have a lower free quota
-   than URLFetch. To use sockets, be sure to specify the following in your
-   ``app.yaml``::
-
-        env_variables:
-            GAE_USE_SOCKETS_HTTPLIB : 'true'
-
-3. If you are using App Engine Flexible, you can use the standard
-:class:`PoolManager` without any configuration or special environment variables.
-"""
-
-from __future__ import absolute_import
-import io
-import logging
-import warnings
-from ..packages.six.moves.urllib.parse import urljoin
-
-from ..exceptions import (
-    HTTPError,
-    HTTPWarning,
-    MaxRetryError,
-    ProtocolError,
-    TimeoutError,
-    SSLError
-)
-
-from ..request import RequestMethods
-from ..response import HTTPResponse
-from ..util.timeout import Timeout
-from ..util.retry import Retry
-from . import _appengine_environ
-
-try:
-    from google.appengine.api import urlfetch
-except ImportError:
-    urlfetch = None
-
-
-log = logging.getLogger(__name__)
-
-
-class AppEnginePlatformWarning(HTTPWarning):
-    pass
-
-
-class AppEnginePlatformError(HTTPError):
-    pass
-
-
-class AppEngineManager(RequestMethods):
-    """
-    Connection manager for Google App Engine sandbox applications.
-
-    This manager uses the URLFetch service directly instead of using the
-    emulated httplib, and is subject to URLFetch limitations as described in
-    the App Engine documentation.
-
-    Notably it will raise an :class:`AppEnginePlatformError` if:
-        * URLFetch is not available.
-        * If you attempt to use this on App Engine Flexible, as full socket
-          support is available.
-        * If a request size is more than 10 megabytes.
-        * If a response size is more than 32 megabytes.
-        * If you use an unsupported request method such as OPTIONS.
-
-    Beyond those cases, it will raise normal urllib3 errors.
-    """
-
-    def __init__(self, headers=None, retries=None, validate_certificate=True,
-                 urlfetch_retries=True):
-        if not urlfetch:
-            raise AppEnginePlatformError(
-                "URLFetch is not available in this environment.")
-
-        if is_prod_appengine_mvms():
-            raise AppEnginePlatformError(
-                "Use normal urllib3.PoolManager instead of AppEngineManager "
-                "on Managed VMs, as using URLFetch is not necessary in "
-                "this environment.")
-
-        warnings.warn(
-            "urllib3 is using URLFetch on Google App Engine sandbox instead "
-            "of sockets. To use sockets directly instead of URLFetch see "
-            "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
-            AppEnginePlatformWarning)
-
-        RequestMethods.__init__(self, headers)
-        self.validate_certificate = validate_certificate
-        self.urlfetch_retries = urlfetch_retries
-
-        self.retries = retries or Retry.DEFAULT
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def urlopen(self, method, url, body=None, headers=None,
-                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
-                **response_kw):
-
-        retries = self._get_retries(retries, redirect)
-
-        try:
-            follow_redirects = (
-                redirect and
-                retries.redirect != 0 and
-                retries.total)
-            response = urlfetch.fetch(
-                url,
-                payload=body,
-                method=method,
-                headers=headers or {},
-                allow_truncated=False,
-                follow_redirects=self.urlfetch_retries and follow_redirects,
-                deadline=self._get_absolute_timeout(timeout),
-                validate_certificate=self.validate_certificate,
-            )
-        except urlfetch.DeadlineExceededError as e:
-            raise TimeoutError(self, e)
-
-        except urlfetch.InvalidURLError as e:
-            if 'too large' in str(e):
-                raise AppEnginePlatformError(
-                    "URLFetch request too large, URLFetch only "
-                    "supports requests up to 10mb in size.", e)
-            raise ProtocolError(e)
-
-        except urlfetch.DownloadError as e:
-            if 'Too many redirects' in str(e):
-                raise MaxRetryError(self, url, reason=e)
-            raise ProtocolError(e)
-
-        except urlfetch.ResponseTooLargeError as e:
-            raise AppEnginePlatformError(
-                "URLFetch response too large, URLFetch only supports "
-                "responses up to 32mb in size.", e)
-
-        except urlfetch.SSLCertificateError as e:
-            raise SSLError(e)
-
-        except urlfetch.InvalidMethodError as e:
-            raise AppEnginePlatformError(
-                "URLFetch does not support method: %s" % method, e)
-
-        http_response = self._urlfetch_response_to_http_response(
-            response, retries=retries, **response_kw)
-
-        # Handle redirect?
-        redirect_location = redirect and http_response.get_redirect_location()
-        if redirect_location:
-            # Check for redirect response
-            if (self.urlfetch_retries and retries.raise_on_redirect):
-                raise MaxRetryError(self, url, "too many redirects")
-            else:
-                if http_response.status == 303:
-                    method = 'GET'
-
-                try:
-                    retries = retries.increment(method, url, response=http_response, _pool=self)
-                except MaxRetryError:
-                    if retries.raise_on_redirect:
-                        raise MaxRetryError(self, url, "too many redirects")
-                    return http_response
-
-                retries.sleep_for_retry(http_response)
-                log.debug("Redirecting %s -> %s", url, redirect_location)
-                redirect_url = urljoin(url, redirect_location)
-                return self.urlopen(
-                    method, redirect_url, body, headers,
-                    retries=retries, redirect=redirect,
-                    timeout=timeout, **response_kw)
-
-        # Check if we should retry the HTTP response.
-        has_retry_after = bool(http_response.getheader('Retry-After'))
-        if retries.is_retry(method, http_response.status, has_retry_after):
-            retries = retries.increment(
-                method, url, response=http_response, _pool=self)
-            log.debug("Retry: %s", url)
-            retries.sleep(http_response)
-            return self.urlopen(
-                method, url,
-                body=body, headers=headers,
-                retries=retries, redirect=redirect,
-                timeout=timeout, **response_kw)
-
-        return http_response
-
-    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
-
-        if is_prod_appengine():
-            # Production GAE handles deflate encoding automatically, but does
-            # not remove the encoding header.
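The _get_absolute_timeout and _get_retries helpers that follow collapse urllib3's granular retry and timeout settings to the single deadline and total count URLFetch understands, warning when detail is lost. A sketch of the caller-visible effect, assuming urllib3 is importable (the values are illustrative)::

    from urllib3.util.retry import Retry
    from urllib3.util.timeout import Timeout

    # Granular knobs like these trigger AppEnginePlatformWarning and are
    # reduced to their totals before the URLFetch call is made.
    retries = Retry(total=3, connect=2, read=2)
    timeout = Timeout(connect=5.0, read=10.0, total=15.0)
    print(retries.total, timeout.total)  # 3 15.0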
- content_encoding = urlfetch_resp.headers.get('content-encoding') - - if content_encoding == 'deflate': - del urlfetch_resp.headers['content-encoding'] - - transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') - # We have a full response's content, - # so let's make sure we don't report ourselves as chunked data. - if transfer_encoding == 'chunked': - encodings = transfer_encoding.split(",") - encodings.remove('chunked') - urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) - - original_response = HTTPResponse( - # In order for decoding to work, we must present the content as - # a file-like object. - body=io.BytesIO(urlfetch_resp.content), - msg=urlfetch_resp.header_msg, - headers=urlfetch_resp.headers, - status=urlfetch_resp.status_code, - **response_kw - ) - - return HTTPResponse( - body=io.BytesIO(urlfetch_resp.content), - headers=urlfetch_resp.headers, - status=urlfetch_resp.status_code, - original_response=original_response, - **response_kw - ) - - def _get_absolute_timeout(self, timeout): - if timeout is Timeout.DEFAULT_TIMEOUT: - return None # Defer to URLFetch's default. - if isinstance(timeout, Timeout): - if timeout._read is not None or timeout._connect is not None: - warnings.warn( - "URLFetch does not support granular timeout settings, " - "reverting to total or default URLFetch timeout.", - AppEnginePlatformWarning) - return timeout.total - return timeout - - def _get_retries(self, retries, redirect): - if not isinstance(retries, Retry): - retries = Retry.from_int( - retries, redirect=redirect, default=self.retries) - - if retries.connect or retries.read or retries.redirect: - warnings.warn( - "URLFetch only supports total retries and does not " - "recognize connect, read, or redirect retry parameters.", - AppEnginePlatformWarning) - - return retries - - -# Alias methods from _appengine_environ to maintain public API interface. - -is_appengine = _appengine_environ.is_appengine -is_appengine_sandbox = _appengine_environ.is_appengine_sandbox -is_local_appengine = _appengine_environ.is_local_appengine -is_prod_appengine = _appengine_environ.is_prod_appengine -is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms diff --git a/pype/vendor/urllib3/contrib/ntlmpool.py b/pype/vendor/urllib3/contrib/ntlmpool.py deleted file mode 100644 index 8ea127c583..0000000000 --- a/pype/vendor/urllib3/contrib/ntlmpool.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -NTLM authenticating pool, contributed by erikcederstran - -Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 -""" -from __future__ import absolute_import - -from logging import getLogger -from ntlm import ntlm - -from .. import HTTPSConnectionPool -from ..packages.six.moves.http_client import HTTPSConnection - - -log = getLogger(__name__) - - -class NTLMConnectionPool(HTTPSConnectionPool): - """ - Implements an NTLM authentication version of an urllib3 connection pool - """ - - scheme = 'https' - - def __init__(self, user, pw, authurl, *args, **kwargs): - """ - authurl is a random URL on the server that is protected by NTLM. - user is the Windows user, probably in the DOMAIN\\username format. - pw is the password for the user. - """ - super(NTLMConnectionPool, self).__init__(*args, **kwargs) - self.authurl = authurl - self.rawuser = user - user_parts = user.split('\\', 1) - self.domain = user_parts[0].upper() - self.user = user_parts[1] - self.pw = pw - - def _new_conn(self): - # Performs the NTLM handshake that secures the connection. 
The socket - # must be kept open while requests are performed. - self.num_connections += 1 - log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', - self.num_connections, self.host, self.authurl) - - headers = {'Connection': 'Keep-Alive'} - req_header = 'Authorization' - resp_header = 'www-authenticate' - - conn = HTTPSConnection(host=self.host, port=self.port) - - # Send negotiation message - headers[req_header] = ( - 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) - res = conn.getresponse() - reshdr = dict(res.getheaders()) - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', reshdr) - log.debug('Response data: %s [...]', res.read(100)) - - # Remove the reference to the socket, so that it can not be closed by - # the response object (we want to keep the socket open) - res.fp = None - - # Server should respond with a challenge message - auth_header_values = reshdr[resp_header].split(', ') - auth_header_value = None - for s in auth_header_values: - if s[:5] == 'NTLM ': - auth_header_value = s[5:] - if auth_header_value is None: - raise Exception('Unexpected %s response header: %s' % - (resp_header, reshdr[resp_header])) - - # Send authentication message - ServerChallenge, NegotiateFlags = \ - ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) - auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, - self.user, - self.domain, - self.pw, - NegotiateFlags) - headers[req_header] = 'NTLM %s' % auth_msg - log.debug('Request headers: %s', headers) - conn.request('GET', self.authurl, None, headers) - res = conn.getresponse() - log.debug('Response status: %s %s', res.status, res.reason) - log.debug('Response headers: %s', dict(res.getheaders())) - log.debug('Response data: %s [...]', res.read()[:100]) - if res.status != 200: - if res.status == 401: - raise Exception('Server rejected request: wrong ' - 'username or password') - raise Exception('Wrong server response: %s %s' % - (res.status, res.reason)) - - res.fp = None - log.debug('Connection established') - return conn - - def urlopen(self, method, url, body=None, headers=None, retries=3, - redirect=True, assert_same_host=True): - if headers is None: - headers = {} - headers['Connection'] = 'Keep-Alive' - return super(NTLMConnectionPool, self).urlopen(method, url, body, - headers, retries, - redirect, - assert_same_host) diff --git a/pype/vendor/urllib3/contrib/pyopenssl.py b/pype/vendor/urllib3/contrib/pyopenssl.py deleted file mode 100644 index 7c0e9465d9..0000000000 --- a/pype/vendor/urllib3/contrib/pyopenssl.py +++ /dev/null @@ -1,466 +0,0 @@ -""" -SSL with SNI_-support for Python 2. Follow these instructions if you would -like to verify SSL certificates in Python 2. Note, the default libraries do -*not* do certificate checking; you need to do additional work to validate -certificates yourself. - -This needs the following packages installed: - -* pyOpenSSL (tested with 16.0.0) -* cryptography (minimum 1.3.4, from pyopenssl) -* idna (minimum 2.0, from cryptography) - -However, pyopenssl depends on cryptography, which depends on idna, so while we -use all three directly here we end up having relatively few packages required. 
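The _new_conn method in the NTLM pool deleted above drives the classic three-message NTLM exchange over a kept-alive connection. A sketch of just the message construction, assuming the same python-ntlm package the deleted code imports (the credentials are placeholders)::

    from ntlm import ntlm

    # Leg 1: the NEGOTIATE token sent in the first Authorization header.
    negotiate = ntlm.create_NTLM_NEGOTIATE_MESSAGE('DOMAIN\\user')

    # Leg 2 would parse the server's challenge:
    #   ServerChallenge, NegotiateFlags = \
    #       ntlm.parse_NTLM_CHALLENGE_MESSAGE(challenge_blob)
    # Leg 3 would answer it:
    #   auth = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
    #       ServerChallenge, 'user', 'DOMAIN', 'password', NegotiateFlags)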
- -You can install them with the following command: - - pip install pyopenssl cryptography idna - -To activate certificate checking, call -:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code -before you begin making HTTP requests. This can be done in a ``sitecustomize`` -module, or at any other time before your application begins using ``urllib3``, -like this:: - - try: - import urllib3.contrib.pyopenssl - urllib3.contrib.pyopenssl.inject_into_urllib3() - except ImportError: - pass - -Now you can use :mod:`urllib3` as you normally would, and it will support SNI -when the required modules are installed. - -Activating this module also has the positive side effect of disabling SSL/TLS -compression in Python 2 (see `CRIME attack`_). - -If you want to configure the default list of supported cipher suites, you can -set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. - -.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication -.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) -""" -from __future__ import absolute_import - -import OpenSSL.SSL -from cryptography import x509 -from cryptography.hazmat.backends.openssl import backend as openssl_backend -from cryptography.hazmat.backends.openssl.x509 import _Certificate -try: - from cryptography.x509 import UnsupportedExtension -except ImportError: - # UnsupportedExtension is gone in cryptography >= 2.1.0 - class UnsupportedExtension(Exception): - pass - -from socket import timeout, error as SocketError -from io import BytesIO - -try: # Platform-specific: Python 2 - from socket import _fileobject -except ImportError: # Platform-specific: Python 3 - _fileobject = None - from ..packages.backports.makefile import backport_makefile - -import logging -import ssl -from ..packages import six -import sys - -from .. import util - -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] - -# SNI always works. -HAS_SNI = True - -# Map from urllib3 to PyOpenSSL compatible parameter-values. -_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, - ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, -} - -if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): - _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD - -if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): - _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD - -try: - _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) -except AttributeError: - pass - -_stdlib_to_openssl_verify = { - ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, - ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: - OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, -} -_openssl_to_stdlib_verify = dict( - (v, k) for k, v in _stdlib_to_openssl_verify.items() -) - -# OpenSSL will only write 16K at a time -SSL_WRITE_BLOCKSIZE = 16384 - -orig_util_HAS_SNI = util.HAS_SNI -orig_util_SSLContext = util.ssl_.SSLContext - - -log = logging.getLogger(__name__) - - -def inject_into_urllib3(): - 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' - - _validate_dependencies_met() - - util.ssl_.SSLContext = PyOpenSSLContext - util.HAS_SNI = HAS_SNI - util.ssl_.HAS_SNI = HAS_SNI - util.IS_PYOPENSSL = True - util.ssl_.IS_PYOPENSSL = True - - -def extract_from_urllib3(): - 'Undo monkey-patching by :func:`inject_into_urllib3`.' 
- - util.ssl_.SSLContext = orig_util_SSLContext - util.HAS_SNI = orig_util_HAS_SNI - util.ssl_.HAS_SNI = orig_util_HAS_SNI - util.IS_PYOPENSSL = False - util.ssl_.IS_PYOPENSSL = False - - -def _validate_dependencies_met(): - """ - Verifies that PyOpenSSL's package-level dependencies have been met. - Throws `ImportError` if they are not met. - """ - # Method added in `cryptography==1.1`; not available in older versions - from cryptography.x509.extensions import Extensions - if getattr(Extensions, "get_extension_for_class", None) is None: - raise ImportError("'cryptography' module missing required functionality. " - "Try upgrading to v1.3.4 or newer.") - - # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 - # attribute is only present on those versions. - from OpenSSL.crypto import X509 - x509 = X509() - if getattr(x509, "_x509", None) is None: - raise ImportError("'pyOpenSSL' module missing required functionality. " - "Try upgrading to v0.14 or newer.") - - -def _dnsname_to_stdlib(name): - """ - Converts a dNSName SubjectAlternativeName field to the form used by the - standard library on the given Python version. - - Cryptography produces a dNSName as a unicode string that was idna-decoded - from ASCII bytes. We need to idna-encode that string to get it back, and - then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib - uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). - - If the name cannot be idna-encoded then we return None signalling that - the name given should be skipped. - """ - def idna_encode(name): - """ - Borrowed wholesale from the Python Cryptography Project. It turns out - that we can't just safely call `idna.encode`: it can explode for - wildcard names. This avoids that problem. - """ - import idna - - try: - for prefix in [u'*.', u'.']: - if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) - return idna.encode(name) - except idna.core.IDNAError: - return None - - name = idna_encode(name) - if name is None: - return None - elif sys.version_info >= (3, 0): - name = name.decode('utf-8') - return name - - -def get_subj_alt_name(peer_cert): - """ - Given an PyOpenSSL certificate, provides all the subject alternative names. - """ - # Pass the cert to cryptography, which has much better APIs for this. - if hasattr(peer_cert, "to_cryptography"): - cert = peer_cert.to_cryptography() - else: - # This is technically using private APIs, but should work across all - # relevant versions before PyOpenSSL got a proper API for this. - cert = _Certificate(openssl_backend, peer_cert._x509) - - # We want to find the SAN extension. Ask Cryptography to locate it (it's - # faster than looping in Python) - try: - ext = cert.extensions.get_extension_for_class( - x509.SubjectAlternativeName - ).value - except x509.ExtensionNotFound: - # No such extension, return the empty list. - return [] - except (x509.DuplicateExtension, UnsupportedExtension, - x509.UnsupportedGeneralNameType, UnicodeError) as e: - # A problem has been found with the quality of the certificate. Assume - # no SAN field is present. - log.warning( - "A problem was encountered with the certificate that prevented " - "urllib3 from finding the SubjectAlternativeName field. This can " - "affect certificate validation. The error was %s", - e, - ) - return [] - - # We want to return dNSName and iPAddress fields. We need to cast the IPs - # back to strings because the match_hostname function wants them as - # strings. 
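_dnsname_to_stdlib above strips a '*.' or '.' prefix before calling idna.encode, because a bare idna.encode rejects wildcard labels. The trick restated standalone, assuming the idna package (the sample name is our own)::

    import idna

    def idna_encode(name):
        # Same prefix-stripping as _dnsname_to_stdlib above.
        for prefix in (u'*.', u'.'):
            if name.startswith(prefix):
                name = name[len(prefix):]
                return prefix.encode('ascii') + idna.encode(name)
        return idna.encode(name)

    assert idna_encode(u'*.b\u00fccher.example') == b'*.xn--bcher-kva.example'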
- # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 - # decoded. This is pretty frustrating, but that's what the standard library - # does with certificates, and so we need to attempt to do the same. - # We also want to skip over names which cannot be idna encoded. - names = [ - ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) - if name is not None - ] - names.extend( - ('IP Address', str(name)) - for name in ext.get_values_for_type(x509.IPAddress) - ) - - return names - - -class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class. - - Note: _makefile_refs, _drop() and _reuse() are needed for the garbage - collector of pypy. - ''' - - def __init__(self, connection, socket, suppress_ragged_eofs=True): - self.connection = connection - self.socket = socket - self.suppress_ragged_eofs = suppress_ragged_eofs - self._makefile_refs = 0 - self._closed = False - - def fileno(self): - return self.socket.fileno() - - # Copy-pasted from Python 3.5 source code - def _decref_socketios(self): - if self._makefile_refs > 0: - self._makefile_refs -= 1 - if self._closed: - self.close() - - def recv(self, *args, **kwargs): - try: - data = self.connection.recv(*args, **kwargs) - except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return b'' - else: - raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: - if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return b'' - else: - raise - except OpenSSL.SSL.WantReadError: - if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') - else: - return self.recv(*args, **kwargs) - else: - return data - - def recv_into(self, *args, **kwargs): - try: - return self.connection.recv_into(*args, **kwargs) - except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return 0 - else: - raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: - if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return 0 - else: - raise - except OpenSSL.SSL.WantReadError: - if not util.wait_for_read(self.socket, self.socket.gettimeout()): - raise timeout('The read operation timed out') - else: - return self.recv_into(*args, **kwargs) - - def settimeout(self, timeout): - return self.socket.settimeout(timeout) - - def _send_until_done(self, data): - while True: - try: - return self.connection.send(data) - except OpenSSL.SSL.WantWriteError: - if not util.wait_for_write(self.socket, self.socket.gettimeout()): - raise timeout() - continue - except OpenSSL.SSL.SysCallError as e: - raise SocketError(str(e)) - - def sendall(self, data): - total_sent = 0 - while total_sent < len(data): - sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) - total_sent += sent - - def shutdown(self): - # FIXME rethrow compatible exceptions should we ever use this - self.connection.shutdown() - - def close(self): - if self._makefile_refs < 1: - try: - self._closed = True - return self.connection.close() - except OpenSSL.SSL.Error: - return - else: - self._makefile_refs -= 1 - - def getpeercert(self, binary_form=False): - x509 = self.connection.get_peer_certificate() - - if not x509: - return x509 - - if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) - - return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - 
            ),
-            'subjectAltName': get_subj_alt_name(x509)
-        }
-
-    def _reuse(self):
-        self._makefile_refs += 1
-
-    def _drop(self):
-        if self._makefile_refs < 1:
-            self.close()
-        else:
-            self._makefile_refs -= 1
-
-
-if _fileobject:  # Platform-specific: Python 2
-    def makefile(self, mode, bufsize=-1):
-        self._makefile_refs += 1
-        return _fileobject(self, mode, bufsize, close=True)
-else:  # Platform-specific: Python 3
-    makefile = backport_makefile
-
-WrappedSocket.makefile = makefile
-
-
-class PyOpenSSLContext(object):
-    """
-    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
-    for translating the interface of the standard library ``SSLContext`` object
-    to calls into PyOpenSSL.
-    """
-    def __init__(self, protocol):
-        self.protocol = _openssl_versions[protocol]
-        self._ctx = OpenSSL.SSL.Context(self.protocol)
-        self._options = 0
-        self.check_hostname = False
-
-    @property
-    def options(self):
-        return self._options
-
-    @options.setter
-    def options(self, value):
-        self._options = value
-        self._ctx.set_options(value)
-
-    @property
-    def verify_mode(self):
-        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
-
-    @verify_mode.setter
-    def verify_mode(self, value):
-        self._ctx.set_verify(
-            _stdlib_to_openssl_verify[value],
-            _verify_callback
-        )
-
-    def set_default_verify_paths(self):
-        self._ctx.set_default_verify_paths()
-
-    def set_ciphers(self, ciphers):
-        if isinstance(ciphers, six.text_type):
-            ciphers = ciphers.encode('utf-8')
-        self._ctx.set_cipher_list(ciphers)
-
-    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
-        if cafile is not None:
-            cafile = cafile.encode('utf-8')
-        if capath is not None:
-            capath = capath.encode('utf-8')
-        self._ctx.load_verify_locations(cafile, capath)
-        if cadata is not None:
-            self._ctx.load_verify_locations(BytesIO(cadata))
-
-    def load_cert_chain(self, certfile, keyfile=None, password=None):
-        self._ctx.use_certificate_chain_file(certfile)
-        if password is not None:
-            self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password)
-        self._ctx.use_privatekey_file(keyfile or certfile)
-
-    def wrap_socket(self, sock, server_side=False,
-                    do_handshake_on_connect=True, suppress_ragged_eofs=True,
-                    server_hostname=None):
-        cnx = OpenSSL.SSL.Connection(self._ctx, sock)
-
-        if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
-            server_hostname = server_hostname.encode('utf-8')
-
-        if server_hostname is not None:
-            cnx.set_tlsext_host_name(server_hostname)
-
-        cnx.set_connect_state()
-
-        while True:
-            try:
-                cnx.do_handshake()
-            except OpenSSL.SSL.WantReadError:
-                if not util.wait_for_read(sock, sock.gettimeout()):
-                    raise timeout('select timed out')
-                continue
-            except OpenSSL.SSL.Error as e:
-                raise ssl.SSLError('bad handshake: %r' % e)
-            break
-
-        return WrappedSocket(cnx, sock)
-
-
-def _verify_callback(cnx, x509, err_no, err_depth, return_code):
-    return err_no == 0
diff --git a/pype/vendor/urllib3/contrib/securetransport.py b/pype/vendor/urllib3/contrib/securetransport.py
deleted file mode 100644
index 77cb59ed71..0000000000
--- a/pype/vendor/urllib3/contrib/securetransport.py
+++ /dev/null
@@ -1,804 +0,0 @@
-"""
-SecureTransport support for urllib3 via ctypes.
-
-This makes platform-native TLS available to urllib3 users on macOS without the
-use of a compiler. This is an important feature because the Python Package
-Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
-that ships with macOS is not capable of doing TLSv1.2.
The only way to resolve -this is to give macOS users an alternative solution to the problem, and that -solution is to use SecureTransport. - -We use ctypes here because this solution must not require a compiler. That's -because pip is not allowed to require a compiler either. - -This is not intended to be a seriously long-term solution to this problem. -The hope is that PEP 543 will eventually solve this issue for us, at which -point we can retire this contrib module. But in the short term, we need to -solve the impending tire fire that is Python on Mac without this kind of -contrib module. So...here we are. - -To use this module, simply import and inject it:: - - import urllib3.contrib.securetransport - urllib3.contrib.securetransport.inject_into_urllib3() - -Happy TLSing! -""" -from __future__ import absolute_import - -import contextlib -import ctypes -import errno -import os.path -import shutil -import socket -import ssl -import threading -import weakref - -from .. import util -from ._securetransport.bindings import ( - Security, SecurityConst, CoreFoundation -) -from ._securetransport.low_level import ( - _assert_no_error, _cert_array_from_pem, _temporary_keychain, - _load_client_cert_chain -) - -try: # Platform-specific: Python 2 - from socket import _fileobject -except ImportError: # Platform-specific: Python 3 - _fileobject = None - from ..packages.backports.makefile import backport_makefile - -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] - -# SNI always works -HAS_SNI = True - -orig_util_HAS_SNI = util.HAS_SNI -orig_util_SSLContext = util.ssl_.SSLContext - -# This dictionary is used by the read callback to obtain a handle to the -# calling wrapped socket. This is a pretty silly approach, but for now it'll -# do. I feel like I should be able to smuggle a handle to the wrapped socket -# directly in the SSLConnectionRef, but for now this approach will work I -# guess. -# -# We need to lock around this structure for inserts, but we don't do it for -# reads/writes in the callbacks. The reasoning here goes as follows: -# -# 1. It is not possible to call into the callbacks before the dictionary is -# populated, so once in the callback the id must be in the dictionary. -# 2. The callbacks don't mutate the dictionary, they only read from it, and -# so cannot conflict with any of the insertions. -# -# This is good: if we had to lock in the callbacks we'd drastically slow down -# the performance of this code. -_connection_refs = weakref.WeakValueDictionary() -_connection_ref_lock = threading.Lock() - -# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over -# for no better reason than we need *a* limit, and this one is right there. -SSL_WRITE_BLOCKSIZE = 16384 - -# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to -# individual cipher suites. We need to do this because this is how -# SecureTransport wants them. 
-CIPHER_SUITES = [ - SecurityConst.TLS_AES_256_GCM_SHA384, - SecurityConst.TLS_CHACHA20_POLY1305_SHA256, - SecurityConst.TLS_AES_128_GCM_SHA256, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, -] - -# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of -# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. -_protocol_to_min_max = { - ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), -} - -if hasattr(ssl, "PROTOCOL_SSLv2"): - _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( - SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 - ) -if hasattr(ssl, "PROTOCOL_SSLv3"): - _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( - SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 - ) -if hasattr(ssl, "PROTOCOL_TLSv1"): - _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( - SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 - ) -if hasattr(ssl, "PROTOCOL_TLSv1_1"): - _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( - SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 - ) -if hasattr(ssl, "PROTOCOL_TLSv1_2"): - _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( - SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 - ) -if hasattr(ssl, "PROTOCOL_TLS"): - _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] - - -def inject_into_urllib3(): - """ - Monkey-patch urllib3 with SecureTransport-backed SSL-support. - """ - util.ssl_.SSLContext = SecureTransportContext - util.HAS_SNI = HAS_SNI - util.ssl_.HAS_SNI = HAS_SNI - util.IS_SECURETRANSPORT = True - util.ssl_.IS_SECURETRANSPORT = True - - -def extract_from_urllib3(): - """ - Undo monkey-patching by :func:`inject_into_urllib3`. - """ - util.ssl_.SSLContext = orig_util_SSLContext - util.HAS_SNI = orig_util_HAS_SNI - util.ssl_.HAS_SNI = orig_util_HAS_SNI - util.IS_SECURETRANSPORT = False - util.ssl_.IS_SECURETRANSPORT = False - - -def _read_callback(connection_id, data_buffer, data_length_pointer): - """ - SecureTransport read callback. 
This is called by ST to request that data - be returned from the socket. - """ - wrapped_socket = None - try: - wrapped_socket = _connection_refs.get(connection_id) - if wrapped_socket is None: - return SecurityConst.errSSLInternal - base_socket = wrapped_socket.socket - - requested_length = data_length_pointer[0] - - timeout = wrapped_socket.gettimeout() - error = None - read_count = 0 - - try: - while read_count < requested_length: - if timeout is None or timeout >= 0: - if not util.wait_for_read(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') - - remaining = requested_length - read_count - buffer = (ctypes.c_char * remaining).from_address( - data_buffer + read_count - ) - chunk_size = base_socket.recv_into(buffer, remaining) - read_count += chunk_size - if not chunk_size: - if not read_count: - return SecurityConst.errSSLClosedGraceful - break - except (socket.error) as e: - error = e.errno - - if error is not None and error != errno.EAGAIN: - data_length_pointer[0] = read_count - if error == errno.ECONNRESET or error == errno.EPIPE: - return SecurityConst.errSSLClosedAbort - raise - - data_length_pointer[0] = read_count - - if read_count != requested_length: - return SecurityConst.errSSLWouldBlock - - return 0 - except Exception as e: - if wrapped_socket is not None: - wrapped_socket._exception = e - return SecurityConst.errSSLInternal - - -def _write_callback(connection_id, data_buffer, data_length_pointer): - """ - SecureTransport write callback. This is called by ST to request that data - actually be sent on the network. - """ - wrapped_socket = None - try: - wrapped_socket = _connection_refs.get(connection_id) - if wrapped_socket is None: - return SecurityConst.errSSLInternal - base_socket = wrapped_socket.socket - - bytes_to_write = data_length_pointer[0] - data = ctypes.string_at(data_buffer, bytes_to_write) - - timeout = wrapped_socket.gettimeout() - error = None - sent = 0 - - try: - while sent < bytes_to_write: - if timeout is None or timeout >= 0: - if not util.wait_for_write(base_socket, timeout): - raise socket.error(errno.EAGAIN, 'timed out') - chunk_sent = base_socket.send(data) - sent += chunk_sent - - # This has some needless copying here, but I'm not sure there's - # much value in optimising this data path. - data = data[chunk_sent:] - except (socket.error) as e: - error = e.errno - - if error is not None and error != errno.EAGAIN: - data_length_pointer[0] = sent - if error == errno.ECONNRESET or error == errno.EPIPE: - return SecurityConst.errSSLClosedAbort - raise - - data_length_pointer[0] = sent - - if sent != bytes_to_write: - return SecurityConst.errSSLWouldBlock - - return 0 - except Exception as e: - if wrapped_socket is not None: - wrapped_socket._exception = e - return SecurityConst.errSSLInternal - - -# We need to keep these two objects references alive: if they get GC'd while -# in use then SecureTransport could attempt to call a function that is in freed -# memory. That would be...uh...bad. Yeah, that's the word. Bad. -_read_callback_pointer = Security.SSLReadFunc(_read_callback) -_write_callback_pointer = Security.SSLWriteFunc(_write_callback) - - -class WrappedSocket(object): - """ - API-compatibility wrapper for Python's OpenSSL wrapped socket object. - - Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage - collector of PyPy. 
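The read/write callbacks above find their socket through _connection_refs; the handle they receive is allocated in handshake() further below by probing id(self) % (2**31 - 1). The registry discipline in miniature (the dummy class is our own)::

    import threading
    import weakref

    _connection_refs = weakref.WeakValueDictionary()
    _connection_ref_lock = threading.Lock()

    class _Conn(object):
        pass

    conn = _Conn()
    # Insertions are locked; the C callbacks only ever read the dictionary.
    with _connection_ref_lock:
        handle = id(conn) % 2147483647
        while handle in _connection_refs:
            handle = (handle + 1) % 2147483647
        _connection_refs[handle] = conn

    assert _connection_refs.get(handle) is conn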
- """ - def __init__(self, socket): - self.socket = socket - self.context = None - self._makefile_refs = 0 - self._closed = False - self._exception = None - self._keychain = None - self._keychain_dir = None - self._client_cert_chain = None - - # We save off the previously-configured timeout and then set it to - # zero. This is done because we use select and friends to handle the - # timeouts, but if we leave the timeout set on the lower socket then - # Python will "kindly" call select on that socket again for us. Avoid - # that by forcing the timeout to zero. - self._timeout = self.socket.gettimeout() - self.socket.settimeout(0) - - @contextlib.contextmanager - def _raise_on_error(self): - """ - A context manager that can be used to wrap calls that do I/O from - SecureTransport. If any of the I/O callbacks hit an exception, this - context manager will correctly propagate the exception after the fact. - This avoids silently swallowing those exceptions. - - It also correctly forces the socket closed. - """ - self._exception = None - - # We explicitly don't catch around this yield because in the unlikely - # event that an exception was hit in the block we don't want to swallow - # it. - yield - if self._exception is not None: - exception, self._exception = self._exception, None - self.close() - raise exception - - def _set_ciphers(self): - """ - Sets up the allowed ciphers. By default this matches the set in - util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done - custom and doesn't allow changing at this time, mostly because parsing - OpenSSL cipher strings is going to be a freaking nightmare. - """ - ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES) - result = Security.SSLSetEnabledCiphers( - self.context, ciphers, len(CIPHER_SUITES) - ) - _assert_no_error(result) - - def _custom_validate(self, verify, trust_bundle): - """ - Called when we have set custom validation. We do this in two cases: - first, when cert validation is entirely disabled; and second, when - using a custom trust DB. - """ - # If we disabled cert validation, just say: cool. - if not verify: - return - - # We want data in memory, so load it up. - if os.path.isfile(trust_bundle): - with open(trust_bundle, 'rb') as f: - trust_bundle = f.read() - - cert_array = None - trust = Security.SecTrustRef() - - try: - # Get a CFArray that contains the certs we want. - cert_array = _cert_array_from_pem(trust_bundle) - - # Ok, now the hard part. We want to get the SecTrustRef that ST has - # created for this connection, shove our CAs into it, tell ST to - # ignore everything else it knows, and then ask if it can build a - # chain. This is a buuuunch of code. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) - _assert_no_error(result) - if not trust: - raise ssl.SSLError("Failed to copy trust reference") - - result = Security.SecTrustSetAnchorCertificates(trust, cert_array) - _assert_no_error(result) - - result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) - _assert_no_error(result) - - trust_result = Security.SecTrustResultType() - result = Security.SecTrustEvaluate( - trust, ctypes.byref(trust_result) - ) - _assert_no_error(result) - finally: - if trust: - CoreFoundation.CFRelease(trust) - - if cert_array is not None: - CoreFoundation.CFRelease(cert_array) - - # Ok, now we can look at what the result was. 
- successes = ( - SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed - ) - if trust_result.value not in successes: - raise ssl.SSLError( - "certificate verify failed, error code: %d" % - trust_result.value - ) - - def handshake(self, - server_hostname, - verify, - trust_bundle, - min_version, - max_version, - client_cert, - client_key, - client_key_passphrase): - """ - Actually performs the TLS handshake. This is run automatically by - wrapped socket, and shouldn't be needed in user code. - """ - # First, we do the initial bits of connection setup. We need to create - # a context, set its I/O funcs, and set the connection reference. - self.context = Security.SSLCreateContext( - None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType - ) - result = Security.SSLSetIOFuncs( - self.context, _read_callback_pointer, _write_callback_pointer - ) - _assert_no_error(result) - - # Here we need to compute the handle to use. We do this by taking the - # id of self modulo 2**31 - 1. If this is already in the dictionary, we - # just keep incrementing by one until we find a free space. - with _connection_ref_lock: - handle = id(self) % 2147483647 - while handle in _connection_refs: - handle = (handle + 1) % 2147483647 - _connection_refs[handle] = self - - result = Security.SSLSetConnection(self.context, handle) - _assert_no_error(result) - - # If we have a server hostname, we should set that too. - if server_hostname: - if not isinstance(server_hostname, bytes): - server_hostname = server_hostname.encode('utf-8') - - result = Security.SSLSetPeerDomainName( - self.context, server_hostname, len(server_hostname) - ) - _assert_no_error(result) - - # Setup the ciphers. - self._set_ciphers() - - # Set the minimum and maximum TLS versions. - result = Security.SSLSetProtocolVersionMin(self.context, min_version) - _assert_no_error(result) - result = Security.SSLSetProtocolVersionMax(self.context, max_version) - _assert_no_error(result) - - # If there's a trust DB, we need to use it. We do that by telling - # SecureTransport to break on server auth. We also do that if we don't - # want to validate the certs at all: we just won't actually do any - # authing in that case. - if not verify or trust_bundle is not None: - result = Security.SSLSetSessionOption( - self.context, - SecurityConst.kSSLSessionOptionBreakOnServerAuth, - True - ) - _assert_no_error(result) - - # If there's a client cert, we need to use it. - if client_cert: - self._keychain, self._keychain_dir = _temporary_keychain() - self._client_cert_chain = _load_client_cert_chain( - self._keychain, client_cert, client_key - ) - result = Security.SSLSetCertificate( - self.context, self._client_cert_chain - ) - _assert_no_error(result) - - while True: - with self._raise_on_error(): - result = Security.SSLHandshake(self.context) - - if result == SecurityConst.errSSLWouldBlock: - raise socket.timeout("handshake timed out") - elif result == SecurityConst.errSSLServerAuthCompleted: - self._custom_validate(verify, trust_bundle) - continue - else: - _assert_no_error(result) - break - - def fileno(self): - return self.socket.fileno() - - # Copy-pasted from Python 3.5 source code - def _decref_socketios(self): - if self._makefile_refs > 0: - self._makefile_refs -= 1 - if self._closed: - self.close() - - def recv(self, bufsiz): - buffer = ctypes.create_string_buffer(bufsiz) - bytes_read = self.recv_into(buffer, bufsiz) - data = buffer[:bytes_read] - return data - - def recv_into(self, buffer, nbytes=None): - # Read short on EOF. 
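The connection-handle scheme in handshake() above is easy to miss inside the ctypes noise: hash id(self) into a 31-bit integer, then linearly probe occupied slots under a lock. Isolated as a plain helper (allocate_handle and release_handle are hypothetical names), the same logic looks like this:

import threading

_refs = {}
_refs_lock = threading.Lock()

def allocate_handle(obj):
    """Map obj to a free handle below 2**31 by hashing id(obj) and
    linearly probing occupied slots; the table keeps obj alive."""
    with _refs_lock:
        handle = id(obj) % 2147483647
        while handle in _refs:
            handle = (handle + 1) % 2147483647
        _refs[handle] = obj
        return handle

def release_handle(handle):
    with _refs_lock:
        _refs.pop(handle, None)

class _Dummy(object):
    pass

h1 = allocate_handle(_Dummy())
h2 = allocate_handle(_Dummy())
print(h1, h2, h1 != h2)  # two live objects always get distinct handles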
- if self._closed: - return 0 - - if nbytes is None: - nbytes = len(buffer) - - buffer = (ctypes.c_char * nbytes).from_buffer(buffer) - processed_bytes = ctypes.c_size_t(0) - - with self._raise_on_error(): - result = Security.SSLRead( - self.context, buffer, nbytes, ctypes.byref(processed_bytes) - ) - - # There are some result codes that we want to treat as "not always - # errors". Specifically, those are errSSLWouldBlock, - # errSSLClosedGraceful, and errSSLClosedNoNotify. - if (result == SecurityConst.errSSLWouldBlock): - # If we didn't process any bytes, then this was just a time out. - # However, we can get errSSLWouldBlock in situations when we *did* - # read some data, and in those cases we should just read "short" - # and return. - if processed_bytes.value == 0: - # Timed out, no data read. - raise socket.timeout("recv timed out") - elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): - # The remote peer has closed this connection. We should do so as - # well. Note that we don't actually return here because in - # principle this could actually be fired along with return data. - # It's unlikely though. - self.close() - else: - _assert_no_error(result) - - # Ok, we read and probably succeeded. We should return whatever data - # was actually read. - return processed_bytes.value - - def settimeout(self, timeout): - self._timeout = timeout - - def gettimeout(self): - return self._timeout - - def send(self, data): - processed_bytes = ctypes.c_size_t(0) - - with self._raise_on_error(): - result = Security.SSLWrite( - self.context, data, len(data), ctypes.byref(processed_bytes) - ) - - if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: - # Timed out - raise socket.timeout("send timed out") - else: - _assert_no_error(result) - - # We sent, and probably succeeded. Tell them how much we sent. - return processed_bytes.value - - def sendall(self, data): - total_sent = 0 - while total_sent < len(data): - sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) - total_sent += sent - - def shutdown(self): - with self._raise_on_error(): - Security.SSLClose(self.context) - - def close(self): - # TODO: should I do clean shutdown here? Do I have to? - if self._makefile_refs < 1: - self._closed = True - if self.context: - CoreFoundation.CFRelease(self.context) - self.context = None - if self._client_cert_chain: - CoreFoundation.CFRelease(self._client_cert_chain) - self._client_cert_chain = None - if self._keychain: - Security.SecKeychainDelete(self._keychain) - CoreFoundation.CFRelease(self._keychain) - shutil.rmtree(self._keychain_dir) - self._keychain = self._keychain_dir = None - return self.socket.close() - else: - self._makefile_refs -= 1 - - def getpeercert(self, binary_form=False): - # Urgh, annoying. - # - # Here's how we do this: - # - # 1. Call SSLCopyPeerTrust to get hold of the trust object for this - # connection. - # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. - # 3. To get the CN, call SecCertificateCopyCommonName and process that - # string so that it's of the appropriate type. - # 4. To get the SAN, we need to do something a bit more complex: - # a. Call SecCertificateCopyValues to get the data, requesting - # kSecOIDSubjectAltName. - # b. Mess about with this dictionary to try to get the SANs out. - # - # This is gross. Really gross. It's going to be a few hundred LoC extra - # just to repeat something that SecureTransport can *already do*. 
So my - # operating assumption at this time is that what we want to do is - # instead to just flag to urllib3 that it shouldn't do its own hostname - # validation when using SecureTransport. - if not binary_form: - raise ValueError( - "SecureTransport only supports dumping binary certs" - ) - trust = Security.SecTrustRef() - certdata = None - der_bytes = None - - try: - # Grab the trust store. - result = Security.SSLCopyPeerTrust( - self.context, ctypes.byref(trust) - ) - _assert_no_error(result) - if not trust: - # Probably we haven't done the handshake yet. No biggie. - return None - - cert_count = Security.SecTrustGetCertificateCount(trust) - if not cert_count: - # Also a case that might happen if we haven't handshaked. - # Handshook? Handshaken? - return None - - leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) - assert leaf - - # Ok, now we want the DER bytes. - certdata = Security.SecCertificateCopyData(leaf) - assert certdata - - data_length = CoreFoundation.CFDataGetLength(certdata) - data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) - der_bytes = ctypes.string_at(data_buffer, data_length) - finally: - if certdata: - CoreFoundation.CFRelease(certdata) - if trust: - CoreFoundation.CFRelease(trust) - - return der_bytes - - def _reuse(self): - self._makefile_refs += 1 - - def _drop(self): - if self._makefile_refs < 1: - self.close() - else: - self._makefile_refs -= 1 - - -if _fileobject: # Platform-specific: Python 2 - def makefile(self, mode, bufsize=-1): - self._makefile_refs += 1 - return _fileobject(self, mode, bufsize, close=True) -else: # Platform-specific: Python 3 - def makefile(self, mode="r", buffering=None, *args, **kwargs): - # We disable buffering with SecureTransport because it conflicts with - # the buffering that ST does internally (see issue #1153 for more). - buffering = 0 - return backport_makefile(self, mode, buffering, *args, **kwargs) - -WrappedSocket.makefile = makefile - - -class SecureTransportContext(object): - """ - I am a wrapper class for the SecureTransport library, to translate the - interface of the standard library ``SSLContext`` object to calls into - SecureTransport. - """ - def __init__(self, protocol): - self._min_version, self._max_version = _protocol_to_min_max[protocol] - self._options = 0 - self._verify = False - self._trust_bundle = None - self._client_cert = None - self._client_key = None - self._client_key_passphrase = None - - @property - def check_hostname(self): - """ - SecureTransport cannot have its hostname checking disabled. For more, - see the comment on getpeercert() in this file. - """ - return True - - @check_hostname.setter - def check_hostname(self, value): - """ - SecureTransport cannot have its hostname checking disabled. For more, - see the comment on getpeercert() in this file. - """ - pass - - @property - def options(self): - # TODO: Well, crap. - # - # So this is the bit of the code that is the most likely to cause us - # trouble. Essentially we need to enumerate all of the SSL options that - # users might want to use and try to see if we can sensibly translate - # them, or whether we should just ignore them. - return self._options - - @options.setter - def options(self, value): - # TODO: Update in line with above. 
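getpeercert() here only ever returns DER bytes, and the standard library can turn DER into PEM without any SecureTransport involvement. A sketch using the stdlib ssl module against an assumed reachable host (network access required; example.org is an assumption):

import socket
import ssl

ctx = ssl.create_default_context()
with socket.create_connection(("example.org", 443)) as sock:
    with ctx.wrap_socket(sock, server_hostname="example.org") as tls:
        # binary_form=True yields the raw DER bytes, matching what the
        # SecureTransport wrapper above returns.
        der_bytes = tls.getpeercert(binary_form=True)

# The stdlib converts DER to PEM directly; no parsing needed.
pem = ssl.DER_cert_to_PEM_cert(der_bytes)
print(pem.splitlines()[0])  # -----BEGIN CERTIFICATE-----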
- self._options = value - - @property - def verify_mode(self): - return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE - - @verify_mode.setter - def verify_mode(self, value): - self._verify = True if value == ssl.CERT_REQUIRED else False - - def set_default_verify_paths(self): - # So, this has to do something a bit weird. Specifically, what it does - # is nothing. - # - # This means that, if we had previously had load_verify_locations - # called, this does not undo that. We need to do that because it turns - # out that the rest of the urllib3 code will attempt to load the - # default verify paths if it hasn't been told about any paths, even if - # the context itself was sometime earlier. We resolve that by just - # ignoring it. - pass - - def load_default_certs(self): - return self.set_default_verify_paths() - - def set_ciphers(self, ciphers): - # For now, we just require the default cipher string. - if ciphers != util.ssl_.DEFAULT_CIPHERS: - raise ValueError( - "SecureTransport doesn't support custom cipher strings" - ) - - def load_verify_locations(self, cafile=None, capath=None, cadata=None): - # OK, we only really support cadata and cafile. - if capath is not None: - raise ValueError( - "SecureTransport does not support cert directories" - ) - - self._trust_bundle = cafile or cadata - - def load_cert_chain(self, certfile, keyfile=None, password=None): - self._client_cert = certfile - self._client_key = keyfile - self._client_cert_passphrase = password - - def wrap_socket(self, sock, server_side=False, - do_handshake_on_connect=True, suppress_ragged_eofs=True, - server_hostname=None): - # So, what do we do here? Firstly, we assert some properties. This is a - # stripped down shim, so there is some functionality we don't support. - # See PEP 543 for the real deal. - assert not server_side - assert do_handshake_on_connect - assert suppress_ragged_eofs - - # Ok, we're good to go. Now we want to create the wrapped socket object - # and store it in the appropriate place. - wrapped_socket = WrappedSocket(sock) - - # Now we can handshake - wrapped_socket.handshake( - server_hostname, self._verify, self._trust_bundle, - self._min_version, self._max_version, self._client_cert, - self._client_key, self._client_key_passphrase - ) - return wrapped_socket diff --git a/pype/vendor/urllib3/contrib/socks.py b/pype/vendor/urllib3/contrib/socks.py deleted file mode 100644 index 811e312ec8..0000000000 --- a/pype/vendor/urllib3/contrib/socks.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module contains provisional support for SOCKS proxies from within -urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and -SOCKS5. To enable its functionality, either install PySocks or install this -module with the ``socks`` extra. - -The SOCKS implementation supports the full range of urllib3 features. It also -supports the following SOCKS features: - -- SOCKS4 -- SOCKS4a -- SOCKS5 -- Usernames and passwords for the SOCKS proxy - -Known Limitations: - -- Currently PySocks does not support contacting remote websites via literal - IPv6 addresses. Any such connection attempt will fail. You must use a domain - name. -- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any - such connection attempt will fail. 
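For the SOCKS module being removed here, the public entry point is SOCKSProxyManager. Typical usage, assuming PySocks is installed and a SOCKS5 proxy is listening at the address shown (the address is an assumption):

from urllib3.contrib.socks import SOCKSProxyManager

# socks5h:// asks the proxy to resolve hostnames (rdns=True), which matters
# for Tor-style setups; socks5:// resolves them locally instead.
proxy = SOCKSProxyManager("socks5h://127.0.0.1:9050/")
resp = proxy.request("GET", "http://example.org/")
print(resp.status)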
-""" -from __future__ import absolute_import - -try: - import socks -except ImportError: - import warnings - from ..exceptions import DependencyWarning - - warnings.warn(( - 'SOCKS support in urllib3 requires the installation of optional ' - 'dependencies: specifically, PySocks. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' - ), - DependencyWarning - ) - raise - -from socket import error as SocketError, timeout as SocketTimeout - -from ..connection import ( - HTTPConnection, HTTPSConnection -) -from ..connectionpool import ( - HTTPConnectionPool, HTTPSConnectionPool -) -from ..exceptions import ConnectTimeoutError, NewConnectionError -from ..poolmanager import PoolManager -from ..util.url import parse_url - -try: - import ssl -except ImportError: - ssl = None - - -class SOCKSConnection(HTTPConnection): - """ - A plain-text HTTP connection that connects via a SOCKS proxy. - """ - def __init__(self, *args, **kwargs): - self._socks_options = kwargs.pop('_socks_options') - super(SOCKSConnection, self).__init__(*args, **kwargs) - - def _new_conn(self): - """ - Establish a new connection via the SOCKS proxy. - """ - extra_kw = {} - if self.source_address: - extra_kw['source_address'] = self.source_address - - if self.socket_options: - extra_kw['socket_options'] = self.socket_options - - try: - conn = socks.create_connection( - (self.host, self.port), - proxy_type=self._socks_options['socks_version'], - proxy_addr=self._socks_options['proxy_host'], - proxy_port=self._socks_options['proxy_port'], - proxy_username=self._socks_options['username'], - proxy_password=self._socks_options['password'], - proxy_rdns=self._socks_options['rdns'], - timeout=self.timeout, - **extra_kw - ) - - except SocketTimeout as e: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - except socks.ProxyError as e: - # This is fragile as hell, but it seems to be the only way to raise - # useful errors here. - if e.socket_err: - error = e.socket_err - if isinstance(error, SocketTimeout): - raise ConnectTimeoutError( - self, - "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout) - ) - else: - raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % error - ) - else: - raise NewConnectionError( - self, - "Failed to establish a new connection: %s" % e - ) - - except SocketError as e: # Defensive: PySocks should catch all these. - raise NewConnectionError( - self, "Failed to establish a new connection: %s" % e) - - return conn - - -# We don't need to duplicate the Verified/Unverified distinction from -# urllib3/connection.py here because the HTTPSConnection will already have been -# correctly set to either the Verified or Unverified form by that module. This -# means the SOCKSHTTPSConnection will automatically be the correct type. -class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): - pass - - -class SOCKSHTTPConnectionPool(HTTPConnectionPool): - ConnectionCls = SOCKSConnection - - -class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): - ConnectionCls = SOCKSHTTPSConnection - - -class SOCKSProxyManager(PoolManager): - """ - A version of the urllib3 ProxyManager that routes connections via the - defined SOCKS proxy. 
- """ - pool_classes_by_scheme = { - 'http': SOCKSHTTPConnectionPool, - 'https': SOCKSHTTPSConnectionPool, - } - - def __init__(self, proxy_url, username=None, password=None, - num_pools=10, headers=None, **connection_pool_kw): - parsed = parse_url(proxy_url) - - if username is None and password is None and parsed.auth is not None: - split = parsed.auth.split(':') - if len(split) == 2: - username, password = split - if parsed.scheme == 'socks5': - socks_version = socks.PROXY_TYPE_SOCKS5 - rdns = False - elif parsed.scheme == 'socks5h': - socks_version = socks.PROXY_TYPE_SOCKS5 - rdns = True - elif parsed.scheme == 'socks4': - socks_version = socks.PROXY_TYPE_SOCKS4 - rdns = False - elif parsed.scheme == 'socks4a': - socks_version = socks.PROXY_TYPE_SOCKS4 - rdns = True - else: - raise ValueError( - "Unable to determine SOCKS version from %s" % proxy_url - ) - - self.proxy_url = proxy_url - - socks_options = { - 'socks_version': socks_version, - 'proxy_host': parsed.host, - 'proxy_port': parsed.port, - 'username': username, - 'password': password, - 'rdns': rdns - } - connection_pool_kw['_socks_options'] = socks_options - - super(SOCKSProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw - ) - - self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/pype/vendor/urllib3/exceptions.py b/pype/vendor/urllib3/exceptions.py deleted file mode 100644 index 7bbaa9871f..0000000000 --- a/pype/vendor/urllib3/exceptions.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import absolute_import -from .packages.six.moves.http_client import ( - IncompleteRead as httplib_IncompleteRead -) -# Base Exceptions - - -class HTTPError(Exception): - "Base exception used by this module." - pass - - -class HTTPWarning(Warning): - "Base warning used by this module." - pass - - -class PoolError(HTTPError): - "Base exception for errors caused within a pool." - def __init__(self, pool, message): - self.pool = pool - HTTPError.__init__(self, "%s: %s" % (pool, message)) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, None) - - -class RequestError(PoolError): - "Base exception for PoolErrors that have associated URLs." - def __init__(self, pool, url, message): - self.url = url - PoolError.__init__(self, pool, message) - - def __reduce__(self): - # For pickling purposes. - return self.__class__, (None, self.url, None) - - -class SSLError(HTTPError): - "Raised when SSL certificate fails in an HTTPS connection." - pass - - -class ProxyError(HTTPError): - "Raised when the connection to a proxy fails." - pass - - -class DecodeError(HTTPError): - "Raised when automatic decoding based on Content-Type fails." - pass - - -class ProtocolError(HTTPError): - "Raised when something unexpected happens mid-request/response." - pass - - -#: Renamed to ProtocolError but aliased for backwards compatibility. -ConnectionError = ProtocolError - - -# Leaf Exceptions - -class MaxRetryError(RequestError): - """Raised when the maximum number of retries is exceeded. 
- - :param pool: The connection pool - :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` - :param string url: The requested Url - :param exceptions.Exception reason: The underlying error - - """ - - def __init__(self, pool, url, reason=None): - self.reason = reason - - message = "Max retries exceeded with url: %s (Caused by %r)" % ( - url, reason) - - RequestError.__init__(self, pool, url, message) - - -class HostChangedError(RequestError): - "Raised when an existing pool gets a request for a foreign host." - - def __init__(self, pool, url, retries=3): - message = "Tried to open a foreign host with url: %s" % url - RequestError.__init__(self, pool, url, message) - self.retries = retries - - -class TimeoutStateError(HTTPError): - """ Raised when passing an invalid state to a timeout """ - pass - - -class TimeoutError(HTTPError): - """ Raised when a socket timeout error occurs. - - Catching this error will catch both :exc:`ReadTimeoutErrors - ` and :exc:`ConnectTimeoutErrors `. - """ - pass - - -class ReadTimeoutError(TimeoutError, RequestError): - "Raised when a socket timeout occurs while receiving data from a server" - pass - - -# This timeout error does not have a URL attached and needs to inherit from the -# base HTTPError -class ConnectTimeoutError(TimeoutError): - "Raised when a socket timeout occurs while connecting to a server" - pass - - -class NewConnectionError(ConnectTimeoutError, PoolError): - "Raised when we fail to establish a new connection. Usually ECONNREFUSED." - pass - - -class EmptyPoolError(PoolError): - "Raised when a pool runs out of connections and no more are allowed." - pass - - -class ClosedPoolError(PoolError): - "Raised when a request enters a pool after the pool has been closed." - pass - - -class LocationValueError(ValueError, HTTPError): - "Raised when there is something wrong with a given URL input." - pass - - -class LocationParseError(LocationValueError): - "Raised when get_host or similar fails to parse the URL input." - - def __init__(self, location): - message = "Failed to parse: %s" % location - HTTPError.__init__(self, message) - - self.location = location - - -class ResponseError(HTTPError): - "Used as a container for an error reason supplied in a MaxRetryError." - GENERIC_ERROR = 'too many error responses' - SPECIFIC_ERROR = 'too many {status_code} error responses' - - -class SecurityWarning(HTTPWarning): - "Warned when performing security reducing actions" - pass - - -class SubjectAltNameWarning(SecurityWarning): - "Warned when connecting to a host with a certificate missing a SAN." - pass - - -class InsecureRequestWarning(SecurityWarning): - "Warned when making an unverified HTTPS request." - pass - - -class SystemTimeWarning(SecurityWarning): - "Warned when system time is suspected to be wrong" - pass - - -class InsecurePlatformWarning(SecurityWarning): - "Warned when certain SSL configuration is not available on a platform." - pass - - -class SNIMissingWarning(HTTPWarning): - "Warned when making a HTTPS request without SNI available." - pass - - -class DependencyWarning(HTTPWarning): - """ - Warned when an attempt is made to import a module with missing optional - dependencies. - """ - pass - - -class ResponseNotChunked(ProtocolError, ValueError): - "Response needs to be chunked in order to read it as chunks." - pass - - -class BodyNotHttplibCompatible(HTTPError): - """ - Body should be httplib.HTTPResponse like (have an fp attribute which - returns raw chunks) for read_chunked(). 
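The warning subclasses above are ordinary Python warnings, so they can be silenced either with the stdlib filter machinery or with urllib3's disable_warnings() helper. A sketch (network access to example.org assumed):

import urllib3
from urllib3.exceptions import InsecureRequestWarning

# Equivalent to warnings.simplefilter("ignore", InsecureRequestWarning).
urllib3.disable_warnings(InsecureRequestWarning)

http = urllib3.PoolManager(cert_reqs="CERT_NONE")
resp = http.request("GET", "https://example.org/")  # no warning emitted
print(resp.status)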
- """ - pass - - -class IncompleteRead(HTTPError, httplib_IncompleteRead): - """ - Response length doesn't match expected Content-Length - - Subclass of http_client.IncompleteRead to allow int value - for `partial` to avoid creating large objects on streamed - reads. - """ - def __init__(self, partial, expected): - super(IncompleteRead, self).__init__(partial, expected) - - def __repr__(self): - return ('IncompleteRead(%i bytes read, ' - '%i more expected)' % (self.partial, self.expected)) - - -class InvalidHeader(HTTPError): - "The header provided was somehow invalid." - pass - - -class ProxySchemeUnknown(AssertionError, ValueError): - "ProxyManager does not support the supplied scheme" - # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. - - def __init__(self, scheme): - message = "Not supported proxy scheme %s" % scheme - super(ProxySchemeUnknown, self).__init__(message) - - -class HeaderParsingError(HTTPError): - "Raised by assert_header_parsing, but we convert it to a log.warning statement." - def __init__(self, defects, unparsed_data): - message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) - super(HeaderParsingError, self).__init__(message) - - -class UnrewindableBodyError(HTTPError): - "urllib3 encountered an error when trying to rewind a body" - pass diff --git a/pype/vendor/urllib3/fields.py b/pype/vendor/urllib3/fields.py deleted file mode 100644 index 37fe64a3e8..0000000000 --- a/pype/vendor/urllib3/fields.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import absolute_import -import email.utils -import mimetypes - -from .packages import six - - -def guess_content_type(filename, default='application/octet-stream'): - """ - Guess the "Content-Type" of a file. - - :param filename: - The filename to guess the "Content-Type" of using :mod:`mimetypes`. - :param default: - If no "Content-Type" can be guessed, default to `default`. - """ - if filename: - return mimetypes.guess_type(filename)[0] or default - return default - - -def format_header_param(name, value): - """ - Helper function to format and quote a single header parameter. - - Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2231, as - suggested by RFC 2388 Section 4.4. - - :param name: - The name of the parameter, a string expected to be ASCII only. - :param value: - The value of the parameter, provided as a unicode string. - """ - if not any(ch in value for ch in '"\\\r\n'): - result = '%s="%s"' % (name, value) - try: - result.encode('ascii') - except (UnicodeEncodeError, UnicodeDecodeError): - pass - else: - return result - if not six.PY3 and isinstance(value, six.text_type): # Python 2: - value = value.encode('utf-8') - value = email.utils.encode_rfc2231(value, 'utf-8') - value = '%s*=%s' % (name, value) - return value - - -class RequestField(object): - """ - A data container for request body parameters. - - :param name: - The name of this request field. - :param data: - The data/value body. - :param filename: - An optional filename of the request field. - :param headers: - An optional dict-like object of headers to initially use for the field. - """ - def __init__(self, name, data, filename=None, headers=None): - self._name = name - self._filename = filename - self.data = data - self.headers = {} - if headers: - self.headers = dict(headers) - - @classmethod - def from_tuples(cls, fieldname, value): - """ - A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. 
- - Supports constructing :class:`~urllib3.fields.RequestField` from - parameter of key/value strings AND key/filetuple. A filetuple is a - (filename, data, MIME type) tuple where the MIME type is optional. - For example:: - - 'foo': 'bar', - 'fakefile': ('foofile.txt', 'contents of foofile'), - 'realfile': ('barfile.txt', open('realfile').read()), - 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), - 'nonamefile': 'contents of nonamefile field', - - Field names and filenames must be unicode. - """ - if isinstance(value, tuple): - if len(value) == 3: - filename, data, content_type = value - else: - filename, data = value - content_type = guess_content_type(filename) - else: - filename = None - content_type = None - data = value - - request_param = cls(fieldname, data, filename=filename) - request_param.make_multipart(content_type=content_type) - - return request_param - - def _render_part(self, name, value): - """ - Overridable helper function to format a single header parameter. - - :param name: - The name of the parameter, a string expected to be ASCII only. - :param value: - The value of the parameter, provided as a unicode string. - """ - return format_header_param(name, value) - - def _render_parts(self, header_parts): - """ - Helper function to format and quote a single header. - - Useful for single headers that are composed of multiple items. E.g., - 'Content-Disposition' fields. - - :param header_parts: - A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format - as `k1="v1"; k2="v2"; ...`. - """ - parts = [] - iterable = header_parts - if isinstance(header_parts, dict): - iterable = header_parts.items() - - for name, value in iterable: - if value is not None: - parts.append(self._render_part(name, value)) - - return '; '.join(parts) - - def render_headers(self): - """ - Renders the headers for this request field. - """ - lines = [] - - sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] - for sort_key in sort_keys: - if self.headers.get(sort_key, False): - lines.append('%s: %s' % (sort_key, self.headers[sort_key])) - - for header_name, header_value in self.headers.items(): - if header_name not in sort_keys: - if header_value: - lines.append('%s: %s' % (header_name, header_value)) - - lines.append('\r\n') - return '\r\n'.join(lines) - - def make_multipart(self, content_disposition=None, content_type=None, - content_location=None): - """ - Makes this request field into a multipart request field. - - This method overrides "Content-Disposition", "Content-Type" and - "Content-Location" headers to the request parameter. - - :param content_type: - The 'Content-Type' of the request body. - :param content_location: - The 'Content-Location' of the request body. 
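The filepost helpers deleted after fields.py drive RequestField end to end; a sketch of the usual call, with a fixed boundary chosen only to make the output deterministic:

from urllib3.filepost import encode_multipart_formdata

fields = {
    "name": "pype",
    "attachment": ("hello.txt", b"hello world", "text/plain"),
}
body, content_type = encode_multipart_formdata(fields, boundary="deadbeef")
print(content_type)          # multipart/form-data; boundary=deadbeef
print(body.decode("utf-8"))  # rendered headers + payload for both fields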
- - """ - self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join([ - '', self._render_parts( - (('name', self._name), ('filename', self._filename)) - ) - ]) - self.headers['Content-Type'] = content_type - self.headers['Content-Location'] = content_location diff --git a/pype/vendor/urllib3/filepost.py b/pype/vendor/urllib3/filepost.py deleted file mode 100644 index 78f1e19b0e..0000000000 --- a/pype/vendor/urllib3/filepost.py +++ /dev/null @@ -1,98 +0,0 @@ -from __future__ import absolute_import -import binascii -import codecs -import os - -from io import BytesIO - -from .packages import six -from .packages.six import b -from .fields import RequestField - -writer = codecs.lookup('utf-8')[3] - - -def choose_boundary(): - """ - Our embarrassingly-simple replacement for mimetools.choose_boundary. - """ - boundary = binascii.hexlify(os.urandom(16)) - if six.PY3: - boundary = boundary.decode('ascii') - return boundary - - -def iter_field_objects(fields): - """ - Iterate over fields. - - Supports list of (k, v) tuples and dicts, and lists of - :class:`~urllib3.fields.RequestField`. - - """ - if isinstance(fields, dict): - i = six.iteritems(fields) - else: - i = iter(fields) - - for field in i: - if isinstance(field, RequestField): - yield field - else: - yield RequestField.from_tuples(*field) - - -def iter_fields(fields): - """ - .. deprecated:: 1.6 - - Iterate over fields. - - The addition of :class:`~urllib3.fields.RequestField` makes this function - obsolete. Instead, use :func:`iter_field_objects`, which returns - :class:`~urllib3.fields.RequestField` objects. - - Supports list of (k, v) tuples and dicts. - """ - if isinstance(fields, dict): - return ((k, v) for k, v in six.iteritems(fields)) - - return ((k, v) for k, v in fields) - - -def encode_multipart_formdata(fields, boundary=None): - """ - Encode a dictionary of ``fields`` using the multipart/form-data MIME format. - - :param fields: - Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). - - :param boundary: - If not specified, then a random boundary will be generated using - :func:`urllib3.filepost.choose_boundary`. - """ - body = BytesIO() - if boundary is None: - boundary = choose_boundary() - - for field in iter_field_objects(fields): - body.write(b('--%s\r\n' % (boundary))) - - writer(body).write(field.render_headers()) - data = field.data - - if isinstance(data, int): - data = str(data) # Backwards compatibility - - if isinstance(data, six.text_type): - writer(body).write(data) - else: - body.write(data) - - body.write(b'\r\n') - - body.write(b('--%s--\r\n' % (boundary))) - - content_type = str('multipart/form-data; boundary=%s' % boundary) - - return body.getvalue(), content_type diff --git a/pype/vendor/urllib3/packages/__init__.py b/pype/vendor/urllib3/packages/__init__.py deleted file mode 100644 index 170e974c15..0000000000 --- a/pype/vendor/urllib3/packages/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import absolute_import - -from . 
import ssl_match_hostname - -__all__ = ('ssl_match_hostname', ) diff --git a/pype/vendor/urllib3/packages/backports/__init__.py b/pype/vendor/urllib3/packages/backports/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/urllib3/packages/backports/makefile.py b/pype/vendor/urllib3/packages/backports/makefile.py deleted file mode 100644 index 740db377d9..0000000000 --- a/pype/vendor/urllib3/packages/backports/makefile.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -backports.makefile -~~~~~~~~~~~~~~~~~~ - -Backports the Python 3 ``socket.makefile`` method for use with anything that -wants to create a "fake" socket object. -""" -import io - -from socket import SocketIO - - -def backport_makefile(self, mode="r", buffering=None, encoding=None, - errors=None, newline=None): - """ - Backport of ``socket.makefile`` from Python 3.5. - """ - if not set(mode) <= {"r", "w", "b"}: - raise ValueError( - "invalid mode %r (only r, w, b allowed)" % (mode,) - ) - writing = "w" in mode - reading = "r" in mode or not writing - assert reading or writing - binary = "b" in mode - rawmode = "" - if reading: - rawmode += "r" - if writing: - rawmode += "w" - raw = SocketIO(self, rawmode) - self._makefile_refs += 1 - if buffering is None: - buffering = -1 - if buffering < 0: - buffering = io.DEFAULT_BUFFER_SIZE - if buffering == 0: - if not binary: - raise ValueError("unbuffered streams must be binary") - return raw - if reading and writing: - buffer = io.BufferedRWPair(raw, raw, buffering) - elif reading: - buffer = io.BufferedReader(raw, buffering) - else: - assert writing - buffer = io.BufferedWriter(raw, buffering) - if binary: - return buffer - text = io.TextIOWrapper(buffer, encoding, errors, newline) - text.mode = mode - return text diff --git a/pype/vendor/urllib3/packages/six.py b/pype/vendor/urllib3/packages/six.py deleted file mode 100644 index 190c0239cd..0000000000 --- a/pype/vendor/urllib3/packages/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", 
"http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = 
_importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack 
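six's compatibility shims above (iteritems, b/u, the moves importer) are meant to be consumed like this; a short sketch that runs unchanged on Python 2 and 3:

import six
from six.moves import range  # xrange on Python 2, builtin range on Python 3
from six.moves.urllib.parse import urlparse

counts = {"a": 1, "b": 2}
for key, value in six.iteritems(counts):  # d.iteritems() vs d.items()
    print(key, value)

print(list(range(3)))  # [0, 1, 2]
print(urlparse("https://example.org/path").netloc)  # example.org
print(six.b("raw"), six.u("text"))  # bytes literal vs text literal on both majors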
- del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. 
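with_metaclass/add_metaclass above let one code base declare a metaclass without the incompatible Python 2 (__metaclass__) or Python 3 (metaclass=) syntax. A sketch of the decorator form, assuming six is importable:

import six

class Registry(type):
    classes = []
    def __new__(mcs, name, bases, namespace):
        cls = super(Registry, mcs).__new__(mcs, name, bases, namespace)
        mcs.classes.append(cls)
        return cls

@six.add_metaclass(Registry)
class Plugin(object):
    pass

assert type(Plugin) is Registry   # the metaclass was applied on both 2 and 3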
-__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/pype/vendor/urllib3/packages/ssl_match_hostname/__init__.py b/pype/vendor/urllib3/packages/ssl_match_hostname/__init__.py deleted file mode 100644 index d6594eb264..0000000000 --- a/pype/vendor/urllib3/packages/ssl_match_hostname/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys - -try: - # Our match_hostname function is the same as 3.5's, so we only want to - # import the match_hostname function if it's at least that good. - if sys.version_info < (3, 5): - raise ImportError("Fallback to vendored code") - - from ssl import CertificateError, match_hostname -except ImportError: - try: - # Backport of the function from a pypi module - from backports.ssl_match_hostname import CertificateError, match_hostname - except ImportError: - # Our vendored copy - from ._implementation import CertificateError, match_hostname - -# Not needed, but documenting what we provide. -__all__ = ('CertificateError', 'match_hostname') diff --git a/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py deleted file mode 100644 index d6e66c0196..0000000000 --- a/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ /dev/null @@ -1,156 +0,0 @@ -"""The match_hostname() function from Python 3.3.3, essential when using SSL.""" - -# Note: This file is under the PSF license as the code comes from the python -# stdlib. http://docs.python.org/3/license.html - -import re -import sys - -# ipaddress has been backported to 2.6+ in pypi. If it is installed on the -# system, use it to handle IPAddress ServerAltnames (this was added in -# python-3.5) otherwise only do DNS matching. This allows -# backports.ssl_match_hostname to continue to be used in Python 2.7. -try: - import ipaddress -except ImportError: - ipaddress = None - -__version__ = '3.5.0.1' - - -class CertificateError(ValueError): - pass - - -def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - # Ported from python3-syntax: - # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') - leftmost = parts[0] - remainder = parts[1:] - - wildcards = leftmost.count('*') - if wildcards > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. 
- raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. - # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - -def _to_unicode(obj): - if isinstance(obj, str) and sys.version_info < (3,): - obj = unicode(obj, encoding='ascii', errors='strict') - return obj - -def _ipaddress_match(ipname, host_ip): - """Exact matching of IP addresses. - - RFC 6125 explicitly doesn't define an algorithm for this - (section 1.7.2 - "Out of Scope"). - """ - # OpenSSL may add a trailing newline to a subjectAltName's IP address - # Divergence from upstream: ipaddress can't handle byte str - ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) - return ip == host_ip - - -def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate, match_hostname needs a " - "SSL socket or SSL context with either " - "CERT_OPTIONAL or CERT_REQUIRED") - try: - # Divergence from upstream: ipaddress can't handle byte str - host_ip = ipaddress.ip_address(_to_unicode(hostname)) - except ValueError: - # Not an IP address (common case) - host_ip = None - except UnicodeError: - # Divergence from upstream: Have to deal with ipaddress not taking - # byte strings. addresses should be all ascii, so we consider it not - # an ipaddress in this case - host_ip = None - except AttributeError: - # Divergence from upstream: Make ipaddress library optional - if ipaddress is None: - host_ip = None - else: - raise - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if host_ip is None and _dnsname_match(value, hostname): - return - dnsnames.append(value) - elif key == 'IP Address': - if host_ip is not None and _ipaddress_match(value, host_ip): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. 
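match_hostname() takes the dict shape produced by SSLSocket.getpeercert(). A behavioural sketch using the identically-specified stdlib version (present up to Python 3.11; deprecated since 3.7 and removed in 3.12):

from ssl import CertificateError, match_hostname

cert = {
    'subject': ((('commonName', 'example.com'),),),
    'subjectAltName': (('DNS', 'example.com'), ('DNS', '*.example.com')),
}

match_hostname(cert, 'www.example.com')   # wildcard SAN matches; returns None

try:
    match_hostname(cert, 'www.example.org')
except CertificateError as err:
    print(err)   # hostname doesn't match either of the listed names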
- if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") diff --git a/pype/vendor/urllib3/poolmanager.py b/pype/vendor/urllib3/poolmanager.py deleted file mode 100644 index fe5491cfda..0000000000 --- a/pype/vendor/urllib3/poolmanager.py +++ /dev/null @@ -1,450 +0,0 @@ -from __future__ import absolute_import -import collections -import functools -import logging - -from ._collections import RecentlyUsedContainer -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool -from .connectionpool import port_by_scheme -from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown -from .packages.six.moves.urllib.parse import urljoin -from .request import RequestMethods -from .util.url import parse_url -from .util.retry import Retry - - -__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] - - -log = logging.getLogger(__name__) - -SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', - 'ssl_version', 'ca_cert_dir', 'ssl_context') - -# All known keyword arguments that could be provided to the pool manager, its -# pools, or the underlying connections. This is used to construct a pool key. -_key_fields = ( - 'key_scheme', # str - 'key_host', # str - 'key_port', # int - 'key_timeout', # int or float or Timeout - 'key_retries', # int or Retry - 'key_strict', # bool - 'key_block', # bool - 'key_source_address', # str - 'key_key_file', # str - 'key_cert_file', # str - 'key_cert_reqs', # str - 'key_ca_certs', # str - 'key_ssl_version', # str - 'key_ca_cert_dir', # str - 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext - 'key_maxsize', # int - 'key_headers', # dict - 'key__proxy', # parsed proxy url - 'key__proxy_headers', # dict - 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples - 'key__socks_options', # dict - 'key_assert_hostname', # bool or string - 'key_assert_fingerprint', # str - 'key_server_hostname', #str -) - -#: The namedtuple class used to construct keys for the connection pool. -#: All custom key schemes should include the fields in this key at a minimum. -PoolKey = collections.namedtuple('PoolKey', _key_fields) - - -def _default_key_normalizer(key_class, request_context): - """ - Create a pool key out of a request context dictionary. - - According to RFC 3986, both the scheme and host are case-insensitive. - Therefore, this function normalizes both before constructing the pool - key for an HTTPS request. If you wish to change this behaviour, provide - alternate callables to ``key_fn_by_scheme``. - - :param key_class: - The class to use when constructing the key. This should be a namedtuple - with the ``scheme`` and ``host`` keys at a minimum. - :type key_class: namedtuple - :param request_context: - A dictionary-like object that contain the context for a request. - :type request_context: dict - - :return: A namedtuple that can be used as a connection pool key. 
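_default_key_normalizer reduces a request context to a hashable namedtuple so equivalent endpoints share one pool. A stripped-down sketch of the normalization (scheme and host lowercased per RFC 3986; only a subset of the real key fields):

import collections

PoolKey = collections.namedtuple('PoolKey', ('key_scheme', 'key_host', 'key_port'))

def normalize(request_context):
    context = request_context.copy()
    context['scheme'] = context['scheme'].lower()
    context['host'] = context['host'].lower()
    # map plain names onto the key_-prefixed namedtuple fields
    return PoolKey(**{'key_' + k: v for k, v in context.items()})

assert (normalize({'scheme': 'HTTP', 'host': 'Example.COM', 'port': 80})
        == normalize({'scheme': 'http', 'host': 'example.com', 'port': 80}))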
- :rtype: PoolKey - """ - # Since we mutate the dictionary, make a copy first - context = request_context.copy() - context['scheme'] = context['scheme'].lower() - context['host'] = context['host'].lower() - - # These are both dictionaries and need to be transformed into frozensets - for key in ('headers', '_proxy_headers', '_socks_options'): - if key in context and context[key] is not None: - context[key] = frozenset(context[key].items()) - - # The socket_options key may be a list and needs to be transformed into a - # tuple. - socket_opts = context.get('socket_options') - if socket_opts is not None: - context['socket_options'] = tuple(socket_opts) - - # Map the kwargs to the names in the namedtuple - this is necessary since - # namedtuples can't have fields starting with '_'. - for key in list(context.keys()): - context['key_' + key] = context.pop(key) - - # Default to ``None`` for keys missing from the context - for field in key_class._fields: - if field not in context: - context[field] = None - - return key_class(**context) - - -#: A dictionary that maps a scheme to a callable that creates a pool key. -#: This can be used to alter the way pool keys are constructed, if desired. -#: Each PoolManager makes a copy of this dictionary so they can be configured -#: globally here, or individually on the instance. -key_fn_by_scheme = { - 'http': functools.partial(_default_key_normalizer, PoolKey), - 'https': functools.partial(_default_key_normalizer, PoolKey), -} - -pool_classes_by_scheme = { - 'http': HTTPConnectionPool, - 'https': HTTPSConnectionPool, -} - - -class PoolManager(RequestMethods): - """ - Allows for arbitrary requests while transparently keeping track of - necessary connection pools for you. - - :param num_pools: - Number of connection pools to cache before discarding the least - recently used pool. - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - - :param \\**connection_pool_kw: - Additional parameters are used to create fresh - :class:`urllib3.connectionpool.ConnectionPool` instances. - - Example:: - - >>> manager = PoolManager(num_pools=2) - >>> r = manager.request('GET', 'http://google.com/') - >>> r = manager.request('GET', 'http://google.com/mail') - >>> r = manager.request('GET', 'http://yahoo.com/') - >>> len(manager.pools) - 2 - - """ - - proxy = None - - def __init__(self, num_pools=10, headers=None, **connection_pool_kw): - RequestMethods.__init__(self, headers) - self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, - dispose_func=lambda p: p.close()) - - # Locally set the pool classes and keys so other PoolManagers can - # override them. - self.pool_classes_by_scheme = pool_classes_by_scheme - self.key_fn_by_scheme = key_fn_by_scheme.copy() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.clear() - # Return False to re-raise any potential exceptions - return False - - def _new_pool(self, scheme, host, port, request_context=None): - """ - Create a new :class:`ConnectionPool` based on host, port, scheme, and - any additional pool keyword arguments. - - If ``request_context`` is provided, it is provided as keyword arguments - to the pool class used. This method is used to actually create the - connection pools handed out by :meth:`connection_from_url` and - companion methods. It is intended to be overridden for customization. 
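PoolManager implements the context-manager protocol shown above: __exit__ calls clear(), closing every cached pool. A usage sketch, assuming the upstream urllib3 package rather than this vendored copy:

import urllib3

with urllib3.PoolManager(num_pools=10) as http:
    r = http.request('GET', 'http://example.com/')
    print(r.status)
# leaving the block runs clear(), so pooled connections are closed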
- """ - pool_cls = self.pool_classes_by_scheme[scheme] - if request_context is None: - request_context = self.connection_pool_kw.copy() - - # Although the context has everything necessary to create the pool, - # this function has historically only used the scheme, host, and port - # in the positional args. When an API change is acceptable these can - # be removed. - for key in ('scheme', 'host', 'port'): - request_context.pop(key, None) - - if scheme == 'http': - for kw in SSL_KEYWORDS: - request_context.pop(kw, None) - - return pool_cls(host, port, **request_context) - - def clear(self): - """ - Empty our store of pools and direct them all to close. - - This will not affect in-flight connections, but they will not be - re-used after completion. - """ - self.pools.clear() - - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): - """ - Get a :class:`ConnectionPool` based on the host, port, and scheme. - - If ``port`` isn't given, it will be derived from the ``scheme`` using - ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is - provided, it is merged with the instance's ``connection_pool_kw`` - variable and used to create the new connection pool, if one is - needed. - """ - - if not host: - raise LocationValueError("No host specified.") - - request_context = self._merge_pool_kwargs(pool_kwargs) - request_context['scheme'] = scheme or 'http' - if not port: - port = port_by_scheme.get(request_context['scheme'].lower(), 80) - request_context['port'] = port - request_context['host'] = host - - return self.connection_from_context(request_context) - - def connection_from_context(self, request_context): - """ - Get a :class:`ConnectionPool` based on the request context. - - ``request_context`` must at least contain the ``scheme`` key and its - value must be a key in ``key_fn_by_scheme`` instance variable. - """ - scheme = request_context['scheme'].lower() - pool_key_constructor = self.key_fn_by_scheme[scheme] - pool_key = pool_key_constructor(request_context) - - return self.connection_from_pool_key(pool_key, request_context=request_context) - - def connection_from_pool_key(self, pool_key, request_context=None): - """ - Get a :class:`ConnectionPool` based on the provided pool key. - - ``pool_key`` should be a namedtuple that only contains immutable - objects. At a minimum it must have the ``scheme``, ``host``, and - ``port`` fields. - """ - with self.pools.lock: - # If the scheme, host, or port doesn't match existing open - # connections, open a new ConnectionPool. - pool = self.pools.get(pool_key) - if pool: - return pool - - # Make a fresh ConnectionPool of the desired type - scheme = request_context['scheme'] - host = request_context['host'] - port = request_context['port'] - pool = self._new_pool(scheme, host, port, request_context=request_context) - self.pools[pool_key] = pool - - return pool - - def connection_from_url(self, url, pool_kwargs=None): - """ - Similar to :func:`urllib3.connectionpool.connection_from_url`. - - If ``pool_kwargs`` is not provided and a new pool needs to be - constructed, ``self.connection_pool_kw`` is used to initialize - the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` - is provided, it is used instead. Note that if a new pool does not - need to be created for the request, the provided ``pool_kwargs`` are - not used. 
- """ - u = parse_url(url) - return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, - pool_kwargs=pool_kwargs) - - def _merge_pool_kwargs(self, override): - """ - Merge a dictionary of override values for self.connection_pool_kw. - - This does not modify self.connection_pool_kw and returns a new dict. - Any keys in the override dictionary with a value of ``None`` are - removed from the merged dictionary. - """ - base_pool_kwargs = self.connection_pool_kw.copy() - if override: - for key, value in override.items(): - if value is None: - try: - del base_pool_kwargs[key] - except KeyError: - pass - else: - base_pool_kwargs[key] = value - return base_pool_kwargs - - def urlopen(self, method, url, redirect=True, **kw): - """ - Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` - with custom cross-host redirect logic and only sends the request-uri - portion of the ``url``. - - The given ``url`` parameter must be absolute, such that an appropriate - :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. - """ - u = parse_url(url) - conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) - - kw['assert_same_host'] = False - kw['redirect'] = False - - if 'headers' not in kw: - kw['headers'] = self.headers.copy() - - if self.proxy is not None and u.scheme == "http": - response = conn.urlopen(method, url, **kw) - else: - response = conn.urlopen(method, u.request_uri, **kw) - - redirect_location = redirect and response.get_redirect_location() - if not redirect_location: - return response - - # Support relative URLs for redirecting. - redirect_location = urljoin(url, redirect_location) - - # RFC 7231, Section 6.4.4 - if response.status == 303: - method = 'GET' - - retries = kw.get('retries') - if not isinstance(retries, Retry): - retries = Retry.from_int(retries, redirect=redirect) - - # Strip headers marked as unsafe to forward to the redirected location. - # Check remove_headers_on_redirect to avoid a potential network call within - # conn.is_same_host() which may use socket.gethostbyname() in the future. - if (retries.remove_headers_on_redirect - and not conn.is_same_host(redirect_location)): - for header in retries.remove_headers_on_redirect: - kw['headers'].pop(header, None) - - try: - retries = retries.increment(method, url, response=response, _pool=conn) - except MaxRetryError: - if retries.raise_on_redirect: - raise - return response - - kw['retries'] = retries - kw['redirect'] = redirect - - log.info("Redirecting %s -> %s", url, redirect_location) - return self.urlopen(method, redirect_location, **kw) - - -class ProxyManager(PoolManager): - """ - Behaves just like :class:`PoolManager`, but sends all requests through - the defined proxy, using the CONNECT method for HTTPS URLs. - - :param proxy_url: - The URL of the proxy to be used. - - :param proxy_headers: - A dictionary containing headers that will be sent to the proxy. In case - of HTTP they are being sent with each request, while in the - HTTPS/CONNECT case they are sent only once. Could be used for proxy - authentication. 
- - Example: - >>> proxy = urllib3.ProxyManager('http://localhost:3128/') - >>> r1 = proxy.request('GET', 'http://google.com/') - >>> r2 = proxy.request('GET', 'http://httpbin.org/') - >>> len(proxy.pools) - 1 - >>> r3 = proxy.request('GET', 'https://httpbin.org/') - >>> r4 = proxy.request('GET', 'https://twitter.com/') - >>> len(proxy.pools) - 3 - - """ - - def __init__(self, proxy_url, num_pools=10, headers=None, - proxy_headers=None, **connection_pool_kw): - - if isinstance(proxy_url, HTTPConnectionPool): - proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, - proxy_url.port) - proxy = parse_url(proxy_url) - if not proxy.port: - port = port_by_scheme.get(proxy.scheme, 80) - proxy = proxy._replace(port=port) - - if proxy.scheme not in ("http", "https"): - raise ProxySchemeUnknown(proxy.scheme) - - self.proxy = proxy - self.proxy_headers = proxy_headers or {} - - connection_pool_kw['_proxy'] = self.proxy - connection_pool_kw['_proxy_headers'] = self.proxy_headers - - super(ProxyManager, self).__init__( - num_pools, headers, **connection_pool_kw) - - def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): - if scheme == "https": - return super(ProxyManager, self).connection_from_host( - host, port, scheme, pool_kwargs=pool_kwargs) - - return super(ProxyManager, self).connection_from_host( - self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) - - def _set_proxy_headers(self, url, headers=None): - """ - Sets headers needed by proxies: specifically, the Accept and Host - headers. Only sets headers not provided by the user. - """ - headers_ = {'Accept': '*/*'} - - netloc = parse_url(url).netloc - if netloc: - headers_['Host'] = netloc - - if headers: - headers_.update(headers) - return headers_ - - def urlopen(self, method, url, redirect=True, **kw): - "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." - u = parse_url(url) - - if u.scheme == "http": - # For proxied HTTPS requests, httplib sets the necessary headers - # on the CONNECT to the proxy. For HTTP, we'll definitely - # need to set 'Host' at the very least. - headers = kw.get('headers', self.headers) - kw['headers'] = self._set_proxy_headers(url, headers) - - return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) - - -def proxy_from_url(url, **kw): - return ProxyManager(proxy_url=url, **kw) diff --git a/pype/vendor/urllib3/request.py b/pype/vendor/urllib3/request.py deleted file mode 100644 index 8f2f44bb21..0000000000 --- a/pype/vendor/urllib3/request.py +++ /dev/null @@ -1,150 +0,0 @@ -from __future__ import absolute_import - -from .filepost import encode_multipart_formdata -from .packages.six.moves.urllib.parse import urlencode - - -__all__ = ['RequestMethods'] - - -class RequestMethods(object): - """ - Convenience mixin for classes who implement a :meth:`urlopen` method, such - as :class:`~urllib3.connectionpool.HTTPConnectionPool` and - :class:`~urllib3.poolmanager.PoolManager`. - - Provides behavior for making common types of HTTP request methods and - decides which type of request field encoding to use. - - Specifically, - - :meth:`.request_encode_url` is for sending requests whose fields are - encoded in the URL (such as GET, HEAD, DELETE). - - :meth:`.request_encode_body` is for sending requests whose fields are - encoded in the *body* of the request using multipart or www-form-urlencoded - (such as for POST, PUT, PATCH). 
- - :meth:`.request` is for making any kind of request, it will look up the - appropriate encoding format and use one of the above two methods to make - the request. - - Initializer parameters: - - :param headers: - Headers to include with all requests, unless other headers are given - explicitly. - """ - - _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'} - - def __init__(self, headers=None): - self.headers = headers or {} - - def urlopen(self, method, url, body=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **kw): # Abstract - raise NotImplementedError("Classes extending RequestMethods must implement " - "their own ``urlopen`` method.") - - def request(self, method, url, fields=None, headers=None, **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the appropriate encoding of - ``fields`` based on the ``method`` used. - - This is a convenience method that requires the least amount of manual - effort. It can be used in most situations, while still having the - option to drop down to more specific methods when necessary, such as - :meth:`request_encode_url`, :meth:`request_encode_body`, - or even the lowest level :meth:`urlopen`. - """ - method = method.upper() - - urlopen_kw['request_url'] = url - - if method in self._encode_url_methods: - return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) - else: - return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) - - def request_encode_url(self, method, url, fields=None, headers=None, - **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the ``fields`` encoded in - the url. This is useful for request methods like GET, HEAD, DELETE, etc. - """ - if headers is None: - headers = self.headers - - extra_kw = {'headers': headers} - extra_kw.update(urlopen_kw) - - if fields: - url += '?' + urlencode(fields) - - return self.urlopen(method, url, **extra_kw) - - def request_encode_body(self, method, url, fields=None, headers=None, - encode_multipart=True, multipart_boundary=None, - **urlopen_kw): - """ - Make a request using :meth:`urlopen` with the ``fields`` encoded in - the body. This is useful for request methods like POST, PUT, PATCH, etc. - - When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode - the payload with the appropriate content type. Otherwise - :meth:`urllib.urlencode` is used with the - 'application/x-www-form-urlencoded' content type. - - Multipart encoding must be used when posting files, and it's reasonably - safe to use it in other times too. However, it may break request - signing, such as with OAuth. - - Supports an optional ``fields`` parameter of key/value strings AND - key/filetuple. A filetuple is a (filename, data, MIME type) tuple where - the MIME type is optional. For example:: - - fields = { - 'foo': 'bar', - 'fakefile': ('foofile.txt', 'contents of foofile'), - 'realfile': ('barfile.txt', open('realfile').read()), - 'typedfile': ('bazfile.bin', open('bazfile').read(), - 'image/jpeg'), - 'nonamefile': 'contents of nonamefile field', - } - - When uploading a file, providing a filename (the first parameter of the - tuple) is optional but recommended to best mimic behavior of browsers. - - Note that if ``headers`` are supplied, the 'Content-Type' header will - be overwritten because it depends on the dynamic random boundary string - which is used to compose the body of the request. 
The random boundary - string can be explicitly set with the ``multipart_boundary`` parameter. - """ - if headers is None: - headers = self.headers - - extra_kw = {'headers': {}} - - if fields: - if 'body' in urlopen_kw: - raise TypeError( - "request got values for both 'fields' and 'body', can only specify one.") - - if encode_multipart: - body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) - else: - body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' - - extra_kw['body'] = body - extra_kw['headers'] = {'Content-Type': content_type} - - extra_kw['headers'].update(headers) - extra_kw.update(urlopen_kw) - - return self.urlopen(method, url, **extra_kw) diff --git a/pype/vendor/urllib3/response.py b/pype/vendor/urllib3/response.py deleted file mode 100644 index c112690b0a..0000000000 --- a/pype/vendor/urllib3/response.py +++ /dev/null @@ -1,705 +0,0 @@ -from __future__ import absolute_import -from contextlib import contextmanager -import zlib -import io -import logging -from socket import timeout as SocketTimeout -from socket import error as SocketError - -from ._collections import HTTPHeaderDict -from .exceptions import ( - BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, - ResponseNotChunked, IncompleteRead, InvalidHeader -) -from .packages.six import string_types as basestring, PY3 -from .packages.six.moves import http_client as httplib -from .connection import HTTPException, BaseSSLError -from .util.response import is_fp_closed, is_response_to_head - -log = logging.getLogger(__name__) - - -class DeflateDecoder(object): - - def __init__(self): - self._first_try = True - self._data = b'' - self._obj = zlib.decompressobj() - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - if not data: - return data - - if not self._first_try: - return self._obj.decompress(data) - - self._data += data - try: - decompressed = self._obj.decompress(data) - if decompressed: - self._first_try = False - self._data = None - return decompressed - except zlib.error: - self._first_try = False - self._obj = zlib.decompressobj(-zlib.MAX_WBITS) - try: - return self.decompress(self._data) - finally: - self._data = None - - -class GzipDecoderState(object): - - FIRST_MEMBER = 0 - OTHER_MEMBERS = 1 - SWALLOW_DATA = 2 - - -class GzipDecoder(object): - - def __init__(self): - self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) - self._state = GzipDecoderState.FIRST_MEMBER - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - ret = bytearray() - if self._state == GzipDecoderState.SWALLOW_DATA or not data: - return bytes(ret) - while True: - try: - ret += self._obj.decompress(data) - except zlib.error: - previous_state = self._state - # Ignore data after the first error - self._state = GzipDecoderState.SWALLOW_DATA - if previous_state == GzipDecoderState.OTHER_MEMBERS: - # Allow trailing garbage acceptable in other gzip clients - return bytes(ret) - raise - data = self._obj.unused_data - if not data: - return bytes(ret) - self._state = GzipDecoderState.OTHER_MEMBERS - self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) - - -class MultiDecoder(object): - """ - From RFC7231: - If one or more encodings have been applied to a representation, the - sender that applied the encodings MUST generate a Content-Encoding - header field that lists the content codings in the order in which - they were applied. 
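MultiDecoder mirrors the RFC 7231 rule just quoted: codings are listed in the order they were applied, so decoding must run right-to-left. A standard-library sketch of the same idea:

import gzip
import zlib

body = gzip.compress(b'hello')   # first coding applied: gzip
body = zlib.compress(body)       # second coding applied: deflate
# the server would now send: Content-Encoding: gzip, deflate

# decode in reverse order of the header list
assert gzip.decompress(zlib.decompress(body)) == b'hello'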
- """ - - def __init__(self, modes): - self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')] - - def flush(self): - return self._decoders[0].flush() - - def decompress(self, data): - for d in reversed(self._decoders): - data = d.decompress(data) - return data - - -def _get_decoder(mode): - if ',' in mode: - return MultiDecoder(mode) - - if mode == 'gzip': - return GzipDecoder() - - return DeflateDecoder() - - -class HTTPResponse(io.IOBase): - """ - HTTP Response container. - - Backwards-compatible to httplib's HTTPResponse but the response ``body`` is - loaded and decoded on-demand when the ``data`` property is accessed. This - class is also compatible with the Python standard library's :mod:`io` - module, and can hence be treated as a readable object in the context of that - framework. - - Extra parameters for behaviour not present in httplib.HTTPResponse: - - :param preload_content: - If True, the response's body will be preloaded during construction. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - - :param original_response: - When this HTTPResponse wrapper is generated from an httplib.HTTPResponse - object, it's convenient to include the original for debug purposes. It's - otherwise unused. - - :param retries: - The retries contains the last :class:`~urllib3.util.retry.Retry` that - was used during the request. - - :param enforce_content_length: - Enforce content length checking. Body returned by server must match - value of Content-Length header, if present. Otherwise, raise error. - """ - - CONTENT_DECODERS = ['gzip', 'deflate'] - REDIRECT_STATUSES = [301, 302, 303, 307, 308] - - def __init__(self, body='', headers=None, status=0, version=0, reason=None, - strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None, msg=None, - retries=None, enforce_content_length=False, - request_method=None, request_url=None): - - if isinstance(headers, HTTPHeaderDict): - self.headers = headers - else: - self.headers = HTTPHeaderDict(headers) - self.status = status - self.version = version - self.reason = reason - self.strict = strict - self.decode_content = decode_content - self.retries = retries - self.enforce_content_length = enforce_content_length - - self._decoder = None - self._body = None - self._fp = None - self._original_response = original_response - self._fp_bytes_read = 0 - self.msg = msg - self._request_url = request_url - - if body and isinstance(body, (basestring, bytes)): - self._body = body - - self._pool = pool - self._connection = connection - - if hasattr(body, 'read'): - self._fp = body - - # Are we using the chunked-style of transfer encoding? - self.chunked = False - self.chunk_left = None - tr_enc = self.headers.get('transfer-encoding', '').lower() - # Don't incur the penalty of creating a list and then discarding it - encodings = (enc.strip() for enc in tr_enc.split(",")) - if "chunked" in encodings: - self.chunked = True - - # Determine length of response - self.length_remaining = self._init_length(request_method) - - # If requested, preload the body. - if preload_content and not self._body: - self._body = self.read(decode_content=decode_content) - - def get_redirect_location(self): - """ - Should we redirect and where to? - - :returns: Truthy redirect location string if we got a redirect status - code and valid location. ``None`` if redirect status and no - location. ``False`` if not a redirect status code. 
- """ - if self.status in self.REDIRECT_STATUSES: - return self.headers.get('location') - - return False - - def release_conn(self): - if not self._pool or not self._connection: - return - - self._pool._put_conn(self._connection) - self._connection = None - - @property - def data(self): - # For backwords-compat with earlier urllib3 0.4 and earlier. - if self._body: - return self._body - - if self._fp: - return self.read(cache_content=True) - - @property - def connection(self): - return self._connection - - def isclosed(self): - return is_fp_closed(self._fp) - - def tell(self): - """ - Obtain the number of bytes pulled over the wire so far. May differ from - the amount of content returned by :meth:``HTTPResponse.read`` if bytes - are encoded on the wire (e.g, compressed). - """ - return self._fp_bytes_read - - def _init_length(self, request_method): - """ - Set initial length value for Response content if available. - """ - length = self.headers.get('content-length') - - if length is not None: - if self.chunked: - # This Response will fail with an IncompleteRead if it can't be - # received as chunked. This method falls back to attempt reading - # the response before raising an exception. - log.warning("Received response with both Content-Length and " - "Transfer-Encoding set. This is expressly forbidden " - "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " - "attempting to process response as Transfer-Encoding: " - "chunked.") - return None - - try: - # RFC 7230 section 3.3.2 specifies multiple content lengths can - # be sent in a single Content-Length header - # (e.g. Content-Length: 42, 42). This line ensures the values - # are all valid ints and that as long as the `set` length is 1, - # all values are the same. Otherwise, the header is invalid. - lengths = set([int(val) for val in length.split(',')]) - if len(lengths) > 1: - raise InvalidHeader("Content-Length contained multiple " - "unmatching values (%s)" % length) - length = lengths.pop() - except ValueError: - length = None - else: - if length < 0: - length = None - - # Convert status to int for comparison - # In some cases, httplib returns a status of "_UNKNOWN" - try: - status = int(self.status) - except ValueError: - status = 0 - - # Check for responses that shouldn't include a body - if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': - length = 0 - - return length - - def _init_decoder(self): - """ - Set-up the _decoder attribute if necessary. - """ - # Note: content-encoding value should be case-insensitive, per RFC 7230 - # Section 3.2 - content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None: - if content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) - elif ',' in content_encoding: - encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS] - if len(encodings): - self._decoder = _get_decoder(content_encoding) - - def _decode(self, data, decode_content, flush_decoder): - """ - Decode the data passed in and potentially flush the decoder. - """ - try: - if decode_content and self._decoder: - data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - content_encoding = self.headers.get('content-encoding', '').lower() - raise DecodeError( - "Received response with content-encoding: %s, but " - "failed to decode it." 
% content_encoding, e) - - if flush_decoder and decode_content: - data += self._flush_decoder() - - return data - - def _flush_decoder(self): - """ - Flushes the decoder. Should only be called if the decoder is actually - being used. - """ - if self._decoder: - buf = self._decoder.decompress(b'') - return buf + self._decoder.flush() - - return b'' - - @contextmanager - def _error_catcher(self): - """ - Catch low-level python exceptions, instead re-raising urllib3 - variants, so that low-level exceptions are not leaked in the - high-level api. - - On exit, release the connection back to the pool. - """ - clean_exit = False - - try: - try: - yield - - except SocketTimeout: - # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but - # there is yet no clean way to get at it from this context. - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except BaseSSLError as e: - # FIXME: Is there a better way to differentiate between SSLErrors? - if 'read operation timed out' not in str(e): # Defensive: - # This shouldn't happen but just in case we're missing an edge - # case, let's avoid swallowing SSL errors. - raise - - raise ReadTimeoutError(self._pool, None, 'Read timed out.') - - except (HTTPException, SocketError) as e: - # This includes IncompleteRead. - raise ProtocolError('Connection broken: %r' % e, e) - - # If no exception is thrown, we should avoid cleaning up - # unnecessarily. - clean_exit = True - finally: - # If we didn't terminate cleanly, we need to throw away our - # connection. - if not clean_exit: - # The response may not be closed but we're not going to use it - # anymore so close it now to ensure that the connection is - # released back to the pool. - if self._original_response: - self._original_response.close() - - # Closing the response may not actually be sufficient to close - # everything, so if we have a hold of the connection close that - # too. - if self._connection: - self._connection.close() - - # If we hold the original response but it's closed now, we should - # return the connection back to the pool. - if self._original_response and self._original_response.isclosed(): - self.release_conn() - - def read(self, amt=None, decode_content=None, cache_content=False): - """ - Similar to :meth:`httplib.HTTPResponse.read`, but with two additional - parameters: ``decode_content`` and ``cache_content``. - - :param amt: - How much of the content to read. If specified, caching is skipped - because it doesn't make sense to cache partial content as the full - response. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - - :param cache_content: - If True, will save the returned data such that the same result is - returned despite of the state of the underlying file object. This - is useful if you want the ``.data`` property to continue working - after having ``.read()`` the file object. (Overridden if ``amt`` is - set.) - """ - self._init_decoder() - if decode_content is None: - decode_content = self.decode_content - - if self._fp is None: - return - - flush_decoder = False - data = None - - with self._error_catcher(): - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() - flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. 
However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do - # not properly close the connection in all cases. There is - # no harm in redundantly calling close. - self._fp.close() - flush_decoder = True - if self.enforce_content_length and self.length_remaining not in (0, None): - # This is an edge case that httplib failed to cover due - # to concerns of backward compatibility. We're - # addressing it here to make sure IncompleteRead is - # raised during streaming, so all calls with incorrect - # Content-Length are caught. - raise IncompleteRead(self._fp_bytes_read, self.length_remaining) - - if data: - self._fp_bytes_read += len(data) - if self.length_remaining is not None: - self.length_remaining -= len(data) - - data = self._decode(data, decode_content, flush_decoder) - - if cache_content: - self._body = data - - return data - - def stream(self, amt=2**16, decode_content=None): - """ - A generator wrapper for the read() method. A call will block until - ``amt`` bytes have been read from the connection or until the - connection is closed. - - :param amt: - How much of the content to read. The generator will return up to - much data per iteration, but may return less. This is particularly - likely when using compressed data. However, the empty string will - never be returned. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - if self.chunked and self.supports_chunked_reads(): - for line in self.read_chunked(amt, decode_content=decode_content): - yield line - else: - while not is_fp_closed(self._fp): - data = self.read(amt=amt, decode_content=decode_content) - - if data: - yield data - - @classmethod - def from_httplib(ResponseCls, r, **response_kw): - """ - Given an :class:`httplib.HTTPResponse` instance ``r``, return a - corresponding :class:`urllib3.response.HTTPResponse` object. - - Remaining parameters are passed to the HTTPResponse constructor, along - with ``original_response=r``. 
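from_httplib adapts a raw standard-library response into this wrapper. A sketch, assuming Python 3's http.client and the upstream urllib3 1.x response module (the classmethod was removed in urllib3 2.0):

import http.client
from urllib3.response import HTTPResponse

conn = http.client.HTTPConnection('example.com')
conn.request('GET', '/')
raw = conn.getresponse()

resp = HTTPResponse.from_httplib(raw, preload_content=False)
print(resp.status, resp.headers.get('content-type'))
conn.close()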
- """ - headers = r.msg - - if not isinstance(headers, HTTPHeaderDict): - if PY3: # Python 3 - headers = HTTPHeaderDict(headers.items()) - else: # Python 2 - headers = HTTPHeaderDict.from_httplib(headers) - - # HTTPResponse objects in Python 3 don't have a .strict attribute - strict = getattr(r, 'strict', 0) - resp = ResponseCls(body=r, - headers=headers, - status=r.status, - version=r.version, - reason=r.reason, - strict=strict, - original_response=r, - **response_kw) - return resp - - # Backwards-compatibility methods for httplib.HTTPResponse - def getheaders(self): - return self.headers - - def getheader(self, name, default=None): - return self.headers.get(name, default) - - # Backwards compatibility for http.cookiejar - def info(self): - return self.headers - - # Overrides from io.IOBase - def close(self): - if not self.closed: - self._fp.close() - - if self._connection: - self._connection.close() - - @property - def closed(self): - if self._fp is None: - return True - elif hasattr(self._fp, 'isclosed'): - return self._fp.isclosed() - elif hasattr(self._fp, 'closed'): - return self._fp.closed - else: - return True - - def fileno(self): - if self._fp is None: - raise IOError("HTTPResponse has no file to get a fileno from") - elif hasattr(self._fp, "fileno"): - return self._fp.fileno() - else: - raise IOError("The file-like object this HTTPResponse is wrapped " - "around has no file descriptor") - - def flush(self): - if self._fp is not None and hasattr(self._fp, 'flush'): - return self._fp.flush() - - def readable(self): - # This method is required for `io` module compatibility. - return True - - def readinto(self, b): - # This method is required for `io` module compatibility. - temp = self.read(len(b)) - if len(temp) == 0: - return 0 - else: - b[:len(temp)] = temp - return len(temp) - - def supports_chunked_reads(self): - """ - Checks if the underlying file-like object looks like a - httplib.HTTPResponse object. We do this by testing for the fp - attribute. If it is present we assume it returns raw chunks as - processed by read_chunked(). - """ - return hasattr(self._fp, 'fp') - - def _update_chunk_length(self): - # First, we'll figure out length of a chunk and then - # we'll try to read it from socket. - if self.chunk_left is not None: - return - line = self._fp.fp.readline() - line = line.split(b';', 1)[0] - try: - self.chunk_left = int(line, 16) - except ValueError: - # Invalid chunked protocol response, abort. - self.close() - raise httplib.IncompleteRead(line) - - def _handle_chunk(self, amt): - returned_chunk = None - if amt is None: - chunk = self._fp._safe_read(self.chunk_left) - returned_chunk = chunk - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - elif amt < self.chunk_left: - value = self._fp._safe_read(amt) - self.chunk_left = self.chunk_left - amt - returned_chunk = value - elif amt == self.chunk_left: - value = self._fp._safe_read(amt) - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - returned_chunk = value - else: # amt > self.chunk_left - returned_chunk = self._fp._safe_read(self.chunk_left) - self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. - self.chunk_left = None - return returned_chunk - - def read_chunked(self, amt=None, decode_content=None): - """ - Similar to :meth:`HTTPResponse.read`, but with an additional - parameter: ``decode_content``. - - :param amt: - How much of the content to read. 
If specified, caching is skipped - because it doesn't make sense to cache partial content as the full - response. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - self._init_decoder() - # FIXME: Rewrite this method and make it a class with a better structured logic. - if not self.chunked: - raise ResponseNotChunked( - "Response is not chunked. " - "Header 'transfer-encoding: chunked' is missing.") - if not self.supports_chunked_reads(): - raise BodyNotHttplibCompatible( - "Body should be httplib.HTTPResponse like. " - "It should have have an fp attribute which returns raw chunks.") - - with self._error_catcher(): - # Don't bother reading the body of a HEAD request. - if self._original_response and is_response_to_head(self._original_response): - self._original_response.close() - return - - # If a response is already read and closed - # then return immediately. - if self._fp.fp is None: - return - - while True: - self._update_chunk_length() - if self.chunk_left == 0: - break - chunk = self._handle_chunk(amt) - decoded = self._decode(chunk, decode_content=decode_content, - flush_decoder=False) - if decoded: - yield decoded - - if decode_content: - # On CPython and PyPy, we should never need to flush the - # decoder. However, on Jython we *might* need to, so - # lets defensively do it anyway. - decoded = self._flush_decoder() - if decoded: # Platform-specific: Jython. - yield decoded - - # Chunk content ends with \r\n: discard it. - while True: - line = self._fp.fp.readline() - if not line: - # Some sites may not end with '\r\n'. - break - if line == b'\r\n': - break - - # We read everything; close the "file". - if self._original_response: - self._original_response.close() - - def geturl(self): - """ - Returns the URL that was the source of this response. - If the request that generated this response redirected, this method - will return the final redirect location. - """ - if self.retries is not None and len(self.retries.history): - return self.retries.history[-1].redirect_location - else: - return self._request_url diff --git a/pype/vendor/urllib3/util/__init__.py b/pype/vendor/urllib3/util/__init__.py deleted file mode 100644 index 2f2770b622..0000000000 --- a/pype/vendor/urllib3/util/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import absolute_import -# For backwards compatibility, provide imports that used to be here. 
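These re-exports exist purely for import convenience, so callers can pull the common helpers from one flat namespace. A sketch, assuming upstream urllib3:

import urllib3
from urllib3.util import Retry, Timeout, parse_url

print(parse_url('https://example.com:8443/x?y=1').host)   # example.com

http = urllib3.PoolManager(
    timeout=Timeout(connect=2.0, read=7.0),
    retries=Retry(total=3, redirect=2),
)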
-from .connection import is_connection_dropped -from .request import make_headers -from .response import is_fp_closed -from .ssl_ import ( - SSLContext, - HAS_SNI, - IS_PYOPENSSL, - IS_SECURETRANSPORT, - assert_fingerprint, - resolve_cert_reqs, - resolve_ssl_version, - ssl_wrap_socket, -) -from .timeout import ( - current_time, - Timeout, -) - -from .retry import Retry -from .url import ( - get_host, - parse_url, - split_first, - Url, -) -from .wait import ( - wait_for_read, - wait_for_write -) - -__all__ = ( - 'HAS_SNI', - 'IS_PYOPENSSL', - 'IS_SECURETRANSPORT', - 'SSLContext', - 'Retry', - 'Timeout', - 'Url', - 'assert_fingerprint', - 'current_time', - 'is_connection_dropped', - 'is_fp_closed', - 'get_host', - 'parse_url', - 'make_headers', - 'resolve_cert_reqs', - 'resolve_ssl_version', - 'split_first', - 'ssl_wrap_socket', - 'wait_for_read', - 'wait_for_write' -) diff --git a/pype/vendor/urllib3/util/connection.py b/pype/vendor/urllib3/util/connection.py deleted file mode 100644 index 5ad70b2f1c..0000000000 --- a/pype/vendor/urllib3/util/connection.py +++ /dev/null @@ -1,134 +0,0 @@ -from __future__ import absolute_import -import socket -from .wait import NoWayToWaitForSocketError, wait_for_read -from ..contrib import _appengine_environ - - -def is_connection_dropped(conn): # Platform-specific - """ - Returns True if the connection is dropped and should be closed. - - :param conn: - :class:`httplib.HTTPConnection` object. - - Note: For platforms like AppEngine, this will always return ``False`` to - let the platform handle connection recycling transparently for us. - """ - sock = getattr(conn, 'sock', False) - if sock is False: # Platform-specific: AppEngine - return False - if sock is None: # Connection already closed (such as by httplib). - return True - try: - # Returns True if readable, which here means it's been dropped - return wait_for_read(sock, timeout=0.0) - except NoWayToWaitForSocketError: # Platform-specific: AppEngine - return False - - -# This function is copied from socket.py in the Python 2.7 standard -# library test suite. Added to its signature is only `socket_options`. -# One additional modification is that we avoid binding to IPv6 servers -# discovered in DNS if the system doesn't have IPv6 functionality. -def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, socket_options=None): - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - An host of '' or port 0 tells the OS to use the default. - """ - - host, port = address - if host.startswith('['): - host = host.strip('[]') - err = None - - # Using the value from allowed_gai_family() in the context of getaddrinfo lets - # us select whether to work with IPv4 DNS records, IPv6 records, or both. - # The original create_connection function always returns all records. 
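create_connection mirrors the stdlib helper of the same name but adds socket_options, applied before connecting. A sketch using the upstream helper:

import socket
from urllib3.util.connection import create_connection

sock = create_connection(
    ('example.com', 80),
    timeout=5.0,
    # disable Nagle's algorithm on the new socket before connect()
    socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
)
sock.close()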
- family = allowed_gai_family() - - for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - sock = None - try: - sock = socket.socket(af, socktype, proto) - - # If provided, set socket level options before connecting. - _set_socket_options(sock, socket_options) - - if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: - sock.settimeout(timeout) - if source_address: - sock.bind(source_address) - sock.connect(sa) - return sock - - except socket.error as e: - err = e - if sock is not None: - sock.close() - sock = None - - if err is not None: - raise err - - raise socket.error("getaddrinfo returns an empty list") - - -def _set_socket_options(sock, options): - if options is None: - return - - for opt in options: - sock.setsockopt(*opt) - - -def allowed_gai_family(): - """This function is designed to work in the context of - getaddrinfo, where family=socket.AF_UNSPEC is the default and - will perform a DNS search for both IPv6 and IPv4 records.""" - - family = socket.AF_INET - if HAS_IPV6: - family = socket.AF_UNSPEC - return family - - -def _has_ipv6(host): - """ Returns True if the system can bind an IPv6 address. """ - sock = None - has_ipv6 = False - - # App Engine doesn't support IPV6 sockets and actually has a quota on the - # number of sockets that can be used, so just early out here instead of - # creating a socket needlessly. - # See https://github.com/urllib3/urllib3/issues/1446 - if _appengine_environ.is_appengine_sandbox(): - return False - - if socket.has_ipv6: - # has_ipv6 returns true if cPython was compiled with IPv6 support. - # It does not tell us if the system has IPv6 support enabled. To - # determine that we must bind to an IPv6 address. - # https://github.com/shazow/urllib3/pull/611 - # https://bugs.python.org/issue658327 - try: - sock = socket.socket(socket.AF_INET6) - sock.bind((host, 0)) - has_ipv6 = True - except Exception: - pass - - if sock: - sock.close() - return has_ipv6 - - -HAS_IPV6 = _has_ipv6('::1') diff --git a/pype/vendor/urllib3/util/queue.py b/pype/vendor/urllib3/util/queue.py deleted file mode 100644 index d3d379a199..0000000000 --- a/pype/vendor/urllib3/util/queue.py +++ /dev/null @@ -1,21 +0,0 @@ -import collections -from ..packages import six -from ..packages.six.moves import queue - -if six.PY2: - # Queue is imported for side effects on MS Windows. See issue #229. - import Queue as _unused_module_Queue # noqa: F401 - - -class LifoQueue(queue.Queue): - def _init(self, _): - self.queue = collections.deque() - - def _qsize(self, len=len): - return len(self.queue) - - def _put(self, item): - self.queue.append(item) - - def _get(self): - return self.queue.pop() diff --git a/pype/vendor/urllib3/util/request.py b/pype/vendor/urllib3/util/request.py deleted file mode 100644 index 3ddfcd5594..0000000000 --- a/pype/vendor/urllib3/util/request.py +++ /dev/null @@ -1,118 +0,0 @@ -from __future__ import absolute_import -from base64 import b64encode - -from ..packages.six import b, integer_types -from ..exceptions import UnrewindableBodyError - -ACCEPT_ENCODING = 'gzip,deflate' -_FAILEDTELL = object() - - -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None, disable_cache=None): - """ - Shortcuts for generating request headers. - - :param keep_alive: - If ``True``, adds 'connection: keep-alive' header. - - :param accept_encoding: - Can be a boolean, list, or string. - ``True`` translates to 'gzip,deflate'. - List will get joined by comma. 
- String will be used as provided. - - :param user_agent: - String representing the user-agent you want, such as - "python-urllib3/0.6" - - :param basic_auth: - Colon-separated username:password string for 'authorization: basic ...' - auth header. - - :param proxy_basic_auth: - Colon-separated username:password string for 'proxy-authorization: basic ...' - auth header. - - :param disable_cache: - If ``True``, adds 'cache-control: no-cache' header. - - Example:: - - >>> make_headers(keep_alive=True, user_agent="Batman/1.0") - {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} - >>> make_headers(accept_encoding=True) - {'accept-encoding': 'gzip,deflate'} - """ - headers = {} - if accept_encoding: - if isinstance(accept_encoding, str): - pass - elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) - else: - accept_encoding = ACCEPT_ENCODING - headers['accept-encoding'] = accept_encoding - - if user_agent: - headers['user-agent'] = user_agent - - if keep_alive: - headers['connection'] = 'keep-alive' - - if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(b(basic_auth)).decode('utf-8') - - if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(b(proxy_basic_auth)).decode('utf-8') - - if disable_cache: - headers['cache-control'] = 'no-cache' - - return headers - - -def set_file_position(body, pos): - """ - If a position is provided, move file to that point. - Otherwise, we'll attempt to record a position for future use. - """ - if pos is not None: - rewind_body(body, pos) - elif getattr(body, 'tell', None) is not None: - try: - pos = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body. - pos = _FAILEDTELL - - return pos - - -def rewind_body(body, body_pos): - """ - Attempt to rewind body to a certain position. - Primarily used for request redirects and retries. - - :param body: - File-like object that supports seek. - - :param int pos: - Position to seek to in file. - """ - body_seek = getattr(body, 'seek', None) - if body_seek is not None and isinstance(body_pos, integer_types): - try: - body_seek(body_pos) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect/retry.") - elif body_pos is _FAILEDTELL: - raise UnrewindableBodyError("Unable to record file position for rewinding " - "request body during a redirect/retry.") - else: - raise ValueError("body_pos must be of type integer, " - "instead it was %s." % type(body_pos)) diff --git a/pype/vendor/urllib3/util/response.py b/pype/vendor/urllib3/util/response.py deleted file mode 100644 index 3d5486485a..0000000000 --- a/pype/vendor/urllib3/util/response.py +++ /dev/null @@ -1,87 +0,0 @@ -from __future__ import absolute_import -from ..packages.six.moves import http_client as httplib - -from ..exceptions import HeaderParsingError - - -def is_fp_closed(obj): - """ - Checks whether a given file-like object is closed. - - :param obj: - The file-like object to check. - """ - - try: - # Check `isclosed()` first, in case Python3 doesn't set `closed`. - # GH Issue #928 - return obj.isclosed() - except AttributeError: - pass - - try: - # Check via the official file-like-object way. - return obj.closed - except AttributeError: - pass - - try: - # Check if the object is a container for another file-like object that - # gets released on exhaustion (e.g. HTTPResponse). 
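
set_file_position() and rewind_body(), removed above, implement the replay logic needed when a redirect or retry must resend a file-like request body: record the starting offset up front, then seek back to it before the second attempt. The same pattern shown with a plain BytesIO:

import io

body = io.BytesIO(b"payload")
body.seek(2)             # the caller may hand over an already-seeked file
pos = body.tell()        # what set_file_position() records on first use

body.read()              # the first attempt consumes the body
body.seek(pos)           # what rewind_body() does before the retry
assert body.read() == b"yload"
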
- return obj.fp is None - except AttributeError: - pass - - raise ValueError("Unable to determine whether fp is closed.") - - -def assert_header_parsing(headers): - """ - Asserts whether all headers have been successfully parsed. - Extracts encountered errors from the result of parsing headers. - - Only works on Python 3. - - :param headers: Headers to verify. - :type headers: `httplib.HTTPMessage`. - - :raises urllib3.exceptions.HeaderParsingError: - If parsing errors are found. - """ - - # This will fail silently if we pass in the wrong kind of parameter. - # To make debugging easier add an explicit check. - if not isinstance(headers, httplib.HTTPMessage): - raise TypeError('expected httplib.Message, got {0}.'.format( - type(headers))) - - defects = getattr(headers, 'defects', None) - get_payload = getattr(headers, 'get_payload', None) - - unparsed_data = None - if get_payload: - # get_payload is actually email.message.Message.get_payload; - # we're only interested in the result if it's not a multipart message - if not headers.is_multipart(): - payload = get_payload() - - if isinstance(payload, (bytes, str)): - unparsed_data = payload - - if defects or unparsed_data: - raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) - - -def is_response_to_head(response): - """ - Checks whether the request of a response has been a HEAD-request. - Handles the quirks of AppEngine. - - :param conn: - :type conn: :class:`httplib.HTTPResponse` - """ - # FIXME: Can we do this somehow without accessing private httplib _method? - method = response._method - if isinstance(method, int): # Platform-specific: Appengine - return method == 3 - return method.upper() == 'HEAD' diff --git a/pype/vendor/urllib3/util/retry.py b/pype/vendor/urllib3/util/retry.py deleted file mode 100644 index e7d0abd610..0000000000 --- a/pype/vendor/urllib3/util/retry.py +++ /dev/null @@ -1,411 +0,0 @@ -from __future__ import absolute_import -import time -import logging -from collections import namedtuple -from itertools import takewhile -import email -import re - -from ..exceptions import ( - ConnectTimeoutError, - MaxRetryError, - ProtocolError, - ReadTimeoutError, - ResponseError, - InvalidHeader, -) -from ..packages import six - - -log = logging.getLogger(__name__) - - -# Data structure for representing the metadata of requests that result in a retry. -RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", - "status", "redirect_location"]) - - -class Retry(object): - """ Retry configuration. - - Each retry attempt will create a new Retry object with updated values, so - they can be safely reused. - - Retries can be defined as a default for a pool:: - - retries = Retry(connect=5, read=2, redirect=5) - http = PoolManager(retries=retries) - response = http.request('GET', 'http://example.com/') - - Or per-request (which overrides the default for the pool):: - - response = http.request('GET', 'http://example.com/', retries=Retry(10)) - - Retries can be disabled by passing ``False``:: - - response = http.request('GET', 'http://example.com/', retries=False) - - Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless - retries are disabled, in which case the causing exception will be raised. - - :param int total: - Total number of retries to allow. Takes precedence over other counts. - - Set to ``None`` to remove this constraint and fall back on other - counts. 
It's a good idea to set this to some sensibly-high value to - account for unexpected edge cases and avoid infinite retry loops. - - Set to ``0`` to fail on the first retry. - - Set to ``False`` to disable and imply ``raise_on_redirect=False``. - - :param int connect: - How many connection-related errors to retry on. - - These are errors raised before the request is sent to the remote server, - which we assume has not triggered the server to process the request. - - Set to ``0`` to fail on the first retry of this type. - - :param int read: - How many times to retry on read errors. - - These errors are raised after the request was sent to the server, so the - request may have side-effects. - - Set to ``0`` to fail on the first retry of this type. - - :param int redirect: - How many redirects to perform. Limit this to avoid infinite redirect - loops. - - A redirect is a HTTP response with a status code 301, 302, 303, 307 or - 308. - - Set to ``0`` to fail on the first retry of this type. - - Set to ``False`` to disable and imply ``raise_on_redirect=False``. - - :param int status: - How many times to retry on bad status codes. - - These are retries made on responses, where status code matches - ``status_forcelist``. - - Set to ``0`` to fail on the first retry of this type. - - :param iterable method_whitelist: - Set of uppercased HTTP method verbs that we should retry on. - - By default, we only retry on methods which are considered to be - idempotent (multiple requests with the same parameters end with the - same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. - - Set to a ``False`` value to retry on any verb. - - :param iterable status_forcelist: - A set of integer HTTP status codes that we should force a retry on. - A retry is initiated if the request method is in ``method_whitelist`` - and the response status code is in ``status_forcelist``. - - By default, this is disabled with ``None``. - - :param float backoff_factor: - A backoff factor to apply between attempts after the second try - (most errors are resolved immediately by a second try without a - delay). urllib3 will sleep for:: - - {backoff factor} * (2 ** ({number of total retries} - 1)) - - seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep - for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer - than :attr:`Retry.BACKOFF_MAX`. - - By default, backoff is disabled (set to 0). - - :param bool raise_on_redirect: Whether, if the number of redirects is - exhausted, to raise a MaxRetryError, or to return a response with a - response code in the 3xx range. - - :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: - whether we should raise an exception, or return a response, - if status falls in ``status_forcelist`` range and retries have - been exhausted. - - :param tuple history: The history of the request encountered during - each call to :meth:`~Retry.increment`. The list is in the order - the requests occurred. Each list item is of class :class:`RequestHistory`. - - :param bool respect_retry_after_header: - Whether to respect Retry-After header on status codes defined as - :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. - - :param iterable remove_headers_on_redirect: - Sequence of headers to remove from the request when a response - indicating a redirect is returned before firing off the redirected - request. 
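
A worked example of the backoff formula documented just above, using backoff_factor=0.1 (illustrative arithmetic; BACKOFF_MAX mirrors the class default defined below):

BACKOFF_MAX = 120

# get_backoff_time() returns 0 until at least two consecutive errors
# have accumulated, then grows exponentially and is capped.
for consecutive_errors in range(1, 6):
    if consecutive_errors <= 1:
        sleep = 0.0
    else:
        sleep = min(BACKOFF_MAX, 0.1 * (2 ** (consecutive_errors - 1)))
    print(consecutive_errors, sleep)   # -> 0.0, 0.2, 0.4, 0.8, 1.6
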
- """ - - DEFAULT_METHOD_WHITELIST = frozenset([ - 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) - - RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - - DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) - - #: Maximum backoff time. - BACKOFF_MAX = 120 - - def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, - backoff_factor=0, raise_on_redirect=True, raise_on_status=True, - history=None, respect_retry_after_header=True, - remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): - - self.total = total - self.connect = connect - self.read = read - self.status = status - - if redirect is False or total is False: - redirect = 0 - raise_on_redirect = False - - self.redirect = redirect - self.status_forcelist = status_forcelist or set() - self.method_whitelist = method_whitelist - self.backoff_factor = backoff_factor - self.raise_on_redirect = raise_on_redirect - self.raise_on_status = raise_on_status - self.history = history or tuple() - self.respect_retry_after_header = respect_retry_after_header - self.remove_headers_on_redirect = remove_headers_on_redirect - - def new(self, **kw): - params = dict( - total=self.total, - connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, - method_whitelist=self.method_whitelist, - status_forcelist=self.status_forcelist, - backoff_factor=self.backoff_factor, - raise_on_redirect=self.raise_on_redirect, - raise_on_status=self.raise_on_status, - history=self.history, - remove_headers_on_redirect=self.remove_headers_on_redirect - ) - params.update(kw) - return type(self)(**params) - - @classmethod - def from_int(cls, retries, redirect=True, default=None): - """ Backwards-compatibility for the old retries format.""" - if retries is None: - retries = default if default is not None else cls.DEFAULT - - if isinstance(retries, Retry): - return retries - - redirect = bool(redirect) and None - new_retries = cls(retries, redirect=redirect) - log.debug("Converted retries value: %r -> %r", retries, new_retries) - return new_retries - - def get_backoff_time(self): - """ Formula for computing the current backoff - - :rtype: float - """ - # We want to consider only the last consecutive errors sequence (Ignore redirects). - consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, - reversed(self.history)))) - if consecutive_errors_len <= 1: - return 0 - - backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) - return min(self.BACKOFF_MAX, backoff_value) - - def parse_retry_after(self, retry_after): - # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 - if re.match(r"^\s*[0-9]+\s*$", retry_after): - seconds = int(retry_after) - else: - retry_date_tuple = email.utils.parsedate(retry_after) - if retry_date_tuple is None: - raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) - retry_date = time.mktime(retry_date_tuple) - seconds = retry_date - time.time() - - if seconds < 0: - seconds = 0 - - return seconds - - def get_retry_after(self, response): - """ Get the value of Retry-After in seconds. 
""" - - retry_after = response.getheader("Retry-After") - - if retry_after is None: - return None - - return self.parse_retry_after(retry_after) - - def sleep_for_retry(self, response=None): - retry_after = self.get_retry_after(response) - if retry_after: - time.sleep(retry_after) - return True - - return False - - def _sleep_backoff(self): - backoff = self.get_backoff_time() - if backoff <= 0: - return - time.sleep(backoff) - - def sleep(self, response=None): - """ Sleep between retry attempts. - - This method will respect a server's ``Retry-After`` response header - and sleep the duration of the time requested. If that is not present, it - will use an exponential backoff. By default, the backoff factor is 0 and - this method will return immediately. - """ - - if response: - slept = self.sleep_for_retry(response) - if slept: - return - - self._sleep_backoff() - - def _is_connection_error(self, err): - """ Errors when we're fairly sure that the server did not receive the - request, so it should be safe to retry. - """ - return isinstance(err, ConnectTimeoutError) - - def _is_read_error(self, err): - """ Errors that occur after the request has been started, so we should - assume that the server began processing it. - """ - return isinstance(err, (ReadTimeoutError, ProtocolError)) - - def _is_method_retryable(self, method): - """ Checks if a given HTTP method should be retried upon, depending if - it is included on the method whitelist. - """ - if self.method_whitelist and method.upper() not in self.method_whitelist: - return False - - return True - - def is_retry(self, method, status_code, has_retry_after=False): - """ Is this method/status code retryable? (Based on whitelists and control - variables such as the number of total retries to allow, whether to - respect the Retry-After header, whether this header is present, and - whether the returned status code is on the list of status codes to - be retried upon on the presence of the aforementioned header) - """ - if not self._is_method_retryable(method): - return False - - if self.status_forcelist and status_code in self.status_forcelist: - return True - - return (self.total and self.respect_retry_after_header and - has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) - - def is_exhausted(self): - """ Are we out of retries? """ - retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) - retry_counts = list(filter(None, retry_counts)) - if not retry_counts: - return False - - return min(retry_counts) < 0 - - def increment(self, method=None, url=None, response=None, error=None, - _pool=None, _stacktrace=None): - """ Return a new Retry object with incremented retry counters. - - :param response: A response object, or None, if the server did not - return a response. - :type response: :class:`~urllib3.response.HTTPResponse` - :param Exception error: An error encountered during the request, or - None if the response was received successfully. - - :return: A new ``Retry`` object. - """ - if self.total is False and error: - # Disabled, indicate to re-raise the error. - raise six.reraise(type(error), error, _stacktrace) - - total = self.total - if total is not None: - total -= 1 - - connect = self.connect - read = self.read - redirect = self.redirect - status_count = self.status - cause = 'unknown' - status = None - redirect_location = None - - if error and self._is_connection_error(error): - # Connect retry? 
- if connect is False: - raise six.reraise(type(error), error, _stacktrace) - elif connect is not None: - connect -= 1 - - elif error and self._is_read_error(error): - # Read retry? - if read is False or not self._is_method_retryable(method): - raise six.reraise(type(error), error, _stacktrace) - elif read is not None: - read -= 1 - - elif response and response.get_redirect_location(): - # Redirect retry? - if redirect is not None: - redirect -= 1 - cause = 'too many redirects' - redirect_location = response.get_redirect_location() - status = response.status - - else: - # Incrementing because of a server error like a 500 in - # status_forcelist and a the given method is in the whitelist - cause = ResponseError.GENERIC_ERROR - if response and response.status: - if status_count is not None: - status_count -= 1 - cause = ResponseError.SPECIFIC_ERROR.format( - status_code=response.status) - status = response.status - - history = self.history + (RequestHistory(method, url, error, status, redirect_location),) - - new_retry = self.new( - total=total, - connect=connect, read=read, redirect=redirect, status=status_count, - history=history) - - if new_retry.is_exhausted(): - raise MaxRetryError(_pool, url, error or ResponseError(cause)) - - log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) - - return new_retry - - def __repr__(self): - return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' - 'read={self.read}, redirect={self.redirect}, status={self.status})').format( - cls=type(self), self=self) - - -# For backwards compatibility (equivalent to pre-v1.9): -Retry.DEFAULT = Retry(3) diff --git a/pype/vendor/urllib3/util/ssl_.py b/pype/vendor/urllib3/util/ssl_.py deleted file mode 100644 index 64ea192a85..0000000000 --- a/pype/vendor/urllib3/util/ssl_.py +++ /dev/null @@ -1,381 +0,0 @@ -from __future__ import absolute_import -import errno -import warnings -import hmac -import socket - -from binascii import hexlify, unhexlify -from hashlib import md5, sha1, sha256 - -from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning -from ..packages import six - - -SSLContext = None -HAS_SNI = False -IS_PYOPENSSL = False -IS_SECURETRANSPORT = False - -# Maps the length of a digest to a possible hash function producing this digest -HASHFUNC_MAP = { - 32: md5, - 40: sha1, - 64: sha256, -} - - -def _const_compare_digest_backport(a, b): - """ - Compare two digests of equal length in constant time. - - The digests must be of type str/bytes. - Returns True if the digests match, and False otherwise. - """ - result = abs(len(a) - len(b)) - for l, r in zip(bytearray(a), bytearray(b)): - result |= l ^ r - return result == 0 - - -_const_compare_digest = getattr(hmac, 'compare_digest', - _const_compare_digest_backport) - - -try: # Test for SSL features - import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 - from ssl import HAS_SNI # Has SNI? -except ImportError: - pass - - -try: - from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION -except ImportError: - OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 - OP_NO_COMPRESSION = 0x20000 - - -# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in -# those cases. This means that we can only detect IPv4 addresses in this case. -if hasattr(socket, 'inet_pton'): - inet_pton = socket.inet_pton -else: - # Maybe we can use ipaddress if the user has urllib3[secure]? 
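
_const_compare_digest_backport() above exists because hmac.compare_digest() only arrived in Python 2.7.7/3.3; both variants inspect every byte, so timing does not reveal where two digests first differ. The stdlib equivalent on a modern interpreter:

import hmac

a = bytes.fromhex("aabbcc")
b = bytes.fromhex("aabbcd")
print(hmac.compare_digest(a, a))   # True
print(hmac.compare_digest(a, b))   # False, but only after scanning all bytes
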
- try: - import ipaddress - - def inet_pton(_, host): - if isinstance(host, bytes): - host = host.decode('ascii') - return ipaddress.ip_address(host) - - except ImportError: # Platform-specific: Non-Linux - def inet_pton(_, host): - return socket.inet_aton(host) - - -# A secure default. -# Sources for more information on TLS ciphers: -# -# - https://wiki.mozilla.org/Security/Server_Side_TLS -# - https://www.ssllabs.com/projects/best-practices/index.html -# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ -# -# The general intent is: -# - Prefer TLS 1.3 cipher suites -# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), -# - prefer ECDHE over DHE for better performance, -# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and -# security, -# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. -DEFAULT_CIPHERS = ':'.join([ - 'TLS13-AES-256-GCM-SHA384', - 'TLS13-CHACHA20-POLY1305-SHA256', - 'TLS13-AES-128-GCM-SHA256', - 'ECDH+AESGCM', - 'ECDH+CHACHA20', - 'DH+AESGCM', - 'DH+CHACHA20', - 'ECDH+AES256', - 'DH+AES256', - 'ECDH+AES128', - 'DH+AES', - 'RSA+AESGCM', - 'RSA+AES', - '!aNULL', - '!eNULL', - '!MD5', -]) - -try: - from ssl import SSLContext # Modern SSL? -except ImportError: - import sys - - class SSLContext(object): # Platform-specific: Python 2 - def __init__(self, protocol_version): - self.protocol = protocol_version - # Use default values from a real SSLContext - self.check_hostname = False - self.verify_mode = ssl.CERT_NONE - self.ca_certs = None - self.options = 0 - self.certfile = None - self.keyfile = None - self.ciphers = None - - def load_cert_chain(self, certfile, keyfile): - self.certfile = certfile - self.keyfile = keyfile - - def load_verify_locations(self, cafile=None, capath=None): - self.ca_certs = cafile - - if capath is not None: - raise SSLError("CA directories not supported in older Pythons") - - def set_ciphers(self, cipher_suite): - self.ciphers = cipher_suite - - def wrap_socket(self, socket, server_hostname=None, server_side=False): - warnings.warn( - 'A true SSLContext object is not available. This prevents ' - 'urllib3 from configuring SSL appropriately and may cause ' - 'certain SSL connections to fail. You can upgrade to a newer ' - 'version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - InsecurePlatformWarning - ) - kwargs = { - 'keyfile': self.keyfile, - 'certfile': self.certfile, - 'ca_certs': self.ca_certs, - 'cert_reqs': self.verify_mode, - 'ssl_version': self.protocol, - 'server_side': server_side, - } - return wrap_socket(socket, ciphers=self.ciphers, **kwargs) - - -def assert_fingerprint(cert, fingerprint): - """ - Checks if given fingerprint matches the supplied certificate. - - :param cert: - Certificate as bytes object. - :param fingerprint: - Fingerprint as string of hexdigits, can be interspersed by colons. - """ - - fingerprint = fingerprint.replace(':', '').lower() - digest_length = len(fingerprint) - hashfunc = HASHFUNC_MAP.get(digest_length) - if not hashfunc: - raise SSLError( - 'Fingerprint of invalid length: {0}'.format(fingerprint)) - - # We need encode() here for py32; works on py2 and p33. 
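
assert_fingerprint() above picks the hash function by digest length via HASHFUNC_MAP (32 hex characters map to md5, 40 to sha1, 64 to sha256) and compares in constant time. A sketch of producing a pin in the accepted colon-separated format (cert_der is a hypothetical stand-in for real DER-encoded certificate bytes):

from hashlib import sha256

cert_der = b"0\x82\x01..."             # hypothetical DER bytes
digest = sha256(cert_der).hexdigest()  # 64 hex chars -> sha256 in HASHFUNC_MAP
pin = ":".join(digest[i:i + 2] for i in range(0, len(digest), 2))
print(pin.upper())                     # e.g. "9F:86:D0:..."
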
- fingerprint_bytes = unhexlify(fingerprint.encode()) - - cert_digest = hashfunc(cert).digest() - - if not _const_compare_digest(cert_digest, fingerprint_bytes): - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(fingerprint, hexlify(cert_digest))) - - -def resolve_cert_reqs(candidate): - """ - Resolves the argument to a numeric constant, which can be passed to - the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. - If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbreviation. - (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. - If it's neither `None` nor a string we assume it is already the numeric - constant which can directly be passed to wrap_socket. - """ - if candidate is None: - return CERT_NONE - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'CERT_' + candidate) - return res - - return candidate - - -def resolve_ssl_version(candidate): - """ - like resolve_cert_reqs - """ - if candidate is None: - return PROTOCOL_SSLv23 - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) - return res - - return candidate - - -def create_urllib3_context(ssl_version=None, cert_reqs=None, - options=None, ciphers=None): - """All arguments have the same meaning as ``ssl_wrap_socket``. - - By default, this function does a lot of the same work that - ``ssl.create_default_context`` does on Python 3.4+. It: - - - Disables SSLv2, SSLv3, and compression - - Sets a restricted set of server ciphers - - If you wish to enable SSLv3, you can do:: - - from urllib3.util import ssl_ - context = ssl_.create_urllib3_context() - context.options &= ~ssl_.OP_NO_SSLv3 - - You can do the same to enable compression (substituting ``COMPRESSION`` - for ``SSLv3`` in the last line above). - - :param ssl_version: - The desired protocol version to use. This will default to - PROTOCOL_SSLv23 which will negotiate the highest protocol that both - the server and your installation of OpenSSL support. - :param cert_reqs: - Whether to require the certificate verification. This defaults to - ``ssl.CERT_REQUIRED``. - :param options: - Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, - ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. - :param ciphers: - Which cipher suites to allow the server to select. - :returns: - Constructed SSLContext object with specified options - :rtype: SSLContext - """ - context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) - - context.set_ciphers(ciphers or DEFAULT_CIPHERS) - - # Setting the default here, as we may have no ssl module on import - cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs - - if options is None: - options = 0 - # SSLv2 is easily broken and is considered harmful and dangerous - options |= OP_NO_SSLv2 - # SSLv3 has several problems and is now dangerous - options |= OP_NO_SSLv3 - # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ - # (issue #309) - options |= OP_NO_COMPRESSION - - context.options |= options - - context.verify_mode = cert_reqs - if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 - # We do our own verification, including fingerprints and alternative - # hostnames. 
So disable it here - context.check_hostname = False - return context - - -def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None, ciphers=None, ssl_context=None, - ca_cert_dir=None): - """ - All arguments except for server_hostname, ssl_context, and ca_cert_dir have - the same meaning as they do when using :func:`ssl.wrap_socket`. - - :param server_hostname: - When SNI is supported, the expected hostname of the certificate - :param ssl_context: - A pre-made :class:`SSLContext` object. If none is provided, one will - be created using :func:`create_urllib3_context`. - :param ciphers: - A string of ciphers we wish the client to support. - :param ca_cert_dir: - A directory containing CA certificates in multiple separate files, as - supported by OpenSSL's -CApath flag or the capath argument to - SSLContext.load_verify_locations(). - """ - context = ssl_context - if context is None: - # Note: This branch of code and all the variables in it are no longer - # used by urllib3 itself. We should consider deprecating and removing - # this code. - context = create_urllib3_context(ssl_version, cert_reqs, - ciphers=ciphers) - - if ca_certs or ca_cert_dir: - try: - context.load_verify_locations(ca_certs, ca_cert_dir) - except IOError as e: # Platform-specific: Python 2.7 - raise SSLError(e) - # Py33 raises FileNotFoundError which subclasses OSError - # These are not equivalent unless we check the errno attribute - except OSError as e: # Platform-specific: Python 3.3 and beyond - if e.errno == errno.ENOENT: - raise SSLError(e) - raise - elif getattr(context, 'load_default_certs', None) is not None: - # try to load OS default certs; works well on Windows (require Python3.4+) - context.load_default_certs() - - if certfile: - context.load_cert_chain(certfile, keyfile) - - # If we detect server_hostname is an IP address then the SNI - # extension should not be used according to RFC3546 Section 3.1 - # We shouldn't warn the user if SNI isn't available but we would - # not be using SNI anyways due to IP address for server_hostname. - if ((server_hostname is not None and not is_ipaddress(server_hostname)) - or IS_SECURETRANSPORT): - if HAS_SNI and server_hostname is not None: - return context.wrap_socket(sock, server_hostname=server_hostname) - - warnings.warn( - 'An HTTPS request has been made, but the SNI (Server Name ' - 'Indication) extension to TLS is not available on this platform. ' - 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. You can upgrade to ' - 'a newer version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - SNIMissingWarning - ) - - return context.wrap_socket(sock) - - -def is_ipaddress(hostname): - """Detects whether the hostname given is an IP address. - - :param str hostname: Hostname to examine. - :return: True if the hostname is an IP address, False otherwise. - """ - if six.PY3 and isinstance(hostname, bytes): - # IDN A-label bytes are ASCII compatible. 
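
create_urllib3_context() and ssl_wrap_socket() above are thin layers over the stdlib ssl module: the removed defaults disable SSLv2, SSLv3 and TLS compression, require certificate verification, and leave hostname checking to urllib3's own fingerprint/hostname logic. A stdlib-only sketch of the same hardening (illustrative, not the deleted function itself):

import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)  # negotiate best shared TLS version
ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_COMPRESSION
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.check_hostname = False   # urllib3 performed its own hostname checks
ctx.load_default_certs()     # fall back to the OS trust store, as above
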
- hostname = hostname.decode('ascii') - - families = [socket.AF_INET] - if hasattr(socket, 'AF_INET6'): - families.append(socket.AF_INET6) - - for af in families: - try: - inet_pton(af, hostname) - except (socket.error, ValueError, OSError): - pass - else: - return True - return False diff --git a/pype/vendor/urllib3/util/timeout.py b/pype/vendor/urllib3/util/timeout.py deleted file mode 100644 index cec817e6ef..0000000000 --- a/pype/vendor/urllib3/util/timeout.py +++ /dev/null @@ -1,242 +0,0 @@ -from __future__ import absolute_import -# The default socket timeout, used by httplib to indicate that no timeout was -# specified by the user -from socket import _GLOBAL_DEFAULT_TIMEOUT -import time - -from ..exceptions import TimeoutStateError - -# A sentinel value to indicate that no timeout was specified by the user in -# urllib3 -_Default = object() - - -# Use time.monotonic if available. -current_time = getattr(time, "monotonic", time.time) - - -class Timeout(object): - """ Timeout configuration. - - Timeouts can be defined as a default for a pool:: - - timeout = Timeout(connect=2.0, read=7.0) - http = PoolManager(timeout=timeout) - response = http.request('GET', 'http://example.com/') - - Or per-request (which overrides the default for the pool):: - - response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) - - Timeouts can be disabled by setting all the parameters to ``None``:: - - no_timeout = Timeout(connect=None, read=None) - response = http.request('GET', 'http://example.com/, timeout=no_timeout) - - - :param total: - This combines the connect and read timeouts into one; the read timeout - will be set to the time leftover from the connect attempt. In the - event that both a connect timeout and a total are specified, or a read - timeout and a total are specified, the shorter timeout will be applied. - - Defaults to None. - - :type total: integer, float, or None - - :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. Omitting the parameter will default the connect timeout to - the system default, probably `the global default timeout in socket.py - `_. - None will set an infinite timeout for connection attempts. - - :type connect: integer, float, or None - - :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably `the global default timeout in socket.py - `_. - None will set an infinite timeout. - - :type read: integer, float, or None - - .. note:: - - Many factors can affect the total amount of time for urllib3 to return - an HTTP response. - - For example, Python's DNS resolver does not obey the timeout specified - on the socket. Other factors that can affect total request time include - high CPU load, high swap, the program running at a low priority level, - or other behaviors. - - In addition, the read and total timeouts only measure the time between - read operations on the socket connecting the client and the server, - not the total amount of time for the request to return a complete - response. For most requests, the timeout is raised because the server - has not sent the first byte in the specified time. This is not always - the case; if a server streams one byte every fifteen seconds, a timeout - of 20 seconds will not trigger, even though the request will take - several minutes to complete. 
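
Timeout, whose docstring continues below, keeps separate connect and read budgets while total caps the combination. A usage sketch against the installed urllib3 package (assuming it still matches this 1.24-era API):

from urllib3.util.timeout import Timeout

t = Timeout(connect=2.0, read=7.0, total=8.0)
print(t.connect_timeout)      # 2.0, i.e. min(connect, total)

t.start_connect()             # begin charging time against total
print(t.read_timeout <= 7.0)  # True: whatever budget remains, at most read
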
- - If your goal is to cut off any request after a set amount of wall clock - time, consider having a second "watcher" thread to cut off a slow - request. - """ - - #: A sentinel object representing the default timeout value - DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT - - def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') - self._start_connect = None - - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) - - @classmethod - def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid. - - :param value: The timeout value to validate - :param name: The name of the timeout attribute to validate. This is - used to specify in error messages. - :return: The validated and casted version of the given value. - :raises ValueError: If it is a numeric value less than or equal to - zero, or the type is not an integer, float, or None. - """ - if value is _Default: - return cls.DEFAULT_TIMEOUT - - if value is None or value is cls.DEFAULT_TIMEOUT: - return value - - if isinstance(value, bool): - raise ValueError("Timeout cannot be a boolean value. It must " - "be an int, float or None.") - try: - float(value) - except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." % (name, value)) - - try: - if value <= 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than or equal to 0." % (name, value)) - except TypeError: # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int, float or None." % (name, value)) - - return value - - @classmethod - def from_float(cls, timeout): - """ Create a new Timeout from a legacy timeout value. - - The timeout value used by httplib.py sets the same timeout on the - connect(), and recv() socket requests. This creates a :class:`Timeout` - object that sets the individual timeouts to the ``timeout`` value - passed to this function. - - :param timeout: The legacy timeout value. - :type timeout: integer, float, sentinel default object, or None - :return: Timeout object - :rtype: :class:`Timeout` - """ - return Timeout(read=timeout, connect=timeout) - - def clone(self): - """ Create a copy of the timeout object - - Timeout properties are stored per-pool but each request needs a fresh - Timeout object to ensure each one has its own start/stop configured. - - :return: a copy of the timeout object - :rtype: :class:`Timeout` - """ - # We can't use copy.deepcopy because that will also create a new object - # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to - # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) - - def start_connect(self): - """ Start the timeout clock, used during a connect() attempt - - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to start a timer that has been started already. - """ - if self._start_connect is not None: - raise TimeoutStateError("Timeout timer has already been started.") - self._start_connect = current_time() - return self._start_connect - - def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. - - :return: Elapsed time. 
- :rtype: float - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to get duration for a timer that hasn't been started. - """ - if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") - return current_time() - self._start_connect - - @property - def connect_timeout(self): - """ Get the value to use when setting a connection timeout. - - This will be a positive float or integer, the value None - (never timeout), or the default system timeout. - - :return: Connect timeout. - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - """ - if self.total is None: - return self._connect - - if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: - return self.total - - return min(self._connect, self.total) - - @property - def read_timeout(self): - """ Get the value for the read timeout. - - This assumes some time has elapsed in the connection timeout and - computes the read timeout appropriately. - - If self.total is set, the read timeout is dependent on the amount of - time taken by the connect timeout. If the connection time has not been - established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be - raised. - - :return: Value to use for the read timeout. - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` - has not yet been called on this object. - """ - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): - # In case the connect timeout has not yet been established. - if self._start_connect is None: - return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) - elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: - return max(0, self.total - self.get_connect_duration()) - else: - return self._read diff --git a/pype/vendor/urllib3/util/url.py b/pype/vendor/urllib3/util/url.py deleted file mode 100644 index 6b6f9968d7..0000000000 --- a/pype/vendor/urllib3/util/url.py +++ /dev/null @@ -1,230 +0,0 @@ -from __future__ import absolute_import -from collections import namedtuple - -from ..exceptions import LocationParseError - - -url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] - -# We only want to normalize urls with an HTTP(S) scheme. -# urllib3 infers URLs without a scheme (None) to be http. -NORMALIZABLE_SCHEMES = ('http', 'https', None) - - -class Url(namedtuple('Url', url_attrs)): - """ - Datastructure for representing an HTTP URL. Used as a return value for - :func:`parse_url`. Both the scheme and host are normalized as they are - both case-insensitive according to RFC 3986. - """ - __slots__ = () - - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, - query=None, fragment=None): - if path and not path.startswith('/'): - path = '/' + path - if scheme: - scheme = scheme.lower() - if host and scheme in NORMALIZABLE_SCHEMES: - host = host.lower() - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, - query, fragment) - - @property - def hostname(self): - """For backwards-compatibility with urlparse. We're nice like that.""" - return self.host - - @property - def request_uri(self): - """Absolute path including the query string.""" - uri = self.path or '/' - - if self.query is not None: - uri += '?' 
+ self.query - - return uri - - @property - def netloc(self): - """Network location including host and port""" - if self.port: - return '%s:%d' % (self.host, self.port) - return self.host - - @property - def url(self): - """ - Convert self into a url - - This function should more or less round-trip with :func:`.parse_url`. The - returned url may not be exactly the same as the url inputted to - :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls - with a blank port will have : removed). - - Example: :: - - >>> U = parse_url('http://google.com/mail/') - >>> U.url - 'http://google.com/mail/' - >>> Url('http', 'username:password', 'host.com', 80, - ... '/path', 'query', 'fragment').url - 'http://username:password@host.com:80/path?query#fragment' - """ - scheme, auth, host, port, path, query, fragment = self - url = '' - - # We use "is not None" we want things to happen with empty strings (or 0 port) - if scheme is not None: - url += scheme + '://' - if auth is not None: - url += auth + '@' - if host is not None: - url += host - if port is not None: - url += ':' + str(port) - if path is not None: - url += path - if query is not None: - url += '?' + query - if fragment is not None: - url += '#' + fragment - - return url - - def __str__(self): - return self.url - - -def split_first(s, delims): - """ - Given a string and an iterable of delimiters, split on the first found - delimiter. Return two split parts and the matched delimiter. - - If not found, then the first part is the full input string. - - Example:: - - >>> split_first('foo/bar?baz', '?/=') - ('foo', 'bar?baz', '/') - >>> split_first('foo/bar?baz', '123') - ('foo/bar?baz', '', None) - - Scales linearly with number of delims. Not ideal for large number of delims. - """ - min_idx = None - min_delim = None - for d in delims: - idx = s.find(d) - if idx < 0: - continue - - if min_idx is None or idx < min_idx: - min_idx = idx - min_delim = d - - if min_idx is None or min_idx < 0: - return s, '', None - - return s[:min_idx], s[min_idx + 1:], min_delim - - -def parse_url(url): - """ - Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is - performed to parse incomplete urls. Fields not provided will be None. - - Partly backwards-compatible with :mod:`urlparse`. - - Example:: - - >>> parse_url('http://google.com/mail/') - Url(scheme='http', host='google.com', port=None, path='/mail/', ...) - >>> parse_url('google.com:80') - Url(scheme=None, host='google.com', port=80, path=None, ...) - >>> parse_url('/foo?bar') - Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) - """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementations does silly things to be optimal - # on CPython. 
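
For reference, the behaviour of the parsing helpers above, exercised through the installed urllib3 package that replaces this vendored copy (output assumes it still matches this 1.24-era parser; later urllib3 releases rewrote url.py):

from urllib3.util.url import parse_url, split_first

print(split_first('foo/bar?baz', '?/='))   # ('foo', 'bar?baz', '/')

u = parse_url('http://user@Example.COM:8080/path?q=1#frag')
print(u.host, u.port)    # example.com 8080, scheme and host lowercased
print(u.request_uri)     # /path?q=1
print(u.url)             # http://user@example.com:8080/path?q=1#frag
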
- - if not url: - # Empty - return Url() - - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. No whitespace, no plus or - # minus prefixes, no non-integer digits such as ^2 (superscript). - if not port.isdigit(): - raise LocationParseError(url) - try: - port = int(port) - except ValueError: - raise LocationParseError(url) - else: - # Blank ports are cool, too. (rfc3986#section-3.2.3) - port = None - - elif not host and url: - host = url - - if not path: - return Url(scheme, auth, host, port, path, query, fragment) - - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) - - # Query - if '?' in path: - path, query = path.split('?', 1) - - return Url(scheme, auth, host, port, path, query, fragment) - - -def get_host(url): - """ - Deprecated. Use :func:`parse_url` instead. - """ - p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port diff --git a/pype/vendor/urllib3/util/wait.py b/pype/vendor/urllib3/util/wait.py deleted file mode 100644 index 4db71bafd8..0000000000 --- a/pype/vendor/urllib3/util/wait.py +++ /dev/null @@ -1,150 +0,0 @@ -import errno -from functools import partial -import select -import sys -try: - from time import monotonic -except ImportError: - from time import time as monotonic - -__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] - - -class NoWayToWaitForSocketError(Exception): - pass - - -# How should we wait on sockets? -# -# There are two types of APIs you can use for waiting on sockets: the fancy -# modern stateful APIs like epoll/kqueue, and the older stateless APIs like -# select/poll. The stateful APIs are more efficient when you have a lots of -# sockets to keep track of, because you can set them up once and then use them -# lots of times. But we only ever want to wait on a single socket at a time -# and don't want to keep track of state, so the stateless APIs are actually -# more efficient. So we want to use select() or poll(). -# -# Now, how do we choose between select() and poll()? On traditional Unixes, -# select() has a strange calling convention that makes it slow, or fail -# altogether, for high-numbered file descriptors. The point of poll() is to fix -# that, so on Unixes, we prefer poll(). -# -# On Windows, there is no poll() (or at least Python doesn't provide a wrapper -# for it), but that's OK, because on Windows, select() doesn't have this -# strange calling convention; plain select() works fine. -# -# So: on Windows we use select(), and everywhere else we use poll(). We also -# fall back to select() in case poll() is somehow broken or missing. - -if sys.version_info >= (3, 5): - # Modern Python, that retries syscalls by default - def _retry_on_intr(fn, timeout): - return fn(timeout) -else: - # Old and broken Pythons. 
-    def _retry_on_intr(fn, timeout):
-        if timeout is None:
-            deadline = float("inf")
-        else:
-            deadline = monotonic() + timeout
-
-        while True:
-            try:
-                return fn(timeout)
-            # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
-            except (OSError, select.error) as e:
-                # 'e.args[0]' incantation works for both OSError and select.error
-                if e.args[0] != errno.EINTR:
-                    raise
-                else:
-                    timeout = deadline - monotonic()
-                    if timeout < 0:
-                        timeout = 0
-                    if timeout == float("inf"):
-                        timeout = None
-                    continue
-
-
-def select_wait_for_socket(sock, read=False, write=False, timeout=None):
-    if not read and not write:
-        raise RuntimeError("must specify at least one of read=True, write=True")
-    rcheck = []
-    wcheck = []
-    if read:
-        rcheck.append(sock)
-    if write:
-        wcheck.append(sock)
-    # When doing a non-blocking connect, most systems signal success by
-    # marking the socket writable. Windows, though, signals success by
-    # marking it as "exceptional". We paper over the difference by checking
-    # the write sockets for both conditions. (The stdlib selectors module
-    # does the same thing.)
-    fn = partial(select.select, rcheck, wcheck, wcheck)
-    rready, wready, xready = _retry_on_intr(fn, timeout)
-    return bool(rready or wready or xready)
-
-
-def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
-    if not read and not write:
-        raise RuntimeError("must specify at least one of read=True, write=True")
-    mask = 0
-    if read:
-        mask |= select.POLLIN
-    if write:
-        mask |= select.POLLOUT
-    poll_obj = select.poll()
-    poll_obj.register(sock, mask)
-
-    # For some reason, poll() takes timeout in milliseconds
-    def do_poll(t):
-        if t is not None:
-            t *= 1000
-        return poll_obj.poll(t)
-
-    return bool(_retry_on_intr(do_poll, timeout))
-
-
-def null_wait_for_socket(*args, **kwargs):
-    raise NoWayToWaitForSocketError("no select-equivalent available")
-
-
-def _have_working_poll():
-    # Apparently some systems have a select.poll that fails as soon as you try
-    # to use it, either due to strange configuration or broken monkeypatching
-    # from libraries like eventlet/greenlet.
-    try:
-        poll_obj = select.poll()
-        _retry_on_intr(poll_obj.poll, 0)
-    except (AttributeError, OSError):
-        return False
-    else:
-        return True
-
-
-def wait_for_socket(*args, **kwargs):
-    # We delay choosing which implementation to use until the first time we're
-    # called. We could do it at import time, but then we might make the wrong
-    # decision if someone goes wild with monkeypatching select.poll after
-    # we're imported.
-    global wait_for_socket
-    if _have_working_poll():
-        wait_for_socket = poll_wait_for_socket
-    elif hasattr(select, "select"):
-        wait_for_socket = select_wait_for_socket
-    else:  # Platform-specific: Appengine.
-        wait_for_socket = null_wait_for_socket
-    return wait_for_socket(*args, **kwargs)
-
-
-def wait_for_read(sock, timeout=None):
-    """ Waits for reading to be available on a given socket.
-    Returns True if the socket is readable, or False if the timeout expired.
-    """
-    return wait_for_socket(sock, read=True, timeout=timeout)
-
-
-def wait_for_write(sock, timeout=None):
-    """ Waits for writing to be available on a given socket.
-    Returns True if the socket is writable, or False if the timeout expired.
- """ - return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/pype/vendor/websocket/__init__.py b/pype/vendor/websocket/__init__.py deleted file mode 100644 index fe0c6f7bff..0000000000 --- a/pype/vendor/websocket/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -from ._abnf import * -from ._app import WebSocketApp -from ._core import * -from ._exceptions import * -from ._logging import * -from ._socket import * - -__version__ = "0.53.0" diff --git a/pype/vendor/websocket/_abnf.py b/pype/vendor/websocket/_abnf.py deleted file mode 100644 index a0000fa1c9..0000000000 --- a/pype/vendor/websocket/_abnf.py +++ /dev/null @@ -1,447 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -import array -import os -import struct - -import six - -from ._exceptions import * -from ._utils import validate_utf8 -from threading import Lock - -try: - if six.PY3: - import numpy - else: - numpy = None -except ImportError: - numpy = None - -try: - # If wsaccel is available we use compiled routines to mask data. - if not numpy: - from wsaccel.xormask import XorMaskerSimple - - def _mask(_m, _d): - return XorMaskerSimple(_m).process(_d) -except ImportError: - # wsaccel is not available, we rely on python implementations. - def _mask(_m, _d): - for i in range(len(_d)): - _d[i] ^= _m[i % 4] - - if six.PY3: - return _d.tobytes() - else: - return _d.tostring() - - -__all__ = [ - 'ABNF', 'continuous_frame', 'frame_buffer', - 'STATUS_NORMAL', - 'STATUS_GOING_AWAY', - 'STATUS_PROTOCOL_ERROR', - 'STATUS_UNSUPPORTED_DATA_TYPE', - 'STATUS_STATUS_NOT_AVAILABLE', - 'STATUS_ABNORMAL_CLOSED', - 'STATUS_INVALID_PAYLOAD', - 'STATUS_POLICY_VIOLATION', - 'STATUS_MESSAGE_TOO_BIG', - 'STATUS_INVALID_EXTENSION', - 'STATUS_UNEXPECTED_CONDITION', - 'STATUS_BAD_GATEWAY', - 'STATUS_TLS_HANDSHAKE_ERROR', -] - -# closing frame status codes. 
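
The _mask() fallback above implements the client-side masking required by RFC 6455 section 5.3: every payload byte is XORed with a four-byte key, repeating the key, so applying the same key twice restores the original. A standalone sketch (xor_mask is an illustrative name):

def xor_mask(mask_key, payload):
    # XOR each payload byte with the corresponding key byte; masking
    # and unmasking are the same operation.
    return bytes(b ^ mask_key[i % 4] for i, b in enumerate(payload))

key = b"\x01\x02\x03\x04"
masked = xor_mask(key, b"hello")
assert masked != b"hello"
assert xor_mask(key, masked) == b"hello"
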
-STATUS_NORMAL = 1000 -STATUS_GOING_AWAY = 1001 -STATUS_PROTOCOL_ERROR = 1002 -STATUS_UNSUPPORTED_DATA_TYPE = 1003 -STATUS_STATUS_NOT_AVAILABLE = 1005 -STATUS_ABNORMAL_CLOSED = 1006 -STATUS_INVALID_PAYLOAD = 1007 -STATUS_POLICY_VIOLATION = 1008 -STATUS_MESSAGE_TOO_BIG = 1009 -STATUS_INVALID_EXTENSION = 1010 -STATUS_UNEXPECTED_CONDITION = 1011 -STATUS_BAD_GATEWAY = 1014 -STATUS_TLS_HANDSHAKE_ERROR = 1015 - -VALID_CLOSE_STATUS = ( - STATUS_NORMAL, - STATUS_GOING_AWAY, - STATUS_PROTOCOL_ERROR, - STATUS_UNSUPPORTED_DATA_TYPE, - STATUS_INVALID_PAYLOAD, - STATUS_POLICY_VIOLATION, - STATUS_MESSAGE_TOO_BIG, - STATUS_INVALID_EXTENSION, - STATUS_UNEXPECTED_CONDITION, - STATUS_BAD_GATEWAY, -) - - -class ABNF(object): - """ - ABNF frame class. - see http://tools.ietf.org/html/rfc5234 - and http://tools.ietf.org/html/rfc6455#section-5.2 - """ - - # operation code values. - OPCODE_CONT = 0x0 - OPCODE_TEXT = 0x1 - OPCODE_BINARY = 0x2 - OPCODE_CLOSE = 0x8 - OPCODE_PING = 0x9 - OPCODE_PONG = 0xa - - # available operation code value tuple - OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE, - OPCODE_PING, OPCODE_PONG) - - # opcode human readable string - OPCODE_MAP = { - OPCODE_CONT: "cont", - OPCODE_TEXT: "text", - OPCODE_BINARY: "binary", - OPCODE_CLOSE: "close", - OPCODE_PING: "ping", - OPCODE_PONG: "pong" - } - - # data length threshold. - LENGTH_7 = 0x7e - LENGTH_16 = 1 << 16 - LENGTH_63 = 1 << 63 - - def __init__(self, fin=0, rsv1=0, rsv2=0, rsv3=0, - opcode=OPCODE_TEXT, mask=1, data=""): - """ - Constructor for ABNF. - please check RFC for arguments. - """ - self.fin = fin - self.rsv1 = rsv1 - self.rsv2 = rsv2 - self.rsv3 = rsv3 - self.opcode = opcode - self.mask = mask - if data is None: - data = "" - self.data = data - self.get_mask_key = os.urandom - - def validate(self, skip_utf8_validation=False): - """ - validate the ABNF frame. - skip_utf8_validation: skip utf8 validation. - """ - if self.rsv1 or self.rsv2 or self.rsv3: - raise WebSocketProtocolException("rsv is not implemented, yet") - - if self.opcode not in ABNF.OPCODES: - raise WebSocketProtocolException("Invalid opcode %r", self.opcode) - - if self.opcode == ABNF.OPCODE_PING and not self.fin: - raise WebSocketProtocolException("Invalid ping frame.") - - if self.opcode == ABNF.OPCODE_CLOSE: - l = len(self.data) - if not l: - return - if l == 1 or l >= 126: - raise WebSocketProtocolException("Invalid close frame.") - if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]): - raise WebSocketProtocolException("Invalid close frame.") - - code = 256 * \ - six.byte2int(self.data[0:1]) + six.byte2int(self.data[1:2]) - if not self._is_valid_close_status(code): - raise WebSocketProtocolException("Invalid close opcode.") - - @staticmethod - def _is_valid_close_status(code): - return code in VALID_CLOSE_STATUS or (3000 <= code < 5000) - - def __str__(self): - return "fin=" + str(self.fin) \ - + " opcode=" + str(self.opcode) \ - + " data=" + str(self.data) - - @staticmethod - def create_frame(data, opcode, fin=1): - """ - create frame to send text, binary and other data. - - data: data to send. This is string value(byte array). - if opcode is OPCODE_TEXT and this value is unicode, - data value is converted into unicode string, automatically. - - opcode: operation code. please see OPCODE_XXX. - - fin: fin flag. if set to 0, create continue fragmentation. 
- """ - if opcode == ABNF.OPCODE_TEXT and isinstance(data, six.text_type): - data = data.encode("utf-8") - # mask must be set if send data from client - return ABNF(fin, 0, 0, 0, opcode, 1, data) - - def format(self): - """ - format this object to string(byte array) to send data to server. - """ - if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]): - raise ValueError("not 0 or 1") - if self.opcode not in ABNF.OPCODES: - raise ValueError("Invalid OPCODE") - length = len(self.data) - if length >= ABNF.LENGTH_63: - raise ValueError("data is too long") - - frame_header = chr(self.fin << 7 - | self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4 - | self.opcode) - if length < ABNF.LENGTH_7: - frame_header += chr(self.mask << 7 | length) - frame_header = six.b(frame_header) - elif length < ABNF.LENGTH_16: - frame_header += chr(self.mask << 7 | 0x7e) - frame_header = six.b(frame_header) - frame_header += struct.pack("!H", length) - else: - frame_header += chr(self.mask << 7 | 0x7f) - frame_header = six.b(frame_header) - frame_header += struct.pack("!Q", length) - - if not self.mask: - return frame_header + self.data - else: - mask_key = self.get_mask_key(4) - return frame_header + self._get_masked(mask_key) - - def _get_masked(self, mask_key): - s = ABNF.mask(mask_key, self.data) - - if isinstance(mask_key, six.text_type): - mask_key = mask_key.encode('utf-8') - - return mask_key + s - - @staticmethod - def mask(mask_key, data): - """ - mask or unmask data. Just do xor for each byte - - mask_key: 4 byte string(byte). - - data: data to mask/unmask. - """ - if data is None: - data = "" - - if isinstance(mask_key, six.text_type): - mask_key = six.b(mask_key) - - if isinstance(data, six.text_type): - data = six.b(data) - - if numpy: - origlen = len(data) - _mask_key = mask_key[3] << 24 | mask_key[2] << 16 | mask_key[1] << 8 | mask_key[0] - - # We need data to be a multiple of four... - data += bytes(" " * (4 - (len(data) % 4)), "us-ascii") - a = numpy.frombuffer(data, dtype="uint32") - masked = numpy.bitwise_xor(a, [_mask_key]).astype("uint32") - if len(data) > origlen: - return masked.tobytes()[:origlen] - return masked.tobytes() - else: - _m = array.array("B", mask_key) - _d = array.array("B", data) - return _mask(_m, _d) - - -class frame_buffer(object): - _HEADER_MASK_INDEX = 5 - _HEADER_LENGTH_INDEX = 6 - - def __init__(self, recv_fn, skip_utf8_validation): - self.recv = recv_fn - self.skip_utf8_validation = skip_utf8_validation - # Buffers over the packets from the layer beneath until desired amount - # bytes of bytes are received. 
- self.recv_buffer = [] - self.clear() - self.lock = Lock() - - def clear(self): - self.header = None - self.length = None - self.mask = None - - def has_received_header(self): - return self.header is None - - def recv_header(self): - header = self.recv_strict(2) - b1 = header[0] - - if six.PY2: - b1 = ord(b1) - - fin = b1 >> 7 & 1 - rsv1 = b1 >> 6 & 1 - rsv2 = b1 >> 5 & 1 - rsv3 = b1 >> 4 & 1 - opcode = b1 & 0xf - b2 = header[1] - - if six.PY2: - b2 = ord(b2) - - has_mask = b2 >> 7 & 1 - length_bits = b2 & 0x7f - - self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits) - - def has_mask(self): - if not self.header: - return False - return self.header[frame_buffer._HEADER_MASK_INDEX] - - def has_received_length(self): - return self.length is None - - def recv_length(self): - bits = self.header[frame_buffer._HEADER_LENGTH_INDEX] - length_bits = bits & 0x7f - if length_bits == 0x7e: - v = self.recv_strict(2) - self.length = struct.unpack("!H", v)[0] - elif length_bits == 0x7f: - v = self.recv_strict(8) - self.length = struct.unpack("!Q", v)[0] - else: - self.length = length_bits - - def has_received_mask(self): - return self.mask is None - - def recv_mask(self): - self.mask = self.recv_strict(4) if self.has_mask() else "" - - def recv_frame(self): - - with self.lock: - # Header - if self.has_received_header(): - self.recv_header() - (fin, rsv1, rsv2, rsv3, opcode, has_mask, _) = self.header - - # Frame length - if self.has_received_length(): - self.recv_length() - length = self.length - - # Mask - if self.has_received_mask(): - self.recv_mask() - mask = self.mask - - # Payload - payload = self.recv_strict(length) - if has_mask: - payload = ABNF.mask(mask, payload) - - # Reset for next frame - self.clear() - - frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, has_mask, payload) - frame.validate(self.skip_utf8_validation) - - return frame - - def recv_strict(self, bufsize): - shortage = bufsize - sum(len(x) for x in self.recv_buffer) - while shortage > 0: - # Limit buffer size that we pass to socket.recv() to avoid - # fragmenting the heap -- the number of bytes recv() actually - # reads is limited by socket buffer and is relatively small, - # yet passing large numbers repeatedly causes lots of large - # buffers allocated and then shrunk, which results in - # fragmentation. 
- bytes_ = self.recv(min(16384, shortage)) - self.recv_buffer.append(bytes_) - shortage -= len(bytes_) - - unified = six.b("").join(self.recv_buffer) - - if shortage == 0: - self.recv_buffer = [] - return unified - else: - self.recv_buffer = [unified[bufsize:]] - return unified[:bufsize] - - -class continuous_frame(object): - - def __init__(self, fire_cont_frame, skip_utf8_validation): - self.fire_cont_frame = fire_cont_frame - self.skip_utf8_validation = skip_utf8_validation - self.cont_data = None - self.recving_frames = None - - def validate(self, frame): - if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT: - raise WebSocketProtocolException("Illegal frame") - if self.recving_frames and \ - frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY): - raise WebSocketProtocolException("Illegal frame") - - def add(self, frame): - if self.cont_data: - self.cont_data[1] += frame.data - else: - if frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY): - self.recving_frames = frame.opcode - self.cont_data = [frame.opcode, frame.data] - - if frame.fin: - self.recving_frames = None - - def is_fire(self, frame): - return frame.fin or self.fire_cont_frame - - def extract(self, frame): - data = self.cont_data - self.cont_data = None - frame.data = data[1] - if not self.fire_cont_frame and data[0] == ABNF.OPCODE_TEXT and not self.skip_utf8_validation and not validate_utf8(frame.data): - raise WebSocketPayloadException( - "cannot decode: " + repr(frame.data)) - - return [data[0], frame] diff --git a/pype/vendor/websocket/_app.py b/pype/vendor/websocket/_app.py deleted file mode 100644 index a281402c94..0000000000 --- a/pype/vendor/websocket/_app.py +++ /dev/null @@ -1,341 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" - -""" -WebSocketApp provides higher level APIs. -""" -import inspect -import select -import sys -import threading -import time -import traceback - -import six - -from ._abnf import ABNF -from ._core import WebSocket, getdefaulttimeout -from ._exceptions import * -from . 
import _logging - - -__all__ = ["WebSocketApp"] - -class Dispatcher: - def __init__(self, app, ping_timeout): - self.app = app - self.ping_timeout = ping_timeout - - def read(self, sock, read_callback, check_callback): - while self.app.sock.connected: - r, w, e = select.select( - (self.app.sock.sock, ), (), (), self.ping_timeout) - if r: - if not read_callback(): - break - check_callback() - -class SSLDispacther: - def __init__(self, app, ping_timeout): - self.app = app - self.ping_timeout = ping_timeout - - def read(self, sock, read_callback, check_callback): - while self.app.sock.connected: - r = self.select() - if r: - if not read_callback(): - break - check_callback() - - def select(self): - sock = self.app.sock.sock - if sock.pending(): - return [sock,] - - r, w, e = select.select((sock, ), (), (), self.ping_timeout) - return r - -class WebSocketApp(object): - """ - Higher level of APIs are provided. - The interface is like JavaScript WebSocket object. - """ - - def __init__(self, url, header=None, - on_open=None, on_message=None, on_error=None, - on_close=None, on_ping=None, on_pong=None, - on_cont_message=None, - keep_running=True, get_mask_key=None, cookie=None, - subprotocols=None, - on_data=None): - """ - url: websocket url. - header: custom header for websocket handshake. - on_open: callable object which is called at opening websocket. - this function has one argument. The argument is this class object. - on_message: callable object which is called when received data. - on_message has 2 arguments. - The 1st argument is this class object. - The 2nd argument is utf-8 string which we get from the server. - on_error: callable object which is called when we get error. - on_error has 2 arguments. - The 1st argument is this class object. - The 2nd argument is exception object. - on_close: callable object which is called when closed the connection. - this function has one argument. The argument is this class object. - on_cont_message: callback object which is called when receive continued - frame data. - on_cont_message has 3 arguments. - The 1st argument is this class object. - The 2nd argument is utf-8 string which we get from the server. - The 3rd argument is continue flag. if 0, the data continue - to next frame data - on_data: callback object which is called when a message received. - This is called before on_message or on_cont_message, - and then on_message or on_cont_message is called. - on_data has 4 argument. - The 1st argument is this class object. - The 2nd argument is utf-8 string which we get from the server. - The 3rd argument is data type. ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY will be came. - The 4th argument is continue flag. if 0, the data continue - keep_running: this parameter is obosleted and ignored it. - get_mask_key: a callable to produce new mask keys, - see the WebSocket.set_mask_key's docstring for more information - subprotocols: array of available sub protocols. default is None. - """ - self.url = url - self.header = header if header is not None else [] - self.cookie = cookie - - self.on_open = on_open - self.on_message = on_message - self.on_data = on_data - self.on_error = on_error - self.on_close = on_close - self.on_ping = on_ping - self.on_pong = on_pong - self.on_cont_message = on_cont_message - self.keep_running = False - self.get_mask_key = get_mask_key - self.sock = None - self.last_ping_tm = 0 - self.last_pong_tm = 0 - self.subprotocols = subprotocols - - def send(self, data, opcode=ABNF.OPCODE_TEXT): - """ - send message. - data: message to send. 
If you set opcode to OPCODE_TEXT, - data must be utf-8 string or unicode. - opcode: operation code of data. default is OPCODE_TEXT. - """ - - if not self.sock or self.sock.send(data, opcode) == 0: - raise WebSocketConnectionClosedException( - "Connection is already closed.") - - def close(self, **kwargs): - """ - close websocket connection. - """ - self.keep_running = False - if self.sock: - self.sock.close(**kwargs) - self.sock = None - - def _send_ping(self, interval, event): - while not event.wait(interval): - self.last_ping_tm = time.time() - if self.sock: - try: - self.sock.ping() - except Exception as ex: - _logging.warning("send_ping routine terminated: {}".format(ex)) - break - - def run_forever(self, sockopt=None, sslopt=None, - ping_interval=0, ping_timeout=None, - http_proxy_host=None, http_proxy_port=None, - http_no_proxy=None, http_proxy_auth=None, - skip_utf8_validation=False, - host=None, origin=None, dispatcher=None, - supress_origin = False): - """ - run event loop for WebSocket framework. - This loop is infinite loop and is alive during websocket is available. - sockopt: values for socket.setsockopt. - sockopt must be tuple - and each element is argument of sock.setsockopt. - sslopt: ssl socket optional dict. - ping_interval: automatically send "ping" command - every specified period(second) - if set to 0, not send automatically. - ping_timeout: timeout(second) if the pong message is not received. - http_proxy_host: http proxy host name. - http_proxy_port: http proxy port. If not set, set to 80. - http_no_proxy: host names, which doesn't use proxy. - skip_utf8_validation: skip utf8 validation. - host: update host header. - origin: update origin header. - dispatcher: customize reading data from socket. - supress_origin: suppress outputting origin header. 
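
# [Editor's note: illustration only, not from the diff. Typical WebSocketApp
# usage exercising the run_forever() options documented above; the echo
# endpoint is a placeholder. Note run_forever() rejects
# ping_interval <= ping_timeout.]
import websocket

def on_message(ws, message):
    # plain functions receive the app instance as the first argument
    print("received:", message)

app = websocket.WebSocketApp("ws://echo.websocket.org/", on_message=on_message)
app.run_forever(ping_interval=30, ping_timeout=10)
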
- """ - - if ping_timeout is not None and (not ping_timeout or ping_timeout <= 0): - ping_timeout = None - if ping_timeout and ping_interval and ping_interval <= ping_timeout: - raise WebSocketException("Ensure ping_interval > ping_timeout") - if sockopt is None: - sockopt = [] - if sslopt is None: - sslopt = {} - if self.sock: - raise WebSocketException("socket is already opened") - thread = None - close_frame = None - self.keep_running = True - self.last_ping_tm = 0 - self.last_pong_tm = 0 - - def teardown(): - if thread and thread.isAlive(): - event.set() - thread.join() - self.keep_running = False - if self.sock: - self.sock.close() - close_args = self._get_close_args( - close_frame.data if close_frame else None) - self._callback(self.on_close, *close_args) - self.sock = None - - try: - self.sock = WebSocket( - self.get_mask_key, sockopt=sockopt, sslopt=sslopt, - fire_cont_frame=self.on_cont_message and True or False, - skip_utf8_validation=skip_utf8_validation, - enable_multithread=True if ping_interval else False) - self.sock.settimeout(getdefaulttimeout()) - self.sock.connect( - self.url, header=self.header, cookie=self.cookie, - http_proxy_host=http_proxy_host, - http_proxy_port=http_proxy_port, http_no_proxy=http_no_proxy, - http_proxy_auth=http_proxy_auth, subprotocols=self.subprotocols, - host=host, origin=origin, supress_origin = supress_origin) - if not dispatcher: - dispatcher = self.create_dispatcher(ping_timeout) - - self._callback(self.on_open) - - if ping_interval: - event = threading.Event() - thread = threading.Thread( - target=self._send_ping, args=(ping_interval, event)) - thread.setDaemon(True) - thread.start() - - def read(): - if not self.keep_running: - return teardown() - - op_code, frame = self.sock.recv_data_frame(True) - if op_code == ABNF.OPCODE_CLOSE: - close_frame = frame - return teardown() - elif op_code == ABNF.OPCODE_PING: - self._callback(self.on_ping, frame.data) - elif op_code == ABNF.OPCODE_PONG: - self.last_pong_tm = time.time() - self._callback(self.on_pong, frame.data) - elif op_code == ABNF.OPCODE_CONT and self.on_cont_message: - self._callback(self.on_data, frame.data, - frame.opcode, frame.fin) - self._callback(self.on_cont_message, - frame.data, frame.fin) - else: - data = frame.data - if six.PY3 and op_code == ABNF.OPCODE_TEXT: - data = data.decode("utf-8") - self._callback(self.on_data, data, frame.opcode, True) - self._callback(self.on_message, data) - - return True - - def check(): - if (ping_timeout): - has_timeout_expired = time.time() - self.last_ping_tm > ping_timeout - has_pong_not_arrived_after_last_ping = self.last_pong_tm - self.last_ping_tm < 0 - has_pong_arrived_too_late = self.last_pong_tm - self.last_ping_tm > ping_timeout - - if (self.last_ping_tm - and has_timeout_expired - and (has_pong_not_arrived_after_last_ping or has_pong_arrived_too_late)): - raise WebSocketTimeoutException("ping/pong timed out") - return True - - dispatcher.read(self.sock.sock, read, check) - except (Exception, KeyboardInterrupt, SystemExit) as e: - self._callback(self.on_error, e) - if isinstance(e, SystemExit): - # propagate SystemExit further - raise - teardown() - - def create_dispatcher(self, ping_timeout): - timeout = ping_timeout or 10 - if self.sock.is_ssl(): - return SSLDispacther(self, timeout) - - return Dispatcher(self, timeout) - - def _get_close_args(self, data): - """ this functions extracts the code, reason from the close body - if they exists, and if the self.on_close except three arguments """ - # if the on_close callback is "old", 
just return empty list - if sys.version_info < (3, 0): - if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3: - return [] - else: - if not self.on_close or len(inspect.getfullargspec(self.on_close).args) != 3: - return [] - - if data and len(data) >= 2: - code = 256 * six.byte2int(data[0:1]) + six.byte2int(data[1:2]) - reason = data[2:].decode('utf-8') - return [code, reason] - - return [None, None] - - def _callback(self, callback, *args): - if callback: - try: - if inspect.ismethod(callback): - callback(*args) - else: - callback(self, *args) - - except Exception as e: - _logging.error("error from callback {}: {}".format(callback, e)) - if _logging.isEnabledForDebug(): - _, _, tb = sys.exc_info() - traceback.print_tb(tb) diff --git a/pype/vendor/websocket/_cookiejar.py b/pype/vendor/websocket/_cookiejar.py deleted file mode 100644 index 3efeb0fd2f..0000000000 --- a/pype/vendor/websocket/_cookiejar.py +++ /dev/null @@ -1,52 +0,0 @@ -try: - import Cookie -except: - import http.cookies as Cookie - - -class SimpleCookieJar(object): - def __init__(self): - self.jar = dict() - - def add(self, set_cookie): - if set_cookie: - try: - simpleCookie = Cookie.SimpleCookie(set_cookie) - except: - simpleCookie = Cookie.SimpleCookie(set_cookie.encode('ascii', 'ignore')) - - for k, v in simpleCookie.items(): - domain = v.get("domain") - if domain: - if not domain.startswith("."): - domain = "." + domain - cookie = self.jar.get(domain) if self.jar.get(domain) else Cookie.SimpleCookie() - cookie.update(simpleCookie) - self.jar[domain.lower()] = cookie - - def set(self, set_cookie): - if set_cookie: - try: - simpleCookie = Cookie.SimpleCookie(set_cookie) - except: - simpleCookie = Cookie.SimpleCookie(set_cookie.encode('ascii', 'ignore')) - - for k, v in simpleCookie.items(): - domain = v.get("domain") - if domain: - if not domain.startswith("."): - domain = "." + domain - self.jar[domain.lower()] = simpleCookie - - def get(self, host): - if not host: - return "" - - cookies = [] - for domain, simpleCookie in self.jar.items(): - host = host.lower() - if host.endswith(domain) or host == domain[1:]: - cookies.append(self.jar.get(domain)) - - return "; ".join(filter(None, ["%s=%s" % (k, v.value) for cookie in filter(None, sorted(cookies)) for k, v in - sorted(cookie.items())])) diff --git a/pype/vendor/websocket/_core.py b/pype/vendor/websocket/_core.py deleted file mode 100644 index aea16e6a02..0000000000 --- a/pype/vendor/websocket/_core.py +++ /dev/null @@ -1,503 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -from __future__ import print_function - -import socket -import struct -import threading -import time - -import six - -# websocket modules -from ._abnf import * -from ._exceptions import * -from ._handshake import * -from ._http import * -from ._logging import * -from ._socket import * -from ._ssl_compat import * -from ._utils import * - -__all__ = ['WebSocket', 'create_connection'] - -""" -websocket python client. -========================= - -This version support only hybi-13. -Please see http://tools.ietf.org/html/rfc6455 for protocol. -""" - - -class WebSocket(object): - """ - Low level WebSocket interface. - This class is based on - The WebSocket protocol draft-hixie-thewebsocketprotocol-76 - http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 - - We can connect to the websocket server and send/receive data. - The following example is an echo client. - - >>> import websocket - >>> ws = websocket.WebSocket() - >>> ws.connect("ws://echo.websocket.org") - >>> ws.send("Hello, Server") - >>> ws.recv() - 'Hello, Server' - >>> ws.close() - - get_mask_key: a callable to produce new mask keys, see the set_mask_key - function's docstring for more details - sockopt: values for socket.setsockopt. - sockopt must be tuple and each element is argument of sock.setsockopt. - sslopt: dict object for ssl socket option. - fire_cont_frame: fire recv event for each cont frame. default is False - enable_multithread: if set to True, lock send method. - skip_utf8_validation: skip utf8 validation. - """ - - def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, - fire_cont_frame=False, enable_multithread=False, - skip_utf8_validation=False, **_): - """ - Initialize WebSocket object. - """ - self.sock_opt = sock_opt(sockopt, sslopt) - self.handshake_response = None - self.sock = None - - self.connected = False - self.get_mask_key = get_mask_key - # These buffer over the build-up of a single frame. - self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) - self.cont_frame = continuous_frame( - fire_cont_frame, skip_utf8_validation) - - if enable_multithread: - self.lock = threading.Lock() - self.readlock = threading.Lock() - else: - self.lock = NoLock() - self.readlock = NoLock() - - def __iter__(self): - """ - Allow iteration over websocket, implying sequential `recv` executions. - """ - while True: - yield self.recv() - - def __next__(self): - return self.recv() - - def next(self): - return self.__next__() - - def fileno(self): - return self.sock.fileno() - - def set_mask_key(self, func): - """ - set function to create musk key. You can customize mask key generator. - Mainly, this is for testing purpose. - - func: callable object. the func takes 1 argument as integer. - The argument means length of mask key. - This func must return string(byte array), - which length is argument specified. - """ - self.get_mask_key = func - - def gettimeout(self): - """ - Get the websocket timeout(second). - """ - return self.sock_opt.timeout - - def settimeout(self, timeout): - """ - Set the timeout to the websocket. - - timeout: timeout time(second). 
- """ - self.sock_opt.timeout = timeout - if self.sock: - self.sock.settimeout(timeout) - - timeout = property(gettimeout, settimeout) - - def getsubprotocol(self): - """ - get subprotocol - """ - if self.handshake_response: - return self.handshake_response.subprotocol - else: - return None - - subprotocol = property(getsubprotocol) - - def getstatus(self): - """ - get handshake status - """ - if self.handshake_response: - return self.handshake_response.status - else: - return None - - status = property(getstatus) - - def getheaders(self): - """ - get handshake response header - """ - if self.handshake_response: - return self.handshake_response.headers - else: - return None - - def is_ssl(self): - return isinstance(self.sock, ssl.SSLSocket) - - headers = property(getheaders) - - def connect(self, url, **options): - """ - Connect to url. url is websocket url scheme. - ie. ws://host:port/resource - You can customize using 'options'. - If you set "header" list object, you can set your own custom header. - - >>> ws = WebSocket() - >>> ws.connect("ws://echo.websocket.org/", - ... header=["User-Agent: MyProgram", - ... "x-custom: header"]) - - timeout: socket timeout time. This value is integer. - if you set None for this value, - it means "use default_timeout value" - - options: "header" -> custom http header list or dict. - "cookie" -> cookie value. - "origin" -> custom origin url. - "supress_origin" -> suppress outputting origin header. - "host" -> custom host header string. - "http_proxy_host" - http proxy host name. - "http_proxy_port" - http proxy port. If not set, set to 80. - "http_no_proxy" - host names, which doesn't use proxy. - "http_proxy_auth" - http proxy auth information. - tuple of username and password. - default is None - "subprotocols" - array of available sub protocols. - default is None. - "socket" - pre-initialized stream socket. - - """ - self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), - options.pop('socket', None)) - - try: - self.handshake_response = handshake(self.sock, *addrs, **options) - self.connected = True - except: - if self.sock: - self.sock.close() - self.sock = None - raise - - def send(self, payload, opcode=ABNF.OPCODE_TEXT): - """ - Send the data as string. - - payload: Payload must be utf-8 string or unicode, - if the opcode is OPCODE_TEXT. - Otherwise, it must be string(byte array) - - opcode: operation code to send. Please see OPCODE_XXX. - """ - - frame = ABNF.create_frame(payload, opcode) - return self.send_frame(frame) - - def send_frame(self, frame): - """ - Send the data frame. - - frame: frame data created by ABNF.create_frame - - >>> ws = create_connection("ws://echo.websocket.org/") - >>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT) - >>> ws.send_frame(frame) - >>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0) - >>> ws.send_frame(frame) - >>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1) - >>> ws.send_frame(frame) - - """ - if self.get_mask_key: - frame.get_mask_key = self.get_mask_key - data = frame.format() - length = len(data) - trace("send: " + repr(data)) - - with self.lock: - while data: - l = self._send(data) - data = data[l:] - - return length - - def send_binary(self, payload): - return self.send(payload, ABNF.OPCODE_BINARY) - - def ping(self, payload=""): - """ - send ping data. - - payload: data payload to send server. 
- """ - if isinstance(payload, six.text_type): - payload = payload.encode("utf-8") - self.send(payload, ABNF.OPCODE_PING) - - def pong(self, payload): - """ - send pong data. - - payload: data payload to send server. - """ - if isinstance(payload, six.text_type): - payload = payload.encode("utf-8") - self.send(payload, ABNF.OPCODE_PONG) - - def recv(self): - """ - Receive string data(byte array) from the server. - - return value: string(byte array) value. - """ - with self.readlock: - opcode, data = self.recv_data() - if six.PY3 and opcode == ABNF.OPCODE_TEXT: - return data.decode("utf-8") - elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: - return data - else: - return '' - - def recv_data(self, control_frame=False): - """ - Receive data with operation code. - - control_frame: a boolean flag indicating whether to return control frame - data, defaults to False - - return value: tuple of operation code and string(byte array) value. - """ - opcode, frame = self.recv_data_frame(control_frame) - return opcode, frame.data - - def recv_data_frame(self, control_frame=False): - """ - Receive data with operation code. - - control_frame: a boolean flag indicating whether to return control frame - data, defaults to False - - return value: tuple of operation code and string(byte array) value. - """ - while True: - frame = self.recv_frame() - if not frame: - # handle error: - # 'NoneType' object has no attribute 'opcode' - raise WebSocketProtocolException( - "Not a valid frame %s" % frame) - elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): - self.cont_frame.validate(frame) - self.cont_frame.add(frame) - - if self.cont_frame.is_fire(frame): - return self.cont_frame.extract(frame) - - elif frame.opcode == ABNF.OPCODE_CLOSE: - self.send_close() - return frame.opcode, frame - elif frame.opcode == ABNF.OPCODE_PING: - if len(frame.data) < 126: - self.pong(frame.data) - else: - raise WebSocketProtocolException( - "Ping message is too long") - if control_frame: - return frame.opcode, frame - elif frame.opcode == ABNF.OPCODE_PONG: - if control_frame: - return frame.opcode, frame - - def recv_frame(self): - """ - receive data as frame from server. - - return value: ABNF frame object. - """ - return self.frame_buffer.recv_frame() - - def send_close(self, status=STATUS_NORMAL, reason=six.b("")): - """ - send close data to the server. - - status: status code to send. see STATUS_XXX. - - reason: the reason to close. This must be string or bytes. - """ - if status < 0 or status >= ABNF.LENGTH_16: - raise ValueError("code is invalid range") - self.connected = False - self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) - - def close(self, status=STATUS_NORMAL, reason=six.b(""), timeout=3): - """ - Close Websocket object - - status: status code to send. see STATUS_XXX. - - reason: the reason to close. This must be string. - - timeout: timeout until receive a close frame. - If None, it will wait forever until receive a close frame. 
- """ - if self.connected: - if status < 0 or status >= ABNF.LENGTH_16: - raise ValueError("code is invalid range") - - try: - self.connected = False - self.send(struct.pack('!H', status) + - reason, ABNF.OPCODE_CLOSE) - sock_timeout = self.sock.gettimeout() - self.sock.settimeout(timeout) - start_time = time.time() - while timeout is None or time.time() - start_time < timeout: - try: - frame = self.recv_frame() - if frame.opcode != ABNF.OPCODE_CLOSE: - continue - if isEnabledForError(): - recv_status = struct.unpack("!H", frame.data[0:2])[0] - if recv_status != STATUS_NORMAL: - error("close status: " + repr(recv_status)) - break - except: - break - self.sock.settimeout(sock_timeout) - self.sock.shutdown(socket.SHUT_RDWR) - except: - pass - - self.shutdown() - - def abort(self): - """ - Low-level asynchronous abort, wakes up other threads that are waiting in recv_* - """ - if self.connected: - self.sock.shutdown(socket.SHUT_RDWR) - - def shutdown(self): - """close socket, immediately.""" - if self.sock: - self.sock.close() - self.sock = None - self.connected = False - - def _send(self, data): - return send(self.sock, data) - - def _recv(self, bufsize): - try: - return recv(self.sock, bufsize) - except WebSocketConnectionClosedException: - if self.sock: - self.sock.close() - self.sock = None - self.connected = False - raise - - -def create_connection(url, timeout=None, class_=WebSocket, **options): - """ - connect to url and return websocket object. - - Connect to url and return the WebSocket object. - Passing optional timeout parameter will set the timeout on the socket. - If no timeout is supplied, - the global default timeout setting returned by getdefauttimeout() is used. - You can customize using 'options'. - If you set "header" list object, you can set your own custom header. - - >>> conn = create_connection("ws://echo.websocket.org/", - ... header=["User-Agent: MyProgram", - ... "x-custom: header"]) - - - timeout: socket timeout time. This value is integer. - if you set None for this value, - it means "use default_timeout value" - - class_: class to instantiate when creating the connection. It has to implement - settimeout and connect. It's __init__ should be compatible with - WebSocket.__init__, i.e. accept all of it's kwargs. - options: "header" -> custom http header list or dict. - "cookie" -> cookie value. - "origin" -> custom origin url. - "supress_origin" -> suppress outputting origin header. - "host" -> custom host header string. - "http_proxy_host" - http proxy host name. - "http_proxy_port" - http proxy port. If not set, set to 80. - "http_no_proxy" - host names, which doesn't use proxy. - "http_proxy_auth" - http proxy auth information. - tuple of username and password. - default is None - "enable_multithread" -> enable lock for multithread. - "sockopt" -> socket options - "sslopt" -> ssl option - "subprotocols" - array of available sub protocols. - default is None. - "skip_utf8_validation" - skip utf8 validation. - "socket" - pre-initialized stream socket. 
- """ - sockopt = options.pop("sockopt", []) - sslopt = options.pop("sslopt", {}) - fire_cont_frame = options.pop("fire_cont_frame", False) - enable_multithread = options.pop("enable_multithread", False) - skip_utf8_validation = options.pop("skip_utf8_validation", False) - websock = class_(sockopt=sockopt, sslopt=sslopt, - fire_cont_frame=fire_cont_frame, - enable_multithread=enable_multithread, - skip_utf8_validation=skip_utf8_validation, **options) - websock.settimeout(timeout if timeout is not None else getdefaulttimeout()) - websock.connect(url, **options) - return websock diff --git a/pype/vendor/websocket/_exceptions.py b/pype/vendor/websocket/_exceptions.py deleted file mode 100644 index ee08880837..0000000000 --- a/pype/vendor/websocket/_exceptions.py +++ /dev/null @@ -1,86 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" - - -""" -define websocket exceptions -""" - - -class WebSocketException(Exception): - """ - websocket exception class. - """ - pass - - -class WebSocketProtocolException(WebSocketException): - """ - If the websocket protocol is invalid, this exception will be raised. - """ - pass - - -class WebSocketPayloadException(WebSocketException): - """ - If the websocket payload is invalid, this exception will be raised. - """ - pass - - -class WebSocketConnectionClosedException(WebSocketException): - """ - If remote host closed the connection or some network error happened, - this exception will be raised. - """ - pass - - -class WebSocketTimeoutException(WebSocketException): - """ - WebSocketTimeoutException will be raised at socket timeout during read/write data. - """ - pass - - -class WebSocketProxyException(WebSocketException): - """ - WebSocketProxyException will be raised when proxy error occurred. - """ - pass - - -class WebSocketBadStatusException(WebSocketException): - """ - WebSocketBadStatusException will be raised when we get bad handshake status code. - """ - - def __init__(self, message, status_code, status_message=None): - msg = message % (status_code, status_message) - super(WebSocketBadStatusException, self).__init__(msg) - self.status_code = status_code - -class WebSocketAddressException(WebSocketException): - """ - If the websocket address info cannot be found, this exception will be raised. 
- """ - pass diff --git a/pype/vendor/websocket/_handshake.py b/pype/vendor/websocket/_handshake.py deleted file mode 100644 index d9dae47967..0000000000 --- a/pype/vendor/websocket/_handshake.py +++ /dev/null @@ -1,185 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -import hashlib -import hmac -import os - -import six - -from ._cookiejar import SimpleCookieJar -from ._exceptions import * -from ._http import * -from ._logging import * -from ._socket import * - -if six.PY3: - from base64 import encodebytes as base64encode -else: - from base64 import encodestring as base64encode - -__all__ = ["handshake_response", "handshake"] - -if hasattr(hmac, "compare_digest"): - compare_digest = hmac.compare_digest -else: - def compare_digest(s1, s2): - return s1 == s2 - -# websocket supported version. -VERSION = 13 - -CookieJar = SimpleCookieJar() - - -class handshake_response(object): - - def __init__(self, status, headers, subprotocol): - self.status = status - self.headers = headers - self.subprotocol = subprotocol - CookieJar.add(headers.get("set-cookie")) - - -def handshake(sock, hostname, port, resource, **options): - headers, key = _get_handshake_headers(resource, hostname, port, options) - - header_str = "\r\n".join(headers) - send(sock, header_str) - dump("request header", header_str) - - status, resp = _get_resp_headers(sock) - success, subproto = _validate(resp, key, options.get("subprotocols")) - if not success: - raise WebSocketException("Invalid WebSocket Header") - - return handshake_response(status, resp, subproto) - -def _pack_hostname(hostname): - # IPv6 address - if ':' in hostname: - return '[' + hostname + ']' - - return hostname - -def _get_handshake_headers(resource, host, port, options): - headers = [ - "GET %s HTTP/1.1" % resource, - "Upgrade: websocket", - "Connection: Upgrade" - ] - if port == 80 or port == 443: - hostport = _pack_hostname(host) - else: - hostport = "%s:%d" % (_pack_hostname(host), port) - - if "host" in options and options["host"] is not None: - headers.append("Host: %s" % options["host"]) - else: - headers.append("Host: %s" % hostport) - - if "suppress_origin" not in options or not options["suppress_origin"]: - if "origin" in options and options["origin"] is not None: - headers.append("Origin: %s" % options["origin"]) - else: - headers.append("Origin: http://%s" % hostport) - - key = _create_sec_websocket_key() - headers.append("Sec-WebSocket-Key: %s" % key) - headers.append("Sec-WebSocket-Version: %s" % VERSION) - - subprotocols = options.get("subprotocols") - if subprotocols: - headers.append("Sec-WebSocket-Protocol: %s" % ",".join(subprotocols)) - - if "header" in options: - header = options["header"] - if isinstance(header, dict): - header = [ - ": ".join([k, v]) - for k, v 
in header.items() - if v is not None - ] - headers.extend(header) - - server_cookie = CookieJar.get(host) - client_cookie = options.get("cookie", None) - - cookie = "; ".join(filter(None, [server_cookie, client_cookie])) - - if cookie: - headers.append("Cookie: %s" % cookie) - - headers.append("") - headers.append("") - - return headers, key - - -def _get_resp_headers(sock, success_status=101): - status, resp_headers, status_message = read_headers(sock) - if status != success_status: - raise WebSocketBadStatusException("Handshake status %d %s", status, status_message) - return status, resp_headers - -_HEADERS_TO_CHECK = { - "upgrade": "websocket", - "connection": "upgrade", -} - - -def _validate(headers, key, subprotocols): - subproto = None - for k, v in _HEADERS_TO_CHECK.items(): - r = headers.get(k, None) - if not r: - return False, None - r = r.lower() - if v != r: - return False, None - - if subprotocols: - subproto = headers.get("sec-websocket-protocol", None).lower() - if not subproto or subproto not in [s.lower() for s in subprotocols]: - error("Invalid subprotocol: " + str(subprotocols)) - return False, None - - result = headers.get("sec-websocket-accept", None) - if not result: - return False, None - result = result.lower() - - if isinstance(result, six.text_type): - result = result.encode('utf-8') - - value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8') - hashed = base64encode(hashlib.sha1(value).digest()).strip().lower() - success = compare_digest(hashed, result) - - if success: - return True, subproto - else: - return False, None - - -def _create_sec_websocket_key(): - randomness = os.urandom(16) - return base64encode(randomness).decode('utf-8').strip() diff --git a/pype/vendor/websocket/_http.py b/pype/vendor/websocket/_http.py deleted file mode 100644 index 8d82521cf4..0000000000 --- a/pype/vendor/websocket/_http.py +++ /dev/null @@ -1,319 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -import errno -import os -import socket -import sys - -import six - -from ._exceptions import * -from ._logging import * -from ._socket import* -from ._ssl_compat import * -from ._url import * - -if six.PY3: - from base64 import encodebytes as base64encode -else: - from base64 import encodestring as base64encode - -__all__ = ["proxy_info", "connect", "read_headers"] - -try: - import socks - ProxyConnectionError = socks.ProxyConnectionError - HAS_PYSOCKS = True -except: - class ProxyConnectionError(BaseException): - pass - HAS_PYSOCKS = False - -class proxy_info(object): - - def __init__(self, **options): - self.type = options.get("proxy_type", "http") - if not(self.type in ['http', 'socks4', 'socks5', 'socks5h']): - raise ValueError("proxy_type must be 'http', 'socks4', 'socks5' or 'socks5h'") - self.host = options.get("http_proxy_host", None) - if self.host: - self.port = options.get("http_proxy_port", 0) - self.auth = options.get("http_proxy_auth", None) - self.no_proxy = options.get("http_no_proxy", None) - else: - self.port = 0 - self.auth = None - self.no_proxy = None - -def _open_proxied_socket(url, options, proxy): - hostname, port, resource, is_secure = parse_url(url) - - if not HAS_PYSOCKS: - raise WebSocketException("PySocks module not found.") - - ptype = socks.SOCKS5 - rdns = False - if proxy.type == "socks4": - ptype = socks.SOCKS4 - if proxy.type == "http": - ptype = socks.HTTP - if proxy.type[-1] == "h": - rdns = True - - sock = socks.create_connection( - (hostname, port), - proxy_type = ptype, - proxy_addr = proxy.host, - proxy_port = proxy.port, - proxy_rdns = rdns, - proxy_username = proxy.auth[0] if proxy.auth else None, - proxy_password = proxy.auth[1] if proxy.auth else None, - timeout = options.timeout, - socket_options = DEFAULT_SOCKET_OPTION + options.sockopt - ) - - if is_secure: - if HAVE_SSL: - sock = _ssl_socket(sock, options.sslopt, hostname) - else: - raise WebSocketException("SSL not available.") - - return sock, (hostname, port, resource) - - -def connect(url, options, proxy, socket): - if proxy.host and not socket and not(proxy.type == 'http'): - return _open_proxied_socket(url, options, proxy) - - hostname, port, resource, is_secure = parse_url(url) - - if socket: - return socket, (hostname, port, resource) - - addrinfo_list, need_tunnel, auth = _get_addrinfo_list( - hostname, port, is_secure, proxy) - if not addrinfo_list: - raise WebSocketException( - "Host not found.: " + hostname + ":" + str(port)) - - sock = None - try: - sock = _open_socket(addrinfo_list, options.sockopt, options.timeout) - if need_tunnel: - sock = _tunnel(sock, hostname, port, auth) - - if is_secure: - if HAVE_SSL: - sock = _ssl_socket(sock, options.sslopt, hostname) - else: - raise WebSocketException("SSL not available.") - - return sock, (hostname, port, resource) - except: - if sock: - sock.close() - raise - - -def _get_addrinfo_list(hostname, port, is_secure, proxy): - phost, pport, pauth = get_proxy_info( - hostname, is_secure, proxy.host, proxy.port, proxy.auth, proxy.no_proxy) - try: - if not phost: - addrinfo_list = socket.getaddrinfo( - hostname, port, 0, 0, socket.SOL_TCP) - return addrinfo_list, False, None - else: - pport = pport and pport or 80 - # when running on windows 10, the getaddrinfo used above - # returns a socktype 0. 
This generates an error exception: - #_on_error: exception Socket type must be stream or datagram, not 0 - # Force the socket type to SOCK_STREAM - addrinfo_list = socket.getaddrinfo(phost, pport, 0, socket.SOCK_STREAM, socket.SOL_TCP) - return addrinfo_list, True, pauth - except socket.gaierror as e: - raise WebSocketAddressException(e) - - -def _open_socket(addrinfo_list, sockopt, timeout): - err = None - for addrinfo in addrinfo_list: - family, socktype, proto = addrinfo[:3] - sock = socket.socket(family, socktype, proto) - sock.settimeout(timeout) - for opts in DEFAULT_SOCKET_OPTION: - sock.setsockopt(*opts) - for opts in sockopt: - sock.setsockopt(*opts) - - address = addrinfo[4] - try: - sock.connect(address) - err = None - except ProxyConnectionError as error: - err = WebSocketProxyException(str(error)) - err.remote_ip = str(address[0]) - continue - except socket.error as error: - error.remote_ip = str(address[0]) - try: - eConnRefused = (errno.ECONNREFUSED, errno.WSAECONNREFUSED) - except: - eConnRefused = (errno.ECONNREFUSED, ) - if error.errno in eConnRefused: - err = error - continue - else: - raise error - else: - break - else: - raise err - - return sock - - -def _can_use_sni(): - return six.PY2 and sys.version_info >= (2, 7, 9) or sys.version_info >= (3, 2) - - -def _wrap_sni_socket(sock, sslopt, hostname, check_hostname): - context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_SSLv23)) - - if sslopt.get('cert_reqs', ssl.CERT_NONE) != ssl.CERT_NONE: - cafile = sslopt.get('ca_certs', None) - capath = sslopt.get('ca_cert_path', None) - if cafile or capath: - context.load_verify_locations(cafile=cafile, capath=capath) - elif hasattr(context, 'load_default_certs'): - context.load_default_certs(ssl.Purpose.SERVER_AUTH) - if sslopt.get('certfile', None): - context.load_cert_chain( - sslopt['certfile'], - sslopt.get('keyfile', None), - sslopt.get('password', None), - ) - # see - # https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153 - context.verify_mode = sslopt['cert_reqs'] - if HAVE_CONTEXT_CHECK_HOSTNAME: - context.check_hostname = check_hostname - if 'ciphers' in sslopt: - context.set_ciphers(sslopt['ciphers']) - if 'cert_chain' in sslopt: - certfile, keyfile, password = sslopt['cert_chain'] - context.load_cert_chain(certfile, keyfile, password) - if 'ecdh_curve' in sslopt: - context.set_ecdh_curve(sslopt['ecdh_curve']) - - return context.wrap_socket( - sock, - do_handshake_on_connect=sslopt.get('do_handshake_on_connect', True), - suppress_ragged_eofs=sslopt.get('suppress_ragged_eofs', True), - server_hostname=hostname, - ) - - -def _ssl_socket(sock, user_sslopt, hostname): - sslopt = dict(cert_reqs=ssl.CERT_REQUIRED) - sslopt.update(user_sslopt) - - certPath = os.environ.get('WEBSOCKET_CLIENT_CA_BUNDLE') - if certPath and os.path.isfile(certPath) \ - and user_sslopt.get('ca_certs', None) is None \ - and user_sslopt.get('ca_cert', None) is None: - sslopt['ca_certs'] = certPath - elif certPath and os.path.isdir(certPath) \ - and user_sslopt.get('ca_cert_path', None) is None: - sslopt['ca_cert_path'] = certPath - - check_hostname = sslopt["cert_reqs"] != ssl.CERT_NONE and sslopt.pop( - 'check_hostname', True) - - if _can_use_sni(): - sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname) - else: - sslopt.pop('check_hostname', True) - sock = ssl.wrap_socket(sock, **sslopt) - - if not HAVE_CONTEXT_CHECK_HOSTNAME and check_hostname: - match_hostname(sock.getpeercert(), hostname) - - return sock - - -def 
_tunnel(sock, host, port, auth): - debug("Connecting proxy...") - connect_header = "CONNECT %s:%d HTTP/1.0\r\n" % (host, port) - # TODO: support digest auth. - if auth and auth[0]: - auth_str = auth[0] - if auth[1]: - auth_str += ":" + auth[1] - encoded_str = base64encode(auth_str.encode()).strip().decode() - connect_header += "Proxy-Authorization: Basic %s\r\n" % encoded_str - connect_header += "\r\n" - dump("request header", connect_header) - - send(sock, connect_header) - - try: - status, resp_headers, status_message = read_headers(sock) - except Exception as e: - raise WebSocketProxyException(str(e)) - - if status != 200: - raise WebSocketProxyException( - "failed CONNECT via proxy status: %r" % status) - - return sock - - -def read_headers(sock): - status = None - status_message = None - headers = {} - trace("--- response header ---") - - while True: - line = recv_line(sock) - line = line.decode('utf-8').strip() - if not line: - break - trace(line) - if not status: - - status_info = line.split(" ", 2) - status = int(status_info[1]) - if len(status_info) > 2: - status_message = status_info[2] - else: - kv = line.split(":", 1) - if len(kv) == 2: - key, value = kv - headers[key.lower()] = value.strip() - else: - raise WebSocketException("Invalid header") - - trace("-----------------------") - - return status, headers, status_message diff --git a/pype/vendor/websocket/_logging.py b/pype/vendor/websocket/_logging.py deleted file mode 100644 index 70a6271d92..0000000000 --- a/pype/vendor/websocket/_logging.py +++ /dev/null @@ -1,82 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -import logging - -_logger = logging.getLogger('websocket') -try: - from logging import NullHandler -except ImportError: - class NullHandler(logging.Handler): - def emit(self, record): - pass - -_logger.addHandler(NullHandler()) - -_traceEnabled = False - -__all__ = ["enableTrace", "dump", "error", "warning", "debug", "trace", - "isEnabledForError", "isEnabledForDebug"] - - -def enableTrace(traceable, handler = logging.StreamHandler()): - """ - turn on/off the traceability. - - traceable: boolean value. if set True, traceability is enabled. 
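
# [Editor's note: illustration only. Turning on the wire-level trace logging
# implemented by the enableTrace() helper documented above.]
import websocket

websocket.enableTrace(True)  # dumps handshake headers and frame traffic
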
- """ - global _traceEnabled - _traceEnabled = traceable - if traceable: - _logger.addHandler(handler) - _logger.setLevel(logging.DEBUG) - - -def dump(title, message): - if _traceEnabled: - _logger.debug("--- " + title + " ---") - _logger.debug(message) - _logger.debug("-----------------------") - - -def error(msg): - _logger.error(msg) - - -def warning(msg): - _logger.warning(msg) - - -def debug(msg): - _logger.debug(msg) - - -def trace(msg): - if _traceEnabled: - _logger.debug(msg) - - -def isEnabledForError(): - return _logger.isEnabledFor(logging.ERROR) - - -def isEnabledForDebug(): - return _logger.isEnabledFor(logging.DEBUG) diff --git a/pype/vendor/websocket/_socket.py b/pype/vendor/websocket/_socket.py deleted file mode 100644 index c84fcf90a0..0000000000 --- a/pype/vendor/websocket/_socket.py +++ /dev/null @@ -1,126 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -import socket - -import six -import sys - -from ._exceptions import * -from ._ssl_compat import * -from ._utils import * - -DEFAULT_SOCKET_OPTION = [(socket.SOL_TCP, socket.TCP_NODELAY, 1)] -if hasattr(socket, "SO_KEEPALIVE"): - DEFAULT_SOCKET_OPTION.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)) -if hasattr(socket, "TCP_KEEPIDLE"): - DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPIDLE, 30)) -if hasattr(socket, "TCP_KEEPINTVL"): - DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPINTVL, 10)) -if hasattr(socket, "TCP_KEEPCNT"): - DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPCNT, 3)) - -_default_timeout = None - -__all__ = ["DEFAULT_SOCKET_OPTION", "sock_opt", "setdefaulttimeout", "getdefaulttimeout", - "recv", "recv_line", "send"] - - -class sock_opt(object): - - def __init__(self, sockopt, sslopt): - if sockopt is None: - sockopt = [] - if sslopt is None: - sslopt = {} - self.sockopt = sockopt - self.sslopt = sslopt - self.timeout = None - - -def setdefaulttimeout(timeout): - """ - Set the global timeout setting to connect. - - timeout: default socket timeout time. This value is second. - """ - global _default_timeout - _default_timeout = timeout - - -def getdefaulttimeout(): - """ - Return the global timeout setting(second) to connect. 
- """ - return _default_timeout - - -def recv(sock, bufsize): - if not sock: - raise WebSocketConnectionClosedException("socket is already closed.") - - try: - bytes_ = sock.recv(bufsize) - except socket.timeout as e: - message = extract_err_message(e) - raise WebSocketTimeoutException(message) - except SSLError as e: - message = extract_err_message(e) - if isinstance(message, str) and 'timed out' in message: - raise WebSocketTimeoutException(message) - else: - raise - - if not bytes_: - raise WebSocketConnectionClosedException( - "Connection is already closed.") - - return bytes_ - - -def recv_line(sock): - line = [] - while True: - c = recv(sock, 1) - line.append(c) - if c == six.b("\n"): - break - return six.b("").join(line) - - -def send(sock, data): - if isinstance(data, six.text_type): - data = data.encode('utf-8') - - if not sock: - raise WebSocketConnectionClosedException("socket is already closed.") - - try: - return sock.send(data) - except socket.timeout as e: - message = extract_err_message(e) - raise WebSocketTimeoutException(message) - except Exception as e: - message = extract_err_message(e) - if isinstance(message, str) and "timed out" in message: - raise WebSocketTimeoutException(message) - else: - raise diff --git a/pype/vendor/websocket/_ssl_compat.py b/pype/vendor/websocket/_ssl_compat.py deleted file mode 100644 index 0304816286..0000000000 --- a/pype/vendor/websocket/_ssl_compat.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -__all__ = ["HAVE_SSL", "ssl", "SSLError"] - -try: - import ssl - from ssl import SSLError - if hasattr(ssl, 'SSLContext') and hasattr(ssl.SSLContext, 'check_hostname'): - HAVE_CONTEXT_CHECK_HOSTNAME = True - else: - HAVE_CONTEXT_CHECK_HOSTNAME = False - if hasattr(ssl, "match_hostname"): - from ssl import match_hostname - else: - from backports.ssl_match_hostname import match_hostname - __all__.append("match_hostname") - __all__.append("HAVE_CONTEXT_CHECK_HOSTNAME") - - HAVE_SSL = True -except ImportError: - # dummy class of SSLError for ssl none-support environment. - class SSLError(Exception): - pass - - HAVE_SSL = False diff --git a/pype/vendor/websocket/_url.py b/pype/vendor/websocket/_url.py deleted file mode 100644 index ae46d6c40b..0000000000 --- a/pype/vendor/websocket/_url.py +++ /dev/null @@ -1,163 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. 
diff --git a/pype/vendor/websocket/_url.py b/pype/vendor/websocket/_url.py
deleted file mode 100644
index ae46d6c40b..0000000000
--- a/pype/vendor/websocket/_url.py
+++ /dev/null
@@ -1,163 +0,0 @@
-"""
-websocket - WebSocket client library for Python
-
-Copyright (C) 2010 Hiroki Ohtani(liris)
-
-    This library is free software; you can redistribute it and/or
-    modify it under the terms of the GNU Lesser General Public
-    License as published by the Free Software Foundation; either
-    version 2.1 of the License, or (at your option) any later version.
-
-    This library is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-    Lesser General Public License for more details.
-
-    You should have received a copy of the GNU Lesser General Public
-    License along with this library; if not, write to the Free Software
-    Foundation, Inc., 51 Franklin Street, Fifth Floor,
-    Boston, MA 02110-1335 USA
-
-"""
-
-import os
-import socket
-import struct
-
-from six.moves.urllib.parse import urlparse
-
-
-__all__ = ["parse_url", "get_proxy_info"]
-
-
-def parse_url(url):
-    """
-    Parse a url and return the result as a tuple of
-    (hostname, port, resource path, flag for secure mode).
-
-    url: url string.
-    """
-    if ":" not in url:
-        raise ValueError("url is invalid")
-
-    scheme, url = url.split(":", 1)
-
-    parsed = urlparse(url, scheme="ws")
-    if parsed.hostname:
-        hostname = parsed.hostname
-    else:
-        raise ValueError("hostname is invalid")
-    port = 0
-    if parsed.port:
-        port = parsed.port
-
-    is_secure = False
-    if scheme == "ws":
-        if not port:
-            port = 80
-    elif scheme == "wss":
-        is_secure = True
-        if not port:
-            port = 443
-    else:
-        raise ValueError("scheme %s is invalid" % scheme)
-
-    if parsed.path:
-        resource = parsed.path
-    else:
-        resource = "/"
-
-    if parsed.query:
-        resource += "?" + parsed.query
-
-    return hostname, port, resource, is_secure
-
-
-DEFAULT_NO_PROXY_HOST = ["localhost", "127.0.0.1"]
-
-
-def _is_ip_address(addr):
-    try:
-        socket.inet_aton(addr)
-    except socket.error:
-        return False
-    else:
-        return True
-
-
-def _is_subnet_address(hostname):
-    try:
-        addr, netmask = hostname.split("/")
-        return _is_ip_address(addr) and 0 <= int(netmask) < 32
-    except ValueError:
-        return False
-
-
-def _is_address_in_network(ip, net):
-    ipaddr = struct.unpack('I', socket.inet_aton(ip))[0]
-    netaddr, bits = net.split('/')
-    netmask = struct.unpack('I', socket.inet_aton(netaddr))[0] & ((2 << int(bits) - 1) - 1)
-    return ipaddr & netmask == netmask
-
-
-def _is_no_proxy_host(hostname, no_proxy):
-    if not no_proxy:
-        v = os.environ.get("no_proxy", "").replace(" ", "")
-        no_proxy = v.split(",")
-    if not no_proxy:
-        no_proxy = DEFAULT_NO_PROXY_HOST
-
-    if hostname in no_proxy:
-        return True
-    elif _is_ip_address(hostname):
-        return any([_is_address_in_network(hostname, subnet) for subnet in no_proxy if _is_subnet_address(subnet)])
-
-    return False
-
-
-def get_proxy_info(
-        hostname, is_secure, proxy_host=None, proxy_port=0, proxy_auth=None,
-        no_proxy=None, proxy_type='http'):
-    """
-    Try to retrieve the proxy host and port from the environment
-    if they are not provided in the options.
-    The result is (proxy_host, proxy_port, proxy_auth);
-    proxy_auth is a tuple of the username and password
-    used for proxy authentication.
-
-    hostname: websocket server name.
-
-    is_secure: is the connection secure? (wss)
-               looks for "https_proxy" in env
-               before falling back to "http_proxy"
-
-    options: "http_proxy_host" - http proxy host name.
-             "http_proxy_port" - http proxy port.
-             "http_no_proxy" - host names that bypass the proxy.
-             "http_proxy_auth" - http proxy auth information.
-                                 tuple of username and password.
-                                 default is None
-             "proxy_type" - if set to "socks5" a PySocks wrapper
-                            will be used in place of an http proxy.
-                            default is "http"
-    """
-    if _is_no_proxy_host(hostname, no_proxy):
-        return None, 0, None
-
-    if proxy_host:
-        port = proxy_port
-        auth = proxy_auth
-        return proxy_host, port, auth
-
-    env_keys = ["http_proxy"]
-    if is_secure:
-        env_keys.insert(0, "https_proxy")
-
-    for key in env_keys:
-        value = os.environ.get(key, None)
-        if value:
-            proxy = urlparse(value)
-            auth = (proxy.username, proxy.password) if proxy.username else None
-            return proxy.hostname, proxy.port, auth
-
-    return None, 0, None
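A minimal sketch of what parse_url above returns, importing from the pip-installed websocket-client package that presumably replaces this vendored copy; the URL is illustrative.

from websocket._url import parse_url

host, port, resource, is_secure = parse_url("wss://example.com/chat?room=1")
# -> ("example.com", 443, "/chat?room=1", True); plain "ws://" URLs default to port 80
print(host, port, resource, is_secure)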
- default is "http" - """ - if _is_no_proxy_host(hostname, no_proxy): - return None, 0, None - - if proxy_host: - port = proxy_port - auth = proxy_auth - return proxy_host, port, auth - - env_keys = ["http_proxy"] - if is_secure: - env_keys.insert(0, "https_proxy") - - for key in env_keys: - value = os.environ.get(key, None) - if value: - proxy = urlparse(value) - auth = (proxy.username, proxy.password) if proxy.username else None - return proxy.hostname, proxy.port, auth - - return None, 0, None diff --git a/pype/vendor/websocket/_utils.py b/pype/vendor/websocket/_utils.py deleted file mode 100644 index 399fb89d9e..0000000000 --- a/pype/vendor/websocket/_utils.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -websocket - WebSocket client library for Python - -Copyright (C) 2010 Hiroki Ohtani(liris) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, - Boston, MA 02110-1335 USA - -""" -import six - -__all__ = ["NoLock", "validate_utf8", "extract_err_message"] - - -class NoLock(object): - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - pass - -try: - # If wsaccel is available we use compiled routines to validate UTF-8 - # strings. - from wsaccel.utf8validator import Utf8Validator - - def _validate_utf8(utfbytes): - return Utf8Validator().validate(utfbytes)[0] - -except ImportError: - # UTF-8 validator - # python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ - - _UTF8_ACCEPT = 0 - _UTF8_REJECT = 12 - - _UTF8D = [ - # The first part of the table maps bytes to character classes that - # to reduce the size of the transition table and create bitmasks. - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, - 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, - 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8, - - # The second part is a transition table that maps a combination - # of a state of the automaton and a character class to a state. 
diff --git a/pype/vendor/websocket/cacert.pem b/pype/vendor/websocket/cacert.pem deleted file mode 100644 index e908bb6a11..0000000000 --- a/pype/vendor/websocket/cacert.pem +++ /dev/null @@ -1,4966 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Label: "GTE CyberTrust Global Root" -# Serial: 421 -# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db -# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 -# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 ------BEGIN CERTIFICATE----- -MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD -VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv -bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv -b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV -UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU -cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds -b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH -iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS -r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 -04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r -GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 -3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P -lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ ------END CERTIFICATE----- - -# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Server CA" -# Serial: 1 -# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d -# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c -# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm -MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx -MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 -dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl -cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 -DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD -gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 -yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX -L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj -EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG -7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e -QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ -qdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Premium Server CA" -# Serial: 1 -# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a -# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a -# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy -dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t -MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB -MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG -A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl -cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE -VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ -ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR -uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI -hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM -pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- - -# Issuer: O=Equifax OU=Equifax Secure Certificate Authority -# Subject: O=Equifax OU=Equifax Secure Certificate Authority -# Label: "Equifax Secure CA" -# Serial: 903804111 -# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 -# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a -# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy -dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 -MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx -dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B 
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f -BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A -cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ -MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm -aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw -ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj -IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y -7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh -1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Label: "Verisign Class 3 Public Primary Certification Authority" -# Serial: 149843929435818692848040365716851702463 -# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 -# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 -# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G -CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do -lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc -AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network -# Label: "Verisign Class 3 Public Primary Certification Authority - G2" -# Serial: 167285380242319648451154478808036881606 -# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 -# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f -# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 -pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 -13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk -U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i -F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY -oJ2daZH9 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 
94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Label: "ValiCert Class 1 VA" -# Serial: 1 -# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb -# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e -# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy -NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y -LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ -TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y -TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 -LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW -I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw -nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority -# Label: "ValiCert Class 2 VA" -# Serial: 1 -# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 -# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 -# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy -NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY -dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 -WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS -v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v -UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu -IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC -W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority -# Label: "RSA Root Certificate 1" -# Serial: 1 -# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 -# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb -# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy -NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD -cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs -2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY -JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE -Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ -n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A -PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 4 Public Primary Certification Authority - G3" -# Serial: 314531972711909413743075096039378935511 -# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df -# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d -# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 -GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ -+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd -U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm -NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY -ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ -ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 -CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq -g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm -fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c -2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ -bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Secure Server CA" -# Serial: 927650371 -# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee -# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 -# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 ------BEGIN CERTIFICATE----- -MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u -ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc -KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u -ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 -MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE -ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j -b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg -U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ -I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 -wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC -AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb -oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 -BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p -dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk -MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp -b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 -MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi -E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa -MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI -hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN -95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd -2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure Global eBusiness CA" -# Serial: 1 -# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc -# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 -# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT -ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw -MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj -dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l -c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC -UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc -58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ -o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr -aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA -A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA -Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv -8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure eBusiness CA 1" -# Serial: 4 -# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d -# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 -# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT -ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw -MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j -LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ -KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo -RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu -WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw -Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK -eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM -zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ -WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN -/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Low-Value Services Root" -# Serial: 1 -# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc -# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d -# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 ------BEGIN CERTIFICATE----- 
-MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw -MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD -VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul -CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n -tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl -dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch -PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC -+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O -BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl -MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk -ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB -IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X -7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz -43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY -eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl -pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA -WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Public CA Root 
O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Public Services Root" -# Serial: 1 -# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f -# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 -# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx -MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB -ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV -BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV -6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX -GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP -dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH -1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF -62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW -BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL -MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU -cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv -b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 -IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ -iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao -GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh -4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm -XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Qualified Certificates Root" -# Serial: 1 -# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb -# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf -# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 ------BEGIN CERTIFICATE----- -MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 -MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK -EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh -BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq -xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G -87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i -2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U -WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 -0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G -A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr -pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL -ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm 
-aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv -hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm -hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X -dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 -P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y -iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no -xqE= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3 -# Subject: O=RSA Security Inc OU=RSA Security 2048 V3 -# Label: "RSA Security 2048 v3" -# Serial: 13297492616345471454730593562152402946 -# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e -# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42 -# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c ------BEGIN CERTIFICATE----- -MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6 -MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp -dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX -BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy -MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp 
-eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg -/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl -wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh -AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2 -PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu -AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR -MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc -HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/ -Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+ -f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO -rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch -6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3 -7CAFYd4= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. -# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA 2" -# Serial: 1 -# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 -# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d -# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 ------BEGIN CERTIFICATE----- -MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs -IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg -R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A -PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 -Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL -TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL -5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 -S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe -2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE -FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap -EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td -EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv -/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN -A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 -abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF -I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz -4iIprn2DQKi6bA== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. 
-# Label: "America Online Root Certification Authority 1" -# Serial: 1 -# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e -# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a -# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 ------BEGIN CERTIFICATE----- -MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk -hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym -1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW -OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb -2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko -O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU -AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF -Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb -LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir -oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C -MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds -sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. 
-# Label: "America Online Root Certification Authority 2" -# Serial: 1 -# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf -# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 -# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd ------BEGIN CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC -206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci -KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 -JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 -BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e -Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B -PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 -Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq -Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ -o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 -+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj -FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn -xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 -LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc -obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 -CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe -IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA -DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F -AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX -Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb -AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl -Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw -RY8mkaKO/qk= ------END CERTIFICATE----- - -# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Label: "Visa eCommerce Root" -# Serial: 25952180776285836048024890241505565794 -# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 -# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 -# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 ------BEGIN CERTIFICATE----- -MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr -MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl -cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv -bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw -CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h -dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l -cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h -2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E -lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV 
-ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq -299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t -vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL -dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF -AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR -zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 -LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd -7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw -++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt -398znM/jra6O1I7mT1GvFpLgXPYHDw== ------END CERTIFICATE----- - -# Issuer: CN=Certum CA O=Unizeto Sp. z o.o. -# Subject: CN=Certum CA O=Unizeto Sp. z o.o. -# Label: "Certum Root CA" -# Serial: 65568 -# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9 -# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18 -# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24 ------BEGIN CERTIFICATE----- -MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM -MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD -QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM -MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD -QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E -jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo -ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI -ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu -Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg -AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7 -HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA -uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa -TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg -xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q -CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x -O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs -6GAqm4VKQPNriiTsBhYscw== ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm 
-ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=Secure Certificate Services O=Comodo CA Limited -# Subject: CN=Secure Certificate Services O=Comodo CA Limited -# Label: "Comodo Secure Services root" -# Serial: 1 -# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd -# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 -# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 ------BEGIN CERTIFICATE----- -MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp -ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow -fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV -BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM -cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S -HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 -CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk -3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz -6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV -HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud -EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv -Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw -Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww -DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 -5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj -Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI -gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ -aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl -izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= ------END CERTIFICATE----- - -# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited -# Subject: CN=Trusted Certificate Services O=Comodo CA Limited -# Label: "Comodo Trusted Services root" -# Serial: 1 -# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 -# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd -# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 -aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla -MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO 
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD -VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW -fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt -TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL -fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW -1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 -kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G -A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v -ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo -dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu -Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ -HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 -pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS -jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ -xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn -dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU -xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk -fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 
-7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W 
-YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr 
-Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu -8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ -3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA" -# Serial: 10000010 -# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0 -# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04 -# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3 ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO -TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy -MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk -ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn -ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71 -9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO -hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U -tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o -BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh -SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww -OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv -cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA -7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k -/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm -eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6 -u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy 
-7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR -iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw== ------END CERTIFICATE----- - -# Issuer: O=TDC Internet OU=TDC Internet Root CA -# Subject: O=TDC Internet OU=TDC Internet Root CA -# Label: "TDC Internet Root CA" -# Serial: 986490188 -# MD5 Fingerprint: 91:f4:03:55:20:a1:f8:63:2c:62:de:ac:fb:61:1c:8e -# SHA1 Fingerprint: 21:fc:bd:8e:7f:6c:af:05:1b:d1:b3:43:ec:a8:e7:61:47:f2:0f:8a -# SHA256 Fingerprint: 48:98:c6:88:8c:0c:ff:b0:d3:e3:1a:ca:8a:37:d4:e3:51:5f:f7:46:d0:26:35:d8:66:46:cf:a0:a3:18:5a:e7 ------BEGIN CERTIFICATE----- -MIIEKzCCAxOgAwIBAgIEOsylTDANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJE -SzEVMBMGA1UEChMMVERDIEludGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQg -Um9vdCBDQTAeFw0wMTA0MDUxNjMzMTdaFw0yMTA0MDUxNzAzMTdaMEMxCzAJBgNV -BAYTAkRLMRUwEwYDVQQKEwxUREMgSW50ZXJuZXQxHTAbBgNVBAsTFFREQyBJbnRl -cm5ldCBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxLhA -vJHVYx/XmaCLDEAedLdInUaMArLgJF/wGROnN4NrXceO+YQwzho7+vvOi20jxsNu -Zp+Jpd/gQlBn+h9sHvTQBda/ytZO5GhgbEaqHF1j4QeGDmUApy6mcca8uYGoOn0a -0vnRrEvLznWv3Hv6gXPU/Lq9QYjUdLP5Xjg6PEOo0pVOd20TDJ2PeAG3WiAfAzc1 -4izbSysseLlJ28TQx5yc5IogCSEWVmb/Bexb4/DPqyQkXsN/cHoSxNK1EKC2IeGN -eGlVRGn1ypYcNIUXJXfi9i8nmHj9eQY6otZaQ8H/7AQ77hPv01ha/5Lr7K7a8jcD -R0G2l8ktCkEiu7vmpwIDAQABo4IBJTCCASEwEQYJYIZIAYb4QgEBBAQDAgAHMGUG -A1UdHwReMFwwWqBYoFakVDBSMQswCQYDVQQGEwJESzEVMBMGA1UEChMMVERDIElu -dGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQgUm9vdCBDQTENMAsGA1UEAxME -Q1JMMTArBgNVHRAEJDAigA8yMDAxMDQwNTE2MzMxN1qBDzIwMjEwNDA1MTcwMzE3 -WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUbGQBx/2FbazI2p5QCIUItTxWqFAw -HQYDVR0OBBYEFGxkAcf9hW2syNqeUAiFCLU8VqhQMAwGA1UdEwQFMAMBAf8wHQYJ -KoZIhvZ9B0EABBAwDhsIVjUuMDo0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4IBAQBO -Q8zR3R0QGwZ/t6T609lN+yOfI1Rb5osvBCiLtSdtiaHsmGnc540mgwV5dOy0uaOX -wTUA/RXaOYE6lTGQ3pfphqiZdwzlWqCE/xIWrG64jcN7ksKsLtB9KOy282A4aW8+ -2ARVPp7MVdK6/rtHBNcK2RYKNCn1WBPVT8+PVkuzHu7TmHnaCB4Mb7j4Fifvwm89 -9qNLPg7kbWzbO0ESm70NRyN/PErQr8Cv9u8btRXE64PECV90i9kR+8JWsTz4cMo0 -jUNAE4z9mQNUecYu6oah9jrUCbz0vGbMPVjQV0kK7iXiQe4T+Zs4NNEA9X7nlB38 -aQNiuJkFBT1reBK9sG9l ------END CERTIFICATE----- - -# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN DATACorp SGC Root CA" -# Serial: 91374294542884689855167577680241077609 -# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 -# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 -# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 ------BEGIN CERTIFICATE----- -MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB -kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw -IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG -EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD -VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu -dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 -E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ -D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK -4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq -lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW 
-bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB -o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT -MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js -LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr -BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB -AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft -Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj -j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH -KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv -2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 -mfnGV/TJVTl4uix5yaaIK/QI ------END CERTIFICATE----- - -# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN USERFirst Hardware Root CA" -# Serial: 91374294542884704022267039221184531197 -# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 -# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 -# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 ------BEGIN CERTIFICATE----- -MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB -lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt -SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG -A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe -MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v -d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh -cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn -0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ -M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a -MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd -oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI -DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy -oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 -dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy -bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF -BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM -//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli -CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE -CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t -3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS -KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Label: "Camerfirma Chambers of Commerce Root" -# Serial: 0 -# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84 -# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1 -# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3 ------BEGIN CERTIFICATE----- -MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn 
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL -ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg -b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa -MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB -ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw -IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B -AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb -unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d -BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq -7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3 -0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX -roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG -A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j -aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p -26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA -BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud -EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN -BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz -aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB -AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd -p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi -1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc -XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0 -eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu -tGWaIZDgqtCYvDi1czyL+Nw= ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org -# Label: "Camerfirma Global Chambersign Root" -# Serial: 0 -# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19 -# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9 -# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed ------BEGIN CERTIFICATE----- -MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn -MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL -ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo -YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9 -MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy -NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G -A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA -A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0 -Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s -QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV -eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795 -B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh -z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T -AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i -ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w -TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH -MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD -VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE -VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh -bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B 
-AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
-bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
-ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
-VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
-ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
-AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Notary (Class A) Root"
-# Serial: 259
-# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7
-# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6
-# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67
------BEGIN CERTIFICATE-----
-MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
-MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
-TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
-dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
-KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
-N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
-dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
-MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
-b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
-zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
-3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
-WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
-Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
-NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
-ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
-QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
-YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
-aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
-IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
-ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
-ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
-amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
-IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
-Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
-ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
-YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
-dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
-b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
-CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
-xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
-0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
-QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
-f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
-8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Business (Class B) Root"
-# Serial: 105
-# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6
-# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af
-# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12
------BEGIN CERTIFICATE-----
-MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
-EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
-OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
-A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
-Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
-dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
-SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
-gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
-iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
-Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
-BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
-SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
-b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
-bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
-Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
-aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
-IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
-c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
-biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
-ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
-UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
-YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
-dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
-bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
-sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
-n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
-NitjrFgBazMpUIaD8QFI
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Express (Class C) Root"
-# Serial: 104
-# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4
-# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b
-# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f
------BEGIN CERTIFICATE-----
-MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
-EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
-DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
-DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
-c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
-TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
-BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
-OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
-2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
-RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
-AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
-ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
-YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
-b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
-ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
-IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
-b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
-ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
-YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
-a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
-SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
-aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
-YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
-Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
-ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
-pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
-Fp1hBWeAyNDYpQcCNJgEjTME1A==
------END CERTIFICATE-----
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
-MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
-UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
-dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
-dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
-dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
-38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
-KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
-DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
-JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
-PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
-BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
-eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
-ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
-vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
-qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
-IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
-i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
-O+7ETPTsJ3xCwnR8gooJybQDJbw=
------END CERTIFICATE-----
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
------BEGIN CERTIFICATE-----
-MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
-MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
-YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
-MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
-ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
-MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
-ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
-PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
-wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
-EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
-avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
-YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
-sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
-/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
-IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
-OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
-TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
-HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
-dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
-ReYNnyicsbkqWletNw+vHX/bvZ8=
------END CERTIFICATE-----
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
-MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
-U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
-NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
-ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
-ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
-DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
-8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
-+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
-X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
-K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
-1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
-A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
-zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
-YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
-bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
-DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
-L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
-eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
-xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
-VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
-WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
-# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
-# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
------BEGIN CERTIFICATE-----
-MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
-FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
-ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
-LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
-Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
-dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
-cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
-YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
-dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
-bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
-YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
-TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
-9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
-jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
-FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
-ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
-ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
-EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
-L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
-yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
-O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
-um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
-NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
------END CERTIFICATE-----
-
-# Issuer: O=Government Root Certification Authority
-# Subject: O=Government Root Certification Authority
-# Label: "Taiwan GRCA"
-# Serial: 42023070807708724159991140556527066870
-# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
-# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
-# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
------BEGIN CERTIFICATE-----
-MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
-MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
-PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
-Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
-AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
-IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
-gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
-yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
-F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
-jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
-ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
-VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
-YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
-EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
-Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
-DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
-MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
-TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
-qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
-ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
-JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
-hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
-EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
-nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
-udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
-ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
-LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
-pYYsfPQS
------END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Label: "Firmaprofesional Root CA"
-# Serial: 1
-# MD5 Fingerprint: 11:92:79:40:3c:b1:83:40:e5:ab:66:4a:67:92:80:df
-# SHA1 Fingerprint: a9:62:8f:4b:98:a9:1b:48:35:ba:d2:c1:46:32:86:bb:66:64:6a:8c
-# SHA256 Fingerprint: c1:cf:0b:52:09:64:35:e3:f1:b7:1d:aa:ec:45:5a:23:11:c8:40:4f:55:83:a9:e2:13:c6:9d:85:7d:94:33:05
------BEGIN CERTIFICATE-----
-MIIEVzCCAz+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBnTELMAkGA1UEBhMCRVMx
-IjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1
-dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2
-MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20w
-HhcNMDExMDI0MjIwMDAwWhcNMTMxMDI0MjIwMDAwWjCBnTELMAkGA1UEBhMCRVMx
-IjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1
-dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2
-MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20w
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDnIwNvbyOlXnjOlSztlB5u
-Cp4Bx+ow0Syd3Tfom5h5VtP8c9/Qit5Vj1H5WuretXDE7aTt/6MNbg9kUDGvASdY
-rv5sp0ovFy3Tc9UTHI9ZpTQsHVQERc1ouKDAA6XPhUJHlShbz++AbOCQl4oBPB3z
-hxAwJkh91/zpnZFx/0GaqUC1N5wpIE8fUuOgfRNtVLcK3ulqTgesrBlf3H5idPay
-BQC6haD9HThuy1q7hryUZzM1gywfI834yJFxzJeL764P3CkDG8A563DtwW4O2GcL
-iam8NeTvtjS0pbbELaW+0MOUJEjb35bTALVmGotmBQ/dPz/LP6pemkr4tErvlTcb
-AgMBAAGjgZ8wgZwwKgYDVR0RBCMwIYYfaHR0cDovL3d3dy5maXJtYXByb2Zlc2lv -bmFsLmNvbTASBgNVHRMBAf8ECDAGAQH/AgEBMCsGA1UdEAQkMCKADzIwMDExMDI0 -MjIwMDAwWoEPMjAxMzEwMjQyMjAwMDBaMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E -FgQUMwugZtHq2s7eYpMEKFK1FH84aLcwDQYJKoZIhvcNAQEFBQADggEBAEdz/o0n -VPD11HecJ3lXV7cVVuzH2Fi3AQL0M+2TUIiefEaxvT8Ub/GzR0iLjJcG1+p+o1wq -u00vR+L4OQbJnC4xGgN49Lw4xiKLMzHwFgQEffl25EvXwOaD7FnMP97/T2u3Z36m -hoEyIwOdyPdfwUpgpZKpsaSgYMN4h7Mi8yrrW6ntBas3D7Hi05V2Y1Z0jFhyGzfl -ZKG+TQyTmAyX9odtsz/ny4Cm7YjHX1BiAuiZdBbQ5rQ58SfLyEDW44YQqSMSkuBp -QWOnryULwMWSyx6Yo1q6xTMPoJcB3X/ge9YGVM+h4k0460tQtcsm9MracEpqoeJ5 -quGnM/b9Sh/22WA= ------END CERTIFICATE----- - -# Issuer: CN=Wells Fargo Root Certificate Authority O=Wells Fargo OU=Wells Fargo Certification Authority -# Subject: CN=Wells Fargo Root Certificate Authority O=Wells Fargo OU=Wells Fargo Certification Authority -# Label: "Wells Fargo Root CA" -# Serial: 971282334 -# MD5 Fingerprint: 20:0b:4a:7a:88:a7:a9:42:86:8a:5f:74:56:7b:88:05 -# SHA1 Fingerprint: 93:e6:ab:22:03:03:b5:23:28:dc:da:56:9e:ba:e4:d1:d1:cc:fb:65 -# SHA256 Fingerprint: 03:45:8b:6a:be:ec:c2:14:95:3d:97:14:9a:f4:53:91:69:1d:e9:f9:cd:cc:26:47:86:3a:3d:67:c9:5c:24:3b ------BEGIN CERTIFICATE----- -MIID5TCCAs2gAwIBAgIEOeSXnjANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC1dlbGxzIEZhcmdvMSwwKgYDVQQLEyNXZWxscyBGYXJnbyBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEvMC0GA1UEAxMmV2VsbHMgRmFyZ28gUm9v -dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDAxMDExMTY0MTI4WhcNMjEwMTE0 -MTY0MTI4WjCBgjELMAkGA1UEBhMCVVMxFDASBgNVBAoTC1dlbGxzIEZhcmdvMSww -KgYDVQQLEyNXZWxscyBGYXJnbyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEvMC0G -A1UEAxMmV2VsbHMgRmFyZ28gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVqDM7Jvk0/82bfuUER84A4n13 -5zHCLielTWi5MbqNQ1mXx3Oqfz1cQJ4F5aHiidlMuD+b+Qy0yGIZLEWukR5zcUHE -SxP9cMIlrCL1dQu3U+SlK93OvRw6esP3E48mVJwWa2uv+9iWsWCaSOAlIiR5NM4O -JgALTqv9i86C1y8IcGjBqAr5dE8Hq6T54oN+J3N0Prj5OEL8pahbSCOz6+MlsoCu -ltQKnMJ4msZoGK43YjdeUXWoWGPAUe5AeH6orxqg4bB4nVCMe+ez/I4jsNtlAHCE -AQgAFG5Uhpq6zPk3EPbg3oQtnaSFN9OH4xXQwReQfhkhahKpdv0SAulPIV4XAgMB -AAGjYTBfMA8GA1UdEwEB/wQFMAMBAf8wTAYDVR0gBEUwQzBBBgtghkgBhvt7hwcB -CzAyMDAGCCsGAQUFBwIBFiRodHRwOi8vd3d3LndlbGxzZmFyZ28uY29tL2NlcnRw -b2xpY3kwDQYJKoZIhvcNAQEFBQADggEBANIn3ZwKdyu7IvICtUpKkfnRLb7kuxpo -7w6kAOnu5+/u9vnldKTC2FJYxHT7zmu1Oyl5GFrvm+0fazbuSCUlFLZWohDo7qd/ -0D+j0MNdJu4HzMPBJCGHHt8qElNvQRbn7a6U+oxy+hNH8Dx+rn0ROhPs7fpvcmR7 -nX1/Jv16+yWt6j4pf0zjAFcysLPp7VMX2YuyFA4w6OXVE8Zkr8QA1dhYJPz1j+zx -x32l2w8n0cbyQIjmH/ZhqPRCyLk306m+LFZ4wnKbWV01QIroTmMatukgalHizqSQ -33ZwmVxwQ023tqcZZE6St8WRPH9IFmV7Fv3L/PvZ1dZPIWU7Sn9Ho/s= ------END CERTIFICATE----- - -# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services -# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services -# Label: "Swisscom Root CA 1" -# Serial: 122348795730808398873664200247279986742 -# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9 -# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51 -# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e ------BEGIN CERTIFICATE----- -MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk -MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 -YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg -Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT -AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp -Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN 
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9 -m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih -FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/ -TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F -EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco -kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu -HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF -vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo -19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC -L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW -bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX -JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw -FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j -BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc -K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf -ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik -Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB -sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e -3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR -ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip -mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH -b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf -rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms -hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y -zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6 -MBr1mmz0DlP5OlvRHA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe 
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec 
-nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz -cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. 
-# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES -# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES -# Label: "DST ACES CA X6" -# Serial: 17771143917277623872238992636097467865 -# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8 -# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d -# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40 ------BEGIN CERTIFICATE----- -MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx -ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w -MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD -VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx -FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu -ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7 -gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH -fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a -ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT -ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk -c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto -dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt -aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI -hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk -QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/ -h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq -nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR -rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2 -9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis= ------END CERTIFICATE----- - -# Issuer: 
CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=(c) 2005 TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. -# Label: "TURKTRUST Certificate Services Provider Root 1" -# Serial: 1 -# MD5 Fingerprint: f1:6a:22:18:c9:cd:df:ce:82:1d:1d:b7:78:5c:a9:a5 -# SHA1 Fingerprint: 79:98:a3:08:e1:4d:65:85:e6:c2:1e:15:3a:71:9f:ba:5a:d3:4a:d9 -# SHA256 Fingerprint: 44:04:e3:3b:5e:14:0d:cf:99:80:51:fd:fc:80:28:c7:c8:16:15:c5:ee:73:7b:11:1b:58:82:33:a9:b5:35:a0 ------BEGIN CERTIFICATE----- -MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGDAJUUjEPMA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykg -MjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 -dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMxMDI3MTdaFw0xNTAz -MjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsgU2Vy -dGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYD -VQQHDAZBTktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kg -xLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEu -xZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7 -XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GXyGl8hMW0kWxsE2qkVa2k -heiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8iSi9BB35J -YbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5C -urKZ8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1 -JuTm5Rh8i27fbMx4W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51 -b0dewQIDAQABoxAwDjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV -9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46sWrv7/hg0Uw2ZkUd82YCdAR7 -kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxEq8Sn5RTOPEFh -fEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy -B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdA -aLX/7KfS0zgYnNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKS -RGQDJereW26fyfJOrN3H ------END CERTIFICATE----- - -# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005 -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. 
(c) Kasım 2005 -# Label: "TURKTRUST Certificate Services Provider Root 2" -# Serial: 1 -# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00 -# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7 -# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6 ------BEGIN CERTIFICATE----- -MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS -S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg -SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3 -WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv -bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU -UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw -bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe -LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef -J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh -R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ -Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX -JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p -zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S -Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ -KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq -ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4 -Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz -gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH -uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS -y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI= ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 
-lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA -# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA -# Label: "WellsSecure Public Root Certificate Authority" -# Serial: 1 -# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36 -# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee -# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43 ------BEGIN CERTIFICATE----- -MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx -IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs 
-cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v -dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0 -MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl -bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD -DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r -WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU -Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs -HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj -z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf -SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl -AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG -KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P -AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j -BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC -VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX -ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg -Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB -ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd -/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB -A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn -k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9 -iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv -2G0xffX8oRAHh84vWdw+WNs= ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI -# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI -# Label: "IGC/A" -# Serial: 245102874772 -# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37 -# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c -# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32 ------BEGIN CERTIFICATE----- -MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT 
-AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ -TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG -9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw -MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM -BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO -MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2 -LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI -s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2 -xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4 -u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b -F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx -Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd -PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV -HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx -NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF -AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ -L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY -YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg -Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a -NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R -0982gaEbeC9xs/FZTEYYKKuF0mBWWg== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 -# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1 -# Label: "Security Communication EV RootCA1" -# Serial: 0 -# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3 -# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d -# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37 ------BEGIN CERTIFICATE----- -MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz -MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N -IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11 -bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE -RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO -zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5 -bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF -MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1 -VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC -OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G -CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW -tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ -q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb -EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+ -Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O -VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490 ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 
59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA -# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA -# Label: "Microsec e-Szigno Root CA" -# Serial: 272122594155480254301341951808045322001 -# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5 -# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d -# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0 ------BEGIN CERTIFICATE----- -MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw -cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy -b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z -ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4 -NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN -TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p -Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw -ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u -uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+ -LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA -vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770 -Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx -62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB -AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw -LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP -BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB -AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov -MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5 -ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn -AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT -AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh 
-ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo -AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa -AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln -bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p -Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP -PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv -Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB -EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu -w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj -cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV -HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI -VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS -BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS -b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS -8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds -ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl -7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a -86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR -hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/ -MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: CN=AC Raíz Certicámara S.A. O=Sociedad Cameral de Certificación Digital - Certicámara S.A. -# Subject: CN=AC Raíz Certicámara S.A. O=Sociedad Cameral de Certificación Digital - Certicámara S.A. -# Label: "AC Ra\xC3\xADz Certic\xC3\xA1mara S.A." 
-# Serial: 38908203973182606954752843738508300 -# MD5 Fingerprint: 93:2a:3e:f6:fd:23:69:0d:71:20:d4:2b:47:99:2b:a6 -# SHA1 Fingerprint: cb:a1:c5:f8:b0:e3:5e:b8:b9:45:12:d3:f9:34:a2:e9:06:10:d3:36 -# SHA256 Fingerprint: a6:c5:1e:0d:a5:ca:0a:93:09:d2:e4:c0:e4:0c:2a:f9:10:7a:ae:82:03:85:7f:e1:98:e3:e7:69:e3:43:08:5c ------BEGIN CERTIFICATE----- -MIIGZjCCBE6gAwIBAgIPB35Sk3vgFeNX8GmMy+wMMA0GCSqGSIb3DQEBBQUAMHsx -CzAJBgNVBAYTAkNPMUcwRQYDVQQKDD5Tb2NpZWRhZCBDYW1lcmFsIGRlIENlcnRp -ZmljYWNpw7NuIERpZ2l0YWwgLSBDZXJ0aWPDoW1hcmEgUy5BLjEjMCEGA1UEAwwa -QUMgUmHDrXogQ2VydGljw6FtYXJhIFMuQS4wHhcNMDYxMTI3MjA0NjI5WhcNMzAw -NDAyMjE0MjAyWjB7MQswCQYDVQQGEwJDTzFHMEUGA1UECgw+U29jaWVkYWQgQ2Ft -ZXJhbCBkZSBDZXJ0aWZpY2FjacOzbiBEaWdpdGFsIC0gQ2VydGljw6FtYXJhIFMu -QS4xIzAhBgNVBAMMGkFDIFJhw616IENlcnRpY8OhbWFyYSBTLkEuMIICIjANBgkq -hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq2uJo1PMSCMI+8PPUZYILrgIem08kBeG -qentLhM0R7LQcNzJPNCNyu5LF6vQhbCnIwTLqKL85XXbQMpiiY9QngE9JlsYhBzL -fDe3fezTf3MZsGqy2IiKLUV0qPezuMDU2s0iiXRNWhU5cxh0T7XrmafBHoi0wpOQ -Y5fzp6cSsgkiBzPZkc0OnB8OIMfuuzONj8LSWKdf/WU34ojC2I+GdV75LaeHM/J4 -Ny+LvB2GNzmxlPLYvEqcgxhaBvzz1NS6jBUJJfD5to0EfhcSM2tXSExP2yYe68yQ -54v5aHxwD6Mq0Do43zeX4lvegGHTgNiRg0JaTASJaBE8rF9ogEHMYELODVoqDA+b -MMCm8Ibbq0nXl21Ii/kDwFJnmxL3wvIumGVC2daa49AZMQyth9VXAnow6IYm+48j -ilSH5L887uvDdUhfHjlvgWJsxS3EF1QZtzeNnDeRyPYL1epjb4OsOMLzP96a++Ej -YfDIJss2yKHzMI+ko6Kh3VOz3vCaMh+DkXkwwakfU5tTohVTP92dsxA7SH2JD/zt -A/X7JWR1DhcZDY8AFmd5ekD8LVkH2ZD6mq093ICK5lw1omdMEWux+IBkAC1vImHF -rEsm5VoQgpukg3s0956JkSCXjrdCx2bD0Omk1vUgjcTDlaxECp1bczwmPS9KvqfJ -pxAe+59QafMCAwEAAaOB5jCB4zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQU0QnQ6dfOeXRU+Tows/RtLAMDG2gwgaAGA1UdIASBmDCB -lTCBkgYEVR0gADCBiTArBggrBgEFBQcCARYfaHR0cDovL3d3dy5jZXJ0aWNhbWFy -YS5jb20vZHBjLzBaBggrBgEFBQcCAjBOGkxMaW1pdGFjaW9uZXMgZGUgZ2FyYW50 -7WFzIGRlIGVzdGUgY2VydGlmaWNhZG8gc2UgcHVlZGVuIGVuY29udHJhciBlbiBs -YSBEUEMuMA0GCSqGSIb3DQEBBQUAA4ICAQBclLW4RZFNjmEfAygPU3zmpFmps4p6 -xbD/CHwso3EcIRNnoZUSQDWDg4902zNc8El2CoFS3UnUmjIz75uny3XlesuXEpBc -unvFm9+7OSPI/5jOCk0iAUgHforA1SBClETvv3eiiWdIG0ADBaGJ7M9i4z0ldma/ -Jre7Ir5v/zlXdLp6yQGVwZVR6Kss+LGGIOk/yzVb0hfpKv6DExdA7ohiZVvVO2Dp -ezy4ydV/NgIlqmjCMRW3MGXrfx1IebHPOeJCgBbT9ZMj/EyXyVo3bHwi2ErN0o42 -gzmRkBDI8ck1fj+404HGIGQatlDCIaR43NAvO2STdPCWkPHv+wlaNECW8DYSwaN0 -jJN+Qd53i+yG2dIPPy3RzECiiWZIHiCznCNZc6lEc7wkeZBWN7PGKX6jD/EpOe9+ -XCgycDWs2rjIdWb8m0w5R44bb5tNAlQiM+9hup4phO9OSzNHdpdqy35f/RWmnkJD -W2ZaiogN9xa5P1FlK2Zqi9E4UqLWRhH6/JocdJ6PlwsCT2TG9WjTSy3/pDceiz+/ -RL5hRqGEPQgnTIEgd4kI6mdAXmwIUV80WoyWaM3X94nCHNMyAK9Sy9NgWyo6R35r -MDOhYil/SrnhLecUIw4OGEfhefwVVdCx/CVxY3UzHCMrr1zZ7Ud3YA47Dx7SwNxk -BYn8eNZcLCZDqQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Label: "TC TrustCenter Class 2 CA II" -# Serial: 941389028203453866782103406992443 -# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 -# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e -# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i 
-SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf -tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg -uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J -XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK -8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 -5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 -kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS -GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt -ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 -au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV -hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI -dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Label: "TC TrustCenter Class 3 CA II" -# Serial: 1506523511417715638772220530020799 -# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e -# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 -# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW -Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q -Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 -1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq -ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 -Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX -XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN -irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 -TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 -g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB -95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj 
-S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA I" -# Serial: 601024842042189035295619584734726 -# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c -# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 -# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx -MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg -R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD -VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR -JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T -fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu -jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z -wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ -fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD -VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G -CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 -7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn -8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs -ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT -ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ -2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY ------END CERTIFICATE----- - -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC 
-AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - -# Issuer: CN=ComSign Secured CA O=ComSign -# Subject: CN=ComSign Secured CA O=ComSign -# Label: "ComSign Secured CA" -# Serial: 264725503855295744117309814499492384489 -# MD5 Fingerprint: 40:01:25:06:8d:21:43:6a:0e:43:00:9c:e7:43:f3:d5 -# SHA1 Fingerprint: f9:cd:0e:2c:da:76:24:c1:8f:bd:f0:f0:ab:b6:45:b8:f7:fe:d5:7a -# SHA256 Fingerprint: 50:79:41:c7:44:60:a0:b4:70:86:22:0d:4e:99:32:57:2a:b5:d1:b5:bb:cb:89:80:ab:1c:b1:76:51:a8:44:d2 ------BEGIN CERTIFICATE----- -MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAw -PDEbMBkGA1UEAxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWdu -MQswCQYDVQQGEwJJTDAeFw0wNDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwx -GzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBDQTEQMA4GA1UEChMHQ29tU2lnbjEL -MAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGtWhf -HZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs49oh -gHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sW -v+bznkqH7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ue -Mv5WJDmyVIRD9YTC2LxBkMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr -9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d19guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt -6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUwAwEB/zBEBgNVHR8EPTA7 -MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29tU2lnblNl -Y3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58 -ADsAj8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkq -hkiG9w0BAQUFAAOCAQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7p -iL1DRYHjZiM/EoZNGeQFsOY3wo3aBijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtC -dsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtpFhpFfTMDZflScZAmlaxMDPWL -kz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP51qJThRv4zdL -hfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz -OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js 
-MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
-Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
-omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
-A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
-WL1WMRJOEcgh4LMRkWXbtKaIOM5V
------END CERTIFICATE-----
-
-# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Label: "ePKI Root Certification Authority"
-# Serial: 28956088682735189655030529057352760477
-# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
-# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
-# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
------BEGIN CERTIFICATE-----
-MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
-MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
-ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
-IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
-SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
-SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
-ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
-DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
-TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
-fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
-sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
-WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
-nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
-dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
-NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
-AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
-MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
-ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
-uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
-PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
-JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
-gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
-j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
-5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
-o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
-/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
-W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
-hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"
-# Serial: 17
-# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26
-# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96
-# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a
------BEGIN CERTIFICATE-----
-MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
-MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
-bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
-VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
-YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
-dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
-ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
-Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
-GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
-aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
-QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
-xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
-aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
-IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
-gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
-O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
-fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
-lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
-hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
-AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
-NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
-wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
-7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
-gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
-oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
-yZyQ2uypQjyttgI=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Label: "Buypass Class 2 CA 1"
-# Serial: 1
-# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23
-# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc
-# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38
------BEGIN CERTIFICATE-----
-MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
-Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
-MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
-VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
-ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
-l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
-HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
-5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
-WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
-AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
-gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+ -DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu -BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs -h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk -LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 CA 1 O=Buypass AS-983163327 -# Label: "Buypass Class 3 CA 1" -# Serial: 2 -# MD5 Fingerprint: df:3c:73:59:81:e7:39:50:81:04:4c:34:a2:cb:b3:7b -# SHA1 Fingerprint: 61:57:3a:11:df:0e:d8:7e:d5:92:65:22:ea:d0:56:d7:44:b3:23:71 -# SHA256 Fingerprint: b7:b1:2b:17:1f:82:1d:aa:99:0c:d0:fe:50:87:b1:28:44:8b:a8:e5:18:4f:84:c5:1e:02:b5:c8:fb:96:2b:24 ------BEGIN CERTIFICATE----- -MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg -Q2xhc3MgMyBDQSAxMB4XDTA1MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzEL -MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD -VQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKxifZg -isRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//z -NIqeKNc0n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI -+MkcVyzwPX6UvCWThOiaAJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2R -hzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+ -mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0PAQH/BAQD -AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFP -Bdy7pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27s -EzNxZy5p+qksP2bAEllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2 -mSlf56oBzKwzqBwKu5HEA6BvtjT5htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yC -e/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQjel/wroQk5PMr+4okoyeYZdow -dXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915 ------END CERTIFICATE----- - -# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. -# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. 
-# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1" -# Serial: 5525761995591021570 -# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37 -# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58 -# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2 ------BEGIN CERTIFICATE----- -MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV -BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt -ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4 -MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg -SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl -a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h -4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk -tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s -tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL -dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4 -c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um -TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z -+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O -Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW -OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW -fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2 -l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB -/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw -FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+ -8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI -6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO -TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME -wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY -Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn -xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q -DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q -Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t -hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4 -7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7 -QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ 
-OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=CNNIC ROOT O=CNNIC -# Subject: CN=CNNIC ROOT O=CNNIC -# Label: "CNNIC ROOT" -# Serial: 1228079105 -# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19 -# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f -# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7 ------BEGIN CERTIFICATE----- -MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD -TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2 -MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF -Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB -DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh -IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6 -dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO -V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC -GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN -v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB -AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB -Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO -76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK -OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH -ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi -yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL -buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj -2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE= ------END CERTIFICATE----- - -# Issuer: O=Japanese Government OU=ApplicationCA -# Subject: O=Japanese Government OU=ApplicationCA -# Label: "ApplicationCA - Japanese Government" -# Serial: 49 -# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6 -# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74 -# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19 ------BEGIN CERTIFICATE----- -MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc -MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp -b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT -AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs -aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H -j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K -f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55 -IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw -FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht -QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm -/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ -k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ 
-MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
-seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
-hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
-eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
-DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
-B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
-rosot4LKGAfmt1t06SAZf7IbiVQ=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G3"
-# Serial: 28809105769928564313984085209975885599
-# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
-# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
-# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
------BEGIN CERTIFICATE-----
-MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
-mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
-MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
-cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
-BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
-MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
-BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
-+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
-hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
-5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
-JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
-DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
-huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
-HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
-zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
-kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
-AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
-SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
-spki4cErx5z481+oghLrGREt
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G2"
-# Serial: 71758320672825410020661621085256472406
-# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
-# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
-# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
------BEGIN CERTIFICATE-----
-MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
-IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
-BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
-MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
-d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
-YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
-dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
-BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
-papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
-BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
-DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
-KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
-XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G3"
-# Serial: 127614157056681299805556476275995414779
-# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
-# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
-# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
-rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
-BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
-Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
-LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
-MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
-ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
-gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
-YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
-b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
-9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
-zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
-OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
-HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
-2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
-oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
-t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
-KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
-MdRAGmI0Nj81Aa6sY6A=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G2"
-# Serial: 80682863203381065782177908751794619243
-# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
-# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
-# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
------BEGIN CERTIFICATE-----
-MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
-MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
-KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
-MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
-BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
-NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
-BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
-So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
-tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
-CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
-qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
-rD6ogRLQy7rQkgu2npaqBA+K
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Universal Root Certification Authority"
-# Serial: 85209574734084581917763752644031726877
-# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
-# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
-# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
------BEGIN CERTIFICATE-----
-MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
-vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
-ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
-IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
-IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
-bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
-9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
-H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
-LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
-/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
-rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
-WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
-exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
-DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
-sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
-BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
-lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
-7M2CYfE45k+XmCpajQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
-# Serial: 63143484348153506665311985501458640051
-# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
-# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
-# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
------BEGIN CERTIFICATE-----
-MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
-U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
-SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
-biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
-GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
-fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
-aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
-kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
-4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
-FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
-# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
-# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
-# Serial: 80544274841616
-# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
-# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
-# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
-EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
-MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
-cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
-dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
-pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
-b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
-aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
-IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
-lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
-AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
-VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
-ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
-BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
-AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
-U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
-bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
-+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
-bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
-uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
-XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G2"
-# Serial: 10000012
-# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
-# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
-# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
------BEGIN CERTIFICATE-----
-MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
-DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
-qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
-uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
-Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
-pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
-5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
-UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
-GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
-5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
-6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
-eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
-B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - -# Issuer: CN=CA Disig O=Disig a.s. -# Subject: CN=CA Disig O=Disig a.s. -# Label: "CA Disig" -# Serial: 1 -# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6 -# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41 -# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET -MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE -AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw -CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg -YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw -ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE -Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX -mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD -XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW -S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp -FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD -AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu -ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z -ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv -Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw -DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6 -yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq -EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/ -CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB -EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN -PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag= ------END CERTIFICATE----- - -# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus -# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus -# Label: "Juur-SK" -# Serial: 999181308 -# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55 -# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89 -# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39 ------BEGIN CERTIFICATE----- -MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN -AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp -dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw -MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw 
-CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ -MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB -SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz -ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH -LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP -PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL -2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w -ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC -MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk -AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0 -AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz -AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz -AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f -BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE -FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY -P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi -CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g -kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95 -HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS -na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q -qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z -TbvGRNs2yyqcjg== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI -# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI -# Label: "ACEDICOM Root" -# Serial: 7029493972724711941 -# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6 -# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84 -# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a ------BEGIN CERTIFICATE----- -MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE -AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x -CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW -MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF -RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC -AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7 -09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7 -XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P -Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK -t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb -X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28 -MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU -fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI -2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH -K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae -ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP -BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ -MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw -RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv -bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm -fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3 -gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe -I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i 
-5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi -ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn -MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ -o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6 -zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN -GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt -r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK -Z05phkOTOPu220+DkdRgfks+KzgHVZhepA== ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority -# Label: "Verisign Class 3 Public Primary Certification Authority" -# Serial: 80507572722862485515306429940691309246 -# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 -# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b -# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G -CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i -2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ -2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
-# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=e-Guven Kok Elektronik Sertifika Hizmet Saglayicisi O=Elektronik Bilgi Guvenligi A.S. -# Subject: CN=e-Guven Kok Elektronik Sertifika Hizmet Saglayicisi O=Elektronik Bilgi Guvenligi A.S. 
-# Label: "E-Guven Kok Elektronik Sertifika Hizmet Saglayicisi" -# Serial: 91184789765598910059173000485363494069 -# MD5 Fingerprint: 3d:41:29:cb:1e:aa:11:74:cd:5d:b0:62:af:b0:43:5b -# SHA1 Fingerprint: dd:e1:d2:a9:01:80:2e:1d:87:5e:84:b3:80:7e:4b:b1:fd:99:41:34 -# SHA256 Fingerprint: e6:09:07:84:65:a4:19:78:0c:b6:ac:4c:1c:0b:fb:46:53:d9:d9:cc:6e:b3:94:6e:b7:f3:d6:99:97:ba:d5:98 ------BEGIN CERTIFICATE----- -MIIDtjCCAp6gAwIBAgIQRJmNPMADJ72cdpW56tustTANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxp -Z2kgQS5TLjE8MDoGA1UEAxMzZS1HdXZlbiBLb2sgRWxla3Ryb25payBTZXJ0aWZp -a2EgSGl6bWV0IFNhZ2xheWljaXNpMB4XDTA3MDEwNDExMzI0OFoXDTE3MDEwNDEx -MzI0OFowdTELMAkGA1UEBhMCVFIxKDAmBgNVBAoTH0VsZWt0cm9uaWsgQmlsZ2kg -R3V2ZW5saWdpIEEuUy4xPDA6BgNVBAMTM2UtR3V2ZW4gS29rIEVsZWt0cm9uaWsg -U2VydGlmaWthIEhpem1ldCBTYWdsYXlpY2lzaTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMMSIJ6wXgBljU5Gu4Bc6SwGl9XzcslwuedLZYDBS75+PNdU -MZTe1RK6UxYC6lhj71vY8+0qGqpxSKPcEC1fX+tcS5yWCEIlKBHMilpiAVDV6wlT -L/jDj/6z/P2douNffb7tC+Bg62nsM+3YjfsSSYMAyYuXjDtzKjKzEve5TfL0TW3H -5tYmNwjy2f1rXKPlSFxYvEK+A1qBuhw1DADT9SN+cTAIJjjcJRFHLfO6IxClv7wC -90Nex/6wN1CZew+TzuZDLMN+DfIcQ2Zgy2ExR4ejT669VmxMvLz4Bcpk9Ok0oSy1 -c+HCPujIyTQlCFzz7abHlJ+tiEMl1+E5YP6sOVkCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJ/uRLOU1fqRTy7ZVZoE -VtstxNulMA0GCSqGSIb3DQEBBQUAA4IBAQB/X7lTW2M9dTLn+sR0GstG30ZpHFLP -qk/CaOv/gKlR6D1id4k9CnU58W5dF4dvaAXBlGzZXd/aslnLpRCKysw5zZ/rTt5S -/wzw9JKp8mxTq5vSR6AfdPebmvEvFZ96ZDAYBzwqD2fK/A+JYZ1lpTzlvBNbCNvj -/+27BrtqBrF6T2XGgv0enIu1De5Iu7i9qgi0+6N8y5/NkHZchpZ4Vwpm+Vganf2X -KWDeEaaQHBkc7gGWIjQ0LpH5t8Qn0Xvmv/uARFoW5evg1Ao4vOSR49XrXMGs3xtq -fJ7lddK2l4fbzIcrQzqECK+rPNv3PGYxhrCdU3nt+CPeQuMtgvEP5fqX ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# 
Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. 
-# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=Certinomis - Autorité Racine O=Certinomis 
OU=0002 433998903 -# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903 -# Label: "Certinomis - Autorité Racine" -# Serial: 1 -# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a -# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3 -# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17 ------BEGIN CERTIFICATE----- -MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET -MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk -BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4 -Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl -cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0 -aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY -F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N -8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe -rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K -/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu -7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC -28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6 -lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E -nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB -0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09 -5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj -WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN -jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ -KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s -ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM -OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q -619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn -2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj -o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v -nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG -5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq -pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb -dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0 -BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5 ------END CERTIFICATE----- - -# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA -# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA -# Label: "Root CA Generalitat Valenciana" -# Serial: 994436456 -# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2 -# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46 -# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e ------BEGIN CERTIFICATE----- -MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF -UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ -R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN -MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G -A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw -JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+ -WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj 
-SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl -u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy -A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk -Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7 -MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr -aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC -IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A -cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA -YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA -bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA -bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA -aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA -aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA -ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA -YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA -ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA -LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6 -Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y -eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw -CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G -A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu -Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn -lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt -b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg -9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF -ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC -IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM= ------END CERTIFICATE----- - -# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03 -# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. 
Datenverkehr GmbH OU=A-Trust-nQual-03 -# Label: "A-Trust-nQual-03" -# Serial: 93214 -# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53 -# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2 -# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb ------BEGIN CERTIFICATE----- -MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB -VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp -bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R -dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw -MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy -dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52 -ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM -EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj -lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ -znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH -2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1 -k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs -2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD -VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG -KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+ -8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R -FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS -mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE -DNuxUCAKGkq6ahq97BvIxYSazQ== ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems 
CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB -OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 45 -# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 -# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 -# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 ------BEGIN CERTIFICATE----- -MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul -F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC -ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w -ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk -aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 -YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg -c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 -d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG -CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF -wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS -Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst -0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc -pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl -CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF -P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK -1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm -KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE -JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ -8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm -fyWl8kgAwKQB2j8= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. -# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
-# Label: "StartCom Certification Authority G2" -# Serial: 59 -# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 -# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 -# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 ------BEGIN CERTIFICATE----- -MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 -OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG -A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ -JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD -vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo -D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ -Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW -RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK -HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN -nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM -0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i -UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 -Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg -TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL -BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K -2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX -UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl -6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK -9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ -HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI -wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY -XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l -IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo -hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr -so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr 
-Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot 
Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw -WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV 
-v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - -# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007 -# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007 -# Label: "TURKTRUST Certificate Services Provider Root 2007" -# Serial: 1 -# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72 -# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33 -# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50 ------BEGIN CERTIFICATE----- -MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc -UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx -c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS -S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg -SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx -OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry -b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC -VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE -sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F -ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY -KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG -+7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG -HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P -IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M -733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk -Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G -CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW -AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I -aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5 -mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa -XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ -qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R 
-ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica -# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT -# Label: "PSCProcert" -# Serial: 11 -# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec -# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74 -# SHA256 Fingerprint: 
3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0 ------BEGIN CERTIFICATE----- -MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1 -dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s -YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz -dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0 -aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh -IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ -KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw -MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy -b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx -KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG -A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u -aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9 -7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74 -BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G -ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9 -JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0 -PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2 -0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH -0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/ -6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m -v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7 -K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev -bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw -MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w -MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD -gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0 -b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh -bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0 -cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp -ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg -ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq -hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD -AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w -MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag -RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t -UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl -cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v -Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG -AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN -AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS -1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB -3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv -Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh -HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm -pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz -sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE -qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb -mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9 -opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H -YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km ------END CERTIFICATE----- - -# Issuer: CN=China Internet Network 
Information Center EV Certificates Root O=China Internet Network Information Center -# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center -# Label: "China Internet Network Information Center EV Certificates Root" -# Serial: 1218379777 -# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15 -# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e -# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7 ------BEGIN CERTIFICATE----- -MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC -Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g -Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0 -aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa -Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg -SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo -aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp -ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z -7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA// -DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx -zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8 -hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs -4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u -gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY -NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E -FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3 -j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG -52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB -echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws -ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI -zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy -wy39FCqQmbkHzJ8= ------END CERTIFICATE----- - -# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services -# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services -# Label: "Swisscom Root CA 2" -# Serial: 40698052477090394928831521023204026294 -# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19 -# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec -# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41 ------BEGIN CERTIFICATE----- -MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk -MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0 -YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg -Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT -AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp -Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr -jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r -0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f -2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP -ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF -y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA -tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL -6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0 -uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL 
-acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh -k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q -VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw -FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O -BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh -b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R -fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv -/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI -REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx -srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv -aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT -woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n -Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W -t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N -8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2 -9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5 -wSsSnqaeG8XmDtkx2Q== ------END CERTIFICATE----- - -# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services -# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services -# Label: "Swisscom Root EV CA 2" -# Serial: 322973295377129385374608406479535262296 -# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec -# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b -# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d ------BEGIN CERTIFICATE----- -MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw -ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp -dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290 -IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD -VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy -dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg -MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx -UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD -1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH -oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR -HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/ -5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv -idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL -OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC -NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f -46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB -UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth -7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G -A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED -MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB -bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x -XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T -PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0 -Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70 -WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL -Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm -7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S -nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN 
-vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB -WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI -fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb -I+2ksx0WckNLIOFZfsLorSa/ovc= ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R1 O=Disig a.s. -# Subject: CN=CA Disig Root R1 O=Disig a.s. -# Label: "CA Disig Root R1" -# Serial: 14052245610670616104 -# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a -# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6 -# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy -MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk -D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o -OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A -fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe -IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n -oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK -/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj -rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD -3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE -7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC -yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd -qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI -hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR -xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA -SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo -HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB -emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC -AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb -7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x -DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk -F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF -a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT -Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. 
-# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- diff --git a/pype/vendor/websocket/tests/__init__.py b/pype/vendor/websocket/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/vendor/websocket/tests/data/header01.txt b/pype/vendor/websocket/tests/data/header01.txt deleted file mode 100644 index d44d24c205..0000000000 --- a/pype/vendor/websocket/tests/data/header01.txt +++ /dev/null @@ -1,6 +0,0 @@ -HTTP/1.1 101 WebSocket Protocol Handshake -Connection: Upgrade -Upgrade: WebSocket -Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0= -some_header: something - diff --git a/pype/vendor/websocket/tests/data/header02.txt b/pype/vendor/websocket/tests/data/header02.txt deleted file mode 100644 index f481de928a..0000000000 --- a/pype/vendor/websocket/tests/data/header02.txt +++ /dev/null @@ -1,6 +0,0 @@ -HTTP/1.1 101 WebSocket Protocol Handshake -Connection: Upgrade -Upgrade WebSocket -Sec-WebSocket-Accept: Kxep+hNu9n51529fGidYu7a3wO0= -some_header: something - diff --git a/pype/vendor/websocket/tests/test_cookiejar.py b/pype/vendor/websocket/tests/test_cookiejar.py deleted file mode 100644 index c40a00bd2c..0000000000 --- a/pype/vendor/websocket/tests/test_cookiejar.py +++ /dev/null @@ -1,98 +0,0 @@ -import unittest - -from websocket._cookiejar import 
SimpleCookieJar - -try: - import Cookie -except: - import http.cookies as Cookie - - -class CookieJarTest(unittest.TestCase): - def testAdd(self): - cookie_jar = SimpleCookieJar() - cookie_jar.add("") - self.assertFalse(cookie_jar.jar, "Cookie with no domain should not be added to the jar") - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b") - self.assertFalse(cookie_jar.jar, "Cookie with no domain should not be added to the jar") - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b; domain=.abc") - self.assertTrue(".abc" in cookie_jar.jar) - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b; domain=abc") - self.assertTrue(".abc" in cookie_jar.jar) - self.assertTrue("abc" not in cookie_jar.jar) - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b; c=d; domain=abc") - self.assertEquals(cookie_jar.get("abc"), "a=b; c=d") - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b; c=d; domain=abc") - cookie_jar.add("e=f; domain=abc") - self.assertEquals(cookie_jar.get("abc"), "a=b; c=d; e=f") - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b; c=d; domain=abc") - cookie_jar.add("e=f; domain=.abc") - self.assertEquals(cookie_jar.get("abc"), "a=b; c=d; e=f") - - cookie_jar = SimpleCookieJar() - cookie_jar.add("a=b; c=d; domain=abc") - cookie_jar.add("e=f; domain=xyz") - self.assertEquals(cookie_jar.get("abc"), "a=b; c=d") - self.assertEquals(cookie_jar.get("xyz"), "e=f") - self.assertEquals(cookie_jar.get("something"), "") - - def testSet(self): - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b") - self.assertFalse(cookie_jar.jar, "Cookie with no domain should not be added to the jar") - - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; domain=.abc") - self.assertTrue(".abc" in cookie_jar.jar) - - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; domain=abc") - self.assertTrue(".abc" in cookie_jar.jar) - self.assertTrue("abc" not in cookie_jar.jar) - - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; c=d; domain=abc") - self.assertEquals(cookie_jar.get("abc"), "a=b; c=d") - - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; c=d; domain=abc") - cookie_jar.set("e=f; domain=abc") - self.assertEquals(cookie_jar.get("abc"), "e=f") - - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; c=d; domain=abc") - cookie_jar.set("e=f; domain=.abc") - self.assertEquals(cookie_jar.get("abc"), "e=f") - - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; c=d; domain=abc") - cookie_jar.set("e=f; domain=xyz") - self.assertEquals(cookie_jar.get("abc"), "a=b; c=d") - self.assertEquals(cookie_jar.get("xyz"), "e=f") - self.assertEquals(cookie_jar.get("something"), "") - - def testGet(self): - cookie_jar = SimpleCookieJar() - cookie_jar.set("a=b; c=d; domain=abc.com") - self.assertEquals(cookie_jar.get("abc.com"), "a=b; c=d") - self.assertEquals(cookie_jar.get("x.abc.com"), "a=b; c=d") - self.assertEquals(cookie_jar.get("abc.com.es"), "") - self.assertEquals(cookie_jar.get("xabc.com"), "") - - cookie_jar.set("a=b; c=d; domain=.abc.com") - self.assertEquals(cookie_jar.get("abc.com"), "a=b; c=d") - self.assertEquals(cookie_jar.get("x.abc.com"), "a=b; c=d") - self.assertEquals(cookie_jar.get("abc.com.es"), "") - self.assertEquals(cookie_jar.get("xabc.com"), "") diff --git a/pype/vendor/websocket/tests/test_websocket.py b/pype/vendor/websocket/tests/test_websocket.py deleted file mode 100644 index f49a89398b..0000000000 --- a/pype/vendor/websocket/tests/test_websocket.py +++ /dev/null @@ -1,662 +0,0 @@ -# -*- coding: utf-8 -*- -# - -import sys -sys.path[0:0] = [""] - 
-import os -import os.path -import socket - -import six - -# websocket-client -import websocket as ws -from websocket._handshake import _create_sec_websocket_key, \ - _validate as _validate_header -from websocket._http import read_headers -from websocket._url import get_proxy_info, parse_url -from websocket._utils import validate_utf8 - -if six.PY3: - from base64 import decodebytes as base64decode -else: - from base64 import decodestring as base64decode - -if sys.version_info[0] == 2 and sys.version_info[1] < 7: - import unittest2 as unittest -else: - import unittest - -try: - from ssl import SSLError -except ImportError: - # dummy class of SSLError for ssl none-support environment. - class SSLError(Exception): - pass - -# Skip test to access the internet. -TEST_WITH_INTERNET = os.environ.get('TEST_WITH_INTERNET', '0') == '1' - -# Skip Secure WebSocket test. -TEST_SECURE_WS = True -TRACEABLE = True - - -def create_mask_key(_): - return "abcd" - - -class SockMock(object): - def __init__(self): - self.data = [] - self.sent = [] - - def add_packet(self, data): - self.data.append(data) - - def recv(self, bufsize): - if self.data: - e = self.data.pop(0) - if isinstance(e, Exception): - raise e - if len(e) > bufsize: - self.data.insert(0, e[bufsize:]) - return e[:bufsize] - - def send(self, data): - self.sent.append(data) - return len(data) - - def close(self): - pass - - -class HeaderSockMock(SockMock): - - def __init__(self, fname): - SockMock.__init__(self) - path = os.path.join(os.path.dirname(__file__), fname) - with open(path, "rb") as f: - self.add_packet(f.read()) - - -class WebSocketTest(unittest.TestCase): - def setUp(self): - ws.enableTrace(TRACEABLE) - - def tearDown(self): - pass - - def testDefaultTimeout(self): - self.assertEqual(ws.getdefaulttimeout(), None) - ws.setdefaulttimeout(10) - self.assertEqual(ws.getdefaulttimeout(), 10) - ws.setdefaulttimeout(None) - - def testParseUrl(self): - p = parse_url("ws://www.example.com/r") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 80) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], False) - - p = parse_url("ws://www.example.com/r/") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 80) - self.assertEqual(p[2], "/r/") - self.assertEqual(p[3], False) - - p = parse_url("ws://www.example.com/") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 80) - self.assertEqual(p[2], "/") - self.assertEqual(p[3], False) - - p = parse_url("ws://www.example.com") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 80) - self.assertEqual(p[2], "/") - self.assertEqual(p[3], False) - - p = parse_url("ws://www.example.com:8080/r") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], False) - - p = parse_url("ws://www.example.com:8080/") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], "/") - self.assertEqual(p[3], False) - - p = parse_url("ws://www.example.com:8080") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], "/") - self.assertEqual(p[3], False) - - p = parse_url("wss://www.example.com:8080/r") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], True) - - p = parse_url("wss://www.example.com:8080/r?key=value") - self.assertEqual(p[0], "www.example.com") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], 
"/r?key=value") - self.assertEqual(p[3], True) - - self.assertRaises(ValueError, parse_url, "http://www.example.com/r") - - if sys.version_info[0] == 2 and sys.version_info[1] < 7: - return - - p = parse_url("ws://[2a03:4000:123:83::3]/r") - self.assertEqual(p[0], "2a03:4000:123:83::3") - self.assertEqual(p[1], 80) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], False) - - p = parse_url("ws://[2a03:4000:123:83::3]:8080/r") - self.assertEqual(p[0], "2a03:4000:123:83::3") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], False) - - p = parse_url("wss://[2a03:4000:123:83::3]/r") - self.assertEqual(p[0], "2a03:4000:123:83::3") - self.assertEqual(p[1], 443) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], True) - - p = parse_url("wss://[2a03:4000:123:83::3]:8080/r") - self.assertEqual(p[0], "2a03:4000:123:83::3") - self.assertEqual(p[1], 8080) - self.assertEqual(p[2], "/r") - self.assertEqual(p[3], True) - - def testWSKey(self): - key = _create_sec_websocket_key() - self.assertTrue(key != 24) - self.assertTrue(six.u("¥n") not in key) - - def testWsUtils(self): - key = "c6b8hTg4EeGb2gQMztV1/g==" - required_header = { - "upgrade": "websocket", - "connection": "upgrade", - "sec-websocket-accept": "Kxep+hNu9n51529fGidYu7a3wO0=", - } - self.assertEqual(_validate_header(required_header, key, None), (True, None)) - - header = required_header.copy() - header["upgrade"] = "http" - self.assertEqual(_validate_header(header, key, None), (False, None)) - del header["upgrade"] - self.assertEqual(_validate_header(header, key, None), (False, None)) - - header = required_header.copy() - header["connection"] = "something" - self.assertEqual(_validate_header(header, key, None), (False, None)) - del header["connection"] - self.assertEqual(_validate_header(header, key, None), (False, None)) - - header = required_header.copy() - header["sec-websocket-accept"] = "something" - self.assertEqual(_validate_header(header, key, None), (False, None)) - del header["sec-websocket-accept"] - self.assertEqual(_validate_header(header, key, None), (False, None)) - - header = required_header.copy() - header["sec-websocket-protocol"] = "sub1" - self.assertEqual(_validate_header(header, key, ["sub1", "sub2"]), (True, "sub1")) - self.assertEqual(_validate_header(header, key, ["sub2", "sub3"]), (False, None)) - - header = required_header.copy() - header["sec-websocket-protocol"] = "sUb1" - self.assertEqual(_validate_header(header, key, ["Sub1", "suB2"]), (True, "sub1")) - - - def testReadHeader(self): - status, header, status_message = read_headers(HeaderSockMock("data/header01.txt")) - self.assertEqual(status, 101) - self.assertEqual(header["connection"], "Upgrade") - - HeaderSockMock("data/header02.txt") - self.assertRaises(ws.WebSocketException, read_headers, HeaderSockMock("data/header02.txt")) - - def testSend(self): - # TODO: add longer frame data - sock = ws.WebSocket() - sock.set_mask_key(create_mask_key) - s = sock.sock = HeaderSockMock("data/header01.txt") - sock.send("Hello") - self.assertEqual(s.sent[0], six.b("\x81\x85abcd)\x07\x0f\x08\x0e")) - - sock.send("こんにちは") - self.assertEqual(s.sent[1], six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc")) - - sock.send(u"こんにちは") - self.assertEqual(s.sent[1], six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc")) - - sock.send("x" * 127) - - def testRecv(self): - # TODO: add longer frame data - sock = ws.WebSocket() - s = sock.sock = SockMock() - something = 
six.b("\x81\x8fabcd\x82\xe3\xf0\x87\xe3\xf1\x80\xe5\xca\x81\xe2\xc5\x82\xe3\xcc") - s.add_packet(something) - data = sock.recv() - self.assertEqual(data, "こんにちは") - - s.add_packet(six.b("\x81\x85abcd)\x07\x0f\x08\x0e")) - data = sock.recv() - self.assertEqual(data, "Hello") - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testIter(self): - count = 2 - for _ in ws.create_connection('ws://stream.meetup.com/2/rsvps'): - count -= 1 - if count == 0: - break - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testNext(self): - sock = ws.create_connection('ws://stream.meetup.com/2/rsvps') - self.assertEqual(str, type(next(sock))) - - def testInternalRecvStrict(self): - sock = ws.WebSocket() - s = sock.sock = SockMock() - s.add_packet(six.b("foo")) - s.add_packet(socket.timeout()) - s.add_packet(six.b("bar")) - # s.add_packet(SSLError("The read operation timed out")) - s.add_packet(six.b("baz")) - with self.assertRaises(ws.WebSocketTimeoutException): - sock.frame_buffer.recv_strict(9) - # if six.PY2: - # with self.assertRaises(ws.WebSocketTimeoutException): - # data = sock._recv_strict(9) - # else: - # with self.assertRaises(SSLError): - # data = sock._recv_strict(9) - data = sock.frame_buffer.recv_strict(9) - self.assertEqual(data, six.b("foobarbaz")) - with self.assertRaises(ws.WebSocketConnectionClosedException): - sock.frame_buffer.recv_strict(1) - - def testRecvTimeout(self): - sock = ws.WebSocket() - s = sock.sock = SockMock() - s.add_packet(six.b("\x81")) - s.add_packet(socket.timeout()) - s.add_packet(six.b("\x8dabcd\x29\x07\x0f\x08\x0e")) - s.add_packet(socket.timeout()) - s.add_packet(six.b("\x4e\x43\x33\x0e\x10\x0f\x00\x40")) - with self.assertRaises(ws.WebSocketTimeoutException): - sock.recv() - with self.assertRaises(ws.WebSocketTimeoutException): - sock.recv() - data = sock.recv() - self.assertEqual(data, "Hello, World!") - with self.assertRaises(ws.WebSocketConnectionClosedException): - sock.recv() - - def testRecvWithSimpleFragmentation(self): - sock = ws.WebSocket() - s = sock.sock = SockMock() - # OPCODE=TEXT, FIN=0, MSG="Brevity is " - s.add_packet(six.b("\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C")) - # OPCODE=CONT, FIN=1, MSG="the soul of wit" - s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17")) - data = sock.recv() - self.assertEqual(data, "Brevity is the soul of wit") - with self.assertRaises(ws.WebSocketConnectionClosedException): - sock.recv() - - def testRecvWithFireEventOfFragmentation(self): - sock = ws.WebSocket(fire_cont_frame=True) - s = sock.sock = SockMock() - # OPCODE=TEXT, FIN=0, MSG="Brevity is " - s.add_packet(six.b("\x01\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C")) - # OPCODE=CONT, FIN=0, MSG="Brevity is " - s.add_packet(six.b("\x00\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C")) - # OPCODE=CONT, FIN=1, MSG="the soul of wit" - s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17")) - - _, data = sock.recv_data() - self.assertEqual(data, six.b("Brevity is ")) - _, data = sock.recv_data() - self.assertEqual(data, six.b("Brevity is ")) - _, data = sock.recv_data() - self.assertEqual(data, six.b("the soul of wit")) - - # OPCODE=CONT, FIN=0, MSG="Brevity is " - s.add_packet(six.b("\x80\x8babcd#\x10\x06\x12\x08\x16\x1aD\x08\x11C")) - - with self.assertRaises(ws.WebSocketException): - sock.recv_data() - - with self.assertRaises(ws.WebSocketConnectionClosedException): - sock.recv() - - def testClose(self): - sock = 
ws.WebSocket() - sock.sock = SockMock() - sock.connected = True - sock.close() - self.assertEqual(sock.connected, False) - - sock = ws.WebSocket() - s = sock.sock = SockMock() - sock.connected = True - s.add_packet(six.b('\x88\x80\x17\x98p\x84')) - sock.recv() - self.assertEqual(sock.connected, False) - - def testRecvContFragmentation(self): - sock = ws.WebSocket() - s = sock.sock = SockMock() - # OPCODE=CONT, FIN=1, MSG="the soul of wit" - s.add_packet(six.b("\x80\x8fabcd\x15\n\x06D\x12\r\x16\x08A\r\x05D\x16\x0b\x17")) - self.assertRaises(ws.WebSocketException, sock.recv) - - def testRecvWithProlongedFragmentation(self): - sock = ws.WebSocket() - s = sock.sock = SockMock() - # OPCODE=TEXT, FIN=0, MSG="Once more unto the breach, " - s.add_packet(six.b("\x01\x9babcd.\x0c\x00\x01A\x0f\x0c\x16\x04B\x16\n\x15" - "\rC\x10\t\x07C\x06\x13\x07\x02\x07\tNC")) - # OPCODE=CONT, FIN=0, MSG="dear friends, " - s.add_packet(six.b("\x00\x8eabcd\x05\x07\x02\x16A\x04\x11\r\x04\x0c\x07" - "\x17MB")) - # OPCODE=CONT, FIN=1, MSG="once more" - s.add_packet(six.b("\x80\x89abcd\x0e\x0c\x00\x01A\x0f\x0c\x16\x04")) - data = sock.recv() - self.assertEqual( - data, - "Once more unto the breach, dear friends, once more") - with self.assertRaises(ws.WebSocketConnectionClosedException): - sock.recv() - - def testRecvWithFragmentationAndControlFrame(self): - sock = ws.WebSocket() - sock.set_mask_key(create_mask_key) - s = sock.sock = SockMock() - # OPCODE=TEXT, FIN=0, MSG="Too much " - s.add_packet(six.b("\x01\x89abcd5\r\x0cD\x0c\x17\x00\x0cA")) - # OPCODE=PING, FIN=1, MSG="Please PONG this" - s.add_packet(six.b("\x89\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17")) - # OPCODE=CONT, FIN=1, MSG="of a good thing" - s.add_packet(six.b("\x80\x8fabcd\x0e\x04C\x05A\x05\x0c\x0b\x05B\x17\x0c" - "\x08\x0c\x04")) - data = sock.recv() - self.assertEqual(data, "Too much of a good thing") - with self.assertRaises(ws.WebSocketConnectionClosedException): - sock.recv() - self.assertEqual( - s.sent[0], - six.b("\x8a\x90abcd1\x0e\x06\x05\x12\x07C4.,$D\x15\n\n\x17")) - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testWebSocket(self): - s = ws.create_connection("ws://echo.websocket.org/") - self.assertNotEqual(s, None) - s.send("Hello, World") - result = s.recv() - self.assertEqual(result, "Hello, World") - - s.send(u"こにゃにゃちは、世界") - result = s.recv() - self.assertEqual(result, "こにゃにゃちは、世界") - s.close() - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testPingPong(self): - s = ws.create_connection("ws://echo.websocket.org/") - self.assertNotEqual(s, None) - s.ping("Hello") - s.pong("Hi") - s.close() - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - @unittest.skipUnless(TEST_SECURE_WS, "wss://echo.websocket.org doesn't work well.") - def testSecureWebSocket(self): - if 1: - import ssl - s = ws.create_connection("wss://echo.websocket.org/") - self.assertNotEqual(s, None) - self.assertTrue(isinstance(s.sock, ssl.SSLSocket)) - s.send("Hello, World") - result = s.recv() - self.assertEqual(result, "Hello, World") - s.send(u"こにゃにゃちは、世界") - result = s.recv() - self.assertEqual(result, "こにゃにゃちは、世界") - s.close() - #except: - # pass - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testWebSocketWihtCustomHeader(self): - s = ws.create_connection("ws://echo.websocket.org/", - headers={"User-Agent": "PythonWebsocketClient"}) - self.assertNotEqual(s, None) - s.send("Hello, World") - result 
= s.recv() - self.assertEqual(result, "Hello, World") - s.close() - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testAfterClose(self): - s = ws.create_connection("ws://echo.websocket.org/") - self.assertNotEqual(s, None) - s.close() - self.assertRaises(ws.WebSocketConnectionClosedException, s.send, "Hello") - self.assertRaises(ws.WebSocketConnectionClosedException, s.recv) - - def testNonce(self): - """ WebSocket key should be a random 16-byte nonce. - """ - key = _create_sec_websocket_key() - nonce = base64decode(key.encode("utf-8")) - self.assertEqual(16, len(nonce)) - - -class WebSocketAppTest(unittest.TestCase): - - class NotSetYet(object): - """ A marker class for signalling that a value hasn't been set yet. - """ - - def setUp(self): - ws.enableTrace(TRACEABLE) - - WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet() - WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet() - WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet() - - def tearDown(self): - WebSocketAppTest.keep_running_open = WebSocketAppTest.NotSetYet() - WebSocketAppTest.keep_running_close = WebSocketAppTest.NotSetYet() - WebSocketAppTest.get_mask_key_id = WebSocketAppTest.NotSetYet() - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testKeepRunning(self): - """ A WebSocketApp should keep running as long as its self.keep_running - is not False (in the boolean context). - """ - - def on_open(self, *args, **kwargs): - """ Set the keep_running flag for later inspection and immediately - close the connection. - """ - WebSocketAppTest.keep_running_open = self.keep_running - - self.close() - - def on_close(self, *args, **kwargs): - """ Set the keep_running flag for the test to use. - """ - WebSocketAppTest.keep_running_close = self.keep_running - - app = ws.WebSocketApp('ws://echo.websocket.org/', on_open=on_open, on_close=on_close) - app.run_forever() - - # if numpy is installed, this assertion fail - # self.assertFalse(isinstance(WebSocketAppTest.keep_running_open, - # WebSocketAppTest.NotSetYet)) - - # self.assertFalse(isinstance(WebSocketAppTest.keep_running_close, - # WebSocketAppTest.NotSetYet)) - - # self.assertEqual(True, WebSocketAppTest.keep_running_open) - # self.assertEqual(False, WebSocketAppTest.keep_running_close) - - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testSockMaskKey(self): - """ A WebSocketApp should forward the received mask_key function down - to the actual socket. - """ - - def my_mask_key_func(): - pass - - def on_open(self, *args, **kwargs): - """ Set the value so the test can use it later on and immediately - close the connection. - """ - WebSocketAppTest.get_mask_key_id = id(self.get_mask_key) - self.close() - - app = ws.WebSocketApp('ws://echo.websocket.org/', on_open=on_open, get_mask_key=my_mask_key_func) - app.run_forever() - - # if numpu is installed, this assertion fail - # Note: We can't use 'is' for comparing the functions directly, need to use 'id'. 
- # self.assertEqual(WebSocketAppTest.get_mask_key_id, id(my_mask_key_func)) - - -class SockOptTest(unittest.TestCase): - @unittest.skipUnless(TEST_WITH_INTERNET, "Internet-requiring tests are disabled") - def testSockOpt(self): - sockopt = ((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),) - s = ws.create_connection("ws://echo.websocket.org", sockopt=sockopt) - self.assertNotEqual(s.sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY), 0) - s.close() - - -class UtilsTest(unittest.TestCase): - def testUtf8Validator(self): - state = validate_utf8(six.b('\xf0\x90\x80\x80')) - self.assertEqual(state, True) - state = validate_utf8(six.b('\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5\xed\xa0\x80edited')) - self.assertEqual(state, False) - state = validate_utf8(six.b('')) - self.assertEqual(state, True) - - -class ProxyInfoTest(unittest.TestCase): - def setUp(self): - self.http_proxy = os.environ.get("http_proxy", None) - self.https_proxy = os.environ.get("https_proxy", None) - if "http_proxy" in os.environ: - del os.environ["http_proxy"] - if "https_proxy" in os.environ: - del os.environ["https_proxy"] - - def tearDown(self): - if self.http_proxy: - os.environ["http_proxy"] = self.http_proxy - elif "http_proxy" in os.environ: - del os.environ["http_proxy"] - - if self.https_proxy: - os.environ["https_proxy"] = self.https_proxy - elif "https_proxy" in os.environ: - del os.environ["https_proxy"] - - def testProxyFromArgs(self): - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost"), ("localhost", 0, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost"), ("localhost", 0, None)) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128), ("localhost", 3128, None)) - - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_auth=("a", "b")), - ("localhost", 0, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", False, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")), - ("localhost", 3128, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_auth=("a", "b")), - ("localhost", 0, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, proxy_auth=("a", "b")), - ("localhost", 3128, ("a", "b"))) - - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, no_proxy=["example.com"], proxy_auth=("a", "b")), - ("localhost", 3128, ("a", "b"))) - self.assertEqual(get_proxy_info("echo.websocket.org", True, proxy_host="localhost", proxy_port=3128, no_proxy=["echo.websocket.org"], proxy_auth=("a", "b")), - (None, 0, None)) - - def testProxyFromEnv(self): - os.environ["http_proxy"] = "http://localhost/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None)) - os.environ["http_proxy"] = "http://localhost:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None)) - - os.environ["http_proxy"] = "http://localhost/" - os.environ["https_proxy"] = "http://localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, None)) - os.environ["http_proxy"] = "http://localhost:3128/" - os.environ["https_proxy"] = "http://localhost2:3128/" - 
self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, None)) - - os.environ["http_proxy"] = "http://localhost/" - os.environ["https_proxy"] = "http://localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, None)) - os.environ["http_proxy"] = "http://localhost:3128/" - os.environ["https_proxy"] = "http://localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, None)) - - - os.environ["http_proxy"] = "http://a:b@localhost/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b"))) - os.environ["http_proxy"] = "http://a:b@localhost:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b"))) - - os.environ["http_proxy"] = "http://a:b@localhost/" - os.environ["https_proxy"] = "http://a:b@localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", None, ("a", "b"))) - os.environ["http_proxy"] = "http://a:b@localhost:3128/" - os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", False), ("localhost", 3128, ("a", "b"))) - - os.environ["http_proxy"] = "http://a:b@localhost/" - os.environ["https_proxy"] = "http://a:b@localhost2/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", None, ("a", "b"))) - os.environ["http_proxy"] = "http://a:b@localhost:3128/" - os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - self.assertEqual(get_proxy_info("echo.websocket.org", True), ("localhost2", 3128, ("a", "b"))) - - os.environ["http_proxy"] = "http://a:b@localhost/" - os.environ["https_proxy"] = "http://a:b@localhost2/" - os.environ["no_proxy"] = "example1.com,example2.com" - self.assertEqual(get_proxy_info("example.1.com", True), ("localhost2", None, ("a", "b"))) - os.environ["http_proxy"] = "http://a:b@localhost:3128/" - os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - os.environ["no_proxy"] = "example1.com,example2.com, echo.websocket.org" - self.assertEqual(get_proxy_info("echo.websocket.org", True), (None, 0, None)) - - os.environ["http_proxy"] = "http://a:b@localhost:3128/" - os.environ["https_proxy"] = "http://a:b@localhost2:3128/" - os.environ["no_proxy"] = "127.0.0.0/8, 192.168.0.0/16" - self.assertEqual(get_proxy_info("127.0.0.1", False), (None, 0, None)) - self.assertEqual(get_proxy_info("192.168.1.1", False), (None, 0, None)) - - -if __name__ == "__main__": - unittest.main() diff --git a/pype/vendor/werkzeug/__init__.py b/pype/vendor/werkzeug/__init__.py deleted file mode 100644 index 56404f4c1f..0000000000 --- a/pype/vendor/werkzeug/__init__.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug - ~~~~~~~~ - - Werkzeug is the Swiss Army knife of Python web development. - - It provides useful classes and functions for any WSGI application to make - the life of a python web developer much easier. All of the provided - classes are independent from each other so you can mix it with any other - library. - - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -from types import ModuleType -import sys - -from werkzeug._compat import iteritems - -__version__ = '0.14.1' - - -# This import magic raises concerns quite often which is why the implementation -# and motivation is explained here in detail now. 
-# -# The majority of the functions and classes provided by Werkzeug work on the -# HTTP and WSGI layer. There is no useful grouping for those which is why -# they are all importable from "werkzeug" instead of the modules where they are -# implemented. The downside of that is, that now everything would be loaded at -# once, even if unused. -# -# The implementation of a lazy-loading module in this file replaces the -# werkzeug package when imported from within. Attribute access to the werkzeug -# module will then lazily import from the modules that implement the objects. - - -# import mapping to objects in other modules -all_by_module = { - 'werkzeug.debug': ['DebuggedApplication'], - 'werkzeug.local': ['Local', 'LocalManager', 'LocalProxy', 'LocalStack', - 'release_local'], - 'werkzeug.serving': ['run_simple'], - 'werkzeug.test': ['Client', 'EnvironBuilder', 'create_environ', - 'run_wsgi_app'], - 'werkzeug.testapp': ['test_app'], - 'werkzeug.exceptions': ['abort', 'Aborter'], - 'werkzeug.urls': ['url_decode', 'url_encode', 'url_quote', - 'url_quote_plus', 'url_unquote', 'url_unquote_plus', - 'url_fix', 'Href', 'iri_to_uri', 'uri_to_iri'], - 'werkzeug.formparser': ['parse_form_data'], - 'werkzeug.utils': ['escape', 'environ_property', 'append_slash_redirect', - 'redirect', 'cached_property', 'import_string', - 'dump_cookie', 'parse_cookie', 'unescape', - 'format_string', 'find_modules', 'header_property', - 'html', 'xhtml', 'HTMLBuilder', 'validate_arguments', - 'ArgumentValidationError', 'bind_arguments', - 'secure_filename'], - 'werkzeug.wsgi': ['get_current_url', 'get_host', 'pop_path_info', - 'peek_path_info', 'SharedDataMiddleware', - 'DispatcherMiddleware', 'ClosingIterator', 'FileWrapper', - 'make_line_iter', 'LimitedStream', 'responder', - 'wrap_file', 'extract_path_info'], - 'werkzeug.datastructures': ['MultiDict', 'CombinedMultiDict', 'Headers', - 'EnvironHeaders', 'ImmutableList', - 'ImmutableDict', 'ImmutableMultiDict', - 'TypeConversionDict', - 'ImmutableTypeConversionDict', 'Accept', - 'MIMEAccept', 'CharsetAccept', - 'LanguageAccept', 'RequestCacheControl', - 'ResponseCacheControl', 'ETags', 'HeaderSet', - 'WWWAuthenticate', 'Authorization', - 'FileMultiDict', 'CallbackDict', 'FileStorage', - 'OrderedMultiDict', 'ImmutableOrderedMultiDict' - ], - 'werkzeug.useragents': ['UserAgent'], - 'werkzeug.http': ['parse_etags', 'parse_date', 'http_date', 'cookie_date', - 'parse_cache_control_header', 'is_resource_modified', - 'parse_accept_header', 'parse_set_header', 'quote_etag', - 'unquote_etag', 'generate_etag', 'dump_header', - 'parse_list_header', 'parse_dict_header', - 'parse_authorization_header', - 'parse_www_authenticate_header', 'remove_entity_headers', - 'is_entity_header', 'remove_hop_by_hop_headers', - 'parse_options_header', 'dump_options_header', - 'is_hop_by_hop_header', 'unquote_header_value', - 'quote_header_value', 'HTTP_STATUS_CODES'], - 'werkzeug.wrappers': ['BaseResponse', 'BaseRequest', 'Request', 'Response', - 'AcceptMixin', 'ETagRequestMixin', - 'ETagResponseMixin', 'ResponseStreamMixin', - 'CommonResponseDescriptorsMixin', 'UserAgentMixin', - 'AuthorizationMixin', 'WWWAuthenticateMixin', - 'CommonRequestDescriptorsMixin'], - 'werkzeug.security': ['generate_password_hash', 'check_password_hash'], - # the undocumented easteregg ;-) - 'werkzeug._internal': ['_easteregg'] -} - -# modules that should be imported when accessed as attributes of werkzeug -attribute_modules = frozenset(['exceptions', 'routing']) - - -object_origins = {} -for module, items in 
iteritems(all_by_module): - for item in items: - object_origins[item] = module - - -class module(ModuleType): - - """Automatically import objects from the modules.""" - - def __getattr__(self, name): - if name in object_origins: - module = __import__(object_origins[name], None, None, [name]) - for extra_name in all_by_module[module.__name__]: - setattr(self, extra_name, getattr(module, extra_name)) - return getattr(module, name) - elif name in attribute_modules: - __import__('werkzeug.' + name) - return ModuleType.__getattribute__(self, name) - - def __dir__(self): - """Just show what we want to show.""" - result = list(new_module.__all__) - result.extend(('__file__', '__doc__', '__all__', - '__docformat__', '__name__', '__path__', - '__package__', '__version__')) - return result - -# keep a reference to this module so that it's not garbage collected -old_module = sys.modules['werkzeug'] - - -# setup the new module and patch it into the dict of loaded modules -new_module = sys.modules['werkzeug'] = module('werkzeug') -new_module.__dict__.update({ - '__file__': __file__, - '__package__': 'werkzeug', - '__path__': __path__, - '__doc__': __doc__, - '__version__': __version__, - '__all__': tuple(object_origins) + tuple(attribute_modules), - '__docformat__': 'restructuredtext en' -}) - - -# Due to bootstrapping issues we need to import exceptions here. -# Don't ask :-( -__import__('werkzeug.exceptions') diff --git a/pype/vendor/werkzeug/_compat.py b/pype/vendor/werkzeug/_compat.py deleted file mode 100644 index fda038b97b..0000000000 --- a/pype/vendor/werkzeug/_compat.py +++ /dev/null @@ -1,206 +0,0 @@ -# flake8: noqa -# This whole file is full of lint errors -import codecs -import sys -import operator -import functools -import warnings - -try: - import builtins -except ImportError: - import __builtin__ as builtins - - -PY2 = sys.version_info[0] == 2 -WIN = sys.platform.startswith('win') - -_identity = lambda x: x - -if PY2: - unichr = unichr - text_type = unicode - string_types = (str, unicode) - integer_types = (int, long) - - iterkeys = lambda d, *args, **kwargs: d.iterkeys(*args, **kwargs) - itervalues = lambda d, *args, **kwargs: d.itervalues(*args, **kwargs) - iteritems = lambda d, *args, **kwargs: d.iteritems(*args, **kwargs) - - iterlists = lambda d, *args, **kwargs: d.iterlists(*args, **kwargs) - iterlistvalues = lambda d, *args, **kwargs: d.iterlistvalues(*args, **kwargs) - - int_to_byte = chr - iter_bytes = iter - - exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') - - def fix_tuple_repr(obj): - def __repr__(self): - cls = self.__class__ - return '%s(%s)' % (cls.__name__, ', '.join( - '%s=%r' % (field, self[index]) - for index, field in enumerate(cls._fields) - )) - obj.__repr__ = __repr__ - return obj - - def implements_iterator(cls): - cls.next = cls.__next__ - del cls.__next__ - return cls - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode('utf-8') - return cls - - def native_string_result(func): - def wrapper(*args, **kwargs): - return func(*args, **kwargs).encode('utf-8') - return functools.update_wrapper(wrapper, func) - - def implements_bool(cls): - cls.__nonzero__ = cls.__bool__ - del cls.__bool__ - return cls - - from itertools import imap, izip, ifilter - range_type = xrange - - from StringIO import StringIO - from cStringIO import StringIO as BytesIO - NativeStringIO = BytesIO - - def make_literal_wrapper(reference): - return _identity - - def normalize_string_tuple(tup): - """Normalizes a string 
tuple to a common type. Following Python 2 - rules, upgrades to unicode are implicit. - """ - if any(isinstance(x, text_type) for x in tup): - return tuple(to_unicode(x) for x in tup) - return tup - - def try_coerce_native(s): - """Try to coerce a unicode string to native if possible. Otherwise, - leave it as unicode. - """ - try: - return to_native(s) - except UnicodeError: - return s - - wsgi_get_bytes = _identity - - def wsgi_decoding_dance(s, charset='utf-8', errors='replace'): - return s.decode(charset, errors) - - def wsgi_encoding_dance(s, charset='utf-8', errors='replace'): - if isinstance(s, bytes): - return s - return s.encode(charset, errors) - - def to_bytes(x, charset=sys.getdefaultencoding(), errors='strict'): - if x is None: - return None - if isinstance(x, (bytes, bytearray, buffer)): - return bytes(x) - if isinstance(x, unicode): - return x.encode(charset, errors) - raise TypeError('Expected bytes') - - def to_native(x, charset=sys.getdefaultencoding(), errors='strict'): - if x is None or isinstance(x, str): - return x - return x.encode(charset, errors) - -else: - unichr = chr - text_type = str - string_types = (str, ) - integer_types = (int, ) - - iterkeys = lambda d, *args, **kwargs: iter(d.keys(*args, **kwargs)) - itervalues = lambda d, *args, **kwargs: iter(d.values(*args, **kwargs)) - iteritems = lambda d, *args, **kwargs: iter(d.items(*args, **kwargs)) - - iterlists = lambda d, *args, **kwargs: iter(d.lists(*args, **kwargs)) - iterlistvalues = lambda d, *args, **kwargs: iter(d.listvalues(*args, **kwargs)) - - int_to_byte = operator.methodcaller('to_bytes', 1, 'big') - iter_bytes = functools.partial(map, int_to_byte) - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - fix_tuple_repr = _identity - implements_iterator = _identity - implements_to_string = _identity - implements_bool = _identity - native_string_result = _identity - imap = map - izip = zip - ifilter = filter - range_type = range - - from io import StringIO, BytesIO - NativeStringIO = StringIO - - _latin1_encode = operator.methodcaller('encode', 'latin1') - - def make_literal_wrapper(reference): - if isinstance(reference, text_type): - return _identity - return _latin1_encode - - def normalize_string_tuple(tup): - """Ensures that all types in the tuple are either strings - or bytes. 
- """ - tupiter = iter(tup) - is_text = isinstance(next(tupiter, None), text_type) - for arg in tupiter: - if isinstance(arg, text_type) != is_text: - raise TypeError('Cannot mix str and bytes arguments (got %s)' - % repr(tup)) - return tup - - try_coerce_native = _identity - wsgi_get_bytes = _latin1_encode - - def wsgi_decoding_dance(s, charset='utf-8', errors='replace'): - return s.encode('latin1').decode(charset, errors) - - def wsgi_encoding_dance(s, charset='utf-8', errors='replace'): - if isinstance(s, text_type): - s = s.encode(charset) - return s.decode('latin1', errors) - - def to_bytes(x, charset=sys.getdefaultencoding(), errors='strict'): - if x is None: - return None - if isinstance(x, (bytes, bytearray, memoryview)): # noqa - return bytes(x) - if isinstance(x, str): - return x.encode(charset, errors) - raise TypeError('Expected bytes') - - def to_native(x, charset=sys.getdefaultencoding(), errors='strict'): - if x is None or isinstance(x, str): - return x - return x.decode(charset, errors) - - -def to_unicode(x, charset=sys.getdefaultencoding(), errors='strict', - allow_none_charset=False): - if x is None: - return None - if not isinstance(x, bytes): - return text_type(x) - if charset is None and allow_none_charset: - return x - return x.decode(charset, errors) diff --git a/pype/vendor/werkzeug/_internal.py b/pype/vendor/werkzeug/_internal.py deleted file mode 100644 index 28bfd9fe4a..0000000000 --- a/pype/vendor/werkzeug/_internal.py +++ /dev/null @@ -1,419 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug._internal - ~~~~~~~~~~~~~~~~~~ - - This module provides internally used helpers and constants. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import re -import string -import inspect -from weakref import WeakKeyDictionary -from datetime import datetime, date -from itertools import chain - -from werkzeug._compat import iter_bytes, text_type, BytesIO, int_to_byte, \ - range_type, integer_types - - -_logger = None -_empty_stream = BytesIO() -_signature_cache = WeakKeyDictionary() -_epoch_ord = date(1970, 1, 1).toordinal() -_cookie_params = set((b'expires', b'path', b'comment', - b'max-age', b'secure', b'httponly', - b'version')) -_legal_cookie_chars = (string.ascii_letters + - string.digits + - u"/=!#$%&'*+-.^_`|~:").encode('ascii') - -_cookie_quoting_map = { - b',': b'\\054', - b';': b'\\073', - b'"': b'\\"', - b'\\': b'\\\\', -} -for _i in chain(range_type(32), range_type(127, 256)): - _cookie_quoting_map[int_to_byte(_i)] = ('\\%03o' % _i).encode('latin1') - - -_octal_re = re.compile(br'\\[0-3][0-7][0-7]') -_quote_re = re.compile(br'[\\].') -_legal_cookie_chars_re = br'[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]' -_cookie_re = re.compile(br""" - (?P[^=;]+) - (?:\s*=\s* - (?P - "(?:[^\\"]|\\.)*" | - (?:.*?) - ) - )? - \s*; -""", flags=re.VERBOSE) - - -class _Missing(object): - - def __repr__(self): - return 'no value' - - def __reduce__(self): - return '_missing' - -_missing = _Missing() - - -def _get_environ(obj): - env = getattr(obj, 'environ', obj) - assert isinstance(env, dict), \ - '%r is not a WSGI environment (has to be a dict)' % type(obj).__name__ - return env - - -def _log(type, message, *args, **kwargs): - """Log into the internal werkzeug logger.""" - global _logger - if _logger is None: - import logging - _logger = logging.getLogger('werkzeug') - # Only set up a default log handler if the - # end-user application didn't set anything up. 
- if not logging.root.handlers and _logger.level == logging.NOTSET: - _logger.setLevel(logging.INFO) - handler = logging.StreamHandler() - _logger.addHandler(handler) - getattr(_logger, type)(message.rstrip(), *args, **kwargs) - - -def _parse_signature(func): - """Return a signature object for the function.""" - if hasattr(func, 'im_func'): - func = func.im_func - - # if we have a cached validator for this function, return it - parse = _signature_cache.get(func) - if parse is not None: - return parse - - # inspect the function signature and collect all the information - if hasattr(inspect, 'getfullargspec'): - tup = inspect.getfullargspec(func) - else: - tup = inspect.getargspec(func) - positional, vararg_var, kwarg_var, defaults = tup[:4] - defaults = defaults or () - arg_count = len(positional) - arguments = [] - for idx, name in enumerate(positional): - if isinstance(name, list): - raise TypeError('cannot parse functions that unpack tuples ' - 'in the function signature') - try: - default = defaults[idx - arg_count] - except IndexError: - param = (name, False, None) - else: - param = (name, True, default) - arguments.append(param) - arguments = tuple(arguments) - - def parse(args, kwargs): - new_args = [] - missing = [] - extra = {} - - # consume as many arguments as positional as possible - for idx, (name, has_default, default) in enumerate(arguments): - try: - new_args.append(args[idx]) - except IndexError: - try: - new_args.append(kwargs.pop(name)) - except KeyError: - if has_default: - new_args.append(default) - else: - missing.append(name) - else: - if name in kwargs: - extra[name] = kwargs.pop(name) - - # handle extra arguments - extra_positional = args[arg_count:] - if vararg_var is not None: - new_args.extend(extra_positional) - extra_positional = () - if kwargs and kwarg_var is None: - extra.update(kwargs) - kwargs = {} - - return new_args, kwargs, missing, extra, extra_positional, \ - arguments, vararg_var, kwarg_var - _signature_cache[func] = parse - return parse - - -def _date_to_unix(arg): - """Converts a timetuple, integer or datetime object into the seconds from - epoch in utc. 
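For what it's worth, the conversion `_date_to_unix` implements by hand below is also available in the stdlib; a quick cross-check sketch (assuming, as the docstring says, that naive datetimes are treated as UTC)::

    import calendar
    from datetime import datetime

    def date_to_unix(dt):
        # seconds since the epoch, treating a naive datetime as UTC
        return calendar.timegm(dt.utctimetuple())

    assert date_to_unix(datetime(1970, 1, 2)) == 24 * 60 * 60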
- """ - if isinstance(arg, datetime): - arg = arg.utctimetuple() - elif isinstance(arg, integer_types + (float,)): - return int(arg) - year, month, day, hour, minute, second = arg[:6] - days = date(year, month, 1).toordinal() - _epoch_ord + day - 1 - hours = days * 24 + hour - minutes = hours * 60 + minute - seconds = minutes * 60 + second - return seconds - - -class _DictAccessorProperty(object): - - """Baseclass for `environ_property` and `header_property`.""" - read_only = False - - def __init__(self, name, default=None, load_func=None, dump_func=None, - read_only=None, doc=None): - self.name = name - self.default = default - self.load_func = load_func - self.dump_func = dump_func - if read_only is not None: - self.read_only = read_only - self.__doc__ = doc - - def __get__(self, obj, type=None): - if obj is None: - return self - storage = self.lookup(obj) - if self.name not in storage: - return self.default - rv = storage[self.name] - if self.load_func is not None: - try: - rv = self.load_func(rv) - except (ValueError, TypeError): - rv = self.default - return rv - - def __set__(self, obj, value): - if self.read_only: - raise AttributeError('read only property') - if self.dump_func is not None: - value = self.dump_func(value) - self.lookup(obj)[self.name] = value - - def __delete__(self, obj): - if self.read_only: - raise AttributeError('read only property') - self.lookup(obj).pop(self.name, None) - - def __repr__(self): - return '<%s %s>' % ( - self.__class__.__name__, - self.name - ) - - -def _cookie_quote(b): - buf = bytearray() - all_legal = True - _lookup = _cookie_quoting_map.get - _push = buf.extend - - for char in iter_bytes(b): - if char not in _legal_cookie_chars: - all_legal = False - char = _lookup(char, char) - _push(char) - - if all_legal: - return bytes(buf) - return bytes(b'"' + buf + b'"') - - -def _cookie_unquote(b): - if len(b) < 2: - return b - if b[:1] != b'"' or b[-1:] != b'"': - return b - - b = b[1:-1] - - i = 0 - n = len(b) - rv = bytearray() - _push = rv.extend - - while 0 <= i < n: - o_match = _octal_re.search(b, i) - q_match = _quote_re.search(b, i) - if not o_match and not q_match: - rv.extend(b[i:]) - break - j = k = -1 - if o_match: - j = o_match.start(0) - if q_match: - k = q_match.start(0) - if q_match and (not o_match or k < j): - _push(b[i:k]) - _push(b[k + 1:k + 2]) - i = k + 2 - else: - _push(b[i:j]) - rv.append(int(b[j + 1:j + 4], 8)) - i = j + 4 - - return bytes(rv) - - -def _cookie_parse_impl(b): - """Lowlevel cookie parsing facility that operates on bytes.""" - i = 0 - n = len(b) - - while i < n: - match = _cookie_re.search(b + b';', i) - if not match: - break - - key = match.group('key').strip() - value = match.group('val') or b'' - i = match.end(0) - - # Ignore parameters. We have no interest in them. - if key.lower() not in _cookie_params: - yield _cookie_unquote(key), _cookie_unquote(value) - - -def _encode_idna(domain): - # If we're given bytes, make sure they fit into ASCII - if not isinstance(domain, text_type): - domain.decode('ascii') - return domain - - # Otherwise check if it's already ascii, then return - try: - return domain.encode('ascii') - except UnicodeError: - pass - - # Otherwise encode each part separately - parts = domain.split('.') - for idx, part in enumerate(parts): - parts[idx] = part.encode('idna') - return b'.'.join(parts) - - -def _decode_idna(domain): - # If the input is a string try to encode it to ascii to - # do the idna decoding. 
if that fails because of an - # unicode error, then we already have a decoded idna domain - if isinstance(domain, text_type): - try: - domain = domain.encode('ascii') - except UnicodeError: - return domain - - # Decode each part separately. If a part fails, try to - # decode it with ascii and silently ignore errors. This makes - # most sense because the idna codec does not have error handling - parts = domain.split(b'.') - for idx, part in enumerate(parts): - try: - parts[idx] = part.decode('idna') - except UnicodeError: - parts[idx] = part.decode('ascii', 'ignore') - - return '.'.join(parts) - - -def _make_cookie_domain(domain): - if domain is None: - return None - domain = _encode_idna(domain) - if b':' in domain: - domain = domain.split(b':', 1)[0] - if b'.' in domain: - return domain - raise ValueError( - 'Setting \'domain\' for a cookie on a server running locally (ex: ' - 'localhost) is not supported by complying browsers. You should ' - 'have something like: \'127.0.0.1 localhost dev.localhost\' on ' - 'your hosts file and then point your server to run on ' - '\'dev.localhost\' and also set \'domain\' for \'dev.localhost\'' - ) - - -def _easteregg(app=None): - """Like the name says. But who knows how it works?""" - def bzzzzzzz(gyver): - import base64 - import zlib - return zlib.decompress(base64.b64decode(gyver)).decode('ascii') - gyver = u'\n'.join([x + (77 - len(x)) * u' ' for x in bzzzzzzz(b''' -eJyFlzuOJDkMRP06xRjymKgDJCDQStBYT8BCgK4gTwfQ2fcFs2a2FzvZk+hvlcRvRJD148efHt9m -9Xz94dRY5hGt1nrYcXx7us9qlcP9HHNh28rz8dZj+q4rynVFFPdlY4zH873NKCexrDM6zxxRymzz -4QIxzK4bth1PV7+uHn6WXZ5C4ka/+prFzx3zWLMHAVZb8RRUxtFXI5DTQ2n3Hi2sNI+HK43AOWSY -jmEzE4naFp58PdzhPMdslLVWHTGUVpSxImw+pS/D+JhzLfdS1j7PzUMxij+mc2U0I9zcbZ/HcZxc -q1QjvvcThMYFnp93agEx392ZdLJWXbi/Ca4Oivl4h/Y1ErEqP+lrg7Xa4qnUKu5UE9UUA4xeqLJ5 -jWlPKJvR2yhRI7xFPdzPuc6adXu6ovwXwRPXXnZHxlPtkSkqWHilsOrGrvcVWXgGP3daXomCj317 -8P2UOw/NnA0OOikZyFf3zZ76eN9QXNwYdD8f8/LdBRFg0BO3bB+Pe/+G8er8tDJv83XTkj7WeMBJ -v/rnAfdO51d6sFglfi8U7zbnr0u9tyJHhFZNXYfH8Iafv2Oa+DT6l8u9UYlajV/hcEgk1x8E8L/r -XJXl2SK+GJCxtnyhVKv6GFCEB1OO3f9YWAIEbwcRWv/6RPpsEzOkXURMN37J0PoCSYeBnJQd9Giu -LxYQJNlYPSo/iTQwgaihbART7Fcyem2tTSCcwNCs85MOOpJtXhXDe0E7zgZJkcxWTar/zEjdIVCk -iXy87FW6j5aGZhttDBoAZ3vnmlkx4q4mMmCdLtnHkBXFMCReqthSGkQ+MDXLLCpXwBs0t+sIhsDI -tjBB8MwqYQpLygZ56rRHHpw+OAVyGgaGRHWy2QfXez+ZQQTTBkmRXdV/A9LwH6XGZpEAZU8rs4pE -1R4FQ3Uwt8RKEtRc0/CrANUoes3EzM6WYcFyskGZ6UTHJWenBDS7h163Eo2bpzqxNE9aVgEM2CqI -GAJe9Yra4P5qKmta27VjzYdR04Vc7KHeY4vs61C0nbywFmcSXYjzBHdiEjraS7PGG2jHHTpJUMxN -Jlxr3pUuFvlBWLJGE3GcA1/1xxLcHmlO+LAXbhrXah1tD6Ze+uqFGdZa5FM+3eHcKNaEarutAQ0A -QMAZHV+ve6LxAwWnXbbSXEG2DmCX5ijeLCKj5lhVFBrMm+ryOttCAeFpUdZyQLAQkA06RLs56rzG -8MID55vqr/g64Qr/wqwlE0TVxgoiZhHrbY2h1iuuyUVg1nlkpDrQ7Vm1xIkI5XRKLedN9EjzVchu -jQhXcVkjVdgP2O99QShpdvXWoSwkp5uMwyjt3jiWCqWGSiaaPAzohjPanXVLbM3x0dNskJsaCEyz -DTKIs+7WKJD4ZcJGfMhLFBf6hlbnNkLEePF8Cx2o2kwmYF4+MzAxa6i+6xIQkswOqGO+3x9NaZX8 -MrZRaFZpLeVTYI9F/djY6DDVVs340nZGmwrDqTCiiqD5luj3OzwpmQCiQhdRYowUYEA3i1WWGwL4 -GCtSoO4XbIPFeKGU13XPkDf5IdimLpAvi2kVDVQbzOOa4KAXMFlpi/hV8F6IDe0Y2reg3PuNKT3i -RYhZqtkQZqSB2Qm0SGtjAw7RDwaM1roESC8HWiPxkoOy0lLTRFG39kvbLZbU9gFKFRvixDZBJmpi -Xyq3RE5lW00EJjaqwp/v3EByMSpVZYsEIJ4APaHmVtpGSieV5CALOtNUAzTBiw81GLgC0quyzf6c -NlWknzJeCsJ5fup2R4d8CYGN77mu5vnO1UqbfElZ9E6cR6zbHjgsr9ly18fXjZoPeDjPuzlWbFwS -pdvPkhntFvkc13qb9094LL5NrA3NIq3r9eNnop9DizWOqCEbyRBFJTHn6Tt3CG1o8a4HevYh0XiJ -sR0AVVHuGuMOIfbuQ/OKBkGRC6NJ4u7sbPX8bG/n5sNIOQ6/Y/BX3IwRlTSabtZpYLB85lYtkkgm -p1qXK3Du2mnr5INXmT/78KI12n11EFBkJHHp0wJyLe9MvPNUGYsf+170maayRoy2lURGHAIapSpQ 
-krEDuNoJCHNlZYhKpvw4mspVWxqo415n8cD62N9+EfHrAvqQnINStetek7RY2Urv8nxsnGaZfRr/ -nhXbJ6m/yl1LzYqscDZA9QHLNbdaSTTr+kFg3bC0iYbX/eQy0Bv3h4B50/SGYzKAXkCeOLI3bcAt -mj2Z/FM1vQWgDynsRwNvrWnJHlespkrp8+vO1jNaibm+PhqXPPv30YwDZ6jApe3wUjFQobghvW9p -7f2zLkGNv8b191cD/3vs9Q833z8t''').splitlines()]) - - def easteregged(environ, start_response): - def injecting_start_response(status, headers, exc_info=None): - headers.append(('X-Powered-By', 'Werkzeug')) - return start_response(status, headers, exc_info) - if app is not None and environ.get('QUERY_STRING') != 'macgybarchakku': - return app(environ, injecting_start_response) - injecting_start_response('200 OK', [('Content-Type', 'text/html')]) - return [(u''' - - - -About Werkzeug - - - -

-<h1>Werkzeug</h1>
-
-<p>the Swiss Army knife of Python web development.</p>
-
-<pre>%s\n\n\n</pre>
-</body>
- -''' % gyver).encode('latin1')] - return easteregged diff --git a/pype/vendor/werkzeug/_reloader.py b/pype/vendor/werkzeug/_reloader.py deleted file mode 100644 index 0d23dba4eb..0000000000 --- a/pype/vendor/werkzeug/_reloader.py +++ /dev/null @@ -1,277 +0,0 @@ -import os -import sys -import time -import subprocess -import threading -from itertools import chain - -from werkzeug._internal import _log -from werkzeug._compat import PY2, iteritems, text_type - - -def _iter_module_files(): - """This iterates over all relevant Python files. It goes through all - loaded files from modules, all files in folders of already loaded modules - as well as all files reachable through a package. - """ - # The list call is necessary on Python 3 in case the module - # dictionary modifies during iteration. - for module in list(sys.modules.values()): - if module is None: - continue - filename = getattr(module, '__file__', None) - if filename: - if os.path.isdir(filename) and \ - os.path.exists(os.path.join(filename, "__init__.py")): - filename = os.path.join(filename, "__init__.py") - - old = None - while not os.path.isfile(filename): - old = filename - filename = os.path.dirname(filename) - if filename == old: - break - else: - if filename[-4:] in ('.pyc', '.pyo'): - filename = filename[:-1] - yield filename - - -def _find_observable_paths(extra_files=None): - """Finds all paths that should be observed.""" - rv = set(os.path.dirname(os.path.abspath(x)) - if os.path.isfile(x) else os.path.abspath(x) - for x in sys.path) - - for filename in extra_files or (): - rv.add(os.path.dirname(os.path.abspath(filename))) - - for module in list(sys.modules.values()): - fn = getattr(module, '__file__', None) - if fn is None: - continue - fn = os.path.abspath(fn) - rv.add(os.path.dirname(fn)) - - return _find_common_roots(rv) - - -def _get_args_for_reloading(): - """Returns the executable. This contains a workaround for windows - if the executable is incorrectly reported to not have the .exe - extension which can cause bugs on reloading. - """ - rv = [sys.executable] - py_script = sys.argv[0] - if os.name == 'nt' and not os.path.exists(py_script) and \ - os.path.exists(py_script + '.exe'): - py_script += '.exe' - if os.path.splitext(rv[0])[1] == '.exe' and os.path.splitext(py_script)[1] == '.exe': - rv.pop(0) - rv.append(py_script) - rv.extend(sys.argv[1:]) - return rv - - -def _find_common_roots(paths): - """Out of some paths it finds the common roots that need monitoring.""" - paths = [x.split(os.path.sep) for x in paths] - root = {} - for chunks in sorted(paths, key=len, reverse=True): - node = root - for chunk in chunks: - node = node.setdefault(chunk, {}) - node.clear() - - rv = set() - - def _walk(node, path): - for prefix, child in iteritems(node): - _walk(child, path + (prefix,)) - if not node: - rv.add('/'.join(path)) - _walk(root, ()) - return rv - - -class ReloaderLoop(object): - name = None - - # monkeypatched by testsuite. wrapping with `staticmethod` is required in - # case time.sleep has been replaced by a non-c function (e.g. by - # `eventlet.monkey_patch`) before we get here - _sleep = staticmethod(time.sleep) - - def __init__(self, extra_files=None, interval=1): - self.extra_files = set(os.path.abspath(x) - for x in extra_files or ()) - self.interval = interval - - def run(self): - pass - - def restart_with_reloader(self): - """Spawn a new Python interpreter with the same arguments as this one, - but running the reloader thread. 
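The restart protocol that docstring describes is simple enough to sketch on its own: the parent re-execs the script with `WERKZEUG_RUN_MAIN=true` in the environment and respawns it for as long as the child exits with status 3 (a standalone approximation, not the vendored code)::

    import os
    import subprocess
    import sys

    def restart_loop():
        # respawn this script until it exits with something other than 3,
        # the "please restart me" status the reloader uses
        while True:
            env = os.environ.copy()
            env['WERKZEUG_RUN_MAIN'] = 'true'  # marks the child as the worker
            exit_code = subprocess.call([sys.executable] + sys.argv, env=env)
            if exit_code != 3:
                return exit_code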
- """ - while 1: - _log('info', ' * Restarting with %s' % self.name) - args = _get_args_for_reloading() - new_environ = os.environ.copy() - new_environ['WERKZEUG_RUN_MAIN'] = 'true' - - # a weird bug on windows. sometimes unicode strings end up in the - # environment and subprocess.call does not like this, encode them - # to latin1 and continue. - if os.name == 'nt' and PY2: - for key, value in iteritems(new_environ): - if isinstance(value, text_type): - new_environ[key] = value.encode('iso-8859-1') - - exit_code = subprocess.call(args, env=new_environ, - close_fds=False) - if exit_code != 3: - return exit_code - - def trigger_reload(self, filename): - self.log_reload(filename) - sys.exit(3) - - def log_reload(self, filename): - filename = os.path.abspath(filename) - _log('info', ' * Detected change in %r, reloading' % filename) - - -class StatReloaderLoop(ReloaderLoop): - name = 'stat' - - def run(self): - mtimes = {} - while 1: - for filename in chain(_iter_module_files(), - self.extra_files): - try: - mtime = os.stat(filename).st_mtime - except OSError: - continue - - old_time = mtimes.get(filename) - if old_time is None: - mtimes[filename] = mtime - continue - elif mtime > old_time: - self.trigger_reload(filename) - self._sleep(self.interval) - - -class WatchdogReloaderLoop(ReloaderLoop): - - def __init__(self, *args, **kwargs): - ReloaderLoop.__init__(self, *args, **kwargs) - from watchdog.observers import Observer - from watchdog.events import FileSystemEventHandler - self.observable_paths = set() - - def _check_modification(filename): - if filename in self.extra_files: - self.trigger_reload(filename) - dirname = os.path.dirname(filename) - if dirname.startswith(tuple(self.observable_paths)): - if filename.endswith(('.pyc', '.pyo', '.py')): - self.trigger_reload(filename) - - class _CustomHandler(FileSystemEventHandler): - - def on_created(self, event): - _check_modification(event.src_path) - - def on_modified(self, event): - _check_modification(event.src_path) - - def on_moved(self, event): - _check_modification(event.src_path) - _check_modification(event.dest_path) - - def on_deleted(self, event): - _check_modification(event.src_path) - - reloader_name = Observer.__name__.lower() - if reloader_name.endswith('observer'): - reloader_name = reloader_name[:-8] - reloader_name += ' reloader' - - self.name = reloader_name - - self.observer_class = Observer - self.event_handler = _CustomHandler() - self.should_reload = False - - def trigger_reload(self, filename): - # This is called inside an event handler, which means throwing - # SystemExit has no effect. - # https://github.com/gorakhargosh/watchdog/issues/294 - self.should_reload = True - self.log_reload(filename) - - def run(self): - watches = {} - observer = self.observer_class() - observer.start() - - try: - while not self.should_reload: - to_delete = set(watches) - paths = _find_observable_paths(self.extra_files) - for path in paths: - if path not in watches: - try: - watches[path] = observer.schedule( - self.event_handler, path, recursive=True) - except OSError: - # Clear this path from list of watches We don't want - # the same error message showing again in the next - # iteration. 
- watches[path] = None - to_delete.discard(path) - for path in to_delete: - watch = watches.pop(path, None) - if watch is not None: - observer.unschedule(watch) - self.observable_paths = paths - self._sleep(self.interval) - finally: - observer.stop() - observer.join() - - sys.exit(3) - - -reloader_loops = { - 'stat': StatReloaderLoop, - 'watchdog': WatchdogReloaderLoop, -} - -try: - __import__('watchdog.observers') -except ImportError: - reloader_loops['auto'] = reloader_loops['stat'] -else: - reloader_loops['auto'] = reloader_loops['watchdog'] - - -def run_with_reloader(main_func, extra_files=None, interval=1, - reloader_type='auto'): - """Run the given function in an independent python interpreter.""" - import signal - reloader = reloader_loops[reloader_type](extra_files, interval) - signal.signal(signal.SIGTERM, lambda *args: sys.exit(0)) - try: - if os.environ.get('WERKZEUG_RUN_MAIN') == 'true': - t = threading.Thread(target=main_func, args=()) - t.setDaemon(True) - t.start() - reloader.run() - else: - sys.exit(reloader.restart_with_reloader()) - except KeyboardInterrupt: - pass diff --git a/pype/vendor/werkzeug/contrib/__init__.py b/pype/vendor/werkzeug/contrib/__init__.py deleted file mode 100644 index 3e572ebed1..0000000000 --- a/pype/vendor/werkzeug/contrib/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib - ~~~~~~~~~~~~~~~~ - - Contains user-submitted code that other users may find useful, but which - is not part of the Werkzeug core. Anyone can write code for inclusion in - the `contrib` package. All modules in this package are distributed as an - add-on library and thus are not part of Werkzeug itself. - - This file itself is mostly for informational purposes and to tell the - Python interpreter that `contrib` is a package. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" diff --git a/pype/vendor/werkzeug/contrib/atom.py b/pype/vendor/werkzeug/contrib/atom.py deleted file mode 100644 index 51d1a4e641..0000000000 --- a/pype/vendor/werkzeug/contrib/atom.py +++ /dev/null @@ -1,355 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.atom - ~~~~~~~~~~~~~~~~~~~~~ - - This module provides a class called :class:`AtomFeed` which can be - used to generate feeds in the Atom syndication format (see :rfc:`4287`). - - Example:: - - def atom_feed(request): - feed = AtomFeed("My Blog", feed_url=request.url, - url=request.host_url, - subtitle="My example blog for a feed test.") - for post in Post.query.limit(10).all(): - feed.add(post.title, post.body, content_type='html', - author=post.author, url=post.url, id=post.uid, - updated=post.last_update, published=post.pub_date) - return feed.get_response() - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -from datetime import datetime - -from werkzeug.utils import escape -from werkzeug.wrappers import BaseResponse -from werkzeug._compat import implements_to_string, string_types - - -XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml' - - -def _make_text_block(name, content, content_type=None): - """Helper function for the builder that creates an XML text block.""" - if content_type == 'xhtml': - return u'<%s type="xhtml">
<div xmlns="%s">%s</div></%s>
\n' % \
-            (name, XHTML_NAMESPACE, content, name)
-    if not content_type:
-        return u'<%s>%s</%s>\n' % (name, escape(content), name)
-    return u'<%s type="%s">%s</%s>\n' % (name, content_type,
-                                         escape(content), name)
-
-
-def format_iso8601(obj):
-    """Format a datetime object for iso8601"""
-    iso8601 = obj.isoformat()
-    if obj.tzinfo:
-        return iso8601
-    return iso8601 + 'Z'
-
-
-@implements_to_string
-class AtomFeed(object):
-
-    """A helper class that creates Atom feeds.
-
-    :param title: the title of the feed. Required.
-    :param title_type: the type attribute for the title element.  One of
-                       ``'html'``, ``'text'`` or ``'xhtml'``.
-    :param url: the url for the feed (not the url *of* the feed)
-    :param id: a globally unique id for the feed.  Must be an URI.  If
-               not present the `feed_url` is used, but one of both is
-               required.
-    :param updated: the time the feed was modified the last time.  Must
-                    be a :class:`datetime.datetime` object.  If not
-                    present the latest entry's `updated` is used.
-                    Treated as UTC if naive datetime.
-    :param feed_url: the URL to the feed.  Should be the URL that was
-                     requested.
-    :param author: the author of the feed.  Must be either a string (the
-                   name) or a dict with name (required) and uri or
-                   email (both optional).  Can be a list of (may be
-                   mixed, too) strings and dicts, too, if there are
-                   multiple authors.  Required if not every entry has an
-                   author element.
-    :param icon: an icon for the feed.
-    :param logo: a logo for the feed.
-    :param rights: copyright information for the feed.
-    :param rights_type: the type attribute for the rights element.  One of
-                        ``'html'``, ``'text'`` or ``'xhtml'``.  Default is
-                        ``'text'``.
-    :param subtitle: a short description of the feed.
-    :param subtitle_type: the type attribute for the subtitle element.
-                          One of ``'text'``, ``'html'`` or ``'xhtml'``.
-                          Default is ``'text'``.
-    :param links: additional links.  Must be a list of dictionaries with
-                  href (required) and rel, type, hreflang, title, length
-                  (all optional)
-    :param generator: the software that generated this feed.  This must be
-                      a tuple in the form ``(name, url, version)``.  If
-                      you don't want to specify one of them, set the item
-                      to `None`.
-    :param entries: a list with the entries for the feed. Entries can also
-                    be added later with :meth:`add`.
-
-    For more information on the elements see
-    http://www.atomenabled.org/developers/syndication/
-
-    Everywhere where a list is demanded, any iterable can be used.
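Putting the constructor parameters above to work, a minimal feed might be built like this (all values are invented for illustration)::

    from datetime import datetime

    feed = AtomFeed('Example Blog', feed_url='http://example.com/feed.atom',
                    url='http://example.com/', author='Jane Doe')
    feed.add('First post', '<p>Hello, world!</p>', content_type='html',
             url='http://example.com/posts/1',
             updated=datetime(2019, 1, 1, 12, 0))
    xml = feed.to_string()  # the serialized Atom document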
- """ - - default_generator = ('Werkzeug', None, None) - - def __init__(self, title=None, entries=None, **kwargs): - self.title = title - self.title_type = kwargs.get('title_type', 'text') - self.url = kwargs.get('url') - self.feed_url = kwargs.get('feed_url', self.url) - self.id = kwargs.get('id', self.feed_url) - self.updated = kwargs.get('updated') - self.author = kwargs.get('author', ()) - self.icon = kwargs.get('icon') - self.logo = kwargs.get('logo') - self.rights = kwargs.get('rights') - self.rights_type = kwargs.get('rights_type') - self.subtitle = kwargs.get('subtitle') - self.subtitle_type = kwargs.get('subtitle_type', 'text') - self.generator = kwargs.get('generator') - if self.generator is None: - self.generator = self.default_generator - self.links = kwargs.get('links', []) - self.entries = entries and list(entries) or [] - - if not hasattr(self.author, '__iter__') \ - or isinstance(self.author, string_types + (dict,)): - self.author = [self.author] - for i, author in enumerate(self.author): - if not isinstance(author, dict): - self.author[i] = {'name': author} - - if not self.title: - raise ValueError('title is required') - if not self.id: - raise ValueError('id is required') - for author in self.author: - if 'name' not in author: - raise TypeError('author must contain at least a name') - - def add(self, *args, **kwargs): - """Add a new entry to the feed. This function can either be called - with a :class:`FeedEntry` or some keyword and positional arguments - that are forwarded to the :class:`FeedEntry` constructor. - """ - if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry): - self.entries.append(args[0]) - else: - kwargs['feed_url'] = self.feed_url - self.entries.append(FeedEntry(*args, **kwargs)) - - def __repr__(self): - return '<%s %r (%d entries)>' % ( - self.__class__.__name__, - self.title, - len(self.entries) - ) - - def generate(self): - """Return a generator that yields pieces of XML.""" - # atom demands either an author element in every entry or a global one - if not self.author: - if any(not e.author for e in self.entries): - self.author = ({'name': 'Unknown author'},) - - if not self.updated: - dates = sorted([entry.updated for entry in self.entries]) - self.updated = dates and dates[-1] or datetime.utcnow() - - yield u'\n' - yield u'\n' - yield ' ' + _make_text_block('title', self.title, self.title_type) - yield u' %s\n' % escape(self.id) - yield u' %s\n' % format_iso8601(self.updated) - if self.url: - yield u' \n' % escape(self.url) - if self.feed_url: - yield u' \n' % \ - escape(self.feed_url) - for link in self.links: - yield u' \n' % ''.join('%s="%s" ' % - (k, escape(link[k])) for k in link) - for author in self.author: - yield u' \n' - yield u' %s\n' % escape(author['name']) - if 'uri' in author: - yield u' %s\n' % escape(author['uri']) - if 'email' in author: - yield ' %s\n' % escape(author['email']) - yield ' \n' - if self.subtitle: - yield ' ' + _make_text_block('subtitle', self.subtitle, - self.subtitle_type) - if self.icon: - yield u' %s\n' % escape(self.icon) - if self.logo: - yield u' %s\n' % escape(self.logo) - if self.rights: - yield ' ' + _make_text_block('rights', self.rights, - self.rights_type) - generator_name, generator_url, generator_version = self.generator - if generator_name or generator_url or generator_version: - tmp = [u' %s\n' % escape(generator_name)) - yield u''.join(tmp) - for entry in self.entries: - for line in entry.generate(): - yield u' ' + line - yield u'\n' - - def to_string(self): - """Convert the feed into 
a string.""" - return u''.join(self.generate()) - - def get_response(self): - """Return a response object for the feed.""" - return BaseResponse(self.to_string(), mimetype='application/atom+xml') - - def __call__(self, environ, start_response): - """Use the class as WSGI response object.""" - return self.get_response()(environ, start_response) - - def __str__(self): - return self.to_string() - - -@implements_to_string -class FeedEntry(object): - - """Represents a single entry in a feed. - - :param title: the title of the entry. Required. - :param title_type: the type attribute for the title element. One of - ``'html'``, ``'text'`` or ``'xhtml'``. - :param content: the content of the entry. - :param content_type: the type attribute for the content element. One - of ``'html'``, ``'text'`` or ``'xhtml'``. - :param summary: a summary of the entry's content. - :param summary_type: the type attribute for the summary element. One - of ``'html'``, ``'text'`` or ``'xhtml'``. - :param url: the url for the entry. - :param id: a globally unique id for the entry. Must be an URI. If - not present the URL is used, but one of both is required. - :param updated: the time the entry was modified the last time. Must - be a :class:`datetime.datetime` object. Treated as - UTC if naive datetime. Required. - :param author: the author of the entry. Must be either a string (the - name) or a dict with name (required) and uri or - email (both optional). Can be a list of (may be - mixed, too) strings and dicts, too, if there are - multiple authors. Required if the feed does not have an - author element. - :param published: the time the entry was initially published. Must - be a :class:`datetime.datetime` object. Treated as - UTC if naive datetime. - :param rights: copyright information for the entry. - :param rights_type: the type attribute for the rights element. One of - ``'html'``, ``'text'`` or ``'xhtml'``. Default is - ``'text'``. - :param links: additional links. Must be a list of dictionaries with - href (required) and rel, type, hreflang, title, length - (all optional) - :param categories: categories for the entry. Must be a list of dictionaries - with term (required), scheme and label (all optional) - :param xml_base: The xml base (url) for this feed item. If not provided - it will default to the item url. - - For more information on the elements see - http://www.atomenabled.org/developers/syndication/ - - Everywhere where a list is demanded, any iterable can be used. 
- """ - - def __init__(self, title=None, content=None, feed_url=None, **kwargs): - self.title = title - self.title_type = kwargs.get('title_type', 'text') - self.content = content - self.content_type = kwargs.get('content_type', 'html') - self.url = kwargs.get('url') - self.id = kwargs.get('id', self.url) - self.updated = kwargs.get('updated') - self.summary = kwargs.get('summary') - self.summary_type = kwargs.get('summary_type', 'html') - self.author = kwargs.get('author', ()) - self.published = kwargs.get('published') - self.rights = kwargs.get('rights') - self.links = kwargs.get('links', []) - self.categories = kwargs.get('categories', []) - self.xml_base = kwargs.get('xml_base', feed_url) - - if not hasattr(self.author, '__iter__') \ - or isinstance(self.author, string_types + (dict,)): - self.author = [self.author] - for i, author in enumerate(self.author): - if not isinstance(author, dict): - self.author[i] = {'name': author} - - if not self.title: - raise ValueError('title is required') - if not self.id: - raise ValueError('id is required') - if not self.updated: - raise ValueError('updated is required') - - def __repr__(self): - return '<%s %r>' % ( - self.__class__.__name__, - self.title - ) - - def generate(self): - """Yields pieces of ATOM XML.""" - base = '' - if self.xml_base: - base = ' xml:base="%s"' % escape(self.xml_base) - yield u'\n' % base - yield u' ' + _make_text_block('title', self.title, self.title_type) - yield u' %s\n' % escape(self.id) - yield u' %s\n' % format_iso8601(self.updated) - if self.published: - yield u' %s\n' % \ - format_iso8601(self.published) - if self.url: - yield u' \n' % escape(self.url) - for author in self.author: - yield u' \n' - yield u' %s\n' % escape(author['name']) - if 'uri' in author: - yield u' %s\n' % escape(author['uri']) - if 'email' in author: - yield u' %s\n' % escape(author['email']) - yield u' \n' - for link in self.links: - yield u' \n' % ''.join('%s="%s" ' % - (k, escape(link[k])) for k in link) - for category in self.categories: - yield u' \n' % ''.join('%s="%s" ' % - (k, escape(category[k])) for k in category) - if self.summary: - yield u' ' + _make_text_block('summary', self.summary, - self.summary_type) - if self.content: - yield u' ' + _make_text_block('content', self.content, - self.content_type) - yield u'\n' - - def to_string(self): - """Convert the feed item into a unicode object.""" - return u''.join(self.generate()) - - def __str__(self): - return self.to_string() diff --git a/pype/vendor/werkzeug/contrib/cache.py b/pype/vendor/werkzeug/contrib/cache.py deleted file mode 100644 index 179ba24a81..0000000000 --- a/pype/vendor/werkzeug/contrib/cache.py +++ /dev/null @@ -1,913 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.cache - ~~~~~~~~~~~~~~~~~~~~~~ - - The main problem with dynamic Web sites is, well, they're dynamic. Each - time a user requests a page, the webserver executes a lot of code, queries - the database, renders templates until the visitor gets the page he sees. - - This is a lot more expensive than just loading a file from the file system - and sending it to the visitor. - - For most Web applications, this overhead isn't a big deal but once it - becomes, you will be glad to have a cache system in place. - - How Caching Works - ================= - - Caching is pretty simple. Basically you have a cache object lurking around - somewhere that is connected to a remote cache or the file system or - something else. 
When the request comes in you check if the current page - is already in the cache and if so, you're returning it from the cache. - Otherwise you generate the page and put it into the cache. (Or a fragment - of the page, you don't have to cache the full thing) - - Here is a simple example of how to cache a sidebar for 5 minutes:: - - def get_sidebar(user): - identifier = 'sidebar_for/user%d' % user.id - value = cache.get(identifier) - if value is not None: - return value - value = generate_sidebar_for(user=user) - cache.set(identifier, value, timeout=60 * 5) - return value - - Creating a Cache Object - ======================= - - To create a cache object you just import the cache system of your choice - from the cache module and instantiate it. Then you can start working - with that object: - - >>> from werkzeug.contrib.cache import SimpleCache - >>> c = SimpleCache() - >>> c.set("foo", "value") - >>> c.get("foo") - 'value' - >>> c.get("missing") is None - True - - Please keep in mind that you have to create the cache and put it somewhere - you have access to it (either as a module global you can import or you just - put it into your WSGI application). - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import os -import re -import errno -import tempfile -import platform -from hashlib import md5 -from time import time -try: - import cPickle as pickle -except ImportError: # pragma: no cover - import pickle - -from werkzeug._compat import iteritems, string_types, text_type, \ - integer_types, to_native -from werkzeug.posixemulation import rename - - -def _items(mappingorseq): - """Wrapper for efficient iteration over mappings represented by dicts - or sequences:: - - >>> for k, v in _items((i, i*i) for i in xrange(5)): - ... assert k*k == v - - >>> for k, v in _items(dict((i, i*i) for i in xrange(5))): - ... assert k*k == v - - """ - if hasattr(mappingorseq, 'items'): - return iteritems(mappingorseq) - return mappingorseq - - -class BaseCache(object): - - """Baseclass for the cache systems. All the cache systems implement this - API or a superset of it. - - :param default_timeout: the default timeout (in seconds) that is used if - no timeout is specified on :meth:`set`. A timeout - of 0 indicates that the cache never expires. - """ - - def __init__(self, default_timeout=300): - self.default_timeout = default_timeout - - def _normalize_timeout(self, timeout): - if timeout is None: - timeout = self.default_timeout - return timeout - - def get(self, key): - """Look up key in the cache and return the value for it. - - :param key: the key to be looked up. - :returns: The value if it exists and is readable, else ``None``. - """ - return None - - def delete(self, key): - """Delete `key` from the cache. - - :param key: the key to delete. - :returns: Whether the key existed and has been deleted. - :rtype: boolean - """ - return True - - def get_many(self, *keys): - """Returns a list of values for the given keys. - For each key an item in the list is created:: - - foo, bar = cache.get_many("foo", "bar") - - Has the same error handling as :meth:`get`. - - :param keys: The function accepts multiple keys as positional - arguments. - """ - return [self.get(k) for k in keys] - - def get_dict(self, *keys): - """Like :meth:`get_many` but return a dict:: - - d = cache.get_dict("foo", "bar") - foo = d["foo"] - bar = d["bar"] - - :param keys: The function accepts multiple keys as positional - arguments. 
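The get/generate/set dance from the module docstring generalizes into a small decorator over this `BaseCache` interface; a hedged sketch (the key scheme is made up, and positional arguments must have a stable `repr`)::

    import functools

    def cached(cache, timeout=300):
        # route a function's results through cache.get / cache.set
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args):
                key = '%s/%r' % (func.__name__, args)
                rv = cache.get(key)
                if rv is None:
                    rv = func(*args)
                    cache.set(key, rv, timeout=timeout)
                return rv
            return wrapper
        return decorator

    # e.g. sidebar = cached(SimpleCache(), timeout=60 * 5)(generate_sidebar)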
- """ - return dict(zip(keys, self.get_many(*keys))) - - def set(self, key, value, timeout=None): - """Add a new key/value to the cache (overwrites value, if key already - exists in the cache). - - :param key: the key to set - :param value: the value for the key - :param timeout: the cache timeout for the key in seconds (if not - specified, it uses the default timeout). A timeout of - 0 idicates that the cache never expires. - :returns: ``True`` if key has been updated, ``False`` for backend - errors. Pickling errors, however, will raise a subclass of - ``pickle.PickleError``. - :rtype: boolean - """ - return True - - def add(self, key, value, timeout=None): - """Works like :meth:`set` but does not overwrite the values of already - existing keys. - - :param key: the key to set - :param value: the value for the key - :param timeout: the cache timeout for the key in seconds (if not - specified, it uses the default timeout). A timeout of - 0 idicates that the cache never expires. - :returns: Same as :meth:`set`, but also ``False`` for already - existing keys. - :rtype: boolean - """ - return True - - def set_many(self, mapping, timeout=None): - """Sets multiple keys and values from a mapping. - - :param mapping: a mapping with the keys/values to set. - :param timeout: the cache timeout for the key in seconds (if not - specified, it uses the default timeout). A timeout of - 0 idicates that the cache never expires. - :returns: Whether all given keys have been set. - :rtype: boolean - """ - rv = True - for key, value in _items(mapping): - if not self.set(key, value, timeout): - rv = False - return rv - - def delete_many(self, *keys): - """Deletes multiple keys at once. - - :param keys: The function accepts multiple keys as positional - arguments. - :returns: Whether all given keys have been deleted. - :rtype: boolean - """ - return all(self.delete(key) for key in keys) - - def has(self, key): - """Checks if a key exists in the cache without returning it. This is a - cheap operation that bypasses loading the actual data on the backend. - - This method is optional and may not be implemented on all caches. - - :param key: the key to check - """ - raise NotImplementedError( - '%s doesn\'t have an efficient implementation of `has`. That ' - 'means it is impossible to check whether a key exists without ' - 'fully loading the key\'s data. Consider using `self.get` ' - 'explicitly if you don\'t care about performance.' - ) - - def clear(self): - """Clears the cache. Keep in mind that not all caches support - completely clearing the cache. - - :returns: Whether the cache has been cleared. - :rtype: boolean - """ - return True - - def inc(self, key, delta=1): - """Increments the value of a key by `delta`. If the key does - not yet exist it is initialized with `delta`. - - For supporting caches this is an atomic operation. - - :param key: the key to increment. - :param delta: the delta to add. - :returns: The new value or ``None`` for backend errors. - """ - value = (self.get(key) or 0) + delta - return value if self.set(key, value) else None - - def dec(self, key, delta=1): - """Decrements the value of a key by `delta`. If the key does - not yet exist it is initialized with `-delta`. - - For supporting caches this is an atomic operation. - - :param key: the key to increment. - :param delta: the delta to subtract. - :returns: The new value or `None` for backend errors. 
- """ - value = (self.get(key) or 0) - delta - return value if self.set(key, value) else None - - -class NullCache(BaseCache): - - """A cache that doesn't cache. This can be useful for unit testing. - - :param default_timeout: a dummy parameter that is ignored but exists - for API compatibility with other caches. - """ - - def has(self, key): - return False - - -class SimpleCache(BaseCache): - - """Simple memory cache for single process environments. This class exists - mainly for the development server and is not 100% thread safe. It tries - to use as many atomic operations as possible and no locks for simplicity - but it could happen under heavy load that keys are added multiple times. - - :param threshold: the maximum number of items the cache stores before - it starts deleting some. - :param default_timeout: the default timeout that is used if no timeout is - specified on :meth:`~BaseCache.set`. A timeout of - 0 indicates that the cache never expires. - """ - - def __init__(self, threshold=500, default_timeout=300): - BaseCache.__init__(self, default_timeout) - self._cache = {} - self.clear = self._cache.clear - self._threshold = threshold - - def _prune(self): - if len(self._cache) > self._threshold: - now = time() - toremove = [] - for idx, (key, (expires, _)) in enumerate(self._cache.items()): - if (expires != 0 and expires <= now) or idx % 3 == 0: - toremove.append(key) - for key in toremove: - self._cache.pop(key, None) - - def _normalize_timeout(self, timeout): - timeout = BaseCache._normalize_timeout(self, timeout) - if timeout > 0: - timeout = time() + timeout - return timeout - - def get(self, key): - try: - expires, value = self._cache[key] - if expires == 0 or expires > time(): - return pickle.loads(value) - except (KeyError, pickle.PickleError): - return None - - def set(self, key, value, timeout=None): - expires = self._normalize_timeout(timeout) - self._prune() - self._cache[key] = (expires, pickle.dumps(value, - pickle.HIGHEST_PROTOCOL)) - return True - - def add(self, key, value, timeout=None): - expires = self._normalize_timeout(timeout) - self._prune() - item = (expires, pickle.dumps(value, - pickle.HIGHEST_PROTOCOL)) - if key in self._cache: - return False - self._cache.setdefault(key, item) - return True - - def delete(self, key): - return self._cache.pop(key, None) is not None - - def has(self, key): - try: - expires, value = self._cache[key] - return expires == 0 or expires > time() - except KeyError: - return False - -_test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match - - -class MemcachedCache(BaseCache): - - """A cache that uses memcached as backend. - - The first argument can either be an object that resembles the API of a - :class:`memcache.Client` or a tuple/list of server addresses. In the - event that a tuple/list is passed, Werkzeug tries to import the best - available memcache library. - - This cache looks into the following packages/modules to find bindings for - memcached: - - - ``pylibmc`` - - ``google.appengine.api.memcached`` - - ``memcached`` - - ``libmc`` - - Implementation notes: This cache backend works around some limitations in - memcached to simplify the interface. For example unicode keys are encoded - to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return - the keys in the same format as passed. Furthermore all get methods - silently ignore key errors to not cause problems when untrusted user data - is passed to the get methods which is often the case in web applications. 
- - :param servers: a list or tuple of server addresses or alternatively - a :class:`memcache.Client` or a compatible client. - :param default_timeout: the default timeout that is used if no timeout is - specified on :meth:`~BaseCache.set`. A timeout of - 0 indicates that the cache never expires. - :param key_prefix: a prefix that is added before all keys. This makes it - possible to use the same memcached server for different - applications. Keep in mind that - :meth:`~BaseCache.clear` will also clear keys with a - different prefix. - """ - - def __init__(self, servers=None, default_timeout=300, key_prefix=None): - BaseCache.__init__(self, default_timeout) - if servers is None or isinstance(servers, (list, tuple)): - if servers is None: - servers = ['127.0.0.1:11211'] - self._client = self.import_preferred_memcache_lib(servers) - if self._client is None: - raise RuntimeError('no memcache module found') - else: - # NOTE: servers is actually an already initialized memcache - # client. - self._client = servers - - self.key_prefix = to_native(key_prefix) - - def _normalize_key(self, key): - key = to_native(key, 'utf-8') - if self.key_prefix: - key = self.key_prefix + key - return key - - def _normalize_timeout(self, timeout): - timeout = BaseCache._normalize_timeout(self, timeout) - if timeout > 0: - timeout = int(time()) + timeout - return timeout - - def get(self, key): - key = self._normalize_key(key) - # memcached doesn't support keys longer than that. Because often - # checks for so long keys can occur because it's tested from user - # submitted data etc we fail silently for getting. - if _test_memcached_key(key): - return self._client.get(key) - - def get_dict(self, *keys): - key_mapping = {} - have_encoded_keys = False - for key in keys: - encoded_key = self._normalize_key(key) - if not isinstance(key, str): - have_encoded_keys = True - if _test_memcached_key(key): - key_mapping[encoded_key] = key - _keys = list(key_mapping) - d = rv = self._client.get_multi(_keys) - if have_encoded_keys or self.key_prefix: - rv = {} - for key, value in iteritems(d): - rv[key_mapping[key]] = value - if len(rv) < len(keys): - for key in keys: - if key not in rv: - rv[key] = None - return rv - - def add(self, key, value, timeout=None): - key = self._normalize_key(key) - timeout = self._normalize_timeout(timeout) - return self._client.add(key, value, timeout) - - def set(self, key, value, timeout=None): - key = self._normalize_key(key) - timeout = self._normalize_timeout(timeout) - return self._client.set(key, value, timeout) - - def get_many(self, *keys): - d = self.get_dict(*keys) - return [d[key] for key in keys] - - def set_many(self, mapping, timeout=None): - new_mapping = {} - for key, value in _items(mapping): - key = self._normalize_key(key) - new_mapping[key] = value - - timeout = self._normalize_timeout(timeout) - failed_keys = self._client.set_multi(new_mapping, timeout) - return not failed_keys - - def delete(self, key): - key = self._normalize_key(key) - if _test_memcached_key(key): - return self._client.delete(key) - - def delete_many(self, *keys): - new_keys = [] - for key in keys: - key = self._normalize_key(key) - if _test_memcached_key(key): - new_keys.append(key) - return self._client.delete_multi(new_keys) - - def has(self, key): - key = self._normalize_key(key) - if _test_memcached_key(key): - return self._client.append(key, '') - return False - - def clear(self): - return self._client.flush_all() - - def inc(self, key, delta=1): - key = self._normalize_key(key) - return 
self._client.incr(key, delta) - - def dec(self, key, delta=1): - key = self._normalize_key(key) - return self._client.decr(key, delta) - - def import_preferred_memcache_lib(self, servers): - """Returns an initialized memcache client. Used by the constructor.""" - try: - import pylibmc - except ImportError: - pass - else: - return pylibmc.Client(servers) - - try: - from google.appengine.api import memcache - except ImportError: - pass - else: - return memcache.Client() - - try: - import memcache - except ImportError: - pass - else: - return memcache.Client(servers) - - try: - import libmc - except ImportError: - pass - else: - return libmc.Client(servers) - - -# backwards compatibility -GAEMemcachedCache = MemcachedCache - - -class RedisCache(BaseCache): - - """Uses the Redis key-value store as a cache backend. - - The first argument can be either a string denoting address of the Redis - server or an object resembling an instance of a redis.Redis class. - - Note: Python Redis API already takes care of encoding unicode strings on - the fly. - - .. versionadded:: 0.7 - - .. versionadded:: 0.8 - `key_prefix` was added. - - .. versionchanged:: 0.8 - This cache backend now properly serializes objects. - - .. versionchanged:: 0.8.3 - This cache backend now supports password authentication. - - .. versionchanged:: 0.10 - ``**kwargs`` is now passed to the redis object. - - :param host: address of the Redis server or an object which API is - compatible with the official Python Redis client (redis-py). - :param port: port number on which Redis server listens for connections. - :param password: password authentication for the Redis server. - :param db: db (zero-based numeric index) on Redis Server to connect. - :param default_timeout: the default timeout that is used if no timeout is - specified on :meth:`~BaseCache.set`. A timeout of - 0 indicates that the cache never expires. - :param key_prefix: A prefix that should be added to all keys. - - Any additional keyword arguments will be passed to ``redis.Redis``. - """ - - def __init__(self, host='localhost', port=6379, password=None, - db=0, default_timeout=300, key_prefix=None, **kwargs): - BaseCache.__init__(self, default_timeout) - if host is None: - raise ValueError('RedisCache host parameter may not be None') - if isinstance(host, string_types): - try: - import redis - except ImportError: - raise RuntimeError('no redis module found') - if kwargs.get('decode_responses', None): - raise ValueError('decode_responses is not supported by ' - 'RedisCache.') - self._client = redis.Redis(host=host, port=port, password=password, - db=db, **kwargs) - else: - self._client = host - self.key_prefix = key_prefix or '' - - def _normalize_timeout(self, timeout): - timeout = BaseCache._normalize_timeout(self, timeout) - if timeout == 0: - timeout = -1 - return timeout - - def dump_object(self, value): - """Dumps an object into a string for redis. By default it serializes - integers as regular string and pickle dumps everything else. - """ - t = type(value) - if t in integer_types: - return str(value).encode('ascii') - return b'!' + pickle.dumps(value) - - def load_object(self, value): - """The reversal of :meth:`dump_object`. This might be called with - None. - """ - if value is None: - return None - if value.startswith(b'!'): - try: - return pickle.loads(value[1:]) - except pickle.PickleError: - return None - try: - return int(value) - except ValueError: - # before 0.8 we did not have serialization. Still support that. 
- return value - - def get(self, key): - return self.load_object(self._client.get(self.key_prefix + key)) - - def get_many(self, *keys): - if self.key_prefix: - keys = [self.key_prefix + key for key in keys] - return [self.load_object(x) for x in self._client.mget(keys)] - - def set(self, key, value, timeout=None): - timeout = self._normalize_timeout(timeout) - dump = self.dump_object(value) - if timeout == -1: - result = self._client.set(name=self.key_prefix + key, - value=dump) - else: - result = self._client.setex(name=self.key_prefix + key, - value=dump, time=timeout) - return result - - def add(self, key, value, timeout=None): - timeout = self._normalize_timeout(timeout) - dump = self.dump_object(value) - return ( - self._client.setnx(name=self.key_prefix + key, value=dump) and - self._client.expire(name=self.key_prefix + key, time=timeout) - ) - - def set_many(self, mapping, timeout=None): - timeout = self._normalize_timeout(timeout) - # Use transaction=False to batch without calling redis MULTI - # which is not supported by twemproxy - pipe = self._client.pipeline(transaction=False) - - for key, value in _items(mapping): - dump = self.dump_object(value) - if timeout == -1: - pipe.set(name=self.key_prefix + key, value=dump) - else: - pipe.setex(name=self.key_prefix + key, value=dump, - time=timeout) - return pipe.execute() - - def delete(self, key): - return self._client.delete(self.key_prefix + key) - - def delete_many(self, *keys): - if not keys: - return - if self.key_prefix: - keys = [self.key_prefix + key for key in keys] - return self._client.delete(*keys) - - def has(self, key): - return self._client.exists(self.key_prefix + key) - - def clear(self): - status = False - if self.key_prefix: - keys = self._client.keys(self.key_prefix + '*') - if keys: - status = self._client.delete(*keys) - else: - status = self._client.flushdb() - return status - - def inc(self, key, delta=1): - return self._client.incr(name=self.key_prefix + key, amount=delta) - - def dec(self, key, delta=1): - return self._client.decr(name=self.key_prefix + key, amount=delta) - - -class FileSystemCache(BaseCache): - - """A cache that stores the items on the file system. This cache depends - on being the only user of the `cache_dir`. Make absolutely sure that - nobody but this cache stores files there or otherwise the cache will - randomly delete files therein. - - :param cache_dir: the directory where cache files are stored. - :param threshold: the maximum number of items the cache stores before - it starts deleting some. A threshold value of 0 - indicates no threshold. - :param default_timeout: the default timeout that is used if no timeout is - specified on :meth:`~BaseCache.set`. A timeout of - 0 indicates that the cache never expires. 
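
The `dump_object`/`load_object` pair above is the whole serialization story for the RedisCache being removed: integers travel as plain ASCII digits, everything else as pickle behind a `b'!'` marker, and unmarked non-integer bytes (entries written before 0.8) fall through unchanged. A standalone sketch of the roundtrip, no Redis connection required:

import pickle

def dump_object(value):
    # exact type check, as in the original: bools are pickled, not
    # stored as digits, so they roundtrip as bools
    if type(value) is int:
        return str(value).encode('ascii')
    return b'!' + pickle.dumps(value)

def load_object(value):
    if value is None:
        return None
    if value.startswith(b'!'):
        return pickle.loads(value[1:])
    try:
        return int(value)
    except ValueError:
        # pre-0.8 entries were stored raw; hand them back unchanged
        return value

assert load_object(dump_object(42)) == 42
assert load_object(dump_object({'a': (1, 2)})) == {'a': (1, 2)}

`set_many` then pushes these dumps through `client.pipeline(transaction=False)`, batching in one round-trip while avoiding the MULTI command that twemproxy rejects.
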
- :param mode: the file mode wanted for the cache files, default 0600 - """ - - #: used for temporary files by the FileSystemCache - _fs_transaction_suffix = '.__wz_cache' - #: keep amount of files in a cache element - _fs_count_file = '__wz_cache_count' - - def __init__(self, cache_dir, threshold=500, default_timeout=300, - mode=0o600): - BaseCache.__init__(self, default_timeout) - self._path = cache_dir - self._threshold = threshold - self._mode = mode - - try: - os.makedirs(self._path) - except OSError as ex: - if ex.errno != errno.EEXIST: - raise - - self._update_count(value=len(self._list_dir())) - - @property - def _file_count(self): - return self.get(self._fs_count_file) or 0 - - def _update_count(self, delta=None, value=None): - # If we have no threshold, don't count files - if self._threshold == 0: - return - - if delta: - new_count = self._file_count + delta - else: - new_count = value or 0 - self.set(self._fs_count_file, new_count, mgmt_element=True) - - def _normalize_timeout(self, timeout): - timeout = BaseCache._normalize_timeout(self, timeout) - if timeout != 0: - timeout = time() + timeout - return int(timeout) - - def _list_dir(self): - """return a list of (fully qualified) cache filenames - """ - mgmt_files = [self._get_filename(name).split('/')[-1] - for name in (self._fs_count_file,)] - return [os.path.join(self._path, fn) for fn in os.listdir(self._path) - if not fn.endswith(self._fs_transaction_suffix) - and fn not in mgmt_files] - - def _prune(self): - if self._threshold == 0 or not self._file_count > self._threshold: - return - - entries = self._list_dir() - now = time() - for idx, fname in enumerate(entries): - try: - remove = False - with open(fname, 'rb') as f: - expires = pickle.load(f) - remove = (expires != 0 and expires <= now) or idx % 3 == 0 - - if remove: - os.remove(fname) - except (IOError, OSError): - pass - self._update_count(value=len(self._list_dir())) - - def clear(self): - for fname in self._list_dir(): - try: - os.remove(fname) - except (IOError, OSError): - self._update_count(value=len(self._list_dir())) - return False - self._update_count(value=0) - return True - - def _get_filename(self, key): - if isinstance(key, text_type): - key = key.encode('utf-8') # XXX unicode review - hash = md5(key).hexdigest() - return os.path.join(self._path, hash) - - def get(self, key): - filename = self._get_filename(key) - try: - with open(filename, 'rb') as f: - pickle_time = pickle.load(f) - if pickle_time == 0 or pickle_time >= time(): - return pickle.load(f) - else: - os.remove(filename) - return None - except (IOError, OSError, pickle.PickleError): - return None - - def add(self, key, value, timeout=None): - filename = self._get_filename(key) - if not os.path.exists(filename): - return self.set(key, value, timeout) - return False - - def set(self, key, value, timeout=None, mgmt_element=False): - # Management elements have no timeout - if mgmt_element: - timeout = 0 - - # Don't prune on management element update, to avoid loop - else: - self._prune() - - timeout = self._normalize_timeout(timeout) - filename = self._get_filename(key) - try: - fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix, - dir=self._path) - with os.fdopen(fd, 'wb') as f: - pickle.dump(timeout, f, 1) - pickle.dump(value, f, pickle.HIGHEST_PROTOCOL) - rename(tmp, filename) - os.chmod(filename, self._mode) - except (IOError, OSError): - return False - else: - # Management elements should not count towards threshold - if not mgmt_element: - self._update_count(delta=1) - return 
True - - def delete(self, key, mgmt_element=False): - try: - os.remove(self._get_filename(key)) - except (IOError, OSError): - return False - else: - # Management elements should not count towards threshold - if not mgmt_element: - self._update_count(delta=-1) - return True - - def has(self, key): - filename = self._get_filename(key) - try: - with open(filename, 'rb') as f: - pickle_time = pickle.load(f) - if pickle_time == 0 or pickle_time >= time(): - return True - else: - os.remove(filename) - return False - except (IOError, OSError, pickle.PickleError): - return False - - -class UWSGICache(BaseCache): - """ Implements the cache using uWSGI's caching framework. - - .. note:: - This class cannot be used when running under PyPy, because the uWSGI - API implementation for PyPy is lacking the needed functionality. - - :param default_timeout: The default timeout in seconds. - :param cache: The name of the caching instance to connect to, for - example: mycache@localhost:3031, defaults to an empty string, which - means uWSGI will cache in the local instance. If the cache is in the - same instance as the werkzeug app, you only have to provide the name of - the cache. - """ - def __init__(self, default_timeout=300, cache=''): - BaseCache.__init__(self, default_timeout) - - if platform.python_implementation() == 'PyPy': - raise RuntimeError("uWSGI caching does not work under PyPy, see " - "the docs for more details.") - - try: - import uwsgi - self._uwsgi = uwsgi - except ImportError: - raise RuntimeError("uWSGI could not be imported, are you " - "running under uWSGI?") - - self.cache = cache - - def get(self, key): - rv = self._uwsgi.cache_get(key, self.cache) - if rv is None: - return - return pickle.loads(rv) - - def delete(self, key): - return self._uwsgi.cache_del(key, self.cache) - - def set(self, key, value, timeout=None): - return self._uwsgi.cache_update(key, pickle.dumps(value), - self._normalize_timeout(timeout), - self.cache) - - def add(self, key, value, timeout=None): - return self._uwsgi.cache_set(key, pickle.dumps(value), - self._normalize_timeout(timeout), - self.cache) - - def clear(self): - return self._uwsgi.cache_clear(self.cache) - - def has(self, key): - return self._uwsgi.cache_exists(key, self.cache) is not None diff --git a/pype/vendor/werkzeug/contrib/fixers.py b/pype/vendor/werkzeug/contrib/fixers.py deleted file mode 100644 index b88a861b21..0000000000 --- a/pype/vendor/werkzeug/contrib/fixers.py +++ /dev/null @@ -1,254 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.fixers - ~~~~~~~~~~~~~~~~~~~~~~~ - - .. versionadded:: 0.5 - - This module includes various helpers that fix bugs in web servers. They may - be necessary for some versions of a buggy web server but not others. We try - to stay updated with the status of the bugs as good as possible but you have - to make sure whether they fix the problem you encounter. - - If you notice bugs in webservers not fixed in this module consider - contributing a patch. - - :copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. 
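
The write path of the FileSystemCache above relies on the classic temp-file-plus-rename trick so concurrent readers never observe a half-written entry. A condensed sketch of that idea, using `os.replace` where the original goes through werkzeug's `posixemulation.rename`:

import os
import pickle
import tempfile

def atomic_cache_write(filename, timeout, value, suffix='.__wz_cache'):
    # dump into a temp file in the target directory, then atomically
    # swap it into place; the expiry timestamp is pickled first so
    # readers can check freshness without unpickling the payload
    directory = os.path.dirname(filename) or '.'
    fd, tmp = tempfile.mkstemp(suffix=suffix, dir=directory)
    with os.fdopen(fd, 'wb') as f:
        pickle.dump(timeout, f, 1)
        pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
    os.replace(tmp, filename)
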
-""" -try: - from urllib import unquote -except ImportError: - from urllib.parse import unquote - -from werkzeug.http import parse_options_header, parse_cache_control_header, \ - parse_set_header -from werkzeug.useragents import UserAgent -from werkzeug.datastructures import Headers, ResponseCacheControl - - -class CGIRootFix(object): - - """Wrap the application in this middleware if you are using FastCGI or CGI - and you have problems with your app root being set to the cgi script's path - instead of the path users are going to visit - - .. versionchanged:: 0.9 - Added `app_root` parameter and renamed from `LighttpdCGIRootFix`. - - :param app: the WSGI application - :param app_root: Defaulting to ``'/'``, you can set this to something else - if your app is mounted somewhere else. - """ - - def __init__(self, app, app_root='/'): - self.app = app - self.app_root = app_root - - def __call__(self, environ, start_response): - # only set PATH_INFO for older versions of Lighty or if no - # server software is provided. That's because the test was - # added in newer Werkzeug versions and we don't want to break - # people's code if they are using this fixer in a test that - # does not set the SERVER_SOFTWARE key. - if 'SERVER_SOFTWARE' not in environ or \ - environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28': - environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \ - environ.get('PATH_INFO', '') - environ['SCRIPT_NAME'] = self.app_root.strip('/') - return self.app(environ, start_response) - -# backwards compatibility -LighttpdCGIRootFix = CGIRootFix - - -class PathInfoFromRequestUriFix(object): - - """On windows environment variables are limited to the system charset - which makes it impossible to store the `PATH_INFO` variable in the - environment without loss of information on some systems. - - This is for example a problem for CGI scripts on a Windows Apache. - - This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`, - `REQUEST_URL`, or `UNENCODED_URL` (whatever is available). Thus the - fix can only be applied if the webserver supports either of these - variables. - - :param app: the WSGI application - """ - - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - for key in 'REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL': - if key not in environ: - continue - request_uri = unquote(environ[key]) - script_name = unquote(environ.get('SCRIPT_NAME', '')) - if request_uri.startswith(script_name): - environ['PATH_INFO'] = request_uri[len(script_name):] \ - .split('?', 1)[0] - break - return self.app(environ, start_response) - - -class ProxyFix(object): - - """This middleware can be applied to add HTTP proxy support to an - application that was not designed with HTTP proxies in mind. It - sets `REMOTE_ADDR`, `HTTP_HOST` from `X-Forwarded` headers. While - Werkzeug-based applications already can use - :py:func:`werkzeug.wsgi.get_host` to retrieve the current host even if - behind proxy setups, this middleware can be used for applications which - access the WSGI environment directly. - - If you have more than one proxy server in front of your app, set - `num_proxies` accordingly. - - Do not use this middleware in non-proxy setups for security reasons. - - The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in - the WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and - `werkzeug.proxy_fix.orig_http_host`. - - :param app: the WSGI application - :param num_proxies: the number of proxy servers in front of the app. 
- """ - - def __init__(self, app, num_proxies=1): - self.app = app - self.num_proxies = num_proxies - - def get_remote_addr(self, forwarded_for): - """Selects the new remote addr from the given list of ips in - X-Forwarded-For. By default it picks the one that the `num_proxies` - proxy server provides. Before 0.9 it would always pick the first. - - .. versionadded:: 0.8 - """ - if len(forwarded_for) >= self.num_proxies: - return forwarded_for[-self.num_proxies] - - def __call__(self, environ, start_response): - getter = environ.get - forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '') - forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',') - forwarded_host = getter('HTTP_X_FORWARDED_HOST', '') - environ.update({ - 'werkzeug.proxy_fix.orig_wsgi_url_scheme': getter('wsgi.url_scheme'), - 'werkzeug.proxy_fix.orig_remote_addr': getter('REMOTE_ADDR'), - 'werkzeug.proxy_fix.orig_http_host': getter('HTTP_HOST') - }) - forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x] - remote_addr = self.get_remote_addr(forwarded_for) - if remote_addr is not None: - environ['REMOTE_ADDR'] = remote_addr - if forwarded_host: - environ['HTTP_HOST'] = forwarded_host - if forwarded_proto: - environ['wsgi.url_scheme'] = forwarded_proto - return self.app(environ, start_response) - - -class HeaderRewriterFix(object): - - """This middleware can remove response headers and add others. This - is for example useful to remove the `Date` header from responses if you - are using a server that adds that header, no matter if it's present or - not or to add `X-Powered-By` headers:: - - app = HeaderRewriterFix(app, remove_headers=['Date'], - add_headers=[('X-Powered-By', 'WSGI')]) - - :param app: the WSGI application - :param remove_headers: a sequence of header keys that should be - removed. - :param add_headers: a sequence of ``(key, value)`` tuples that should - be added. - """ - - def __init__(self, app, remove_headers=None, add_headers=None): - self.app = app - self.remove_headers = set(x.lower() for x in (remove_headers or ())) - self.add_headers = list(add_headers or ()) - - def __call__(self, environ, start_response): - def rewriting_start_response(status, headers, exc_info=None): - new_headers = [] - for key, value in headers: - if key.lower() not in self.remove_headers: - new_headers.append((key, value)) - new_headers += self.add_headers - return start_response(status, new_headers, exc_info) - return self.app(environ, rewriting_start_response) - - -class InternetExplorerFix(object): - - """This middleware fixes a couple of bugs with Microsoft Internet - Explorer. Currently the following fixes are applied: - - - removing of `Vary` headers for unsupported mimetypes which - causes troubles with caching. Can be disabled by passing - ``fix_vary=False`` to the constructor. - see: http://support.microsoft.com/kb/824847/en-us - - - removes offending headers to work around caching bugs in - Internet Explorer if `Content-Disposition` is set. Can be - disabled by passing ``fix_attach=False`` to the constructor. - - If it does not detect affected Internet Explorer versions it won't touch - the request / response. - """ - - # This code was inspired by Django fixers for the same bugs. 
The - # fix_vary and fix_attach fixers were originally implemented in Django - # by Michael Axiak and is available as part of the Django project: - # http://code.djangoproject.com/ticket/4148 - - def __init__(self, app, fix_vary=True, fix_attach=True): - self.app = app - self.fix_vary = fix_vary - self.fix_attach = fix_attach - - def fix_headers(self, environ, headers, status=None): - if self.fix_vary: - header = headers.get('content-type', '') - mimetype, options = parse_options_header(header) - if mimetype not in ('text/html', 'text/plain', 'text/sgml'): - headers.pop('vary', None) - - if self.fix_attach and 'content-disposition' in headers: - pragma = parse_set_header(headers.get('pragma', '')) - pragma.discard('no-cache') - header = pragma.to_header() - if not header: - headers.pop('pragma', '') - else: - headers['Pragma'] = header - header = headers.get('cache-control', '') - if header: - cc = parse_cache_control_header(header, - cls=ResponseCacheControl) - cc.no_cache = None - cc.no_store = False - header = cc.to_header() - if not header: - headers.pop('cache-control', '') - else: - headers['Cache-Control'] = header - - def run_fixed(self, environ, start_response): - def fixing_start_response(status, headers, exc_info=None): - headers = Headers(headers) - self.fix_headers(environ, headers, status) - return start_response(status, headers.to_wsgi_list(), exc_info) - return self.app(environ, fixing_start_response) - - def __call__(self, environ, start_response): - ua = UserAgent(environ) - if ua.browser != 'msie': - return self.app(environ, start_response) - return self.run_fixed(environ, start_response) diff --git a/pype/vendor/werkzeug/contrib/iterio.py b/pype/vendor/werkzeug/contrib/iterio.py deleted file mode 100644 index c0ced37004..0000000000 --- a/pype/vendor/werkzeug/contrib/iterio.py +++ /dev/null @@ -1,352 +0,0 @@ -# -*- coding: utf-8 -*- -r""" - werkzeug.contrib.iterio - ~~~~~~~~~~~~~~~~~~~~~~~ - - This module implements a :class:`IterIO` that converts an iterator into - a stream object and the other way round. Converting streams into - iterators requires the `greenlet`_ module. - - To convert an iterator into a stream all you have to do is to pass it - directly to the :class:`IterIO` constructor. In this example we pass it - a newly created generator:: - - def foo(): - yield "something\n" - yield "otherthings" - stream = IterIO(foo()) - print stream.read() # read the whole iterator - - The other way round works a bit different because we have to ensure that - the code execution doesn't take place yet. An :class:`IterIO` call with a - callable as first argument does two things. The function itself is passed - an :class:`IterIO` stream it can feed. The object returned by the - :class:`IterIO` constructor on the other hand is not an stream object but - an iterator:: - - def foo(stream): - stream.write("some") - stream.write("thing") - stream.flush() - stream.write("otherthing") - iterator = IterIO(foo) - print iterator.next() # prints something - print iterator.next() # prints otherthing - iterator.next() # raises StopIteration - - .. _greenlet: https://github.com/python-greenlet/greenlet - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. 
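
For reference, the module docstring's two IterIO examples above in Python 3 form: the iterator-to-stream direction needs nothing extra, while the callable-to-iterator direction requires the greenlet package. The import path is the one this diff removes:

from werkzeug.contrib.iterio import IterIO  # path removed by this diff

def produce():
    yield "something\n"
    yield "otherthings"

stream = IterIO(produce())   # wrap an iterator as a file-like object
print(stream.read())         # drains the whole iterator

def writer(stream):
    stream.write("some")
    stream.write("thing")
    stream.flush()           # every flush emits one item downstream
    stream.write("otherthing")

iterator = IterIO(writer)    # wrap a callable as an iterator (greenlet)
print(next(iterator))        # -> "something"
print(next(iterator))        # -> "otherthing" (flushed on close)
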
-""" -try: - import greenlet -except ImportError: - greenlet = None - -from werkzeug._compat import implements_iterator - - -def _mixed_join(iterable, sentinel): - """concatenate any string type in an intelligent way.""" - iterator = iter(iterable) - first_item = next(iterator, sentinel) - if isinstance(first_item, bytes): - return first_item + b''.join(iterator) - return first_item + u''.join(iterator) - - -def _newline(reference_string): - if isinstance(reference_string, bytes): - return b'\n' - return u'\n' - - -@implements_iterator -class IterIO(object): - - """Instances of this object implement an interface compatible with the - standard Python :class:`file` object. Streams are either read-only or - write-only depending on how the object is created. - - If the first argument is an iterable a file like object is returned that - returns the contents of the iterable. In case the iterable is empty - read operations will return the sentinel value. - - If the first argument is a callable then the stream object will be - created and passed to that function. The caller itself however will - not receive a stream but an iterable. The function will be be executed - step by step as something iterates over the returned iterable. Each - call to :meth:`flush` will create an item for the iterable. If - :meth:`flush` is called without any writes in-between the sentinel - value will be yielded. - - Note for Python 3: due to the incompatible interface of bytes and - streams you should set the sentinel value explicitly to an empty - bytestring (``b''``) if you are expecting to deal with bytes as - otherwise the end of the stream is marked with the wrong sentinel - value. - - .. versionadded:: 0.9 - `sentinel` parameter was added. - """ - - def __new__(cls, obj, sentinel=''): - try: - iterator = iter(obj) - except TypeError: - return IterI(obj, sentinel) - return IterO(iterator, sentinel) - - def __iter__(self): - return self - - def tell(self): - if self.closed: - raise ValueError('I/O operation on closed file') - return self.pos - - def isatty(self): - if self.closed: - raise ValueError('I/O operation on closed file') - return False - - def seek(self, pos, mode=0): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def truncate(self, size=None): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def write(self, s): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def writelines(self, list): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def read(self, n=-1): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def readlines(self, sizehint=0): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def readline(self, length=None): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def flush(self): - if self.closed: - raise ValueError('I/O operation on closed file') - raise IOError(9, 'Bad file descriptor') - - def __next__(self): - if self.closed: - raise StopIteration() - line = self.readline() - if not line: - raise StopIteration() - return line - - -class IterI(IterIO): - - """Convert an stream into an iterator.""" - - def __new__(cls, func, sentinel=''): - if 
greenlet is None: - raise RuntimeError('IterI requires greenlet support') - stream = object.__new__(cls) - stream._parent = greenlet.getcurrent() - stream._buffer = [] - stream.closed = False - stream.sentinel = sentinel - stream.pos = 0 - - def run(): - func(stream) - stream.close() - - g = greenlet.greenlet(run, stream._parent) - while 1: - rv = g.switch() - if not rv: - return - yield rv[0] - - def close(self): - if not self.closed: - self.closed = True - self._flush_impl() - - def write(self, s): - if self.closed: - raise ValueError('I/O operation on closed file') - if s: - self.pos += len(s) - self._buffer.append(s) - - def writelines(self, list): - for item in list: - self.write(item) - - def flush(self): - if self.closed: - raise ValueError('I/O operation on closed file') - self._flush_impl() - - def _flush_impl(self): - data = _mixed_join(self._buffer, self.sentinel) - self._buffer = [] - if not data and self.closed: - self._parent.switch() - else: - self._parent.switch((data,)) - - -class IterO(IterIO): - - """Iter output. Wrap an iterator and give it a stream like interface.""" - - def __new__(cls, gen, sentinel=''): - self = object.__new__(cls) - self._gen = gen - self._buf = None - self.sentinel = sentinel - self.closed = False - self.pos = 0 - return self - - def __iter__(self): - return self - - def _buf_append(self, string): - '''Replace string directly without appending to an empty string, - avoiding type issues.''' - if not self._buf: - self._buf = string - else: - self._buf += string - - def close(self): - if not self.closed: - self.closed = True - if hasattr(self._gen, 'close'): - self._gen.close() - - def seek(self, pos, mode=0): - if self.closed: - raise ValueError('I/O operation on closed file') - if mode == 1: - pos += self.pos - elif mode == 2: - self.read() - self.pos = min(self.pos, self.pos + pos) - return - elif mode != 0: - raise IOError('Invalid argument') - buf = [] - try: - tmp_end_pos = len(self._buf) - while pos > tmp_end_pos: - item = next(self._gen) - tmp_end_pos += len(item) - buf.append(item) - except StopIteration: - pass - if buf: - self._buf_append(_mixed_join(buf, self.sentinel)) - self.pos = max(0, pos) - - def read(self, n=-1): - if self.closed: - raise ValueError('I/O operation on closed file') - if n < 0: - self._buf_append(_mixed_join(self._gen, self.sentinel)) - result = self._buf[self.pos:] - self.pos += len(result) - return result - new_pos = self.pos + n - buf = [] - try: - tmp_end_pos = 0 if self._buf is None else len(self._buf) - while new_pos > tmp_end_pos or (self._buf is None and not buf): - item = next(self._gen) - tmp_end_pos += len(item) - buf.append(item) - except StopIteration: - pass - if buf: - self._buf_append(_mixed_join(buf, self.sentinel)) - - if self._buf is None: - return self.sentinel - - new_pos = max(0, new_pos) - try: - return self._buf[self.pos:new_pos] - finally: - self.pos = min(new_pos, len(self._buf)) - - def readline(self, length=None): - if self.closed: - raise ValueError('I/O operation on closed file') - - nl_pos = -1 - if self._buf: - nl_pos = self._buf.find(_newline(self._buf), self.pos) - buf = [] - try: - if self._buf is None: - pos = self.pos - else: - pos = len(self._buf) - while nl_pos < 0: - item = next(self._gen) - local_pos = item.find(_newline(item)) - buf.append(item) - if local_pos >= 0: - nl_pos = pos + local_pos - break - pos += len(item) - except StopIteration: - pass - if buf: - self._buf_append(_mixed_join(buf, self.sentinel)) - - if self._buf is None: - return self.sentinel - - if nl_pos < 
0: - new_pos = len(self._buf) - else: - new_pos = nl_pos + 1 - if length is not None and self.pos + length < new_pos: - new_pos = self.pos + length - try: - return self._buf[self.pos:new_pos] - finally: - self.pos = min(new_pos, len(self._buf)) - - def readlines(self, sizehint=0): - total = 0 - lines = [] - line = self.readline() - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline() - return lines diff --git a/pype/vendor/werkzeug/contrib/jsrouting.py b/pype/vendor/werkzeug/contrib/jsrouting.py deleted file mode 100644 index d8158927ee..0000000000 --- a/pype/vendor/werkzeug/contrib/jsrouting.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.jsrouting - ~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Addon module that allows to create a JavaScript function from a map - that generates rules. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -try: - from simplejson import dumps -except ImportError: - try: - from json import dumps - except ImportError: - def dumps(*args): - raise RuntimeError('simplejson required for jsrouting') - -from inspect import getmro -from werkzeug.routing import NumberConverter -from werkzeug._compat import iteritems - - -def render_template(name_parts, rules, converters): - result = u'' - if name_parts: - for idx in range(0, len(name_parts) - 1): - name = u'.'.join(name_parts[:idx + 1]) - result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name) - result += '%s = ' % '.'.join(name_parts) - result += """(function (server_name, script_name, subdomain, url_scheme) { - var converters = [%(converters)s]; - var rules = %(rules)s; - function in_array(array, value) { - if (array.indexOf != undefined) { - return array.indexOf(value) != -1; - } - for (var i = 0; i < array.length; i++) { - if (array[i] == value) { - return true; - } - } - return false; - } - function array_diff(array1, array2) { - array1 = array1.slice(); - for (var i = array1.length-1; i >= 0; i--) { - if (in_array(array2, array1[i])) { - array1.splice(i, 1); - } - } - return array1; - } - function split_obj(obj) { - var names = []; - var values = []; - for (var name in obj) { - if (typeof(obj[name]) != 'function') { - names.push(name); - values.push(obj[name]); - } - } - return {names: names, values: values, original: obj}; - } - function suitable(rule, args) { - var default_args = split_obj(rule.defaults || {}); - var diff_arg_names = array_diff(rule.arguments, default_args.names); - - for (var i = 0; i < diff_arg_names.length; i++) { - if (!in_array(args.names, diff_arg_names[i])) { - return false; - } - } - - if (array_diff(rule.arguments, args.names).length == 0) { - if (rule.defaults == null) { - return true; - } - for (var i = 0; i < default_args.names.length; i++) { - var key = default_args.names[i]; - var value = default_args.values[i]; - if (value != args.original[key]) { - return false; - } - } - } - - return true; - } - function build(rule, args) { - var tmp = []; - var processed = rule.arguments.slice(); - for (var i = 0; i < rule.trace.length; i++) { - var part = rule.trace[i]; - if (part.is_dynamic) { - var converter = converters[rule.converters[part.data]]; - var data = converter(args.original[part.data]); - if (data == null) { - return null; - } - tmp.push(data); - processed.push(part.name); - } else { - tmp.push(part.data); - } - } - tmp = tmp.join(''); - var pipe = tmp.indexOf('|'); - var subdomain = tmp.substring(0, pipe); 
- var url = tmp.substring(pipe+1); - - var unprocessed = array_diff(args.names, processed); - var first_query_var = true; - for (var i = 0; i < unprocessed.length; i++) { - if (first_query_var) { - url += '?'; - } else { - url += '&'; - } - first_query_var = false; - url += encodeURIComponent(unprocessed[i]); - url += '='; - url += encodeURIComponent(args.original[unprocessed[i]]); - } - return {subdomain: subdomain, path: url}; - } - function lstrip(s, c) { - while (s && s.substring(0, 1) == c) { - s = s.substring(1); - } - return s; - } - function rstrip(s, c) { - while (s && s.substring(s.length-1, s.length) == c) { - s = s.substring(0, s.length-1); - } - return s; - } - return function(endpoint, args, force_external) { - args = split_obj(args); - var rv = null; - for (var i = 0; i < rules.length; i++) { - var rule = rules[i]; - if (rule.endpoint != endpoint) continue; - if (suitable(rule, args)) { - rv = build(rule, args); - if (rv != null) { - break; - } - } - } - if (rv == null) { - return null; - } - if (!force_external && rv.subdomain == subdomain) { - return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/'); - } else { - return url_scheme + '://' - + (rv.subdomain ? rv.subdomain + '.' : '') - + server_name + rstrip(script_name, '/') - + '/' + lstrip(rv.path, '/'); - } - }; -})""" % {'converters': u', '.join(converters), - 'rules': rules} - - return result - - -def generate_map(map, name='url_map'): - """ - Generates a JavaScript function containing the rules defined in - this map, to be used with a MapAdapter's generate_javascript - method. If you don't pass a name the returned JavaScript code is - an expression that returns a function. Otherwise it's a standalone - script that assigns the function with that name. Dotted names are - resolved (so you an use a name like 'obj.url_for') - - In order to use JavaScript generation, simplejson must be installed. - - Note that using this feature will expose the rules - defined in your map to users. If your rules contain sensitive - information, don't use JavaScript generation! 
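
A short sketch of how the deprecated generator above was meant to be driven: build a routing Map, then emit JavaScript that assigns a client-side URL builder. The rules and endpoint names here are illustrative, and as the docstring below warns, serving this output exposes every rule in the map to browsers:

from werkzeug.routing import Map, Rule
from werkzeug.contrib.jsrouting import generate_map  # pre-removal path

url_map = Map([
    Rule('/', endpoint='index'),
    Rule('/user/<int:user_id>', endpoint='user'),
])

# JavaScript source assigning a rule-based url_for-style function
js_source = generate_map(url_map, name='url_map')
print(js_source[:60])
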
- """ - from warnings import warn - warn(DeprecationWarning('This module is deprecated')) - map.update() - rules = [] - converters = [] - for rule in map.iter_rules(): - trace = [{ - 'is_dynamic': is_dynamic, - 'data': data - } for is_dynamic, data in rule._trace] - rule_converters = {} - for key, converter in iteritems(rule._converters): - js_func = js_to_url_function(converter) - try: - index = converters.index(js_func) - except ValueError: - converters.append(js_func) - index = len(converters) - 1 - rule_converters[key] = index - rules.append({ - u'endpoint': rule.endpoint, - u'arguments': list(rule.arguments), - u'converters': rule_converters, - u'trace': trace, - u'defaults': rule.defaults - }) - - return render_template(name_parts=name and name.split('.') or [], - rules=dumps(rules), - converters=converters) - - -def generate_adapter(adapter, name='url_for', map_name='url_map'): - """Generates the url building function for a map.""" - values = { - u'server_name': dumps(adapter.server_name), - u'script_name': dumps(adapter.script_name), - u'subdomain': dumps(adapter.subdomain), - u'url_scheme': dumps(adapter.url_scheme), - u'name': name, - u'map_name': map_name - } - return u'''\ -var %(name)s = %(map_name)s( - %(server_name)s, - %(script_name)s, - %(subdomain)s, - %(url_scheme)s -);''' % values - - -def js_to_url_function(converter): - """Get the JavaScript converter function from a rule.""" - if hasattr(converter, 'js_to_url_function'): - data = converter.js_to_url_function() - else: - for cls in getmro(type(converter)): - if cls in js_to_url_functions: - data = js_to_url_functions[cls](converter) - break - else: - return 'encodeURIComponent' - return '(function(value) { %s })' % data - - -def NumberConverter_js_to_url(conv): - if conv.fixed_digits: - return u'''\ -var result = value.toString(); -while (result.length < %s) - result = '0' + result; -return result;''' % conv.fixed_digits - return u'return value.toString();' - - -js_to_url_functions = { - NumberConverter: NumberConverter_js_to_url -} diff --git a/pype/vendor/werkzeug/contrib/limiter.py b/pype/vendor/werkzeug/contrib/limiter.py deleted file mode 100644 index 6bc9d39786..0000000000 --- a/pype/vendor/werkzeug/contrib/limiter.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.limiter - ~~~~~~~~~~~~~~~~~~~~~~~~ - - A middleware that limits incoming data. This works around problems with - Trac_ or Django_ because those directly stream into the memory. - - .. _Trac: http://trac.edgewall.org/ - .. _Django: http://www.djangoproject.com/ - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -from warnings import warn - -from werkzeug.wsgi import LimitedStream - - -class StreamLimitMiddleware(object): - - """Limits the input stream to a given number of bytes. This is useful if - you have a WSGI application that reads form data into memory (django for - example) and you don't want users to harm the server by uploading tons of - data. - - Default is 10MB - - .. versionchanged:: 0.9 - Deprecated middleware. 
- """ - - def __init__(self, app, maximum_size=1024 * 1024 * 10): - warn(DeprecationWarning('This middleware is deprecated')) - self.app = app - self.maximum_size = maximum_size - - def __call__(self, environ, start_response): - limit = min(self.maximum_size, int(environ.get('CONTENT_LENGTH') or 0)) - environ['wsgi.input'] = LimitedStream(environ['wsgi.input'], limit) - return self.app(environ, start_response) diff --git a/pype/vendor/werkzeug/contrib/lint.py b/pype/vendor/werkzeug/contrib/lint.py deleted file mode 100644 index 969c687412..0000000000 --- a/pype/vendor/werkzeug/contrib/lint.py +++ /dev/null @@ -1,343 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.lint - ~~~~~~~~~~~~~~~~~~~~~ - - .. versionadded:: 0.5 - - This module provides a middleware that performs sanity checks of the WSGI - application. It checks that :pep:`333` is properly implemented and warns - on some common HTTP errors such as non-empty responses for 304 status - codes. - - This module provides a middleware, the :class:`LintMiddleware`. Wrap your - application with it and it will warn about common problems with WSGI and - HTTP while your application is running. - - It's strongly recommended to use it during development. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - -from warnings import warn - -from werkzeug.datastructures import Headers -from werkzeug.http import is_entity_header -from werkzeug.wsgi import FileWrapper -from werkzeug._compat import string_types - - -class WSGIWarning(Warning): - - """Warning class for WSGI warnings.""" - - -class HTTPWarning(Warning): - - """Warning class for HTTP warnings.""" - - -def check_string(context, obj, stacklevel=3): - if type(obj) is not str: - warn(WSGIWarning('%s requires bytestrings, got %s' % - (context, obj.__class__.__name__))) - - -class InputStream(object): - - def __init__(self, stream): - self._stream = stream - - def read(self, *args): - if len(args) == 0: - warn(WSGIWarning('wsgi does not guarantee an EOF marker on the ' - 'input stream, thus making calls to ' - 'wsgi.input.read() unsafe. Conforming servers ' - 'may never return from this call.'), - stacklevel=2) - elif len(args) != 1: - warn(WSGIWarning('too many parameters passed to wsgi.input.read()'), - stacklevel=2) - return self._stream.read(*args) - - def readline(self, *args): - if len(args) == 0: - warn(WSGIWarning('Calls to wsgi.input.readline() without arguments' - ' are unsafe. Use wsgi.input.read() instead.'), - stacklevel=2) - elif len(args) == 1: - warn(WSGIWarning('wsgi.input.readline() was called with a size hint. 
' - 'WSGI does not support this, although it\'s available ' - 'on all major servers.'), - stacklevel=2) - else: - raise TypeError('too many arguments passed to wsgi.input.readline()') - return self._stream.readline(*args) - - def __iter__(self): - try: - return iter(self._stream) - except TypeError: - warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2) - return iter(()) - - def close(self): - warn(WSGIWarning('application closed the input stream!'), - stacklevel=2) - self._stream.close() - - -class ErrorStream(object): - - def __init__(self, stream): - self._stream = stream - - def write(self, s): - check_string('wsgi.error.write()', s) - self._stream.write(s) - - def flush(self): - self._stream.flush() - - def writelines(self, seq): - for line in seq: - self.write(seq) - - def close(self): - warn(WSGIWarning('application closed the error stream!'), - stacklevel=2) - self._stream.close() - - -class GuardedWrite(object): - - def __init__(self, write, chunks): - self._write = write - self._chunks = chunks - - def __call__(self, s): - check_string('write()', s) - self._write.write(s) - self._chunks.append(len(s)) - - -class GuardedIterator(object): - - def __init__(self, iterator, headers_set, chunks): - self._iterator = iterator - self._next = iter(iterator).next - self.closed = False - self.headers_set = headers_set - self.chunks = chunks - - def __iter__(self): - return self - - def next(self): - if self.closed: - warn(WSGIWarning('iterated over closed app_iter'), - stacklevel=2) - rv = self._next() - if not self.headers_set: - warn(WSGIWarning('Application returned before it ' - 'started the response'), stacklevel=2) - check_string('application iterator items', rv) - self.chunks.append(len(rv)) - return rv - - def close(self): - self.closed = True - if hasattr(self._iterator, 'close'): - self._iterator.close() - - if self.headers_set: - status_code, headers = self.headers_set - bytes_sent = sum(self.chunks) - content_length = headers.get('content-length', type=int) - - if status_code == 304: - for key, value in headers: - key = key.lower() - if key not in ('expires', 'content-location') and \ - is_entity_header(key): - warn(HTTPWarning('entity header %r found in 304 ' - 'response' % key)) - if bytes_sent: - warn(HTTPWarning('304 responses must not have a body')) - elif 100 <= status_code < 200 or status_code == 204: - if content_length != 0: - warn(HTTPWarning('%r responses must have an empty ' - 'content length' % status_code)) - if bytes_sent: - warn(HTTPWarning('%r responses must not have a body' % - status_code)) - elif content_length is not None and content_length != bytes_sent: - warn(WSGIWarning('Content-Length and the number of bytes ' - 'sent to the client do not match.')) - - def __del__(self): - if not self.closed: - try: - warn(WSGIWarning('Iterator was garbage collected before ' - 'it was closed.')) - except Exception: - pass - - -class LintMiddleware(object): - - """This middleware wraps an application and warns on common errors. - Among other thing it currently checks for the following problems: - - - invalid status codes - - non-bytestrings sent to the WSGI server - - strings returned from the WSGI application - - non-empty conditional responses - - unquoted etags - - relative URLs in the Location header - - unsafe calls to wsgi.input - - unclosed iterators - - Detected errors are emitted using the standard Python :mod:`warnings` - system and usually end up on :data:`stderr`. 
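
Since the checks above all surface through the standard warnings module, a convenient development setup is to wrap an app and escalate the lint warnings into exceptions. A sketch, using the import path this diff removes and a placeholder app:

import warnings
from werkzeug.contrib.lint import LintMiddleware  # path removed here

def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

app = LintMiddleware(app)

# turn WSGIWarning/HTTPWarning emissions into hard failures in dev
warnings.simplefilter('error', Warning)
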
- - :: - - from werkzeug.contrib.lint import LintMiddleware - app = LintMiddleware(app) - - :param app: the application to wrap - """ - - def __init__(self, app): - self.app = app - - def check_environ(self, environ): - if type(environ) is not dict: - warn(WSGIWarning('WSGI environment is not a standard python dict.'), - stacklevel=4) - for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT', - 'wsgi.version', 'wsgi.input', 'wsgi.errors', - 'wsgi.multithread', 'wsgi.multiprocess', - 'wsgi.run_once'): - if key not in environ: - warn(WSGIWarning('required environment key %r not found' - % key), stacklevel=3) - if environ['wsgi.version'] != (1, 0): - warn(WSGIWarning('environ is not a WSGI 1.0 environ'), - stacklevel=3) - - script_name = environ.get('SCRIPT_NAME', '') - if script_name and script_name[:1] != '/': - warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r' - % script_name), stacklevel=3) - path_info = environ.get('PATH_INFO', '') - if path_info[:1] != '/': - warn(WSGIWarning('PATH_INFO does not start with a slash: %r' - % path_info), stacklevel=3) - - def check_start_response(self, status, headers, exc_info): - check_string('status', status) - status_code = status.split(None, 1)[0] - if len(status_code) != 3 or not status_code.isdigit(): - warn(WSGIWarning('Status code must be three digits'), stacklevel=3) - if len(status) < 4 or status[3] != ' ': - warn(WSGIWarning('Invalid value for status %r. Valid ' - 'status strings are three digits, a space ' - 'and a status explanation'), stacklevel=3) - status_code = int(status_code) - if status_code < 100: - warn(WSGIWarning('status code < 100 detected'), stacklevel=3) - - if type(headers) is not list: - warn(WSGIWarning('header list is not a list'), stacklevel=3) - for item in headers: - if type(item) is not tuple or len(item) != 2: - warn(WSGIWarning('Headers must tuple 2-item tuples'), - stacklevel=3) - name, value = item - if type(name) is not str or type(value) is not str: - warn(WSGIWarning('header items must be strings'), - stacklevel=3) - if name.lower() == 'status': - warn(WSGIWarning('The status header is not supported due to ' - 'conflicts with the CGI spec.'), - stacklevel=3) - - if exc_info is not None and not isinstance(exc_info, tuple): - warn(WSGIWarning('invalid value for exc_info'), stacklevel=3) - - headers = Headers(headers) - self.check_headers(headers) - - return status_code, headers - - def check_headers(self, headers): - etag = headers.get('etag') - if etag is not None: - if etag.startswith(('W/', 'w/')): - if etag.startswith('w/'): - warn(HTTPWarning('weak etag indicator should be upcase.'), - stacklevel=4) - etag = etag[2:] - if not (etag[:1] == etag[-1:] == '"'): - warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4) - - location = headers.get('location') - if location is not None: - if not urlparse(location).netloc: - warn(HTTPWarning('absolute URLs required for location header'), - stacklevel=4) - - def check_iterator(self, app_iter): - if isinstance(app_iter, string_types): - warn(WSGIWarning('application returned string. Response will ' - 'send character for character to the client ' - 'which will kill the performance. 
Return a ' - 'list or iterable instead.'), stacklevel=3) - - def __call__(self, *args, **kwargs): - if len(args) != 2: - warn(WSGIWarning('Two arguments to WSGI app required'), stacklevel=2) - if kwargs: - warn(WSGIWarning('No keyword arguments to WSGI app allowed'), - stacklevel=2) - environ, start_response = args - - self.check_environ(environ) - environ['wsgi.input'] = InputStream(environ['wsgi.input']) - environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors']) - - # hook our own file wrapper in so that applications will always - # iterate to the end and we can check the content length - environ['wsgi.file_wrapper'] = FileWrapper - - headers_set = [] - chunks = [] - - def checking_start_response(*args, **kwargs): - if len(args) not in (2, 3): - warn(WSGIWarning('Invalid number of arguments: %s, expected ' - '2 or 3' % len(args), stacklevel=2)) - if kwargs: - warn(WSGIWarning('no keyword arguments allowed.')) - - status, headers = args[:2] - if len(args) == 3: - exc_info = args[2] - else: - exc_info = None - - headers_set[:] = self.check_start_response(status, headers, - exc_info) - return GuardedWrite(start_response(status, headers, exc_info), - chunks) - - app_iter = self.app(environ, checking_start_response) - self.check_iterator(app_iter) - return GuardedIterator(app_iter, headers_set, chunks) diff --git a/pype/vendor/werkzeug/contrib/profiler.py b/pype/vendor/werkzeug/contrib/profiler.py deleted file mode 100644 index be860afbcd..0000000000 --- a/pype/vendor/werkzeug/contrib/profiler.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.profiler - ~~~~~~~~~~~~~~~~~~~~~~~~~ - - This module provides a simple WSGI profiler middleware for finding - bottlenecks in web application. It uses the :mod:`profile` or - :mod:`cProfile` module to do the profiling and writes the stats to the - stream provided (defaults to stderr). - - Example usage:: - - from werkzeug.contrib.profiler import ProfilerMiddleware - app = ProfilerMiddleware(app) - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import sys -import time -import os.path -try: - try: - from cProfile import Profile - except ImportError: - from profile import Profile - from pstats import Stats - available = True -except ImportError: - available = False - - -class MergeStream(object): - - """An object that redirects `write` calls to multiple streams. - Use this to log to both `sys.stdout` and a file:: - - f = open('profiler.log', 'w') - stream = MergeStream(sys.stdout, f) - profiler = ProfilerMiddleware(app, stream) - """ - - def __init__(self, *streams): - if not streams: - raise TypeError('at least one stream must be given') - self.streams = streams - - def write(self, data): - for stream in self.streams: - stream.write(data) - - -class ProfilerMiddleware(object): - - """Simple profiler middleware. Wraps a WSGI application and profiles - a request. This intentionally buffers the response so that timings are - more exact. - - By giving the `profile_dir` argument, pstat.Stats files are saved to that - directory, one file per request. Without it, a summary is printed to - `stream` instead. - - For the exact meaning of `sort_by` and `restrictions` consult the - :mod:`profile` documentation. - - .. versionadded:: 0.9 - Added support for `restrictions` and `profile_dir`. - - :param app: the WSGI application to profile. - :param stream: the stream for the profiled stats. defaults to stderr. 
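
Putting the two profiler helpers above together: MergeStream fans the report out to several streams, and ProfilerMiddleware buffers each response while profiling it. A sketch with a placeholder app, using the import path this diff removes:

import sys
from werkzeug.contrib.profiler import MergeStream, ProfilerMiddleware

def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'profiled']

# per-request stats printed to stdout and appended to a log file
log_file = open('profiler.log', 'w')
app = ProfilerMiddleware(app, MergeStream(sys.stdout, log_file))
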
- :param sort_by: a tuple of columns to sort the result by. - :param restrictions: a tuple of profiling strictions, not used if dumping - to `profile_dir`. - :param profile_dir: directory name to save pstat files - """ - - def __init__(self, app, stream=None, - sort_by=('time', 'calls'), restrictions=(), profile_dir=None): - if not available: - raise RuntimeError('the profiler is not available because ' - 'profile or pstat is not installed.') - self._app = app - self._stream = stream or sys.stdout - self._sort_by = sort_by - self._restrictions = restrictions - self._profile_dir = profile_dir - - def __call__(self, environ, start_response): - response_body = [] - - def catching_start_response(status, headers, exc_info=None): - start_response(status, headers, exc_info) - return response_body.append - - def runapp(): - appiter = self._app(environ, catching_start_response) - response_body.extend(appiter) - if hasattr(appiter, 'close'): - appiter.close() - - p = Profile() - start = time.time() - p.runcall(runapp) - body = b''.join(response_body) - elapsed = time.time() - start - - if self._profile_dir is not None: - prof_filename = os.path.join(self._profile_dir, - '%s.%s.%06dms.%d.prof' % ( - environ['REQUEST_METHOD'], - environ.get('PATH_INFO').strip( - '/').replace('/', '.') or 'root', - elapsed * 1000.0, - time.time() - )) - p.dump_stats(prof_filename) - - else: - stats = Stats(p, stream=self._stream) - stats.sort_stats(*self._sort_by) - - self._stream.write('-' * 80) - self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO')) - stats.print_stats(*self._restrictions) - self._stream.write('-' * 80 + '\n\n') - - return [body] - - -def make_action(app_factory, hostname='localhost', port=5000, - threaded=False, processes=1, stream=None, - sort_by=('time', 'calls'), restrictions=()): - """Return a new callback for :mod:`werkzeug.script` that starts a local - server with the profiler enabled. - - :: - - from werkzeug.contrib import profiler - action_profile = profiler.make_action(make_app) - """ - def action(hostname=('h', hostname), port=('p', port), - threaded=threaded, processes=processes): - """Start a new development server.""" - from werkzeug.serving import run_simple - app = ProfilerMiddleware(app_factory(), stream, sort_by, restrictions) - run_simple(hostname, port, app, False, None, threaded, processes) - return action diff --git a/pype/vendor/werkzeug/contrib/securecookie.py b/pype/vendor/werkzeug/contrib/securecookie.py deleted file mode 100644 index 5927e2b0b2..0000000000 --- a/pype/vendor/werkzeug/contrib/securecookie.py +++ /dev/null @@ -1,323 +0,0 @@ -# -*- coding: utf-8 -*- -r""" - werkzeug.contrib.securecookie - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - This module implements a cookie that is not alterable from the client - because it adds a checksum the server checks for. You can use it as - session replacement if all you have is a user id or something to mark - a logged in user. - - Keep in mind that the data is still readable from the client as a - normal cookie is. However you don't have to store and flush the - sessions you have at the server. 
- - Example usage: - - >>> from werkzeug.contrib.securecookie import SecureCookie - >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef") - - Dumping into a string so that one can store it in a cookie: - - >>> value = x.serialize() - - Loading from that string again: - - >>> x = SecureCookie.unserialize(value, "deadbeef") - >>> x["baz"] - (1, 2, 3) - - If someone modifies the cookie and the checksum is wrong the unserialize - method will fail silently and return a new empty `SecureCookie` object. - - Keep in mind that the values will be visible in the cookie so do not - store data in a cookie you don't want the user to see. - - Application Integration - ======================= - - If you are using the werkzeug request objects you could integrate the - secure cookie into your application like this:: - - from werkzeug.utils import cached_property - from werkzeug.wrappers import BaseRequest - from werkzeug.contrib.securecookie import SecureCookie - - # don't use this key but a different one; you could just use - # os.urandom(20) to get something random - SECRET_KEY = '\xfa\xdd\xb8z\xae\xe0}4\x8b\xea' - - class Request(BaseRequest): - - @cached_property - def client_session(self): - data = self.cookies.get('session_data') - if not data: - return SecureCookie(secret_key=SECRET_KEY) - return SecureCookie.unserialize(data, SECRET_KEY) - - def application(environ, start_response): - request = Request(environ) - - # get a response object here - response = ... - - if request.client_session.should_save: - session_data = request.client_session.serialize() - response.set_cookie('session_data', session_data, - httponly=True) - return response(environ, start_response) - - A less verbose integration can be achieved by using shorthand methods:: - - class Request(BaseRequest): - - @cached_property - def client_session(self): - return SecureCookie.load_cookie(self, secret_key=COOKIE_SECRET) - - def application(environ, start_response): - request = Request(environ) - - # get a response object here - response = ... - - request.client_session.save_cookie(response) - return response(environ, start_response) - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import pickle -import base64 -from hmac import new as hmac -from time import time -from hashlib import sha1 as _default_hash - -from werkzeug._compat import iteritems, text_type, to_bytes -from werkzeug.urls import url_quote_plus, url_unquote_plus -from werkzeug._internal import _date_to_unix -from werkzeug.contrib.sessions import ModificationTrackingDict -from werkzeug.security import safe_str_cmp -from werkzeug._compat import to_native - - -class UnquoteError(Exception): - - """Internal exception used to signal failures on quoting.""" - - -class SecureCookie(ModificationTrackingDict): - - """Represents a secure cookie. You can subclass this class and provide - an alternative mac method. The import thing is that the mac method - is a function with a similar interface to the hashlib. Required - methods are update() and digest(). - - Example usage: - - >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef") - >>> x["foo"] - 42 - >>> x["baz"] - (1, 2, 3) - >>> x["blafasel"] = 23 - >>> x.should_save - True - - :param data: the initial data. Either a dict, list of tuples or `None`. - :param secret_key: the secret key. If not set `None` or not specified - it has to be set before :meth:`serialize` is called. - :param new: The initial value of the `new` flag. 
- """ - - #: The hash method to use. This has to be a module with a new function - #: or a function that creates a hashlib object. Such as `hashlib.md5` - #: Subclasses can override this attribute. The default hash is sha1. - #: Make sure to wrap this in staticmethod() if you store an arbitrary - #: function there such as hashlib.sha1 which might be implemented - #: as a function. - hash_method = staticmethod(_default_hash) - - #: the module used for serialization. Unless overriden by subclasses - #: the standard pickle module is used. - serialization_method = pickle - - #: if the contents should be base64 quoted. This can be disabled if the - #: serialization process returns cookie safe strings only. - quote_base64 = True - - def __init__(self, data=None, secret_key=None, new=True): - ModificationTrackingDict.__init__(self, data or ()) - # explicitly convert it into a bytestring because python 2.6 - # no longer performs an implicit string conversion on hmac - if secret_key is not None: - secret_key = to_bytes(secret_key, 'utf-8') - self.secret_key = secret_key - self.new = new - - def __repr__(self): - return '<%s %s%s>' % ( - self.__class__.__name__, - dict.__repr__(self), - self.should_save and '*' or '' - ) - - @property - def should_save(self): - """True if the session should be saved. By default this is only true - for :attr:`modified` cookies, not :attr:`new`. - """ - return self.modified - - @classmethod - def quote(cls, value): - """Quote the value for the cookie. This can be any object supported - by :attr:`serialization_method`. - - :param value: the value to quote. - """ - if cls.serialization_method is not None: - value = cls.serialization_method.dumps(value) - if cls.quote_base64: - value = b''.join(base64.b64encode(value).splitlines()).strip() - return value - - @classmethod - def unquote(cls, value): - """Unquote the value for the cookie. If unquoting does not work a - :exc:`UnquoteError` is raised. - - :param value: the value to unquote. - """ - try: - if cls.quote_base64: - value = base64.b64decode(value) - if cls.serialization_method is not None: - value = cls.serialization_method.loads(value) - return value - except Exception: - # unfortunately pickle and other serialization modules can - # cause pretty every error here. if we get one we catch it - # and convert it into an UnquoteError - raise UnquoteError() - - def serialize(self, expires=None): - """Serialize the secure cookie into a string. - - If expires is provided, the session will be automatically invalidated - after expiration when you unseralize it. This provides better - protection against session cookie theft. - - :param expires: an optional expiration date for the cookie (a - :class:`datetime.datetime` object) - """ - if self.secret_key is None: - raise RuntimeError('no secret key defined') - if expires: - self['_expires'] = _date_to_unix(expires) - result = [] - mac = hmac(self.secret_key, None, self.hash_method) - for key, value in sorted(self.items()): - result.append(('%s=%s' % ( - url_quote_plus(key), - self.quote(value).decode('ascii') - )).encode('ascii')) - mac.update(b'|' + result[-1]) - return b'?'.join([ - base64.b64encode(mac.digest()).strip(), - b'&'.join(result) - ]) - - @classmethod - def unserialize(cls, string, secret_key): - """Load the secure cookie from a serialized string. - - :param string: the cookie value to unserialize. - :param secret_key: the secret key used to serialize the cookie. - :return: a new :class:`SecureCookie`. 
- """ - if isinstance(string, text_type): - string = string.encode('utf-8', 'replace') - if isinstance(secret_key, text_type): - secret_key = secret_key.encode('utf-8', 'replace') - try: - base64_hash, data = string.split(b'?', 1) - except (ValueError, IndexError): - items = () - else: - items = {} - mac = hmac(secret_key, None, cls.hash_method) - for item in data.split(b'&'): - mac.update(b'|' + item) - if b'=' not in item: - items = None - break - key, value = item.split(b'=', 1) - # try to make the key a string - key = url_unquote_plus(key.decode('ascii')) - try: - key = to_native(key) - except UnicodeError: - pass - items[key] = value - - # no parsing error and the mac looks okay, we can now - # sercurely unpickle our cookie. - try: - client_hash = base64.b64decode(base64_hash) - except TypeError: - items = client_hash = None - if items is not None and safe_str_cmp(client_hash, mac.digest()): - try: - for key, value in iteritems(items): - items[key] = cls.unquote(value) - except UnquoteError: - items = () - else: - if '_expires' in items: - if time() > items['_expires']: - items = () - else: - del items['_expires'] - else: - items = () - return cls(items, secret_key, False) - - @classmethod - def load_cookie(cls, request, key='session', secret_key=None): - """Loads a :class:`SecureCookie` from a cookie in request. If the - cookie is not set, a new :class:`SecureCookie` instanced is - returned. - - :param request: a request object that has a `cookies` attribute - which is a dict of all cookie values. - :param key: the name of the cookie. - :param secret_key: the secret key used to unquote the cookie. - Always provide the value even though it has - no default! - """ - data = request.cookies.get(key) - if not data: - return cls(secret_key=secret_key) - return cls.unserialize(data, secret_key) - - def save_cookie(self, response, key='session', expires=None, - session_expires=None, max_age=None, path='/', domain=None, - secure=None, httponly=False, force=False): - """Saves the SecureCookie in a cookie on response object. All - parameters that are not described here are forwarded directly - to :meth:`~BaseResponse.set_cookie`. - - :param response: a response object that has a - :meth:`~BaseResponse.set_cookie` method. - :param key: the name of the cookie. - :param session_expires: the expiration date of the secure cookie - stored information. If this is not provided - the cookie `expires` date is used instead. - """ - if force or self.should_save: - data = self.serialize(session_expires or expires) - response.set_cookie(key, data, expires=expires, max_age=max_age, - path=path, domain=domain, secure=secure, - httponly=httponly) diff --git a/pype/vendor/werkzeug/contrib/sessions.py b/pype/vendor/werkzeug/contrib/sessions.py deleted file mode 100644 index a9c3aa7caf..0000000000 --- a/pype/vendor/werkzeug/contrib/sessions.py +++ /dev/null @@ -1,352 +0,0 @@ -# -*- coding: utf-8 -*- -r""" - werkzeug.contrib.sessions - ~~~~~~~~~~~~~~~~~~~~~~~~~ - - This module contains some helper classes that help one to add session - support to a python WSGI application. For full client-side session - storage see :mod:`~werkzeug.contrib.securecookie` which implements a - secure, client-side session storage. - - - Application Integration - ======================= - - :: - - from werkzeug.contrib.sessions import SessionMiddleware, \ - FilesystemSessionStore - - app = SessionMiddleware(app, FilesystemSessionStore()) - - The current session will then appear in the WSGI environment as - `werkzeug.session`. 
However, it's recommended to use - the stores directly in the application rather than the middleware; for very simple - scripts a session middleware could be sufficient. - - This module does not implement methods or ways to check if a session is - expired. That should be done by a cron job and is storage specific. For - example to prune unused filesystem sessions one could check the modified - time of the files. If sessions are stored in the database the new() - method should add an expiration timestamp for the session. - - For better flexibility it's recommended to not use the middleware but the - store and session object directly in the application dispatching:: - - session_store = FilesystemSessionStore() - - def application(environ, start_response): - request = Request(environ) - sid = request.cookies.get('cookie_name') - if sid is None: - request.session = session_store.new() - else: - request.session = session_store.get(sid) - response = get_the_response_object(request) - if request.session.should_save: - session_store.save(request.session) - response.set_cookie('cookie_name', request.session.sid) - return response(environ, start_response) - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import re -import os -import tempfile -from os import path -from time import time -from random import random -from hashlib import sha1 -from pickle import dump, load, HIGHEST_PROTOCOL - -from werkzeug.datastructures import CallbackDict -from werkzeug.utils import dump_cookie, parse_cookie -from werkzeug.wsgi import ClosingIterator -from werkzeug.posixemulation import rename -from werkzeug._compat import PY2, text_type -from werkzeug.filesystem import get_filesystem_encoding - - -_sha1_re = re.compile(r'^[a-f0-9]{40}$') - - -def _urandom(): - if hasattr(os, 'urandom'): - return os.urandom(30) - return text_type(random()).encode('ascii') - - -def generate_key(salt=None): - if salt is None: - salt = repr(salt).encode('ascii') - return sha1(b''.join([ - salt, - str(time()).encode('ascii'), - _urandom() - ])).hexdigest() - - -class ModificationTrackingDict(CallbackDict): - __slots__ = ('modified',) - - def __init__(self, *args, **kwargs): - def on_update(self): - self.modified = True - self.modified = False - CallbackDict.__init__(self, on_update=on_update) - dict.update(self, *args, **kwargs) - - def copy(self): - """Create a flat copy of the dict.""" - missing = object() - result = object.__new__(self.__class__) - for name in self.__slots__: - val = getattr(self, name, missing) - if val is not missing: - setattr(result, name, val) - return result - - def __copy__(self): - return self.copy() - - -class Session(ModificationTrackingDict): - - """Subclass of a dict that keeps track of direct object changes. Changes - in mutable structures are not tracked; for those you have to set - `modified` to `True` by hand. - """ - __slots__ = ModificationTrackingDict.__slots__ + ('sid', 'new') - - def __init__(self, data, sid, new=False): - ModificationTrackingDict.__init__(self, data) - self.sid = sid - self.new = new - - def __repr__(self): - return '<%s %s%s>' % ( - self.__class__.__name__, - dict.__repr__(self), - self.should_save and '*' or '' - ) - - @property - def should_save(self): - """True if the session should be saved. - - .. versionchanged:: 0.6 - By default the session is now only saved if the session is - modified, not if it is new like it was before.
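The pruning strategy the module docstring above leaves to a cron job can be expressed in a few lines. The following is a minimal sketch, assuming a hypothetical store path of `/tmp/sessions` and a one-week inactivity cutoff, built on the `list()` and `get_session_filename()` helpers of the `FilesystemSessionStore` defined further down in this hunk::

    import os
    import time

    from werkzeug.contrib.sessions import FilesystemSessionStore

    MAX_AGE = 7 * 24 * 3600  # assumed cutoff: one week of inactivity

    store = FilesystemSessionStore('/tmp/sessions')  # assumed path
    cutoff = time.time() - MAX_AGE
    for sid in store.list():
        filename = store.get_session_filename(sid)
        try:
            # drop sessions whose files have not been modified recently
            if os.path.getmtime(filename) < cutoff:
                os.unlink(filename)
        except OSError:
            pass  # file vanished or was re-saved concurrently; skip it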
- """ - return self.modified - - -class SessionStore(object): - - """Baseclass for all session stores. The Werkzeug contrib module does not - implement any useful stores besides the filesystem store, application - developers are encouraged to create their own stores. - - :param session_class: The session class to use. Defaults to - :class:`Session`. - """ - - def __init__(self, session_class=None): - if session_class is None: - session_class = Session - self.session_class = session_class - - def is_valid_key(self, key): - """Check if a key has the correct format.""" - return _sha1_re.match(key) is not None - - def generate_key(self, salt=None): - """Simple function that generates a new session key.""" - return generate_key(salt) - - def new(self): - """Generate a new session.""" - return self.session_class({}, self.generate_key(), True) - - def save(self, session): - """Save a session.""" - - def save_if_modified(self, session): - """Save if a session class wants an update.""" - if session.should_save: - self.save(session) - - def delete(self, session): - """Delete a session.""" - - def get(self, sid): - """Get a session for this sid or a new session object. This method - has to check if the session key is valid and create a new session if - that wasn't the case. - """ - return self.session_class({}, sid, True) - - -#: used for temporary files by the filesystem session store -_fs_transaction_suffix = '.__wz_sess' - - -class FilesystemSessionStore(SessionStore): - - """Simple example session store that saves sessions on the filesystem. - This store works best on POSIX systems and Windows Vista / Windows - Server 2008 and newer. - - .. versionchanged:: 0.6 - `renew_missing` was added. Previously this was considered `True`, - now the default changed to `False` and it can be explicitly - deactivated. - - :param path: the path to the folder used for storing the sessions. - If not provided the default temporary directory is used. - :param filename_template: a string template used to give the session - a filename. ``%s`` is replaced with the - session id. - :param session_class: The session class to use. Defaults to - :class:`Session`. - :param renew_missing: set to `True` if you want the store to - give the user a new sid if the session was - not yet saved. - """ - - def __init__(self, path=None, filename_template='werkzeug_%s.sess', - session_class=None, renew_missing=False, mode=0o644): - SessionStore.__init__(self, session_class) - if path is None: - path = tempfile.gettempdir() - self.path = path - if isinstance(filename_template, text_type) and PY2: - filename_template = filename_template.encode( - get_filesystem_encoding()) - assert not filename_template.endswith(_fs_transaction_suffix), \ - 'filename templates may not end with %s' % _fs_transaction_suffix - self.filename_template = filename_template - self.renew_missing = renew_missing - self.mode = mode - - def get_session_filename(self, sid): - # out of the box, this should be a strict ASCII subset but - # you might reconfigure the session object to have a more - # arbitrary string. 
- if isinstance(sid, text_type) and PY2: - sid = sid.encode(get_filesystem_encoding()) - return path.join(self.path, self.filename_template % sid) - - def save(self, session): - fn = self.get_session_filename(session.sid) - fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix, - dir=self.path) - f = os.fdopen(fd, 'wb') - try: - dump(dict(session), f, HIGHEST_PROTOCOL) - finally: - f.close() - try: - rename(tmp, fn) - os.chmod(fn, self.mode) - except (IOError, OSError): - pass - - def delete(self, session): - fn = self.get_session_filename(session.sid) - try: - os.unlink(fn) - except OSError: - pass - - def get(self, sid): - if not self.is_valid_key(sid): - return self.new() - try: - f = open(self.get_session_filename(sid), 'rb') - except IOError: - if self.renew_missing: - return self.new() - data = {} - else: - try: - try: - data = load(f) - except Exception: - data = {} - finally: - f.close() - return self.session_class(data, sid, False) - - def list(self): - """Lists all sessions in the store. - - .. versionadded:: 0.6 - """ - before, after = self.filename_template.split('%s', 1) - filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before), - re.escape(after))) - result = [] - for filename in os.listdir(self.path): - #: this is a session that is still being saved. - if filename.endswith(_fs_transaction_suffix): - continue - match = filename_re.match(filename) - if match is not None: - result.append(match.group(1)) - return result - - -class SessionMiddleware(object): - - """A simple middleware that puts the session object of a store provided - into the WSGI environ. It automatically sets cookies and restores - sessions. - - However, a middleware is not the preferred solution because it won't be as - fast as sessions managed by the application itself, and it puts a key into - the WSGI environment that is only relevant for the application, which goes - against the concept of WSGI. - - The cookie parameters are the same as for the :func:`~dump_cookie` - function just prefixed with ``cookie_``. Additionally `max_age` is - called `cookie_age` and not `cookie_max_age` because of backwards - compatibility.
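The ``cookie_*`` parameters described above map directly onto keyword arguments of the `SessionMiddleware` constructor; a minimal wiring sketch (the one-hour `cookie_age` is an arbitrary choice, not a default)::

    from werkzeug.contrib.sessions import SessionMiddleware, FilesystemSessionStore

    def application(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'hello']

    app = SessionMiddleware(
        application, FilesystemSessionStore(),
        cookie_name='session_id',
        cookie_age=3600,        # note: `cookie_age`, not `cookie_max_age`
        cookie_httponly=True,
    )
    # handlers then find the session in environ['werkzeug.session']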
- """ - - def __init__(self, app, store, cookie_name='session_id', - cookie_age=None, cookie_expires=None, cookie_path='/', - cookie_domain=None, cookie_secure=None, - cookie_httponly=False, environ_key='werkzeug.session'): - self.app = app - self.store = store - self.cookie_name = cookie_name - self.cookie_age = cookie_age - self.cookie_expires = cookie_expires - self.cookie_path = cookie_path - self.cookie_domain = cookie_domain - self.cookie_secure = cookie_secure - self.cookie_httponly = cookie_httponly - self.environ_key = environ_key - - def __call__(self, environ, start_response): - cookie = parse_cookie(environ.get('HTTP_COOKIE', '')) - sid = cookie.get(self.cookie_name, None) - if sid is None: - session = self.store.new() - else: - session = self.store.get(sid) - environ[self.environ_key] = session - - def injecting_start_response(status, headers, exc_info=None): - if session.should_save: - self.store.save(session) - headers.append(('Set-Cookie', dump_cookie(self.cookie_name, - session.sid, self.cookie_age, - self.cookie_expires, self.cookie_path, - self.cookie_domain, self.cookie_secure, - self.cookie_httponly))) - return start_response(status, headers, exc_info) - return ClosingIterator(self.app(environ, injecting_start_response), - lambda: self.store.save_if_modified(session)) diff --git a/pype/vendor/werkzeug/contrib/testtools.py b/pype/vendor/werkzeug/contrib/testtools.py deleted file mode 100644 index 3dab6dbde1..0000000000 --- a/pype/vendor/werkzeug/contrib/testtools.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.testtools - ~~~~~~~~~~~~~~~~~~~~~~~~~~ - - This module implements extended wrappers for simplified testing. - - `TestResponse` - A response wrapper which adds various cached attributes for - simplified assertions on various content types. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -from werkzeug.utils import cached_property, import_string -from werkzeug.wrappers import Response - -from warnings import warn -warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and ' - 'will be removed with Werkzeug 1.0')) - - -class ContentAccessors(object): - - """ - A mixin class for response objects that provides a couple of useful - accessors for unittesting. 
- """ - - def xml(self): - """Get an etree if possible.""" - if 'xml' not in self.mimetype: - raise AttributeError( - 'Not a XML response (Content-Type: %s)' - % self.mimetype) - for module in ['xml.etree.ElementTree', 'ElementTree', - 'elementtree.ElementTree']: - etree = import_string(module, silent=True) - if etree is not None: - return etree.XML(self.body) - raise RuntimeError('You must have ElementTree installed ' - 'to use TestResponse.xml') - xml = cached_property(xml) - - def lxml(self): - """Get an lxml etree if possible.""" - if ('html' not in self.mimetype and 'xml' not in self.mimetype): - raise AttributeError('Not an HTML/XML response') - from lxml import etree - try: - from lxml.html import fromstring - except ImportError: - fromstring = etree.HTML - if self.mimetype == 'text/html': - return fromstring(self.data) - return etree.XML(self.data) - lxml = cached_property(lxml) - - def json(self): - """Get the result of simplejson.loads if possible.""" - if 'json' not in self.mimetype: - raise AttributeError('Not a JSON response') - try: - from simplejson import loads - except ImportError: - from json import loads - return loads(self.data) - json = cached_property(json) - - -class TestResponse(Response, ContentAccessors): - - """Pass this to `werkzeug.test.Client` for easier unittesting.""" diff --git a/pype/vendor/werkzeug/contrib/wrappers.py b/pype/vendor/werkzeug/contrib/wrappers.py deleted file mode 100644 index 6c86452915..0000000000 --- a/pype/vendor/werkzeug/contrib/wrappers.py +++ /dev/null @@ -1,284 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.contrib.wrappers - ~~~~~~~~~~~~~~~~~~~~~~~~~ - - Extra wrappers or mixins contributed by the community. These wrappers can - be mixed in into request objects to add extra functionality. - - Example:: - - from werkzeug.wrappers import Request as RequestBase - from werkzeug.contrib.wrappers import JSONRequestMixin - - class Request(RequestBase, JSONRequestMixin): - pass - - Afterwards this request object provides the extra functionality of the - :class:`JSONRequestMixin`. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import codecs -try: - from simplejson import loads -except ImportError: - from json import loads - -from werkzeug.exceptions import BadRequest -from werkzeug.utils import cached_property -from werkzeug.http import dump_options_header, parse_options_header -from werkzeug._compat import wsgi_decoding_dance - - -def is_known_charset(charset): - """Checks if the given charset is known to Python.""" - try: - codecs.lookup(charset) - except LookupError: - return False - return True - - -class JSONRequestMixin(object): - - """Add json method to a request object. This will parse the input data - through simplejson if possible. - - :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type - is not json or if the data itself cannot be parsed as json. - """ - - @cached_property - def json(self): - """Get the result of simplejson.loads if possible.""" - if 'json' not in self.environ.get('CONTENT_TYPE', ''): - raise BadRequest('Not a JSON request') - try: - return loads(self.data.decode(self.charset, self.encoding_errors)) - except Exception: - raise BadRequest('Unable to read JSON request') - - -class ProtobufRequestMixin(object): - - """Add protobuf parsing method to a request object. This will parse the - input data through `protobuf`_ if possible. 
- - :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type - is not protobuf or if the data itself cannot be parsed properly. - - .. _protobuf: http://code.google.com/p/protobuf/ - """ - - #: by default the :class:`ProtobufRequestMixin` will raise a - #: :exc:`~werkzeug.exceptions.BadRequest` if the object is not - #: initialized. You can bypass that check by setting this - #: attribute to `False`. - protobuf_check_initialization = True - - def parse_protobuf(self, proto_type): - """Parse the data into an instance of proto_type.""" - if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''): - raise BadRequest('Not a Protobuf request') - - obj = proto_type() - try: - obj.ParseFromString(self.data) - except Exception: - raise BadRequest("Unable to parse Protobuf request") - - # Fail if not all required fields are set - if self.protobuf_check_initialization and not obj.IsInitialized(): - raise BadRequest("Partial Protobuf request") - - return obj - - -class RoutingArgsRequestMixin(object): - - """This request mixin adds support for the wsgiorg routing args - `specification`_. - - .. _specification: https://wsgi.readthedocs.io/en/latest/specifications/routing_args.html - """ - - def _get_routing_args(self): - return self.environ.get('wsgiorg.routing_args', (()))[0] - - def _set_routing_args(self, value): - if self.shallow: - raise RuntimeError('A shallow request tried to modify the WSGI ' - 'environment. If you really want to do that, ' - 'set `shallow` to False.') - self.environ['wsgiorg.routing_args'] = (value, self.routing_vars) - - routing_args = property(_get_routing_args, _set_routing_args, doc=''' - The positional URL arguments as `tuple`.''') - del _get_routing_args, _set_routing_args - - def _get_routing_vars(self): - rv = self.environ.get('wsgiorg.routing_args') - if rv is not None: - return rv[1] - rv = {} - if not self.shallow: - self.routing_vars = rv - return rv - - def _set_routing_vars(self, value): - if self.shallow: - raise RuntimeError('A shallow request tried to modify the WSGI ' - 'environment. If you really want to do that, ' - 'set `shallow` to False.') - self.environ['wsgiorg.routing_args'] = (self.routing_args, value) - - routing_vars = property(_get_routing_vars, _set_routing_vars, doc=''' - The keyword URL arguments as `dict`.''') - del _get_routing_vars, _set_routing_vars - - -class ReverseSlashBehaviorRequestMixin(object): - - """This mixin reverses the trailing slash behavior of :attr:`script_root` - and :attr:`path`. This makes it possible to use :func:`~urlparse.urljoin` - directly on the paths. - - Because it changes the behavior of :class:`Request` this class has to be - mixed in *before* the actual request class:: - - class MyRequest(ReverseSlashBehaviorRequestMixin, Request): - pass - - This example shows the differences (for an application mounted on - `/application` and the request going to `/application/foo/bar`): - - +---------------+-------------------+---------------------+ - | | normal behavior | reverse behavior | - +===============+===================+=====================+ - | `script_root` | ``/application`` | ``/application/`` | - +---------------+-------------------+---------------------+ - | `path` | ``/foo/bar`` | ``foo/bar`` | - +---------------+-------------------+---------------------+ - """ - - @cached_property - def path(self): - """Requested path as unicode. This works a bit like the regular path - info in the WSGI environment but will not include a leading slash.
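The point of the reversed slash behavior documented above is that the two values compose cleanly with urljoin; a quick illustration of the table (plain Python 3 here, `urlparse.urljoin` on Python 2)::

    from urllib.parse import urljoin

    # values as produced by the mixin for an app mounted at /application
    # handling a request to /application/foo/bar
    script_root = '/application/'   # trailing slash kept
    path = 'foo/bar'                # leading slash stripped

    assert urljoin(script_root, path) == '/application/foo/bar'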
- """ - path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '', - self.charset, self.encoding_errors) - return path.lstrip('/') - - @cached_property - def script_root(self): - """The root path of the script includling a trailing slash.""" - path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '', - self.charset, self.encoding_errors) - return path.rstrip('/') + '/' - - -class DynamicCharsetRequestMixin(object): - - """"If this mixin is mixed into a request class it will provide - a dynamic `charset` attribute. This means that if the charset is - transmitted in the content type headers it's used from there. - - Because it changes the behavior or :class:`Request` this class has - to be mixed in *before* the actual request class:: - - class MyRequest(DynamicCharsetRequestMixin, Request): - pass - - By default the request object assumes that the URL charset is the - same as the data charset. If the charset varies on each request - based on the transmitted data it's not a good idea to let the URLs - change based on that. Most browsers assume either utf-8 or latin1 - for the URLs if they have troubles figuring out. It's strongly - recommended to set the URL charset to utf-8:: - - class MyRequest(DynamicCharsetRequestMixin, Request): - url_charset = 'utf-8' - - .. versionadded:: 0.6 - """ - - #: the default charset that is assumed if the content type header - #: is missing or does not contain a charset parameter. The default - #: is latin1 which is what HTTP specifies as default charset. - #: You may however want to set this to utf-8 to better support - #: browsers that do not transmit a charset for incoming data. - default_charset = 'latin1' - - def unknown_charset(self, charset): - """Called if a charset was provided but is not supported by - the Python codecs module. By default latin1 is assumed then - to not lose any information, you may override this method to - change the behavior. - - :param charset: the charset that was not found. - :return: the replacement charset. - """ - return 'latin1' - - @cached_property - def charset(self): - """The charset from the content type.""" - header = self.environ.get('CONTENT_TYPE') - if header: - ct, options = parse_options_header(header) - charset = options.get('charset') - if charset: - if is_known_charset(charset): - return charset - return self.unknown_charset(charset) - return self.default_charset - - -class DynamicCharsetResponseMixin(object): - - """If this mixin is mixed into a response class it will provide - a dynamic `charset` attribute. This means that if the charset is - looked up and stored in the `Content-Type` header and updates - itself automatically. This also means a small performance hit but - can be useful if you're working with different charsets on - responses. - - Because the charset attribute is no a property at class-level, the - default value is stored in `default_charset`. - - Because it changes the behavior or :class:`Response` this class has - to be mixed in *before* the actual response class:: - - class MyResponse(DynamicCharsetResponseMixin, Response): - pass - - .. versionadded:: 0.6 - """ - - #: the default charset. 
- default_charset = 'utf-8' - - def _get_charset(self): - header = self.headers.get('content-type') - if header: - charset = parse_options_header(header)[1].get('charset') - if charset: - return charset - return self.default_charset - - def _set_charset(self, charset): - header = self.headers.get('content-type') - ct, options = parse_options_header(header) - if not ct: - raise TypeError('Cannot set charset if Content-Type ' - 'header is missing.') - options['charset'] = charset - self.headers['Content-Type'] = dump_options_header(ct, options) - - charset = property(_get_charset, _set_charset, doc=""" - The charset for the response. It's stored inside the - Content-Type header as a parameter.""") - del _get_charset, _set_charset diff --git a/pype/vendor/werkzeug/datastructures.py b/pype/vendor/werkzeug/datastructures.py deleted file mode 100644 index 9624599da1..0000000000 --- a/pype/vendor/werkzeug/datastructures.py +++ /dev/null @@ -1,2762 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.datastructures - ~~~~~~~~~~~~~~~~~~~~~~~ - - This module provides mixins and classes with an immutable interface. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import re -import codecs -import mimetypes -from copy import deepcopy -from itertools import repeat -from collections import Container, Iterable, MutableSet - -from werkzeug._internal import _missing, _empty_stream -from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \ - PY2, text_type, integer_types, string_types, make_literal_wrapper, \ - to_native -from werkzeug.filesystem import get_filesystem_encoding - - -_locale_delim_re = re.compile(r'[_-]') - - -def is_immutable(self): - raise TypeError('%r objects are immutable' % self.__class__.__name__) - - -def iter_multi_items(mapping): - """Iterates over the items of a mapping yielding keys and values - without dropping any from more complex structures. - """ - if isinstance(mapping, MultiDict): - for item in iteritems(mapping, multi=True): - yield item - elif isinstance(mapping, dict): - for key, value in iteritems(mapping): - if isinstance(value, (tuple, list)): - for value in value: - yield key, value - else: - yield key, value - else: - for item in mapping: - yield item - - -def native_itermethods(names): - if not PY2: - return lambda x: x - - def setviewmethod(cls, name): - viewmethod_name = 'view%s' % name - viewmethod = lambda self, *a, **kw: ViewItems(self, name, 'view_%s' % name, *a, **kw) - viewmethod.__doc__ = \ - '"""`%s()` object providing a view on %s"""' % (viewmethod_name, name) - setattr(cls, viewmethod_name, viewmethod) - - def setitermethod(cls, name): - itermethod = getattr(cls, name) - setattr(cls, 'iter%s' % name, itermethod) - listmethod = lambda self, *a, **kw: list(itermethod(self, *a, **kw)) - listmethod.__doc__ = \ - 'Like :py:meth:`iter%s`, but returns a list.' % name - setattr(cls, name, listmethod) - - def wrap(cls): - for name in names: - setitermethod(cls, name) - setviewmethod(cls, name) - return cls - return wrap - - -class ImmutableListMixin(object): - - """Makes a :class:`list` immutable. - - .. 
versionadded:: 0.5 - - :private: - """ - - _hash_cache = None - - def __hash__(self): - if self._hash_cache is not None: - return self._hash_cache - rv = self._hash_cache = hash(tuple(self)) - return rv - - def __reduce_ex__(self, protocol): - return type(self), (list(self),) - - def __delitem__(self, key): - is_immutable(self) - - def __iadd__(self, other): - is_immutable(self) - __imul__ = __iadd__ - - def __setitem__(self, key, value): - is_immutable(self) - - def append(self, item): - is_immutable(self) - remove = append - - def extend(self, iterable): - is_immutable(self) - - def insert(self, pos, value): - is_immutable(self) - - def pop(self, index=-1): - is_immutable(self) - - def reverse(self): - is_immutable(self) - - def sort(self, cmp=None, key=None, reverse=None): - is_immutable(self) - - -class ImmutableList(ImmutableListMixin, list): - - """An immutable :class:`list`. - - .. versionadded:: 0.5 - - :private: - """ - - def __repr__(self): - return '%s(%s)' % ( - self.__class__.__name__, - list.__repr__(self), - ) - - -class ImmutableDictMixin(object): - - """Makes a :class:`dict` immutable. - - .. versionadded:: 0.5 - - :private: - """ - _hash_cache = None - - @classmethod - def fromkeys(cls, keys, value=None): - instance = super(cls, cls).__new__(cls) - instance.__init__(zip(keys, repeat(value))) - return instance - - def __reduce_ex__(self, protocol): - return type(self), (dict(self),) - - def _iter_hashitems(self): - return iteritems(self) - - def __hash__(self): - if self._hash_cache is not None: - return self._hash_cache - rv = self._hash_cache = hash(frozenset(self._iter_hashitems())) - return rv - - def setdefault(self, key, default=None): - is_immutable(self) - - def update(self, *args, **kwargs): - is_immutable(self) - - def pop(self, key, default=None): - is_immutable(self) - - def popitem(self): - is_immutable(self) - - def __setitem__(self, key, value): - is_immutable(self) - - def __delitem__(self, key): - is_immutable(self) - - def clear(self): - is_immutable(self) - - -class ImmutableMultiDictMixin(ImmutableDictMixin): - - """Makes a :class:`MultiDict` immutable. - - .. versionadded:: 0.5 - - :private: - """ - - def __reduce_ex__(self, protocol): - return type(self), (list(iteritems(self, multi=True)),) - - def _iter_hashitems(self): - return iteritems(self, multi=True) - - def add(self, key, value): - is_immutable(self) - - def popitemlist(self): - is_immutable(self) - - def poplist(self, key): - is_immutable(self) - - def setlist(self, key, new_list): - is_immutable(self) - - def setlistdefault(self, key, default_list=None): - is_immutable(self) - - -class UpdateDictMixin(object): - - """Makes dicts call `self.on_update` on modifications. - - .. 
versionadded:: 0.5 - - :private: - """ - - on_update = None - - def calls_update(name): - def oncall(self, *args, **kw): - rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw) - if self.on_update is not None: - self.on_update(self) - return rv - oncall.__name__ = name - return oncall - - def setdefault(self, key, default=None): - modified = key not in self - rv = super(UpdateDictMixin, self).setdefault(key, default) - if modified and self.on_update is not None: - self.on_update(self) - return rv - - def pop(self, key, default=_missing): - modified = key in self - if default is _missing: - rv = super(UpdateDictMixin, self).pop(key) - else: - rv = super(UpdateDictMixin, self).pop(key, default) - if modified and self.on_update is not None: - self.on_update(self) - return rv - - __setitem__ = calls_update('__setitem__') - __delitem__ = calls_update('__delitem__') - clear = calls_update('clear') - popitem = calls_update('popitem') - update = calls_update('update') - del calls_update - - -class TypeConversionDict(dict): - - """Works like a regular dict but the :meth:`get` method can perform - type conversions. :class:`MultiDict` and :class:`CombinedMultiDict` - are subclasses of this class and provide the same feature. - - .. versionadded:: 0.5 - """ - - def get(self, key, default=None, type=None): - """Return the default value if the requested data doesn't exist. - If `type` is provided and is a callable it should convert the value, - return it or raise a :exc:`ValueError` if that is not possible. In - this case the function will return the default as if the value was not - found: - - >>> d = TypeConversionDict(foo='42', bar='blub') - >>> d.get('foo', type=int) - 42 - >>> d.get('bar', -1, type=int) - -1 - - :param key: The key to be looked up. - :param default: The default value to be returned if the key can't - be looked up. If not further specified `None` is - returned. - :param type: A callable that is used to cast the value in the - :class:`MultiDict`. If a :exc:`ValueError` is raised - by this callable the default value is returned. - """ - try: - rv = self[key] - except KeyError: - return default - if type is not None: - try: - rv = type(rv) - except ValueError: - rv = default - return rv - - -class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): - - """Works like a :class:`TypeConversionDict` but does not support - modifications. - - .. versionadded:: 0.5 - """ - - def copy(self): - """Return a shallow mutable copy of this object. Keep in mind that - the standard library's :func:`copy` function is a no-op for this class - like for any other python immutable type (eg: :class:`tuple`). - """ - return TypeConversionDict(self) - - def __copy__(self): - return self - - -class ViewItems(object): - - def __init__(self, multi_dict, method, repr_name, *a, **kw): - self.__multi_dict = multi_dict - self.__method = method - self.__repr_name = repr_name - self.__a = a - self.__kw = kw - - def __get_items(self): - return getattr(self.__multi_dict, self.__method)(*self.__a, **self.__kw) - - def __repr__(self): - return '%s(%r)' % (self.__repr_name, list(self.__get_items())) - - def __iter__(self): - return iter(self.__get_items()) - - -@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) -class MultiDict(TypeConversionDict): - - """A :class:`MultiDict` is a dictionary subclass customized to deal with - multiple values for the same key which is for example used by the parsing - functions in the wrappers. 
This is necessary because some HTML form - elements pass multiple values for the same key. - - :class:`MultiDict` implements all standard dictionary methods. - Internally, it saves all values for a key as a list, but the standard dict - access methods will only return the first value for a key. If you want to - gain access to the other values, too, you have to use the `list` methods as - explained below. - - Basic Usage: - - >>> d = MultiDict([('a', 'b'), ('a', 'c')]) - >>> d - MultiDict([('a', 'b'), ('a', 'c')]) - >>> d['a'] - 'b' - >>> d.getlist('a') - ['b', 'c'] - >>> 'a' in d - True - - It behaves like a normal dict thus all dict functions will only return the - first value when multiple values for one key are found. - - From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a - subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will - render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP - exceptions. - - A :class:`MultiDict` can be constructed from an iterable of - ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2 - onwards some keyword parameters. - - :param mapping: the initial value for the :class:`MultiDict`. Either a - regular dict, an iterable of ``(key, value)`` tuples - or `None`. - """ - - def __init__(self, mapping=None): - if isinstance(mapping, MultiDict): - dict.__init__(self, ((k, l[:]) for k, l in iterlists(mapping))) - elif isinstance(mapping, dict): - tmp = {} - for key, value in iteritems(mapping): - if isinstance(value, (tuple, list)): - if len(value) == 0: - continue - value = list(value) - else: - value = [value] - tmp[key] = value - dict.__init__(self, tmp) - else: - tmp = {} - for key, value in mapping or (): - tmp.setdefault(key, []).append(value) - dict.__init__(self, tmp) - - def __getstate__(self): - return dict(self.lists()) - - def __setstate__(self, value): - dict.clear(self) - dict.update(self, value) - - def __getitem__(self, key): - """Return the first data value for this key; - raises KeyError if not found. - - :param key: The key to be looked up. - :raise KeyError: if the key does not exist. - """ - if key in self: - lst = dict.__getitem__(self, key) - if len(lst) > 0: - return lst[0] - raise exceptions.BadRequestKeyError(key) - - def __setitem__(self, key, value): - """Like :meth:`add` but removes an existing key first. - - :param key: the key for the value. - :param value: the value to set. - """ - dict.__setitem__(self, key, [value]) - - def add(self, key, value): - """Adds a new value for the key. - - .. versionadded:: 0.6 - - :param key: the key for the value. - :param value: the value to add. - """ - dict.setdefault(self, key, []).append(value) - - def getlist(self, key, type=None): - """Return the list of items for a given key. If that key is not in the - `MultiDict`, the return value will be an empty list. Just as `get` - `getlist` accepts a `type` parameter. All items will be converted - with the callable defined there. - - :param key: The key to be looked up. - :param type: A callable that is used to cast the value in the - :class:`MultiDict`. If a :exc:`ValueError` is raised - by this callable the value will be removed from the list. - :return: a :class:`list` of all the values for the key. 
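The type-filtering behavior described above silently drops values the callable rejects, for example::

    >>> d = MultiDict([('id', '1'), ('id', '2'), ('id', 'x')])
    >>> d.getlist('id', type=int)
    [1, 2]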
- """ - try: - rv = dict.__getitem__(self, key) - except KeyError: - return [] - if type is None: - return list(rv) - result = [] - for item in rv: - try: - result.append(type(item)) - except ValueError: - pass - return result - - def setlist(self, key, new_list): - """Remove the old values for a key and add new ones. Note that the list - you pass the values in will be shallow-copied before it is inserted in - the dictionary. - - >>> d = MultiDict() - >>> d.setlist('foo', ['1', '2']) - >>> d['foo'] - '1' - >>> d.getlist('foo') - ['1', '2'] - - :param key: The key for which the values are set. - :param new_list: An iterable with the new values for the key. Old values - are removed first. - """ - dict.__setitem__(self, key, list(new_list)) - - def setdefault(self, key, default=None): - """Returns the value for the key if it is in the dict, otherwise it - returns `default` and sets that value for `key`. - - :param key: The key to be looked up. - :param default: The default value to be returned if the key is not - in the dict. If not further specified it's `None`. - """ - if key not in self: - self[key] = default - else: - default = self[key] - return default - - def setlistdefault(self, key, default_list=None): - """Like `setdefault` but sets multiple values. The list returned - is not a copy, but the list that is actually used internally. This - means that you can put new values into the dict by appending items - to the list: - - >>> d = MultiDict({"foo": 1}) - >>> d.setlistdefault("foo").extend([2, 3]) - >>> d.getlist("foo") - [1, 2, 3] - - :param key: The key to be looked up. - :param default_list: An iterable of default values. It is either copied - (in case it was a list) or converted into a list - before returned. - :return: a :class:`list` - """ - if key not in self: - default_list = list(default_list or ()) - dict.__setitem__(self, key, default_list) - else: - default_list = dict.__getitem__(self, key) - return default_list - - def items(self, multi=False): - """Return an iterator of ``(key, value)`` pairs. - - :param multi: If set to `True` the iterator returned will have a pair - for each value of each key. Otherwise it will only - contain pairs for the first value of each key. - """ - - for key, values in iteritems(dict, self): - if multi: - for value in values: - yield key, value - else: - yield key, values[0] - - def lists(self): - """Return a list of ``(key, values)`` pairs, where values is the list - of all values associated with the key.""" - - for key, values in iteritems(dict, self): - yield key, list(values) - - def keys(self): - return iterkeys(dict, self) - - __iter__ = keys - - def values(self): - """Returns an iterator of the first value on every key's value list.""" - for values in itervalues(dict, self): - yield values[0] - - def listvalues(self): - """Return an iterator of all values associated with a key. Zipping - :meth:`keys` and this is the same as calling :meth:`lists`: - - >>> d = MultiDict({"foo": [1, 2, 3]}) - >>> zip(d.keys(), d.listvalues()) == d.lists() - True - """ - - return itervalues(dict, self) - - def copy(self): - """Return a shallow copy of this object.""" - return self.__class__(self) - - def deepcopy(self, memo=None): - """Return a deep copy of this object.""" - return self.__class__(deepcopy(self.to_dict(flat=False), memo)) - - def to_dict(self, flat=True): - """Return the contents as regular dict. If `flat` is `True` the - returned dict will only have the first item present, if `flat` is - `False` all values will be returned as lists. 
- - :param flat: If set to `False` the dict returned will have lists - with all the values in it. Otherwise it will only - contain the first value for each key. - :return: a :class:`dict` - """ - if flat: - return dict(iteritems(self)) - return dict(self.lists()) - - def update(self, other_dict): - """update() extends rather than replaces existing key lists: - - >>> a = MultiDict({'x': 1}) - >>> b = MultiDict({'x': 2, 'y': 3}) - >>> a.update(b) - >>> a - MultiDict([('y', 3), ('x', 1), ('x', 2)]) - - If the value list for a key in ``other_dict`` is empty, no new values - will be added to the dict and the key will not be created: - - >>> x = {'empty_list': []} - >>> y = MultiDict() - >>> y.update(x) - >>> y - MultiDict([]) - """ - for key, value in iter_multi_items(other_dict): - MultiDict.add(self, key, value) - - def pop(self, key, default=_missing): - """Pop the first item for a list on the dict. Afterwards the - key is removed from the dict, so additional values are discarded: - - >>> d = MultiDict({"foo": [1, 2, 3]}) - >>> d.pop("foo") - 1 - >>> "foo" in d - False - - :param key: the key to pop. - :param default: if provided the value to return if the key was - not in the dictionary. - """ - try: - lst = dict.pop(self, key) - - if len(lst) == 0: - raise exceptions.BadRequestKeyError() - - return lst[0] - except KeyError as e: - if default is not _missing: - return default - raise exceptions.BadRequestKeyError(str(e)) - - def popitem(self): - """Pop an item from the dict.""" - try: - item = dict.popitem(self) - - if len(item[1]) == 0: - raise exceptions.BadRequestKeyError() - - return (item[0], item[1][0]) - except KeyError as e: - raise exceptions.BadRequestKeyError(str(e)) - - def poplist(self, key): - """Pop the list for a key from the dict. If the key is not in the dict - an empty list is returned. - - .. versionchanged:: 0.5 - If the key does no longer exist a list is returned instead of - raising an error. - """ - return dict.pop(self, key, []) - - def popitemlist(self): - """Pop a ``(key, list)`` tuple from the dict.""" - try: - return dict.popitem(self) - except KeyError as e: - raise exceptions.BadRequestKeyError(str(e)) - - def __copy__(self): - return self.copy() - - def __deepcopy__(self, memo): - return self.deepcopy(memo=memo) - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, list(iteritems(self, multi=True))) - - -class _omd_bucket(object): - - """Wraps values in the :class:`OrderedMultiDict`. This makes it - possible to keep an order over multiple different keys. It requires - a lot of extra memory and slows down access a lot, but makes it - possible to access elements in O(1) and iterate in O(n). - """ - __slots__ = ('prev', 'key', 'value', 'next') - - def __init__(self, omd, key, value): - self.prev = omd._last_bucket - self.key = key - self.value = value - self.next = None - - if omd._first_bucket is None: - omd._first_bucket = self - if omd._last_bucket is not None: - omd._last_bucket.next = self - omd._last_bucket = self - - def unlink(self, omd): - if self.prev: - self.prev.next = self.next - if self.next: - self.next.prev = self.prev - if omd._first_bucket is self: - omd._first_bucket = self.next - if omd._last_bucket is self: - omd._last_bucket = self.prev - - -@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) -class OrderedMultiDict(MultiDict): - - """Works like a regular :class:`MultiDict` but preserves the - order of the fields. 
To convert the ordered multi dict into a - list you can use the :meth:`items` method and pass it ``multi=True``. - - In general an :class:`OrderedMultiDict` is an order of magnitude - slower than a :class:`MultiDict`. - - .. admonition:: note - - Due to a limitation in Python you cannot convert an ordered - multi dict into a regular dict by using ``dict(multidict)``. - Instead you have to use the :meth:`to_dict` method, otherwise - the internal bucket objects are exposed. - """ - - def __init__(self, mapping=None): - dict.__init__(self) - self._first_bucket = self._last_bucket = None - if mapping is not None: - OrderedMultiDict.update(self, mapping) - - def __eq__(self, other): - if not isinstance(other, MultiDict): - return NotImplemented - if isinstance(other, OrderedMultiDict): - iter1 = iteritems(self, multi=True) - iter2 = iteritems(other, multi=True) - try: - for k1, v1 in iter1: - k2, v2 = next(iter2) - if k1 != k2 or v1 != v2: - return False - except StopIteration: - return False - try: - next(iter2) - except StopIteration: - return True - return False - if len(self) != len(other): - return False - for key, values in iterlists(self): - if other.getlist(key) != values: - return False - return True - - __hash__ = None - - def __ne__(self, other): - return not self.__eq__(other) - - def __reduce_ex__(self, protocol): - return type(self), (list(iteritems(self, multi=True)),) - - def __getstate__(self): - return list(iteritems(self, multi=True)) - - def __setstate__(self, values): - dict.clear(self) - for key, value in values: - self.add(key, value) - - def __getitem__(self, key): - if key in self: - return dict.__getitem__(self, key)[0].value - raise exceptions.BadRequestKeyError(key) - - def __setitem__(self, key, value): - self.poplist(key) - self.add(key, value) - - def __delitem__(self, key): - self.pop(key) - - def keys(self): - return (key for key, value in iteritems(self)) - - __iter__ = keys - - def values(self): - return (value for key, value in iteritems(self)) - - def items(self, multi=False): - ptr = self._first_bucket - if multi: - while ptr is not None: - yield ptr.key, ptr.value - ptr = ptr.next - else: - returned_keys = set() - while ptr is not None: - if ptr.key not in returned_keys: - returned_keys.add(ptr.key) - yield ptr.key, ptr.value - ptr = ptr.next - - def lists(self): - returned_keys = set() - ptr = self._first_bucket - while ptr is not None: - if ptr.key not in returned_keys: - yield ptr.key, self.getlist(ptr.key) - returned_keys.add(ptr.key) - ptr = ptr.next - - def listvalues(self): - for key, values in iterlists(self): - yield values - - def add(self, key, value): - dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) - - def getlist(self, key, type=None): - try: - rv = dict.__getitem__(self, key) - except KeyError: - return [] - if type is None: - return [x.value for x in rv] - result = [] - for item in rv: - try: - result.append(type(item.value)) - except ValueError: - pass - return result - - def setlist(self, key, new_list): - self.poplist(key) - for value in new_list: - self.add(key, value) - - def setlistdefault(self, key, default_list=None): - raise TypeError('setlistdefault is unsupported for ' - 'ordered multi dicts') - - def update(self, mapping): - for key, value in iter_multi_items(mapping): - OrderedMultiDict.add(self, key, value) - - def poplist(self, key): - buckets = dict.pop(self, key, ()) - for bucket in buckets: - bucket.unlink(self) - return [x.value for x in buckets] - - def pop(self, key, default=_missing): - try: - 
buckets = dict.pop(self, key) - except KeyError as e: - if default is not _missing: - return default - raise exceptions.BadRequestKeyError(str(e)) - for bucket in buckets: - bucket.unlink(self) - return buckets[0].value - - def popitem(self): - try: - key, buckets = dict.popitem(self) - except KeyError as e: - raise exceptions.BadRequestKeyError(str(e)) - for bucket in buckets: - bucket.unlink(self) - return key, buckets[0].value - - def popitemlist(self): - try: - key, buckets = dict.popitem(self) - except KeyError as e: - raise exceptions.BadRequestKeyError(str(e)) - for bucket in buckets: - bucket.unlink(self) - return key, [x.value for x in buckets] - - -def _options_header_vkw(value, kw): - return dump_options_header(value, dict((k.replace('_', '-'), v) - for k, v in kw.items())) - - -def _unicodify_header_value(value): - if isinstance(value, bytes): - value = value.decode('latin-1') - if not isinstance(value, text_type): - value = text_type(value) - return value - - -@native_itermethods(['keys', 'values', 'items']) -class Headers(object): - - """An object that stores some headers. It has a dict-like interface - but is ordered and can store the same keys multiple times. - - This data structure is useful if you want a nicer way to handle WSGI - headers which are stored as tuples in a list. - - From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is - also a subclass of the :class:`~exceptions.BadRequest` HTTP exception - and will render a page for a ``400 BAD REQUEST`` if caught in a - catch-all for HTTP exceptions. - - Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers` - class, with the exception of `__getitem__`. :mod:`wsgiref` will return - `None` for ``headers['missing']``, whereas :class:`Headers` will raise - a :class:`KeyError`. - - To create a new :class:`Headers` object pass it a list or dict of headers - which are used as default values. This does not reuse the list passed - to the constructor for internal usage. - - :param defaults: The list of default values for the :class:`Headers`. - - .. versionchanged:: 0.9 - This data structure now stores unicode values similar to how the - multi dicts do it. The main difference is that bytes can be set as - well which will automatically be latin1 decoded. - - .. versionchanged:: 0.9 - The :meth:`linked` function was removed without replacement as it - was an API that does not support the changes to the encoding model. - """ - - def __init__(self, defaults=None): - self._list = [] - if defaults is not None: - if isinstance(defaults, (list, Headers)): - self._list.extend(defaults) - else: - self.extend(defaults) - - def __getitem__(self, key, _get_mode=False): - if not _get_mode: - if isinstance(key, integer_types): - return self._list[key] - elif isinstance(key, slice): - return self.__class__(self._list[key]) - if not isinstance(key, string_types): - raise exceptions.BadRequestKeyError(key) - ikey = key.lower() - for k, v in self._list: - if k.lower() == ikey: - return v - # micro optimization: if we are in get mode we will catch that - # exception one stack level down so we can raise a standard - # key error instead of our special one. 
- if _get_mode: - raise KeyError() - raise exceptions.BadRequestKeyError(key) - - def __eq__(self, other): - return other.__class__ is self.__class__ and \ - set(other._list) == set(self._list) - - __hash__ = None - - def __ne__(self, other): - return not self.__eq__(other) - - def get(self, key, default=None, type=None, as_bytes=False): - """Return the default value if the requested data doesn't exist. - If `type` is provided and is a callable it should convert the value, - return it or raise a :exc:`ValueError` if that is not possible. In - this case the function will return the default as if the value was not - found: - - >>> d = Headers([('Content-Length', '42')]) - >>> d.get('Content-Length', type=int) - 42 - - If a headers object is bound you must not add unicode strings - because no encoding takes place. - - .. versionadded:: 0.9 - Added support for `as_bytes`. - - :param key: The key to be looked up. - :param default: The default value to be returned if the key can't - be looked up. If not further specified `None` is - returned. - :param type: A callable that is used to cast the value in the - :class:`Headers`. If a :exc:`ValueError` is raised - by this callable the default value is returned. - :param as_bytes: return bytes instead of unicode strings. - """ - try: - rv = self.__getitem__(key, _get_mode=True) - except KeyError: - return default - if as_bytes: - rv = rv.encode('latin1') - if type is None: - return rv - try: - return type(rv) - except ValueError: - return default - - def getlist(self, key, type=None, as_bytes=False): - """Return the list of items for a given key. If that key is not in the - :class:`Headers`, the return value will be an empty list. Just as - :meth:`get` :meth:`getlist` accepts a `type` parameter. All items will - be converted with the callable defined there. - - .. versionadded:: 0.9 - Added support for `as_bytes`. - - :param key: The key to be looked up. - :param type: A callable that is used to cast the value in the - :class:`Headers`. If a :exc:`ValueError` is raised - by this callable the value will be removed from the list. - :return: a :class:`list` of all the values for the key. - :param as_bytes: return bytes instead of unicode strings. - """ - ikey = key.lower() - result = [] - for k, v in self: - if k.lower() == ikey: - if as_bytes: - v = v.encode('latin1') - if type is not None: - try: - v = type(v) - except ValueError: - continue - result.append(v) - return result - - def get_all(self, name): - """Return a list of all the values for the named field. - - This method is compatible with the :mod:`wsgiref` - :meth:`~wsgiref.headers.Headers.get_all` method. - """ - return self.getlist(name) - - def items(self, lower=False): - for key, value in self: - if lower: - key = key.lower() - yield key, value - - def keys(self, lower=False): - for key, _ in iteritems(self, lower): - yield key - - def values(self): - for _, value in iteritems(self): - yield value - - def extend(self, iterable): - """Extend the headers with a dict or an iterable yielding keys and - values. 
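For example, extending from a dict whose values are lists adds one header per value, mirroring repeated :meth:`add` calls::

    >>> h = Headers()
    >>> h.extend({'Vary': ['Accept', 'Cookie'], 'X-Frame-Options': 'DENY'})
    >>> h.getlist('Vary')
    ['Accept', 'Cookie']
    >>> h.get('X-Frame-Options')
    'DENY'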
- """ - if isinstance(iterable, dict): - for key, value in iteritems(iterable): - if isinstance(value, (tuple, list)): - for v in value: - self.add(key, v) - else: - self.add(key, value) - else: - for key, value in iterable: - self.add(key, value) - - def __delitem__(self, key, _index_operation=True): - if _index_operation and isinstance(key, (integer_types, slice)): - del self._list[key] - return - key = key.lower() - new = [] - for k, v in self._list: - if k.lower() != key: - new.append((k, v)) - self._list[:] = new - - def remove(self, key): - """Remove a key. - - :param key: The key to be removed. - """ - return self.__delitem__(key, _index_operation=False) - - def pop(self, key=None, default=_missing): - """Removes and returns a key or index. - - :param key: The key to be popped. If this is an integer the item at - that position is removed, if it's a string the value for - that key is. If the key is omitted or `None` the last - item is removed. - :return: an item. - """ - if key is None: - return self._list.pop() - if isinstance(key, integer_types): - return self._list.pop(key) - try: - rv = self[key] - self.remove(key) - except KeyError: - if default is not _missing: - return default - raise - return rv - - def popitem(self): - """Removes a key or index and returns a (key, value) item.""" - return self.pop() - - def __contains__(self, key): - """Check if a key is present.""" - try: - self.__getitem__(key, _get_mode=True) - except KeyError: - return False - return True - - has_key = __contains__ - - def __iter__(self): - """Yield ``(key, value)`` tuples.""" - return iter(self._list) - - def __len__(self): - return len(self._list) - - def add(self, _key, _value, **kw): - """Add a new header tuple to the list. - - Keyword arguments can specify additional parameters for the header - value, with underscores converted to dashes:: - - >>> d = Headers() - >>> d.add('Content-Type', 'text/plain') - >>> d.add('Content-Disposition', 'attachment', filename='foo.png') - - The keyword argument dumping uses :func:`dump_options_header` - behind the scenes. - - .. versionadded:: 0.4.1 - keyword arguments were added for :mod:`wsgiref` compatibility. - """ - if kw: - _value = _options_header_vkw(_value, kw) - _value = _unicodify_header_value(_value) - self._validate_value(_value) - self._list.append((_key, _value)) - - def _validate_value(self, value): - if not isinstance(value, text_type): - raise TypeError('Value should be unicode.') - if u'\n' in value or u'\r' in value: - raise ValueError('Detected newline in header value. This is ' - 'a potential security problem') - - def add_header(self, _key, _value, **_kw): - """Add a new header tuple to the list. - - An alias for :meth:`add` for compatibility with the :mod:`wsgiref` - :meth:`~wsgiref.headers.Headers.add_header` method. - """ - self.add(_key, _value, **_kw) - - def clear(self): - """Clears all headers.""" - del self._list[:] - - def set(self, _key, _value, **kw): - """Remove all header tuples for `key` and add a new one. The newly - added key either appears at the end of the list if there was no - entry or replaces the first one. - - Keyword arguments can specify additional parameters for the header - value, with underscores converted to dashes. See :meth:`add` for - more information. - - .. versionchanged:: 0.6.1 - :meth:`set` now accepts the same arguments as :meth:`add`. - - :param key: The key to be inserted. - :param value: The value to be inserted. 
- """ - if kw: - _value = _options_header_vkw(_value, kw) - _value = _unicodify_header_value(_value) - self._validate_value(_value) - if not self._list: - self._list.append((_key, _value)) - return - listiter = iter(self._list) - ikey = _key.lower() - for idx, (old_key, old_value) in enumerate(listiter): - if old_key.lower() == ikey: - # replace first ocurrence - self._list[idx] = (_key, _value) - break - else: - self._list.append((_key, _value)) - return - self._list[idx + 1:] = [t for t in listiter if t[0].lower() != ikey] - - def setdefault(self, key, default): - """Returns the value for the key if it is in the dict, otherwise it - returns `default` and sets that value for `key`. - - :param key: The key to be looked up. - :param default: The default value to be returned if the key is not - in the dict. If not further specified it's `None`. - """ - if key in self: - return self[key] - self.set(key, default) - return default - - def __setitem__(self, key, value): - """Like :meth:`set` but also supports index/slice based setting.""" - if isinstance(key, (slice, integer_types)): - if isinstance(key, integer_types): - value = [value] - value = [(k, _unicodify_header_value(v)) for (k, v) in value] - [self._validate_value(v) for (k, v) in value] - if isinstance(key, integer_types): - self._list[key] = value[0] - else: - self._list[key] = value - else: - self.set(key, value) - - def to_list(self, charset='iso-8859-1'): - """Convert the headers into a list suitable for WSGI.""" - from warnings import warn - warn(DeprecationWarning('Method removed, use to_wsgi_list instead'), - stacklevel=2) - return self.to_wsgi_list() - - def to_wsgi_list(self): - """Convert the headers into a list suitable for WSGI. - - The values are byte strings in Python 2 converted to latin1 and unicode - strings in Python 3 for the WSGI server to encode. - - :return: list - """ - if PY2: - return [(to_native(k), v.encode('latin1')) for k, v in self] - return list(self) - - def copy(self): - return self.__class__(self._list) - - def __copy__(self): - return self.copy() - - def __str__(self): - """Returns formatted headers suitable for HTTP transmission.""" - strs = [] - for key, value in self.to_wsgi_list(): - strs.append('%s: %s' % (key, value)) - strs.append('\r\n') - return '\r\n'.join(strs) - - def __repr__(self): - return '%s(%r)' % ( - self.__class__.__name__, - list(self) - ) - - -class ImmutableHeadersMixin(object): - - """Makes a :class:`Headers` immutable. We do not mark them as - hashable though since the only usecase for this datastructure - in Werkzeug is a view on a mutable structure. - - .. versionadded:: 0.5 - - :private: - """ - - def __delitem__(self, key): - is_immutable(self) - - def __setitem__(self, key, value): - is_immutable(self) - set = __setitem__ - - def add(self, item): - is_immutable(self) - remove = add_header = add - - def extend(self, iterable): - is_immutable(self) - - def insert(self, pos, value): - is_immutable(self) - - def pop(self, index=-1): - is_immutable(self) - - def popitem(self): - is_immutable(self) - - def setdefault(self, key, default): - is_immutable(self) - - -class EnvironHeaders(ImmutableHeadersMixin, Headers): - - """Read only version of the headers from a WSGI environment. This - provides the same interface as `Headers` and is constructed from - a WSGI environment. 
- - From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a - subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will - render a page for a ``400 BAD REQUEST`` if caught in a catch-all for - HTTP exceptions. - """ - - def __init__(self, environ): - self.environ = environ - - def __eq__(self, other): - return self.environ is other.environ - - __hash__ = None - - def __getitem__(self, key, _get_mode=False): - # _get_mode is a no-op for this class as there is no index but - # used because get() calls it. - if not isinstance(key, string_types): - raise KeyError(key) - key = key.upper().replace('-', '_') - if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'): - return _unicodify_header_value(self.environ[key]) - return _unicodify_header_value(self.environ['HTTP_' + key]) - - def __len__(self): - # the iter is necessary because otherwise list calls our - # len which would call list again and so forth. - return len(list(iter(self))) - - def __iter__(self): - for key, value in iteritems(self.environ): - if key.startswith('HTTP_') and key not in \ - ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): - yield (key[5:].replace('_', '-').title(), - _unicodify_header_value(value)) - elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH') and value: - yield (key.replace('_', '-').title(), - _unicodify_header_value(value)) - - def copy(self): - raise TypeError('cannot create %r copies' % self.__class__.__name__) - - -@native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) -class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): - - """A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict` - instances as sequence and it will combine the return values of all wrapped - dicts: - - >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict - >>> post = MultiDict([('foo', 'bar')]) - >>> get = MultiDict([('blub', 'blah')]) - >>> combined = CombinedMultiDict([get, post]) - >>> combined['foo'] - 'bar' - >>> combined['blub'] - 'blah' - - This works for all read operations and will raise a `TypeError` for - methods that usually change data which isn't possible. - - From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a - subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will - render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP - exceptions. - """ - - def __reduce_ex__(self, protocol): - return type(self), (self.dicts,) - - def __init__(self, dicts=None): - self.dicts = dicts or [] - - @classmethod - def fromkeys(cls): - raise TypeError('cannot create %r instances by fromkeys' % - cls.__name__) - - def __getitem__(self, key): - for d in self.dicts: - if key in d: - return d[key] - raise exceptions.BadRequestKeyError(key) - - def get(self, key, default=None, type=None): - for d in self.dicts: - if key in d: - if type is not None: - try: - return type(d[key]) - except ValueError: - continue - return d[key] - return default - - def getlist(self, key, type=None): - rv = [] - for d in self.dicts: - rv.extend(d.getlist(key, type)) - return rv - - def _keys_impl(self): - """This function exists so __len__ can be implemented more efficiently, - saving one list creation from an iterator. - - Using this for Python 2's ``dict.keys`` behavior would be useless since - `dict.keys` in Python 2 returns a list, while we have a set here. 
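EnvironHeaders carries no usage example of its own; a hedged sketch with an illustrative WSGI environ, showing the HTTP_ prefix stripping and title-casing done by __iter__:

>>> from werkzeug.datastructures import EnvironHeaders
>>> environ = {'CONTENT_TYPE': 'text/html', 'HTTP_X_FORWARDED_FOR': '10.0.0.1', 'wsgi.version': (1, 0)}
>>> headers = EnvironHeaders(environ)
>>> headers['X-Forwarded-For']
'10.0.0.1'
>>> sorted(headers.keys())
['Content-Type', 'X-Forwarded-For']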
- """ - rv = set() - for d in self.dicts: - rv.update(iterkeys(d)) - return rv - - def keys(self): - return iter(self._keys_impl()) - - __iter__ = keys - - def items(self, multi=False): - found = set() - for d in self.dicts: - for key, value in iteritems(d, multi): - if multi: - yield key, value - elif key not in found: - found.add(key) - yield key, value - - def values(self): - for key, value in iteritems(self): - yield value - - def lists(self): - rv = {} - for d in self.dicts: - for key, values in iterlists(d): - rv.setdefault(key, []).extend(values) - return iteritems(rv) - - def listvalues(self): - return (x[1] for x in self.lists()) - - def copy(self): - """Return a shallow copy of this object.""" - return self.__class__(self.dicts[:]) - - def to_dict(self, flat=True): - """Return the contents as regular dict. If `flat` is `True` the - returned dict will only have the first item present, if `flat` is - `False` all values will be returned as lists. - - :param flat: If set to `False` the dict returned will have lists - with all the values in it. Otherwise it will only - contain the first item for each key. - :return: a :class:`dict` - """ - rv = {} - for d in reversed(self.dicts): - rv.update(d.to_dict(flat)) - return rv - - def __len__(self): - return len(self._keys_impl()) - - def __contains__(self, key): - for d in self.dicts: - if key in d: - return True - return False - - has_key = __contains__ - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, self.dicts) - - -class FileMultiDict(MultiDict): - - """A special :class:`MultiDict` that has convenience methods to add - files to it. This is used for :class:`EnvironBuilder` and generally - useful for unittesting. - - .. versionadded:: 0.5 - """ - - def add_file(self, name, file, filename=None, content_type=None): - """Adds a new file to the dict. `file` can be a file name or - a :class:`file`-like or a :class:`FileStorage` object. - - :param name: the name of the field. - :param file: a filename or :class:`file`-like object - :param filename: an optional filename - :param content_type: an optional content type - """ - if isinstance(file, FileStorage): - value = file - else: - if isinstance(file, string_types): - if filename is None: - filename = file - file = open(file, 'rb') - if filename and content_type is None: - content_type = mimetypes.guess_type(filename)[0] or \ - 'application/octet-stream' - value = FileStorage(file, filename, name, content_type) - - self.add(name, value) - - -class ImmutableDict(ImmutableDictMixin, dict): - - """An immutable :class:`dict`. - - .. versionadded:: 0.5 - """ - - def __repr__(self): - return '%s(%s)' % ( - self.__class__.__name__, - dict.__repr__(self), - ) - - def copy(self): - """Return a shallow mutable copy of this object. Keep in mind that - the standard library's :func:`copy` function is a no-op for this class - like for any other python immutable type (eg: :class:`tuple`). - """ - return dict(self) - - def __copy__(self): - return self - - -class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict): - - """An immutable :class:`MultiDict`. - - .. versionadded:: 0.5 - """ - - def copy(self): - """Return a shallow mutable copy of this object. Keep in mind that - the standard library's :func:`copy` function is a no-op for this class - like for any other python immutable type (eg: :class:`tuple`). 
- """ - return MultiDict(self) - - def __copy__(self): - return self - - -class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict): - - """An immutable :class:`OrderedMultiDict`. - - .. versionadded:: 0.6 - """ - - def _iter_hashitems(self): - return enumerate(iteritems(self, multi=True)) - - def copy(self): - """Return a shallow mutable copy of this object. Keep in mind that - the standard library's :func:`copy` function is a no-op for this class - like for any other python immutable type (eg: :class:`tuple`). - """ - return OrderedMultiDict(self) - - def __copy__(self): - return self - - -@native_itermethods(['values']) -class Accept(ImmutableList): - - """An :class:`Accept` object is just a list subclass for lists of - ``(value, quality)`` tuples. It is automatically sorted by specificity - and quality. - - All :class:`Accept` objects work similar to a list but provide extra - functionality for working with the data. Containment checks are - normalized to the rules of that header: - - >>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)]) - >>> a.best - 'ISO-8859-1' - >>> 'iso-8859-1' in a - True - >>> 'UTF8' in a - True - >>> 'utf7' in a - False - - To get the quality for an item you can use normal item lookup: - - >>> print a['utf-8'] - 0.7 - >>> a['utf7'] - 0 - - .. versionchanged:: 0.5 - :class:`Accept` objects are forced immutable now. - """ - - def __init__(self, values=()): - if values is None: - list.__init__(self) - self.provided = False - elif isinstance(values, Accept): - self.provided = values.provided - list.__init__(self, values) - else: - self.provided = True - values = sorted(values, key=lambda x: (self._specificity(x[0]), x[1], x[0]), - reverse=True) - list.__init__(self, values) - - def _specificity(self, value): - """Returns a tuple describing the value's specificity.""" - return value != '*', - - def _value_matches(self, value, item): - """Check if a value matches a given accept item.""" - return item == '*' or item.lower() == value.lower() - - def __getitem__(self, key): - """Besides index lookup (getting item n) you can also pass it a string - to get the quality for the item. If the item is not in the list, the - returned quality is ``0``. - """ - if isinstance(key, string_types): - return self.quality(key) - return list.__getitem__(self, key) - - def quality(self, key): - """Returns the quality of the key. - - .. versionadded:: 0.6 - In previous versions you had to use the item-lookup syntax - (eg: ``obj[key]`` instead of ``obj.quality(key)``) - """ - for item, quality in self: - if self._value_matches(key, item): - return quality - return 0 - - def __contains__(self, value): - for item, quality in self: - if self._value_matches(value, item): - return True - return False - - def __repr__(self): - return '%s([%s])' % ( - self.__class__.__name__, - ', '.join('(%r, %s)' % (x, y) for x, y in self) - ) - - def index(self, key): - """Get the position of an entry or raise :exc:`ValueError`. - - :param key: The key to be looked up. - - .. versionchanged:: 0.5 - This used to raise :exc:`IndexError`, which was inconsistent - with the list API. - """ - if isinstance(key, string_types): - for idx, (item, quality) in enumerate(self): - if self._value_matches(key, item): - return idx - raise ValueError(key) - return list.index(self, key) - - def find(self, key): - """Get the position of an entry or return -1. - - :param key: The key to be looked up. 
- """ - try: - return self.index(key) - except ValueError: - return -1 - - def values(self): - """Iterate over all values.""" - for item in self: - yield item[0] - - def to_header(self): - """Convert the header set into an HTTP header string.""" - result = [] - for value, quality in self: - if quality != 1: - value = '%s;q=%s' % (value, quality) - result.append(value) - return ','.join(result) - - def __str__(self): - return self.to_header() - - def _best_single_match(self, match): - for client_item, quality in self: - if self._value_matches(match, client_item): - # self is sorted by specificity descending, we can exit - return client_item, quality - - def best_match(self, matches, default=None): - """Returns the best match from a list of possible matches based - on the specificity and quality of the client. If two items have the - same quality and specificity, the one is returned that comes first. - - :param matches: a list of matches to check for - :param default: the value that is returned if none match - """ - result = default - best_quality = -1 - best_specificity = (-1,) - for server_item in matches: - match = self._best_single_match(server_item) - if not match: - continue - client_item, quality = match - specificity = self._specificity(client_item) - if quality <= 0 or quality < best_quality: - continue - # better quality or same quality but more specific => better match - if quality > best_quality or specificity > best_specificity: - result = server_item - best_quality = quality - best_specificity = specificity - return result - - @property - def best(self): - """The best match as value.""" - if self: - return self[0][0] - - -class MIMEAccept(Accept): - - """Like :class:`Accept` but with special methods and behavior for - mimetypes. - """ - - def _specificity(self, value): - return tuple(x != '*' for x in value.split('/', 1)) - - def _value_matches(self, value, item): - def _normalize(x): - x = x.lower() - return x == '*' and ('*', '*') or x.split('/', 1) - - # this is from the application which is trusted. 
to avoid developer - # frustration we actually check these for valid values - if '/' not in value: - raise ValueError('invalid mimetype %r' % value) - value_type, value_subtype = _normalize(value) - if value_type == '*' and value_subtype != '*': - raise ValueError('invalid mimetype %r' % value) - - if '/' not in item: - return False - item_type, item_subtype = _normalize(item) - if item_type == '*' and item_subtype != '*': - return False - return ( - (item_type == item_subtype == '*' or - value_type == value_subtype == '*') or - (item_type == value_type and (item_subtype == '*' or - value_subtype == '*' or - item_subtype == value_subtype)) - ) - - @property - def accept_html(self): - """True if this object accepts HTML.""" - return ( - 'text/html' in self or - 'application/xhtml+xml' in self or - self.accept_xhtml - ) - - @property - def accept_xhtml(self): - """True if this object accepts XHTML.""" - return ( - 'application/xhtml+xml' in self or - 'application/xml' in self - ) - - @property - def accept_json(self): - """True if this object accepts JSON.""" - return 'application/json' in self - - -class LanguageAccept(Accept): - - """Like :class:`Accept` but with normalization for languages.""" - - def _value_matches(self, value, item): - def _normalize(language): - return _locale_delim_re.split(language.lower()) - return item == '*' or _normalize(value) == _normalize(item) - - -class CharsetAccept(Accept): - - """Like :class:`Accept` but with normalization for charsets.""" - - def _value_matches(self, value, item): - def _normalize(name): - try: - return codecs.lookup(name).name - except LookupError: - return name.lower() - return item == '*' or _normalize(value) == _normalize(item) - - -def cache_property(key, empty, type): - """Return a new property object for a cache header. Useful if you - want to add support for a cache extension in a subclass.""" - return property(lambda x: x._get_cache_value(key, empty, type), - lambda x, v: x._set_cache_value(key, v, type), - lambda x: x._del_cache_value(key), - 'accessor for %r' % key) - - -class _CacheControl(UpdateDictMixin, dict): - - """Subclass of a dict that stores values for a Cache-Control header. It - has accessors for all the cache-control directives specified in RFC 2616. - The class does not differentiate between request and response directives. - - Because the cache-control directives in the HTTP header use dashes the - python descriptors use underscores for that. - - To get a header of the :class:`CacheControl` object again you can convert - the object into a string or call the :meth:`to_header` method. If you plan - to subclass it and add your own items have a look at the sourcecode for - that class. - - .. versionchanged:: 0.4 - - Setting `no_cache` or `private` to boolean `True` will set the implicit - none-value which is ``*``: - - >>> cc = ResponseCacheControl() - >>> cc.no_cache = True - >>> cc - - >>> cc.no_cache - '*' - >>> cc.no_cache = None - >>> cc - - - In versions before 0.5 the behavior documented here affected the now - no longer existing `CacheControl` class. 
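A content-negotiation sketch using MIMEAccept together with the best_match() defined on Accept above; the (value, quality) pairs stand in for an already parsed client Accept header:

>>> from werkzeug.datastructures import MIMEAccept
>>> accept = MIMEAccept([('text/html', 1), ('application/json', 0.9), ('*/*', 0.8)])
>>> accept.best_match(['application/json', 'text/plain'])
'application/json'
>>> accept.accept_json
True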
- """ - - no_cache = cache_property('no-cache', '*', None) - no_store = cache_property('no-store', None, bool) - max_age = cache_property('max-age', -1, int) - no_transform = cache_property('no-transform', None, None) - - def __init__(self, values=(), on_update=None): - dict.__init__(self, values or ()) - self.on_update = on_update - self.provided = values is not None - - def _get_cache_value(self, key, empty, type): - """Used internally by the accessor properties.""" - if type is bool: - return key in self - if key in self: - value = self[key] - if value is None: - return empty - elif type is not None: - try: - value = type(value) - except ValueError: - pass - return value - - def _set_cache_value(self, key, value, type): - """Used internally by the accessor properties.""" - if type is bool: - if value: - self[key] = None - else: - self.pop(key, None) - else: - if value is None: - self.pop(key) - elif value is True: - self[key] = None - else: - self[key] = value - - def _del_cache_value(self, key): - """Used internally by the accessor properties.""" - if key in self: - del self[key] - - def to_header(self): - """Convert the stored values into a cache control header.""" - return dump_header(self) - - def __str__(self): - return self.to_header() - - def __repr__(self): - return '<%s %s>' % ( - self.__class__.__name__, - " ".join( - "%s=%r" % (k, v) for k, v in sorted(self.items()) - ), - ) - - -class RequestCacheControl(ImmutableDictMixin, _CacheControl): - - """A cache control for requests. This is immutable and gives access - to all the request-relevant cache control headers. - - To get a header of the :class:`RequestCacheControl` object again you can - convert the object into a string or call the :meth:`to_header` method. If - you plan to subclass it and add your own items have a look at the sourcecode - for that class. - - .. versionadded:: 0.5 - In previous versions a `CacheControl` class existed that was used - both for request and response. - """ - - max_stale = cache_property('max-stale', '*', int) - min_fresh = cache_property('min-fresh', '*', int) - no_transform = cache_property('no-transform', None, None) - only_if_cached = cache_property('only-if-cached', None, bool) - - -class ResponseCacheControl(_CacheControl): - - """A cache control for responses. Unlike :class:`RequestCacheControl` - this is mutable and gives access to response-relevant cache control - headers. - - To get a header of the :class:`ResponseCacheControl` object again you can - convert the object into a string or call the :meth:`to_header` method. If - you plan to subclass it and add your own items have a look at the sourcecode - for that class. - - .. versionadded:: 0.5 - In previous versions a `CacheControl` class existed that was used - both for request and response. - """ - - public = cache_property('public', None, bool) - private = cache_property('private', '*', None) - must_revalidate = cache_property('must-revalidate', None, bool) - proxy_revalidate = cache_property('proxy-revalidate', None, bool) - s_maxage = cache_property('s-maxage', None, None) - - -# attach cache_property to the _CacheControl as staticmethod -# so that others can reuse it. -_CacheControl.cache_property = staticmethod(cache_property) - - -class CallbackDict(UpdateDictMixin, dict): - - """A dict that calls a function passed every time something is changed. - The function is passed the dict instance. 
- """ - - def __init__(self, initial=None, on_update=None): - dict.__init__(self, initial or ()) - self.on_update = on_update - - def __repr__(self): - return '<%s %s>' % ( - self.__class__.__name__, - dict.__repr__(self) - ) - - -class HeaderSet(MutableSet): - - """Similar to the :class:`ETags` class this implements a set-like structure. - Unlike :class:`ETags` this is case insensitive and used for vary, allow, and - content-language headers. - - If not constructed using the :func:`parse_set_header` function the - instantiation works like this: - - >>> hs = HeaderSet(['foo', 'bar', 'baz']) - >>> hs - HeaderSet(['foo', 'bar', 'baz']) - """ - - def __init__(self, headers=None, on_update=None): - self._headers = list(headers or ()) - self._set = set([x.lower() for x in self._headers]) - self.on_update = on_update - - def add(self, header): - """Add a new header to the set.""" - self.update((header,)) - - def remove(self, header): - """Remove a header from the set. This raises an :exc:`KeyError` if the - header is not in the set. - - .. versionchanged:: 0.5 - In older versions a :exc:`IndexError` was raised instead of a - :exc:`KeyError` if the object was missing. - - :param header: the header to be removed. - """ - key = header.lower() - if key not in self._set: - raise KeyError(header) - self._set.remove(key) - for idx, key in enumerate(self._headers): - if key.lower() == header: - del self._headers[idx] - break - if self.on_update is not None: - self.on_update(self) - - def update(self, iterable): - """Add all the headers from the iterable to the set. - - :param iterable: updates the set with the items from the iterable. - """ - inserted_any = False - for header in iterable: - key = header.lower() - if key not in self._set: - self._headers.append(header) - self._set.add(key) - inserted_any = True - if inserted_any and self.on_update is not None: - self.on_update(self) - - def discard(self, header): - """Like :meth:`remove` but ignores errors. - - :param header: the header to be discarded. - """ - try: - return self.remove(header) - except KeyError: - pass - - def find(self, header): - """Return the index of the header in the set or return -1 if not found. - - :param header: the header to be looked up. - """ - header = header.lower() - for idx, item in enumerate(self._headers): - if item.lower() == header: - return idx - return -1 - - def index(self, header): - """Return the index of the header in the set or raise an - :exc:`IndexError`. - - :param header: the header to be looked up. - """ - rv = self.find(header) - if rv < 0: - raise IndexError(header) - return rv - - def clear(self): - """Clear the set.""" - self._set.clear() - del self._headers[:] - if self.on_update is not None: - self.on_update(self) - - def as_set(self, preserve_casing=False): - """Return the set as real python set type. When calling this, all - the items are converted to lowercase and the ordering is lost. - - :param preserve_casing: if set to `True` the items in the set returned - will have the original case like in the - :class:`HeaderSet`, otherwise they will - be lowercase. 
- """ - if preserve_casing: - return set(self._headers) - return set(self._set) - - def to_header(self): - """Convert the header set into an HTTP header string.""" - return ', '.join(map(quote_header_value, self._headers)) - - def __getitem__(self, idx): - return self._headers[idx] - - def __delitem__(self, idx): - rv = self._headers.pop(idx) - self._set.remove(rv.lower()) - if self.on_update is not None: - self.on_update(self) - - def __setitem__(self, idx, value): - old = self._headers[idx] - self._set.remove(old.lower()) - self._headers[idx] = value - self._set.add(value.lower()) - if self.on_update is not None: - self.on_update(self) - - def __contains__(self, header): - return header.lower() in self._set - - def __len__(self): - return len(self._set) - - def __iter__(self): - return iter(self._headers) - - def __nonzero__(self): - return bool(self._set) - - def __str__(self): - return self.to_header() - - def __repr__(self): - return '%s(%r)' % ( - self.__class__.__name__, - self._headers - ) - - -class ETags(Container, Iterable): - - """A set that can be used to check if one etag is present in a collection - of etags. - """ - - def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): - self._strong = frozenset(not star_tag and strong_etags or ()) - self._weak = frozenset(weak_etags or ()) - self.star_tag = star_tag - - def as_set(self, include_weak=False): - """Convert the `ETags` object into a python set. Per default all the - weak etags are not part of this set.""" - rv = set(self._strong) - if include_weak: - rv.update(self._weak) - return rv - - def is_weak(self, etag): - """Check if an etag is weak.""" - return etag in self._weak - - def is_strong(self, etag): - """Check if an etag is strong.""" - return etag in self._strong - - def contains_weak(self, etag): - """Check if an etag is part of the set including weak and strong tags.""" - return self.is_weak(etag) or self.contains(etag) - - def contains(self, etag): - """Check if an etag is part of the set ignoring weak tags. - It is also possible to use the ``in`` operator. - """ - if self.star_tag: - return True - return self.is_strong(etag) - - def contains_raw(self, etag): - """When passed a quoted tag it will check if this tag is part of the - set. If the tag is weak it is checked against weak and strong tags, - otherwise strong only.""" - etag, weak = unquote_etag(etag) - if weak: - return self.contains_weak(etag) - return self.contains(etag) - - def to_header(self): - """Convert the etags set into a HTTP header string.""" - if self.star_tag: - return '*' - return ', '.join( - ['"%s"' % x for x in self._strong] + - ['W/"%s"' % x for x in self._weak] - ) - - def __call__(self, etag=None, data=None, include_weak=False): - if [etag, data].count(None) != 1: - raise TypeError('either tag or data required, but at least one') - if etag is None: - etag = generate_etag(data) - if include_weak: - if etag in self._weak: - return True - return etag in self._strong - - def __bool__(self): - return bool(self.star_tag or self._strong or self._weak) - - __nonzero__ = __bool__ - - def __str__(self): - return self.to_header() - - def __iter__(self): - return iter(self._strong) - - def __contains__(self, etag): - return self.contains(etag) - - def __repr__(self): - return '<%s %r>' % (self.__class__.__name__, str(self)) - - -class IfRange(object): - - """Very simple object that represents the `If-Range` header in parsed - form. It will either have neither a etag or date or one of either but - never both. - - .. 
versionadded:: 0.7 - """ - - def __init__(self, etag=None, date=None): - #: The etag parsed and unquoted. Ranges always operate on strong - #: etags so the weakness information is not necessary. - self.etag = etag - #: The date in parsed format or `None`. - self.date = date - - def to_header(self): - """Converts the object back into an HTTP header.""" - if self.date is not None: - return http_date(self.date) - if self.etag is not None: - return quote_etag(self.etag) - return '' - - def __str__(self): - return self.to_header() - - def __repr__(self): - return '<%s %r>' % (self.__class__.__name__, str(self)) - - -class Range(object): - - """Represents a range header. All the methods are only supporting bytes - as unit. It does store multiple ranges but :meth:`range_for_length` will - only work if only one range is provided. - - .. versionadded:: 0.7 - """ - - def __init__(self, units, ranges): - #: The units of this range. Usually "bytes". - self.units = units - #: A list of ``(begin, end)`` tuples for the range header provided. - #: The ranges are non-inclusive. - self.ranges = ranges - - def range_for_length(self, length): - """If the range is for bytes, the length is not None and there is - exactly one range and it is satisfiable it returns a ``(start, stop)`` - tuple, otherwise `None`. - """ - if self.units != 'bytes' or length is None or len(self.ranges) != 1: - return None - start, end = self.ranges[0] - if end is None: - end = length - if start < 0: - start += length - if is_byte_range_valid(start, end, length): - return start, min(end, length) - - def make_content_range(self, length): - """Creates a :class:`~werkzeug.datastructures.ContentRange` object - from the current range and given content length. - """ - rng = self.range_for_length(length) - if rng is not None: - return ContentRange(self.units, rng[0], rng[1], length) - - def to_header(self): - """Converts the object back into an HTTP header.""" - ranges = [] - for begin, end in self.ranges: - if end is None: - ranges.append(begin >= 0 and '%s-' % begin or str(begin)) - else: - ranges.append('%s-%s' % (begin, end - 1)) - return '%s=%s' % (self.units, ','.join(ranges)) - - def to_content_range_header(self, length): - """Converts the object into `Content-Range` HTTP header, - based on given length - """ - range_for_length = self.range_for_length(length) - if range_for_length is not None: - return '%s %d-%d/%d' % (self.units, - range_for_length[0], - range_for_length[1] - 1, length) - return None - - def __str__(self): - return self.to_header() - - def __repr__(self): - return '<%s %r>' % (self.__class__.__name__, str(self)) - - -class ContentRange(object): - - """Represents the content range header. - - .. versionadded:: 0.7 - """ - - def __init__(self, units, start, stop, length=None, on_update=None): - assert is_byte_range_valid(start, stop, length), \ - 'Bad range provided' - self.on_update = on_update - self.set(start, stop, length, units) - - def _callback_property(name): - def fget(self): - return getattr(self, name) - - def fset(self, value): - setattr(self, name, value) - if self.on_update is not None: - self.on_update(self) - return property(fget, fset) - - #: The units to use, usually "bytes" - units = _callback_property('_units') - #: The start point of the range or `None`. - start = _callback_property('_start') - #: The stop point of the range (non-inclusive) or `None`. Can only be - #: `None` if also start is `None`. - stop = _callback_property('_stop') - #: The length of the range or `None`. 
- length = _callback_property('_length') - - def set(self, start, stop, length=None, units='bytes'): - """Simple method to update the ranges.""" - assert is_byte_range_valid(start, stop, length), \ - 'Bad range provided' - self._units = units - self._start = start - self._stop = stop - self._length = length - if self.on_update is not None: - self.on_update(self) - - def unset(self): - """Sets the units to `None` which indicates that the header should - no longer be used. - """ - self.set(None, None, units=None) - - def to_header(self): - if self.units is None: - return '' - if self.length is None: - length = '*' - else: - length = self.length - if self.start is None: - return '%s */%s' % (self.units, length) - return '%s %s-%s/%s' % ( - self.units, - self.start, - self.stop - 1, - length - ) - - def __nonzero__(self): - return self.units is not None - - __bool__ = __nonzero__ - - def __str__(self): - return self.to_header() - - def __repr__(self): - return '<%s %r>' % (self.__class__.__name__, str(self)) - - -class Authorization(ImmutableDictMixin, dict): - - """Represents an `Authorization` header sent by the client. You should - not create this kind of object yourself but use it when it's returned by - the `parse_authorization_header` function. - - This object is a dict subclass and can be altered by setting dict items - but it should be considered immutable as it's returned by the client and - not meant for modifications. - - .. versionchanged:: 0.5 - This object became immutable. - """ - - def __init__(self, auth_type, data=None): - dict.__init__(self, data or {}) - self.type = auth_type - - username = property(lambda x: x.get('username'), doc=''' - The username transmitted. This is set for both basic and digest - auth all the time.''') - password = property(lambda x: x.get('password'), doc=''' - When the authentication type is basic this is the password - transmitted by the client, else `None`.''') - realm = property(lambda x: x.get('realm'), doc=''' - This is the server realm sent back for HTTP digest auth.''') - nonce = property(lambda x: x.get('nonce'), doc=''' - The nonce the server sent for digest auth, sent back by the client. - A nonce should be unique for every 401 response for HTTP digest - auth.''') - uri = property(lambda x: x.get('uri'), doc=''' - The URI from Request-URI of the Request-Line; duplicated because - proxies are allowed to change the Request-Line in transit. HTTP - digest auth only.''') - nc = property(lambda x: x.get('nc'), doc=''' - The nonce count value transmitted by clients if a qop-header is - also transmitted. HTTP digest auth only.''') - cnonce = property(lambda x: x.get('cnonce'), doc=''' - If the server sent a qop-header in the ``WWW-Authenticate`` - header, the client has to provide this value for HTTP digest auth. - See the RFC for more details.''') - response = property(lambda x: x.get('response'), doc=''' - A string of 32 hex digits computed as defined in RFC 2617, which - proves that the user knows a password. Digest auth only.''') - opaque = property(lambda x: x.get('opaque'), doc=''' - The opaque header from the server returned unchanged by the client. - It is recommended that this string be base64 or hexadecimal data. - Digest auth only.''') - qop = property(lambda x: x.get('qop'), doc=''' - Indicates what "quality of protection" the client has applied to - the message for HTTP digest auth. 
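A byte-range sketch tying Range and ContentRange together; the stored ranges are non-inclusive, so (0, 500) serializes as 0-499:

>>> from werkzeug.datastructures import Range
>>> rng = Range('bytes', [(0, 500)])
>>> rng.to_header()
'bytes=0-499'
>>> rng.range_for_length(1000)
(0, 500)
>>> rng.make_content_range(1000).to_header()
'bytes 0-499/1000'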
Note that this is a single token, - not a quoted list of alternatives as in WWW-Authenticate.''') - - -class WWWAuthenticate(UpdateDictMixin, dict): - - """Provides simple access to `WWW-Authenticate` headers.""" - - #: list of keys that require quoting in the generated header - _require_quoting = frozenset(['domain', 'nonce', 'opaque', 'realm', 'qop']) - - def __init__(self, auth_type=None, values=None, on_update=None): - dict.__init__(self, values or ()) - if auth_type: - self['__auth_type__'] = auth_type - self.on_update = on_update - - def set_basic(self, realm='authentication required'): - """Clear the auth info and enable basic auth.""" - dict.clear(self) - dict.update(self, {'__auth_type__': 'basic', 'realm': realm}) - if self.on_update: - self.on_update(self) - - def set_digest(self, realm, nonce, qop=('auth',), opaque=None, - algorithm=None, stale=False): - """Clear the auth info and enable digest auth.""" - d = { - '__auth_type__': 'digest', - 'realm': realm, - 'nonce': nonce, - 'qop': dump_header(qop) - } - if stale: - d['stale'] = 'TRUE' - if opaque is not None: - d['opaque'] = opaque - if algorithm is not None: - d['algorithm'] = algorithm - dict.clear(self) - dict.update(self, d) - if self.on_update: - self.on_update(self) - - def to_header(self): - """Convert the stored values into a WWW-Authenticate header.""" - d = dict(self) - auth_type = d.pop('__auth_type__', None) or 'basic' - return '%s %s' % (auth_type.title(), ', '.join([ - '%s=%s' % (key, quote_header_value(value, - allow_token=key not in self._require_quoting)) - for key, value in iteritems(d) - ])) - - def __str__(self): - return self.to_header() - - def __repr__(self): - return '<%s %r>' % ( - self.__class__.__name__, - self.to_header() - ) - - def auth_property(name, doc=None): - """A static helper function for subclasses to add extra authentication - system properties onto a class:: - - class FooAuthenticate(WWWAuthenticate): - special_realm = auth_property('special_realm') - - For more information have a look at the sourcecode to see how the - regular properties (:attr:`realm` etc.) are implemented. - """ - - def _set_value(self, value): - if value is None: - self.pop(name, None) - else: - self[name] = str(value) - return property(lambda x: x.get(name), _set_value, doc=doc) - - def _set_property(name, doc=None): - def fget(self): - def on_update(header_set): - if not header_set and name in self: - del self[name] - elif header_set: - self[name] = header_set.to_header() - return parse_set_header(self.get(name), on_update) - return property(fget, doc=doc) - - type = auth_property('__auth_type__', doc=''' - The type of the auth mechanism. HTTP currently specifies - `Basic` and `Digest`.''') - realm = auth_property('realm', doc=''' - A string to be displayed to users so they know which username and - password to use. This string should contain at least the name of - the host performing the authentication and might additionally - indicate the collection of users who might have access.''') - domain = _set_property('domain', doc=''' - A list of URIs that define the protection space. If a URI is an - absolute path, it is relative to the canonical root URL of the - server being accessed.''') - nonce = auth_property('nonce', doc=''' - A server-specified data string which should be uniquely generated - each time a 401 response is made. 
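A brief WWWAuthenticate sketch using the set_basic() helper above; realm is in the _require_quoting set, so it is always emitted quoted:

>>> from werkzeug.datastructures import WWWAuthenticate
>>> www = WWWAuthenticate()
>>> www.set_basic(realm='pype')
>>> www.to_header()
'Basic realm="pype"'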
It is recommended that this - string be base64 or hexadecimal data.''') - opaque = auth_property('opaque', doc=''' - A string of data, specified by the server, which should be returned - by the client unchanged in the Authorization header of subsequent - requests with URIs in the same protection space. It is recommended - that this string be base64 or hexadecimal data.''') - algorithm = auth_property('algorithm', doc=''' - A string indicating a pair of algorithms used to produce the digest - and a checksum. If this is not present it is assumed to be "MD5". - If the algorithm is not understood, the challenge should be ignored - (and a different one used, if there is more than one).''') - qop = _set_property('qop', doc=''' - A set of quality-of-privacy directives such as auth and auth-int.''') - - def _get_stale(self): - val = self.get('stale') - if val is not None: - return val.lower() == 'true' - - def _set_stale(self, value): - if value is None: - self.pop('stale', None) - else: - self['stale'] = value and 'TRUE' or 'FALSE' - stale = property(_get_stale, _set_stale, doc=''' - A flag, indicating that the previous request from the client was - rejected because the nonce value was stale.''') - del _get_stale, _set_stale - - # make auth_property a staticmethod so that subclasses of - # `WWWAuthenticate` can use it for new properties. - auth_property = staticmethod(auth_property) - del _set_property - - -class FileStorage(object): - - """The :class:`FileStorage` class is a thin wrapper over incoming files. - It is used by the request object to represent uploaded files. All the - attributes of the wrapper stream are proxied by the file storage so - it's possible to do ``storage.read()`` instead of the long form - ``storage.stream.read()``. - """ - - def __init__(self, stream=None, filename=None, name=None, - content_type=None, content_length=None, - headers=None): - self.name = name - self.stream = stream or _empty_stream - - # if no filename is provided we can attempt to get the filename - # from the stream object passed. There we have to be careful to - # skip things like , etc. Python marks these - # special filenames with angular brackets. - if filename is None: - filename = getattr(stream, 'name', None) - s = make_literal_wrapper(filename) - if filename and filename[0] == s('<') and filename[-1] == s('>'): - filename = None - - # On Python 3 we want to make sure the filename is always unicode. - # This might not be if the name attribute is bytes due to the - # file being opened from the bytes API. - if not PY2 and isinstance(filename, bytes): - filename = filename.decode(get_filesystem_encoding(), - 'replace') - - self.filename = filename - if headers is None: - headers = Headers() - self.headers = headers - if content_type is not None: - headers['Content-Type'] = content_type - if content_length is not None: - headers['Content-Length'] = str(content_length) - - def _parse_content_type(self): - if not hasattr(self, '_parsed_content_type'): - self._parsed_content_type = \ - parse_options_header(self.content_type) - - @property - def content_type(self): - """The content-type sent in the header. Usually not available""" - return self.headers.get('content-type') - - @property - def content_length(self): - """The content-length sent in the header. Usually not available""" - return int(self.headers.get('content-length') or 0) - - @property - def mimetype(self): - """Like :attr:`content_type`, but without parameters (eg, without - charset, type etc.) and always lowercase. 
For example if the content - type is ``text/HTML; charset=utf-8`` the mimetype would be - ``'text/html'``. - - .. versionadded:: 0.7 - """ - self._parse_content_type() - return self._parsed_content_type[0].lower() - - @property - def mimetype_params(self): - """The mimetype parameters as dict. For example if the content - type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - - .. versionadded:: 0.7 - """ - self._parse_content_type() - return self._parsed_content_type[1] - - def save(self, dst, buffer_size=16384): - """Save the file to a destination path or file object. If the - destination is a file object you have to close it yourself after the - call. The buffer size is the number of bytes held in memory during - the copy process. It defaults to 16KB. - - For secure file saving also have a look at :func:`secure_filename`. - - :param dst: a filename or open file object the uploaded file - is saved to. - :param buffer_size: the size of the buffer. This works the same as - the `length` parameter of - :func:`shutil.copyfileobj`. - """ - from shutil import copyfileobj - close_dst = False - if isinstance(dst, string_types): - dst = open(dst, 'wb') - close_dst = True - try: - copyfileobj(self.stream, dst, buffer_size) - finally: - if close_dst: - dst.close() - - def close(self): - """Close the underlying file if possible.""" - try: - self.stream.close() - except Exception: - pass - - def __nonzero__(self): - return bool(self.filename) - __bool__ = __nonzero__ - - def __getattr__(self, name): - return getattr(self.stream, name) - - def __iter__(self): - return iter(self.stream) - - def __repr__(self): - return '<%s: %r (%r)>' % ( - self.__class__.__name__, - self.filename, - self.content_type - ) - - -# circular dependencies -from werkzeug.http import dump_options_header, dump_header, generate_etag, \ - quote_header_value, parse_set_header, unquote_etag, quote_etag, \ - parse_options_header, http_date, is_byte_range_valid -from werkzeug import exceptions diff --git a/pype/vendor/werkzeug/debug/__init__.py b/pype/vendor/werkzeug/debug/__init__.py deleted file mode 100644 index 14fe193524..0000000000 --- a/pype/vendor/werkzeug/debug/__init__.py +++ /dev/null @@ -1,470 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.debug - ~~~~~~~~~~~~~~ - - WSGI application traceback debugger. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD, see LICENSE for more details. -""" -import os -import re -import sys -import uuid -import json -import time -import getpass -import hashlib -import mimetypes -from itertools import chain -from os.path import join, dirname, basename, isfile -from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response -from werkzeug.http import parse_cookie -from werkzeug.debug.tbtools import get_current_traceback, render_console_html -from werkzeug.debug.console import Console -from werkzeug.security import gen_salt -from werkzeug._internal import _log -from werkzeug._compat import text_type - - -# DEPRECATED -#: import this here because it once was documented as being available -#: from this module. In case there are users left ... 
-from werkzeug.debug.repr import debug_repr # noqa - - -# A week -PIN_TIME = 60 * 60 * 24 * 7 - - -def hash_pin(pin): - if isinstance(pin, text_type): - pin = pin.encode('utf-8', 'replace') - return hashlib.md5(pin + b'shittysalt').hexdigest()[:12] - - -_machine_id = None - - -def get_machine_id(): - global _machine_id - rv = _machine_id - if rv is not None: - return rv - - def _generate(): - # Potential sources of secret information on linux. The machine-id - # is stable across boots, the boot id is not - for filename in '/etc/machine-id', '/proc/sys/kernel/random/boot_id': - try: - with open(filename, 'rb') as f: - return f.readline().strip() - except IOError: - continue - - # On OS X we can use the computer's serial number assuming that - # ioreg exists and can spit out that information. - try: - # Also catch import errors: subprocess may not be available, e.g. - # Google App Engine - # See https://github.com/pallets/werkzeug/issues/925 - from subprocess import Popen, PIPE - dump = Popen(['ioreg', '-c', 'IOPlatformExpertDevice', '-d', '2'], - stdout=PIPE).communicate()[0] - match = re.search(b'"serial-number" = <([^>]+)', dump) - if match is not None: - return match.group(1) - except (OSError, ImportError): - pass - - # On Windows we can use winreg to get the machine guid - wr = None - try: - import winreg as wr - except ImportError: - try: - import _winreg as wr - except ImportError: - pass - if wr is not None: - try: - with wr.OpenKey(wr.HKEY_LOCAL_MACHINE, - 'SOFTWARE\\Microsoft\\Cryptography', 0, - wr.KEY_READ | wr.KEY_WOW64_64KEY) as rk: - machineGuid, wrType = wr.QueryValueEx(rk, 'MachineGuid') - if (wrType == wr.REG_SZ): - return machineGuid.encode('utf-8') - else: - return machineGuid - except WindowsError: - pass - - _machine_id = rv = _generate() - return rv - - -class _ConsoleFrame(object): - - """Helper class so that we can reuse the frame console code for the - standalone console. - """ - - def __init__(self, namespace): - self.console = Console(namespace) - self.id = 0 - - -def get_pin_and_cookie_name(app): - """Given an application object this returns a semi-stable 9 digit pin - code and a random key. The hope is that this is stable between - restarts to not make debugging particularly frustrating. If the pin - was forcefully disabled this returns `None`. - - Second item in the resulting tuple is the cookie name for remembering. - """ - pin = os.environ.get('WERKZEUG_DEBUG_PIN') - rv = None - num = None - - # Pin was explicitly disabled - if pin == 'off': - return None, None - - # Pin was provided explicitly - if pin is not None and pin.replace('-', '').isdigit(): - # If there are separators in the pin, return it directly - if '-' in pin: - rv = pin - else: - num = pin - - modname = getattr(app, '__module__', - getattr(app.__class__, '__module__')) - - try: - # `getpass.getuser()` imports the `pwd` module, - # which does not exist in the Google App Engine sandbox. - username = getpass.getuser() - except ImportError: - username = None - - mod = sys.modules.get(modname) - - # This information only exists to make the cookie unique on the - # computer, not as a security feature. - probably_public_bits = [ - username, - modname, - getattr(app, '__name__', getattr(app.__class__, '__name__')), - getattr(mod, '__file__', None), - ] - - # This information is here to make it harder for an attacker to - # guess the cookie name. They are unlikely to be contained anywhere - # within the unauthenticated debug page. 
- private_bits = [ - str(uuid.getnode()), - get_machine_id(), - ] - - h = hashlib.md5() - for bit in chain(probably_public_bits, private_bits): - if not bit: - continue - if isinstance(bit, text_type): - bit = bit.encode('utf-8') - h.update(bit) - h.update(b'cookiesalt') - - cookie_name = '__wzd' + h.hexdigest()[:20] - - # If we need to generate a pin we salt it a bit more so that we don't - # end up with the same value and generate out 9 digits - if num is None: - h.update(b'pinsalt') - num = ('%09d' % int(h.hexdigest(), 16))[:9] - - # Format the pincode in groups of digits for easier remembering if - # we don't have a result yet. - if rv is None: - for group_size in 5, 4, 3: - if len(num) % group_size == 0: - rv = '-'.join(num[x:x + group_size].rjust(group_size, '0') - for x in range(0, len(num), group_size)) - break - else: - rv = num - - return rv, cookie_name - - -class DebuggedApplication(object): - """Enables debugging support for a given application:: - - from werkzeug.debug import DebuggedApplication - from myapp import app - app = DebuggedApplication(app, evalex=True) - - The `evalex` keyword argument allows evaluating expressions in a - traceback's frame context. - - .. versionadded:: 0.9 - The `lodgeit_url` parameter was deprecated. - - :param app: the WSGI application to run debugged. - :param evalex: enable exception evaluation feature (interactive - debugging). This requires a non-forking server. - :param request_key: The key that points to the request object in ths - environment. This parameter is ignored in current - versions. - :param console_path: the URL for a general purpose console. - :param console_init_func: the function that is executed before starting - the general purpose console. The return value - is used as initial namespace. - :param show_hidden_frames: by default hidden traceback frames are skipped. - You can show them by setting this parameter - to `True`. - :param pin_security: can be used to disable the pin based security system. - :param pin_logging: enables the logging of the pin system. - """ - - def __init__(self, app, evalex=False, request_key='werkzeug.request', - console_path='/console', console_init_func=None, - show_hidden_frames=False, lodgeit_url=None, - pin_security=True, pin_logging=True): - if lodgeit_url is not None: - from warnings import warn - warn(DeprecationWarning('Werkzeug now pastes into gists.')) - if not console_init_func: - console_init_func = None - self.app = app - self.evalex = evalex - self.frames = {} - self.tracebacks = {} - self.request_key = request_key - self.console_path = console_path - self.console_init_func = console_init_func - self.show_hidden_frames = show_hidden_frames - self.secret = gen_salt(20) - self._failed_pin_auth = 0 - - self.pin_logging = pin_logging - if pin_security: - # Print out the pin for the debugger on standard out. - if os.environ.get('WERKZEUG_RUN_MAIN') == 'true' and \ - pin_logging: - _log('warning', ' * Debugger is active!') - if self.pin is None: - _log('warning', ' * Debugger PIN disabled. 
' - 'DEBUGGER UNSECURED!') - else: - _log('info', ' * Debugger PIN: %s' % self.pin) - else: - self.pin = None - - def _get_pin(self): - if not hasattr(self, '_pin'): - self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app) - return self._pin - - def _set_pin(self, value): - self._pin = value - - pin = property(_get_pin, _set_pin) - del _get_pin, _set_pin - - @property - def pin_cookie_name(self): - """The name of the pin cookie.""" - if not hasattr(self, '_pin_cookie'): - self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app) - return self._pin_cookie - - def debug_application(self, environ, start_response): - """Run the application and conserve the traceback frames.""" - app_iter = None - try: - app_iter = self.app(environ, start_response) - for item in app_iter: - yield item - if hasattr(app_iter, 'close'): - app_iter.close() - except Exception: - if hasattr(app_iter, 'close'): - app_iter.close() - traceback = get_current_traceback( - skip=1, show_hidden_frames=self.show_hidden_frames, - ignore_system_exceptions=True) - for frame in traceback.frames: - self.frames[frame.id] = frame - self.tracebacks[traceback.id] = traceback - - try: - start_response('500 INTERNAL SERVER ERROR', [ - ('Content-Type', 'text/html; charset=utf-8'), - # Disable Chrome's XSS protection, the debug - # output can cause false-positives. - ('X-XSS-Protection', '0'), - ]) - except Exception: - # if we end up here there has been output but an error - # occurred. in that situation we can do nothing fancy any - # more, better log something into the error log and fall - # back gracefully. - environ['wsgi.errors'].write( - 'Debugging middleware caught exception in streamed ' - 'response at a point where response headers were already ' - 'sent.\n') - else: - is_trusted = bool(self.check_pin_trust(environ)) - yield traceback.render_full(evalex=self.evalex, - evalex_trusted=is_trusted, - secret=self.secret) \ - .encode('utf-8', 'replace') - - traceback.log(environ['wsgi.errors']) - - def execute_command(self, request, command, frame): - """Execute a command in a console.""" - return Response(frame.console.eval(command), mimetype='text/html') - - def display_console(self, request): - """Display a standalone shell.""" - if 0 not in self.frames: - if self.console_init_func is None: - ns = {} - else: - ns = dict(self.console_init_func()) - ns.setdefault('app', self.app) - self.frames[0] = _ConsoleFrame(ns) - is_trusted = bool(self.check_pin_trust(request.environ)) - return Response(render_console_html(secret=self.secret, - evalex_trusted=is_trusted), - mimetype='text/html') - - def paste_traceback(self, request, traceback): - """Paste the traceback and return a JSON response.""" - rv = traceback.paste() - return Response(json.dumps(rv), mimetype='application/json') - - def get_resource(self, request, filename): - """Return a static resource from the shared folder.""" - filename = join(dirname(__file__), 'shared', basename(filename)) - if isfile(filename): - mimetype = mimetypes.guess_type(filename)[0] \ - or 'application/octet-stream' - f = open(filename, 'rb') - try: - return Response(f.read(), mimetype=mimetype) - finally: - f.close() - return Response('Not Found', status=404) - - def check_pin_trust(self, environ): - """Checks if the request passed the pin test. This returns `True` if the - request is trusted on a pin/cookie basis and returns `False` if not. - Additionally if the cookie's stored pin hash is wrong it will return - `None` so that appropriate action can be taken. 
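The DebuggedApplication docstring already shows the wrapping pattern; a slightly fuller sketch with a deliberately failing WSGI app, using the WERKZEUG_DEBUG_PIN=off escape hatch handled by get_pin_and_cookie_name() (development only, never in production):

import os
from werkzeug.debug import DebuggedApplication

os.environ['WERKZEUG_DEBUG_PIN'] = 'off'  # PIN auth disabled; check_pin_trust() then always trusts

def failing_app(environ, start_response):
    raise RuntimeError('boom')  # any unhandled exception renders the interactive traceback page

app = DebuggedApplication(failing_app, evalex=True)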
- """ - if self.pin is None: - return True - val = parse_cookie(environ).get(self.pin_cookie_name) - if not val or '|' not in val: - return False - ts, pin_hash = val.split('|', 1) - if not ts.isdigit(): - return False - if pin_hash != hash_pin(self.pin): - return None - return (time.time() - PIN_TIME) < int(ts) - - def _fail_pin_auth(self): - time.sleep(self._failed_pin_auth > 5 and 5.0 or 0.5) - self._failed_pin_auth += 1 - - def pin_auth(self, request): - """Authenticates with the pin.""" - exhausted = False - auth = False - trust = self.check_pin_trust(request.environ) - - # If the trust return value is `None` it means that the cookie is - # set but the stored pin hash value is bad. This means that the - # pin was changed. In this case we count a bad auth and unset the - # cookie. This way it becomes harder to guess the cookie name - # instead of the pin as we still count up failures. - bad_cookie = False - if trust is None: - self._fail_pin_auth() - bad_cookie = True - - # If we're trusted, we're authenticated. - elif trust: - auth = True - - # If we failed too many times, then we're locked out. - elif self._failed_pin_auth > 10: - exhausted = True - - # Otherwise go through pin based authentication - else: - entered_pin = request.args.get('pin') - if entered_pin.strip().replace('-', '') == \ - self.pin.replace('-', ''): - self._failed_pin_auth = 0 - auth = True - else: - self._fail_pin_auth() - - rv = Response(json.dumps({ - 'auth': auth, - 'exhausted': exhausted, - }), mimetype='application/json') - if auth: - rv.set_cookie(self.pin_cookie_name, '%s|%s' % ( - int(time.time()), - hash_pin(self.pin) - ), httponly=True) - elif bad_cookie: - rv.delete_cookie(self.pin_cookie_name) - return rv - - def log_pin_request(self): - """Log the pin if needed.""" - if self.pin_logging and self.pin is not None: - _log('info', ' * To enable the debugger you need to ' - 'enter the security pin:') - _log('info', ' * Debugger pin code: %s' % self.pin) - return Response('') - - def __call__(self, environ, start_response): - """Dispatch the requests.""" - # important: don't ever access a function here that reads the incoming - # form data! Otherwise the application won't have access to that data - # any more! 
- request = Request(environ) - response = self.debug_application - if request.args.get('__debugger__') == 'yes': - cmd = request.args.get('cmd') - arg = request.args.get('f') - secret = request.args.get('s') - traceback = self.tracebacks.get(request.args.get('tb', type=int)) - frame = self.frames.get(request.args.get('frm', type=int)) - if cmd == 'resource' and arg: - response = self.get_resource(request, arg) - elif cmd == 'paste' and traceback is not None and \ - secret == self.secret: - response = self.paste_traceback(request, traceback) - elif cmd == 'pinauth' and secret == self.secret: - response = self.pin_auth(request) - elif cmd == 'printpin' and secret == self.secret: - response = self.log_pin_request() - elif self.evalex and cmd is not None and frame is not None \ - and self.secret == secret and \ - self.check_pin_trust(environ): - response = self.execute_command(request, cmd, frame) - elif self.evalex and self.console_path is not None and \ - request.path == self.console_path: - response = self.display_console(request) - return response(environ, start_response) diff --git a/pype/vendor/werkzeug/debug/console.py b/pype/vendor/werkzeug/debug/console.py deleted file mode 100644 index 30e89063ff..0000000000 --- a/pype/vendor/werkzeug/debug/console.py +++ /dev/null @@ -1,215 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.debug.console - ~~~~~~~~~~~~~~~~~~~~~~ - - Interactive console support. - - :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. - :license: BSD. -""" -import sys -import code -from types import CodeType - -from werkzeug.utils import escape -from werkzeug.local import Local -from werkzeug.debug.repr import debug_repr, dump, helper - - -_local = Local() - - -class HTMLStringO(object): - - """A StringO version that HTML escapes on write.""" - - def __init__(self): - self._buffer = [] - - def isatty(self): - return False - - def close(self): - pass - - def flush(self): - pass - - def seek(self, n, mode=0): - pass - - def readline(self): - if len(self._buffer) == 0: - return '' - ret = self._buffer[0] - del self._buffer[0] - return ret - - def reset(self): - val = ''.join(self._buffer) - del self._buffer[:] - return val - - def _write(self, x): - if isinstance(x, bytes): - x = x.decode('utf-8', 'replace') - self._buffer.append(x) - - def write(self, x): - self._write(escape(x)) - - def writelines(self, x): - self._write(escape(''.join(x))) - - -class ThreadedStream(object): - - """Thread-local wrapper for sys.stdout for the interactive console.""" - - def push(): - if not isinstance(sys.stdout, ThreadedStream): - sys.stdout = ThreadedStream() - _local.stream = HTMLStringO() - push = staticmethod(push) - - def fetch(): - try: - stream = _local.stream - except AttributeError: - return '' - return stream.reset() - fetch = staticmethod(fetch) - - def displayhook(obj): - try: - stream = _local.stream - except AttributeError: - return _displayhook(obj) - # stream._write bypasses escaping as debug_repr is - # already generating HTML for us. 
-        if obj is not None:
-            _local._current_ipy.locals['_'] = obj
-            stream._write(debug_repr(obj))
-    displayhook = staticmethod(displayhook)
-
-    def __setattr__(self, name, value):
-        raise AttributeError('read only attribute %s' % name)
-
-    def __dir__(self):
-        return dir(sys.__stdout__)
-
-    def __getattribute__(self, name):
-        if name == '__members__':
-            return dir(sys.__stdout__)
-        try:
-            stream = _local.stream
-        except AttributeError:
-            stream = sys.__stdout__
-        return getattr(stream, name)
-
-    def __repr__(self):
-        return repr(sys.__stdout__)
-
-
-# add the threaded stream as display hook
-_displayhook = sys.displayhook
-sys.displayhook = ThreadedStream.displayhook
-
-
-class _ConsoleLoader(object):
-
-    def __init__(self):
-        self._storage = {}
-
-    def register(self, code, source):
-        self._storage[id(code)] = source
-        # register code objects of wrapped functions too.
-        for var in code.co_consts:
-            if isinstance(var, CodeType):
-                self._storage[id(var)] = source
-
-    def get_source_by_code(self, code):
-        try:
-            return self._storage[id(code)]
-        except KeyError:
-            pass
-
-
-def _wrap_compiler(console):
-    compile = console.compile
-
-    def func(source, filename, symbol):
-        code = compile(source, filename, symbol)
-        console.loader.register(code, source)
-        return code
-    console.compile = func
-
-
-class _InteractiveConsole(code.InteractiveInterpreter):
-
-    def __init__(self, globals, locals):
-        code.InteractiveInterpreter.__init__(self, locals)
-        self.globals = dict(globals)
-        self.globals['dump'] = dump
-        self.globals['help'] = helper
-        self.globals['__loader__'] = self.loader = _ConsoleLoader()
-        self.more = False
-        self.buffer = []
-        _wrap_compiler(self)
-
-    def runsource(self, source):
-        source = source.rstrip() + '\n'
-        ThreadedStream.push()
-        prompt = self.more and '... ' or '>>> '
-        try:
-            source_to_eval = ''.join(self.buffer + [source])
-            if code.InteractiveInterpreter.runsource(self,
-               source_to_eval, '<debugger>', 'single'):
-                self.more = True
-                self.buffer.append(source)
-            else:
-                self.more = False
-                del self.buffer[:]
-        finally:
-            output = ThreadedStream.fetch()
-        return prompt + escape(source) + output
-
-    def runcode(self, code):
-        try:
-            eval(code, self.globals, self.locals)
-        except Exception:
-            self.showtraceback()
-
-    def showtraceback(self):
-        from werkzeug.debug.tbtools import get_current_traceback
-        tb = get_current_traceback(skip=1)
-        sys.stdout._write(tb.render_summary())
-
-    def showsyntaxerror(self, filename=None):
-        from werkzeug.debug.tbtools import get_current_traceback
-        tb = get_current_traceback(skip=4)
-        sys.stdout._write(tb.render_summary())
-
-    def write(self, data):
-        sys.stdout.write(data)
-
-
-class Console(object):
-
-    """An interactive console."""
-
-    def __init__(self, globals=None, locals=None):
-        if locals is None:
-            locals = {}
-        if globals is None:
-            globals = {}
-        self._ipy = _InteractiveConsole(globals, locals)
-
-    def eval(self, code):
-        _local._current_ipy = self._ipy
-        old_sys_stdout = sys.stdout
-        try:
-            return self._ipy.runsource(code)
-        finally:
-            sys.stdout = old_sys_stdout
diff --git a/pype/vendor/werkzeug/debug/repr.py b/pype/vendor/werkzeug/debug/repr.py
deleted file mode 100644
index 0a024576e0..0000000000
--- a/pype/vendor/werkzeug/debug/repr.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    werkzeug.debug.repr
-    ~~~~~~~~~~~~~~~~~~~
-
-    This module implements object representations for debugging purposes.
-    Unlike the default repr these reprs expose a lot more information and
-    produce HTML instead of ASCII.
-
-    Together with the CSS and JavaScript files of the debugger this gives
-    a colorful and more compact output.
-
-    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
-    :license: BSD.
-"""
-import sys
-import re
-import codecs
-from traceback import format_exception_only
-try:
-    from collections import deque
-except ImportError:  # pragma: no cover
-    deque = None
-from werkzeug.utils import escape
-from werkzeug._compat import iteritems, PY2, text_type, integer_types, \
-    string_types
-
-
-missing = object()
-_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
-RegexType = type(_paragraph_re)
-
-
-HELP_HTML = '''\
-<div class=box>
-  <h3>%(title)s</h3>
-  <pre class=help>%(text)s</pre>
-</div>\
-'''
-OBJECT_DUMP_HTML = '''\
-<div class=box>
-  <h3>%(title)s</h3>
-  %(repr)s
-  <table>%(items)s</table>
-</div>\
-'''
-
-
-def debug_repr(obj):
-    """Creates a debug repr of an object as HTML unicode string."""
-    return DebugReprGenerator().repr(obj)
-
-
-def dump(obj=missing):
-    """Print the object details to stdout._write (for the interactive
-    console of the web debugger.
-    """
-    gen = DebugReprGenerator()
-    if obj is missing:
-        rv = gen.dump_locals(sys._getframe(1).f_locals)
-    else:
-        rv = gen.dump_object(obj)
-    sys.stdout._write(rv)
-
-
-class _Helper(object):
-
-    """Displays an HTML version of the normal help, for the interactive
-    debugger only because it requires a patched sys.stdout.
-    """
-
-    def __repr__(self):
-        return 'Type help(object) for help about object.'
-
-    def __call__(self, topic=None):
-        if topic is None:
-            sys.stdout._write('<span class=help>%s</span>' % repr(self))
-            return
-        import pydoc
-        pydoc.help(topic)
-        rv = sys.stdout.reset()
-        if isinstance(rv, bytes):
-            rv = rv.decode('utf-8', 'ignore')
-        paragraphs = _paragraph_re.split(rv)
-        if len(paragraphs) > 1:
-            title = paragraphs[0]
-            text = '\n\n'.join(paragraphs[1:])
-        else:  # pragma: no cover
-            title = 'Help'
-            text = paragraphs[0]
-        sys.stdout._write(HELP_HTML % {'title': title, 'text': text})
-
-
-helper = _Helper()
-
-
-def _add_subclass_info(inner, obj, base):
-    if isinstance(base, tuple):
-        for base in base:
-            if type(obj) is base:
-                return inner
-    elif type(obj) is base:
-        return inner
-    module = ''
-    if obj.__class__.__module__ not in ('__builtin__', 'exceptions'):
-        module = '<span class="module">%s.</span>' % obj.__class__.__module__
-    return '%s%s(%s)' % (module, obj.__class__.__name__, inner)
-
-
-class DebugReprGenerator(object):
-
-    def __init__(self):
-        self._stack = []
-
-    def _sequence_repr_maker(left, right, base=object(), limit=8):
-        def proxy(self, obj, recursive):
-            if recursive:
-                return _add_subclass_info(left + '...' + right, obj, base)
-            buf = [left]
-            have_extended_section = False
-            for idx, item in enumerate(obj):
-                if idx:
-                    buf.append(', ')
-                if idx == limit:
-                    buf.append('<span class="extended">')
-                    have_extended_section = True
-                buf.append(self.repr(item))
-            if have_extended_section:
-                buf.append('</span>')
-            buf.append(right)
-            return _add_subclass_info(u''.join(buf), obj, base)
-        return proxy
-
-    list_repr = _sequence_repr_maker('[', ']', list)
-    tuple_repr = _sequence_repr_maker('(', ')', tuple)
-    set_repr = _sequence_repr_maker('set([', '])', set)
-    frozenset_repr = _sequence_repr_maker('frozenset([', '])', frozenset)
-    if deque is not None:
-        deque_repr = _sequence_repr_maker('collections.'
-                                          'deque([', '])', deque)
-    del _sequence_repr_maker
-
-    def regex_repr(self, obj):
-        pattern = repr(obj.pattern)
-        if PY2:
-            pattern = pattern.decode('string-escape', 'ignore')
-        else:
-            pattern = codecs.decode(pattern, 'unicode-escape', 'ignore')
-        if pattern[:1] == 'u':
-            pattern = 'ur' + pattern[1:]
-        else:
-            pattern = 'r' + pattern
-        return u're.compile(<span class="string regex">%s</span>)' % pattern
-
-    def string_repr(self, obj, limit=70):
-        buf = ['<span class="string">']
-        a = repr(obj[:limit])
-        b = repr(obj[limit:])
-        if isinstance(obj, text_type) and PY2:
-            buf.append('u')
-            a = a[1:]
-            b = b[1:]
-        if b != "''":
-            buf.extend((escape(a[:-1]), '<span class="extended">',
-                        escape(b[1:]), '</span>'))
-        else:
-            buf.append(escape(a))
-        buf.append('</span>')
-        return _add_subclass_info(u''.join(buf), obj, (bytes, text_type))
-
-    def dict_repr(self, d, recursive, limit=5):
-        if recursive:
-            return _add_subclass_info(u'{...}', d, dict)
-        buf = ['{']
-        have_extended_section = False
-        for idx, (key, value) in enumerate(iteritems(d)):
-            if idx:
-                buf.append(', ')
-            if idx == limit - 1:
-                buf.append('<span class="extended">')
-                have_extended_section = True
-            buf.append('<span class="pair"><span class="key">%s</span>: '
-                       '<span class="value">%s</span></span>' %
-                       (self.repr(key), self.repr(value)))
-        if have_extended_section:
-            buf.append('</span>')
-        buf.append('}')
-        return _add_subclass_info(u''.join(buf), d, dict)
-
-    def object_repr(self, obj):
-        r = repr(obj)
-        if PY2:
-            r = r.decode('utf-8', 'replace')
-        return u'<span class="object">%s</span>' % escape(r)
-
-    def dispatch_repr(self, obj, recursive):
-        if obj is helper:
-            return u'<span class="help">%r</span>' % helper
-        if isinstance(obj, (integer_types, float, complex)):
-            return u'<span class="number">%r</span>' % obj
-        if isinstance(obj, string_types):
-            return self.string_repr(obj)
-        if isinstance(obj, RegexType):
-            return self.regex_repr(obj)
-        if isinstance(obj, list):
-            return self.list_repr(obj, recursive)
-        if isinstance(obj, tuple):
-            return self.tuple_repr(obj, recursive)
-        if isinstance(obj, set):
-            return self.set_repr(obj, recursive)
-        if isinstance(obj, frozenset):
-            return self.frozenset_repr(obj, recursive)
-        if isinstance(obj, dict):
-            return self.dict_repr(obj, recursive)
-        if deque is not None and isinstance(obj, deque):
-            return self.deque_repr(obj, recursive)
-        return self.object_repr(obj)
-
-    def fallback_repr(self):
-        try:
-            info = ''.join(format_exception_only(*sys.exc_info()[:2]))
-        except Exception:  # pragma: no cover
-            info = '?'
-        if PY2:
-            info = info.decode('utf-8', 'ignore')
-        return u'<span class="brokenrepr">&lt;broken repr (%s)&gt;' \
-               u'</span>' % escape(info.strip())
-
-    def repr(self, obj):
-        recursive = False
-        for item in self._stack:
-            if item is obj:
-                recursive = True
-                break
-        self._stack.append(obj)
-        try:
-            try:
-                return self.dispatch_repr(obj, recursive)
-            except Exception:
-                return self.fallback_repr()
-        finally:
-            self._stack.pop()
-
-    def dump_object(self, obj):
-        repr = items = None
-        if isinstance(obj, dict):
-            title = 'Contents of'
-            items = []
-            for key, value in iteritems(obj):
-                if not isinstance(key, string_types):
-                    items = None
-                    break
-                items.append((key, self.repr(value)))
-        if items is None:
-            items = []
-            repr = self.repr(obj)
-            for key in dir(obj):
-                try:
-                    items.append((key, self.repr(getattr(obj, key))))
-                except Exception:
-                    pass
-            title = 'Details for'
-        title += ' ' + object.__repr__(obj)[1:-1]
-        return self.render_object_dump(items, title, repr)
-
-    def dump_locals(self, d):
-        items = [(key, self.repr(value)) for key, value in d.items()]
-        return self.render_object_dump(items, 'Local variables in frame')
-
-    def render_object_dump(self, items, title, repr=None):
-        html_items = []
-        for key, value in items:
-            html_items.append('<tr><th>%s<td><pre class=repr>%s</pre>' %
-                              (escape(key), value))
-        if not html_items:
-            html_items.append('<tr><td><em>Nothing</em>')
-        return OBJECT_DUMP_HTML % {
-            'title': escape(title),
-            'repr': repr and '<pre class=repr>%s</pre>
' % repr or '', - 'items': '\n'.join(html_items) - } diff --git a/pype/vendor/werkzeug/debug/shared/FONT_LICENSE b/pype/vendor/werkzeug/debug/shared/FONT_LICENSE deleted file mode 100644 index ae78a8f94e..0000000000 --- a/pype/vendor/werkzeug/debug/shared/FONT_LICENSE +++ /dev/null @@ -1,96 +0,0 @@ -------------------------------- -UBUNTU FONT LICENCE Version 1.0 -------------------------------- - -PREAMBLE -This licence allows the licensed fonts to be used, studied, modified and -redistributed freely. The fonts, including any derivative works, can be -bundled, embedded, and redistributed provided the terms of this licence -are met. The fonts and derivatives, however, cannot be released under -any other licence. The requirement for fonts to remain under this -licence does not require any document created using the fonts or their -derivatives to be published under this licence, as long as the primary -purpose of the document is not to be a vehicle for the distribution of -the fonts. - -DEFINITIONS -"Font Software" refers to the set of files released by the Copyright -Holder(s) under this licence and clearly marked as such. This may -include source files, build scripts and documentation. - -"Original Version" refers to the collection of Font Software components -as received under this licence. - -"Modified Version" refers to any derivative made by adding to, deleting, -or substituting -- in part or in whole -- any of the components of the -Original Version, by changing formats or by porting the Font Software to -a new environment. - -"Copyright Holder(s)" refers to all individuals and companies who have a -copyright ownership of the Font Software. - -"Substantially Changed" refers to Modified Versions which can be easily -identified as dissimilar to the Font Software by users of the Font -Software comparing the Original Version with the Modified Version. - -To "Propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification and with or without charging -a redistribution fee), making available to the public, and in some -countries other activities as well. - -PERMISSION & CONDITIONS -This licence does not grant any rights under trademark law and all such -rights are reserved. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of the Font Software, to propagate the Font Software, subject to -the below conditions: - -1) Each copy of the Font Software must contain the above copyright -notice and this licence. These can be included either as stand-alone -text files, human-readable headers or in the appropriate machine- -readable metadata fields within text or binary files as long as those -fields can be easily viewed by the user. - -2) The font name complies with the following: -(a) The Original Version must retain its name, unmodified. -(b) Modified Versions which are Substantially Changed must be renamed to -avoid use of the name of the Original Version or similar names entirely. -(c) Modified Versions which are not Substantially Changed must be -renamed to both (i) retain the name of the Original Version and (ii) add -additional naming elements to distinguish the Modified Version from the -Original Version. 
The name of such Modified Versions must be the name of
-the Original Version, with "derivative X" where X represents the name of
-the new work, appended to that name.
-
-3) The name(s) of the Copyright Holder(s) and any contributor to the
-Font Software shall not be used to promote, endorse or advertise any
-Modified Version, except (i) as required by this licence, (ii) to
-acknowledge the contribution(s) of the Copyright Holder(s) or (iii) with
-their explicit written permission.
-
-4) The Font Software, modified or unmodified, in part or in whole, must
-be distributed entirely under this licence, and must not be distributed
-under any other licence. The requirement for fonts to remain under this
-licence does not affect any document created using the Font Software,
-except any version of the Font Software extracted from a document
-created using the Font Software may only be distributed under this
-licence.
-
-TERMINATION
-This licence becomes null and void if any of the above conditions are
-not met.
-
-DISCLAIMER
-THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
-COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
-DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER
-DEALINGS IN THE FONT SOFTWARE.
diff --git a/pype/vendor/werkzeug/debug/shared/console.png b/pype/vendor/werkzeug/debug/shared/console.png
deleted file mode 100644
index c28dd63812..0000000000
Binary files a/pype/vendor/werkzeug/debug/shared/console.png and /dev/null differ
diff --git a/pype/vendor/werkzeug/debug/shared/debugger.js b/pype/vendor/werkzeug/debug/shared/debugger.js
deleted file mode 100644
index 534019dc41..0000000000
--- a/pype/vendor/werkzeug/debug/shared/debugger.js
+++ /dev/null
@@ -1,205 +0,0 @@
-$(function() {
-  if (!EVALEX_TRUSTED) {
-    initPinBox();
-  }
-
-  /**
-   * if we are in console mode, show the console.
-   */
-  if (CONSOLE_MODE && EVALEX) {
-    openShell(null, $('div.console div.inner').empty(), 0);
-  }
-
-  $('div.traceback div.frame').each(function() {
-    var
-      target = $('pre', this),
-      consoleNode = null,
-      frameID = this.id.substring(6);
-
-    target.click(function() {
-      $(this).parent().toggleClass('expanded');
-    });
-
-    /**
-     * Add an interactive console to the frames
-     */
-    if (EVALEX && target.is('.current')) {
-      $('<img src="?__debugger__=yes&cmd=resource&f=console.png">')
-        .attr('title', 'Open an interactive python shell in this frame')
-        .click(function() {
-          consoleNode = openShell(consoleNode, target, frameID);
-          return false;
-        })
-        .prependTo(target);
-    }
-  });
-
-  /**
-   * toggle traceback types on click.
-   */
-  $('h2.traceback').click(function() {
-    $(this).next().slideToggle('fast');
-    $('div.plain').slideToggle('fast');
-  }).css('cursor', 'pointer');
-  $('div.plain').hide();
-
-  /**
-   * Add extra info (this is here so that only users with JavaScript
-   * enabled see it.)
-   */
-  $('span.nojavascript')
-    .removeClass('nojavascript')
-    .html('<p>To switch between the interactive traceback and the plaintext ' +
-          'one, you can click on the "Traceback" headline. From the text ' +
-          'traceback you can also create a paste of it. ' + (!EVALEX ? '' :
-          'For code execution mouse-over the frame you want to debug and ' +
-          'click on the console icon on the right side.' +
-          '<p>You can execute arbitrary Python code in the stack frames and ' +
-          'there are some extra helpers available for introspection:' +
-          '<ul><li><code>dump()</code> shows all variables in the frame' +
-          '<li><code>dump(obj)</code> dumps all that\'s known about the object</ul>'));
-
-  /**
-   * Add the pastebin feature
-   */
-  $('div.plain form')
-    .submit(function() {
-      var label = $('input[type="submit"]', this);
-      var old_val = label.val();
-      label.val('submitting...');
-      $.ajax({
-        dataType: 'json',
-        url: document.location.pathname,
-        data: {__debugger__: 'yes', tb: TRACEBACK, cmd: 'paste',
-               s: SECRET},
-        success: function(data) {
-          $('div.plain span.pastemessage')
-            .removeClass('pastemessage')
-            .text('Paste created: ')
-            .append($('<a>#' + data.id + '</a>').attr('href', data.url));
-        },
-        error: function() {
-          alert('Error: Could not submit paste. No network connection?');
-          label.val(old_val);
-        }
-      });
-      return false;
-    });
-
-  // if we have javascript we submit by ajax anyways, so no need for the
-  // not scaling textarea.
-  var plainTraceback = $('div.plain textarea');
-  plainTraceback.replaceWith($('<pre>').text(plainTraceback.text()));
-});
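
Note on the deleted ready-handler above: every AJAX call in this front-end goes back to the page's own URL with `__debugger__=yes` plus the shared `SECRET`, and the deleted `DebuggedApplication.__call__` (earlier in this diff) routes on the `cmd` parameter. A minimal, self-contained Python sketch of that routing, simplified from the deleted code; `classify_debugger_request` and the bare `args` dict are illustrative stand-ins, not pype or werkzeug API:

    # Sketch only: condensed from the deleted DebuggedApplication.__call__.
    # "args" stands for the parsed query string of the incoming request.
    def classify_debugger_request(args, secret, evalex=True):
        """Return which debugger action a query string maps to, or None."""
        if args.get('__debugger__') != 'yes':
            return None                  # ordinary application traffic
        cmd = args.get('cmd')
        if cmd == 'resource' and args.get('f'):
            return 'resource'            # static assets; no secret required
        if args.get('s') != secret:
            return None                  # shared-secret mismatch: ignore
        if cmd in ('paste', 'pinauth', 'printpin'):
            return cmd                   # JSON endpoints used by this script
        if evalex and cmd is not None:
            return 'eval'                # execute cmd in a frame console
        return None


    assert classify_debugger_request(
        {'__debugger__': 'yes', 'cmd': 'pinauth', 's': 'abc'}, 'abc') == 'pinauth'

Only `cmd=resource` is served before the secret check in the deleted code, which is why static assets such as console.png can load prior to pin authentication.
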
-
-function initPinBox() {
-  $('.pin-prompt form').submit(function(evt) {
-    evt.preventDefault();
-    var pin = this.pin.value;
-    var btn = this.btn;
-    btn.disabled = true;
-    $.ajax({
-      dataType: 'json',
-      url: document.location.pathname,
-      data: {__debugger__: 'yes', cmd: 'pinauth', pin: pin,
-             s: SECRET},
-      success: function(data) {
-        btn.disabled = false;
-        if (data.auth) {
-          EVALEX_TRUSTED = true;
-          $('.pin-prompt').fadeOut();
-        } else {
-          if (data.exhausted) {
-            alert('Error: too many attempts.  Restart server to retry.');
-          } else {
-            alert('Error: incorrect pin');
-          }
-        }
-        console.log(data);
-      },
-      error: function() {
-        btn.disabled = false;
-        alert('Error: Could not verify PIN.  Network error?');
-      }
-    });
-  });
-}
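
Note: a successful `pinauth` round-trip from `initPinBox()` ends with the server setting a cookie of the form `"<timestamp>|<hash_pin(pin)>"` (see `pin_auth` in the deleted `debug/__init__.py` above). A runnable sketch of the matching trust check, following the deleted `check_pin_trust`; `hash_pin` here is an illustrative stand-in for werkzeug's real, machine-specific implementation:

    import hashlib
    import time

    PIN_TIME = 60 * 60 * 24 * 7  # cookie validity window, as in the deleted code


    def hash_pin(pin):  # stand-in; the real hash mixes in server-side state
        return hashlib.md5(pin.encode('utf-8') + b'pinsalt').hexdigest()[:12]


    def cookie_trust(cookie_value, pin):
        """True = trusted, False = absent/expired, None = stale hash."""
        if not cookie_value or '|' not in cookie_value:
            return False
        ts, pin_hash = cookie_value.split('|', 1)
        if not ts.isdigit():
            return False
        if pin_hash != hash_pin(pin):
            return None                  # PIN changed: counted as a failure
        return (time.time() - PIN_TIME) < int(ts)


    fresh = '%d|%s' % (int(time.time()), hash_pin('123-456-789'))
    assert cookie_trust(fresh, '123-456-789') is True

The three-valued result matters: per the comments in the deleted `pin_auth`, a stale hash (`None`) is counted as a failed attempt and the cookie is deleted, so guessing the cookie name is no cheaper than guessing the PIN itself.
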
-
-function promptForPin() {
-  if (!EVALEX_TRUSTED) {
-    $.ajax({
-      url: document.location.pathname,
-      data: {__debugger__: 'yes', cmd: 'printpin', s: SECRET}
-    });
-    $('.pin-prompt').fadeIn(function() {
-      $('.pin-prompt input[name="pin"]').focus();
-    });
-  }
-}
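
Note: `promptForPin()` only asks the server to print the PIN to its console (`cmd=printpin`); verification happens server-side with escalating delays. A self-contained sketch condensed from the deleted `_fail_pin_auth` and `pin_auth` methods; the class name `PinGate` is illustrative:

    import time


    class PinGate(object):
        def __init__(self, pin):
            self.pin = pin.replace('-', '')
            self.failed = 0

        def attempt(self, entered):
            if self.failed > 10:
                return 'exhausted'       # locked out until server restart
            if entered.strip().replace('-', '') == self.pin:
                self.failed = 0
                return 'auth'
            # 0.5s per miss, rising to 5s once more than five have failed
            time.sleep(5.0 if self.failed > 5 else 0.5)
            self.failed += 1
            return 'denied'


    gate = PinGate('123-456-789')
    assert gate.attempt('123456789') == 'auth'
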
-
-
-/**
- * Helper function for shell initialization
- */
-function openShell(consoleNode, target, frameID) {
-  promptForPin();
-  if (consoleNode)
-    return consoleNode.slideToggle('fast');
-  consoleNode = $('<div class="console">')
-    .appendTo(target.parent())
-    .hide()
-  var historyPos = 0, history = [''];
-  var output = $('<div class="output">[console ready]</div>')
-    .appendTo(consoleNode);
-  var form = $('<form>&gt;&gt;&gt; </form>')
-    .submit(function() {
-      var cmd = command.val();
-      $.get('', {
-          __debugger__: 'yes', cmd: cmd, frm: frameID, s: SECRET}, function(data) {
-        var tmp = $('<div>
').html(data); - $('span.extended', tmp).each(function() { - var hidden = $(this).wrap('').hide(); - hidden - .parent() - .append($('  ') - .click(function() { - hidden.toggle(); - $(this).toggleClass('open') - return false; - })); - }); - output.append(tmp); - command.focus(); - consoleNode.scrollTop(consoleNode.get(0).scrollHeight); - var old = history.pop(); - history.push(cmd); - if (typeof old != 'undefined') - history.push(old); - historyPos = history.length - 1; - }); - command.val(''); - return false; - }). - appendTo(consoleNode); - - var command = $('') - .appendTo(form) - .keydown(function(e) { - if (e.charCode == 100 && e.ctrlKey) { - output.text('--- screen cleared ---'); - return false; - } - else if (e.charCode == 0 && (e.keyCode == 38 || e.keyCode == 40)) { - if (e.keyCode == 38 && historyPos > 0) - historyPos--; - else if (e.keyCode == 40 && historyPos < history.length) - historyPos++; - command.val(history[historyPos]); - return false; - } - }); - - return consoleNode.slideDown('fast', function() { - command.focus(); - }); -} diff --git a/pype/vendor/werkzeug/debug/shared/jquery.js b/pype/vendor/werkzeug/debug/shared/jquery.js deleted file mode 100644 index 0f60b7bd0d..0000000000 --- a/pype/vendor/werkzeug/debug/shared/jquery.js +++ /dev/null @@ -1,5 +0,0 @@ -/*! jQuery v1.11.3 | (c) 2005, 2015 jQuery Foundation, Inc. | jquery.org/license */ -!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.3",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)+1>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var 
b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b="length"in a&&a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+1*new Date,v=a.document,w=0,x=0,y=ha(),z=ha(),A=ha(),B=function(a,b){return a===b&&(l=!0),0},C=1<<31,D={}.hasOwnProperty,E=[],F=E.pop,G=E.push,H=E.push,I=E.slice,J=function(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return c;return-1},K="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",L="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",N=M.replace("w","w#"),O="\\["+L+"*("+M+")(?:"+L+"*([*^$|!~]?=)"+L+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+N+"))|)"+L+"*\\]",P=":("+M+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+O+")*)|.*)\\)|)",Q=new RegExp(L+"+","g"),R=new RegExp("^"+L+"+|((?:^|[^\\\\])(?:\\\\.)*)"+L+"+$","g"),S=new RegExp("^"+L+"*,"+L+"*"),T=new RegExp("^"+L+"*([>+~]|"+L+")"+L+"*"),U=new RegExp("="+L+"*([^\\]'\"]*?)"+L+"*\\]","g"),V=new RegExp(P),W=new RegExp("^"+N+"$"),X={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+O),PSEUDO:new RegExp("^"+P),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+L+"*(even|odd|(([+-]|)(\\d*)n|)"+L+"*(?:([+-]|)"+L+"*(\\d+)|))"+L+"*\\)|)","i"),bool:new 
RegExp("^(?:"+K+")$","i"),needsContext:new RegExp("^"+L+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+L+"*((?:-\\d)?\\d*)"+L+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,aa=/[+~]/,ba=/'|\\/g,ca=new RegExp("\\\\([\\da-f]{1,6}"+L+"?|("+L+")|.)","ig"),da=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)},ea=function(){m()};try{H.apply(E=I.call(v.childNodes),v.childNodes),E[v.childNodes.length].nodeType}catch(fa){H={apply:E.length?function(a,b){G.apply(a,I.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function ga(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],k=b.nodeType,"string"!=typeof a||!a||1!==k&&9!==k&&11!==k)return d;if(!e&&p){if(11!==k&&(f=_.exec(a)))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return H.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName)return H.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=1!==k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(ba,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+ra(o[l]);w=aa.test(a)&&pa(b.parentNode)||b,x=o.join(",")}if(x)try{return H.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function ha(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function ia(a){return a[u]=!0,a}function ja(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function ka(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function la(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||C)-(~a.sourceIndex||C);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function ma(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function na(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function oa(a){return ia(function(b){return b=+b,ia(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function pa(a){return a&&"undefined"!=typeof a.getElementsByTagName&&a}c=ga.support={},f=ga.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=ga.setDocument=function(a){var b,e,g=a?a.ownerDocument||a:v;return g!==n&&9===g.nodeType&&g.documentElement?(n=g,o=g.documentElement,e=g.defaultView,e&&e!==e.top&&(e.addEventListener?e.addEventListener("unload",ea,!1):e.attachEvent&&e.attachEvent("onunload",ea)),p=!f(g),c.attributes=ja(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ja(function(a){return a.appendChild(g.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(g.getElementsByClassName),c.getById=ja(function(a){return o.appendChild(a).id=u,!g.getElementsByName||!g.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if("undefined"!=typeof b.getElementById&&p){var c=b.getElementById(a);return 
c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(ca,da);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(ca,da);return function(a){var c="undefined"!=typeof a.getAttributeNode&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return"undefined"!=typeof b.getElementsByTagName?b.getElementsByTagName(a):c.qsa?b.querySelectorAll(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(g.querySelectorAll))&&(ja(function(a){o.appendChild(a).innerHTML="",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ja(function(a){var b=g.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ja(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",P)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===g||a.ownerDocument===v&&t(v,a)?-1:b===g||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,h=[a],i=[b];if(!e||!f)return a===g?-1:b===g?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return la(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?la(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},g):n},ga.matches=function(a,b){return ga(a,null,null,b)},ga.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return ga(b,n,null,[a]).length>0},ga.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},ga.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},ga.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},ga.uniqueSort=function(a){var 
b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=ga.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=ga.selectors={cacheLength:50,createPseudo:ia,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ca,da),a[3]=(a[3]||a[4]||a[5]||"").replace(ca,da),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||ga.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&ga.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ca,da).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=ga.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(Q," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||ga.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ia(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ia(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?ia(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ia(function(a){return function(b){return ga(a,b).length>0}}),contains:ia(function(a){return a=a.replace(ca,da),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ia(function(a){return 
W.test(a||"")||ga.error("unsupported lang: "+a),a=a.replace(ca,da).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:oa(function(){return[0]}),last:oa(function(a,b){return[b-1]}),eq:oa(function(a,b,c){return[0>c?c+b:c]}),even:oa(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:oa(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:oa(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:oa(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function sa(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function ta(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ua(a,b,c){for(var d=0,e=b.length;e>d;d++)ga(a,b[d],c);return c}function va(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function wa(a,b,c,d,e,f){return d&&!d[u]&&(d=wa(d)),e&&!e[u]&&(e=wa(e,f)),ia(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ua(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:va(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=va(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=va(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function xa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=sa(function(a){return a===b},h,!0),l=sa(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[sa(ta(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return wa(i>1&&ta(m),i>1&&ra(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&xa(a.slice(i,e)),f>e&&xa(a=a.slice(e)),f>e&&ra(a))}m.push(c)}return ta(m)}function ya(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var 
l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=F.call(i));s=va(s)}H.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&ga.uniqueSort(i)}return k&&(w=v,j=t),r};return c?ia(f):f}return h=ga.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=xa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,ya(e,d)),f.selector=a}return f},i=ga.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ca,da),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ca,da),aa.test(j[0].type)&&pa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&ra(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,aa.test(a)&&pa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ja(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ja(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||ka("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ja(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ka("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ja(function(a){return null==a.getAttribute("disabled")})||ka(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),ga}(a);m.find=s,m.expr=s.selectors,m.expr[":"]=m.expr.pseudos,m.unique=s.uniqueSort,m.text=s.getText,m.isXMLDoc=s.isXML,m.contains=s.contains;var t=m.expr.match.needsContext,u=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,v=/^.[^:#\[\.,]*$/;function w(a,b,c){if(m.isFunction(b))return m.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return m.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(v.test(b))return m.filter(b,a,c);b=m.filter(b,a)}return m.grep(a,function(a){return m.inArray(a,b)>=0!==c})}m.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?m.find.matchesSelector(d,a)?[d]:[]:m.find.matches(a,m.grep(b,function(a){return 1===a.nodeType}))},m.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(m(a).filter(function(){for(b=0;e>b;b++)if(m.contains(d[b],this))return!0}));for(b=0;e>b;b++)m.find(a,d[b],c);return c=this.pushStack(e>1?m.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(w(this,a||[],!1))},not:function(a){return this.pushStack(w(this,a||[],!0))},is:function(a){return!!w(this,"string"==typeof a&&t.test(a)?m(a):a||[],!1).length}});var x,y=a.document,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=m.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof 
a){if(c="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||x).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof m?b[0]:b,m.merge(this,m.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:y,!0)),u.test(c[1])&&m.isPlainObject(b))for(c in b)m.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}if(d=y.getElementById(c[2]),d&&d.parentNode){if(d.id!==c[2])return x.find(a);this.length=1,this[0]=d}return this.context=y,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):m.isFunction(a)?"undefined"!=typeof x.ready?x.ready(a):a(m):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),m.makeArray(a,this))};A.prototype=m.fn,x=m(y);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};m.extend({dir:function(a,b,c){var d=[],e=a[b];while(e&&9!==e.nodeType&&(void 0===c||1!==e.nodeType||!m(e).is(c)))1===e.nodeType&&d.push(e),e=e[b];return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),m.fn.extend({has:function(a){var b,c=m(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(m.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=t.test(a)||"string"!=typeof a?m(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&m.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?m.unique(f):f)},index:function(a){return a?"string"==typeof a?m.inArray(this[0],m(a)):m.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(m.unique(m.merge(this.get(),m(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}m.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return m.dir(a,"parentNode")},parentsUntil:function(a,b,c){return m.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return m.dir(a,"nextSibling")},prevAll:function(a){return m.dir(a,"previousSibling")},nextUntil:function(a,b,c){return m.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return m.dir(a,"previousSibling",c)},siblings:function(a){return m.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return m.sibling(a.firstChild)},contents:function(a){return m.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:m.merge([],a.childNodes)}},function(a,b){m.fn[a]=function(c,d){var e=m.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=m.filter(d,e)),this.length>1&&(C[a]||(e=m.unique(e)),B.test(a)&&(e=e.reverse())),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return m.each(a.match(E)||[],function(a,c){b[c]=!0}),b}m.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):m.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(c=a.memory&&l,d=!0,f=g||0,g=0,e=h.length,b=!0;h&&e>f;f++)if(h[f].apply(l[0],l[1])===!1&&a.stopOnFalse){c=!1;break}b=!1,h&&(i?i.length&&j(i.shift()):c?h=[]:k.disable())},k={add:function(){if(h){var d=h.length;!function f(b){m.each(b,function(b,c){var d=m.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&f(c)})}(arguments),b?e=h.length:c&&(g=d,j(c))}return 
[vendored minified jQuery 1.11.x bundle (Callbacks/Deferred, ready, data/queue, event, and DOM-manipulation modules). The extracted text of this added file is corrupted: every HTML fragment inside the script's string literals and regular expressions, e.g. the wrapMap entries "<table>" and "<select multiple='multiple'>" and each "a<b" comparison, was stripped as markup, leaving the code incomplete, so the blob is reduced to this placeholder rather than reproduced.]