From f310747098f7365772dc02daeb0bcbca2abcad91 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 16 Jan 2019 09:47:47 +0100 Subject: [PATCH] dependencies update --- pype/aport/__init__.py | 16 +- pype/aport/api.py | 144 +++ pype/aport/html/pico.js | 1150 ----------------- pype/aport/js_client.html | 19 - pype/aport/original/api.py | 246 ++++ pype/aport/original/index.html | 196 +++ pype/aport/static/index.html | 121 ++ pype/aport/static/script.js | 214 +++ .../aport/publish/collect_instances.py | 4 +- pype/vendor/pico/client.py | 2 +- pype/vendor/urllib3/__init__.py | 9 +- pype/vendor/urllib3/_collections.py | 5 +- pype/vendor/urllib3/connection.py | 40 +- pype/vendor/urllib3/connectionpool.py | 18 +- .../urllib3/contrib/_appengine_environ.py | 30 + pype/vendor/urllib3/contrib/appengine.py | 36 +- pype/vendor/urllib3/contrib/ntlmpool.py | 3 +- pype/vendor/urllib3/contrib/pyopenssl.py | 25 +- .../urllib3/packages/backports/makefile.py | 2 +- pype/vendor/urllib3/packages/ordered_dict.py | 259 ---- .../ssl_match_hostname/_implementation.py | 3 +- pype/vendor/urllib3/poolmanager.py | 1 + pype/vendor/urllib3/request.py | 2 +- pype/vendor/urllib3/response.py | 47 +- pype/vendor/urllib3/util/connection.py | 8 + pype/vendor/urllib3/util/response.py | 10 +- pype/vendor/urllib3/util/retry.py | 2 +- pype/vendor/urllib3/util/ssl_.py | 35 +- pype/vendor/urllib3/util/wait.py | 5 +- 29 files changed, 1087 insertions(+), 1565 deletions(-) create mode 100644 pype/aport/api.py delete mode 100644 pype/aport/html/pico.js delete mode 100644 pype/aport/js_client.html create mode 100644 pype/aport/original/api.py create mode 100644 pype/aport/original/index.html create mode 100644 pype/aport/static/index.html create mode 100644 pype/aport/static/script.js create mode 100644 pype/vendor/urllib3/contrib/_appengine_environ.py delete mode 100644 pype/vendor/urllib3/packages/ordered_dict.py diff --git a/pype/aport/__init__.py b/pype/aport/__init__.py index e0ed6d55c2..4efcb731c7 100644 --- a/pype/aport/__init__.py +++ b/pype/aport/__init__.py @@ -68,12 +68,24 @@ def uninstall(): def pico_server_launch(): + # path = "C:/Users/hubert/CODE/github/pico/examples/everything" + path = os.path.join( + os.path.dirname(__file__), + # "package" + ) + + os.chdir(path) + print(os.getcwd()) + print(os.listdir(path)) try: - args = [sys.executable, "-m", "pico.server", "pipeline"] + args = [sys.executable, "-m", "pico.server", + # "pipeline", + "api" + ] app.forward( args, - cwd=os.path.dirname(__file__) + cwd=path ) except Exception as e: log.error(e) diff --git a/pype/aport/api.py b/pype/aport/api.py new file mode 100644 index 0000000000..4d202b6e7a --- /dev/null +++ b/pype/aport/api.py @@ -0,0 +1,144 @@ +# api.py +import os +import sys +import tempfile + +import pico +from pico import PicoApp +from pico.decorators import request_args, set_cookie, delete_cookie, stream +from pico.decorators import header, cookie + +from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest + +from avalon import api as avalon +from avalon import io + +import pyblish.api as pyblish + +from app.api import forward +from pype import api as pype + + +log = pype.Logger.getLogger(__name__, "aport") + + +SESSION = avalon.session +if not SESSION: + io.install() + + +@pico.expose() +def publish(json_data_path, gui): + """ + Runs standalone pyblish and adds link to + data in external json file + + It is necessary to run `register_plugin_path` if particular + host is needed + + Args: + json_data_path (string): path to temp json file with + context 
data + staging_dir (strign, optional): path to temp directory + + Returns: + dict: return_json_path + + Raises: + Exception: description + + """ + cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/") + + staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/") + log.info("staging_dir: {}".format(staging_dir)) + return_json_path = os.path.join(staging_dir, "return_data.json").replace("\\", "/") + + log.info("avalon.session is: \n{}".format(SESSION)) + + pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'), + "app", "pype-start.py") + + publish = "--publish-gui" if gui else "--publish" + + args = [pype_start, publish, + "-pp", os.environ["PUBLISH_PATH"], + "-d", "rqst_json_data_path", json_data_path, + "-d", "post_json_data_path", return_json_path + ] + + log.debug(args) + + # start standalone pyblish qml + forward([ + sys.executable, "-u" + ] + args, + cwd=cwd + ) + + return {"return_json_path": return_json_path} + + +@pico.expose() +def context(project, asset, task, app): + # http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp + + os.environ["AVALON_PROJECT"] = project + + avalon.update_current_task(task, asset, app) + + project_code = pype.get_project_code() + pype.set_project_code(project_code) + hierarchy = pype.get_hierarchy() + pype.set_hierarchy(hierarchy) + fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items() + if isinstance(v, str)} + SESSION.update(fix_paths) + SESSION.update({"AVALON_HIERARCHY": hierarchy, + "AVALON_PROJECTCODE": project_code, + "current_dir": os.getcwd().replace("\\", "/") + }) + + return SESSION + + +@pico.expose() +def deregister_plugin_path(): + if os.getenv("PUBLISH_PATH", None): + aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split( + os.pathsep) if "aport" in p][0] + os.environ["PUBLISH_PATH"] = aport_plugin_path + else: + log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred") + + return "Publish path deregistered" + + +@pico.expose() +def register_plugin_path(publish_path): + deregister_plugin_path() + if os.getenv("PUBLISH_PATH", None): + os.environ["PUBLISH_PATH"] = os.pathsep.join( + os.environ["PUBLISH_PATH"].split(os.pathsep) + + [publish_path.replace("\\", "/")] + ) + else: + os.environ["PUBLISH_PATH"] = publish_path + + log.info(os.environ["PUBLISH_PATH"].split(os.pathsep)) + + return "Publish registered paths: {}".format( + os.environ["PUBLISH_PATH"].split(os.pathsep) + ) + + +app = PicoApp() +app.register_module(__name__) + +# remove all Handlers created by pico +for name, handler in [(handler.get_name(), handler) + for handler in pype.Logger.logging.root.handlers[:]]: + if "pype" not in str(name).lower(): + print(name) + print(handler) + pype.Logger.logging.root.removeHandler(handler) diff --git a/pype/aport/html/pico.js b/pype/aport/html/pico.js deleted file mode 100644 index d00b77f2e8..0000000000 --- a/pype/aport/html/pico.js +++ /dev/null @@ -1,1150 +0,0 @@ -(function(module,exports,require){var - uuid=Date.now(), - modules={}, - updates={}, - paths={}, - env={}, - preprocessors={}, - EXT_JS='.js',EXT_JSON='.json', - MOD_PREFIX='"use strict";\n', - MOD_POSTFIX='//# sourceURL=', - PLACE_HOLDER='return arguments.callee.__proto__.apply(this,arguments)', // prevent closure - getEnv = function(k){ - return env[k] - }, - dummyCB=function(){}, - dummyLoader=function(){ - arguments[arguments.length-1]() - }, - // run the module and register the module output - define=function(url, func, mute){ - if (modules[url] && !isPlaceHolder(modules[url])) return 
modules[url] - var - ext=getExt(url)||EXT_JS, - pp=preprocessors[ext] - - if (pp) func=pp(url, func) - - switch(ext){ - case EXT_JS: - var - module={exports:{}}, - evt={}, - base, - getBase=function(k){ - base=getMod(k); return base - }, - m=func.call(mute?{}:evt,module.exports,getMod,module,define,getBase,pico)||module.exports - - if (base) m=inherit(m,base) - if ('function' === typeof m) m.extend=extend - if (evt.load) evt.load(m) - if (evt.update) updates[url]=[evt.update,m] - - if (!url) return m - - return modules[url]=wrap(modules[url],m) - case EXT_JSON: - try{ - return modules[url]=JSON.parse(func) - } catch(e){ - return console.error(url, e.message) - } - default: return modules[url]=func - } - }, - dummyPico={run:dummyCB,inherit:dummyCB,reload:dummyCB,parse:dummyCB,define:define,import:dummyCB,export:dummyCB,env:getEnv,ajax:dummyCB},//TODO: proxy - // call when pico.run done - ran,importRule, - schedule= (function(){ - return ('undefined'===typeof requestAnimationFrame) ? function(cb){ - return setTimeout(cb, 100) - }: requestAnimationFrame - })(), - funcBody=function(func){ - return func.substring(func.indexOf('{')+1,func.lastIndexOf('}')) - }, - getExt=function(url){ - if (!url)return null - var idx=url.lastIndexOf('.') - return -1!==idx && -1===url.indexOf('/',idx) ? url.substr(idx) : null - }, - // link to all deps - linker=function(deps, cb){ - if (!deps.length) return cb() - loader(deps.shift(),function(err){ - if (err) return cb(err) - linker(deps, cb) - }) - }, - // load files, and execute them based on ext - loader=function(url,cb){ - modules[url] = modules[url] || pico.import(url) // load node module? - if (modules[url]) return cb(null, modules[url]) - - var - idx=url.indexOf('/'), - path=~idx?paths[url.slice(0,idx)]:0, - fname= path ? url.slice(idx+1) : url - - path=path || paths['~'] || '' - - if (path instanceof Function){ - path(fname, function(err, txt){ - if (err) return cb(err) - js(url,txt,cb) - }) - }else{ - pico.ajax('get',path+fname+(getExt(url)?'':EXT_JS),null,null,function(err,state,txt){ - if (4!==state) return - if (err) return cb(err) - js(url,txt,cb) - }) - } - }, - placeHolder=function(url){ - return Object.defineProperties(Function(PLACE_HOLDER), { - name:{ value: url }, - i:{ value: uuid } - }) - }, - isPlaceHolder=function(obj){ - return 'function' === typeof obj && uuid===obj.i - }, - wrap=function(mod, obj){ - if (!mod || mod===obj) return obj - if (isPlaceHolder(mod)) mod.prototype=obj.prototype - mod.__proto__=obj - return mod - }, - unwrap=function(obj){ - return isPlaceHolder(obj) ? 
obj.__proto__ : obj - }, - extend=function(classMod,staticMod) { - if (!classMod) return this - return inherit(classMod, this, staticMod) - }, - inherit=function(mod1,mod2,mod3){ - var - child=unwrap(mod1), - ancestor=unwrap(mod2), - cType=typeof child, - aType=typeof ancestor, - fn, props - - switch(cType){ - case 'function': - fn=child - props=child.prototype - break - case 'object': - if (cType===aType){ - child.__proto__=ancestor // dun use wrap, inherit not wrap - return child - } - fn= function(){ - return ancestor.apply(this,arguments) - } - props=child - break - default: return child - } - Object.assign(fn,ancestor,unwrap(mod3)) - switch(aType){ - case 'function': - fn.prototype=Object.assign(Object.create(ancestor.prototype),props,{constructor: ancestor}) - return fn - case 'object': - fn.prototype=Object.assign(Object.create(ancestor),props) - return fn - default: return child - } - }, - getMod=function(url,cb){ - var mod=modules[url] - if(void 0===mod){ - if (cb) return loader(url,cb) - return modules[url]=placeHolder(url) - } - cb && setTimeout(cb, 0, null, mod) // make sure consistent async behaviour - return mod - }, - // do not run the module but getting the deps and inherit - compile=function(url,txt,deps,me){ - me=me||dummyPico - var - script=url ? MOD_PREFIX+txt+(env.live ? '' : MOD_POSTFIX+url) : txt, - frequire=function(k){ - if(!modules[k])deps.push(k);return modules[k] - } - - try{ - var func=Function('exports','require','module','define','inherit','pico',script) - } catch(e){ - return console.error(url, e.message) - } - - func.call({}, {},frequire,{},define,frequire,me) - return func - }, - // js file executer - js=function(url,txt,cb){ - cb=cb||dummyCB - if(modules[url]) return cb(null, modules[url]) - if(EXT_JS !== (getExt(url)||EXT_JS)) return cb(null, define(url,txt)) - - var - deps=[], - func=compile(url,txt,deps) - - if(url)modules[url]=placeHolder(url) - - linker(deps, function(err){ - if (err) return cb(err) - - cb(null,define(url,func)) - }) - }, - tick=function(timestamp){ - var f - for (var k in updates) { - (f = updates[k]) && f[0](f[1], timestamp) - } - schedule(tick) - } - -var pico=module[exports]={ - run:function(options,func){ - pico.ajax=options.ajax||pico.ajax - paths=options.paths||paths - env=options.env||env - preprocessors=options.preprocessors||preprocessors - importRule=options.importRule - - var pp - for(var url in modules){ - (pp=preprocessors[getExt(url)||EXT_JS]) && (modules[url]=pp(url, modules[url])) - } - - (options.onLoad||dummyLoader)(function(){ - js(options.name||null,funcBody(func.toString()),function(err,main){ - if (err) return console.error(err) - - main && main() - ran && ran() - - schedule(tick) - }) - }) - }, - reload:function(url, script, cb){ - if ('function'===typeof script) cb=script - cb=cb||dummyCB - var reattach=function(err, m){ - if (err) return cb(err) - cb(null, modules[url]=wrap(modules[url], m)) - } - delete modules[url] - if (cb===script) loader(url, reattach) - else js(url, script, reattach) - }, - parse:js, - define:define, - import:function(url){ - if (Array.isArray(importRule) && importRule.some(function(rx){ - return rx.match(url) - })) - return require(url) - }, - export:getMod, - env:getEnv -} -define('pico/func',function(exports,require,module,define,inherit,pico){ - function callerFormat(_, stack){ - var r = stack[0] - var trace = [] - - for (var i = 0, s; (s = stack[i]); i++){ - trace.push(s.toString()) - } - - return { - typeName: r.getTypeName(), - functionName: r.getFunctionName(), - methodName: 
r.getMethodName(), - fileName: r.getFileName(), - line: r.getLineNumber(), - column: r.getColumnNumber(), - evalOrigin: r.getEvalOrigin(), - isTopLevel: r.isToplevel(), - isEval: r.isEval(), - isNative: r.isNative(), - isConstructor: r.isConstructor(), - trace: trace - } - } - - return { - reflect: function callee(func, limit){ - var orgPrepare = Error.prepareStackTrace - var orgCount = Error.stackTraceLimit - - Error.prepareStackTrace = callerFormat - Error.stackTraceLimit = limit || 1 - - var err = new Error - Error.captureStackTrace(err, func || callee) - var s = err.stack - - Error.stackTraceLimit = orgCount - Error.prepareStackTrace = orgPrepare - - return s - } - } -}) -define('pico/json',function(exports,require,module,define,inherit,pico){ - return { - parse:function(pjson,deep){ - return JSON.parse(pjson[0], function(k, v){ - switch(k[0]){ - case '$': if(deep)return JSON.parse(pjson[v]) - case '_': return pjson[v] - default: return v - } - }) - }, - stringify:function(json, deep){ - var pjson=[] - pjson.unshift(JSON.stringify(json, function(k, v){ - switch(k[0]){ - case '$': if(deep)return pjson.push(JSON.stringify(v)) - case '_': return pjson.push(v) - default: return v - } - })) - return pjson - }, - path: function(json){ - var current = json - - function unwrap(arr, i) { - return i < 0 ? (arr.length || 0) + i : i - } - - function search(key, obj) { - if (!key || !obj || 'object' !== typeof obj) return - if (obj[key]) return obj[key] - - var ret = [] - var found - var ks = Object.keys(obj) - for(var i=0,k; (k=ks[i]); i++){ - found = search(key, obj[k]) - found && (Array.isArray(found) ? ret.push.apply(ret,found) : ret.push(found)) - } - return ret.length ? ret : void 0 - } - - function jwalk(){ - if (!arguments.length) return current - var isArr = Array.isArray(current) - - switch(typeof arguments[0]){ - case 'string': - var str = arguments[0] - - switch(str){ - default: - if (isArr){ - if (!current[0][str]) break - current = current.map( function(o) { - return o[str] - } ) - }else{ - if (!current[str]) break - current = current[str] - } - break - case '..': - current = search(arguments[1], current) || current - break - case '*': - if (isArr) break - current = Object.keys(current).map( function(k) { - return current[k] - } ) - break - } - break - case 'object': - var arr = arguments[0] - if (!Array.isArray(arr)) break - current = arr.map( function(i) { - return current[unwrap(current, i)] - } ) - break - case 'number': - var start = unwrap(current, arguments[0]) - var end = unwrap(current, arguments[1]) || current.length-1 || 0 - var interval = arguments[2] || 1 - var next = [] - var a = [] - for (var i=start; i <= end; i+=interval){ - next.push(current[i]) - a.push(i) - } - current = next - break - case 'function': - var cb = arguments[0] - current = isArr ? current.map( cb ) : cb(current) - break - } - Array.isArray(current) && (current = current.filter( function(o) { - return null != o - } )) - if (1 === current.length) current = current.pop() - return jwalk - } - return jwalk - } - } -}) -define('pico/obj',function(){ - var allows = ['object','function'] - var specialFunc = ['constructor'] - return { - extend: function extend(to, from, options){ - var tf=allows.indexOf(typeof to) - var ft=allows.indexOf(typeof from) - if (1 === tf) tf = allows.indexOf(typeof to.__proto__) - if (1 === ft) ft = allows.indexOf(typeof from.__proto__) - if (!to || null === from || (-1 === ft && ft === tf)) return void 0 === from ? 
to : from - if (1===ft) { - if(ft === tf)from.prototype=to - return from - } - options=options||{} - var tidy = options.tidy, key, value - if (Array.isArray(from)){ - if (options.mergeArr){ - to = to || [] - // TODO: change unique to Set when is more commonly support on mobile - var i, l, unique={} - for (i=0,l=to.length; i 1){ - switch(token.charAt(0)){ - case '%': - case ':': - case '#': - token = params[token.slice(1)] - if (!token) return mandatory ? '' : url - break - } - } - url += prefix + token - return buildRest(url, tokens, index, params, prefix, mandatory) - } - - return { - codec: function(num, str){ - for(var i=0,ret='',c; (c=str.charCodeAt(i)); i++){ - ret += String.fromCharCode(c ^ num) - } - return ret - }, - hash: function(str){ - for (var i=0,h=0,c; (c=str.charCodeAt(i)); i++) { - // same as h = ((h<<5)-h)+c; h = h | 0 or h = h & h <= Convert to 32bit integer - h = (h<<3)-h+c | 0 - } - return h - }, - rand: function(){ - return Random().toString(36).substr(2) - }, - pad:function(val,n,str){ - return this.tab(val,n,str)+val - }, - tab:function(val,n,str){ - var c=n-String(val).length+1 - return Array(c>0?c:0).join(str||'0') - }, - // src:https://raw.githubusercontent.com/krasimir/absurd/master/lib/processors/html/helpers/TemplateEngine.js - template:function(html){ - var re = /<%(.+?)%>/g, - reExp = /(^( )?(var|if|for|else|switch|case|break|{|}|;))(.*)?/g, - code = 'var r=[];\n', - cursor = 0, - match - var add = function(line, js) { - js ? (code += line.match(reExp) ? line + '\n' : 'r.push(' + line + ');\n') : - (code += line !== '' ? 'r.push("' + line.replace(/"/g, '\\"') + '");\n' : '') - return add - } - while((match = re.exec(html))) { - add(html.slice(cursor, match.index))(match[1], true) - cursor = match.index + match[0].length - } - add(html.substr(cursor, html.length - cursor)) - return partial(new Function('pico', 'd', (code + 'return r.join("");').replace(/[\r\t\n]/g, ' '))) - }, - // precedence | / # : % - compileRest:function(rest, output){ - output=output||[] - if (-1 === rest.search('[|#:%]')) return output - compileRestOptional(rest.split('|'),[rest],function(err,codes){ - if (err) throw err - output.push(codes) - }) - return output - }, - execRest:function(api,build,params){ - var units=api.split('/') - for(var i=0,route,j,opt; (route=build[i]); i++){ - if (matchRestCode(units, route[1], params)){ - for(j=2; (opt=route[j]); j++){ - if (!matchRestCode(units, opt, params)) break - } - return route[0] - } - } - return null - }, - buildRest:function(api,build,params,relativePath){ - var codes - for (var i=0, b; (b = build[i]); i++){ - if (api === b[0]){ - codes = b - break - } - } - if (!codes) return api - var url = buildRest('', codes[1], 0, params, '/', true) - if (!url) return false - var c - for (i=2; (c = codes[i]); i++){ - url = buildRest(url, c, 0, params, '/') - } - // remove the first slash - if (relativePath || 1 === url.indexOf('http')) url = url.slice(1) - return ~url.search('[#%]') ? 
false : url - } - } -}) -define('pico/time',function(){ - var - Max=Math.max, - Min=Math.min, - Floor=Math.floor, - Ceil=Math.ceil, - SEC = 1000, - MIN = 60*SEC, - SAFE_MIN = 90*SEC, - HR = 60*MIN, - DAY= 24*HR, - daynum=function(end,start){ - return (end-start) / DAY - }, - weeknum=function(date, us, yearoff){ - var - offset=us?1:0, - jan1= new Date(date.getFullYear()+(yearoff||0), 0, 1), - day1=((7-jan1.getDay())%7 + offset), - days=daynum(date, jan1) - - if (days > day1) return Ceil((days - day1)/7) - return weeknum(date, us, -1) - }, - parseQuark=function(quark, min, max){ - var - q=quark.split('/'), - q0=q[0] - - if ('*'===q0){ - q[0]=min - }else{ - q0 = parseInt(q0) - q0 = Max(min, q0) - q0 = Min(max, q0) - q[0] = q0 - } - - if (1===q.length) q.push(0) // interval=1 - else q[1]=parseInt(q[1]) - - return q - }, - parseAtom=function(atom, min, max){ - if ('*'===atom) return 0 - var - ret=[], - list=atom.split(',') - for(var i=0,l,j,r,r0,r1,rm,ri; (l=list[i]); i++){ - r=l.split('-') - r0=parseQuark(r[0],min,max) - if (1===r.length){ - ri=r0[1] - if (ri) for(j=r0[0]; j<=max; j+=ri) ret.push(j) - else ret.push(r0[0]) - continue - } - r1=parseQuark(r[1],min,max) - j=r0[0] - rm=r1[0] - ri=r1[1]||1 - - if (j>rm){ - // wrap around - for(; j>=rm; j-=ri) { - ret.push(j) - } - }else{ - for(; j<=rm; j+=ri) { - ret.push(j) - } - } - } - ret.sort(function(a,b){ - return a-b - }) - return ret - }, - closest=function(now, list, max){ - if (!list) return now - if (Max.apply(Math, list.concat(now))===now) return now+(max-now)+Min.apply(Math, list) - for(var i=0,l=list.length; i=now) return list[i] - } - console.error('not suppose to be here',now, list, max) - }, - nearest=function(now, count, mins, hrs, doms, mons, dows, yrs, cb){ - if (count++ > 3) return cb(0) - - var - min=closest(now.getMinutes(), mins, 60), - hr=closest(now.getHours()+Floor(min/60), hrs, 24), - dom=now.getDate(), - mon=now.getMonth(), - yr=now.getFullYear(), - days=(new Date(yr, mon, 0)).getDate() - - if (dows){ - // if dow set ignore dom fields - var - day=now.getDay()+Floor(hr/24), - dow=closest(day, dows, 7) - dom+=(dow-day) - }else{ - dom=closest(dom+Floor(hr/24), doms, days) - } - mon=closest(mon+1+Floor(dom/days), mons, 12) - - if (now.getMonth()+1 !== mon) return nearest(new Date(yr, mon-1), count, mins, hrs, doms, mons, dows, yrs, cb) - - yr=closest(yr+Floor((mon-1)/12), yrs, 0) - if (now.getFullYear() !== yr) return nearest(new Date(yr, mon-1), count, mins, hrs, doms, mons, dows, yrs, cb) - - var then=(new Date(yr, (mon-1)%12)).getTime() - then+=(dom%days-1)*DAY // beginning of day - then+=(hr%24)*HR - then+=(min%60)*MIN - - return cb(then) - } - - return { - // fmt: min hr dom M dow yr - parse: function(fmt){ - var atoms=fmt.split(' ') - if (atoms.length < 6) return 0 - var mins=parseAtom(atoms[0], 0, 59) - if (null == mins) return 0 - var hrs=parseAtom(atoms[1], 0, 23) - if (null == hrs) return 0 - var doms=parseAtom(atoms[2], 1, 31) - if (null == doms) return 0 - var mons=parseAtom(atoms[3], 1, 12) - if (null == mons) return 0 - var dows=parseAtom(atoms[4], 0, 6) - if (null == dows) return 0 - var yrs=parseAtom(atoms[5], 1975, 2075) - if (null == yrs) return 0 - - return [mins, hrs, doms, mons, dows, yrs] - }, - nearest:function(mins, hrs, doms, mons, dows, yrs, now){ - now = now || Date.now() - return nearest(new Date(now + SAFE_MIN), 0, mins, hrs, doms, mons, dows, yrs, function(then){ - return then - }) - }, - daynum:daynum, - weeknum:weeknum, - // node.js should compile with - // ./configure --with-intl=full-icu 
--download=all - // ./configure --with-intl=small-icu --download=all - day: function(date, locale){ - var - now=new Date, - mid=new Date(now.getFullYear(),now.getMonth(),now.getDate(),12,0,0), - diff=mid-date, - DAY15=DAY*1.5 - if ((diff >= 0 && diff <= DAY15) || (diff <= 0 && diff > -DAY15)){ - if (now.getDate()===date.getDate())return'Today' - if (now > date) return 'Yesterday' - return 'Tomorrow' - } - - locale=locale||'en-US' - if (now.getFullYear()===date.getFullYear() && weeknum(now)===weeknum(date)) return date.toLocaleDateString(locale, {weekday:'long'}) - return date.toLocaleDateString(locale,{weekday: 'short', month: 'short', day: 'numeric'}) - } - } -}) -define('pico/web',function(exports,require,module,define,inherit,pico){ - var - PJSON=require('pico/json'), - Abs = Math.abs,Floor=Math.floor,Random=Math.random, - API_ACK = 'ack', - PT_HEAD = 1, - PT_BODY = 2, - isOnline = true, - stdCB = function(err){ - if (err) console.error(err) - }, - appendFD = function(fd, name, value){ - fd.append(name, value) - }, - appendObj = function(obj, name, value){ - obj[name] = value - }, - timeSync = function(net, cb){ - cb = cb || stdCB - pico.ajax('get', net.url, null, null, function(err, readyState, response){ - if (4 !== readyState) return - if (err) return cb(err) - var st = parseInt(response) - if (isNaN(st)) return cb('invalid timesync response') - net.serverTime = st - net.serverTimeAtClient = Date.now() - cb() - }) - }, - onResponse = function(err, readyState, response, net){ - if (err && 4===readyState) timeSync(net) // sync time, in case it was due to time error - - // schedule next update - switch(readyState){ - case 2: // send() and header received - net.head = null - net.currPT = PT_HEAD - net.resEndPos = 0 - break - case 3: break // body loading - case 4: // body received - break - } - - if (!response) return - - var - startPos = net.resEndPos, endPos = -1, - sep = net.delimiter, - sepLen = sep.length, - body = net.body, - head - - try{ - while(true){ - endPos = response.indexOf(sep, startPos) - if (-1 === endPos) break - - switch(net.currPT){ - case PT_HEAD: - net.head = JSON.parse(response.substring(startPos, endPos)) - body.length = 0 - net.currPT = PT_BODY - break - case PT_BODY: - body.push(response.substring(startPos, endPos)) - break - } - head = net.head - if (head && head.len === body.length){ - net.currPT = PT_HEAD - - if (head.resId){ - net.request(API_ACK, {resId:head.resId}) - } - if (!head.reqId) { - console.error('incomplete response header: '+JSON.stringify(head)) - return - } - if (net.cullAge && net.cullAge < Abs(net.getServerTime()-head.date)) { - console.error('invalid server time: '+JSON.stringify(head)+' '+Abs(net.getServerTime()-head.date)) - return - } - if (net.secretKey && body.length){ - var hmac = CryptoJS.algo.HMAC.create(CryptoJS.algo.MD5, net.secretKey+head.date) - - //key: CryptoJS.HmacMD5(JSON.stringify(data), this.secretKey+t).toString(CryptoJS.enc.Base64), - for(var i=0,l=body.length; i - - - Pico Example - - - - - - -

- - - diff --git a/pype/aport/original/api.py b/pype/aport/original/api.py new file mode 100644 index 0000000000..bc2a71a08c --- /dev/null +++ b/pype/aport/original/api.py @@ -0,0 +1,246 @@ +# api.py +import os +import sys +import tempfile + +import pico +from pico import PicoApp +from pico.decorators import request_args, set_cookie, delete_cookie, stream +from pico.decorators import header, cookie + +from werkzeug.exceptions import Unauthorized, ImATeapot, BadRequest + +from avalon import api as avalon +from avalon import io + +import pyblish.api as pyblish + +from app.api import forward +from pype import api as pype + + +log = pype.Logger.getLogger(__name__, "aport") + + +SESSION = avalon.session +if not SESSION: + io.install() + + +@pico.expose() +def publish(json_data_path, staging_dir=None): + """ + Runs standalone pyblish and adds link to + data in external json file + + It is necessary to run `register_plugin_path` if particular + host is needed + + Args: + json_data_path (string): path to temp json file with + context data + staging_dir (strign, optional): path to temp directory + + Returns: + dict: return_json_path + + Raises: + Exception: description + + """ + cwd = os.getenv('AVALON_WORKDIR').replace("\\", "/") + os.chdir(cwd) + log.info(os.getcwd()) + staging_dir = tempfile.mkdtemp(prefix="pype_aport_").replace("\\", "/") + log.info("staging_dir: {}".format(staging_dir)) + return_json_path = os.path.join(staging_dir, "return_data.json") + + log.info("avalon.session is: \n{}".format(SESSION)) + pype_start = os.path.join(os.getenv('PYPE_SETUP_ROOT'), + "app", "pype-start.py") + + args = [pype_start, "--publish", + "-pp", os.environ["PUBLISH_PATH"], + "-d", "rqst_json_data_path", json_data_path, + "-d", "post_json_data_path", return_json_path + ] + + log.debug(args) + + # start standalone pyblish qml + forward([ + sys.executable, "-u" + ] + args, + cwd=cwd + ) + + return {"return_json_path": return_json_path} + + +@pico.expose() +def context(project, asset, task, app): + # http://localhost:4242/pipeline/context?project=this&asset=shot01&task=comp + + os.environ["AVALON_PROJECT"] = project + + avalon.update_current_task(task, asset, app) + + project_code = pype.get_project_code() + pype.set_project_code(project_code) + hierarchy = pype.get_hierarchy() + pype.set_hierarchy(hierarchy) + fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items() + if isinstance(v, str)} + SESSION.update(fix_paths) + SESSION.update({"AVALON_HIERARCHY": hierarchy, + "AVALON_PROJECTCODE": project_code, + "current_dir": os.getcwd().replace("\\", "/") + }) + + return SESSION + + +@pico.expose() +def deregister_plugin_path(): + if os.getenv("PUBLISH_PATH", None): + aport_plugin_path = [p.replace("\\", "/") for p in os.environ["PUBLISH_PATH"].split( + os.pathsep) if "aport" in p][0] + os.environ["PUBLISH_PATH"] = aport_plugin_path + else: + log.warning("deregister_plugin_path(): No PUBLISH_PATH is registred") + + return "Publish path deregistered" + + +@pico.expose() +def register_plugin_path(publish_path): + deregister_plugin_path() + if os.getenv("PUBLISH_PATH", None): + os.environ["PUBLISH_PATH"] = os.pathsep.join( + os.environ["PUBLISH_PATH"].split(os.pathsep) + + [publish_path.replace("\\", "/")] + ) + else: + os.environ["PUBLISH_PATH"] = publish_path + + log.info(os.environ["PUBLISH_PATH"].split(os.pathsep)) + + return "Publish registered paths: {}".format( + os.environ["PUBLISH_PATH"].split(os.pathsep) + ) + + +@pico.expose() +def nuke_test(): + import nuke + n = nuke.createNode("Constant") + 
log.info(n) + + +@pico.expose() +def hello(who='world'): + return 'Hello %s' % who + + +@pico.expose() +def multiply(x, y): + return x * y + + +@pico.expose() +def fail(): + raise Exception('fail!') + + +@pico.expose() +def make_coffee(): + raise ImATeapot() + + +@pico.expose() +def upload(upload, filename): + if not filename.endswith('.txt'): + raise BadRequest('Upload must be a .txt file!') + return upload.read().decode() + + +@pico.expose() +@request_args(ip='remote_addr') +def my_ip(ip): + return ip + + +@pico.expose() +@request_args(ip=lambda req: req.remote_addr) +def my_ip3(ip): + return ip + + +@pico.prehandle() +def set_user(request, kwargs): + if request.authorization: + if request.authorization.password != 'secret': + raise Unauthorized('Incorrect username or password') + request.user = request.authorization.username + else: + request.user = None + + +@pico.expose() +@request_args(username='user') +def current_user(username): + return username + + +@pico.expose() +@request_args(session=cookie('session_id')) +def session_id(session): + return session + + +@pico.expose() +@set_cookie() +def start_session(): + return {'session_id': '42'} + + +@pico.expose() +@delete_cookie('session_id') +def end_session(): + return True + + +@pico.expose() +@request_args(session=header('x-session-id')) +def session_id2(session): + return session + + +@pico.expose() +@stream() +def countdown(n=10): + for i in reversed(range(n)): + yield '%i' % i + time.sleep(0.5) + + +@pico.expose() +def user_description(user): + return '{name} is a {occupation} aged {age}'.format(**user) + + +@pico.expose() +def show_source(): + return open(__file__.replace('.pyc', '.py')).read() + + +app = PicoApp() +app.register_module(__name__) + +# remove all Handlers created by pico +for name, handler in [(handler.get_name(), handler) + for handler in pype.Logger.logging.root.handlers[:]]: + if "pype" not in str(name).lower(): + print(name) + print(handler) + pype.Logger.logging.root.removeHandler(handler) diff --git a/pype/aport/original/index.html b/pype/aport/original/index.html new file mode 100644 index 0000000000..ce780f7620 --- /dev/null +++ b/pype/aport/original/index.html @@ -0,0 +1,196 @@ + + + + + Pico Example - Everything + + + + + + + + + + + + + + + + +
+ [Pico "Everything" demo page (markup omitted). Intro: "Here we show some simple examples of using Pico. Click any api.X link to see the corresponding Python source." Example sections, each wired to the matching api.* call: Hello World, deregister_plugin_path, register_plugin_path, Numeric Multiplication, File Upload, Request parameters (IP address), Authentication (see api.set_user for the handler), Sessions (cookies), Sessions (header), Streaming Response, Objects, Errors, Forms (submits the form as a whole; input names must match the function argument names), and JSON (submits data as JSON; object keys must match the function argument names).]
diff --git a/pype/aport/static/index.html new file mode 100644 index 0000000000..0da3f2abdb --- /dev/null +++ b/pype/aport/static/index.html @@ -0,0 +1,121 @@
+ [Pype extension panel ("Pype extention", markup omitted). Sections, each wired to the matching api.* call: Set context (Project, Asset, task, app), deregister_plugin_path, register_plugin_path (Path), and Publish (Json path, Gui checkbox).]
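For reference, the static page above drives the four functions exposed in pype/aport/api.py: context, register_plugin_path, deregister_plugin_path and publish. Below is a minimal Python sketch of hitting them over plain HTTP with requests; the host, port and "/pipeline" prefix are assumptions lifted from the inline comment in context(), and the real mount path depends on how PicoApp registers the module, so treat every URL here as illustrative.

# Usage sketch (assumptions noted above): exercising the aport endpoints with `requests`.
# BASE is taken from the "http://localhost:4242/pipeline/context?..." comment in api.py;
# point it at wherever `python -m pico.server api` is actually listening.
import requests

BASE = "http://localhost:4242/pipeline"

# Set the Avalon context; the server returns the updated SESSION mapping.
session = requests.get(BASE + "/context", params={
    "project": "this", "asset": "shot01", "task": "comp", "app": "aport",
}).json()

# Register an extra pyblish plugin path, then trigger a publish without the GUI.
requests.get(BASE + "/register_plugin_path",
             params={"publish_path": "/path/to/publish/plugins"})
result = requests.get(BASE + "/publish",
                      params={"json_data_path": "/tmp/context_data.json",
                              "gui": False}).json()
print(result["return_json_path"])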
+ + + + diff --git a/pype/aport/static/script.js b/pype/aport/static/script.js new file mode 100644 index 0000000000..417f5052d3 --- /dev/null +++ b/pype/aport/static/script.js @@ -0,0 +1,214 @@ +var api = pico.importModule('api'); + +var output = document.getElementById('output'); + +function querySelector(parent){ + return function(child){ + return document.querySelector(parent).querySelector(child) + }; +} + +var defs = {} +function jumpTo(name){ + var e = defs[name]; + document.querySelectorAll('.highlight').forEach(function(el){ + el.classList.remove('highlight'); + }); + e.classList.add('highlight'); + return false; +} + +function displayResult(r){ + output.classList.remove("error"); + output.innerText = JSON.stringify(r); +} + +function displayError(e){ + output.classList.add("error"); + output.innerText = e.message; +} + +function unindent(code){ + var lines = code.split('\n'); + var margin = -1; + for(var j=0; j < lines.length; j++){ + var l = lines[j]; + for(i=0; i < l.length; i++){ + if(l[i] != " "){ + margin = i; + break; + } + } + if(margin > -1){ + break; + } + } + lines = lines.slice(j); + return lines.map(function(s){ return s.substr(margin)}).join('\n'); +} + +function deregister(){ + var $ = querySelector("#deregister"); + api.deregister_plugin_path().then(displayResult); +} + +function register(){ + var $ = querySelector("#register"); + var path = $("input[name=path]").value; + api.register_plugin_path(path).then(displayResult); +} + + +function publish(){ + var $ = querySelector("#publish"); + var path = $("input[name=path]").value; + var gui = $("input[name=gui]").checked; + api.publish(path, gui).then(displayResult); +} + +function context(){ + var $ = querySelector("#context"); + var project = $("input[name=project]").value; + var asset = $("input[name=asset]").value; + var task = $("input[name=task]").value; + var app = $("input[name=app]").value; + api.context(project,asset,task,app).then(displayResult); +} + + +// +// function example1(){ +// var $ = querySelector("#example1"); +// var name = $("input[name=name]").value; +// api.hello(name).then(displayResult); +// } +// +// +// function example2(){ +// var $ = querySelector("#example2"); +// var x = $("input[name=x]").valueAsNumber; +// var y = $("#example2 input[name=y]").valueAsNumber; +// api.multiply(x, y).then(displayResult); +// } +// +// function example3(){ +// var $ = querySelector("#example3"); +// var file = $("input[name=upload]").files[0]; +// api.upload(file, file.name).then(displayResult).catch(displayError); +// } +// +// function example4(){ +// var $ = querySelector("#example4"); +// api.my_ip().then(displayResult) +// } +// +// function example5(){ +// var $ = querySelector("#example5"); +// var username = $("input[name=username]").value; +// var password = $("input[name=password]").value; +// pico.setAuthentication(api, username, password); +// api.current_user().then(displayResult).catch(displayError); +// pico.clearAuthentication(api); +// } +// +// function example6(){ +// var $ = querySelector("#example6"); +// api.start_session().then(function(){ +// api.session_id().then(displayResult).then(function(){ +// api.end_session(); +// }) +// }) +// } +// +// function example7(){ +// var $ = querySelector("#example7"); +// var session_id = "4242"; +// pico.setRequestHook(api, 'session', function(req) { +// req.headers.set('X-SESSION-ID', session_id) +// }) +// api.session_id2().then(displayResult) +// pico.clearRequestHook(api, 'session'); +// } +// +// function example8(){ +// var $ = 
querySelector("#example8"); +// api.countdown(10).each(displayResult).then(function(){ +// displayResult("Boom!"); +// }); +// } +// +// function example9(){ +// var $ = querySelector("#example9"); +// var user = { +// name: "Bob", +// age: 30, +// occupation: "Software Engineer", +// } +// api.user_description(user).then(displayResult); +// } +// +// function example10(){ +// var $ = querySelector("#example10"); +// api.fail().then(displayResult).catch(displayError); +// } +// +// function example11(){ +// var $ = querySelector("#example11"); +// api.make_coffee().then(displayResult).catch(displayError); +// } +// +// +// function example12(){ +// var $ = querySelector("#example12"); +// var form = $("form"); +// api.multiply.submitFormData(new FormData(form)).then(displayResult).catch(displayError); +// } +// +// function example13(){ +// var $ = querySelector("#example13"); +// var data = { +// x: 6, +// y: 7, +// } +// api.multiply.submitJSON(data).then(displayResult).catch(displayError); +// } + + +// api.show_source().then(function(s){ +// document.querySelector('#source code').innerText = s; +// }).then(ready); + + +function ready(){ + // // set the element of each example to the corresponding functions source + // document.querySelectorAll('li pre code.js').forEach(function(e){ + // var id = e.parentElement.parentElement.id; + // var f = window[id]; + // var code = f.toString().split('\n').slice(2, -1).join('\n'); + // e.innerText = unindent(code); + // }) + + document.querySelectorAll('li pre code.html').forEach(function(e){ + var html = e.parentElement.parentElement.querySelector('div.example').innerHTML; + e.innerText = unindent(html); + }) + + hljs.initHighlighting(); + + // // find all the elements representing the function definitions in the python source + // document.querySelectorAll('.python .hljs-function .hljs-title').forEach(function(e){ + // var a = document.createElement('a'); + // a.name = e.innerText; + // e.parentElement.insertBefore(a, e) + // return defs[e.innerText] = e.parentElement; + // }); + + // convert all 'api.X' strings to hyperlinks to jump to python source + document.querySelectorAll('.js').forEach(function(e){ + var code = e.innerHTML; + Object.keys(defs).forEach(function(k){ + code = code.replace('api.' + k + '(', 'api.' + k + '('); + }) + e.innerHTML = code; + }) +} diff --git a/pype/plugins/aport/publish/collect_instances.py b/pype/plugins/aport/publish/collect_instances.py index ce7156e6ad..ffb2ec824c 100644 --- a/pype/plugins/aport/publish/collect_instances.py +++ b/pype/plugins/aport/publish/collect_instances.py @@ -138,8 +138,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin): "family": inst["family"], "families": [subset], "jsonData": inst, - "parents": , # bez tasku - "hierarchy": , + # "parents": , # bez tasku + # "hierarchy": , "publish": True, }) self.log.info("collected instance: {}".format(instance.data)) diff --git a/pype/vendor/pico/client.py b/pype/vendor/pico/client.py index 8f1361f9c8..300afbab11 100644 --- a/pype/vendor/pico/client.py +++ b/pype/vendor/pico/client.py @@ -11,7 +11,7 @@ s = example.hello("Python") Use help(example.hello) or example.hello? as normal to check function parameters and docstrings. """ import os -import pico.pragmaticjson as json +from . 
import pragmaticjson as json import imp import requests diff --git a/pype/vendor/urllib3/__init__.py b/pype/vendor/urllib3/__init__.py index 4bd533b5b4..148a9c31a7 100644 --- a/pype/vendor/urllib3/__init__.py +++ b/pype/vendor/urllib3/__init__.py @@ -23,16 +23,11 @@ from .util.retry import Retry # Set default logging handler to avoid "No handler found" warnings. import logging -try: # Python 2.7+ - from logging import NullHandler -except ImportError: - class NullHandler(logging.Handler): - def emit(self, record): - pass +from logging import NullHandler __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.23' +__version__ = '1.24.1' __all__ = ( 'HTTPConnectionPool', diff --git a/pype/vendor/urllib3/_collections.py b/pype/vendor/urllib3/_collections.py index 6e36b84e59..34f23811c6 100644 --- a/pype/vendor/urllib3/_collections.py +++ b/pype/vendor/urllib3/_collections.py @@ -14,10 +14,7 @@ except ImportError: # Platform-specific: No threads available pass -try: # Python 2.7+ - from collections import OrderedDict -except ImportError: - from .packages.ordered_dict import OrderedDict +from collections import OrderedDict from .exceptions import InvalidHeader from .packages.six import iterkeys, itervalues, PY3 diff --git a/pype/vendor/urllib3/connection.py b/pype/vendor/urllib3/connection.py index a03b573f01..02b36654bd 100644 --- a/pype/vendor/urllib3/connection.py +++ b/pype/vendor/urllib3/connection.py @@ -2,7 +2,6 @@ from __future__ import absolute_import import datetime import logging import os -import sys import socket from socket import error as SocketError, timeout as SocketTimeout import warnings @@ -78,9 +77,6 @@ class HTTPConnection(_HTTPConnection, object): - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - ``source_address``: Set the source address for the current connection. - - .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x - - ``socket_options``: Set specific options on the underlying socket. If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. @@ -108,21 +104,13 @@ class HTTPConnection(_HTTPConnection, object): if six.PY3: # Python 3 kw.pop('strict', None) - # Pre-set source_address in case we have an older Python like 2.6. + # Pre-set source_address. self.source_address = kw.get('source_address') - if sys.version_info < (2, 7): # Python 2.6 - # _HTTPConnection on Python 2.6 will balk at this keyword arg, but - # not newer versions. We can still use it when creating a - # connection though, so we pop it *after* we have saved it as - # self.source_address. - kw.pop('source_address', None) - #: The socket options provided by the user. If no options are #: provided, we use the default options. self.socket_options = kw.pop('socket_options', self.default_socket_options) - # Superclass also sets self.source_address in Python 2.7+. _HTTPConnection.__init__(self, *args, **kw) @property @@ -183,10 +171,7 @@ class HTTPConnection(_HTTPConnection, object): def _prepare_conn(self, conn): self.sock = conn - # the _tunnel_host attribute was added in python 2.6.3 (via - # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do - # not have them. - if getattr(self, '_tunnel_host', None): + if self._tunnel_host: # TODO: Fix tunnel so it doesn't depend on self.sock state. 
self._tunnel() # Mark this connection as not reusable @@ -217,13 +202,13 @@ class HTTPConnection(_HTTPConnection, object): self.endheaders() if body is not None: - stringish_types = six.string_types + (six.binary_type,) + stringish_types = six.string_types + (bytes,) if isinstance(body, stringish_types): body = (body,) for chunk in body: if not chunk: continue - if not isinstance(chunk, six.binary_type): + if not isinstance(chunk, bytes): chunk = chunk.encode('utf8') len_str = hex(len(chunk))[2:] self.send(len_str.encode('utf-8')) @@ -242,7 +227,7 @@ class HTTPSConnection(HTTPConnection): def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, **kw): + ssl_context=None, server_hostname=None, **kw): HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) @@ -250,6 +235,7 @@ class HTTPSConnection(HTTPConnection): self.key_file = key_file self.cert_file = cert_file self.ssl_context = ssl_context + self.server_hostname = server_hostname # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) @@ -270,6 +256,7 @@ class HTTPSConnection(HTTPConnection): keyfile=self.key_file, certfile=self.cert_file, ssl_context=self.ssl_context, + server_hostname=self.server_hostname ) @@ -312,12 +299,9 @@ class VerifiedHTTPSConnection(HTTPSConnection): def connect(self): # Add certificate verification conn = self._new_conn() - hostname = self.host - if getattr(self, '_tunnel_host', None): - # _tunnel_host was added in Python 2.6.3 - # (See: http://hg.python.org/cpython/rev/0f57b30a152f) + if self._tunnel_host: self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. @@ -328,6 +312,10 @@ class VerifiedHTTPSConnection(HTTPSConnection): # Override the host with the one we're requesting data from. hostname = self._tunnel_host + server_hostname = hostname + if self.server_hostname is not None: + server_hostname = self.server_hostname + is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: warnings.warn(( @@ -352,7 +340,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): certfile=self.cert_file, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, - server_hostname=hostname, + server_hostname=server_hostname, ssl_context=context) if self.assert_fingerprint: @@ -373,7 +361,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): 'for details.)'.format(hostname)), SubjectAltNameWarning ) - _match_hostname(cert, self.assert_hostname or hostname) + _match_hostname(cert, self.assert_hostname or server_hostname) self.is_verified = ( context.verify_mode == ssl.CERT_REQUIRED or diff --git a/pype/vendor/urllib3/connectionpool.py b/pype/vendor/urllib3/connectionpool.py index 8fcb0bce79..f7a8f193d1 100644 --- a/pype/vendor/urllib3/connectionpool.py +++ b/pype/vendor/urllib3/connectionpool.py @@ -89,7 +89,7 @@ class ConnectionPool(object): # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 -_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} class HTTPConnectionPool(ConnectionPool, RequestMethods): @@ -313,7 +313,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Catch possible read timeouts thrown as SSL errors. If not the # case, rethrow the original. 
We need to do this because of: # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 + if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) def _make_request(self, conn, method, url, timeout=_Default, chunked=False, @@ -375,7 +375,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): try: try: # Python 2.7, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 2.6 and older, Python 3 + except TypeError: # Python 3 try: httplib_response = conn.getresponse() except Exception as e: @@ -801,17 +801,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): Establish tunnel connection early, because otherwise httplib would improperly set Host: header to proxy's IP:port. """ - # Python 2.7+ - try: - set_tunnel = conn.set_tunnel - except AttributeError: # Platform-specific: Python 2.6 - set_tunnel = conn._set_tunnel - - if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older - set_tunnel(self._proxy_host, self.port) - else: - set_tunnel(self._proxy_host, self.port, self.proxy_headers) - + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) conn.connect() def _new_conn(self): diff --git a/pype/vendor/urllib3/contrib/_appengine_environ.py b/pype/vendor/urllib3/contrib/_appengine_environ.py new file mode 100644 index 0000000000..f3e00942cb --- /dev/null +++ b/pype/vendor/urllib3/contrib/_appengine_environ.py @@ -0,0 +1,30 @@ +""" +This module provides means to detect the App Engine environment. +""" + +import os + + +def is_appengine(): + return (is_local_appengine() or + is_prod_appengine() or + is_prod_appengine_mvms()) + + +def is_appengine_sandbox(): + return is_appengine() and not is_prod_appengine_mvms() + + +def is_local_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) + + +def is_prod_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and + not is_prod_appengine_mvms()) + + +def is_prod_appengine_mvms(): + return os.environ.get('GAE_VM', False) == 'true' diff --git a/pype/vendor/urllib3/contrib/appengine.py b/pype/vendor/urllib3/contrib/appengine.py index 66922e06aa..2952f114df 100644 --- a/pype/vendor/urllib3/contrib/appengine.py +++ b/pype/vendor/urllib3/contrib/appengine.py @@ -39,8 +39,8 @@ urllib3 on Google App Engine: """ from __future__ import absolute_import +import io import logging -import os import warnings from ..packages.six.moves.urllib.parse import urljoin @@ -53,11 +53,11 @@ from ..exceptions import ( SSLError ) -from ..packages.six import BytesIO from ..request import RequestMethods from ..response import HTTPResponse from ..util.timeout import Timeout from ..util.retry import Retry +from . import _appengine_environ try: from google.appengine.api import urlfetch @@ -239,7 +239,7 @@ class AppEngineManager(RequestMethods): original_response = HTTPResponse( # In order for decoding to work, we must present the content as # a file-like object. 
- body=BytesIO(urlfetch_resp.content), + body=io.BytesIO(urlfetch_resp.content), msg=urlfetch_resp.header_msg, headers=urlfetch_resp.headers, status=urlfetch_resp.status_code, @@ -247,7 +247,7 @@ class AppEngineManager(RequestMethods): ) return HTTPResponse( - body=BytesIO(urlfetch_resp.content), + body=io.BytesIO(urlfetch_resp.content), headers=urlfetch_resp.headers, status=urlfetch_resp.status_code, original_response=original_response, @@ -280,26 +280,10 @@ class AppEngineManager(RequestMethods): return retries -def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) +# Alias methods from _appengine_environ to maintain public API interface. - -def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() - - -def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) - - -def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) - - -def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' +is_appengine = _appengine_environ.is_appengine +is_appengine_sandbox = _appengine_environ.is_appengine_sandbox +is_local_appengine = _appengine_environ.is_local_appengine +is_prod_appengine = _appengine_environ.is_prod_appengine +is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms diff --git a/pype/vendor/urllib3/contrib/ntlmpool.py b/pype/vendor/urllib3/contrib/ntlmpool.py index 642e99ed2d..8ea127c583 100644 --- a/pype/vendor/urllib3/contrib/ntlmpool.py +++ b/pype/vendor/urllib3/contrib/ntlmpool.py @@ -43,8 +43,7 @@ class NTLMConnectionPool(HTTPSConnectionPool): log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', self.num_connections, self.host, self.authurl) - headers = {} - headers['Connection'] = 'Keep-Alive' + headers = {'Connection': 'Keep-Alive'} req_header = 'Authorization' resp_header = 'www-authenticate' diff --git a/pype/vendor/urllib3/contrib/pyopenssl.py b/pype/vendor/urllib3/contrib/pyopenssl.py index 4d4b1aff97..7c0e9465d9 100644 --- a/pype/vendor/urllib3/contrib/pyopenssl.py +++ b/pype/vendor/urllib3/contrib/pyopenssl.py @@ -163,6 +163,9 @@ def _dnsname_to_stdlib(name): from ASCII bytes. We need to idna-encode that string to get it back, and then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + + If the name cannot be idna-encoded then we return None signalling that + the name given should be skipped. """ def idna_encode(name): """ @@ -172,14 +175,19 @@ def _dnsname_to_stdlib(name): """ import idna - for prefix in [u'*.', u'.']: - if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) - return idna.encode(name) + try: + for prefix in [u'*.', u'.']: + if name.startswith(prefix): + name = name[len(prefix):] + return prefix.encode('ascii') + idna.encode(name) + return idna.encode(name) + except idna.core.IDNAError: + return None name = idna_encode(name) - if sys.version_info >= (3, 0): + if name is None: + return None + elif sys.version_info >= (3, 0): name = name.decode('utf-8') return name @@ -223,9 +231,10 @@ def get_subj_alt_name(peer_cert): # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 # decoded. 
This is pretty frustrating, but that's what the standard library # does with certificates, and so we need to attempt to do the same. + # We also want to skip over names which cannot be idna encoded. names = [ - ('DNS', _dnsname_to_stdlib(name)) - for name in ext.get_values_for_type(x509.DNSName) + ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + if name is not None ] names.extend( ('IP Address', str(name)) diff --git a/pype/vendor/urllib3/packages/backports/makefile.py b/pype/vendor/urllib3/packages/backports/makefile.py index 75b80dcf84..740db377d9 100644 --- a/pype/vendor/urllib3/packages/backports/makefile.py +++ b/pype/vendor/urllib3/packages/backports/makefile.py @@ -16,7 +16,7 @@ def backport_makefile(self, mode="r", buffering=None, encoding=None, """ Backport of ``socket.makefile`` from Python 3.5. """ - if not set(mode) <= set(["r", "w", "b"]): + if not set(mode) <= {"r", "w", "b"}: raise ValueError( "invalid mode %r (only r, w, b allowed)" % (mode,) ) diff --git a/pype/vendor/urllib3/packages/ordered_dict.py b/pype/vendor/urllib3/packages/ordered_dict.py deleted file mode 100644 index 4479363cc4..0000000000 --- a/pype/vendor/urllib3/packages/ordered_dict.py +++ /dev/null @@ -1,259 +0,0 @@ -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. -# Copyright 2009 Raymond Hettinger, released under the MIT License. -# http://code.activestate.com/recipes/576693/ -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. - - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. 
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) diff --git a/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py index 1fd42f38ae..d6e66c0196 100644 --- a/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/pype/vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -9,8 +9,7 @@ import sys # ipaddress has been backported to 2.6+ in pypi. If it is installed on the # system, use it to handle IPAddress ServerAltnames (this was added in # python-3.5) otherwise only do DNS matching. This allows -# backports.ssl_match_hostname to continue to be used all the way back to -# python-2.4. +# backports.ssl_match_hostname to continue to be used in Python 2.7. try: import ipaddress except ImportError: diff --git a/pype/vendor/urllib3/poolmanager.py b/pype/vendor/urllib3/poolmanager.py index 506a3c9b87..fe5491cfda 100644 --- a/pype/vendor/urllib3/poolmanager.py +++ b/pype/vendor/urllib3/poolmanager.py @@ -47,6 +47,7 @@ _key_fields = ( 'key__socks_options', # dict 'key_assert_hostname', # bool or string 'key_assert_fingerprint', # str + 'key_server_hostname', #str ) #: The namedtuple class used to construct keys for the connection pool. 
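
The key_server_hostname entry added above ties the connection-pool cache key to the new server_hostname (SNI override) support that ships with this urllib3 update, so requests aimed at the same IP and port but a different TLS hostname get their own pool. A minimal sketch of the effect, not part of this patch; it assumes the vendored urllib3 1.24 PoolManager / pool_kwargs API, a hypothetical import path, and made-up hosts:

    # Hypothetical usage; assumes pype/vendor/urllib3 is importable as a package
    # (otherwise a plain `import urllib3` against urllib3 >= 1.24 behaves the same).
    from pype.vendor import urllib3

    pm = urllib3.PoolManager()

    # Same IP and port, different SNI hostname: the normalized pool keys now
    # differ on key_server_hostname, so two separate HTTPSConnectionPool
    # instances are created instead of one shared pool.
    pool_a = pm.connection_from_host(
        '10.0.0.5', 443, scheme='https',
        pool_kwargs={'server_hostname': 'a.example.com'})
    pool_b = pm.connection_from_host(
        '10.0.0.5', 443, scheme='https',
        pool_kwargs={'server_hostname': 'b.example.com'})
    assert pool_a is not pool_b

This follows the existing convention in _key_fields: any keyword that can change how a connection is made needs a matching key_ entry so _default_key_normalizer can fold it into the PoolKey namedtuple.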
diff --git a/pype/vendor/urllib3/request.py b/pype/vendor/urllib3/request.py index 1be3334113..8f2f44bb21 100644 --- a/pype/vendor/urllib3/request.py +++ b/pype/vendor/urllib3/request.py @@ -36,7 +36,7 @@ class RequestMethods(object): explicitly. """ - _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) + _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'} def __init__(self, headers=None): self.headers = headers or {} diff --git a/pype/vendor/urllib3/response.py b/pype/vendor/urllib3/response.py index 9873cb9423..c112690b0a 100644 --- a/pype/vendor/urllib3/response.py +++ b/pype/vendor/urllib3/response.py @@ -11,7 +11,7 @@ from .exceptions import ( BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked, IncompleteRead, InvalidHeader ) -from .packages.six import string_types as basestring, binary_type, PY3 +from .packages.six import string_types as basestring, PY3 from .packages.six.moves import http_client as httplib from .connection import HTTPException, BaseSSLError from .util.response import is_fp_closed, is_response_to_head @@ -23,7 +23,7 @@ class DeflateDecoder(object): def __init__(self): self._first_try = True - self._data = binary_type() + self._data = b'' self._obj = zlib.decompressobj() def __getattr__(self, name): @@ -69,9 +69,9 @@ class GzipDecoder(object): return getattr(self._obj, name) def decompress(self, data): - ret = binary_type() + ret = bytearray() if self._state == GzipDecoderState.SWALLOW_DATA or not data: - return ret + return bytes(ret) while True: try: ret += self._obj.decompress(data) @@ -81,16 +81,40 @@ class GzipDecoder(object): self._state = GzipDecoderState.SWALLOW_DATA if previous_state == GzipDecoderState.OTHER_MEMBERS: # Allow trailing garbage acceptable in other gzip clients - return ret + return bytes(ret) raise data = self._obj.unused_data if not data: - return ret + return bytes(ret) self._state = GzipDecoderState.OTHER_MEMBERS self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) +class MultiDecoder(object): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. 
+ """ + + def __init__(self, modes): + self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')] + + def flush(self): + return self._decoders[0].flush() + + def decompress(self, data): + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + def _get_decoder(mode): + if ',' in mode: + return MultiDecoder(mode) + if mode == 'gzip': return GzipDecoder() @@ -159,7 +183,7 @@ class HTTPResponse(io.IOBase): self.msg = msg self._request_url = request_url - if body and isinstance(body, (basestring, binary_type)): + if body and isinstance(body, (basestring, bytes)): self._body = body self._pool = pool @@ -283,8 +307,13 @@ class HTTPResponse(io.IOBase): # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None and content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif ',' in content_encoding: + encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS] + if len(encodings): + self._decoder = _get_decoder(content_encoding) def _decode(self, data, decode_content, flush_decoder): """ diff --git a/pype/vendor/urllib3/util/connection.py b/pype/vendor/urllib3/util/connection.py index 5cf488f4b5..5ad70b2f1c 100644 --- a/pype/vendor/urllib3/util/connection.py +++ b/pype/vendor/urllib3/util/connection.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import socket from .wait import NoWayToWaitForSocketError, wait_for_read +from ..contrib import _appengine_environ def is_connection_dropped(conn): # Platform-specific @@ -105,6 +106,13 @@ def _has_ipv6(host): sock = None has_ipv6 = False + # App Engine doesn't support IPV6 sockets and actually has a quota on the + # number of sockets that can be used, so just early out here instead of + # creating a socket needlessly. + # See https://github.com/urllib3/urllib3/issues/1446 + if _appengine_environ.is_appengine_sandbox(): + return False + if socket.has_ipv6: # has_ipv6 returns true if cPython was compiled with IPv6 support. # It does not tell us if the system has IPv6 support enabled. To diff --git a/pype/vendor/urllib3/util/response.py b/pype/vendor/urllib3/util/response.py index 67cf730ab0..3d5486485a 100644 --- a/pype/vendor/urllib3/util/response.py +++ b/pype/vendor/urllib3/util/response.py @@ -59,8 +59,14 @@ def assert_header_parsing(headers): get_payload = getattr(headers, 'get_payload', None) unparsed_data = None - if get_payload: # Platform-specific: Python 3. - unparsed_data = get_payload() + if get_payload: + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload if defects or unparsed_data: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) diff --git a/pype/vendor/urllib3/util/retry.py b/pype/vendor/urllib3/util/retry.py index 7ad3dc6608..e7d0abd610 100644 --- a/pype/vendor/urllib3/util/retry.py +++ b/pype/vendor/urllib3/util/retry.py @@ -115,7 +115,7 @@ class Retry(object): (most errors are resolved immediately by a second try without a delay). 
urllib3 will sleep for:: - {backoff factor} * (2 ^ ({number of total retries} - 1)) + {backoff factor} * (2 ** ({number of total retries} - 1)) seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer diff --git a/pype/vendor/urllib3/util/ssl_.py b/pype/vendor/urllib3/util/ssl_.py index 2893752a3d..64ea192a85 100644 --- a/pype/vendor/urllib3/util/ssl_.py +++ b/pype/vendor/urllib3/util/ssl_.py @@ -56,9 +56,8 @@ except ImportError: OP_NO_COMPRESSION = 0x20000 -# Python 2.7 and earlier didn't have inet_pton on non-Linux -# so we fallback on inet_aton in those cases. This means that -# we can only detect IPv4 addresses in this case. +# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in +# those cases. This means that we can only detect IPv4 addresses in this case. if hasattr(socket, 'inet_pton'): inet_pton = socket.inet_pton else: @@ -67,7 +66,7 @@ else: import ipaddress def inet_pton(_, host): - if isinstance(host, six.binary_type): + if isinstance(host, bytes): host = host.decode('ascii') return ipaddress.ip_address(host) @@ -115,10 +114,7 @@ try: except ImportError: import sys - class SSLContext(object): # Platform-specific: Python 2 & 3.1 - supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or - (3, 2) <= sys.version_info) - + class SSLContext(object): # Platform-specific: Python 2 def __init__(self, protocol_version): self.protocol = protocol_version # Use default values from a real SSLContext @@ -141,12 +137,6 @@ except ImportError: raise SSLError("CA directories not supported in older Pythons") def set_ciphers(self, cipher_suite): - if not self.supports_set_ciphers: - raise TypeError( - 'Your version of Python does not support setting ' - 'a custom cipher suite. Please upgrade to Python ' - '2.7, 3.2, or later if you need this functionality.' - ) self.ciphers = cipher_suite def wrap_socket(self, socket, server_hostname=None, server_side=False): @@ -167,10 +157,7 @@ except ImportError: 'ssl_version': self.protocol, 'server_side': server_side, } - if self.supports_set_ciphers: # Platform-specific: Python 2.7+ - return wrap_socket(socket, ciphers=self.ciphers, **kwargs) - else: # Platform-specific: Python 2.6 - return wrap_socket(socket, **kwargs) + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) def assert_fingerprint(cert, fingerprint): @@ -276,6 +263,8 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, """ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + # Setting the default here, as we may have no ssl module on import cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs @@ -291,9 +280,6 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, context.options |= options - if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 - context.set_ciphers(ciphers or DEFAULT_CIPHERS) - context.verify_mode = cert_reqs if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 # We do our own verification, including fingerprints and alternative @@ -316,8 +302,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: - A string of ciphers we wish the client to support. This is not - supported on Python 2.6 as the ssl module does not support it. 
+ A string of ciphers we wish the client to support. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to @@ -334,7 +319,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if ca_certs or ca_cert_dir: try: context.load_verify_locations(ca_certs, ca_cert_dir) - except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 + except IOError as e: # Platform-specific: Python 2.7 raise SSLError(e) # Py33 raises FileNotFoundError which subclasses OSError # These are not equivalent unless we check the errno attribute @@ -378,7 +363,7 @@ def is_ipaddress(hostname): :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. """ - if six.PY3 and isinstance(hostname, six.binary_type): + if six.PY3 and isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. hostname = hostname.decode('ascii') diff --git a/pype/vendor/urllib3/util/wait.py b/pype/vendor/urllib3/util/wait.py index fa686eff48..4db71bafd8 100644 --- a/pype/vendor/urllib3/util/wait.py +++ b/pype/vendor/urllib3/util/wait.py @@ -43,9 +43,6 @@ if sys.version_info >= (3, 5): else: # Old and broken Pythons. def _retry_on_intr(fn, timeout): - if timeout is not None and timeout <= 0: - return fn(timeout) - if timeout is None: deadline = float("inf") else: @@ -117,7 +114,7 @@ def _have_working_poll(): # from libraries like eventlet/greenlet. try: poll_obj = select.poll() - poll_obj.poll(0) + _retry_on_intr(poll_obj.poll, 0) except (AttributeError, OSError): return False else: