Drop support for Python 2

Co-Authored-By: Dustin Ingram <di@users.noreply.github.com>
Co-Authored-By: Berker Peksag <berker.peksag@gmail.com>
Hugo 2018-08-01 15:31:17 +03:00
parent 78208c8c32
commit e974f30517
48 changed files with 260 additions and 3701 deletions


@ -6,8 +6,6 @@ ignore=
examples,
scripts,
_compat.py,
argparse_compat.py,
six.py,
_gaiohttp.py,
[MESSAGES CONTROL]
@ -53,5 +51,3 @@ disable=
useless-import-alias,
comparison-with-callable,
try-except-raise,
# TODO: use dict comprehensions once we drop Python 2.6 support.
consider-using-dict-comprehension,


@ -2,10 +2,6 @@ sudo: false
language: python
matrix:
include:
- python: 2.6
env: TOXENV=py26
- python: 2.7
env: TOXENV=py27
- python: 3.4
env: TOXENV=py34
- python: 3.5


@ -28,7 +28,7 @@ The documentation is hosted at http://docs.gunicorn.org.
Installation
------------
Gunicorn requires **Python 2.x >= 2.6** or **Python 3.x >= 3.4**.
Gunicorn requires **Python 3.x >= 3.4**.
Install from PyPI::


@ -3,15 +3,12 @@ environment:
matrix:
- TOXENV: lint
PYTHON: "C:\\Python35-x64"
- TOXENV: py27
PYTHON: "C:\\Python27-x64"
- TOXENV: py35
PYTHON: "C:\\Python35-x64"
- TOXENV: py36
PYTHON: "C:\\Python36-x64"
matrix:
allow_failures:
- TOXENV: py27
- TOXENV: py35
- TOXENV: py36
init: SET "PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"


@ -6,7 +6,7 @@ Requirements
To generate documentation you need to install:
- Python >= 2.5
- Python >= 3.4
- Sphinx (http://sphinx-doc.org/)


@ -50,15 +50,6 @@ Usage: python sitemap_gen.py --config=config.xml [--help] [--testing]
--testing, specified when user is experimenting
"""
# Please be careful that all syntax used in this file can be parsed on
# Python 1.5 -- this version check is not evaluated until after the
# entire file has been parsed.
import sys
if sys.hexversion < 0x02020000:
print 'This script requires Python 2.2 or later.'
print 'Currently run with version: %s' % sys.version
sys.exit(1)
import fnmatch
import glob
import gzip
@ -72,14 +63,6 @@ import urllib
import urlparse
import xml.sax
# True and False were introduced in Python2.2.2
try:
testTrue=True
del testTrue
except NameError:
True=1
False=0
# Text encodings
ENC_ASCII = 'ASCII'
ENC_UTF8 = 'UTF-8'


@ -21,8 +21,8 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
project = u'Gunicorn'
copyright = u'2009-%s, Benoit Chesneau' % time.strftime('%Y')
project = 'Gunicorn'
copyright = '2009-%s, Benoit Chesneau' % time.strftime('%Y')
# gunicorn version
import gunicorn
release = version = gunicorn.__version__
@ -55,19 +55,19 @@ latex_elements = {
}
latex_documents = [
('index', 'Gunicorn.tex', u'Gunicorn Documentation',
u'Benoit Chesneau', 'manual'),
('index', 'Gunicorn.tex', 'Gunicorn Documentation',
'Benoit Chesneau', 'manual'),
]
# -- Options for manual page output --------------------------------------------
man_pages = [
('index', 'gunicorn', u'Gunicorn Documentation',
[u'Benoit Chesneau'], 1)
('index', 'gunicorn', 'Gunicorn Documentation',
['Benoit Chesneau'], 1)
]
texinfo_documents = [
('index', 'Gunicorn', u'Gunicorn Documentation',
u'Benoit Chesneau', 'Gunicorn', 'One line description of project.',
('index', 'Gunicorn', 'Gunicorn Documentation',
'Benoit Chesneau', 'Gunicorn', 'One line description of project.',
'Miscellaneous'),
]


@ -23,7 +23,7 @@ Features
* Simple Python configuration
* Multiple worker configurations
* Various server hooks for extensibility
* Compatible with Python 2.x >= 2.6 or 3.x >= 3.4
* Compatible with Python 3.x >= 3.4
Contents


@ -4,7 +4,7 @@ Installation
.. highlight:: bash
:Requirements: **Python 2.x >= 2.6** or **Python 3.x >= 3.4**
:Requirements: **Python 3.x >= 3.4**
To install the latest released version of Gunicorn::


@ -200,7 +200,7 @@ def worker_int(worker):
## get traceback info
import threading, sys, traceback
id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
id2name = {th.ident: th.name for th in threading.enumerate()}
code = []
for threadId, stack in sys._current_frames().items():
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""),


@ -2,8 +2,6 @@
Use this config file in your script like this:
$ gunicorn project_name.wsgi:application -c read_django_settings.py
You need to replace the exec() call if you want it to work on Python 2.
"""
settings_dict = {}


@ -8,14 +8,10 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import unicode_literals
import multiprocessing
import gunicorn.app.base
from gunicorn.six import iteritems
def number_of_workers():
return (multiprocessing.cpu_count() * 2) + 1
@ -42,9 +38,9 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication):
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
config = {key: value for key, value in self.options.items()
if key in self.cfg.settings and value is not None}
for key, value in config.items():
self.cfg.set(key.lower(), value)
def load(self):
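As a usage sketch (assuming the example's handler_app WSGI callable, which is not shown in this hunk), the rewritten application is typically driven like this:

    if __name__ == '__main__':
        options = {
            'bind': '127.0.0.1:8080',
            'workers': number_of_workers(),
        }
        StandaloneApplication(handler_app, options).run()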


@ -1,10 +1,3 @@
import sys
from gunicorn import six
PY26 = (sys.version_info[:2] == (2, 6))
def _check_if_pyc(fname):
"""Return True if the extension is .pyc, False if .py
and None if otherwise"""
@ -62,147 +55,12 @@ def _get_codeobj(pyfile):
# Return code object
return code_obj
if six.PY3:
def execfile_(fname, *args):
if fname.endswith(".pyc"):
code = _get_codeobj(fname)
else:
code = compile(open(fname, 'rb').read(), fname, 'exec')
return six.exec_(code, *args)
def bytes_to_str(b):
if isinstance(b, six.text_type):
return b
return str(b, 'latin1')
import urllib.parse
def unquote_to_wsgi_str(string):
return _unquote_to_bytes(string).decode('latin-1')
_unquote_to_bytes = urllib.parse.unquote_to_bytes
else:
def execfile_(fname, *args):
""" Overriding PY2 execfile() implementation to support .pyc files """
if fname.endswith(".pyc"):
return six.exec_(_get_codeobj(fname), *args)
return execfile(fname, *args)
def bytes_to_str(s):
if isinstance(s, unicode):
return s.encode('utf-8')
return s
import urllib
unquote_to_wsgi_str = urllib.unquote
def execfile_(fname, *args):
if fname.endswith(".pyc"):
code = _get_codeobj(fname)
else:
code = compile(open(fname, 'rb').read(), fname, 'exec')
return exec(code, *args)
# The following code adapted from trollius.py33_exceptions
def _wrap_error(exc, mapping, key):
if key not in mapping:
return
new_err_cls = mapping[key]
new_err = new_err_cls(*exc.args)
# raise a new exception with the original traceback
six.reraise(new_err_cls, new_err,
exc.__traceback__ if hasattr(exc, '__traceback__') else sys.exc_info()[2])
if PY26:
from urlparse import (
_parse_cache, MAX_CACHE_SIZE, clear_cache, _splitnetloc, SplitResult,
scheme_chars,
)
def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
Return a 5-tuple: (scheme, netloc, path, query, fragment).
Note that we don't break the components up in smaller bits
(e.g. netloc is a single string) and we don't expand % escapes."""
allow_fragments = bool(allow_fragments)
key = url, scheme, allow_fragments, type(url), type(scheme)
cached = _parse_cache.get(key, None)
if cached:
return cached
if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
clear_cache()
netloc = query = fragment = ''
i = url.find(':')
if i > 0:
if url[:i] == 'http': # optimize the common case
scheme = url[:i].lower()
url = url[i+1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
for c in url[:i]:
if c not in scheme_chars:
break
else:
# make sure "url" is not actually a port number (in which case
# "scheme" is really part of the path)
rest = url[i+1:]
if not rest or any(c not in '0123456789' for c in rest):
# not a port number
scheme, url = url[:i].lower(), rest
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
else:
from gunicorn.six.moves.urllib.parse import urlsplit
import inspect
if hasattr(inspect, 'signature'):
positionals = (
inspect.Parameter.POSITIONAL_ONLY,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
)
def get_arity(f):
sig = inspect.signature(f)
arity = 0
for param in sig.parameters.values():
if param.kind in positionals:
arity += 1
return arity
else:
def get_arity(f):
return len(inspect.getargspec(f)[0])
try:
import html
def html_escape(s):
return html.escape(s)
except ImportError:
import cgi
def html_escape(s):
return cgi.escape(s, quote=True)
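The removed shims above map directly onto the Python 3 standard library; a minimal sketch of the replacements, the same idioms the config.py and util.py hunks further down now rely on:

    import html
    import inspect
    import urllib.parse

    # html.escape replaces the cgi.escape fallback
    escaped = html.escape('<b>"x" & y</b>', quote=True)

    # urllib.parse.urlsplit replaces the backported Python 2.6 urlsplit
    parts = urllib.parse.urlsplit('http://example.com/path?q=1#frag')

    # inspect.signature replaces the getargspec-based arity check
    def get_arity(f):
        positionals = (inspect.Parameter.POSITIONAL_ONLY,
                       inspect.Parameter.POSITIONAL_OR_KEYWORD)
        return sum(1 for p in inspect.signature(f).parameters.values()
                   if p.kind in positionals)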


@ -2,8 +2,6 @@
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function
import os
import sys
import traceback


@ -2,7 +2,6 @@
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function
# pylint: skip-file


@ -2,8 +2,6 @@
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function
import errno
import os
import random

File diff suppressed because it is too large


@ -5,27 +5,21 @@
# Please remember to run "make -C docs html" after update "desc" attributes.
import argparse
import copy
import grp
import inspect
try:
import argparse
except ImportError: # python 2.6
from . import argparse_compat as argparse
import os
import pwd
import re
import shlex
import ssl
import sys
import textwrap
import shlex
from gunicorn import __version__
from gunicorn import _compat
from gunicorn import __version__, util
from gunicorn.errors import ConfigError
from gunicorn.reloader import reloader_engines
from gunicorn import six
from gunicorn import util
KNOWN_SETTINGS = []
PLATFORM = sys.platform
@ -122,7 +116,7 @@ class Config(object):
@property
def address(self):
s = self.settings['bind'].get()
return [util.parse_address(_compat.bytes_to_str(bind)) for bind in s]
return [util.parse_address(util.bytes_to_str(bind)) for bind in s]
@property
def uid(self):
@ -183,7 +177,7 @@ class Config(object):
return env
for e in raw_env:
s = _compat.bytes_to_str(e)
s = util.bytes_to_str(e)
try:
k, v = s.split('=', 1)
except ValueError:
@ -216,7 +210,7 @@ class Config(object):
global_conf = {}
for e in raw_global_conf:
s = _compat.bytes_to_str(e)
s = util.bytes_to_str(e)
try:
k, v = re.split(r'(?<!\\)=', s, 1)
except ValueError:
@ -305,7 +299,7 @@ class Setting(object):
return self.value
def set(self, val):
if not six.callable(self.validator):
if not callable(self.validator):
raise TypeError('Invalid validator: %s' % self.name)
self.value = self.validator(val)
@ -323,7 +317,7 @@ def validate_bool(val):
if isinstance(val, bool):
return val
if not isinstance(val, six.string_types):
if not isinstance(val, str):
raise TypeError("Invalid type for casting: %s" % val)
if val.lower().strip() == "true":
return True
@ -340,7 +334,7 @@ def validate_dict(val):
def validate_pos_int(val):
if not isinstance(val, six.integer_types):
if not isinstance(val, int):
val = int(val, 0)
else:
# Booleans are ints!
@ -353,7 +347,7 @@ def validate_pos_int(val):
def validate_string(val):
if val is None:
return None
if not isinstance(val, six.string_types):
if not isinstance(val, str):
raise TypeError("Not a string: %s" % val)
return val.strip()
@ -371,7 +365,7 @@ def validate_list_string(val):
return []
# legacy syntax
if isinstance(val, six.string_types):
if isinstance(val, str):
val = [val]
return [validate_string(v) for v in val]
@ -400,7 +394,7 @@ def validate_class(val):
def validate_callable(arity):
def _validate_callable(val):
if isinstance(val, six.string_types):
if isinstance(val, str):
try:
mod_name, obj_name = val.rsplit(".", 1)
except ValueError:
@ -414,9 +408,9 @@ def validate_callable(arity):
except AttributeError:
raise TypeError("Can not load '%s' from '%s'"
"" % (obj_name, mod_name))
if not six.callable(val):
raise TypeError("Value is not six.callable: %s" % val)
if arity != -1 and arity != _compat.get_arity(val):
if not callable(val):
raise TypeError("Value is not callable: %s" % val)
if arity != -1 and arity != util.get_arity(val):
raise TypeError("Value must have an arity of: %s" % arity)
return val
return _validate_callable
@ -454,7 +448,7 @@ def validate_group(val):
def validate_post_request(val):
val = validate_callable(-1)(val)
largs = _compat.get_arity(val)
largs = util.get_arity(val)
if largs == 4:
return val
elif largs == 3:
@ -1540,7 +1534,7 @@ class OnStarting(Setting):
name = "on_starting"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def on_starting(server):
pass
@ -1556,7 +1550,7 @@ class OnReload(Setting):
name = "on_reload"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def on_reload(server):
pass
@ -1572,7 +1566,7 @@ class WhenReady(Setting):
name = "when_ready"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def when_ready(server):
pass
@ -1588,7 +1582,7 @@ class Prefork(Setting):
name = "pre_fork"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
type = callable
def pre_fork(server, worker):
pass
@ -1605,7 +1599,7 @@ class Postfork(Setting):
name = "post_fork"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
type = callable
def post_fork(server, worker):
pass
@ -1622,7 +1616,7 @@ class PostWorkerInit(Setting):
name = "post_worker_init"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def post_worker_init(worker):
pass
@ -1639,7 +1633,7 @@ class WorkerInt(Setting):
name = "worker_int"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def worker_int(worker):
pass
@ -1657,7 +1651,7 @@ class WorkerAbort(Setting):
name = "worker_abort"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def worker_abort(worker):
pass
@ -1677,7 +1671,7 @@ class PreExec(Setting):
name = "pre_exec"
section = "Server Hooks"
validator = validate_callable(1)
type = six.callable
type = callable
def pre_exec(server):
pass
@ -1693,7 +1687,7 @@ class PreRequest(Setting):
name = "pre_request"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
type = callable
def pre_request(worker, req):
worker.log.debug("%s %s" % (req.method, req.path))
@ -1710,7 +1704,7 @@ class PostRequest(Setting):
name = "post_request"
section = "Server Hooks"
validator = validate_post_request
type = six.callable
type = callable
def post_request(worker, req, environ, resp):
pass
@ -1727,7 +1721,7 @@ class ChildExit(Setting):
name = "child_exit"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
type = callable
def child_exit(server, worker):
pass
@ -1746,7 +1740,7 @@ class WorkerExit(Setting):
name = "worker_exit"
section = "Server Hooks"
validator = validate_callable(2)
type = six.callable
type = callable
def worker_exit(server, worker):
pass
@ -1763,7 +1757,7 @@ class NumWorkersChanged(Setting):
name = "nworkers_changed"
section = "Server Hooks"
validator = validate_callable(3)
type = six.callable
type = callable
def nworkers_changed(server, new_value, old_value):
pass
@ -1916,16 +1910,15 @@ class DoHandshakeOnConnect(Setting):
"""
if sys.version_info >= (2, 7):
class Ciphers(Setting):
name = "ciphers"
section = "SSL"
cli = ["--ciphers"]
validator = validate_string
default = 'TLSv1'
desc = """\
Ciphers to use (see stdlib ssl module's)
"""
class Ciphers(Setting):
name = "ciphers"
section = "SSL"
cli = ["--ciphers"]
validator = validate_string
default = 'TLSv1'
desc = """\
Ciphers to use (see stdlib ssl module's)
"""
class PasteGlobalConf(Setting):
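As the hunks above show, validators now rely on the built-in callable() and util.get_arity(). A short usage sketch (the hook below is illustrative):

    from gunicorn.config import validate_callable

    def on_starting(server):
        pass

    validator = validate_callable(1)   # server hooks that take one argument
    validator(on_starting)             # accepted: callable with arity 1

    # A dotted-path string such as "myproject.hooks.on_starting" would be
    # imported and resolved to a callable before the same checks run.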


@ -8,12 +8,8 @@ import binascii
import time
import logging
logging.Logger.manager.emittedNoHandlerWarning = 1
from logging.config import dictConfig
from logging.config import fileConfig
try:
from logging.config import dictConfig
except ImportError:
# python 2.6
dictConfig = None
import os
import socket
import sys
@ -21,7 +17,6 @@ import threading
import traceback
from gunicorn import util
from gunicorn.six import PY3, string_types
# syslog facility codes
@ -104,7 +99,7 @@ class SafeAtoms(dict):
def __init__(self, atoms):
dict.__init__(self)
for key, value in atoms.items():
if isinstance(value, string_types):
if isinstance(value, str):
self[key] = value.replace('"', '\\"')
else:
self[key] = value
@ -231,11 +226,7 @@ class Logger(object):
self.access_log, cfg, self.syslog_fmt, "access"
)
if dictConfig is None and cfg.logconfig_dict:
util.warn("Dictionary-based log configuration requires "
"Python 2.7 or above.")
if dictConfig and cfg.logconfig_dict:
if cfg.logconfig_dict:
config = CONFIG_DEFAULTS.copy()
config.update(cfg.logconfig_dict)
try:
@ -277,7 +268,7 @@ class Logger(object):
self.error_log.exception(msg, *args, **kwargs)
def log(self, lvl, msg, *args, **kwargs):
if isinstance(lvl, string_types):
if isinstance(lvl, str):
lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO)
self.error_log.log(lvl, msg, *args, **kwargs)
@ -318,18 +309,18 @@ class Logger(object):
if hasattr(req_headers, "items"):
req_headers = req_headers.items()
atoms.update(dict([("{%s}i" % k.lower(), v) for k, v in req_headers]))
atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers})
resp_headers = resp.headers
if hasattr(resp_headers, "items"):
resp_headers = resp_headers.items()
# add response headers
atoms.update(dict([("{%s}o" % k.lower(), v) for k, v in resp_headers]))
atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers})
# add environ variables
environ_variables = environ.items()
atoms.update(dict([("{%s}e" % k.lower(), v) for k, v in environ_variables]))
atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables})
return atoms
@ -444,14 +435,8 @@ class Logger(object):
socktype, addr = parse_syslog_address(cfg.syslog_addr)
# finally setup the syslog handler
if sys.version_info >= (2, 7):
h = logging.handlers.SysLogHandler(address=addr,
facility=facility, socktype=socktype)
else:
# socktype is only supported in 2.7 and sup
# fix issue #541
h = logging.handlers.SysLogHandler(address=addr,
facility=facility)
h = logging.handlers.SysLogHandler(address=addr,
facility=facility, socktype=socktype)
h.setFormatter(fmt)
h._gunicorn = True
@ -467,8 +452,8 @@ class Logger(object):
# b64decode doesn't accept unicode in Python < 3.3
# so we need to convert it to a byte string
auth = base64.b64decode(auth[1].strip().encode('utf-8'))
if PY3: # b64decode returns a byte string in Python 3
auth = auth.decode('utf-8')
# b64decode returns a byte string
auth = auth.decode('utf-8')
auth = auth.split(":", 1)
except (TypeError, binascii.Error, UnicodeDecodeError) as exc:
self.debug("Couldn't get username: %s", exc)
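A minimal standalone sketch of the simplified Python 3 path for pulling the username out of a Basic Authorization header (the header value is illustrative):

    import base64

    auth_header = "Basic dXNlcjpzZWNyZXQ="   # "user:secret", base64-encoded
    scheme, _, payload = auth_header.partition(" ")
    if scheme.lower() == "basic":
        # b64decode returns bytes on Python 3, so decode before splitting
        decoded = base64.b64decode(payload.strip().encode("utf-8")).decode("utf-8")
        username = decoded.split(":", 1)[0]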


@ -1,67 +0,0 @@
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import errno
import os
import sys
try:
import ctypes
import ctypes.util
except MemoryError:
# selinux execmem denial
# https://bugzilla.redhat.com/show_bug.cgi?id=488396
raise ImportError
SUPPORTED_PLATFORMS = (
'darwin',
'freebsd',
'dragonfly',
'linux2')
if sys.platform not in SUPPORTED_PLATFORMS:
raise ImportError("sendfile isn't supported on this platform")
_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
_sendfile = _libc.sendfile
def sendfile(fdout, fdin, offset, nbytes):
if sys.platform == 'darwin':
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
ctypes.POINTER(ctypes.c_uint64), ctypes.c_voidp,
ctypes.c_int]
_nbytes = ctypes.c_uint64(nbytes)
result = _sendfile(fdin, fdout, offset, _nbytes, None, 0)
if result == -1:
e = ctypes.get_errno()
if e == errno.EAGAIN and _nbytes.value is not None:
return _nbytes.value
raise OSError(e, os.strerror(e))
return _nbytes.value
elif sys.platform in ('freebsd', 'dragonfly',):
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
ctypes.c_uint64, ctypes.c_voidp,
ctypes.POINTER(ctypes.c_uint64), ctypes.c_int]
_sbytes = ctypes.c_uint64()
result = _sendfile(fdin, fdout, offset, nbytes, None, _sbytes, 0)
if result == -1:
e = ctypes.get_errno()
if e == errno.EAGAIN and _sbytes.value is not None:
return _sbytes.value
raise OSError(e, os.strerror(e))
return _sbytes.value
else:
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int,
ctypes.POINTER(ctypes.c_uint64), ctypes.c_size_t]
_offset = ctypes.c_uint64(offset)
sent = _sendfile(fdout, fdin, _offset, nbytes)
if sent == -1:
e = ctypes.get_errno()
raise OSError(e, os.strerror(e))
return sent


@ -3,19 +3,21 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import io
import sys
from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator,
InvalidChunkSize)
from gunicorn import six
class ChunkedReader(object):
def __init__(self, req, unreader):
self.req = req
self.parser = self.parse_chunked(unreader)
self.buf = six.BytesIO()
self.buf = io.BytesIO()
def read(self, size):
if not isinstance(size, six.integer_types):
if not isinstance(size, int):
raise TypeError("size must be an integral type")
if size < 0:
raise ValueError("Size must be positive.")
@ -25,19 +27,19 @@ class ChunkedReader(object):
if self.parser:
while self.buf.tell() < size:
try:
self.buf.write(six.next(self.parser))
self.buf.write(next(self.parser))
except StopIteration:
self.parser = None
break
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.buf.write(rest)
return ret
def parse_trailers(self, unreader, data):
buf = six.BytesIO()
buf = io.BytesIO()
buf.write(data)
idx = buf.getvalue().find(b"\r\n\r\n")
@ -71,7 +73,7 @@ class ChunkedReader(object):
(size, rest) = self.parse_chunk_size(unreader, data=rest[2:])
def parse_chunk_size(self, unreader, data=None):
buf = six.BytesIO()
buf = io.BytesIO()
if data is not None:
buf.write(data)
@ -110,7 +112,7 @@ class LengthReader(object):
self.length = length
def read(self, size):
if not isinstance(size, six.integer_types):
if not isinstance(size, int):
raise TypeError("size must be an integral type")
size = min(self.length, size)
@ -119,7 +121,7 @@ class LengthReader(object):
if size == 0:
return b""
buf = six.BytesIO()
buf = io.BytesIO()
data = self.unreader.read()
while data:
buf.write(data)
@ -137,11 +139,11 @@ class LengthReader(object):
class EOFReader(object):
def __init__(self, unreader):
self.unreader = unreader
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.finished = False
def read(self, size):
if not isinstance(size, six.integer_types):
if not isinstance(size, int):
raise TypeError("size must be an integral type")
if size < 0:
raise ValueError("Size must be positive.")
@ -151,7 +153,7 @@ class EOFReader(object):
if self.finished:
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.buf.write(rest)
return ret
@ -167,7 +169,7 @@ class EOFReader(object):
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.buf.write(rest)
return ret
@ -175,7 +177,7 @@ class EOFReader(object):
class Body(object):
def __init__(self, reader):
self.reader = reader
self.buf = six.BytesIO()
self.buf = io.BytesIO()
def __iter__(self):
return self
@ -189,11 +191,11 @@ class Body(object):
def getsize(self, size):
if size is None:
return six.MAXSIZE
elif not isinstance(size, six.integer_types):
return sys.maxsize
elif not isinstance(size, int):
raise TypeError("size must be an integral type")
elif size < 0:
return six.MAXSIZE
return sys.maxsize
return size
def read(self, size=None):
@ -204,7 +206,7 @@ class Body(object):
if size < self.buf.tell():
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.buf.write(rest)
return ret
@ -216,7 +218,7 @@ class Body(object):
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.buf.write(rest)
return ret
@ -226,7 +228,7 @@ class Body(object):
return b""
data = self.buf.getvalue()
self.buf = six.BytesIO()
self.buf = io.BytesIO()
ret = []
while 1:


@ -3,11 +3,11 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import io
import re
import socket
from errno import ENOTCONN
from gunicorn._compat import bytes_to_str
from gunicorn.http.unreader import SocketUnreader
from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body
from gunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData,
@ -15,8 +15,7 @@ from gunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData,
LimitRequestLine, LimitRequestHeaders)
from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from gunicorn.http.errors import InvalidSchemeHeaders
from gunicorn.six import BytesIO, string_types
from gunicorn.util import split_request_uri
from gunicorn.util import bytes_to_str, split_request_uri
MAX_REQUEST_LINE = 8190
MAX_HEADERS = 32768
@ -76,7 +75,7 @@ class Message(object):
remote_host = remote_addr[0]
if remote_host in cfg.forwarded_allow_ips:
secure_scheme_headers = cfg.secure_scheme_headers
elif isinstance(remote_addr, string_types):
elif isinstance(remote_addr, str):
secure_scheme_headers = cfg.secure_scheme_headers
# Parse headers into key/value pairs paying attention
@ -189,7 +188,7 @@ class Request(Message):
buf.write(data)
def parse(self, unreader):
buf = BytesIO()
buf = io.BytesIO()
self.get_data(unreader, buf, stop=True)
# get request line
@ -198,12 +197,12 @@ class Request(Message):
# proxy protocol
if self.proxy_protocol(bytes_to_str(line)):
# get next request line
buf = BytesIO()
buf = io.BytesIO()
buf.write(rbuf)
line, rbuf = self.read_line(unreader, buf, self.limit_request_line)
self.parse_request_line(line)
buf = BytesIO()
buf = io.BytesIO()
buf.write(rbuf)
# Headers


@ -3,23 +3,22 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import io
import os
from gunicorn import six
# Classes that can undo reading data from
# a given type of data source.
class Unreader(object):
def __init__(self):
self.buf = six.BytesIO()
self.buf = io.BytesIO()
def chunk(self):
raise NotImplementedError()
def read(self, size=None):
if size is not None and not isinstance(size, six.integer_types):
if size is not None and not isinstance(size, int):
raise TypeError("size parameter must be an int or long.")
if size is not None:
@ -32,7 +31,7 @@ class Unreader(object):
if size is None and self.buf.tell():
ret = self.buf.getvalue()
self.buf = six.BytesIO()
self.buf = io.BytesIO()
return ret
if size is None:
d = self.chunk()
@ -42,11 +41,11 @@ class Unreader(object):
chunk = self.chunk()
if not chunk:
ret = self.buf.getvalue()
self.buf = six.BytesIO()
self.buf = io.BytesIO()
return ret
self.buf.write(chunk)
data = self.buf.getvalue()
self.buf = six.BytesIO()
self.buf = io.BytesIO()
self.buf.write(data[size:])
return data[:size]
@ -74,7 +73,7 @@ class IterUnreader(Unreader):
if not self.iter:
return b""
try:
return six.next(self.iter)
return next(self.iter)
except StopIteration:
self.iter = None
return b""


@ -9,22 +9,11 @@ import os
import re
import sys
from gunicorn._compat import unquote_to_wsgi_str
from gunicorn.http.message import HEADER_RE
from gunicorn.http.errors import InvalidHeader, InvalidHeaderName
from gunicorn.six import string_types, binary_type, reraise
from gunicorn import SERVER_SOFTWARE
import gunicorn.util as util
try:
# Python 3.3 has os.sendfile().
from os import sendfile
except ImportError:
try:
from ._sendfile import sendfile
except ImportError:
sendfile = None
# Send files in at most 1GB blocks as some operating systems can have problems
# with sending files in blocks over 2GB.
BLKSIZE = 0x3FFFFFFF
@ -155,9 +144,9 @@ def create(req, sock, client, server, cfg):
# authors should be aware that REMOTE_HOST and REMOTE_ADDR
# may not qualify the remote addr:
# http://www.ietf.org/rfc/rfc3875
if isinstance(client, string_types):
if isinstance(client, str):
environ['REMOTE_ADDR'] = client
elif isinstance(client, binary_type):
elif isinstance(client, bytes):
environ['REMOTE_ADDR'] = client.decode()
else:
environ['REMOTE_ADDR'] = client[0]
@ -167,7 +156,7 @@ def create(req, sock, client, server, cfg):
# Normally only the application should use the Host header but since the
# WSGI spec doesn't support unix sockets, we are using it to create
# viable SERVER_* if possible.
if isinstance(server, string_types):
if isinstance(server, str):
server = server.split(":")
if len(server) == 1:
# unix socket
@ -191,7 +180,7 @@ def create(req, sock, client, server, cfg):
path_info = req.path
if script_name:
path_info = path_info.split(script_name, 1)[1]
environ['PATH_INFO'] = unquote_to_wsgi_str(path_info)
environ['PATH_INFO'] = util.unquote_to_wsgi_str(path_info)
environ['SCRIPT_NAME'] = script_name
# override the environ with the correct remote and server address if
@ -234,7 +223,7 @@ class Response(object):
if exc_info:
try:
if self.status and self.headers_sent:
reraise(exc_info[0], exc_info[1], exc_info[2])
util.reraise(exc_info[0], exc_info[1], exc_info[2])
finally:
exc_info = None
elif self.status is not None:
@ -256,7 +245,7 @@ class Response(object):
def process_headers(self, headers):
for name, value in headers:
if not isinstance(name, string_types):
if not isinstance(name, str):
raise TypeError('%r is not a string' % name)
if HEADER_RE.search(name):
@ -331,7 +320,7 @@ class Response(object):
def write(self, arg):
self.send_headers()
if not isinstance(arg, binary_type):
if not isinstance(arg, bytes):
raise TypeError('%r is not a byte' % arg)
arglen = len(arg)
tosend = arglen
@ -353,7 +342,7 @@ class Response(object):
util.write(self.sock, arg, self.chunked)
def can_sendfile(self):
return self.cfg.sendfile is not False and sendfile is not None
return self.cfg.sendfile is not False
def sendfile(self, respiter):
if self.cfg.is_ssl or not self.can_sendfile():
@ -390,7 +379,7 @@ class Response(object):
while sent != nbytes:
count = min(nbytes - sent, BLKSIZE)
sent += sendfile(sockno, fileno, offset + sent, count)
sent += os.sendfile(sockno, fileno, offset + sent, count)
if self.is_chunked():
self.sock.sendall(b"\r\n")
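For context, a minimal sketch of the resulting transmit loop built directly on os.sendfile, sending at most BLKSIZE bytes per call; the wrapper name sendfile_all is illustrative, and sockno, fileno, offset and nbytes are as in the surrounding method:

    import os

    BLKSIZE = 0x3FFFFFFF   # at most ~1GB per call, as noted earlier in this file

    def sendfile_all(sockno, fileno, offset, nbytes):
        sent = 0
        while sent != nbytes:
            count = min(nbytes - sent, BLKSIZE)
            sent += os.sendfile(sockno, fileno, offset + sent, count)
        return sent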


@ -5,12 +5,11 @@
"Bare-bones implementation of statsD's protocol, client-side"
import socket
import logging
import socket
from re import sub
from gunicorn.glogging import Logger
from gunicorn import six
# Instrumentation constants
METRIC_VAR = "metric"
@ -115,7 +114,7 @@ class Statsd(Logger):
def _sock_send(self, msg):
try:
if isinstance(msg, six.text_type):
if isinstance(msg, str):
msg = msg.encode("ascii")
if self.sock:
self.sock.send(msg)
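A tiny sketch of the now Python 3-only encoding step before the metric hits the socket (the metric string is illustrative):

    msg = "gunicorn.request.duration:12|ms"
    if isinstance(msg, str):
        msg = msg.encode("ascii")   # statsd datagrams are sent as bytes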


@ -1,762 +0,0 @@
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2014 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.8.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
# This is a bit ugly, but it avoids running this again.
delattr(obj.__class__, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
else:
def iterkeys(d, **kw):
return iter(d.iterkeys(**kw))
def itervalues(d, **kw):
return iter(d.itervalues(**kw))
def iteritems(d, **kw):
return iter(d.iteritems(**kw))
def iterlists(d, **kw):
return iter(d.iterlists(**kw))
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
def iterbytes(buf):
return (ord(byte) for byte in buf)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)


@ -11,7 +11,6 @@ import sys
import time
from gunicorn import util
from gunicorn.six import string_types
class BaseSocket(object):
@ -133,7 +132,7 @@ def _sock_type(addr):
sock_type = TCP6Socket
else:
sock_type = TCPSocket
elif isinstance(addr, string_types):
elif isinstance(addr, str):
sock_type = UnixSocket
else:
raise TypeError("Unable to create socket from: %r" % addr)


@ -3,30 +3,29 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function
import email.utils
import errno
import fcntl
import html
import inspect
import io
import logging
import os
import pkg_resources
import pwd
import random
import re
import socket
import sys
import textwrap
import time
import traceback
import inspect
import errno
import warnings
import logging
import re
from gunicorn import _compat
import pkg_resources
from gunicorn.errors import AppImportError
from gunicorn.six import text_type
from gunicorn.workers import SUPPORTED_WORKERS
import urllib.parse
REDIRECT_TO = getattr(os, 'devnull', '/dev/null')
@ -140,6 +139,23 @@ def load_class(uri, default="gunicorn.workers.sync.SyncWorker",
return getattr(mod, klass)
positionals = (
inspect.Parameter.POSITIONAL_ONLY,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
)
def get_arity(f):
sig = inspect.signature(f)
arity = 0
for param in sig.parameters.values():
if param.kind in positionals:
arity += 1
return arity
def get_username(uid):
""" get the username for a user id"""
return pwd.getpwuid(uid).pw_name
@ -169,7 +185,6 @@ def set_owner_process(uid, gid, initgroups=False):
def chown(path, uid, gid):
gid = abs(gid) & 0x7FFFFFFF # see note above.
os.chown(path, uid, gid)
@ -291,7 +306,7 @@ except ImportError:
def write_chunk(sock, data):
if isinstance(data, text_type):
if isinstance(data, str):
data = data.encode('utf-8')
chunk_size = "%X\r\n" % len(data)
chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"])
@ -317,7 +332,7 @@ def write_nonblock(sock, data, chunked=False):
def write_error(sock, status_int, reason, mesg):
html = textwrap.dedent("""\
html_error = textwrap.dedent("""\
<html>
<head>
<title>%(reason)s</title>
@ -327,7 +342,7 @@ def write_error(sock, status_int, reason, mesg):
%(mesg)s
</body>
</html>
""") % {"reason": reason, "mesg": _compat.html_escape(mesg)}
""") % {"reason": reason, "mesg": html.escape(mesg)}
http = textwrap.dedent("""\
HTTP/1.1 %s %s\r
@ -335,7 +350,7 @@ def write_error(sock, status_int, reason, mesg):
Content-Type: text/html\r
Content-Length: %d\r
\r
%s""") % (str(status_int), reason, len(html), html)
%s""") % (str(status_int), reason, len(html_error), html_error)
write_nonblock(sock, http.encode('latin1'))
@ -501,7 +516,7 @@ def to_bytestring(value, encoding="utf8"):
"""Converts a string argument to a byte string"""
if isinstance(value, bytes):
return value
if not isinstance(value, text_type):
if not isinstance(value, str):
raise TypeError('%r is not a string' % value)
return value.encode(encoding)
@ -551,7 +566,30 @@ def split_request_uri(uri):
# relative uri while the RFC says we should consider it as abs_path
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
# We use temporary dot prefix to workaround this behaviour
parts = _compat.urlsplit("." + uri)
parts = urllib.parse.urlsplit("." + uri)
return parts._replace(path=parts.path[1:])
return _compat.urlsplit(uri)
return urllib.parse.urlsplit(uri)
# From six.reraise
def reraise(tp, value, tb=None):
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
def bytes_to_str(b):
if isinstance(b, str):
return b
return str(b, 'latin1')
def unquote_to_wsgi_str(string):
return urllib.parse.unquote_to_bytes(string).decode('latin-1')
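The temporary dot prefix in split_request_uri matters because urllib.parse.urlsplit reads a leading "//" as a network location rather than a path. A small standalone check of that behaviour:

import urllib.parse

# Without the workaround, "//static/x" is parsed with netloc "static":
assert urllib.parse.urlsplit("//static/x").netloc == "static"
assert urllib.parse.urlsplit("//static/x").path == "/x"

# Prefixing "." forces it to be read as a path; the dot is then stripped:
parts = urllib.parse.urlsplit("." + "//static/x")
parts = parts._replace(path=parts.path[1:])
assert parts.netloc == ""
assert parts.path == "//static/x"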

View File

@ -3,20 +3,14 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import sys
# supported gunicorn workers.
SUPPORTED_WORKERS = {
"sync": "gunicorn.workers.sync.SyncWorker",
"eventlet": "gunicorn.workers.geventlet.EventletWorker",
"gaiohttp": "gunicorn.workers.gaiohttp.AiohttpWorker",
"gevent": "gunicorn.workers.ggevent.GeventWorker",
"gevent_wsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker",
"gevent_pywsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker",
"tornado": "gunicorn.workers.gtornado.TornadoWorker",
"gthread": "gunicorn.workers.gthread.ThreadWorker",
}
if sys.version_info >= (3, 4):
# gaiohttp worker can be used with Python 3.4+ only.
SUPPORTED_WORKERS["gaiohttp"] = "gunicorn.workers.gaiohttp.AiohttpWorker"

View File

@ -3,27 +3,27 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from datetime import datetime
import io
import os
from random import randint
import signal
from ssl import SSLError
import sys
import time
import traceback
from datetime import datetime
from random import randint
from ssl import SSLError
from gunicorn import six
from gunicorn import util
from gunicorn.workers.workertmp import WorkerTmp
from gunicorn.reloader import reloader_engines
from gunicorn.http.errors import (
InvalidHeader, InvalidHeaderName, InvalidRequestLine, InvalidRequestMethod,
InvalidHTTPVersion, LimitRequestLine, LimitRequestHeaders,
ForbiddenProxyRequest, InvalidHeader,
InvalidHeaderName, InvalidHTTPVersion,
InvalidProxyLine, InvalidRequestLine,
InvalidRequestMethod, InvalidSchemeHeaders,
LimitRequestHeaders, LimitRequestLine,
)
from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from gunicorn.http.errors import InvalidSchemeHeaders
from gunicorn.http.wsgi import default_environ, Response
from gunicorn.six import MAXSIZE
from gunicorn.http.wsgi import Response, default_environ
from gunicorn.reloader import reloader_engines
from gunicorn.workers.workertmp import WorkerTmp
class Worker(object):
@ -52,7 +52,7 @@ class Worker(object):
self.nr = 0
jitter = randint(0, cfg.max_requests_jitter)
self.max_requests = cfg.max_requests + jitter or MAXSIZE
self.max_requests = cfg.max_requests + jitter or sys.maxsize
self.alive = True
self.log = log
self.tmp = WorkerTmp(cfg)
@ -150,7 +150,7 @@ class Worker(object):
_, exc_val, exc_tb = sys.exc_info()
self.reloader.add_extra_file(exc_val.filename)
tb_string = six.StringIO()
tb_string = io.StringIO()
traceback.print_tb(exc_tb, file=tb_string)
self.wsgi = util.make_fail_app(tb_string.getvalue())
finally:
@ -170,9 +170,8 @@ class Worker(object):
# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
if hasattr(signal, 'siginterrupt'): # python >= 2.6
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
if hasattr(signal, 'set_wakeup_fd'):
signal.set_wakeup_fd(self.PIPE[1])
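Swapping six's MAXSIZE for sys.maxsize keeps the same behaviour: when max_requests is 0 (disabled) and the jitter is 0, the `or` falls through to an effectively unlimited request count. Roughly:

import sys
from random import randint

def effective_max_requests(max_requests, max_requests_jitter):
    jitter = randint(0, max_requests_jitter)
    # 0 + 0 is falsy, so a disabled setting becomes "practically never restart"
    return (max_requests + jitter) or sys.maxsize

assert effective_max_requests(0, 0) == sys.maxsize
assert 1000 <= effective_max_requests(1000, 50) <= 1050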

View File

@ -13,7 +13,6 @@ import gunicorn.http as http
import gunicorn.http.wsgi as wsgi
import gunicorn.util as util
import gunicorn.workers.base as base
from gunicorn import six
ALREADY_HANDLED = object()
@ -38,7 +37,7 @@ class AsyncWorker(base.Worker):
try:
listener_name = listener.getsockname()
if not self.cfg.keepalive:
req = six.next(parser)
req = next(parser)
self.handle_request(listener_name, req, client, addr)
else:
# keepalive loop
@ -46,7 +45,7 @@ class AsyncWorker(base.Worker):
while True:
req = None
with self.timeout_ctx():
req = six.next(parser)
req = next(parser)
if not req:
break
if req.proxy_protocol_info:
@ -60,10 +59,10 @@ class AsyncWorker(base.Worker):
self.log.debug("Closing connection. %s", e)
except ssl.SSLError:
# pass to next try-except level
six.reraise(*sys.exc_info())
util.reraise(*sys.exc_info())
except EnvironmentError:
# pass to next try-except level
six.reraise(*sys.exc_info())
util.reraise(*sys.exc_info())
except Exception as e:
self.handle_error(req, client, addr, e)
except ssl.SSLError as e:
@ -126,7 +125,7 @@ class AsyncWorker(base.Worker):
except EnvironmentError:
# If the original exception was a socket.error we delegate
# handling it to the caller (where handle() might ignore it)
six.reraise(*sys.exc_info())
util.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
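util.reraise (added in the util.py hunk above, lifted from six.reraise) re-raises an exception with its original traceback, so an inner except block can pass an error up to the next handler unchanged. A small illustration, assuming gunicorn is importable:

import sys
from gunicorn import util

def inner():
    raise ValueError("boom")

try:
    try:
        inner()
    except ValueError:
        # hand the same exception (and traceback) to the outer handler
        util.reraise(*sys.exc_info())
except ValueError as exc:
    assert exc.__traceback__ is not None  # original traceback preserved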

View File

@ -3,25 +3,20 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import sys
from gunicorn import util
if sys.version_info >= (3, 4):
try:
import aiohttp # pylint: disable=unused-import
except ImportError:
raise RuntimeError("You need aiohttp installed to use this worker.")
else:
try:
from aiohttp.worker import GunicornWebWorker as AiohttpWorker
except ImportError:
from gunicorn.workers._gaiohttp import AiohttpWorker
util.warn(
"The 'gaiohttp' worker is deprecated. See --worker-class "
"documentation for more information."
)
__all__ = ['AiohttpWorker']
try:
import aiohttp # pylint: disable=unused-import
except ImportError:
raise RuntimeError("You need aiohttp installed to use this worker.")
else:
raise RuntimeError("You need Python >= 3.4 to use the gaiohttp worker")
try:
from aiohttp.worker import GunicornWebWorker as AiohttpWorker
except ImportError:
from gunicorn.workers._gaiohttp import AiohttpWorker
util.warn(
"The 'gaiohttp' worker is deprecated. See --worker-class "
"documentation for more information."
)
__all__ = ['AiohttpWorker']

View File

@ -5,6 +5,7 @@
from functools import partial
import errno
import os
import sys
try:
@ -23,13 +24,12 @@ from eventlet.hubs import trampoline
from eventlet.wsgi import ALREADY_HANDLED as EVENTLET_ALREADY_HANDLED
import greenlet
from gunicorn.http.wsgi import sendfile as o_sendfile
from gunicorn.workers.base_async import AsyncWorker
def _eventlet_sendfile(fdout, fdin, offset, nbytes):
while True:
try:
return o_sendfile(fdout, fdin, offset, nbytes)
return os.sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
trampoline(fdout, write=True)
@ -79,10 +79,7 @@ def _eventlet_stop(client, server, conn):
def patch_sendfile():
from gunicorn.http import wsgi
if o_sendfile is not None:
setattr(wsgi, "sendfile", _eventlet_sendfile)
setattr(os, "sendfile", _eventlet_sendfile)
class EventletWorker(AsyncWorker):

View File

@ -28,14 +28,13 @@ from gevent import pywsgi
import gunicorn
from gunicorn.http.wsgi import base_environ
from gunicorn.workers.base_async import AsyncWorker
from gunicorn.http.wsgi import sendfile as o_sendfile
VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__)
def _gevent_sendfile(fdout, fdin, offset, nbytes):
while True:
try:
return o_sendfile(fdout, fdin, offset, nbytes)
return os.sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
wait_write(fdout)
@ -43,10 +42,7 @@ def _gevent_sendfile(fdout, fdin, offset, nbytes):
raise
def patch_sendfile():
from gunicorn.http import wsgi
if o_sendfile is not None:
setattr(wsgi, "sendfile", _gevent_sendfile)
setattr(os, "sendfile", _gevent_sendfile)
class GeventWorker(AsyncWorker):
@ -70,12 +66,8 @@ class GeventWorker(AsyncWorker):
# patch sockets
sockets = []
for s in self.sockets:
if sys.version_info[0] == 3:
sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
fileno=s.sock.fileno()))
else:
sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
_sock=s))
sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
fileno=s.sock.fileno()))
self.sockets = sockets
def notify(self):
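The Python 2 branch (_sock=s) disappears because the Python 3 socket constructor accepts an existing file descriptor directly. For example (illustrative; any listening descriptor works, and both objects end up sharing the same fd):

import socket

listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
listener.bind(("127.0.0.1", 0))
listener.listen(1)

# Wrap the same OS-level descriptor in a new (e.g. gevent-patched) socket object:
wrapped = socket.socket(listener.family, socket.SOCK_STREAM,
                        fileno=listener.fileno())
assert wrapped.getsockname() == listener.getsockname()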

View File

@ -10,23 +10,22 @@
# If no event happens after the keep alive timeout, the connection is
# closed.
from collections import deque
from datetime import datetime
import errno
from functools import partial
import os
import selectors
import socket
import ssl
import sys
from threading import RLock
import time
from collections import deque
from datetime import datetime
from functools import partial
from threading import RLock
from .. import http
from ..http import wsgi
from .. import util
from . import base
from .. import six
from .. import http
from .. import util
from ..http import wsgi
try:
import concurrent.futures as futures
@ -36,19 +35,6 @@ except ImportError:
Python version.
""")
try:
# Python 3.4+
import selectors
except ImportError:
# Python 2
try:
import selectors34 as selectors
except ImportError:
raise RuntimeError(
"You need to install the 'selectors34' package to use this worker "
"with this Python version."
)
class TConn(object):
def __init__(self, cfg, sock, client, server):
@ -278,7 +264,7 @@ class ThreadWorker(base.Worker):
keepalive = False
req = None
try:
req = six.next(conn.parser)
req = next(conn.parser)
if not req:
return (False, conn)
@ -352,7 +338,7 @@ class ThreadWorker(base.Worker):
return False
except EnvironmentError:
# pass to next try-except level
six.reraise(*sys.exc_info())
util.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
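The selectors34 backport can be dropped because the selectors module has been in the standard library since Python 3.4. A minimal sketch of the pattern the threaded worker relies on (register a socket, wait for readiness):

import selectors
import socket

sel = selectors.DefaultSelector()

srv = socket.socket()
srv.bind(("127.0.0.1", 0))
srv.listen(1)
srv.setblocking(False)

sel.register(srv, selectors.EVENT_READ, data="acceptable")

# Returns the ready (key, events) pairs, or [] once the timeout expires.
for key, events in sel.select(timeout=0.1):
    print(key.data, events)

sel.unregister(srv)
sel.close()
srv.close()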

View File

@ -16,7 +16,6 @@ import gunicorn.http as http
import gunicorn.http.wsgi as wsgi
import gunicorn.util as util
import gunicorn.workers.base as base
from gunicorn import six
class StopWaiting(Exception):
""" exception raised to stop waiting for a connnection """
@ -131,7 +130,7 @@ class SyncWorker(base.Worker):
**self.cfg.ssl_options)
parser = http.RequestParser(self.cfg, client)
req = six.next(parser)
req = next(parser)
self.handle_request(listener, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
@ -188,7 +187,7 @@ class SyncWorker(base.Worker):
respiter.close()
except EnvironmentError:
# pass to next try-except level
six.reraise(*sys.exc_info())
util.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
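six.next(parser) becomes the builtin next(), which works on any object implementing the iterator protocol; the request parser yields one parsed request per call. A generic, hypothetical stand-in for such an object:

class OneShotParser:
    """Stand-in for an object exposing the iterator protocol (hypothetical)."""
    def __init__(self, items):
        self._items = iter(items)

    def __iter__(self):
        return self

    def __next__(self):
        return next(self._items)

parser = OneShotParser(["req-1", "req-2"])
assert next(parser) == "req-1"   # replaces six.next(parser)
assert next(parser) == "req-2"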

View File

@ -38,13 +38,8 @@ class WorkerTmp(object):
self.spinner = 0
def notify(self):
try:
self.spinner = (self.spinner + 1) % 2
os.fchmod(self._tmp.fileno(), self.spinner)
except AttributeError:
# python < 2.6
self._tmp.truncate(0)
os.write(self._tmp.fileno(), b"X")
self.spinner = (self.spinner + 1) % 2
os.fchmod(self._tmp.fileno(), self.spinner)
def last_update(self):
return os.fstat(self._tmp.fileno()).st_ctime
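With the pre-2.6 fallback gone, the heartbeat is always the fchmod trick: flipping the file mode between 0 and 1 touches the inode, so the arbiter can read a fresh ctime via fstat. A compressed, Unix-only sketch of the same idea:

import os
import tempfile

tmp = tempfile.TemporaryFile()
spinner = 0

def notify():
    global spinner
    spinner = (spinner + 1) % 2
    os.fchmod(tmp.fileno(), spinner)   # touches the inode -> updates st_ctime

def last_update():
    return os.fstat(tmp.fileno()).st_ctime

notify()
print("worker last seen at", last_update())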

View File

@ -1,3 +1,3 @@
coverage>=4.0,<4.4 # TODO: https://github.com/benoitc/gunicorn/issues/1548
pytest==3.2.5 # TODO: upgrade to latest version requires drop support to Python 2.6
pytest
pytest-cov==2.5.1

View File

@ -6,7 +6,6 @@
# =======
# pip install validate_email pyDNS
#
from __future__ import print_function
import sys
from validate_email import validate_email

View File

@ -20,13 +20,12 @@ CLASSIFIERS = [
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Internet',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
@ -44,11 +43,6 @@ fname = os.path.join(os.path.dirname(__file__), 'requirements_test.txt')
with open(fname) as f:
tests_require = [l.strip() for l in f.readlines()]
if sys.version_info[:2] < (3, 3):
tests_require.append('mock')
if sys.version_info[:2] < (2, 7):
tests_require.append('unittest2')
class PyTestCommand(TestCommand):
user_options = [
("cov", None, "measure coverage")
@ -77,8 +71,6 @@ extra_require = {
'tornado': ['tornado>=0.2'],
'gthread': [],
}
if sys.version_info[0] < 3:
extra_require['gthread'] = ['futures']
setup(
name='gunicorn',
@ -91,7 +83,7 @@ setup(
license='MIT',
url='http://gunicorn.org',
python_requires='>=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
python_requires='>=3.4',
classifiers=CLASSIFIERS,
zip_safe=False,
packages=find_packages(exclude=['examples', 'tests']),

View File

@ -4,17 +4,17 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import io
import os
import tempfile
dirname = os.path.dirname(__file__)
from gunicorn.http.parser import RequestParser
from gunicorn.six import BytesIO
def data_source(fname):
buf = BytesIO()
buf = io.BytesIO()
with open(fname) as handle:
for line in handle:
line = line.rstrip("\n").replace("\\r\\n", "\r\n")

View File

@ -1,5 +1,6 @@
# -*- encoding: utf-8 -*-
import io
import t
import pytest
@ -7,7 +8,6 @@ from gunicorn import util
from gunicorn.http.body import Body, LengthReader, EOFReader
from gunicorn.http.wsgi import Response
from gunicorn.http.unreader import Unreader, IterUnreader, SocketUnreader
from gunicorn.six import BytesIO
from gunicorn.http.errors import InvalidHeader, InvalidHeaderName
try:
@ -17,7 +17,7 @@ except ImportError:
def assert_readline(payload, size, expected):
body = Body(BytesIO(payload))
body = Body(io.BytesIO(payload))
assert body.readline(size) == expected
@ -32,28 +32,28 @@ def test_readline_zero_size():
def test_readline_new_line_before_size():
body = Body(BytesIO(b"abc\ndef"))
body = Body(io.BytesIO(b"abc\ndef"))
assert body.readline(4) == b"abc\n"
assert body.readline() == b"def"
def test_readline_new_line_after_size():
body = Body(BytesIO(b"abc\ndef"))
body = Body(io.BytesIO(b"abc\ndef"))
assert body.readline(2) == b"ab"
assert body.readline() == b"c\n"
def test_readline_no_new_line():
body = Body(BytesIO(b"abcdef"))
body = Body(io.BytesIO(b"abcdef"))
assert body.readline() == b"abcdef"
body = Body(BytesIO(b"abcdef"))
body = Body(io.BytesIO(b"abcdef"))
assert body.readline(2) == b"ab"
assert body.readline(2) == b"cd"
assert body.readline(2) == b"ef"
def test_readline_buffer_loaded():
reader = BytesIO(b"abc\ndef")
reader = io.BytesIO(b"abc\ndef")
body = Body(reader)
body.read(1) # load internal buffer
reader.write(b"g\nhi")
@ -64,7 +64,7 @@ def test_readline_buffer_loaded():
def test_readline_buffer_loaded_with_size():
body = Body(BytesIO(b"abc\ndef"))
body = Body(io.BytesIO(b"abc\ndef"))
body.read(1) # load internal buffer
assert body.readline(2) == b"bc"
assert body.readline(2) == b"\n"
@ -82,7 +82,7 @@ def test_http_header_encoding():
response = Response(mocked_request, mocked_socket, None)
# set umlaut header
response.headers.append(('foo', u'häder'))
response.headers.append(('foo', 'häder'))
with pytest.raises(UnicodeEncodeError):
response.send_headers()
@ -169,7 +169,7 @@ def test_iter_unreader_chunk():
def test_socket_unreader_chunk():
fake_sock = t.FakeSocket(BytesIO(b'Lorem ipsum dolor'))
fake_sock = t.FakeSocket(io.BytesIO(b'Lorem ipsum dolor'))
sock_unreader = SocketUnreader(fake_sock, max_chunk=5)
assert sock_unreader.chunk() == b'Lorem'

View File

@ -4,7 +4,6 @@
# See the NOTICE for more information.
import errno
import sys
try:
import unittest.mock as mock
@ -15,12 +14,7 @@ import gunicorn.pidfile
def builtin(name):
if sys.version_info >= (3, 0):
module = 'builtins'
else:
module = '__builtin__'
return '{0}.{1}'.format(module, name)
return 'builtins.{}'.format(name)
@mock.patch(builtin('open'), new_callable=mock.mock_open)
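With only Python 3 to support, the patched name is always 'builtins.open'. A toy example of the same mock_open pattern (hypothetical read_pid helper, not from the test suite):

import unittest.mock as mock

def read_pid(path):
    with open(path) as f:
        return int(f.read())

@mock.patch("builtins.open", new_callable=mock.mock_open, read_data="1234")
def test_read_pid(mocked_open):
    assert read_pid("/tmp/fake.pid") == 1234
    mocked_open.assert_called_once_with("/tmp/fake.pid")

test_read_pid()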

View File

@ -5,13 +5,11 @@
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import sys
import pytest
from gunicorn.config import (
KeyFile, CertFile, SSLVersion, CACerts, SuppressRaggedEOFs,
DoHandshakeOnConnect, Setting,
DoHandshakeOnConnect, Setting, Ciphers,
)
ssl = pytest.importorskip('ssl')
@ -69,11 +67,7 @@ def test_do_handshake_on_connect():
assert DoHandshakeOnConnect.default is False
@pytest.mark.skipif(sys.version_info < (2, 7),
reason="requires Python 2.7+")
def test_ciphers():
from gunicorn.config import Ciphers
assert issubclass(Ciphers, Setting)
assert Ciphers.name == 'ciphers'
assert Ciphers.section == 'SSL'

View File

@ -1,14 +1,13 @@
from datetime import timedelta
import socket
import io
import logging
import tempfile
import shutil
import os
import shutil
import socket
import tempfile
from datetime import timedelta
from gunicorn.config import Config
from gunicorn.instrument.statsd import Statsd
from gunicorn.six import StringIO
from support import SimpleNamespace
@ -63,7 +62,7 @@ def test_statsd_fail():
def test_instrument():
logger = Statsd(Config())
# Capture logged messages
sio = StringIO()
sio = io.StringIO()
logger.error_log.addHandler(logging.StreamHandler(sio))
logger.sock = MockSocket(False)

View File

@ -7,7 +7,7 @@ import pytest
from gunicorn import util
from gunicorn.errors import AppImportError
from gunicorn.six.moves.urllib.parse import SplitResult # pylint: disable=no-name-in-module
from urllib.parse import SplitResult
@pytest.mark.parametrize('test_input, expected', [

View File

@ -11,7 +11,6 @@ from gunicorn._compat import execfile_
from gunicorn.config import Config
from gunicorn.http.parser import RequestParser
from gunicorn.util import split_request_uri
from gunicorn import six
dirname = os.path.dirname(__file__)
random.seed()
@ -71,10 +70,7 @@ class request(object):
def send_bytes(self):
for d in self.data:
if six.PY3:
yield bytes([d])
else:
yield d
yield bytes([d])
def send_random(self):
maxs = round(len(self.data) / 10)
@ -205,7 +201,7 @@ class request(object):
if body:
raise AssertionError("Failed to read entire body: %r" % body)
try:
data = six.next(iter(req.body))
data = next(iter(req.body))
raise AssertionError("Read data after body finished: %r" % data)
except StopIteration:
pass
@ -284,4 +280,4 @@ class badrequest(object):
def check(self, cfg):
p = RequestParser(cfg, self.send())
six.next(p)
next(p)
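Iterating a bytes object in Python 3 yields integers, so re-wrapping each item as bytes([d]) restores the single-byte chunks the deleted Python 2 branch got for free. For instance:

data = b"GET"
assert list(data) == [71, 69, 84]                      # iteration gives ints
assert [bytes([d]) for d in data] == [b"G", b"E", b"T"]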

View File

@ -1,5 +1,5 @@
[tox]
envlist = py26, py27, py34, py35, py36, py36-dev, py37, pypy, lint
envlist = py34, py35, py36, py36-dev, py37, pypy, lint
skipsdist = True
[testenv]
@ -7,8 +7,6 @@ usedevelop = True
commands = py.test {posargs}
deps =
-rrequirements_test.txt
py26: unittest2
py{26,27},pypy: mock
py{34,35,36,36-dev,37}: aiohttp
[testenv:lint]