Merge remote-tracking branch 'origin/master' into syslog_socktype

This commit is contained in:
zz 2017-07-17 07:48:44 +02:00
commit 5b0ae52c8a
24 changed files with 173 additions and 77 deletions

View File

@ -11,21 +11,34 @@ ignore-paths:
- scripts
- tests/requests/valid
- tests/requests/invalid
- tests/treq.py # We are going to replace this with pytest.
- tests/t.py # Same as above.
- tests/test_selectors.py # This is basically a port of the upstream selectors tests.
- tests/test_gaiohttp.py # TODO: We are going to remove this worker.
pep8:
disable:
- E126
- E128
- E129
- E302
- E501
run: false
pyflakes:
run: false
pylint:
disable:
# Redefinition of unused $X from line $LINE
- F811
# Access to a protected member $X of a client class
- W0212
- bare-except
- misplaced-comparison-constant
- protected-access
- import-error
- too-many-branches
- too-many-arguments
- too-many-nested-blocks
- eval-used
- no-else-return
- wrong-import-position
- unused-argument
- import-self
- duplicate-bases
- no-staticmethod-decorator
- not-callable
mccabe:
run: false

View File

@ -16,7 +16,7 @@ matrix:
env: TOXENV=py36-dev
- python: nightly
env: TOXENV=py37
- python: 3.5
- python: 3.6
env: TOXENV=lint
allow_failures:
- env: TOXENV=py36-dev

View File

@ -320,6 +320,37 @@ because it consumes less system resources.
In order to use the inotify reloader, you must have the ``inotify``
package installed.
.. _reload-engine:
reload_engine
~~~~~~~~~~~~~
* ``--reload-engine STRING``
* ``auto``
The implementation that should be used to power :ref:`reload`.
Valid engines are:
* 'auto'
* 'poll'
* 'inotify' (requires inotify)
.. versionadded:: 19.7
.. _reload-extra-files:
reload_extra_files
~~~~~~~~~~~~~~~~~~
* ``--reload-extra-file FILES``
* ``[]``
Extends :ref:`reload` option to also watch and reload on additional files
(e.g., templates, configurations, specifications, etc.).
.. versionadded:: 19.8
.. _spew:
spew

View File

@ -262,3 +262,37 @@ if PY26:
else:
from gunicorn.six.moves.urllib.parse import urlsplit
import inspect
if hasattr(inspect, 'signature'):
positionals = (
inspect.Parameter.POSITIONAL_ONLY,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
)
def get_arity(f):
sig = inspect.signature(f)
arity = 0
for param in sig.parameters.values():
if param.kind in positionals:
arity += 1
return arity
else:
def get_arity(f):
return len(inspect.getargspec(f)[0])
try:
import html
def html_escape(s):
return html.escape(s)
except ImportError:
import cgi
def html_escape(s):
return cgi.escape(s, quote=True)

View File

@ -173,8 +173,8 @@ class Arbiter(object):
are queued. Child signals only wake up the master.
"""
# close old PIPE
if self.PIPE:
[os.close(p) for p in self.PIPE]
for p in self.PIPE:
os.close(p)
# initialize the pipe
self.PIPE = pair = os.pipe()
@ -185,7 +185,8 @@ class Arbiter(object):
self.log.close_on_exec()
# initialize all signals
[signal.signal(s, self.signal) for s in self.SIGNALS]
for s in self.SIGNALS:
signal.signal(s, self.signal)
signal.signal(signal.SIGCHLD, self.handle_chld)
def signal(self, sig, frame):
@ -204,7 +205,7 @@ class Arbiter(object):
while True:
self.maybe_promote_master()
sig = self.SIG_QUEUE.pop(0) if len(self.SIG_QUEUE) else None
sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None
if sig is None:
self.sleep()
self.murder_workers()
@ -361,11 +362,10 @@ class Arbiter(object):
return
while os.read(self.PIPE[0], 1):
pass
except select.error as e:
if e.args[0] not in [errno.EAGAIN, errno.EINTR]:
raise
except OSError as e:
if e.errno not in [errno.EAGAIN, errno.EINTR]:
except (select.error, OSError) as e:
# TODO: select.error is a subclass of OSError since Python 3.3.
error_number = getattr(e, 'errno', e.args[0])
if error_number not in [errno.EAGAIN, errno.EINTR]:
raise
except KeyboardInterrupt:
sys.exit()
@ -454,7 +454,8 @@ class Arbiter(object):
# do we need to change listener ?
if old_address != self.cfg.address:
# close all listeners
[l.close() for l in self.LISTENERS]
for l in self.LISTENERS:
l.close()
# init new listeners
self.LISTENERS = sock.create_sockets(self.cfg, self.log)
listeners_str = ",".join([str(l) for l in self.LISTENERS])
@ -476,7 +477,7 @@ class Arbiter(object):
util._setproctitle("master [%s]" % self.proc_name)
# spawn new workers
for i in range(self.cfg.workers):
for _ in range(self.cfg.workers):
self.spawn_worker()
# manage workers
@ -586,7 +587,7 @@ class Arbiter(object):
sys.stderr.flush()
sys.exit(self.APP_LOAD_ERROR)
except:
self.log.exception("Exception in worker process"),
self.log.exception("Exception in worker process")
if not worker.booted:
sys.exit(self.WORKER_BOOT_ERROR)
sys.exit(-1)
@ -607,7 +608,7 @@ class Arbiter(object):
of the master process.
"""
for i in range(self.num_workers - len(self.WORKERS.keys())):
for _ in range(self.num_workers - len(self.WORKERS.keys())):
self.spawn_worker()
time.sleep(0.1 * random.random())

View File

@ -426,7 +426,7 @@ def validate_callable(arity):
"" % (obj_name, mod_name))
if not six.callable(val):
raise TypeError("Value is not six.callable: %s" % val)
if arity != -1 and arity != len(inspect.getargspec(val)[0]):
if arity != -1 and arity != _compat.get_arity(val):
raise TypeError("Value must have an arity of: %s" % arity)
return val
return _validate_callable
@ -464,7 +464,7 @@ def validate_group(val):
def validate_post_request(val):
val = validate_callable(-1)(val)
largs = len(inspect.getargspec(val)[0])
largs = _compat.get_arity(val)
if largs == 4:
return val
elif largs == 3:
@ -652,6 +652,11 @@ class WorkerThreads(Setting):
If it is not defined, the default is ``1``.
This setting only affects the Gthread worker type.
.. note::
If you try to use the ``sync`` worker type and set the ``threads``
setting to more than 1, the ``gthread`` worker type will be used
instead.
"""
@ -878,8 +883,10 @@ class ReloadExtraFiles(Setting):
validator = validate_list_of_existing_files
default = []
desc = """\
Extends --reload option to also watch and reload on additional files
Extends :ref:`reload` option to also watch and reload on additional files
(e.g., templates, configurations, specifications, etc.).
.. versionadded:: 19.8
"""

View File

@ -326,7 +326,7 @@ class Logger(object):
request_time))
try:
self.access_log.info(self.cfg.access_log_format % safe_atoms)
self.access_log.info(self.cfg.access_log_format, safe_atoms)
except:
self.error(traceback.format_exc())

View File

@ -210,7 +210,7 @@ class Body(object):
while size > self.buf.tell():
data = self.reader.read(1024)
if not len(data):
if not data:
break
self.buf.write(data)
@ -248,7 +248,7 @@ class Body(object):
def readlines(self, size=None):
ret = []
data = self.read()
while len(data):
while data:
pos = data.find(b"\n")
if pos < 0:
ret.append(data)

View File

@ -53,7 +53,7 @@ class Message(object):
self.unreader.unread(unused)
self.set_body_reader()
def parse(self):
def parse(self, unreader):
raise NotImplementedError()
def parse_headers(self, data):
@ -64,7 +64,7 @@ class Message(object):
# Parse headers into key/value pairs paying attention
# to continuation lines.
while len(lines):
while lines:
if len(headers) >= self.limit_request_fields:
raise LimitRequestHeaders("limit request headers fields")
@ -81,7 +81,7 @@ class Message(object):
name, value = name.strip(), [value.lstrip()]
# Consume value continuation lines
while len(lines) and lines[0].startswith((" ", "\t")):
while lines and lines[0].startswith((" ", "\t")):
curr = lines.pop(0)
header_length += len(curr)
if header_length > self.limit_request_field_size > 0:

View File

@ -40,7 +40,7 @@ class Unreader(object):
while self.buf.tell() < size:
chunk = self.chunk()
if not len(chunk):
if not chunk:
ret = self.buf.getvalue()
self.buf = six.BytesIO()
return ret

View File

@ -183,7 +183,7 @@ def create(req, sock, client, server, cfg):
server = host.split(':')
if len(server) == 1:
if url_scheme == "http":
server.append(80),
server.append(80)
elif url_scheme == "https":
server.append(443)
else:

View File

@ -80,7 +80,7 @@ class Statsd(Logger):
pass
# Log to parent logger only if there is something to say
if msg is not None and len(msg) > 0:
if msg:
Logger.log(self, lvl, msg, *args, **kwargs)
except Exception:
Logger.warning(self, "Failed to log to statsd", exc_info=True)
@ -108,7 +108,7 @@ class Statsd(Logger):
self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))
def decrement(self, name, value, sampling_rate=1.0):
self._sock_send("{0){1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))
self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))
def histogram(self, name, value):
self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value))

View File

@ -53,19 +53,16 @@ class Reloader(threading.Thread):
self._callback(filename)
time.sleep(self._interval)
try:
from inotify.adapters import Inotify
import inotify.constants
has_inotify = True
except ImportError:
has_inotify = False
has_inotify = False
if sys.platform.startswith('linux'):
try:
from inotify.adapters import Inotify
import inotify.constants
has_inotify = True
except ImportError:
pass
class InotifyReloader():
def __init__(self, callback=None):
raise ImportError('You must have the inotify module installed to use '
'the inotify reloader')
if has_inotify:
class InotifyReloader(threading.Thread):
@ -116,6 +113,13 @@ if has_inotify:
self._callback(filename)
else:
class InotifyReloader(object):
def __init__(self, callback=None):
raise ImportError('You must have the inotify module installed to '
'use the inotify reloader')
preferred_reloader = InotifyReloader if has_inotify else Reloader

View File

@ -31,7 +31,7 @@ class BaseSocket(object):
self.sock = self.set_options(sock, bound=bound)
def __str__(self, name):
def __str__(self):
return "<socket %d>" % self.sock.fileno()
def __getattr__(self, name):
@ -94,7 +94,7 @@ class TCP6Socket(TCPSocket):
FAMILY = socket.AF_INET6
def __str__(self):
(host, port, fl, sc) = self.sock.getsockname()
(host, port, _, _) = self.sock.getsockname()
return "http://[%s]:%d" % (host, port)

View File

@ -20,9 +20,9 @@ import traceback
import inspect
import errno
import warnings
import cgi
import logging
from gunicorn import _compat
from gunicorn.errors import AppImportError
from gunicorn.six import text_type
from gunicorn.workers import SUPPORTED_WORKERS
@ -61,7 +61,7 @@ except ImportError:
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
for _ in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
@ -329,7 +329,7 @@ def write_error(sock, status_int, reason, mesg):
%(mesg)s
</body>
</html>
""") % {"reason": reason, "mesg": cgi.escape(mesg)}
""") % {"reason": reason, "mesg": _compat.html_escape(mesg)}
http = textwrap.dedent("""\
HTTP/1.1 %s %s\r

View File

@ -101,7 +101,8 @@ class Worker(object):
util.close_on_exec(p)
# Prevent fd inheritance
[util.close_on_exec(s) for s in self.sockets]
for s in self.sockets:
util.close_on_exec(s)
util.close_on_exec(self.tmp.fileno())
self.wait_fds = self.sockets + [self.PIPE[0]]
@ -145,7 +146,7 @@ class Worker(object):
# per https://docs.python.org/2/library/sys.html#sys.exc_info warning,
# delete the traceback after use.
try:
exc_type, exc_val, exc_tb = sys.exc_info()
_, exc_val, exc_tb = sys.exc_info()
self.reloader.add_extra_file(exc_val.filename)
tb_string = six.StringIO()
@ -156,7 +157,8 @@ class Worker(object):
def init_signals(self):
# reset signaling
[signal.signal(s, signal.SIG_DFL) for s in self.SIGNALS]
for s in self.SIGNALS:
signal.signal(s, signal.SIG_DFL)
# init new signaling
signal.signal(signal.SIGQUIT, self.handle_quit)
signal.signal(signal.SIGTERM, self.handle_exit)

View File

@ -134,9 +134,12 @@ class EventletWorker(AsyncWorker):
self.notify()
try:
with eventlet.Timeout(self.cfg.graceful_timeout) as t:
[a.kill(eventlet.StopServe()) for a in acceptors]
[a.wait() for a in acceptors]
for a in acceptors:
a.kill(eventlet.StopServe())
for a in acceptors:
a.wait()
except eventlet.Timeout as te:
if te != t:
raise
[a.kill() for a in acceptors]
for a in acceptors:
a.kill()

View File

@ -143,13 +143,15 @@ class GeventWorker(AsyncWorker):
# Force kill all active the handlers
self.log.warning("Worker graceful timeout (pid:%s)" % self.pid)
[server.stop(timeout=1) for server in servers]
for server in servers:
server.stop(timeout=1)
except:
pass
def handle_request(self, *args):
def handle_request(self, listener_name, req, sock, addr):
try:
super(GeventWorker, self).handle_request(*args)
super(GeventWorker, self).handle_request(listener_name, req, sock,
addr)
except gevent.GreenletExit:
pass
except SystemExit:

View File

@ -211,7 +211,7 @@ class ThreadWorker(base.Worker):
if self.nr_conns < self.worker_connections:
# wait for an event
events = self.poller.select(1.0)
for key, mask in events:
for key, _ in events:
callback = key.data
callback(key.fileobj)

View File

@ -34,7 +34,7 @@ def requires_mac_ver(*min_version):
return decorator
try:
from types import SimpleNamespace
from types import SimpleNamespace # noqa
except ImportError:
class SimpleNamespace(object):
def __init__(self, **kwargs):

View File

@ -29,7 +29,7 @@ class request(object):
def __call__(self, func):
def run():
src = data_source(self.fname)
func(src, RequestParser(src))
func(src, RequestParser(src, None))
run.func_name = func.func_name
return run

View File

@ -35,6 +35,7 @@ def test_validate_no_file(_open):
def test_validate_file_pid_exists(kill, _open):
pidfile = gunicorn.pidfile.Pidfile('test.pid')
assert pidfile.validate() == 1
assert kill.called
@mock.patch(builtin('open'), new_callable=mock.mock_open, read_data='a')

View File

@ -74,7 +74,7 @@ class request(object):
yield lines[:pos+2]
lines = lines[pos+2:]
pos = lines.find(b"\r\n")
if len(lines):
if lines:
yield lines
def send_bytes(self):
@ -137,7 +137,7 @@ class request(object):
def match_read(self, req, body, sizes):
data = self.szread(req.body.read, sizes)
count = 1000
while len(body):
while body:
if body[:len(data)] != data:
raise AssertionError("Invalid body data read: %r != %r" % (
data, body[:len(data)]))
@ -148,9 +148,9 @@ class request(object):
if count <= 0:
raise AssertionError("Unexpected apparent EOF")
if len(body):
if body:
raise AssertionError("Failed to read entire body: %r" % body)
elif len(data):
elif data:
raise AssertionError("Read beyond expected body: %r" % data)
data = req.body.read(sizes())
if data:
@ -159,7 +159,7 @@ class request(object):
def match_readline(self, req, body, sizes):
data = self.szread(req.body.readline, sizes)
count = 1000
while len(body):
while body:
if body[:len(data)] != data:
raise AssertionError("Invalid data read: %r" % data)
if b'\n' in data[:-1]:
@ -170,9 +170,9 @@ class request(object):
count -= 1
if count <= 0:
raise AssertionError("Apparent unexpected EOF")
if len(body):
if body:
raise AssertionError("Failed to read entire body: %r" % body)
elif len(data):
elif data:
raise AssertionError("Read beyond expected body: %r" % data)
data = req.body.readline(sizes())
if data:
@ -190,7 +190,7 @@ class request(object):
raise AssertionError("Invalid body data read: %r != %r" % (
line, body[:len(line)]))
body = body[len(line):]
if len(body):
if body:
raise AssertionError("Failed to read entire body: %r" % body)
data = req.body.readlines(sizes())
if data:
@ -207,7 +207,7 @@ class request(object):
raise AssertionError("Invalid body data read: %r != %r" % (
line, body[:len(line)]))
body = body[len(line):]
if len(body):
if body:
raise AssertionError("Failed to read entire body: %r" % body)
try:
data = six.next(iter(req.body))
@ -254,7 +254,7 @@ class request(object):
p = RequestParser(cfg, sender())
for req in p:
self.same(req, sizer, matcher, cases.pop(0))
assert len(cases) == 0
assert not cases
def same(self, req, sizer, matcher, exp):
assert req.method == exp["method"]

View File

@ -14,6 +14,4 @@ deps =
[testenv:lint]
commands = prospector
deps =
; TODO: See https://github.com/landscapeio/prospector/pull/205
pylint<1.7
prospector