fetch all input body in gaiohttp to work w/ non asyncio wsgi apps
I took the suggestion of @asvetlov. This change first loads the entire request body into RAM in the gaiohttp worker so that classic WSGI applications can be handled. This is a temporary fix and should be replaced as soon as possible by streaming behaviour. It does the trick for most applications anyway, since you are not supposed to handle large bodies directly with gunicorn. fix #803
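For context, the failure mode with classic WSGI apps is that the streamed request body behind environ['wsgi.input'] cannot be consumed with a plain blocking read() from inside the worker's event loop. A minimal sketch of the buffering idea (illustrative names only, not the actual gunicorn/aiohttp code):

import io

def make_blocking_environ(environ, body):
    # Hypothetical helper: expose an already-collected request body as a
    # regular in-memory file object so a synchronous WSGI app can call
    # environ['wsgi.input'].read() without touching the event loop.
    environ['wsgi.input'] = io.BytesIO(body)
    environ['CONTENT_LENGTH'] = str(len(body))
    return environ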
This commit is contained in:
parent feae663092
commit aacfba1aed

examples/echo.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
#
# Example code from Eventlet sources

import os
import pprint
from wsgiref.validate import validator
import sys

from gunicorn import __version__
#@validator
def app(environ, start_response):
    """Simplest possible application object"""

    errors = environ['wsgi.errors']
    # pprint.pprint(('ENVIRON', environ), stream=errors)

    print(environ)
    if environ['REQUEST_METHOD'].upper() != 'POST':
        data = b'Hello, World!\n'
    else:
        data = environ['wsgi.input'].read()


    status = '200 OK'

    response_headers = [
        ('Content-type','text/plain'),
        ('Content-Length', str(len(data))),
        ('X-Gunicorn-Version', __version__),
        ("Test", "test тест"),
    ]
    start_response(status, response_headers)
    return iter([data])
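Usage note: assuming a Python 3 environment with aiohttp installed, this example should be runnable against the patched worker with something like gunicorn -k gaiohttp echo:app from the examples directory; POSTing a request body exercises the environ['wsgi.input'].read() branch that this change targets.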
@@ -49,7 +49,8 @@ class AiohttpWorker(base.Worker):

     def factory(self, wsgi, host, port):
         proto = WSGIServerHttpProtocol(
-            wsgi, loop=self.loop,
+            wsgi, readpayload=True,
+            loop=self.loop,
             log=self.log,
             debug=self.cfg.debug,
             keep_alive=self.cfg.keepalive,
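On the design choice: as the old aiohttp wsgi API worked, readpayload=True should make WSGIServerHttpProtocol read the whole request payload before invoking the application, so wsgi.input behaves like an ordinary file object rather than an asyncio stream. That is what lets non-asyncio WSGI apps work, at the cost of buffering the full body in memory, which is why the commit message flags streaming behaviour as the eventual replacement.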