New read_partial function. We also now use a buffer in the parser.

benoitc 2010-02-28 18:46:24 +01:00
parent 05d4673972
commit d92364af71
6 changed files with 33 additions and 18 deletions
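Editor's note: the snippet below is not part of the diff. It is a minimal, standalone illustration (Python 2, like the codebase) of the pattern the commit message describes, assuming the behavior shown in the hunks that follow: data is received into a preallocated character buffer with recv_into() instead of being returned as a fresh string by recv(), and the parser joins that buffer back into a str before scanning it. The socketpair() setup is only a stand-in for a client connection.

    import array
    import socket

    # Illustration only; none of this is gunicorn code (Unix-only socketpair).
    a, b = socket.socketpair()
    b.sendall("GET / HTTP/1.1\r\n\r\n")

    buf = array.array("c", "\0" * 1024)   # reusable receive buffer
    n = a.recv_into(buf, 1024)            # number of bytes actually read
    data = buf[:n]                        # array slice holding just those bytes
    assert "".join(data).startswith("GET /")  # parser side: join before str methods
    a.close()
    b.close()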

View File

@@ -12,4 +12,4 @@ def app(environ, start_response):
 ('Content-Length', str(len(data)))
 ]
 start_response(status, response_headers)
-return [data]
+return iter([data])
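A small aside on the example change above, with the caveat that the motivation is not stated in the commit: returning iter([data]) instead of [data] presumably exercises the server against a plain iterator, which, unlike a list, exposes no length, so the response size has to come from the Content-Length header. A quick check of that difference:

    body = ["hello world"]
    it = iter(body)                      # what the example app now returns
    assert len(body) == 1                # a list advertises its length
    assert not hasattr(it, "__len__")    # a generic iterator does not
    assert "".join(it) == "hello world"  # the body content is unchanged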

View File

@@ -38,10 +38,11 @@ class Parser(object):
 """
 ld = len("\r\n\r\n")
-i = buf.find("\r\n\r\n")
+s = "".join(buf)
+i = s.find("\r\n\r\n")
 if i != -1:
 if i > 0:
-r = buf[:i]
+r = s[:i]
 pos = i+ld
 return self.finalize_headers(headers, r, pos)
 return -1
@@ -144,10 +145,11 @@ class Parser(object):
 return False
 def read_chunk(self, data):
+s = "".join(data)
 if not self.start_offset:
-i = data.find("\r\n")
+i = s.find("\r\n")
 if i != -1:
-chunk = data[:i].strip().split(";", 1)
+chunk = s[:i].strip().split(";", 1)
 chunk_size = int(chunk.pop(0), 16)
 self.start_offset = i+2
 self.chunk_size = chunk_size
@@ -158,17 +160,18 @@ class Parser(object):
 ret = '', data[:self.start_offset]
 return ret
 else:
-buf = data[self.start_offset:self.start_offset+self.chunk_size]
+chunk = s[self.start_offset:self.start_offset+self.chunk_size]
 end_offset = self.start_offset + self.chunk_size + 2
 # we wait CRLF else return None
 if len(data) >= end_offset:
-ret = buf, data[end_offset:]
+ret = chunk, data[end_offset:]
 self.chunk_size = 0
 return ret
 return '', data
 def trailing_header(self, data):
-i = data.find("\r\n\r\n")
+s = "".join(data)
+i = s.find("\r\n\r\n")
 return (i != -1)
 def filter_body(self, data):
@@ -179,17 +182,15 @@ class Parser(object):
 dlen = len(data)
 chunk = ''
 if self.is_chunked:
 chunk, data = self.read_chunk(data)
 if not chunk:
 return '', data
 else:
 if self._content_len > 0:
 nr = min(dlen, self._content_len)
-chunk = data[:nr]
+chunk = "".join(data[:nr])
 self._content_len -= nr
-data = ''
+data = []
 self.start_offset = 0
 return (chunk, data)
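Since the parser's input is now a character buffer (an array or list of single-character strings) rather than a str, slices of it are buffers too, which is why read_chunk() and filter_body() above join before calling find(), strip(), or split(). Below is a toy example of the same decoding steps on a made-up chunked payload; it is an illustration only, not parser code:

    import array

    data = array.array("c", "5\r\nhello\r\n0\r\n\r\n")   # toy chunked body
    s = "".join(data)                         # buffers lack str methods, so join first
    i = s.find("\r\n")
    size = int(s[:i].split(";", 1)[0], 16)    # chunk-size line is hex, as in read_chunk()
    chunk = s[i + 2:i + 2 + size]
    assert (size, chunk) == (5, "hello")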

View File

@@ -55,7 +55,6 @@ class Request(object):
 def read(self):
 environ = {}
 headers = []
-buf = ""
 buf = read_partial(self.socket, CHUNK_SIZE)
 i = self.parser.filter_headers(headers, buf)
 if i == -1 and buf:
@@ -64,7 +63,8 @@
 if not data: break
 buf += data
 i = self.parser.filter_headers(headers, buf)
-if i != -1: break
+if i != -1:
+break
 self.log.debug("%s", self.parser.status)
 self.log.debug("Headers:\n%s" % headers)

View File

@@ -129,6 +129,8 @@ class TeeInput(object):
 def _tee(self, length):
 """ fetch partial body"""
 while True:
+self.buf = read_partial(self.socket, length, self.buf)
 chunk, self.buf = self.parser.filter_body(self.buf)
 if chunk:
 fwrite(self.tmp, chunk)
@@ -137,8 +139,6 @@
 if self.parser.body_eof():
 break
-data = read_partial(self.socket, length)
-self.buf += data
 self._finalize()
 return ""

View File

@@ -3,6 +3,7 @@
 # This file is part of gunicorn released under the MIT license.
 # See the NOTICE for more information.
+import array
 import ctypes
 import errno
 import fcntl
@@ -92,8 +93,13 @@ def close(sock):
 except socket.error:
 pass
-def read_partial(sock, length):
-return sock.recv(length)
+def read_partial(sock, length, buf=None):
+if not buf:
+buf = array.array("c", '\0' * length)
+l = sock.recv_into(buf, length)
+return buf[:l]
+return buf
 def write_chunk(sock, data):
 chunk = "".join(("%X\r\n" % len(data), data, "\r\n"))

View File

@@ -4,6 +4,7 @@
 # This file is part of gunicorn released under the MIT license.
 # See the NOTICE for more information.
+import array
 import os
 import tempfile
@@ -50,6 +51,13 @@ class FakeSocket(object):
 def recv(self, length=None):
 return self.tmp.read()
+def recv_into(self, buf, length):
+tmp_buffer = self.tmp.read(length)
+v = len(tmp_buffer)
+for i, c in enumerate(tmp_buffer):
+buf[i] = c
+return v
 def send(self, data):
 self.tmp.write(data)
 self.tmp.flush()
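For completeness, here is a standalone sketch of how the new recv_into() stub might be exercised. _FakeSock is a hypothetical stand-in for the test helper above (its constructor is not shown in this diff), so this is not the project's actual test code:

    import array
    from StringIO import StringIO

    class _FakeSock(object):
        # Hypothetical stand-in mirroring the recv_into() contract added above.
        def __init__(self, data):
            self.tmp = StringIO(data)
        def recv_into(self, buf, length):
            tmp_buffer = self.tmp.read(length)
            for i, c in enumerate(tmp_buffer):
                buf[i] = c
            return len(tmp_buffer)

    sock = _FakeSock("hello world")
    buf = array.array("c", "\0" * 5)
    assert sock.recv_into(buf, 5) == 5
    assert buf.tostring() == "hello"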