Mirror of https://github.com/frappe/gunicorn.git, synced 2026-01-14 11:09:11 +08:00
recv_into seems slower than just concatenating strings. Let's create a branch to test it.
commit 77ea9ea78a
parent a0d8a16b86
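The hypothesis in the commit message can be sanity-checked with a micro-benchmark along the lines of the sketch below. It is not part of this commit: it is written for modern Python 3 (bytes) rather than the Python 2 strings used in the diff, and the payload size, chunk size, and helper names are my own assumptions. It times a recv() loop that joins received chunks against a recv_into() loop filling a preallocated buffer over a socketpair; which one wins depends on chunk size and allocator behaviour, which is presumably what the branch is meant to measure. The actual change follows.

# Hedged micro-benchmark sketch (not from the gunicorn tree):
# compare recv() + join against recv_into() with a preallocated buffer.
import socket
import threading
import time

PAYLOAD = b"x" * (16 * 1024 * 1024)   # 16 MiB test payload (arbitrary)
CHUNK = 8192

def sender(sock):
    # Feed the payload from a background thread so the reader never deadlocks.
    sock.sendall(PAYLOAD)
    sock.close()

def read_with_concat(sock, total):
    # Strategy 1: recv() returns new bytes objects, joined at the end.
    parts = []
    received = 0
    while received < total:
        data = sock.recv(CHUNK)
        if not data:
            break
        parts.append(data)
        received += len(data)
    return b"".join(parts)

def read_with_recv_into(sock, total):
    # Strategy 2: recv_into() writes directly into one preallocated buffer.
    buf = bytearray(total)
    view = memoryview(buf)
    pos = 0
    while pos < total:
        n = sock.recv_into(view[pos:pos + CHUNK])
        if n == 0:
            break
        pos += n
    return bytes(buf[:pos])

def run(reader):
    a, b = socket.socketpair()
    t = threading.Thread(target=sender, args=(a,))
    t.start()
    start = time.perf_counter()
    result = reader(b, len(PAYLOAD))
    elapsed = time.perf_counter() - start
    t.join()
    b.close()
    assert result == PAYLOAD
    return elapsed

if __name__ == "__main__":
    print("recv + join : %.3fs" % run(read_with_concat))
    print("recv_into   : %.3fs" % run(read_with_recv_into))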
@@ -36,11 +36,9 @@ class Parser(object):
         if parsing isn't done. headers dict is updated
         with new headers.
         """
-        s = "".join(buf)
-
-        i = s.find("\r\n\r\n")
+        i = buf.find("\r\n\r\n")
         if i != -1:
-            r = s[:i]
+            r = buf[:i]
             pos = i+4
             return self.finalize_headers(headers, r, pos)
         return -1
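For context, this is the operation the hunk above simplifies: once buf is a single string rather than a list of received chunks, the end of the header block can be located with a plain find(). The helper below is an illustrative standalone sketch, not gunicorn's Parser API.

def split_headers(buf):
    # Return (header_block, body_offset), or None while headers are incomplete.
    i = buf.find("\r\n\r\n")
    if i == -1:
        return None            # need more data from the socket
    return buf[:i], i + 4      # i+4 skips the CRLF CRLF separator

raw = "GET / HTTP/1.1\r\nHost: example.com\r\n\r\nhello"
head, pos = split_headers(raw)
print(head.splitlines())       # ['GET / HTTP/1.1', 'Host: example.com']
print(raw[pos:])               # 'hello'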
@@ -142,11 +140,10 @@ class Parser(object):
             return False
 
     def read_chunk(self, data):
-        s = "".join(data)
         if not self.start_offset:
-            i = s.find("\r\n")
+            i = data.find("\r\n")
             if i != -1:
-                chunk = s[:i].strip().split(";", 1)
+                chunk = data[:i].strip().split(";", 1)
                 chunk_size = int(chunk.pop(0), 16)
                 self.start_offset = i+2
                 self.chunk_size = chunk_size
@@ -157,7 +154,7 @@ class Parser(object):
                 ret = '', data[:self.start_offset]
             return ret
         else:
-            chunk = s[self.start_offset:self.start_offset+self.chunk_size]
+            chunk = data[self.start_offset:self.start_offset+self.chunk_size]
             end_offset = self.start_offset + self.chunk_size + 2
             # we wait CRLF else return None
             if len(data) >= end_offset:
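The two hunks above apply the same change inside read_chunk: the chunk-size line is parsed straight from the data string. As a reminder of the wire format, here is a hypothetical standalone version of that parsing step (the function name and return shape are mine, not the Parser's).

def parse_chunk_header(data):
    # Return (chunk_size, offset_of_chunk_data), or None if the size line
    # has not fully arrived yet.
    i = data.find("\r\n")
    if i == -1:
        return None
    size_field = data[:i].strip().split(";", 1)[0]   # drop any chunk extension
    return int(size_field, 16), i + 2                # chunk size is hexadecimal

print(parse_chunk_header("1a;name=value\r\n<26 bytes of chunk data>\r\n"))
# -> (26, 15)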
@@ -167,8 +164,7 @@ class Parser(object):
             return '', data
 
     def trailing_header(self, data):
-        s = "".join(data)
-        i = s.find("\r\n\r\n")
+        i = data.find("\r\n\r\n")
        return (i != -1)
 
     def filter_body(self, data):
@@ -185,7 +181,7 @@ class Parser(object):
         else:
             if self._content_len > 0:
                 nr = min(dlen, self._content_len)
-                chunk = "".join(data[:nr])
+                chunk = data[:nr]
                 self._content_len -= nr
                 data = []
 
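The last parser hunk touches filter_body: with data as a plain string, the next piece of the body is just a slice bounded by the remaining Content-Length. A hedged standalone illustration follows (not the actual method signature).

def take_body(data, remaining):
    # Return (body_chunk, leftover_data, new_remaining).
    nr = min(len(data), remaining)
    return data[:nr], data[nr:], remaining - nr

chunk, leftover, left = take_body("hello world", 5)
print((chunk, leftover, left))   # ('hello', ' world', 0)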
@@ -90,19 +90,15 @@ def close(sock):
         pass
 
 def read_partial(sock, length, buf=None):
-    if buf is not None:
-        if len(buf) >= length:
-            return buf
-        else:
-            length = length - len(buf)
-
-    tmp_buf = ctypes.create_string_buffer(length)
-    l = sock.recv_into(tmp_buf, length)
+    if buf is not None and len(buf) >= length:
+        return buf
+
+    tmp_buf = sock.recv(length)
 
     if not buf:
-        return tmp_buf[:l]
+        return tmp_buf
 
-    return buf + tmp_buf[:l]
+    return buf + tmp_buf
 
 def write_chunk(sock, data):
     chunk = "".join(("%X\r\n" % len(data), data, "\r\n"))
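A possible call pattern for the simplified read_partial() is sketched below. It is hypothetical, uses Python 2-era string semantics to match the code above, and the loop shape and 8192-byte read hint are assumptions, not gunicorn's actual call site.

def read_until_headers(sock):
    # Grow buf until the blank line that ends the header block arrives.
    buf = ""
    while "\r\n\r\n" not in buf:
        new_buf = read_partial(sock, len(buf) + 8192, buf)
        if new_buf == buf:       # recv() returned '': peer closed early
            raise IOError("connection closed before headers were complete")
        buf = new_buf
    return buf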