Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 15 additions & 10 deletions human_curl/async.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,14 +199,20 @@ def reset_opener(self, opener):

:param opener: :class:`pycurl.Curl` object
"""
opener.success_callback = None
opener.fail_callback = None
opener.request = None

if getattr(opener, "dirty", False) is True:
# After applying this method curl raises an error
# Unable to fetch curl handle from curl object
opener.reset()
# This code is a workaround for the case where the reset() method is not working
# So we create new instance of opener
self._openers_pool.handles.remove(opener)
del opener

opener = self.get_opener()
self._openers_pool.handles.append(opener)

opener.success_callback = None
opener.fail_callback = None
opener.request = None

# Maybe need delete cookies?
return opener
Expand All @@ -229,7 +235,7 @@ def process_raw_data(self):
opener = self._free_openers.pop()

# Create request object
self.configure_opener(opener, request_data)
opener = self.configure_opener(opener, request_data)

# Add configured opener to handles pool
self._openers_pool.add_handle(opener)
Expand All @@ -253,8 +259,7 @@ def process_pending_requests(self):
response = self.make_response(opener)
opener.success_callback(response=response,
async_client=self, opener=opener)
## FIXME: after pycurl.MultiCurl reset error
## opener.dirty = True
opener.dirty = True
self._free_openers.append(opener)

for opener, errno, errmsg in error_list:
Expand All @@ -264,8 +269,7 @@ def process_pending_requests(self):
opener.fail_callback(errno=errno, errmsg=errmsg,
async_client=self, opener=opener,
request=opener.request)
## FIXME: after pycurl.MultiCurl reset error
## opener.dirty = True
opener.dirty = True
self._free_openers.append(opener)


Expand Down Expand Up @@ -298,6 +302,7 @@ def cleanup_pool(self):
opener.close()

self._openers_pool.close()
self._openers_pool = None

def method(self, method, **kwargs):
"""Added request params to data_queue
Expand Down
21 changes: 18 additions & 3 deletions human_curl/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -727,6 +727,12 @@ def json(self):
except ValueError:
return None

def _clean_raw_headers(self, raw_headers):
ret = raw_headers.strip()
ret = ret.replace("\r\nHTTP", "\r\n\r\nHTTP")
ret = ret.replace("\r\n\r\n\r\n", "\r\n\r\n")
return ret

def _parse_headers_raw(self):
"""Parse response headers and save as instance vars
"""
Expand All @@ -746,7 +752,12 @@ def parse_header_block(raw_block):
if not header:
continue
elif not header.startswith("HTTP"):
field, value = map(lambda u: u.strip(), header.split(":", 1))
try:
field, value = map(lambda u: u.strip(), header.split(":", 1))
except Exception, e:
logger.warn("Unable to parse header %s for url %s , %s", header, self.url, e)
continue

if field.startswith("Location"):
# maybe not good
if not value.startswith("http"):
Expand All @@ -763,15 +774,19 @@ def parse_header_block(raw_block):
logger.warn(e)
continue
else:
block_headers.append((version, code, message))
if len(block_headers) > 0:
logger.warn("Status HTTP header already exists %s, but found one more %s for url %s" %
(block_headers[0], (version, code, message), self.url))
else:
block_headers.append((version, code, message))
else:
# raise ValueError("Wrong header field")
pass
return block_headers

raw_headers = self._headers_output.getvalue()

headers_blocks = raw_headers.strip().split("\r\n\r\n")
headers_blocks = self._clean_raw_headers(raw_headers).split("\r\n\r\n")
for raw_block in headers_blocks:
block = parse_header_block(raw_block)

Expand Down