Skip to content

Commit

Permalink
Fix pycurl unhandled exception #180 (#196)
Browse files Browse the repository at this point in the history
New requests are now added to the pycurl multi handle from the multi thread after perform() (fixes #180)
Added a Python 3.8 test job in Travis and to setup.py (fixes #190)
Removed Python 2 Travis tests while figuring out why the build stalls when they run successfully in Tox
  • Loading branch information
xmendez authored May 9, 2020
1 parent 5087640 commit f7bbca4
Show file tree
Hide file tree
Showing 5 changed files with 115 additions and 121 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@ language: python
services:
- docker
python:
- "2.7"
- "3.4"
- "3.5"
- "3.6"
- "3.7"
- "3.8"
before_install:
- docker-compose -f tests/server_dir/docker-compose.yml up -d
install:
Expand Down
7 changes: 5 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,9 @@
]

install_requires = [
'pycurl<=7.43.0.3',
'pyparsing',
'pycurl',
'pyparsing<3;python_version<="3.4"',
'pyparsing>3*;python_version>="3.5"',
'future',
'six',
'configparser;python_version<"3.5"',
Expand All @@ -31,6 +32,7 @@
if sys.platform.startswith("win"):
install_requires += ["colorama"]


setup(
name="wfuzz",
include_package_data=True,
Expand Down Expand Up @@ -71,5 +73,6 @@
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
),
)
2 changes: 1 addition & 1 deletion src/wfuzz/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
__title__ = 'wfuzz'
__version__ = "2.4.5"
__version__ = "2.4.6"
__build__ = 0x023000
__author__ = 'Xavier Mendez'
__license__ = 'GPL 2.0'
Expand Down
20 changes: 10 additions & 10 deletions src/wfuzz/externals/reqresp/Request.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,16 +351,16 @@ def to_pycurl_object(c, req):

c.setopt(pycurl.FOLLOWLOCATION, 1 if req.followLocation else 0)

proxy = req.getProxy()
if proxy is not None:
c.setopt(pycurl.PROXY, python2_3_convert_to_unicode(proxy))
if req.proxytype == "SOCKS5":
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
elif req.proxytype == "SOCKS4":
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
req.delHeader("Proxy-Connection")
else:
c.setopt(pycurl.PROXY, "")
# proxy = req.getProxy()
# if proxy is not None:
# c.setopt(pycurl.PROXY, python2_3_convert_to_unicode(proxy))
# if req.proxytype == "SOCKS5":
# c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
# elif req.proxytype == "SOCKS4":
# c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
# req.delHeader("Proxy-Connection")
# else:
# c.setopt(pycurl.PROXY, "")

return c

Expand Down
205 changes: 98 additions & 107 deletions src/wfuzz/myhttp.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from threading import Thread, Lock
import itertools
from queue import Queue
import collections

from .exception import FuzzExceptBadOptions, FuzzExceptNetError

Expand All @@ -15,34 +16,35 @@ def __init__(self, options):
self.processed = 0

self.exit_job = False
self.mutex_multi = Lock()
self.mutex_stats = Lock()
self.mutex_reg = Lock()

self.m = None
self.freelist = Queue()
self.retrylist = Queue()
self.curlh_freelist = []
self._request_list = collections.deque()
self.handles = []

self.ths = None

self.pool_map = {}
self.default_poolid = 0

self.options = options

self._registered = 0

def _initialize(self):
# pycurl Connection pool
self._create_pool(self.options.get("concurrent"))
self.m = pycurl.CurlMulti()
self.handles = []

# internal pool
self.default_poolid = self._new_pool()
for i in range(self.options.get("concurrent")):
curl_h = pycurl.Curl()
self.handles.append(curl_h)
self.curlh_freelist.append(curl_h)

# create threads
self.ths = []

for fn in ("_read_multi_stack", "_read_retry_queue"):
for fn in ("_read_multi_stack",):
th = Thread(target=getattr(self, fn))
th.setName(fn)
self.ths.append(th)
Expand All @@ -51,21 +53,15 @@ def _initialize(self):
def job_stats(self):
with self.mutex_stats:
dic = {
"http_Processed": self.processed,
"http_Idle Workers": self.freelist.qsize()
"http_processed": self.processed,
"http_registered": len(self._registered)
}
return dic

# internal http pool control

def perform(self, fuzzreq):
poolid = self._new_pool()
self.enqueue(fuzzreq, poolid)
def iter_results(self, poolid):
item = self.pool_map[poolid]["queue"].get()
return item

def iter_results(self, poolid=None):
item = self.pool_map[self.default_poolid if not poolid else poolid]["queue"].get()

if not item:
return
Expand All @@ -83,56 +79,46 @@ def _new_pool(self):

return poolid

def enqueue(self, fuzzres, poolid=None):
c = fuzzres.history.to_http_object(self.freelist.get())
c = self._set_extra_options(c, fuzzres, self.default_poolid if not poolid else poolid)
# Configure a recycled pycurl easy handle to execute *fuzzres* for pool *poolid*.
# NOTE(review): diff-view fragment — original indentation was lost in the page scrape.
def _prepare_curl_h(self, curl_h, fuzzres, poolid):
# Rebuild the handle's curl options from the request's HTTP history object.
new_curl_h = fuzzres.history.to_http_object(curl_h)
# Apply pool-specific extras (proxy rotation etc. — presumably set per pool; confirm in _set_extra_options).
new_curl_h = self._set_extra_options(new_curl_h, fuzzres, poolid)

# Stash (body buffer, header buffer, request, pool id) on the handle so the
# multi-stack reader can recover them once the transfer finishes; curl writes
# response body/headers straight into the two BytesIO buffers.
new_curl_h.response_queue = ((BytesIO(), BytesIO(), fuzzres, poolid))
new_curl_h.setopt(pycurl.WRITEFUNCTION, new_curl_h.response_queue[0].write)
new_curl_h.setopt(pycurl.HEADERFUNCTION, new_curl_h.response_queue[1].write)

return new_curl_h

def enqueue(self, fuzzres, poolid):
if self.exit_job:
return

c.response_queue = ((BytesIO(), BytesIO(), fuzzres, self.default_poolid if not poolid else poolid))
c.setopt(pycurl.WRITEFUNCTION, c.response_queue[0].write)
c.setopt(pycurl.HEADERFUNCTION, c.response_queue[1].write)

with self.mutex_multi:
self.m.add_handle(c)
self._request_list.append((fuzzres, poolid))

# Signal end-of-results: push a None sentinel into every registered pool's queue
# so consumers blocked on iter_results() unblock and terminate.
# NOTE(review): diff-view fragment — original indentation was lost in the page scrape.
def _stop_to_pools(self):
# list() snapshots the keys so concurrent pool registration can't break iteration.
for p in list(self.pool_map.keys()):
self.pool_map[p]["queue"].put(None)

# Pycurl management
def _create_pool(self, num_conn):
# Pre-allocate a list of curl objects
self.m = pycurl.CurlMulti()
self.m.handles = []

for i in range(num_conn):
c = pycurl.Curl()
self.m.handles.append(c)
self.freelist.put(c)

def cleanup(self):
self.exit_job = True
for th in self.ths:
th.join()

def register(self):
with self.mutex_reg:
with self.mutex_stats:
self._registered += 1

if not self.pool_map:
self._initialize()
return self.default_poolid
else:
return self._new_pool()
if not self.pool_map:
self._initialize()

return self._new_pool()

def deregister(self):
with self.mutex_reg:
with self.mutex_stats:
self._registered -= 1

if self._registered <= 0:
self.cleanup()
if self._registered <= 0:
self.cleanup()

def _get_next_proxy(self, proxy_list):
i = 0
Expand All @@ -141,19 +127,18 @@ def _get_next_proxy(self, proxy_list):
i += 1
i = i % len(proxy_list)

def _set_extra_options(self, c, freq, poolid):
def _set_extra_options(self, c, fuzzres, poolid):
if self.pool_map[poolid]["proxy"]:
ip, port, ptype = next(self.pool_map[poolid]["proxy"])

freq.wf_proxy = (("%s:%s" % (ip, port)), ptype)
fuzzres.history.wf_proxy = (("%s:%s" % (ip, port)), ptype)

c.setopt(pycurl.PROXY, "%s:%s" % (ip, port))
if ptype == "SOCKS5":
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
elif ptype == "SOCKS4":
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
elif ptype == "HTTP":
pass
c.setopt(pycurl.PROXY, "%s:%s" % (ip, port))
else:
raise FuzzExceptBadOptions("Bad proxy type specified, correct values are HTTP, SOCKS4 or SOCKS5.")
else:
Expand All @@ -169,79 +154,85 @@ def _set_extra_options(self, c, freq, poolid):

return c

def _read_retry_queue(self):
while not self.exit_job:
res, poolid = self.retrylist.get()
def _process_curl_handle(self, curl_h):
buff_body, buff_header, res, poolid = curl_h.response_queue

if res is None:
break
try:
res.history.from_http_object(curl_h, buff_header.getvalue(), buff_body.getvalue())
except Exception as e:
self.pool_map[poolid]["queue"].put(res.update(exception=e))
else:
# reset type to result otherwise backfeed items will enter an infinite loop
self.pool_map[poolid]["queue"].put(res.update())

self.enqueue(res, poolid)
with self.mutex_stats:
self.processed += 1

def _read_multi_stack(self):
# Check for curl objects which have terminated, and add them to the freelist
while not self.exit_job:
with self.mutex_multi:
while not self.exit_job:
ret, num_handles = self.m.perform()
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
def _process_curl_should_retry(self, res, errno, poolid):
# Usual suspects:

num_q, ok_list, err_list = self.m.info_read()
for c in ok_list:
# Parse response
buff_body, buff_header, res, poolid = c.response_queue
# Exception in perform (35, 'error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table')
# Exception in perform (18, 'SSL read: error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table, errno 11')
# Exception in perform (28, 'Connection time-out')
# Exception in perform (7, "couldn't connect to host")
# Exception in perform (6, "Couldn't resolve host 'www.xxx.com'")
# (28, 'Operation timed out after 20000 milliseconds with 0 bytes received')
# Exception in perform (28, 'SSL connection timeout')
# 5 Couldn't resolve proxy 'aaa'

try:
res.history.from_http_object(c, buff_header.getvalue(), buff_body.getvalue())
except Exception as e:
self.pool_map[poolid]["queue"].put(res.update(exception=e))
else:
# reset type to result otherwise backfeed items will enter an infinite loop
self.pool_map[poolid]["queue"].put(res.update())
# retry requests with recoverable errors
if errno not in [28, 7, 6, 5]:
res.history.wf_retries += 1

self.m.remove_handle(c)
self.freelist.put(c)
if res.history.wf_retries < self.options.get("retries"):
self._request_list.append((res, poolid))
return True

with self.mutex_stats:
self.processed += 1
return False

for c, errno, errmsg in err_list:
buff_body, buff_header, res, poolid = c.response_queue
def _process_curl_handle_error(self, res, errno, errmsg, poolid):
e = FuzzExceptNetError("Pycurl error %d: %s" % (errno, errmsg))
res.history.totaltime = 0
self.pool_map[poolid]["queue"].put(res.update(exception=e))

res.history.totaltime = 0
self.m.remove_handle(c)
self.freelist.put(c)
with self.mutex_stats:
self.processed += 1

def _read_multi_stack(self):
# Check for curl objects which have terminated, and add them to the curlh_freelist
while not self.exit_job:
while not self.exit_job:
ret, num_handles = self.m.perform()
if ret != pycurl.E_CALL_MULTI_PERFORM:
break

# Usual suspects:
num_q, ok_list, err_list = self.m.info_read()
for curl_h in ok_list:
self._process_curl_handle(curl_h)
self.m.remove_handle(curl_h)
self.curlh_freelist.append(curl_h)

# Exception in perform (35, 'error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table')
# Exception in perform (18, 'SSL read: error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table, errno 11')
# Exception in perform (28, 'Connection time-out')
# Exception in perform (7, "couldn't connect to host")
# Exception in perform (6, "Couldn't resolve host 'www.xxx.com'")
# (28, 'Operation timed out after 20000 milliseconds with 0 bytes received')
# Exception in perform (28, 'SSL connection timeout')
# 5 Couldn't resolve proxy 'aaa'
for curl_h, errno, errmsg in err_list:
buff_body, buff_header, res, poolid = curl_h.response_queue

# retry requests with recoverable errors
if errno not in [28, 7, 6, 5]:
res.history.wf_retries += 1
if not self._process_curl_should_retry(res, errno, poolid):
self._process_curl_handle_error(res, errno, errmsg, poolid)

if res.history.wf_retries < self.options.get("retries"):
self.retrylist.put((res, poolid))
continue
self.m.remove_handle(curl_h)
self.curlh_freelist.append(curl_h)

e = FuzzExceptNetError("Pycurl error %d: %s" % (errno, errmsg))
self.pool_map[poolid]["queue"].put(res.update(exception=e))
while self.curlh_freelist and self._request_list:
curl_h = self.curlh_freelist.pop()
fuzzres, poolid = self._request_list.popleft()

with self.mutex_stats:
self.processed += 1
self.m.add_handle(
self._prepare_curl_h(curl_h, fuzzres, poolid)
)

self._stop_to_pools()
self.retrylist.put((None, None))

# cleanup multi stack
for c in self.m.handles:
for c in self.handles:
c.close()
self.freelist.put(c)
self.curlh_freelist.append(c)
self.m.close()

0 comments on commit f7bbca4

Please sign in to comment.