Skip to content

Commit

Permalink
Merge pull request xmendez#84 from xmendez/httpbin
Browse files Browse the repository at this point in the history
Httpbin acceptance tests and other bug fixes
  • Loading branch information
xmendez authored May 26, 2018
2 parents bf11499 + 6d74a90 commit fede146
Show file tree
Hide file tree
Showing 12 changed files with 108 additions and 35 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ install:
- python setup.py install
script:
- flake8 --ignore=E501,E402,F401 src tests
- coverage run --append -m unittest discover -s tests/
- coverage run --append -m unittest discover -v -s tests/
- if [[ $TRAVIS_PYTHON_VERSION == '3.6' && $TRAVIS_BRANCH == 'master' ]]; then codecov; fi
stages:
- test
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,4 @@ docs:
cd docs && make html

coverage:
coverage report --skip-covered --include "*site-packages/wfuzz*" -m
coverage report --skip-covered --include "*python3.5/site-packages/wfuzz*" -m
5 changes: 4 additions & 1 deletion src/wfuzz/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,10 @@ def __iter__(self):

def _gen(self):
while 1:
payload_list = next(self.__payload)
try:
payload_list = next(self.__payload)
except StopIteration:
return

for name in self.__encoders:
if name.find('@') > 0:
Expand Down
7 changes: 2 additions & 5 deletions src/wfuzz/externals/reqresp/Request.py
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ def to_pycurl_object(c, req):
curl_options = {
"GET": pycurl.HTTPGET,
"POST": pycurl.POST,
"PUT": pycurl.UPLOAD,
"PATCH": pycurl.UPLOAD,
"HEAD": pycurl.NOBODY,
}

Expand Down Expand Up @@ -343,14 +343,11 @@ def response_from_conn_object(self, conn, header, body):
# followlocation
if conn.getinfo(pycurl.EFFECTIVE_URL) != self.completeUrl:
self.setFinalUrl(conn.getinfo(pycurl.EFFECTIVE_URL))
# pycurl response headers includes original => remove
header = header[header.find("\r\n\r\n") + 1:]

self.totaltime = conn.getinfo(pycurl.TOTAL_TIME)

rp = Response()
rp.parseResponse(header)
rp.addContent(body)
rp.parseResponse(header, rawbody=body)

if self.schema == "https" and self.__proxy:
self.response = Response()
Expand Down
33 changes: 25 additions & 8 deletions src/wfuzz/externals/reqresp/Response.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import string
from io import BytesIO
import gzip
import zlib

from .TextParser import TextParser

Expand Down Expand Up @@ -86,12 +87,12 @@ def getAll_wpost(self):
string += i + ": " + j + "\r\n"
return string

def parseResponse(self, rawResponse, type="curl"):
def parseResponse(self, rawheader, rawbody=None, type="curl"):
self.__content = ""
self._headers = []

tp = TextParser()
tp.setSource("string", rawResponse)
tp.setSource("string", rawheader.decode('utf-8', errors='replace'))

tp.readUntil("(HTTP\S*) ([0-9]+)")
while True:
Expand Down Expand Up @@ -135,7 +136,7 @@ def parseResponse(self, rawResponse, type="curl"):

if self.header_equal("Transfer-Encoding", "chunked"):
result = ""
content = BytesIO(self.__content)
content = BytesIO(rawbody)
hexa = content.readline()
nchunk = int(hexa.strip(), 16)

Expand All @@ -145,11 +146,27 @@ def parseResponse(self, rawResponse, type="curl"):
hexa = content.readline()
nchunk = int(hexa.strip(), 16)

self.__content = result
rawbody = result

if self.header_equal("Content-Encoding", "gzip"):
compressedstream = BytesIO(self.__content)
gzipper = gzip.GzipFile(compressedstream)
body = gzipper.read()
self.__content = body
compressedstream = BytesIO(rawbody)
gzipper = gzip.GzipFile(fileobj=compressedstream)
rawbody = gzipper.read()
self.delHeader("Content-Encoding")
elif self.header_equal("Content-Encoding", "deflate"):
deflated_data = None
try:
deflater = zlib.decompressobj()
deflated_data = deflater.decompress(rawbody)
deflated_data += deflater.flush()
except zlib.error:
try:
deflater = zlib.decompressobj(-zlib.MAX_WBITS)
deflated_data = deflater.decompress(rawbody)
deflated_data += deflater.flush()
except zlib.error:
deflated_data = ''
rawbody = deflated_data
self.delHeader("Content-Encoding")

self.__content = rawbody.decode('utf-8', errors='replace')
2 changes: 2 additions & 0 deletions src/wfuzz/fuzzobjects.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,6 +301,8 @@ def get_field(self, field):
field = alias[field]

if field in ["url", "method", "scheme", "host", "content", "raw_content", "code"]:
return getattr(self, field)
elif field in ["code"]:
return str(getattr(self, field))
elif field.startswith("cookies"):
return self.cookies.get_field(field).strip()
Expand Down
2 changes: 1 addition & 1 deletion src/wfuzz/myhttp.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ def _read_multi_stack(self):
# Parse response
buff_body, buff_header, res, poolid = c.response_queue

res.history.from_http_object(c, buff_header.getvalue().decode('UTF-8'), buff_body.getvalue().decode('UTF-8'))
res.history.from_http_object(c, buff_header.getvalue(), buff_body.getvalue())

# reset type to result otherwise backfeed items will enter an infinite loop
self.pool_map[poolid]["queue"].put(res.update())
Expand Down
14 changes: 10 additions & 4 deletions src/wfuzz/ui/console/output.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,16 @@
from __future__ import print_function

import math
import io
import operator
from functools import reduce

# Python 2 and 3: zip_longest
from six import StringIO
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest


def indent(rows, hasHeader=False, headerChar='-', delim=' | ', justify='left', separateRows=False, prefix='', postfix='', wrapfunc=lambda x: x):
"""
Expand All @@ -28,17 +34,17 @@ def indent(rows, hasHeader=False, headerChar='-', delim=' | ', justify='left', s
# closure for breaking logical rows to physical, using wrapfunc
def rowWrapper(row):
newRows = [wrapfunc(item).split('\n') for item in row]
return [[substr or '' for substr in item] for item in map(None, *newRows)]
return [[substr or '' for substr in item] for item in zip_longest(*newRows)]
# break each logical row into one or more physical ones
logicalRows = [rowWrapper(row) for row in rows]
# columns of physical rows
columns = map(None, *reduce(operator.add, logicalRows))
columns = zip_longest(*reduce(operator.add, logicalRows))
# get the maximum of each column by the string length of its items
maxWidths = [max([len(str(item)) for item in column]) for column in columns]
rowSeparator = headerChar * (len(prefix) + len(postfix) + sum(maxWidths) + len(delim) * (len(maxWidths) - 1))
# select the appropriate justify method
justify = {'center': str.center, 'right': str.rjust, 'left': str.ljust}[justify.lower()]
output = io.StringIO()
output = StringIO()
if separateRows:
print(rowSeparator, file=output)
for physicalRows in logicalRows:
Expand Down
4 changes: 4 additions & 0 deletions tests/server_dir/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,7 @@ services:
ports:
- "8080:8080"
command: mitmdump
httpbin:
image: kennethreitz/httpbin
ports:
- "9000:80"
53 changes: 41 additions & 12 deletions tests/test_acceptance.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,14 @@
HTTPD_PORT = 8000

ECHO_URL = "%s:8000/echo" % (LOCAL_DOMAIN)
HTTPBIN_URL = "http://localhost:9000"

REPLACE_HOSTNAMES = [
('localhost:8000', 'httpserver:8000'),
('localhost:9000', 'httpbin:80'),
('9000', '80'),
('localhost', 'httpserver'),
]

# $ export PYTHONPATH=./src
# $ python -m unittest discover
Expand All @@ -31,6 +39,19 @@
]

basic_tests = [
# httpbin extra tests
("test_gzip", "%s/FUZZ" % HTTPBIN_URL, [["gzip"]], dict(filter="content~'\"gzipped\":true'"), [(200, '/gzip')], None),
("test_response_utf8", "%s/encoding/FUZZ" % HTTPBIN_URL, [["utf8"]], dict(), [(200, '/encoding/utf8')], None),
("test_image", "%s/image/FUZZ" % HTTPBIN_URL, [["jpeg"]], dict(filter="content~'JFIF'"), [(200, '/image/jpeg')], None),
("test_deflate", "%s/FUZZ" % HTTPBIN_URL, [["deflate"]], dict(filter="content~'\"deflated\":true'"), [(200, '/deflate')], None),

("test_robots_disallow", "%s/FUZZ" % HTTPBIN_URL, [["robots.txt"]], dict(script="robots"), [(200, '/deny'), (200, '/robots.txt')], None),
("test_response_base64", "%s/base64/FUZZ" % HTTPBIN_URL, None, dict(filter="content~'HTTPBIN is awesome'", payloads=[("list", dict(values="HTTPBIN is awesome", encoder=["base64"]))]), [(200, '/base64/SFRUUEJJTiBpcyBhd2Vzb21l')], None),
("test_basic_auth", "%s/basic-auth/FUZZ/FUZZ" % HTTPBIN_URL, [["userpass"]], dict(auth=("basic", "FUZZ:FUZZ")), [(200, '/basic-auth/userpass/userpass')], None),
("test_digest_auth", "%s/digest-auth/auth/FUZZ/FUZZ" % HTTPBIN_URL, [["userpass"]], dict(auth=("digest", "FUZZ:FUZZ")), [(200, '/digest-auth/auth/userpass/userpass')], None),
("test_delayed_response", "%s/delay/FUZZ" % HTTPBIN_URL, [["2"]], dict(req_delay=1), [(200, '/delay/2')], 'Operation timed out'),
("test_static_strquery_set", "%s/FUZZ?var=1&var2=2" % HTTPBIN_URL, [["anything"], ['PUT', 'GET', 'POST', 'DELETE']], dict(method='FUZ2Z', filter="content~'\"args\":{\"var\":\"1\",\"var2\":\"2\"}'"), [(200, '/anything')] * 4, None),

# set static HTTP values
("test_static_strquery_set", "%s:8000/FUZZ?var=1&var=2" % LOCAL_DOMAIN, [["echo"]], dict(filter="content~'query=var=1&var=2'"), [(200, '/echo')], None),
("test_static_postdata_set", "%s:8000/FUZZ" % LOCAL_DOMAIN, [["echo"]], dict(postdata="a=2", filter="content~'POST_DATA=a=2'"), [(200, '/echo')], None),
Expand Down Expand Up @@ -172,9 +193,10 @@ def test(self):
proxied_url = url
proxied_payloads = payloads
if "proxies" in extra_params:
proxied_url = url.replace('localhost', 'httpserver')
if payloads:
proxied_payloads = [[payload.replace("localhost", "httpserver") for payload in payloads_list] for payloads_list in payloads]
for original_host, proxied_host in REPLACE_HOSTNAMES:
proxied_url = proxied_url.replace(original_host, proxied_host)
if proxied_payloads:
proxied_payloads = [[payload.replace(original_host, proxied_host) for payload in payloads_list] for payloads_list in proxied_payloads]

with wfuzz.FuzzSession(url=proxied_url) as s:
same_list = [(x.code, x.history.urlparse.path) for x in s.get_payloads(proxied_payloads).fuzz(**extra_params)]
Expand Down Expand Up @@ -258,17 +280,21 @@ def test(self):
return test


def create_test(test_name, url, payloads, params, expected_res, extra_params, exception_str):
    """Build a wfuzz acceptance test and attach it to DynamicTests under test_name.

    When exception_str is given, the generated test is wrapped so that it
    asserts the named exception is raised instead of checking results.
    """
    test_fn = wfuzz_me_test_generator(url, payloads, params, expected_res, extra_params)
    if exception_str:
        test_fn = wfuzz_me_test_generator_exception(test_fn, exception_str)
    setattr(DynamicTests, test_name, test_fn)


def create_tests_from_list(test_list):
"""
Creates tests cases where wfuzz using the indicated url, params results are checked against expected_res
"""
for test_name, url, payloads, params, expected_res, exception_str in test_list:
test_fn = wfuzz_me_test_generator(url, payloads, params, expected_res, None)
if exception_str:
test_fn_exc = wfuzz_me_test_generator_exception(test_fn, exception_str)
setattr(DynamicTests, test_name, test_fn_exc)
else:
setattr(DynamicTests, test_name, test_fn)
create_test(test_name, url, payloads, params, expected_res, None, exception_str)


def duplicate_tests_diff_params(test_list, group, next_extra_params, previous_extra_params):
Expand All @@ -284,8 +310,7 @@ def duplicate_tests_diff_params(test_list, group, next_extra_params, previous_ex
if previous_extra_params:
prev_extra = dict(list(params.items()) + list(previous_extra_params.items()))

test_fn = wfuzz_me_test_generator(url, payloads, prev_extra, None, next_extra)
setattr(DynamicTests, new_test, test_fn)
create_test(new_test, url, payloads, prev_extra, None, next_extra, exception_str)


def duplicate_tests(test_list, group, test_gen_fun):
Expand All @@ -297,7 +322,11 @@ def duplicate_tests(test_list, group, test_gen_fun):
new_test = "%s_%s" % (test_name, group)

test_fn = test_gen_fun(url, payloads, params, None)
setattr(DynamicTests, new_test, test_fn)
if exception_str:
test_fn_exc = wfuzz_me_test_generator_exception(test_fn, exception_str)
setattr(DynamicTests, new_test, test_fn_exc)
else:
setattr(DynamicTests, new_test, test_fn)


def create_tests():
Expand Down
15 changes: 15 additions & 0 deletions tests/test_clparser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import unittest

from wfuzz.ui.console.clparser import CLParser


class CLParserTest(unittest.TestCase):
    """Unit tests for the command-line parser (CLParser)."""

    def test_listplugins(self):
        """Listing available plugins (-e iterators) should exit cleanly with status 0."""
        with self.assertRaises(SystemExit) as exit_ctx:
            CLParser(['wfuzz', '-e', 'iterators']).parse_cl()

        self.assertEqual(exit_ctx.exception.code, 0)


# Allow running this test module directly: python tests/test_clparser.py
if __name__ == '__main__':
    unittest.main()
4 changes: 2 additions & 2 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ envlist = begin,docker,py27,py35,end
[testenv]
commands =
flake8 --ignore=E501,E402,F401 src tests
coverage run --append -m unittest discover -s tests/
coverage run --append -m unittest discover -v -s tests/
deps =
flake8
netaddr
Expand All @@ -21,7 +21,7 @@ commands = coverage erase
deps = coverage

[testenv:end]
commands = coverage report --skip-covered --include "*site-packages/wfuzz*" -m
commands = coverage report --skip-covered --include '*python3.5/site-packages/wfuzz*' -m
deps = coverage

[testenv:codecov]
Expand Down

0 comments on commit fede146

Please sign in to comment.