
Commit

Reduce duplicated findings between anything found by other plugins and errors found by error_pages
andresriancho committed Jun 28, 2017
1 parent e9a3221 commit 0cf71aa
Showing 1 changed file with 51 additions and 13 deletions.
64 changes: 51 additions & 13 deletions w3af/plugins/grep/error_pages.py
@@ -21,6 +21,7 @@
"""
import w3af.core.data.kb.knowledge_base as kb

from w3af.core.data.db.disk_list import DiskList
from w3af.core.data.esmre.multi_in import multi_in
from w3af.core.data.esmre.multi_re import multi_re
from w3af.core.data.kb.info import Info
@@ -36,9 +37,11 @@ class error_pages(GrepPlugin):

ERROR_PAGES = (
'<H1>Error page exception</H1>',

# This signature fires up also in default 404 pages of aspx which
# generates a lot of noise, so ... disabling it
#mesg.append('<span><H1>Server Error in ',
# '<span><H1>Server Error in ',

'<h2> <i>Runtime Error</i> </h2></span>',
'<h2> <i>Access is denied</i> </h2></span>',
'<H3>Original Exception: </H3>',
@@ -111,7 +114,6 @@ class error_pages(GrepPlugin):
# ruby
'<h1 class="error_title">Ruby on Rails application could not be started</h1>',


# Coldfusion
'<title>Error Occurred While Processing Request</title></head><body><p></p>',
'<HTML><HEAD><TITLE>Error Occurred While Processing Request</TITLE></HEAD><BODY><HR><H3>',
@@ -143,6 +145,9 @@ class error_pages(GrepPlugin):
def __init__(self):
GrepPlugin.__init__(self)

# Internal variables
self._potential_vulns = DiskList(table_prefix='error_pages')

self._already_reported_versions = []
self._compiled_regex = []

@@ -161,22 +166,55 @@ def grep(self, request, response):
self.find_version_numbers(request, response)

def find_error_page(self, request, response):
# There is no need to report more than one info for the
# same result, the user will read the info object and
# analyze it even if we report it only once. If we report
# it twice, he'll get mad ;)
for _, _, _, url, _ in self._potential_vulns:
if url == response.get_url():
return

for msg in self._multi_in.query(response.body):

# We found a new error in a response!
desc = 'The URL: "%s" contains the descriptive error: "%s".'
desc %= (response.get_url(), msg)
i = Info('Descriptive error page', desc, response.id,
self.get_name())
i.set_url(response.get_url())
i.add_to_highlight(msg)

self.kb_append_uniq(self, 'error_page', i, 'URL')

# There is no need to report more than one info for the same result,
# the user will read the info object and analyze it even if we
# report it only once. If we report it twice, he'll get mad ;)

title = 'Descriptive error page'

data = (title, desc, response.id, response.get_url(), msg)
self._potential_vulns.append(data)

# Just report one instance for each HTTP response, no
# matter if multiple strings match
break

def end(self):
"""
This method is called when the plugin won't be used anymore.
"""
all_findings = kb.kb.get_all_findings()

for title, desc, _id, url, highlight in self._potential_vulns:
for info in all_findings:
# This makes sure that if the sqli plugin found a vulnerability
# in the same URL as we found a detailed error, we won't report
# the detailed error.
#
# If the user fixes the sqli vulnerability and runs the scan again
# most likely the detailed error will disappear too. If the sqli
# vulnerability disappears and this one remains, it will appear
# as a new vulnerability in the second scan.
if info.get_url() == url:
break
else:
i = Info(title, desc, _id, self.get_name())
i.set_url(url)
i.add_to_highlight(highlight)

self.kb_append_uniq(self, 'error_page', i)

self._potential_vulns.cleanup()

def find_version_numbers(self, request, response):
"""
Now I'll check if I can get a version number from the error page
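The change defers reporting: during grep(), find_error_page() no longer calls kb_append_uniq directly, it only buffers a (title, desc, response id, url, highlight) tuple into a disk-backed list, at most one per URL. A minimal, self-contained sketch of that buffering step, assuming a plain Python list stands in for w3af's DiskList and with the hypothetical helper name buffer_error_page:

# Two signatures copied from the ERROR_PAGES tuple above, for illustration only.
ERROR_SIGNATURES = ('<H1>Error page exception</H1>',
                    '<h2> <i>Runtime Error</i> </h2></span>')

def buffer_error_page(potential_vulns, url, body, response_id):
    # At most one buffered entry per URL: if this URL was already seen,
    # there is nothing new to record.
    for _, _, _, buffered_url, _ in potential_vulns:
        if buffered_url == url:
            return

    for msg in ERROR_SIGNATURES:
        if msg in body:
            desc = ('The URL: "%s" contains the descriptive error: "%s".'
                    % (url, msg))
            potential_vulns.append(('Descriptive error page', desc,
                                    response_id, url, msg))
            # One entry per HTTP response is enough, even when several
            # signatures match.
            break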

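The buffered entries are only turned into Info objects in end(), once every other plugin has had the chance to populate the knowledge base; any URL that already carries a finding (for example from the sqli plugin) is skipped. A minimal sketch of that flush, assuming existing_findings holds objects exposing get_url() like the ones returned by kb.kb.get_all_findings() in the diff, and report stands in for kb_append_uniq; both helper names are hypothetical:

def flush_potential_vulns(potential_vulns, existing_findings, report):
    for title, desc, _id, url, highlight in potential_vulns:
        for finding in existing_findings:
            if finding.get_url() == url:
                # Another plugin already reported something on this URL;
                # fixing that issue will most likely remove the error page
                # too, so the descriptive-error info is suppressed.
                break
        else:
            # The inner loop found no finding on this URL, so the
            # descriptive error page is reported on its own.
            report(title, desc, _id, url, highlight)

Deferring the check to end() means the de-duplication runs against the complete knowledge base rather than whatever other plugins happened to have reported at grep() time.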