Skip to content

Commit

Permalink
v0.4.7
Browse files Browse the repository at this point in the history
  • Loading branch information
toolswatch committed Feb 2, 2014
1 parent fdeaef6 commit a6d39d1
Show file tree
Hide file tree
Showing 7 changed files with 293 additions and 51 deletions.
15 changes: 15 additions & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,21 @@ Target Audience
Changelog
=========

Beta v0.4.7
-----------
* Refactored the `vfeed_update.py` script as a separate class vFeedUpdate (vfeed/update.py). The method `update()` can be invoked to update the vulnerability database vFeed.db
* Added the support to HP (Hewlett-Packard) patch ids
* Added the support to BID - SecurityFocus ids
* Updated the Ubuntu, Redhat, CERT-VN mappers. Many new IDs have been added to vFeed.db
* To reflect the newest cross references, the following methods have been added:
- `get_hp` to enumerate HP ids. This function returns the patches alongside their links
- `get_bid` to list SecurityFocus ids
- `update` to download the newest vFeed.db database. `python vfeedcli.py update` will do the trick now.

* vfeed.db, the sqlite open source cross-linked vulnerability database, fully regenerated to support the new changes
* Documentation updated accordingly


Beta v0.4.6
-----------

Expand Down
1 change: 1 addition & 0 deletions vfeed/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from .api import vFeed
from .info import vFeedInfo
from .exportxml import vFeedXML
from .update import vFeedUpdate
from . import config
36 changes: 35 additions & 1 deletion vfeed/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ def __init__(self, cveID):
self.vfeed_db_url = config.database['primary']['url']
self.oval_url = config.gbVariables['oval_url']
self.edb_url = config.gbVariables['edb_url']
self.bid_url = config.gbVariables['bid_url']

self.cveID = cveID.upper()
self._check_env(self.vfeed_db)
Expand Down Expand Up @@ -345,6 +346,25 @@ def get_aixapar(self):

return self.AIXAPAR_id

def get_bid(self):
    '''
    Returning: BID ids and url link

    Looks up the SecurityFocus (BID) identifiers mapped to the CVE under
    review and returns them as {index: {'id': ..., 'link': ...}}.
    '''
    self.cnt = 0
    self.BID_id = {}
    self.cur.execute('SELECT * FROM map_cve_bid WHERE cveid=?', self.query)

    for self.data in self.cur.fetchall():
        # First column of map_cve_bid is the BID number itself.
        bid = str(self.data[0])
        self.BID_id[self.cnt] = {
            'id': bid,
            'link': self.bid_url + bid,
        }
        self.cnt += 1

    return self.BID_id



def get_redhat(self):
'''
Returning: Redhat IDs & Bugzilla
Expand Down Expand Up @@ -662,7 +682,6 @@ def get_snort(self):

return self.SNORT_id


def get_suricata(self):
'''
Returning: Suricata references as dictionary
Expand All @@ -681,7 +700,22 @@ def get_suricata(self):

return self.SURICATA_id

def get_hp(self):
    '''
    Returning: HP references as dictionary

    Enumerates the Hewlett-Packard patch references mapped to the CVE
    under review as {index: {'id': ..., 'link': ...}}.
    '''
    self.cnt = 0
    self.HP_id = {}
    self.cur.execute('SELECT * FROM map_cve_hp WHERE cveid=?', self.query)

    for self.data in self.cur.fetchall():
        # Columns of map_cve_hp: patch identifier, then its advisory link.
        patch_id, patch_link = str(self.data[0]), str(self.data[1])
        self.HP_id[self.cnt] = {
            'id': patch_id,
            'link': patch_link,
        }
        self.cnt += 1

    return self.HP_id


def get_risk(self):
Expand Down
3 changes: 2 additions & 1 deletion vfeed/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
'__title__': 'vFeed - Open Source Cross-linked and Aggregated Local Vulnerability Database',
'__website__': 'http://www.toolswatch.org/vfeed',
'__mainRepository__': 'https://github.com/toolswatch/vFeed',
'__build__': 'beta 0.4.6',
'__build__': 'beta 0.4.7',
}


Expand Down Expand Up @@ -45,6 +45,7 @@

gbVariables = {
'cve_url': 'http://cve.mitre.org/cgi-bin/cvename.cgi?name=',
'bid_url': 'http://www.securityfocus.com/bid/',
'certvn_url':'http://www.kb.cert.org/vuls/id/',
'edb_url': 'http://www.exploit-db.com/exploits/',
'oval_url': 'http://oval.mitre.org/repository/data/getDef?id=',
Expand Down
37 changes: 30 additions & 7 deletions vfeed/exportxml.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def __init__(self, cveID):
self.milw0rm_url = config.gbVariables['milw0rm_url']
self.ms_bulletin_url = config.gbVariables['ms_bulletin_url']
self.ms_kb_url = config.gbVariables['ms_kb_url']
self.bid_url = config.gbVariables['bid_url']

#Invoking the vFeed api with CVE object
self.cveID = cveID.upper()
Expand All @@ -39,6 +40,7 @@ def __init__(self, cveID):
# Calling all available methods
self.cveInfo = self.vfeed.get_cve()
self.cveRef = self.vfeed.get_refs()
self.cveBID = self.vfeed.get_bid()
self.SCIP_id = self.vfeed.get_scip()
self.CERTVN_id = self.vfeed.get_certvn()
self.IAVM_id = self.vfeed.get_iavm()
Expand Down Expand Up @@ -68,7 +70,8 @@ def __init__(self, cveID):
self.MSF_id = self.vfeed.get_msf()
self.MILWORM_id = self.vfeed.get_milw0rm()
self.SNORT_id = self.vfeed.get_snort()
self.SURICATA_id = self.vfeed.get_suricata()
self.SURICATA_id = self.vfeed.get_suricata()
self.HP_id = self.vfeed.get_hp()

def export(self):
'''
Expand Down Expand Up @@ -176,6 +179,15 @@ def export(self):
'source': "DISA/IAVM",
})

# Exporting BID ref from Mapping

for i in range(0, len(self.cveBID)):
self.source_head = SubElement(self.mappedrefs_head, 'ref',
{'id': self.cveBID[i]['id'],
'url': self.cveBID[i]['link'],
'source': "SecurityFocus",
})

# Exporting OSVDB ref from Mapping

for i in range(0, len(self.OSVDB_id)):
Expand Down Expand Up @@ -357,6 +369,14 @@ def export(self):
'source': 'FEDORA',
})

## Exporting HP Patches

for i in range(0, len(self.HP_id)):
self.patch_head = SubElement(self.patchmanagement_head, 'patch',
{'id': self.HP_id[i]['id'],
'link': self.HP_id[i]['link'],
'source': 'Hewlett-Packard',
})


# Attack and Weaknesses Patterns
Expand Down Expand Up @@ -407,12 +427,15 @@ def export(self):
})

for i in range(0, len(self.REDHAT_id)):
self.ovalChecks_head = SubElement(self.securitytest_head, 'check',
{'type': 'Local Security Testing',
'id': self.REDHAT_id[i]['oval'],
'utility': "OVAL Interpreter",
'file': self.redhat_oval_url + self.REDHAT_id[i]['oval'].split('oval:com.redhat.rhsa:def:')[1] + '.xml',
})
try:
self.ovalChecks_head = SubElement(self.securitytest_head, 'check',
{'type': 'Local Security Testing',
'id': self.REDHAT_id[i]['oval'],
'utility': "OVAL Interpreter",
'file': self.redhat_oval_url + self.REDHAT_id[i]['oval'].split('oval:com.redhat.rhsa:def:')[1] + '.xml',
})
except:
pass

## Exporting Nessus attributes
for i in range(0, len(self.NESSUS_id)):
Expand Down
141 changes: 141 additions & 0 deletions vfeed/update.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
import os,sys
import urllib2
import tarfile
import hashlib
from . import vFeed
from . import config

'''
update.py - Class to update the vfeed.db correlated and aggregated vulnerability database
'''

class vFeedUpdate(object):
'''
Download the vfeed.db tgz'd file
Check for the checksum and decompress
Do not interrupt the process. If something wrong, it will flag it
The support for proxy will be added later on (or if you got the guts to do it, be my guest)
'''
def __init__(self):

self.configData = config.database['primary']
self.vfeed_db_primary_url = self.configData['url']
self.vfeed_db_compressed = self.configData['vfeed_db_compressed']
self.vfeed_db = self.configData['vfeed_db']
self.updateStatus = self.configData['updateStatus']
self.urlCompressed = self.vfeed_db_primary_url + self.vfeed_db_compressed
self.urlUpdate = self.vfeed_db_primary_url + self.updateStatus

def update(self):
'''
Download the db and decompress it
Output : vfeed.db
'''

if not os.path.isfile(self.vfeed_db):
print '[install] getting fresh copy of %s. It may take a while ...' %self.vfeed_db
self._updateDB(self.urlCompressed)
print '\n[info] decompressing %s ...' %self.vfeed_db_compressed
self._uncompress()
self.cleaning()
exit(0)

if os.path.isfile(self.vfeed_db):
print '[info] checking for the latest %s ' %self.vfeed_db
self._checkDBversion()

def _updateDB(self,url):
'''
This function was found on internet.
So thanks to its author wherever he is.
'''

self.filename = url.split('/')[-1]
self.u = urllib2.urlopen(url)
self.f = open(self.filename, 'wb')
self.meta = self.u.info()
self.filesize = int(self.meta.getheaders("Content-Length")[0])

self.filesize_dl = 0
self.block_sz = 8192
while True:
sys.stdout.flush()
self.buffer = self.u.read(self.block_sz)
if not self.buffer:
break

self.filesize_dl += len(self.buffer)
self.f.write(self.buffer)
self.status = r"%10d [%3.0f %%]" % (self.filesize_dl, self.filesize_dl * 100. / self.filesize)
self.status = self.status + chr(8)*(len(self.status)+1)
sys.stdout.write("\r[progress %3.0f %%] receiving %d out of %s Bytes of %s " % (self.filesize_dl * 100. / self.filesize, self.filesize_dl,self.filesize,self.filename))
sys.stdout.flush()

self.f.close()


def _uncompress(self):
'''
uncompress the tgz db
'''
if not os.path.isfile(self.vfeed_db_compressed):
print '[error] ' + self.vfeed_db_compressed + ' not found'
print '[info] Get manually your copy from %s' % self.config.database['primary']['url']
exit(0)

try:
tar = tarfile.open(self.vfeed_db_compressed, 'r:gz')
tar.extractall('.')
self.tar.close
except:
print '[error] Database not extracted.'


def _checkDBversion(self):
'''
updating the existing vfeed database if needed
'''
self._updateDB(self.urlUpdate)
self.hashLocal = self.checksumfile(self.vfeed_db)
with open(self.updateStatus,'r') as f:
self.output = f.read()
self.hashRemote = self.output.split(',')[1]

if self.hashRemote <> self.hashLocal:
print '[New Update] Downloading the recent vFeed Database %s from %s' %(self.vfeed_db_compressed,self.vfeed_db_primary_url)
self._updateDB(self.urlCompressed)
print '[info] Decompressing %s ...' %self.vfeed_db_compressed
self._uncompress()
self.cleaning()
exit(0)

if self.hashRemote == self.hashLocal:
print '\n[info] You have the latest %s vulnerability database' %self.vfeed_db
self.cleaning()

def checksumfile(self,file):
'''
returning the sha1 hash value
'''
self.sha1 = hashlib.sha1()
self.f = open(file, 'rb')
try:
self.sha1.update(self.f.read())
finally:
self.f.close()
return self.sha1.hexdigest()

def cleaning(self):
'''
Cleaning the tgz and .dat temporary files
'''
print '[info] Cleaning compressed database and update file'
try:
if os.path.isfile(self.vfeed_db_compressed):
os.remove(self.vfeed_db_compressed)
if os.path.isfile(self.updateStatus):
os.remove(self.updateStatus)
except:
print '[exception] Already cleaned'

Loading

0 comments on commit a6d39d1

Please sign in to comment.