Skip to content

Commit

Permalink
Fixed a bug in HTML periodic renderer.
Browse files Browse the repository at this point in the history
Ignore-this: 9da55b6606e0e74ac4c01c379c9a7197

darcs-hash:20091018233236-20ca2-f0d6945b7264264d0422cfad00ce1beedd20157c.gz
  • Loading branch information
scudette committed Oct 18, 2009
1 parent 91574cb commit fef767d
Show file tree
Hide file tree
Showing 8 changed files with 63 additions and 50 deletions.
2 changes: 1 addition & 1 deletion config/Makefile.rules
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
%.so: %.c
## This compile line is for building shared objects under
## linux. The -fPIC seems to be required for 64 bit machines.
$(CC) $(CFLAGS) -shared -fPIC $(AM_CFLAGS) $(PYTHON_CPPFLAGS) -I. -I$(top_srcdir)/src/include $(PYTHON_LDFLAGS) -o $@ $? .libs/$*.a $(PYTHON_EXTRA_LIBS) $(AM_LDFLAGS)
$(CC) $(CFLAGS) -g -O0 -Wall -shared -fPIC $(AM_CFLAGS) $(PYTHON_CPPFLAGS) -I. -I$(top_srcdir)/src/include $(PYTHON_LDFLAGS) -o $@ $? .libs/$*.a $(PYTHON_EXTRA_LIBS) $(AM_LDFLAGS)
## $(STRIP) $@

clean-local:
Expand Down
8 changes: 4 additions & 4 deletions src/network/tcp.c
Original file line number Diff line number Diff line change
Expand Up @@ -392,7 +392,7 @@ TCPStream TCPHashTable_find_stream(TCPHashTable self, IP ip) {
if(!ip) return NULL;

tcp =(TCP)ip->packet.payload;

if(!tcp) return NULL;
/** If we did not get a TCP packet, we fail */
/** The below should work but does not because __TCP is defined in 2
different shared objects reassemble.so and dissect.so. We are
Expand All @@ -402,13 +402,13 @@ TCPStream TCPHashTable_find_stream(TCPHashTable self, IP ip) {
FIXME: A possible optimization would be to create a class hash
which we can use instead of a string comparison.
*/
*/
if(ISNAMEINSTANCE(tcp,"TCP")) {
tcp_packet = 1;
} else if(ISNAMEINSTANCE(tcp,"UDP")) {
udp_packet = 1;
} else return NULL;

forward.saddr = ip->packet.header.saddr;
forward.daddr = ip->packet.header.daddr;
forward.source = tcp->packet.header.source;
Expand Down Expand Up @@ -437,7 +437,7 @@ TCPStream TCPHashTable_find_stream(TCPHashTable self, IP ip) {
return i;
};
};

reverse.saddr = ip->packet.header.daddr;
reverse.daddr = ip->packet.header.saddr;
reverse.source = tcp->packet.header.dest;
Expand Down
4 changes: 2 additions & 2 deletions src/plugins/NetworkForensics/Reassembler.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,10 @@ def __init__(self, case, fd, inode):
self.inode_ids = [ int(x) for x in inode[1:].split("/")]

## Fill in some vital stats
dbh.execute("select inode.inode_id, reverse, src_ip, dest_ip, src_port, dest_port, ts_sec, type from `connection_details` join inode on inode.inode_id = connection_details.inode_id where inode.inode=%r limit 1", self.inode)
dbh.execute("select inode.inode_id, reverse, src_ip, dest_ip, src_port, dest_port, ts_sec from `connection_details` join inode on inode.inode_id = connection_details.inode_id where inode.inode=%r limit 1", self.inode)
row=dbh.fetch()
if not row:
dbh.execute("select inode_id,reverse, src_ip, dest_ip, src_port, dest_port, ts_sec, type from `connection_details` where inode_id = %r", self.inode_ids[0])
dbh.execute("select inode_id,reverse, src_ip, dest_ip, src_port, dest_port, ts_sec from `connection_details` where inode_id = %r", self.inode_ids[0])
row = dbh.fetch()

## This updates our properties from the db
Expand Down
15 changes: 7 additions & 8 deletions src/plugins/TableRenderers/HTMLBundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@ def generator(query, result):
self.add_file_from_string(page_name,
page_data.encode("utf8"))

yield "Page %s\n" % page
yield "(%s) %s: Page %s\n" % (os.getpid(), self.name, page)
page +=1

## update the TOC page:
Expand Down Expand Up @@ -419,19 +419,20 @@ def generate_rows(self, query, ordering=True):
queries to maximise the chance of getting cache hits.
"""
dbh = DB.DBO(self.case)
print query, ordering
ordering = True
self.sql = self._make_sql(query, ordering=ordering)
print self.sql

## This allows pyflag to cache the resultset, needed to speed
## paging of slow queries. FIXME - implement
dbh.execute(self.sql + " limit %s,%s" % (self.limit,self.end_limit-self.limit))
#dbh.execute(self.sql + " limit %s,%s" % (self.limit,self.end_limit-self.limit))
dbh.execute(self.sql)
self.count = 0
for row in dbh:
self.count += 1
yield row

if self.end_limit > 0 \
and self.count > self.end_limit: return
#if self.end_limit > 0 \
# and self.count > self.end_limit: return

def make_archive_filename(self, inode_id, directory = 'inodes/'):
## Add the inode to the exported bundle:
Expand Down Expand Up @@ -586,8 +587,6 @@ def export(self, case, inode_id, table_renderer):

## Now explain this file:
import pyflag.HTMLUI as HTMLUI
result = HTMLUI.HTMLUI(initial = True)
result.heading("How to derive inode %s" % fd.inode)

filename = "inodes/%s_explain.html" % inode_id
if not table_renderer.filename_in_archive(filename):
Expand Down
51 changes: 28 additions & 23 deletions src/plugins/TableRenderers/PeriodicHTML.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
"""

import HTMLBundle
import cPickle
import cPickle,os
import pyflag.DB as DB
import pyflag.FlagFramework as FlagFramework
import pyflag.pyflaglog as pyflaglog
Expand Down Expand Up @@ -84,27 +84,28 @@ def real_render_table(self):
## Ok we need to figure out which pages need updating - we
## assume that data is only added to the tables not removed.
self.limit = 0
dbh = DB.DBO(self.case)
dbh.execute("select count(*) as total from reporting where "
" page_name like '%s%%'", self.page_name)
total = dbh.fetch()['total']

## Now work out the limit of the last page - we redo the last
## page because it may be more complete now.
dbh.execute("select * from reporting where "
" page_name like '%s%%' order by `limit` desc limit 1",
self.page_name)
row = dbh.fetch()
if row:
self.query.set("start_limit", row['limit'])
## The initial page
page = total
else:
self.query.set("start_limit",0)
page = 1

# dbh = DB.DBO(self.case)
# dbh.execute("select count(*) as total from reporting where "
# " page_name like '%s%%'", self.page_name)
# total = dbh.fetch()['total']

# ## Now work out the limit of the last page - we redo the last
# ## page because it may be more complete now.
# dbh.execute("select * from reporting where "
# " page_name like '%s%%' order by `limit` desc limit 1",
# self.page_name)
# row = dbh.fetch()
# if row:
# self.query.set("start_limit", row['limit'])
# ## The initial page
# page = total
# else:
# self.query.set("start_limit",0)
# page = 1

page = 1
print "Doing page %s from %s" % (page, self.query['start_limit'])
self.parse_limits(self.query)
# self.parse_limits(self.query)
g = self.generate_rows(self.query, ordering=False)
self.add_constant_files()

Expand All @@ -121,13 +122,14 @@ def real_render_table(self):

while 1:
page_name = "%s%03u.html" % (self.page_name, page)
start = self.count
page_data = self.render_page(page_name, page, elements, g)
if self.row_count ==0: break

self.add_file_from_string(page_name,
page_data.encode("utf8"))

print "Page %s\n" % page
print "(%s): Page %s %s-%s\n" % (os.getpid(), page, start, self.count)
page +=1

## update the TOC page:
Expand Down Expand Up @@ -165,9 +167,12 @@ def periodic(self, dbh, case):
if tables != new_table:
pyflaglog.log(pyflaglog.DEBUG, "Re-exporting HTML Table %s" % renderer.page_name)
try:
import pdb
renderer.real_render_table()
except Exception,e:
print e
print "Exception %s" % e
import pdb
pdb.post_mortem()
pass
dbh2.execute("update reporting_jobs set tables = %r where id=%r",
cPickle.dumps(new_table), row['id'])
Expand Down
6 changes: 5 additions & 1 deletion src/pyflag/Farm.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,11 @@ def nanny(cb, keepalive=None, *args, **kwargs):
quits we restart it.
"""
if config.DISABLE_NANNY:
cb(*args, **kwargs)
try:
cb(*args, **kwargs)
except Exception,e:
import pdb
pdb.post_mortem()

atexit.register(terminate_children)
signal.signal(signal.SIGABRT, handler)
Expand Down
2 changes: 1 addition & 1 deletion tests/pyflag
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/bin/bash
## This is a version of the launcher script which runs on the src directory - paths are hard coded at the moment. Configure below:

SRC_DIR=~/pyflag/pyflag-mod-raid
SRC_DIR=~/pyflag/
INSTALL_DIR=/var/tmp/build/pyflag/

######### END CONFIGURATION ####################
Expand Down
25 changes: 15 additions & 10 deletions utilities/incremental_load.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,18 @@ def load_file(filename, processor, pcap_dbh):
## Start up some workers if needed:
Farm.start_workers()

def update_files(files_we_have):
try:
log_fd = open(config.log)
print "Reading log file"
for l in log_fd:
files_we_have.add(l.strip())
print "Done - added %s files from log" % len(files_we_have)
log_fd.close()
except IOError:
pass


def run(keepalive=None):
global last_mtime, offset, output_fd

Expand All @@ -235,15 +247,7 @@ def run(keepalive=None):
last_time = 0

files_we_have = set()
try:
log_fd = open(config.log)
print "Reading log file"
for l in log_fd:
files_we_have.add(l.strip())
print "Done - added %s files from log" % len(files_we_have)
except IOError:
pass

update_files(files_we_have)
log_fd = open(config.log, "a")
last_mtime = os.stat(directory).st_mtime

Expand Down Expand Up @@ -277,12 +281,13 @@ def run(keepalive=None):
Farm.check_mem(finish)

filename = "%s/%s" % (directory,f)
load_file(filename, processor, pcap_dbh)
if config.log:
log_fd.write(f+"\n")
log_fd.flush()
files_we_have.add(f)

load_file(filename, processor, pcap_dbh)

last_time = time.time()
else:
print "Lock file found"
Expand Down

0 comments on commit fef767d

Please sign in to comment.