forked from simsong/bulk_extractor
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathscan_hashdb.cpp
581 lines (484 loc) · 21 KB
/
scan_hashdb.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
// Author: Bruce Allen <[email protected]>
// Created: 2/25/2013
//
// The software provided here is released by the Naval Postgraduate
// School, an agency of the U.S. Department of Navy. The software
// bears no warranty, either expressed or implied. NPS does not assume
// legal liability nor responsibility for a User's use of the software
// or the results of such use.
//
// Please note that within the United States, copyright protection,
// under Section 105 of the United States Code, Title 17, is not
// available for any work of the United States Government and/or for
// any works created by United States Government employees. User
// acknowledges that this software contains work which was created by
// NPS government employees and is therefore in the public domain and
// not subject to copyright.
//
// Released into the public domain on February 25, 2013 by Bruce Allen.
/**
* \file
* Generates MD5 hash values from hashdb_block_size data taken along sector
* boundaries and scans for matches against a hash database.
*
* Note that the hash database may be accessed locally through the
* file system or remotely through a socket.
*/
#include "config.h"
#include "bulk_extractor.h"
#ifdef HAVE_HASHDB
//#define DEBUG_V2_OUT
#include "hashdb.hpp"
#include <dfxml/src/hash_t.h>
#include <iostream>
#include <cmath>
#include <unistd.h> // for getpid
#include <sys/types.h> // for getpid
// user settings, populated from scanner_params during PHASE_STARTUP
static std::string hashdb_mode="none"; // operational mode: "none", "import", or "scan"
static uint32_t hashdb_byte_alignment=512; // sector alignment recorded in a new database; import only
static uint32_t hashdb_block_size=512; // number of bytes hashed per block; import or scan
static uint32_t hashdb_step_size=512; // stride between consecutive hashed blocks; import or scan
static std::string hashdb_scan_path="your_hashdb_directory"; // database to scan against; scan only
static std::string hashdb_repository_name="default_repository"; // attribution for imported hashes; import only
static uint32_t hashdb_max_feature_file_lines=0; // cap on recorded feature lines, 0 = no limit; scan only
// runtime modes
// scanner mode, derived from hashdb_mode during PHASE_INIT
enum mode_type_t {MODE_NONE, MODE_SCAN, MODE_IMPORT};
static mode_type_t mode = MODE_NONE;
// global state
// hashdb directory, created under the output directory; import only
static std::string hashdb_dir;
// hash type used for all block and whole-file hashes (MD5)
typedef md5_generator hash_generator;
// hashdb managers, allocated in PHASE_INIT and deleted in PHASE_SHUTDOWN;
// static pointers are zero-initialized, so they start out NULL
static hashdb::import_manager_t* import_manager;
static hashdb::scan_manager_t* scan_manager;
// per-mode PHASE_SCAN handlers, dispatched from scan_hashdb()
static void do_import(const class scanner_params &sp,
                      const recursion_control_block &rcb);
static void do_scan(const class scanner_params &sp,
                    const recursion_control_block &rcb);
// safely hash an sbuf range without reading past the end of the buffer.
// Returns the MD5 of exactly hashdb_block_size bytes: the buffer content,
// zero-extended when the buffer is shorter than one full block.
inline const md5_t hash_one_block(const sbuf_t &sbuf)
{
    // fast path: the buffer holds at least one full block
    if (sbuf.bufsize >= hashdb_block_size) {
        return hash_generator::hash_buf(sbuf.buf, hashdb_block_size);
    }

    // short buffer: hash the available bytes, then pad with zeros so the
    // digest matches that of a zero-extended full block
    hash_generator generator;
    generator.update(sbuf.buf, sbuf.bufsize);
    const size_t pad_count = hashdb_block_size - sbuf.bufsize; // > 0 here
    std::vector<uint8_t> padding(pad_count); // value-initialized to zero
    generator.update(&padding[0], pad_count);
    return generator.final();
}
// rules for determining if a block should be ignored
// Detects a "ramp": a run of sequential 32-bit counters (e.g. FAT tables).
// Returns true when more than pagesize/8 adjacent word pairs increment by 1.
static bool ramp_trait(const sbuf_t &sbuf)
{
    // need at least two 32-bit words to compare
    if (sbuf.pagesize < 8) {
        return false;
    }
    uint32_t ascending_pairs = 0;
    const size_t limit = sbuf.pagesize - 8;
    for (size_t pos = 0; pos < limit; pos += 4) {
        // little-endian counters are detected; big-endian counters are not
        if (sbuf.get32u(pos) + 1 == sbuf.get32u(pos + 4)) {
            ++ascending_pairs;
        }
    }
    return ascending_pairs > sbuf.pagesize / 8;
}
// Histogram trait: flags low-information blocks.  Builds a histogram of
// big-endian 32-bit words and returns true when the block has fewer than
// three distinct words, or when any single word dominates the block.
static bool hist_trait(const sbuf_t &sbuf)
{
    // histogram analysis is only meaningful on full-sized blocks
    if (sbuf.pagesize < hashdb_block_size) {
        return false;
    }
    std::map<uint32_t,uint32_t> word_counts;
    for (size_t pos = 0; pos < sbuf.pagesize - 4; pos += 4) {
        ++word_counts[sbuf.get32uBE(pos)];
    }
    // almost no distinct words: low-entropy block
    if (word_counts.size() < 3) {
        return true;
    }
    // any single word appearing more than block_size/16 times is suspect
    const uint32_t dominance_threshold = hashdb_block_size / 16;
    std::map<uint32_t,uint32_t>::const_iterator p;
    for (p = word_counts.begin(); p != word_counts.end(); ++p) {
        if (p->second > dominance_threshold) {
            return true;
        }
    }
    return false;
}
// Whitespace trait: returns true when at least three quarters of the
// block's bytes are ASCII whitespace.
static bool whitespace_trait(const sbuf_t &sbuf)
{
    size_t count = 0;
    for(size_t i=0;i<sbuf.pagesize;i++){
        // cast to unsigned char before isspace(): passing a value that is
        // negative (other than EOF) to isspace() is undefined behavior
        if (isspace(static_cast<unsigned char>(sbuf[i]))) count+=1;
    }
    return count >= (sbuf.pagesize * 3)/4;
}
// Monotonic trait: flags blocks whose 32-bit words mostly trend one way.
// Returns true when at least 75% of adjacent word pairs are increasing,
// or at least 75% decreasing, or at least 75% identical.
static bool monotonic_trait(const sbuf_t &sbuf)
{
    // too little data to judge a trend
    if (sbuf.pagesize < 16) {
        return false;
    }
    const double word_total = sbuf.pagesize / 4.0;
    int up = 0;
    int down = 0;
    int flat = 0;
    for (size_t pos = 0; pos + 8 < sbuf.pagesize; pos += 4) {
        const uint32_t current = sbuf.get32u(pos);
        const uint32_t next    = sbuf.get32u(pos + 4);
        if (next > current) {
            ++up;
        } else if (next < current) {
            ++down;
        } else {
            ++flat;
        }
    }
    return (up   / word_total >= 0.75) ||
           (down / word_total >= 0.75) ||
           (flat / word_total >= 0.75);
}
// detect if block is all the same
// Returns true when every byte equals its predecessor, i.e. the buffer is
// one repeated value; zero- and one-byte buffers are trivially uniform.
inline bool empty_sbuf(const sbuf_t &sbuf)
{
    for (size_t pos = 1; pos < sbuf.bufsize; ++pos) {
        if (sbuf[pos] != sbuf[pos - 1]) {
            return false; // found two differing bytes
        }
    }
    return true; // uniform content
}
// Scanner entry point, invoked by bulk_extractor once per phase:
//   PHASE_STARTUP  - register scanner metadata and settable options
//   PHASE_INIT     - validate settings and open the hash database;
//                    invalid settings are fatal (exit(1))
//   PHASE_SCAN     - dispatch to do_import or do_scan based on mode
//   PHASE_SHUTDOWN - release the open database manager
extern "C"
void scan_hashdb(const class scanner_params &sp,
                 const recursion_control_block &rcb) {
    switch(sp.phase) {

    // startup
    case scanner_params::PHASE_STARTUP: {

        // set properties for this scanner
        std::string desc = "Search cryptographic hash IDs against hashes in a hashdb block hash database";
        desc += std::string(" (hashdb version") + std::string(hashdb_version()) + std::string(")");
        sp.info->name        = "hashdb";
        sp.info->author      = "Bruce Allen";
        sp.info->description = desc;
        sp.info->flags       = scanner_info::SCANNER_DISABLED;

        // hashdb_mode
        std::stringstream ss_hashdb_mode;
        ss_hashdb_mode << "Operational mode [none|import|scan]\n"
            << " none - The scanner is active but performs no action.\n"
            << " import - Import block hashes.\n"
            << " scan - Scan for matching block hashes.";
        sp.info->get_config("hashdb_mode", &hashdb_mode, ss_hashdb_mode.str());

        // hashdb_byte_alignment
        std::stringstream ss_hashdb_byte_alignment;
        ss_hashdb_byte_alignment
            << "Selects the byte alignment to use in the new import\n"
            << " database.";
        sp.info->get_config("hashdb_byte_alignment", &hashdb_byte_alignment,
                            ss_hashdb_byte_alignment.str());

        // hashdb_block_size
        sp.info->get_config("hashdb_block_size", &hashdb_block_size,
                            "Selects the block size to hash, in bytes.");

        // hashdb_step_size
        std::stringstream ss_hashdb_step_size;
        ss_hashdb_step_size
            << "Selects the step size. Scans and imports along\n"
            << " this step value.";
        sp.info->get_config("hashdb_step_size", &hashdb_step_size,
                            ss_hashdb_step_size.str());

        // hashdb_scan_path
        std::stringstream ss_hashdb_scan_path;
        ss_hashdb_scan_path
            << "File path to a hash database to scan against.\n"
            << " Valid only in scan mode.";
        sp.info->get_config("hashdb_scan_path", &hashdb_scan_path,
                            ss_hashdb_scan_path.str());

        // hashdb_repository_name
        std::stringstream ss_hashdb_import_repository_name;
        ss_hashdb_import_repository_name
            << "Sets the repository name to\n"
            << " attribute the import to. Valid only in import mode.";
        sp.info->get_config("hashdb_repository_name",
                            &hashdb_repository_name,
                            ss_hashdb_import_repository_name.str());

        // configure the feature file to accept scan features,
        // but only if in scan mode
        if (hashdb_mode == "scan") {
            sp.info->feature_names.insert("identified_blocks");
#ifdef DEBUG_V2_OUT
            sp.info->feature_names.insert("identified_blocks2");
#endif
        }

        // hashdb_max_feature_file_lines
        std::stringstream ss_hashdb_max_feature_file_lines;
        ss_hashdb_max_feature_file_lines
            << "The maximum number of features lines to record\n"
            << " or 0 for no limit. Valid only in scan mode.";
        sp.info->get_config("hashdb_max_feature_file_lines", &hashdb_max_feature_file_lines,
                            ss_hashdb_max_feature_file_lines.str());
        return;
    }

    // init
    case scanner_params::PHASE_INIT: {
        // validate the input parameters

        // hashdb_mode
        if (hashdb_mode == "none") {
            mode = MODE_NONE;
        } else if (hashdb_mode == "import") {
            mode = MODE_IMPORT;
        } else if (hashdb_mode == "scan") {
            mode = MODE_SCAN;
        } else {
            // bad mode
            std::cerr << "Error. Parameter 'hashdb_mode' value '"
                      << hashdb_mode << "' must be [none|import|scan].\n"
                      << "Cannot continue.\n";
            exit(1);
        }

        // hashdb_byte_alignment
        if (hashdb_byte_alignment == 0) {
            std::cerr << "Error. Value for parameter 'hashdb_byte_alignment' is invalid.\n"
                      << "Cannot continue.\n";
            exit(1);
        }

        // hashdb_block_size
        if (hashdb_block_size == 0) {
            std::cerr << "Error. Value for parameter 'hashdb_block_size' is invalid.\n"
                      << "Cannot continue.\n";
            exit(1);
        }

        // hashdb_step_size
        if (hashdb_step_size == 0) {
            std::cerr << "Error. Value for parameter 'hashdb_step_size' is invalid.\n"
                      << "Cannot continue.\n";
            exit(1);
        }

        // for valid operation, scan sectors must align on byte aligned boundaries
        if (hashdb_step_size % hashdb_byte_alignment != 0) {
            // fixed: the message previously used "\%", an invalid escape
            // sequence; a plain "%" is what was intended
            std::cerr << "Error: invalid byte alignment=" << hashdb_byte_alignment
                      << " for step size=" << hashdb_step_size << ".\n"
                      << "Steps must fit along byte alignment boundaries.\n"
                      << "Specifically, hashdb_step_size % hashdb_byte_alignment must be zero.\n"
                      << "Cannot continue.\n";
            exit(1);
        }

        // indicate hashdb version
        std::cout << "hashdb: hashdb_version=" << hashdb_version() << "\n";

        // perform setup based on mode
        switch(mode) {
            case MODE_IMPORT: {
                // set the path to the hashdb
                hashdb_dir = sp.fs.get_outdir() + "/" + "hashdb.hdb";

                // show relevant settable options
                std::cout << "hashdb: hashdb_mode=" << hashdb_mode << "\n"
                          << "hashdb: hashdb_byte_alignment= " << hashdb_byte_alignment << "\n"
                          << "hashdb: hashdb_block_size=" << hashdb_block_size << "\n"
                          << "hashdb: hashdb_step_size= " << hashdb_step_size << "\n"
                          << "hashdb: hashdb_repository_name= " << hashdb_repository_name << "\n"
                          << "hashdb: Creating hashdb directory " << hashdb_dir << "\n";

                // open hashdb for importing
                // currently, hashdb_dir is required to not exist
                hashdb::settings_t settings;
                settings.byte_alignment = hashdb_byte_alignment;
                settings.block_size = hashdb_block_size;
                std::string error_message = hashdb::create_hashdb(hashdb_dir, settings, "");
                if (error_message.size() != 0) {
                    std::cerr << "Error: " << error_message << "\n";
                    exit(1);
                }
                import_manager = new hashdb::import_manager_t(hashdb_dir, "");
                return;
            }
            case MODE_SCAN: {
                // show relevant settable options
                std::cout << "hashdb: hashdb_mode=" << hashdb_mode << "\n"
                          << "hashdb: hashdb_block_size=" << hashdb_block_size << "\n"
                          << "hashdb: hashdb_step_size= " << hashdb_step_size << "\n"
                          << "hashdb: hashdb_scan_path=" << hashdb_scan_path << "\n"
                          << "hashdb: hashdb_max_feature_file_lines=" << hashdb_max_feature_file_lines
                          << "\n";

                // open hashdb for scanning
                scan_manager = new hashdb::scan_manager_t(hashdb_scan_path);

                // set the feature recorder to leave context alone but fix invalid utf8
                sp.fs.get_name("identified_blocks")->set_flag(feature_recorder::FLAG_XML);
#ifdef DEBUG_V2_OUT
                sp.fs.get_name("identified_blocks2")->set_flag(feature_recorder::FLAG_XML);
#endif
                return;
            }
            case MODE_NONE: {
                // show relevant settable options
                std::cout << "hashdb: hashdb_mode=" << hashdb_mode << "\n"
                          << "WARNING: the hashdb scanner is enabled but it will not perform any action\n"
                          << "because no mode has been selected. Please either select a hashdb mode or\n"
                          << "leave the hashdb scanner disabled to avoid this warning.\n";
                // no action
                return;
            }
            default: {
                // program error; the return prevents fall-through into the
                // PHASE_SCAN case when assertions are compiled out (NDEBUG)
                assert(0);
                return;
            }
        }
    }

    // scan
    case scanner_params::PHASE_SCAN: {
        switch(mode) {
            case MODE_IMPORT:
                do_import(sp, rcb);
                return;
            case MODE_SCAN:
                do_scan(sp, rcb);
                return;
            default:
                // the user should have just left the scanner disabled.
                // no action.
                return;
        }
    }

    // shutdown
    case scanner_params::PHASE_SHUTDOWN: {
        switch(mode) {
            case MODE_IMPORT:
                delete import_manager;
                return;
            case MODE_SCAN:
                delete scan_manager;
                return;
            default:
                // the user should have just left the scanner disabled.
                // no action.
                return;
        }
    }

    // there are no other bulk_extractor scanner state actions
    default: {
        // no action for other bulk_extractor scanner states
        return;
    }
    }
}
// perform import
// Hashes every step-aligned block in the sbuf page and imports each
// non-empty block hash into the database, then records the source name
// and source data (including the nonprobative block count).
static void do_import(const class scanner_params &sp,
                      const recursion_control_block &rcb) {

    // get the sbuf
    const sbuf_t& sbuf = sp.sbuf;

    // get the filename from sbuf without the sbuf map file delimiter
    // (the last 4 characters of the forensic path)
    std::string path_without_map_file_delimiter =
              (sbuf.pos0.path.size() > 4) ?
              std::string(sbuf.pos0.path, 0, sbuf.pos0.path.size() - 4) : "";

    // get the filename to use as the source filename
    std::stringstream ss;
    const size_t p=sbuf.pos0.path.find('/');
    if (p==std::string::npos) {
        // no directory in forensic path so explicitly include the filename
        ss << sp.fs.get_input_fname();
        if (sbuf.pos0.isRecursive()) {
            // forensic path is recursive so add "/" + forensic path
            ss << "/" << path_without_map_file_delimiter;
        }
    } else {
        // directory in forensic path so print forensic path as is
        ss << path_without_map_file_delimiter;
    }
    std::string source_filename = ss.str();

    // calculate the file hash using the sbuf page; it identifies the source
    const md5_t sbuf_hash = hash_generator::hash_buf(sbuf.buf, sbuf.pagesize);
    const std::string file_binary_hash =
          std::string(reinterpret_cast<const char*>(sbuf_hash.digest), 16);

    // track nonprobative count
    size_t nonprobative_count = 0;

    // import the cryptograph hash values from all the blocks in sbuf
    for (size_t offset=0; offset<sbuf.pagesize; offset+=hashdb_step_size) {

        // Create a child sbuf of what we would hash
        const sbuf_t sbuf_to_hash(sbuf,offset,hashdb_block_size);

        // ignore empty blocks, but count them as nonprobative
        if (empty_sbuf(sbuf_to_hash)){
            ++nonprobative_count;
            continue;
        }

        // calculate the hash for this import-sector-aligned hash block
        const md5_t hash = hash_one_block(sbuf_to_hash);
        const std::string binary_hash(reinterpret_cast<const char*>(hash.digest), 16);

        // calculate the offset from the start of the media image
        const uint64_t image_offset = sbuf_to_hash.pos0.offset;

        // put together any block classification labels by subjecting the
        // block to the trait tests; sbuf_to_hash is reused here rather
        // than constructing a second identical child sbuf
        std::stringstream ss_flags;
        if (ramp_trait(sbuf_to_hash))       ss_flags << "R";
        if (hist_trait(sbuf_to_hash))       ss_flags << "H";
        if (whitespace_trait(sbuf_to_hash)) ss_flags << "W";
        if (monotonic_trait(sbuf_to_hash))  ss_flags << "M";
        // NOTE: shannon16 is Disabled because its results were not useful
        // and because it needs fixed to not generate sbuf read exception.
        //if (ss_flags.str().size() > 0) ss_flags << "," << shannon16(s);

        // any classification flag means nonprobative
        if (ss_flags.str().size() > 0) {
            ++nonprobative_count;
        }

        // import the hash
        import_manager->insert_hash(binary_hash,
                                    file_binary_hash,
                                    image_offset,
                                    0,   // entropy
                                    ""); // block label
    }

    // insert the source name pair
    import_manager->insert_source_name(file_binary_hash,
                                       hashdb_repository_name, source_filename);

    // insert the source data
    import_manager->insert_source_data(file_binary_hash,
                                       sbuf.pagesize,
                                       "", // file type
                                       nonprobative_count);
}
// perform scan
static void do_scan(const class scanner_params &sp,
const recursion_control_block &rcb) {
// get the feature recorder
feature_recorder* identified_blocks_recorder = sp.fs.get_name("identified_blocks");
#ifdef DEBUG_V2_OUT
feature_recorder* identified_blocks_recorder2 = sp.fs.get_name("identified_blocks2");
#endif
// get the sbuf
const sbuf_t& sbuf = sp.sbuf;
// process cryptographic hash values for blocks along sector boundaries
for (size_t offset=0; offset<sbuf.pagesize; offset+=hashdb_step_size) {
// stop recording if feature file line count is at requested max
if (hashdb_max_feature_file_lines > 0 && identified_blocks_recorder->count() >=
hashdb_max_feature_file_lines) {
break;
}
// Create a child sbuf of the block
const sbuf_t sbuf_to_hash(sbuf, offset, hashdb_block_size);
// ignore empty blocks
if (empty_sbuf(sbuf_to_hash)){
continue;
}
// calculate the hash for this sector-aligned hash block
const md5_t hash = hash_one_block(sbuf_to_hash);
const std::string binary_hash =
std::string(reinterpret_cast<const char*>(hash.digest), 16);
// scan for the hash
std::string json_text = scan_manager->find_expanded_hash_json(binary_hash);
if (json_text.size() == 0) {
// hash not found
continue;
}
// prepare fields to record the feature
// get hash_string from hash
std::string hash_string = hash.hexdigest();
// record the feature, there is no context field
identified_blocks_recorder->write(sbuf.pos0+offset, hash_string, json_text);
#ifdef DEBUG_V2_OUT
size_t count = scan_manager->find_hash_count(binary_hash);
// build context field
std::stringstream ss;
ss << "{\"count\":" << count << "}";
// record the feature
identified_blocks_recorder2->write(sbuf.pos0+offset, hash_string, ss.str());
#endif
}
}
#endif