Skip to content

Commit

Permalink
[testing] sample fixtures/rules/conf, test runner scripts, and more
Browse files Browse the repository at this point in the history
* Add functional test fixtures, rules, and conf files for users to build from
* Fix a bug in the CLI test to strip all records of newline characters
* Abstract the testing commands into shell scripts to be used by devs and Travis CI
  • Loading branch information
Jack Naglieri committed Feb 14, 2017
1 parent 95f8019 commit 38ba931
Show file tree
Hide file tree
Showing 14 changed files with 161 additions and 74 deletions.
7 changes: 5 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Sphinx build directory
build
docs/build

# PYC files
# Compiled Python files
*.pyc

# Terraform files
Expand All @@ -14,3 +14,6 @@ Thumbs.db
.DS_Store
*.swp
terminal.glue

# nose coverage file
.coverage
3 changes: 2 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@ before_install:
install:
- pip install -r requirements.txt
script:
- nosetests -v -s test/unit/
- ./test/scripts/unit_tests.sh
- ./test/scripts/integration_test_kinesis.sh
22 changes: 11 additions & 11 deletions conf/sample_logs.json → conf/logs.json
Original file line number Diff line number Diff line change
@@ -1,19 +1,15 @@
/*
This is a sample! Copy and rename this file to `logs.json` in the same folder.
Below you will find a sample log for each parser type.
*/
{
"json_log_name": {
"json_log": {
"schema": {
"name": "string",
"host": "integer",
"host": "string",
"data": {
"time": "string"
}
},
"parser": "json"
},
"syslog_log_name": {
"syslog_log": {
"schema": {
"timestamp": "string",
"host": "string",
Expand All @@ -22,18 +18,22 @@ Below you will find a sample log for each parser type.
},
"parser": "syslog"
},
"csv_log_name": {
"csv_log": {
"schema": {
"date": "string",
"time": "integer",
"host": "string",
"message": "string"
"message": "string",
"source": "string"
},
"parser": "csv",
"hints": {
"message": ["*keyword*"]
"source": [
"cluster *"
]
}
},
"kv_log_name": {
"kv_log": {
"schema": {
"type": "string",
"msg": "string",
Expand Down
29 changes: 0 additions & 29 deletions conf/sample_sources.json

This file was deleted.

24 changes: 24 additions & 0 deletions conf/sources.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"kinesis": {
"prefix_cluster1_stream_alert_kinesis": {
"logs": [
"json_log",
"syslog_log",
"kv_log",
"csv_log"
]
},
"prefix_cluster2_stream_alert_kinesis": {
"logs": [
"json_log"
]
}
},
"s3": {
"my-s3-bucket-id": {
"logs": [
"syslog_log"
]
}
}
}
35 changes: 35 additions & 0 deletions rules/sample_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,3 +43,38 @@ def invalid_subnet_rule(rec):
def rule_func(rec):
"""Description"""
return True


@rule('sample_json_rule',
logs=['json_log'],
matchers=[],
outputs=['s3'])
def sample_json_rule(rec):
return rec['host'] == 'test-host-1'


@rule('sample_syslog_rule',
logs=['syslog_log'],
matchers=[],
outputs=['pagerduty'])
def sample_syslog_rule(rec):
return rec['application'] == 'sudo'


@rule('sample_csv_rule',
logs=['csv_log'],
matchers=[],
outputs=['s3'])
def sample_csv_rule(rec):
return rec['host'] == 'test-host-2'


@rule('sample_kv_rule',
logs=['kv_log'],
matchers=[],
outputs=['s3'])
def sample_kv_rule(rec):
return (
rec['msg'] == 'fatal' and
rec['uid'] == 100
)
67 changes: 36 additions & 31 deletions stream_alert_cli/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,13 +64,15 @@ def read_kinesis_records(local_directories):
with open(os.path.join(root, json_file), 'r') as json_fh:
lines = json_fh.readlines()
for line in lines:
line = line.strip()
record = {
'kinesis': {'data': base64.b64encode(line)},
'eventSource': 'aws:{}'.format(folder),
'eventSourceARN': 'arn:aws:{}:region:account-id:stream/{}' \
.format(folder, root.split('/')[-1])
}
records['Records'].append(record)

return records

def read_s3_records(local_directories):
Expand Down Expand Up @@ -121,10 +123,11 @@ def read_s3_records(local_directories):
records = {'Records': []}
for folder in local_directories:
for root, _, files in os.walk(os.path.join(BASEFOLDER, folder)):
for json_file in files:
with open(os.path.join(root, json_file), 'r') as json_fh:
lines = json_fh.readlines()
for test_file in files:
with open(os.path.join(root, test_file), 'r') as test_file_fh:
lines = test_file_fh.readlines()
for line in lines:
line = line.strip()
# provide a way to skip records
if line[0] == '#':
continue
Expand All @@ -135,6 +138,7 @@ def read_s3_records(local_directories):
record['awsRegion'] = 'us-east-1'
record['eventName'] = 'ObjectCreated:Put'
records['Records'].append(record)

return records

def format_sns(in_file):
Expand All @@ -144,37 +148,38 @@ def format_sns(in_file):
message = base64.b64encode(json.dumps(in_file_contents))
out_records = {
"Records": [
{
"EventVersion": "1.0",
"EventSubscriptionArn": "arn:aws:sns:EXAMPLE",
"EventSource": "aws:sns",
"Sns": {
"SignatureVersion": "1",
"Timestamp": "1970-01-01T00:00:00.000Z",
"Signature": "EXAMPLE",
"SigningCertUrl": "EXAMPLE",
"MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e",
"Message": message,
"MessageAttributes": {
"Test": {
"Type": "String",
"Value": "TestString"
},
"TestBinary": {
"Type": "Binary",
"Value": "TestBinary"
{
"EventVersion": "1.0",
"EventSubscriptionArn": "arn:aws:sns:EXAMPLE",
"EventSource": "aws:sns",
"Sns": {
"SignatureVersion": "1",
"Timestamp": "1970-01-01T00:00:00.000Z",
"Signature": "EXAMPLE",
"SigningCertUrl": "EXAMPLE",
"MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e",
"Message": message,
"MessageAttributes": {
"Test": {
"Type": "String",
"Value": "TestString"
},
"TestBinary": {
"Type": "Binary",
"Value": "TestBinary"
}
},
"Type": "Notification",
"UnsubscribeUrl": "EXAMPLE",
"TopicArn": "arn:aws:sns:EXAMPLE",
"Subject": "TestInvoke"
}
},
"Type": "Notification",
"UnsubscribeUrl": "EXAMPLE",
"TopicArn": "arn:aws:sns:EXAMPLE",
"Subject": "TestInvoke"
}
}
]
}
]
}
out_file = '{}.out'.format(in_file)
write_records(out_records, out_file)

return out_file

def write_records(records, out_file):
Expand All @@ -190,7 +195,7 @@ def write_records(records, out_file):

def stream_alert_test(options):
def alert_emulambda(out_file):
context_file = os.path.join(BASEFOLDER, 'context')
# context_file = os.path.join(BASEFOLDER, 'context')
sys.argv = ['emulambda', 'main.handler', out_file, '-v']
import emulambda
emulambda.main()
Expand Down
Empty file.
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"name": "name-1", "host": "test-host-2", "data": {"time": "Jan 01, 2017"}}
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
{"name": "name-1", "host": "test-host-1", "data": {"time": "Jan 01, 2017"}}
Jan 01 12:00:12 test-host-1 sudo[151]: COMMAND sudo rm /tmp/test
Jan 01 2017,1487095529,test-host-2,this is test data for rules,cluster 5
type=comm msg=fatal uid=100
39 changes: 39 additions & 0 deletions test/integration/fixtures/out/kinesis_record_events.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"Records": [
{
"eventSource": "aws:kinesis",
"eventSourceARN": "arn:aws:kinesis:region:account-id:stream/prefix_cluster1_stream_alert_kinesis",
"kinesis": {
"data": "eyJuYW1lIjogIm5hbWUtMSIsICJob3N0IjogInRlc3QtaG9zdC0yIiwgImRhdGEiOiB7InRpbWUiOiAiSmFuIDAxLCAyMDE3In19"
}
},
{
"eventSource": "aws:kinesis",
"eventSourceARN": "arn:aws:kinesis:region:account-id:stream/prefix_cluster1_stream_alert_kinesis",
"kinesis": {
"data": "eyJuYW1lIjogIm5hbWUtMSIsICJob3N0IjogInRlc3QtaG9zdC0xIiwgImRhdGEiOiB7InRpbWUiOiAiSmFuIDAxLCAyMDE3In19"
}
},
{
"eventSource": "aws:kinesis",
"eventSourceARN": "arn:aws:kinesis:region:account-id:stream/prefix_cluster1_stream_alert_kinesis",
"kinesis": {
"data": "SmFuIDAxIDEyOjAwOjEyIHRlc3QtaG9zdC0xIHN1ZG9bMTUxXTogQ09NTUFORCBzdWRvIHJtIC90bXAvdGVzdA=="
}
},
{
"eventSource": "aws:kinesis",
"eventSourceARN": "arn:aws:kinesis:region:account-id:stream/prefix_cluster1_stream_alert_kinesis",
"kinesis": {
"data": "SmFuIDAxIDIwMTcsMTQ4NzA5NTUyOSx0ZXN0LWhvc3QtMix0aGlzIGlzIHRlc3QgZGF0YSBmb3IgcnVsZXMsY2x1c3RlciA1"
}
},
{
"eventSource": "aws:kinesis",
"eventSourceARN": "arn:aws:kinesis:region:account-id:stream/prefix_cluster1_stream_alert_kinesis",
"kinesis": {
"data": "dHlwZT1jb21tIG1zZz1mYXRhbCB1aWQ9MTAw"
}
}
]
}
2 changes: 2 additions & 0 deletions test/scripts/integration_test_kinesis.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#! /bin/bash
./stream_alert_cli.py lambda test --source kinesis --func alert
2 changes: 2 additions & 0 deletions test/scripts/unit_tests.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#! /bin/bash
nosetests test/unit --with-coverage --cover-package=stream_alert

0 comments on commit 38ba931

Please sign in to comment.