Commit
Merge branch 'master' of github.com:tomnomnom/hacks
tomnomnom committed Jun 4, 2021
2 parents 79844af + cd269d2 commit 352c68e
Showing 11 changed files with 505 additions and 58 deletions.
47 changes: 1 addition & 46 deletions fff/README.mkd
@@ -1,48 +1,3 @@
# fff

The Fairly Fast Fetcher. Requests a bunch of URLs provided on stdin fairly quickly.

The main idea is to launch a new request every `n` milliseconds, without waiting
for the last request to finish first. This makes for consistently fast fetching,
but can be hard on system resources (e.g. you might run out of file descriptors).
The advantage, though, is that hitting a bunch of very slow URLs, or URLs that
result in timeouts, doesn't slow the overall progress very much.
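
A minimal sketch of that dispatch pattern (illustrative only, not fff's actual code; the 100ms interval and client timeout are arbitrary):

```
package main

import (
	"bufio"
	"fmt"
	"net/http"
	"os"
	"sync"
	"time"
)

func main() {
	client := &http.Client{Timeout: 10 * time.Second}

	ticker := time.NewTicker(100 * time.Millisecond) // one new request per tick
	defer ticker.Stop()

	var wg sync.WaitGroup
	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		rawURL := sc.Text()

		// wait for the next slot, but never for an in-flight request
		<-ticker.C

		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			resp, err := client.Get(u)
			if err != nil {
				fmt.Fprintf(os.Stderr, "request failed: %s\n", err)
				return
			}
			resp.Body.Close()
			fmt.Println(resp.Status, u)
		}(rawURL)
	}
	wg.Wait()
}
```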

## Install

```
▶ go get -u github.com/tomnomnom/hacks/fff
```

## Usage

Basic usage:
```
▶ cat urls.txt | fff
```

Options:

```
▶ fff --help
Request URLs provided on stdin fairly frickin' fast
Options:
-d, --delay <delay> Delay between issuing requests (ms)
-H, --header <header> Add a header to the request (can be specified multiple times)
-o, --output <dir> Directory to save responses in (will be created)
-s, --save-status <code> Save responses with given status code (can be specified multiple times)
-S, --save Save all responses
```

## Tuning
You might want to increase your open file descriptor limit before doing anything crazy:

```
▶ ulimit -n 16384
```

## TODO

* Different methods and request bodies
* Create an index file in the output directory
Now at https://github.com/tomnomnom/fff
54 changes: 46 additions & 8 deletions fff/main.go
@@ -10,8 +10,10 @@ import (
"io/ioutil"
"net"
"net/http"
"net/url"
"os"
"path"
"regexp"
"strconv"
"strings"
"sync"
@@ -24,9 +26,11 @@ func init() {
"Request URLs provided on stdin fairly frickin' fast",
"",
"Options:",
" -b, --body <data> Request body",
" -d, --delay <delay> Delay between issuing requests (ms)",
" -H, --header <header> Add a header to the request (can be specified multiple times)",
" -k, --keep-alive Use HTTP Keep-Alive",
" -m, --method HTTP method to use (default: GET, or POST if body is specified)",
" -o, --output <dir> Directory to save responses in (will be created)",
" -s, --save-status <code> Save responses with given status code (can be specified multiple times)",
" -S, --save Save all responses",
@@ -39,6 +43,10 @@ func init() {

func main() {

var body string
flag.StringVar(&body, "body", "", "")
flag.StringVar(&body, "b", "", "")

var keepAlives bool
flag.BoolVar(&keepAlives, "keep-alive", false, "")
flag.BoolVar(&keepAlives, "keep-alives", false, "")
@@ -52,6 +60,10 @@ func main() {
flag.IntVar(&delayMs, "delay", 100, "")
flag.IntVar(&delayMs, "d", 100, "")

var method string
flag.StringVar(&method, "method", "GET", "")
flag.StringVar(&method, "m", "GET", "")

var outputDir string
flag.StringVar(&outputDir, "output", "out", "")
flag.StringVar(&outputDir, "o", "out", "")
@@ -84,7 +96,16 @@ func main() {
defer wg.Done()

// create the request
-req, err := http.NewRequest("GET", rawURL, nil)
+var b io.Reader
+if body != "" {
+b = strings.NewReader(body)
+
+// Can't send a body with a GET request
+if method == "GET" {
+method = "POST"
+}
+}
+req, err := http.NewRequest(method, rawURL, b)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create request: %s\n", err)
return
@@ -116,9 +137,10 @@ func main() {
return
}

-// output files are prefix/domain/hash.(body|headers)
-hash := sha1.Sum([]byte(rawURL))
-p := path.Join(prefix, req.URL.Hostname(), fmt.Sprintf("%x.body", hash))
+// output files are stored in prefix/domain/normalisedpath/hash.(body|headers)
+normalisedPath := normalisePath(req.URL)
+hash := sha1.Sum([]byte(method + rawURL + body + headers.String()))
+p := path.Join(prefix, req.URL.Hostname(), normalisedPath, fmt.Sprintf("%x.body", hash))
err = os.MkdirAll(path.Dir(p), 0750)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create dir: %s\n", err)
@@ -140,7 +162,7 @@ }
}

// create the headers file
-headersPath := path.Join(prefix, req.URL.Hostname(), fmt.Sprintf("%x.headers", hash))
+headersPath := path.Join(prefix, req.URL.Hostname(), normalisedPath, fmt.Sprintf("%x.headers", hash))
headersFile, err := os.Create(headersPath)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create file: %s\n", err)
@@ -149,22 +171,33 @@ func main() {
defer headersFile.Close()

var buf strings.Builder
-buf.WriteString(fmt.Sprintf("%s\n\n", rawURL))
+
+// put the request URL and method at the top
+buf.WriteString(fmt.Sprintf("%s %s\n\n", method, rawURL))

// add the request headers
for _, h := range headers {
buf.WriteString(fmt.Sprintf("> %s\n", h))
}

buf.WriteRune('\n')

// add the request body
if body != "" {
buf.WriteString(body)
buf.WriteString("\n\n")
}

// add the proto and status
buf.WriteString(fmt.Sprintf("< %s %s\n", resp.Proto, resp.Status))

// add the response headers
for k, vs := range resp.Header {
for _, v := range vs {
buf.WriteString(fmt.Sprintf("< %s: %s\n", k, v))
}
}

// write the assembled summary to the headers file
_, err = io.Copy(headersFile, strings.NewReader(buf.String()))
if err != nil {
fmt.Fprintf(os.Stderr, "failed to write file contents: %s\n", err)
@@ -213,7 +246,7 @@ func (h *headerArgs) Set(val string) error {
}

func (h headerArgs) String() string {
return "string"
return strings.Join(h, ", ")
}

type saveStatusArgs []int
@@ -236,3 +269,8 @@ func (s saveStatusArgs) Includes(search int) bool {
}
return false
}

func normalisePath(u *url.URL) string {
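// replace runs of characters that aren't safe in a directory name with a dash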
re := regexp.MustCompile(`[^a-zA-Z0-9/._-]+`)
return re.ReplaceAllString(u.Path, "-")
}
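
As an aside, the repeatable `-H`/`--header` flag works because `headerArgs` satisfies the standard library's `flag.Value` interface. A minimal standalone demonstration of the pattern (the `Set` body and the `flag.Var` registration here are assumptions; the diff doesn't show them):

```
package main

import (
	"flag"
	"fmt"
	"strings"
)

// headerArgs collects a repeatable -H/--header style flag.
type headerArgs []string

// Set is called by the flag package once per occurrence of the flag.
func (h *headerArgs) Set(val string) error {
	*h = append(*h, val)
	return nil
}

func (h headerArgs) String() string {
	return strings.Join(h, ", ")
}

func main() {
	var headers headerArgs
	flag.Var(&headers, "header", "")
	flag.Var(&headers, "H", "")
	flag.Parse()

	// e.g. ./demo -H 'Host: example.com' -H 'Accept: */*'
	for _, h := range headers {
		fmt.Println("would send:", h)
	}
}
```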
3 changes: 3 additions & 0 deletions ghtool/main.go
@@ -115,6 +115,9 @@ func getRepos(client *github.Client, user string) ([]string, error) {
}

for _, repo := range repos {
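// skip forks so only original repos are cloned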
if *repo.Fork {
continue
}
allRepos = append(allRepos, *repo.CloneURL)
}

2 changes: 2 additions & 0 deletions kxss/.gitignore
@@ -0,0 +1,2 @@
kxss
*.sw*
105 changes: 105 additions & 0 deletions kxss/README.mkd
@@ -0,0 +1,105 @@
# kxss

I don't know what this name is. There might even be something that has this name already,
but I'm on a plane with no internet connection so I can't check that right now. Also:
who said you needed StackOverflow to be able to write code? ¯\\\_(ツ)\_/¯

## Idea

So the general idea is:

* Take URLs with params on stdin. These might have come from waybackurls or maybe a Burp session
* Request the URLs, check the response body for any reflected parameters. There will be many false positives here.
* For any reflected parameters, re-request with a random alphanumeric value appended to the param (there's a sketch of this below)
* Only one param is appended to at a time, to avoid breaking the request when a different param is required

And the bit that's not done yet:
* For any params that passed the appended-to check (i.e. ones we're *really* sure are reflected), start trying special characters too (`<"'>` etc)

The "best" thing to do here would probably be to test one special character at a time, but it's really a
trade-off between the number of requests we're issuing and accuracy.
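
A rough sketch of that append-and-recheck step (a simplified illustration, not kxss's actual code; it checks one hard-coded URL, taken from the usage example below, rather than a stdin stream):

```
package main

import (
	"crypto/rand"
	"encoding/hex"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"
)

// randCanary returns a random alphanumeric marker to append to a param.
func randCanary() string {
	b := make([]byte, 6)
	if _, err := rand.Read(b); err != nil {
		panic(err) // no entropy; nothing sensible to do
	}
	return hex.EncodeToString(b)
}

// checkParam appends a canary to one param, leaves the others untouched,
// and reports whether the canary is reflected in the response body.
func checkParam(rawURL, param string) (bool, error) {
	u, err := url.Parse(rawURL)
	if err != nil {
		return false, err
	}

	canary := randCanary()
	qs := u.Query()
	qs.Set(param, qs.Get(param)+canary)
	u.RawQuery = qs.Encode()

	resp, err := http.Get(u.String())
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return false, err
	}
	return strings.Contains(string(body), canary), nil
}

func main() {
	rawURL := "http://localhost:5566/?name=Tom&age=33&fake=Buck"
	for _, p := range []string{"name", "age", "fake"} {
		if ok, _ := checkParam(rawURL, p); ok {
			fmt.Printf("got reflection of appended param %s on %s\n", p, rawURL)
		}
	}
}
```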

## Things

* It'd be nice to support POST params at some point too
* We're going to be generating a *lot* of requests, often to the same hosts
* This needs some kind of rate-limiting with re-queueing so that we don't overwhelm hosts, but can still cover lots of ground quickly (see the sketch after this list)
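
One possible shape for that rate limiting (purely hypothetical; kxss doesn't do this yet): enforce a minimum gap per host, and push anything that arrives too soon back onto the queue:

```
package main

import (
	"fmt"
	"net/url"
	"sync"
	"time"
)

// limiter enforces a minimum gap between requests to the same host.
// The mutex is there so concurrent workers can share one limiter.
type limiter struct {
	mu   sync.Mutex
	last map[string]time.Time
	gap  time.Duration
}

// allow reports whether a request to host may go out now,
// recording the time if so.
func (l *limiter) allow(host string) bool {
	l.mu.Lock()
	defer l.mu.Unlock()
	if time.Since(l.last[host]) < l.gap {
		return false
	}
	l.last[host] = time.Now()
	return true
}

func main() {
	l := &limiter{last: make(map[string]time.Time), gap: time.Second}
	urls := []string{
		"http://localhost:5566/?name=Tom",
		"http://localhost:5566/?age=33",
	}

	// naive re-queue loop: URLs that arrive too soon for their host
	// go to the back of the queue and get retried later
	for len(urls) > 0 {
		rawURL := urls[0]
		urls = urls[1:]
		u, err := url.Parse(rawURL)
		if err != nil {
			continue
		}
		if !l.allow(u.Hostname()) {
			urls = append(urls, rawURL)
			time.Sleep(50 * time.Millisecond) // avoid a hot spin
			continue
		}
		fmt.Println("would fetch", rawURL)
	}
}
```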

## Usage

At the moment there's a test server in `cmd/testserver`. Run that with `go run cmd/testserver/main.go` and
then do something like this:

```
▶ go build
▶ echo 'http://localhost:5566/?name=Tom&age=33&fake=Buck' | ./kxss
got reflection of appended param age on http://localhost:5566/?name=Tom&age=33&fake=Buck
got reflection of appended param name on http://localhost:5566/?name=Tom&age=33&fake=Buck
```

The source of the handler in the test server looks like this:

```
qs := r.URL.Query()
log.Printf("req: %#v", qs)
w.Header().Add("Content-Type", "text/html")
fmt.Fprintf(w, "Hello, %s!\n", qs.Get("name"))
fmt.Fprintf(w, "I hear you're %s years old!\n", qs.Get("age"))
fmt.Fprint(w, "My name is Buck and I'm here to greet you.\n")
```

Note that "Buck" appears in the query string and in the page output, but is not reported
as reflected. That's because we appended a random value to the end of it to double check.

## Payloads and context

As well as figuring out what special characters are allowed in reflected params, we need
to come up with a way to figure out what context (or contexts!) the reflected param appears in.
Really we need to figure out the context first and use that to prioritise the special character
checking part.

E.g. if we have something like this:

```
<h1>$reflectedParam</h1>
```

...then we need to check for `<` first as we can rule out XSS the fastest that way. We still
probably want to try multiple tricks for that char (e.g. double URL-encoding, that funky %EF%BC%9C thing, etc.)

But if we have, say, this:

```
<iframe src=$reflectedParam></iframe>
```

...then we might not want to mess about with `<` and other special chars initially, but instead
look at `javascript:` type payloads because they're more likely to work.

A given context will have a variety of characters / payloads that we'll want to try.

Honestly: I think this bit is going to be Hard with a capital aitch; especially when the context
is in between `<script>` tags and HTML parsers can't save us.

Another option is to just blindly try lots of payloads. That feels a bit more barbaric, but
my goodness does it make for simpler code. Those funky payloads that work in lots of contexts
are probably worth playing with too, although they have an increased chance of being blocked by
WAFs or other filters.
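
For the easy cases, an HTML tokenizer can at least tell text content apart from attribute values. A rough sketch using `golang.org/x/net/html` (an assumed dependency; `canary` is whatever random value we appended):

```
package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

// reflectionContexts reports where a canary string shows up in an HTML
// body: in a text node or in an attribute value. Reflections inside
// <script> surface as text tokens, which is exactly the case a
// tokenizer can't really help us with.
func reflectionContexts(body, canary string) []string {
	var contexts []string
	z := html.NewTokenizer(strings.NewReader(body))
	for {
		switch z.Next() {
		case html.ErrorToken:
			// io.EOF or a parse error; either way we're done
			return contexts
		case html.TextToken:
			if strings.Contains(string(z.Text()), canary) {
				contexts = append(contexts, "text node")
			}
		case html.StartTagToken, html.SelfClosingTagToken:
			name, hasAttr := z.TagName()
			for hasAttr {
				var key, val []byte
				key, val, hasAttr = z.TagAttr()
				if strings.Contains(string(val), canary) {
					contexts = append(contexts, fmt.Sprintf("attribute %s on <%s>", key, name))
				}
			}
		}
	}
}

func main() {
	body := `<h1>kxss123</h1><iframe src="kxss123"></iframe>`
	fmt.Println(reflectionContexts(body, "kxss123"))
}
```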

My suggestion is that for an MVP we settle for checking individual characters and having an output
that's something like:

```
o hai, i found reflected param X on url Y that allows these chars: <>"'
```

...and then letting the human go and investigate. We're basically never going to be
as good as a human at figuring out the contexts etc, but it would be nice for the tool to at
least try some full payloads at some point in the future.
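
A sketch of that per-character check (illustrative; the canary prefix and character set are assumptions, and a real version would reuse the reflection-finding code above):

```
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"
)

// allowedChars appends canary+char to a param, one character at a time,
// and reports which special characters come back unescaped in the body.
func allowedChars(rawURL, param, canary string) ([]string, error) {
	var allowed []string
	for _, c := range []string{"<", ">", `"`, "'"} {
		u, err := url.Parse(rawURL)
		if err != nil {
			return nil, err
		}
		qs := u.Query()
		qs.Set(param, qs.Get(param)+canary+c)
		u.RawQuery = qs.Encode()

		resp, err := http.Get(u.String())
		if err != nil {
			return nil, err
		}
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			return nil, err
		}

		// the canary prefix makes sure we're looking at our own
		// reflection, not a char that was already on the page
		if strings.Contains(string(body), canary+c) {
			allowed = append(allowed, c)
		}
	}
	return allowed, nil
}

func main() {
	chars, err := allowedChars("http://localhost:5566/?name=Tom", "name", "kxss123")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("o hai, i found reflected param name that allows these chars: %s\n", strings.Join(chars, ""))
}
```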

## Validation

If we do end up trying full payloads at any point I think basically the only way to reliably
validate that XSS fires is to use headless chrome (probably with `chromedp`) to load a URL
and then hook the alert boxes. This is resource intensive though so it should only be done
when we've exhausted what we can do with Go's HTTP client.
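
For reference, a rough sketch of what that hook might look like with `chromedp` (untested, assumption territory; the dialog-event listener is the standard chromedp pattern):

```
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/chromedp/cdproto/page"
	"github.com/chromedp/chromedp"
)

// alertFires loads a URL in headless Chrome and reports whether a
// JavaScript dialog (alert/confirm/prompt) opened along the way.
func alertFires(url string) (bool, error) {
	ctx, cancel := chromedp.NewContext(context.Background())
	defer cancel()
	ctx, cancel = context.WithTimeout(ctx, 15*time.Second)
	defer cancel()

	fired := make(chan struct{}, 1)
	chromedp.ListenTarget(ctx, func(ev interface{}) {
		if _, ok := ev.(*page.EventJavascriptDialogOpening); ok {
			select {
			case fired <- struct{}{}:
			default:
			}
			// dismiss the dialog so the page load doesn't hang
			go chromedp.Run(ctx, page.HandleJavaScriptDialog(true))
		}
	})

	if err := chromedp.Run(ctx, chromedp.Navigate(url)); err != nil {
		return false, err
	}

	select {
	case <-fired:
		return true, nil
	case <-time.After(2 * time.Second): // small grace period for late scripts
		return false, nil
	}
}

func main() {
	ok, err := alertFires("http://localhost:5566/?name=%3Cscript%3Ealert(1)%3C/script%3E")
	fmt.Println(ok, err)
}
```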
1 change: 1 addition & 0 deletions kxss/cmd/testserver/.gitignore
@@ -0,0 +1 @@
testserver
20 changes: 20 additions & 0 deletions kxss/cmd/testserver/main.go
@@ -0,0 +1,20 @@
package main

import (
"fmt"
"log"
"net/http"
)

func main() {
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
qs := r.URL.Query()
log.Printf("req: %#v", qs)
w.Header().Add("Content-Type", "text/html")
fmt.Fprintf(w, "Hello, %s!\n", qs.Get("name"))
fmt.Fprintf(w, "I hear you're %s years old!\n", qs.Get("age"))
fmt.Fprint(w, "My name is Buck and I'm here to greet you.\n")
})

log.Fatal(http.ListenAndServe("127.0.0.1:5566", nil))
}