Initial Commit

Sarah Jamie Lewis 2016-04-09 17:04:22 -07:00
commit 243f21f92c
13 changed files with 570 additions and 0 deletions

1
.gitignore vendored Normal file

@ -0,0 +1 @@
*~

71
README.md Normal file

@ -0,0 +1,71 @@
# <img src="onionscan.png" alt="OnionScan"/>
The purpose of this tool is to make you a better onion service provider. You owe
it to yourself and your users to ensure that attackers cannot easily exploit and
deanonymize your service.
## Dependencies
* h12.me/socks - For the Tor SOCKS Proxy connection.
* github.com/xiam/exif & libexif-dev - For EXIF data extraction.
## Running
For a simple report detailing the high, medium and low risk areas found:
`./bin/onionscan blahblahblah.onion`
The most interesting output comes from the verbose option:
`./bin/onionscan --verbose blahblahblah.onion`
There is also a JSON output, if you want to integrate with something else:
`./bin/onionscan --jsonReport blahblahblah.onion`
If you would like to use a proxy server listening on something other than `127.0.0.1:9050`, then you can use the `--torProxyAddress` flag:
`./bin/onionscan --torProxyAddress=127.0.0.1:9150 blahblahblah.onion`
## Apache mod_status Protection
This [should not be news](http://arstechnica.com/security/2016/02/default-settings-in-apache-may-decloak-tor-hidden-services/): you should not have mod_status enabled. If you do have it enabled, attackers can:
* Build a better fingerprint of your server, including php and other software versions.
* Determine client IP addresses if you are co-hosting a clearnet site.
* Determine your IP address if your setup allows.
* Determine other sites you are co-hosting.
* Determine how active your site is.
* Find secret or hidden areas of your site.
* and much, much more.
Seriously, you don't even need this tool: go to your site and check whether `/server-status`
is reachable. If it is, turn it off!
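A minimal sketch of the check, using the same `h12.me/socks` dialer the scanner itself uses (the `.onion` address is a placeholder; a 200 response is only a hint, since the real scan also looks for the mod_status "Server Version" banner):

```go
package main

import (
	"fmt"
	"net/http"

	"h12.me/socks"
)

func main() {
	// Dial through the local Tor SOCKS proxy so the request never leaves Tor.
	dial := socks.DialSocksProxy(socks.SOCKS5, "127.0.0.1:9050")
	client := &http.Client{Transport: &http.Transport{Dial: dial}}

	// blahblahblah.onion is a placeholder for your own service address.
	resp, err := client.Get("http://blahblahblah.onion/server-status")
	if err != nil {
		fmt.Println("could not connect:", err)
		return
	}
	defer resp.Body.Close()

	if resp.StatusCode == 200 {
		fmt.Println("/server-status answered with 200 - investigate and disable mod_status!")
	} else {
		fmt.Printf("/server-status returned %d - mod_status does not appear to be exposed\n", resp.StatusCode)
	}
}
```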
## Open Directories
Basic web security 101: if you leave directories open, people are going to scan
them and find interesting things - old versions of images, temp files etc.
Many sites use common structures such as `style/`, `images/` etc. The tool checks for
common variations, and allows the user to submit others for testing.
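The heuristic applied to each common path is simple; the sketch below mirrors the check in `scans.CheckDirectoryListing` later in this commit (the helper name is illustrative):

```go
package main

import (
	"fmt"
	"strings"
)

// looksLikeOpenDirectory mirrors the directory-listing heuristic used by the
// scanner: an HTTP 200 whose body contains "Index of <dir>".
func looksLikeOpenDirectory(status int, body string, dir string) bool {
	return status == 200 && strings.Contains(body, "Index of "+dir)
}

func main() {
	body := "<html><head><title>Index of /images</title></head>..."
	fmt.Println(looksLikeOpenDirectory(200, body, "/images")) // true
}
```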
## EXIF Tags
Whether you create images yourself or allow users to upload them, you need to
ensure the metadata associated with each image is stripped.
Many, many websites still do not properly sanitise image data, leaving themselves
or their users at risk of deanonymization.
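One way to strip metadata in Go is to decode and re-encode the image, since the standard library encoder only writes pixel data. A minimal sketch (file names are placeholders; JPEG re-encoding is lossy):

```go
package main

import (
	"image/jpeg"
	"log"
	"os"
)

func main() {
	// photo.jpg / photo-clean.jpg are placeholder paths.
	in, err := os.Open("photo.jpg")
	if err != nil {
		log.Fatal(err)
	}
	defer in.Close()

	// Decoding keeps only the pixel data; EXIF and other metadata are discarded.
	img, err := jpeg.Decode(in)
	if err != nil {
		log.Fatal(err)
	}

	out, err := os.Create("photo-clean.jpg")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	// Re-encoding writes a fresh JPEG with no EXIF block.
	if err := jpeg.Encode(out, img, &jpeg.Options{Quality: 90}); err != nil {
		log.Fatal(err)
	}
}
```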
## Server Fingerprint
Sometimes, even without mod_status, we can determine whether two sites are hosted on
the same infrastructure. We can use the following attributes to make this distinction (a comparison sketch follows the list):
* Server HTTP Header
* Technology Stack (e.g. PHP, jQuery versions etc.)
* Website folder layout, e.g. whether you use `/style` or `/css`, or whether you run WordPress.
* Fingerprints of images
* GPG Versions being used.
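As a sketch of how two scan reports could be compared on these attributes (the helper below is illustrative and not part of this commit):

```go
package main

import (
	"fmt"

	"github.com/s-rah/onionscan/report"
)

// sharedInfrastructureHints compares two reports on a couple of the
// attributes listed above; a real comparison would use more of them.
func sharedInfrastructureHints(a, b *report.OnionScanReport) []string {
	var hints []string
	if a.ServerVersion != "" && a.ServerVersion == b.ServerVersion {
		hints = append(hints, "identical Server header: "+a.ServerVersion)
	}
	for _, da := range a.OpenDirectories {
		for _, db := range b.OpenDirectories {
			if da == db {
				hints = append(hints, "same folder layout: "+da)
			}
		}
	}
	return hints
}

func main() {
	a := report.NewOnionScanReport("aaaaaaaaaaaaaaaa.onion")
	b := report.NewOnionScanReport("bbbbbbbbbbbbbbbb.onion")
	a.ServerVersion = "Apache/2.4.7 (Ubuntu)"
	b.ServerVersion = "Apache/2.4.7 (Ubuntu)"
	fmt.Println(sharedInfrastructureHints(a, b))
}
```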

152
main.go Normal file

@ -0,0 +1,152 @@
package main
import (
"flag"
"fmt"
"io/ioutil"
"log"
"os"
)
func main() {
flag.Usage = func() {
fmt.Printf("Usage of %s:\n", os.Args[0])
fmt.Printf(" onionscan [flags] hiddenservice\n")
flag.PrintDefaults()
}
torProxyAddress := flag.String("torProxyAddress", "127.0.0.1:9050", "the address of the tor proxy to use")
simpleReport := flag.Bool("simpleReport", true, "print out a simple report detailing what is wrong and how to fix it, true by default")
jsonReport := flag.Bool("jsonReport", false, "print out a json report providing a detailed report of the scan.")
verbose := flag.Bool("verbose", false, "print out a verbose log output of the scan")
flag.Parse()
if len(flag.Args()) != 1 {
flag.Usage()
os.Exit(1)
}
hiddenService := flag.Args()[0]
log.Printf("Starting Scan of %s\n", hiddenService)
log.Printf("This might take a few minutes..\n\n")
if !*verbose {
log.SetOutput(ioutil.Discard)
}
onionScan := Configure(*torProxyAddress)
report, err := onionScan.Scan(hiddenService)
if err != nil {
log.Fatalf("Error running scanner: %s", err)
}
if *jsonReport {
jsonOut, _ := report.Serialize()
fmt.Printf("%s\n", jsonOut)
}
// FIXME: This needs refactoring; it would be nice to move these checks into an external config file
if *simpleReport {
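// Tally issues into rough severity buckets; the thresholds below (10 EXIF images, 3 open directories, 10 interesting files) are heuristic.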
highRisk := 0
mediumRisk := 0
lowRisk := 0
if report.FoundApacheModStatus {
highRisk += 1
}
if len(report.RelatedClearnetDomains) > 0 {
highRisk += 1
}
if len(report.RelatedOnionServices) > 0 {
mediumRisk += 1
}
if report.ExifImages != nil {
if len(report.ExifImages) > 10 {
highRisk += 1
} else {
mediumRisk += 1
}
}
if report.OpenDirectories != nil {
if len(report.OpenDirectories) > 3 {
mediumRisk += 1
} else {
lowRisk += 1
}
}
if report.InterestingFiles != nil {
if len(report.InterestingFiles) > 10 {
mediumRisk += 1
} else {
lowRisk += 1
}
}
fmt.Printf("--------------- OnionScan Report ---------------\n")
fmt.Printf("High Risk Issues: %d\n", highRisk)
fmt.Printf("Medium Risk Issues: %d\n", mediumRisk)
fmt.Printf("Low Risk Issues: %d\n", lowRisk)
fmt.Printf("\n")
if report.FoundApacheModStatus {
fmt.Printf("\033[091mHigh Risk:\033[0m Apache mod_status is enabled and accessible\n")
fmt.Printf("\t Why this is bad: An attacker can gain very valuable information\n\t from this internal status page including IP addresses, co-hosted services and user activity.\n")
fmt.Printf("\t To fix, disable mod_status or serve it on a different port than the configured hidden service\n\n")
}
if len(report.RelatedClearnetDomains) > 0 {
fmt.Printf("\033[091mHigh Risk:\033[0m You are hosting a clearnet site on the same server as this onion service!\n")
fmt.Printf("\t Why this is bad: This may be intentional, but often isn't.\n\t Services are best operated in isolation such that a compromise of one does not mean a compromise of the other.\n")
fmt.Printf("\t To fix, host all services on separate infrastructure\n\n")
}
if len(report.RelatedOnionServices) > 0 {
fmt.Printf("\033[091mMedium Risk:\033[0m You are hosting multiple onion services on the same server as this onion service!\n")
fmt.Printf("\t Why this is bad: This may be intentional, but often isn't.\n\t Hidden services are best operated in isolation such that a compromise of one does not mean a compromise of the other.\n")
fmt.Printf("\t To fix, host all services on separate infrastructure\n\n")
}
if len(report.ExifImages) > 0 {
if len(report.ExifImages) > 10 {
fmt.Printf("\033[091mHigh Risk:\033[0m Large number of images with EXIF metadata were discovered!\n")
} else {
fmt.Printf("\033[091mMedium Risk:\033[0m Small number of images with EXIF metadata were discovered!\n")
}
fmt.Printf("\t Why this is bad: EXIF metadata can itself deanonymize a user or\n\t service operator (e.g. GPS location, Name etc.). Or, when combined, can be used to link anonymous identities together.\n")
fmt.Printf("\t To fix, re-encode all images to strip EXIF and other metadata.\n")
fmt.Printf("\t Images Identified:\n")
for _, image := range report.ExifImages {
fmt.Printf("\t\t%s\n", image.Location)
}
fmt.Printf("\n")
}
if len(report.OpenDirectories) > 0 {
if len(report.OpenDirectories) > 3 { // keep in sync with the risk tally above
fmt.Printf("\033[091mMedium Risk:\033[0m Large number of open directories were discovered!\n")
} else {
fmt.Printf("\033[091mLow Risk:\033[0m Small number of open directories were discovered!\n")
}
fmt.Printf("\t Why this is bad: Open directories can reveal the existence of files\n\t not linked from the sites source code. Most of the time this is benign, but sometimes operators forget to clean up more sensitive folders.\n")
fmt.Printf("\t To fix, use .htaccess rules or equivalent to make reading directories listings forbidden.\n")
fmt.Printf("\t Directories Identified:\n")
for _, dir := range report.OpenDirectories {
fmt.Printf("\t\t%s\n", dir)
}
fmt.Printf("\n")
}
}
}

74
onionscan.go Normal file

@ -0,0 +1,74 @@
package main
import (
"github.com/s-rah/onionscan/report"
"github.com/s-rah/onionscan/scans"
"h12.me/socks"
"io/ioutil"
"log"
"net/http"
)
type OnionScan struct {
TorProxyAddress string
Client *http.Client
}
func Configure(torProxyAddress string) *OnionScan {
onionScan := new(OnionScan)
onionScan.TorProxyAddress = torProxyAddress
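// Route every request through the Tor SOCKS5 proxy so connections (and onion name resolution) never leave Tor.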
dialSocksProxy := socks.DialSocksProxy(socks.SOCKS5, onionScan.TorProxyAddress)
transportConfig := &http.Transport{
Dial: dialSocksProxy,
}
onionScan.Client = &http.Client{Transport: transportConfig}
return onionScan
}
func (os *OnionScan) Scan(hiddenService string) (*report.OnionScanReport, error) {
report := report.NewOnionScanReport(hiddenService)
response, err := os.Client.Get("http://" + hiddenService)
if err != nil {
return report, err
}
// Initial Attempt at Resolving Server Type
log.Printf("Attempting to Derive Server Type from Headers..\n")
report.ServerVersion = response.Header.Get("Server")
log.Printf("\tServer Version: %s\n", report.ServerVersion)
// Apache mod-status Check
os.ScanPage(hiddenService, "/server-status", report, scans.ApacheModStatus)
os.ScanPage(hiddenService, "/", report, scans.StandardPageScan)
os.ScanPage(hiddenService, "/style", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/styles", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/css", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/uploads", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/images", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/img", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/static", report, scans.CheckDirectoryListing)
// Lots of WordPress installs don't lock down directory listings
os.ScanPage(hiddenService, "/wp-content/uploads", report, scans.CheckDirectoryListing)
// Common with TorShops-created onions
os.ScanPage(hiddenService, "/products", report, scans.CheckDirectoryListing)
os.ScanPage(hiddenService, "/products/cat", report, scans.CheckDirectoryListing)
return report, nil
}
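// ScanPage fetches a single page from the hidden service over the Tor proxy and hands the response to the supplied scan function.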
func (os *OnionScan) ScanPage(hiddenService string, page string, report *report.OnionScanReport, f func(scans.Scanner, string, int, string, *report.OnionScanReport)) {
response, err := os.Client.Get("http://" + hiddenService + page)
if err != nil {
log.Printf("Error connecting to %s%s %s\n", hiddenService, page, err)
return
}
defer response.Body.Close()
contents, _ := ioutil.ReadAll(response.Body)
f(os, page, response.StatusCode, string(contents), report)
}

BIN
onionscan.png Normal file

Binary file not shown. (Size: 12 KiB)

67
report/onionscanreport.go Normal file

@ -0,0 +1,67 @@
package report
import (
"encoding/json"
)
type ExifTag struct {
Name string `json:"name"`
Value string `json:"value"`
}
type ExifImage struct {
Location string `json:"location"`
ExifTags []ExifTag `json:"exifTags"`
}
type OnionScanReport struct {
HiddenService string `json:"hiddenService"`
ServerVersion string `json:"serverVersion"`
FoundApacheModStatus bool `json:"foundApacheModStatus"`
RelatedOnionServices []string `json:"relatedOnionServices"`
RelatedClearnetDomains []string `json:"relatedClearnetDomains"`
IP []string `json:"ipAddresses"`
OpenDirectories []string `json:"openDirectories"`
ExifImages []ExifImage `json:"exifImages"`
InterestingFiles []string `json:"interestingFiles"`
}
func NewOnionScanReport(hiddenService string) *OnionScanReport {
return &OnionScanReport{HiddenService: hiddenService}
}
func (osr *OnionScanReport) AddOpenDirectory(dir string) {
osr.OpenDirectories = append(osr.OpenDirectories, dir)
}
func (osr *OnionScanReport) AddRelatedOnionService(service string) {
osr.RelatedOnionServices = append(osr.RelatedOnionServices, service)
}
func (osr *OnionScanReport) AddRelatedClearnetDomain(domain string) {
osr.RelatedClearnetDomains = append(osr.RelatedClearnetDomains, domain)
}
func (osr *OnionScanReport) AddInterestingFile(file string) {
osr.InterestingFiles = append(osr.InterestingFiles, file)
}
func (osr *OnionScanReport) AddIPAddress(ip string) {
osr.IP = append(osr.IP, ip)
}
func (osr *OnionScanReport) Serialize() (string, error) {
report, err := json.Marshal(osr)
if err != nil {
return "", err
}
return string(report), nil
}
func (osr *OnionScanReport) AddExifImage(location string) {
osr.ExifImages = append(osr.ExifImages, ExifImage{location, []ExifTag{}})
}
func (osr *OnionScanReport) AddExifTag(name string, value string) {
osr.ExifImages[len(osr.ExifImages)-1].ExifTags = append(osr.ExifImages[len(osr.ExifImages)-1].ExifTags, ExifTag{name, value})
}


@ -0,0 +1,66 @@
package scans
import (
"github.com/s-rah/onionscan/report"
"github.com/s-rah/onionscan/utils"
"log"
"regexp"
"strings"
)
func ApacheModStatus(scan Scanner, page string, status int, contents string, report *report.OnionScanReport) {
if status == 200 {
r := regexp.MustCompile(`Server Version: (.*)</dt>`)
serverVersion := r.FindStringSubmatch(string(contents))
// Check if this looks like a mod_status page. Sometimes sites simply load their index.
if len(serverVersion) > 1 {
log.Printf("Detected Apache mod_status Exposed...\033[091mAlert!\033[0m\n")
report.FoundApacheModStatus = true
log.Printf("\t Using mod_status Server Version: %s\n", serverVersion[1])
report.ServerVersion = serverVersion[1]
// Check for co-hosted onion services.
log.Printf("Scanning for Co-Hosted Onions\n")
r = regexp.MustCompile(`[a-z0-9]+\.onion(:[0-9]{1,5})?`)
foundServices := r.FindAllString(string(contents), -1)
utils.RemoveDuplicates(&foundServices)
for _, onion := range foundServices {
if onion != report.HiddenService {
log.Printf("\t \033[091mAlert!\033[0m Found Co-Hosted Onions: %s\n", onion)
report.AddRelatedOnionService(onion)
}
}
// Check for co-hosted clearnet domains.
log.Printf("Scanning for Co-Hosted Clearnet Domains\n")
r = regexp.MustCompile(`>(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|([a-zA-Z0-9][a-zA-Z0-9-_]{1,61}[a-zA-Z0-9]))\.([a-zA-Z]{2,6}|[a-zA-Z0-9-]{2,30}\.[a-zA-Z]{2,3})`)
foundServices = r.FindAllString(string(contents), -1)
utils.RemoveDuplicates(&foundServices)
for _, domain := range foundServices {
if !strings.Contains(domain, ".onion") {
log.Printf("\t \033[091mAlert!\033[0m Found Co-Hosted Service: %s\n", domain[1:])
report.AddRelatedClearnetDomain(domain[1:])
}
}
// Check for IP Addresses
log.Printf("Scanning for IP Addresses (clearweb clients, and servers)\n")
r = regexp.MustCompile(`(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)`)
foundIPs := r.FindAllString(string(contents), -1)
utils.RemoveDuplicates(&foundIPs)
for _, ip := range foundIPs {
if ip != "127.0.0.1" {
log.Printf("\t \033[091mAlert!\033[0m Found IP Address : %s\n", ip)
report.AddIPAddress(ip)
}
}
}
}
if !report.FoundApacheModStatus {
log.Printf("\tApache mod_status Not Exposed...\033[92mGood!\033[0m\n")
report.FoundApacheModStatus = false
}
}


@ -0,0 +1,8 @@
{
"name":"Apache mod_status is Accessible",
"location":"/server-status",
"requirements": [
{"equals": ["http-status-code", 200]},
{"contains":["contents","Server Version: (.*)</dt>"]}
]
}


@ -0,0 +1,42 @@
package scans
import (
"github.com/s-rah/onionscan/report"
"log"
"regexp"
"strings"
)
func CheckDirectoryListing(scan Scanner, dir string, status int, contents string, report *report.OnionScanReport) {
if status == 200 && strings.Contains(string(contents), "Index of "+dir) {
log.Printf("Detected Open Directory %s...\033[091mAlert!\033[0m\n", dir)
report.AddOpenDirectory(dir)
r := regexp.MustCompile(`href="((.*?\.jpg)|(.*?\.png)|(.*?\.jpeg)|(.*?\.gif))"`)
foundImages := r.FindAllStringSubmatch(string(contents), -1)
for _, image := range foundImages {
log.Printf("\t Found image %s/%s\n", dir, image[1])
scan.ScanPage(report.HiddenService, dir+"/"+image[1], report, CheckExif)
}
r = regexp.MustCompile(`href="((.*\.zip)|(.*\.tar)|(.*\.gz)|(.*\.pst)|(.*\.txt))"`)
interestingFiles := r.FindAllStringSubmatch(string(contents), -1)
for _, file := range interestingFiles {
log.Printf("\t Found interesting file %s/%s\n", dir, file[1])
//TODO: We can do further analysis here, for now, just report them.
report.AddInterestingFile(dir+"/"+file[1])
}
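// Links that end in "/" (and do not start with "/") are treated as subdirectories and scanned recursively.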
r = regexp.MustCompile(`href="([^/](.*?))/"`)
subDir := r.FindAllStringSubmatch(string(contents), -1)
for _, file := range subDir {
log.Printf("\t Found subdir %s/%s\n", dir, file[1])
//TODO: We can do further analysis here, for now, just report them.
scan.ScanPage(report.HiddenService, dir+"/"+file[1], report, CheckDirectoryListing)
}
} else {
log.Printf("Directory %s either doesn't exist or is not readable\n", dir)
}
}

39
scans/check_exif.go Normal file

@ -0,0 +1,39 @@
package scans
import (
"github.com/s-rah/onionscan/report"
"github.com/xiam/exif"
"io"
"log"
"strings"
)
func CheckExif(scan Scanner, page string, status int, contents string, report *report.OnionScanReport) {
if status == 200 {
reader := exif.New()
_, err := io.Copy(reader, strings.NewReader(contents))
// exif.ErrFoundExifInData signals that the EXIF parser has all it needs;
// it doesn't need to be given the whole image.
if err != nil && err != exif.ErrFoundExifInData {
// We don't care if we fail
return
}
err = reader.Parse()
if err != nil {
// We don't care if we fail
return
}
if len(reader.Tags) > 0 {
report.AddExifImage(page)
for name, val := range reader.Tags {
log.Printf("\t \033[091mAlert!\033[0m Found Exif Tag%s: %s\n", name, val)
report.AddExifTag(name, val)
}
}
}
}

9
scans/scanner.go Normal file

@ -0,0 +1,9 @@
package scans
import (
"github.com/s-rah/onionscan/report"
)
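// Scanner is implemented by anything that can fetch a page from a hidden service and pass the result to a scan function.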
type Scanner interface {
ScanPage(string, string, *report.OnionScanReport, func(Scanner, string, int, string, *report.OnionScanReport))
}


@ -0,0 +1,26 @@
package scans
import (
"github.com/s-rah/onionscan/report"
"log"
"regexp"
)
func StandardPageScan(scan Scanner, page string, status int, contents string, report *report.OnionScanReport) {
log.Printf("Scanning %s%s\n", report.HiddenService, page)
if status == 200 {
log.Printf("\tPage %s%s is Accessible\n", report.HiddenService, page)
log.Printf("\tScanning for Images\n")
r := regexp.MustCompile("src=\"(" + "http://" + report.HiddenService + "/)?((.*?\\.jpg)|(.*?\\.png)|(.*?\\.jpeg)|(.*?\\.gif))\"")
foundImages := r.FindAllStringSubmatch(string(contents), -1)
for _, image := range foundImages {
log.Printf("\t Found image %s\n", image[2])
scan.ScanPage(report.HiddenService, "/"+image[2], report, CheckExif)
}
} else if status == 403 {
log.Printf("\tPage %s%s is Forbidden\n", report.HiddenService, page)
} else if status == 404 {
log.Printf("\tPage %s%s is Does Not Exist\n", report.HiddenService, page)
}
}


@ -0,0 +1,15 @@
package utils
// RemoveDuplicates removes duplicate strings from a slice in place, preserving order.
func RemoveDuplicates(xs *[]string) {
found := make(map[string]bool)
j := 0
for i, x := range *xs {
if !found[x] {
found[x] = true
(*xs)[j] = (*xs)[i]
j++
}
}
*xs = (*xs)[:j]
}