Fixing Bugs, Bitcoin Address Extraction Framework

This commit is contained in:
Sarah Jamie Lewis 2016-06-11 16:02:44 -07:00
parent 46996dc77f
commit 6bad842e1d
6 changed files with 61 additions and 27 deletions

16
main.go
View File

@ -50,8 +50,10 @@ func main() {
log.Fatalf("Could not read onion file %s\n", *list) log.Fatalf("Could not read onion file %s\n", *list)
} }
onions := strings.Split(string(content), "\n") onions := strings.Split(string(content), "\n")
onionsToScan = append(onionsToScan, onions...) for _, onion := range onions[0 : len(onions)-1] {
log.Printf("Starting Scan of %d onion services\n", len(onionsToScan)-1) onionsToScan = append(onionsToScan, onion)
}
log.Printf("Starting Scan of %d onion services\n", len(onionsToScan))
} }
log.Printf("This might take a few minutes..\n\n") log.Printf("This might take a few minutes..\n\n")
@ -66,8 +68,8 @@ func main() {
count := 0 count := 0
max := 100 max := 100
if max > len(onionsToScan)-1 { if max > len(onionsToScan) {
max = len(onionsToScan) - 1 max = len(onionsToScan)
} }
// Run an initial batch of 100 requests (or less...) // Run an initial batch of 100 requests (or less...)
@ -77,13 +79,15 @@ func main() {
} }
received := 0 received := 0
for received < len(onionsToScan)-1 { for received < len(onionsToScan) {
scanReport := <-reports scanReport := <-reports
// After the initial batch, it's one in one out to prevent proxy overload. // After the initial batch, it's one in one out to prevent proxy overload.
if count < len(onionsToScan)-1 { if count < len(onionsToScan) {
go onionScan.Scan(onionsToScan[count], reports) go onionScan.Scan(onionsToScan[count], reports)
count++ count++
} }
received++ received++
if *jsonReport { if *jsonReport {

View File

@ -40,7 +40,7 @@ func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, onionscanConf
transportConfig := &http.Transport{ transportConfig := &http.Transport{
Dial: dialSocksProxy, Dial: dialSocksProxy,
} }
hps.Client = &http.Client{Transport: transportConfig, Timeout: onionscanConfig.Timeout} hps.Client = &http.Client{Transport: transportConfig}
// FIXME This should probably be moved to its own file now. // FIXME This should probably be moved to its own file now.
response, err := hps.Client.Get("http://" + hiddenService) response, err := hps.Client.Get("http://" + hiddenService)

View File

@ -56,6 +56,9 @@ type OnionScanReport struct {
PageTitle string `json:"pageTitle"` PageTitle string `json:"pageTitle"`
ResponseHeaders map[string]string `json:"responseHeaders"` ResponseHeaders map[string]string `json:"responseHeaders"`
//Bitcoin
BitcoinAddresses []string `json:"bitcoinAddresses"`
// SSH // SSH
SSHKey string `json:"sshKey"` SSHKey string `json:"sshKey"`

View File

@ -0,0 +1,20 @@
package scans
import (
"github.com/s-rah/onionscan/report"
"log"
"regexp"
)
type BitcoinContentScan struct {
}
func (cs *BitcoinContentScan) ScanContent(content string, report *report.OnionScanReport) {
log.Printf("Scanning for Bitcoin Address\n")
bitcoinAddressRegexp := regexp.MustCompile("[1|3][A-Za-z0-9]{25,34}")
foundBitcoinAddress := bitcoinAddressRegexp.FindAllString(content, -1)
for _, ba := range foundBitcoinAddress {
log.Printf("Found Bitcoin Address: %s", ba)
report.BitcoinAddresses = append(report.BitcoinAddresses, ba)
}
}

View File

@ -32,6 +32,7 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
} }
new(PGPContentScan).ScanContent(contents, report) new(PGPContentScan).ScanContent(contents, report)
//new(BitcoinContentScan).ScanContent(contents, report)
log.Printf("\tScanning for Images\n") log.Printf("\tScanning for Images\n")
var domains []string var domains []string
@ -68,12 +69,14 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
if t.Data == "img" { if t.Data == "img" {
imageUrl := utils.GetAttribute(t, "src") imageUrl := utils.GetAttribute(t, "src")
baseUrl, _ := url.Parse(imageUrl) baseUrl, err := url.Parse(imageUrl)
if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) { if err == nil {
scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif) if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) {
log.Printf("\t Found internal image %s\n", imageUrl) scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif)
} else { log.Printf("\t Found internal image %s\n", imageUrl)
log.Printf("\t Not scanning remote image %s\n", imageUrl) } else {
log.Printf("\t Not scanning remote image %s\n", imageUrl)
}
} }
} }
} }
@ -83,25 +86,27 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
for _, cssUrl := range cssLinks { for _, cssUrl := range cssLinks {
log.Printf("\tScanning CSS file: %s\n", cssUrl) log.Printf("\tScanning CSS file: %s\n", cssUrl)
_, cssContents, _ := scan.ScrapePage(report.HiddenService, utils.WithoutProtocol(cssUrl)) _, cssContents, _ := scan.ScrapePage(report.HiddenService, utils.WithoutProtocol(cssUrl))
domains = append(domains, utils.ExtractDomains(string(cssContents))[0:]...) domains = append(domains, utils.ExtractDomains(string(cssContents))[:]...)
} }
log.Printf("\tScanning for Links\n") log.Printf("\tScanning for Links\n")
domains = append(domains, utils.ExtractDomains(contents)...) domains = append(domains, utils.ExtractDomains(contents)...)
utils.RemoveDuplicates(&domains) utils.RemoveDuplicates(&domains)
for _, domain := range domains { for _, domain := range domains {
baseUrl, _ := url.Parse(domain) baseUrl, err := url.Parse(domain)
if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) { if err == nil {
log.Printf("Found Related URL %s\n", domain) if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) {
// TODO: Lots of information here which needs to be processed. log.Printf("Found Related URL %s\n", domain)
// * Links to standard sites - google / bitpay etc. // TODO: Lots of information here which needs to be processed.
// * Links to other onion sites // * Links to standard sites - google / bitpay etc.
// * Links to obscure clearnet sites. // * Links to other onion sites
report.AddLinkedSite(baseUrl.Host) // * Links to obscure clearnet sites.
} else { report.AddLinkedSite(baseUrl.Host)
// * Process FQDN internal links } else {
log.Printf("Found Internal URL %s\n", domain) // * Process FQDN internal links
report.AddInternalPage(baseUrl.Host) log.Printf("Found Internal URL %s\n", domain)
report.AddInternalPage(baseUrl.Host)
}
} }
} }

View File

@ -10,6 +10,8 @@ import (
func GetNetworkConnection(onionService string, port int, proxy string, timeout time.Duration) (net.Conn, error) { func GetNetworkConnection(onionService string, port int, proxy string, timeout time.Duration) (net.Conn, error) {
portNumber := strconv.Itoa(port) portNumber := strconv.Itoa(port)
conn, err := socks.DialSocksProxy(socks.SOCKS5, proxy)("", onionService+":"+portNumber) conn, err := socks.DialSocksProxy(socks.SOCKS5, proxy)("", onionService+":"+portNumber)
conn.SetDeadline(time.Now().Add(timeout)) if err == nil {
conn.SetDeadline(time.Now().Add(timeout))
}
return conn, err return conn, err
} }