Browse Source

Fixing Bugs, Bitcoin Address Extraction Framework

master
Sarah Jamie Lewis 4 years ago
parent
commit
6bad842e1d
6 changed files with 61 additions and 27 deletions
  1. +10
    -6
      main.go
  2. +1
    -1
      protocol/http_scanner.go
  3. +3
    -0
      report/onionscanreport.go
  4. +20
    -0
      scans/bitcoin_content_scan.go
  5. +24
    -19
      scans/standard-page-scan.go
  6. +3
    -1
      utils/networking.go

+ 10
- 6
main.go View File

@@ -50,8 +50,10 @@ func main() {
log.Fatalf("Could not read onion file %s\n", *list)
}
onions := strings.Split(string(content), "\n")
onionsToScan = append(onionsToScan, onions...)
log.Printf("Starting Scan of %d onion services\n", len(onionsToScan)-1)
for _, onion := range onions[0 : len(onions)-1] {
onionsToScan = append(onionsToScan, onion)
}
log.Printf("Starting Scan of %d onion services\n", len(onionsToScan))
}
log.Printf("This might take a few minutes..\n\n")

@@ -66,8 +68,8 @@ func main() {

count := 0
max := 100
if max > len(onionsToScan)-1 {
max = len(onionsToScan) - 1
if max > len(onionsToScan) {
max = len(onionsToScan)
}

// Run an initial batch of 100 requests (or less...)
@@ -77,13 +79,15 @@ func main() {
}

received := 0
for received < len(onionsToScan)-1 {
for received < len(onionsToScan) {
scanReport := <-reports

// After the initial batch, it's one in one out to prevent proxy overload.
if count < len(onionsToScan)-1 {
if count < len(onionsToScan) {
go onionScan.Scan(onionsToScan[count], reports)
count++
}

received++

if *jsonReport {


+ 1
- 1
protocol/http_scanner.go View File

@@ -40,7 +40,7 @@ func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, onionscanConf
transportConfig := &http.Transport{
Dial: dialSocksProxy,
}
hps.Client = &http.Client{Transport: transportConfig, Timeout: onionscanConfig.Timeout}
hps.Client = &http.Client{Transport: transportConfig}
// FIXME This should probably be moved to its own file now.
response, err := hps.Client.Get("http://" + hiddenService)



+ 3
- 0
report/onionscanreport.go View File

@@ -56,6 +56,9 @@ type OnionScanReport struct {
PageTitle string `json:"pageTitle"`
ResponseHeaders map[string]string `json:"responseHeaders"`

//Bitcoin
BitcoinAddresses []string `json:"bitcoinAddresses"`

// SSH
SSHKey string `json:"sshKey"`



+ 20
- 0
scans/bitcoin_content_scan.go View File

@@ -0,0 +1,20 @@
package scans

import (
	"github.com/s-rah/onionscan/report"
	"log"
	"regexp"
)

// bitcoinAddressRegexp matches candidate mainnet Bitcoin addresses: P2PKH
// ("1...") and P2SH ("3...") base58check strings. Compiled once at package
// init instead of on every scan.
//
// Fixes over the original pattern "[1|3][A-Za-z0-9]{25,34}":
//   - "[1|3]" is a character class containing a literal '|', so any string
//     starting with '|' also matched; "[13]" is the intended class.
//   - the body is restricted to the base58 alphabet, which excludes the
//     ambiguous characters 0, O, I and l — cutting obvious false positives.
var bitcoinAddressRegexp = regexp.MustCompile("[13][a-km-zA-HJ-NP-Z1-9]{25,34}")

// BitcoinContentScan extracts Bitcoin-address-shaped strings from scanned
// page content.
type BitcoinContentScan struct {
}

// ScanContent scans content for candidate Bitcoin addresses and appends
// every match to report.BitcoinAddresses. Matches are syntactic only; no
// checksum validation of the base58check payload is performed.
func (cs *BitcoinContentScan) ScanContent(content string, report *report.OnionScanReport) {
	log.Printf("Scanning for Bitcoin Address\n")
	for _, ba := range bitcoinAddressRegexp.FindAllString(content, -1) {
		log.Printf("Found Bitcoin Address: %s", ba)
		report.BitcoinAddresses = append(report.BitcoinAddresses, ba)
	}
}

+ 24
- 19
scans/standard-page-scan.go View File

@@ -32,6 +32,7 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
}

new(PGPContentScan).ScanContent(contents, report)
//new(BitcoinContentScan).ScanContent(contents, report)

log.Printf("\tScanning for Images\n")
var domains []string
@@ -68,12 +69,14 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
if t.Data == "img" {
imageUrl := utils.GetAttribute(t, "src")

baseUrl, _ := url.Parse(imageUrl)
if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) {
scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif)
log.Printf("\t Found internal image %s\n", imageUrl)
} else {
log.Printf("\t Not scanning remote image %s\n", imageUrl)
baseUrl, err := url.Parse(imageUrl)
if err == nil {
if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) {
scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif)
log.Printf("\t Found internal image %s\n", imageUrl)
} else {
log.Printf("\t Not scanning remote image %s\n", imageUrl)
}
}
}
}
@@ -83,25 +86,27 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
for _, cssUrl := range cssLinks {
log.Printf("\tScanning CSS file: %s\n", cssUrl)
_, cssContents, _ := scan.ScrapePage(report.HiddenService, utils.WithoutProtocol(cssUrl))
domains = append(domains, utils.ExtractDomains(string(cssContents))[0:]...)
domains = append(domains, utils.ExtractDomains(string(cssContents))[:]...)
}

log.Printf("\tScanning for Links\n")
domains = append(domains, utils.ExtractDomains(contents)...)
utils.RemoveDuplicates(&domains)
for _, domain := range domains {
baseUrl, _ := url.Parse(domain)
if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) {
log.Printf("Found Related URL %s\n", domain)
// TODO: Lots of information here which needs to be processed.
// * Links to standard sites - google / bitpay etc.
// * Links to other onion sites
// * Links to obscure clearnet sites.
report.AddLinkedSite(baseUrl.Host)
} else {
// * Process FQDN internal links
log.Printf("Found Internal URL %s\n", domain)
report.AddInternalPage(baseUrl.Host)
baseUrl, err := url.Parse(domain)
if err == nil {
if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) {
log.Printf("Found Related URL %s\n", domain)
// TODO: Lots of information here which needs to be processed.
// * Links to standard sites - google / bitpay etc.
// * Links to other onion sites
// * Links to obscure clearnet sites.
report.AddLinkedSite(baseUrl.Host)
} else {
// * Process FQDN internal links
log.Printf("Found Internal URL %s\n", domain)
report.AddInternalPage(baseUrl.Host)
}
}
}



+ 3
- 1
utils/networking.go View File

@@ -10,6 +10,8 @@ import (
// GetNetworkConnection dials onionService:port through the given SOCKS5
// proxy and, on success, arms an absolute read/write deadline of now+timeout
// on the returned connection.
//
// The deadline is set only when the dial succeeded: on error the returned
// net.Conn may be nil, and an unconditional SetDeadline would panic — that is
// the bug this guard exists to prevent. Note SetDeadline's own error is
// deliberately ignored here; the subsequent use of conn surfaces any failure.
func GetNetworkConnection(onionService string, port int, proxy string, timeout time.Duration) (net.Conn, error) {
	portNumber := strconv.Itoa(port)
	conn, err := socks.DialSocksProxy(socks.SOCKS5, proxy)("", onionService+":"+portNumber)
	if err == nil {
		conn.SetDeadline(time.Now().Add(timeout))
	}
	return conn, err
}

Loading…
Cancel
Save