diff --git a/main.go b/main.go
index b7d1fc6..2201923 100644
--- a/main.go
+++ b/main.go
@@ -50,8 +50,10 @@ func main() {
 			log.Fatalf("Could not read onion file %s\n", *list)
 		}
 		onions := strings.Split(string(content), "\n")
-		onionsToScan = append(onionsToScan, onions...)
-		log.Printf("Starting Scan of %d onion services\n", len(onionsToScan)-1)
+		for _, onion := range onions[0 : len(onions)-1] {
+			onionsToScan = append(onionsToScan, onion)
+		}
+		log.Printf("Starting Scan of %d onion services\n", len(onionsToScan))
 	}
 
 	log.Printf("This might take a few minutes..\n\n")
@@ -66,8 +68,8 @@ func main() {
 	count := 0
 	max := 100
 
-	if max > len(onionsToScan)-1 {
-		max = len(onionsToScan) - 1
+	if max > len(onionsToScan) {
+		max = len(onionsToScan)
 	}
 
 	// Run an initial batch of 100 requests (or less...)
@@ -77,13 +79,15 @@ func main() {
 	}
 
 	received := 0
-	for received < len(onionsToScan)-1 {
+	for received < len(onionsToScan) {
 		scanReport := <-reports
 
+		// After the initial batch, it's one in one out to prevent proxy overload.
-		if count < len(onionsToScan)-1 {
+		if count < len(onionsToScan) {
 			go onionScan.Scan(onionsToScan[count], reports)
 			count++
 		}
+
 		received++
 
 		if *jsonReport {
diff --git a/protocol/http_scanner.go b/protocol/http_scanner.go
index d571ed2..654e57d 100644
--- a/protocol/http_scanner.go
+++ b/protocol/http_scanner.go
@@ -40,7 +40,7 @@ func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, onionscanConf
 	transportConfig := &http.Transport{
 		Dial: dialSocksProxy,
 	}
-	hps.Client = &http.Client{Transport: transportConfig, Timeout: onionscanConfig.Timeout}
+	hps.Client = &http.Client{Transport: transportConfig}
 
 	// FIXME This should probably be moved to it's own file now.
 	response, err := hps.Client.Get("http://" + hiddenService)
diff --git a/report/onionscanreport.go b/report/onionscanreport.go
index ee90faf..ad09771 100644
--- a/report/onionscanreport.go
+++ b/report/onionscanreport.go
@@ -56,6 +56,9 @@ type OnionScanReport struct {
 	PageTitle string `json:"pageTitle"`
 	ResponseHeaders map[string]string `json:"responseHeaders"`
 
+	//Bitcoin
+	BitcoinAddresses []string `json:"bitcoinAddresses"`
+
 	// SSH
 	SSHKey string `json:"sshKey"`
 
diff --git a/scans/bitcoin_content_scan.go b/scans/bitcoin_content_scan.go
new file mode 100644
index 0000000..4518864
--- /dev/null
+++ b/scans/bitcoin_content_scan.go
@@ -0,0 +1,20 @@
+package scans
+
+import (
+	"github.com/s-rah/onionscan/report"
+	"log"
+	"regexp"
+)
+
+type BitcoinContentScan struct {
+}
+
+func (cs *BitcoinContentScan) ScanContent(content string, report *report.OnionScanReport) {
+	log.Printf("Scanning for Bitcoin Address\n")
+	bitcoinAddressRegexp := regexp.MustCompile("[1|3][A-Za-z0-9]{25,34}")
+	foundBitcoinAddress := bitcoinAddressRegexp.FindAllString(content, -1)
+	for _, ba := range foundBitcoinAddress {
+		log.Printf("Found Bitcoin Address: %s", ba)
+		report.BitcoinAddresses = append(report.BitcoinAddresses, ba)
+	}
+}
diff --git a/scans/standard-page-scan.go b/scans/standard-page-scan.go
index db44c5f..99ee343 100644
--- a/scans/standard-page-scan.go
+++ b/scans/standard-page-scan.go
@@ -32,6 +32,7 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 	}
 
 	new(PGPContentScan).ScanContent(contents, report)
+	//new(BitcoinContentScan).ScanContent(contents, report)
 
 	log.Printf("\tScanning for Images\n")
 	var domains []string
@@ -68,12 +69,14 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 
 				if t.Data == "img" {
 					imageUrl := utils.GetAttribute(t, "src")
-					baseUrl, _ := url.Parse(imageUrl)
-					if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) {
-						scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif)
-						log.Printf("\t Found internal image %s\n", imageUrl)
-					} else {
-						log.Printf("\t Not scanning remote image %s\n", imageUrl)
+					baseUrl, err := url.Parse(imageUrl)
+					if err == nil {
+						if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) {
+							scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif)
+							log.Printf("\t Found internal image %s\n", imageUrl)
+						} else {
+							log.Printf("\t Not scanning remote image %s\n", imageUrl)
+						}
 					}
 				}
 			}
@@ -83,25 +86,27 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 	for _, cssUrl := range cssLinks {
 		log.Printf("\tScanning CSS file: %s\n", cssUrl)
 		_, cssContents, _ := scan.ScrapePage(report.HiddenService, utils.WithoutProtocol(cssUrl))
-		domains = append(domains, utils.ExtractDomains(string(cssContents))[0:]...)
+		domains = append(domains, utils.ExtractDomains(string(cssContents))[:]...)
 	}
 
 	log.Printf("\tScanning for Links\n")
 	domains = append(domains, utils.ExtractDomains(contents)...)
 	utils.RemoveDuplicates(&domains)
 	for _, domain := range domains {
-		baseUrl, _ := url.Parse(domain)
-		if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) {
-			log.Printf("Found Related URL %s\n", domain)
-			// TODO: Lots of information here which needs to be processed.
-			// * Links to standard sites - google / bitpay etc.
-			// * Links to other onion sites
-			// * Links to obscure clearnet sites.
-			report.AddLinkedSite(baseUrl.Host)
-		} else {
-			// * Process FQDN internal links
-			log.Printf("Found Internal URL %s\n", domain)
-			report.AddInternalPage(baseUrl.Host)
+		baseUrl, err := url.Parse(domain)
+		if err == nil {
+			if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) {
+				log.Printf("Found Related URL %s\n", domain)
+				// TODO: Lots of information here which needs to be processed.
+				// * Links to standard sites - google / bitpay etc.
+				// * Links to other onion sites
+				// * Links to obscure clearnet sites.
+				report.AddLinkedSite(baseUrl.Host)
+			} else {
+				// * Process FQDN internal links
+				log.Printf("Found Internal URL %s\n", domain)
+				report.AddInternalPage(baseUrl.Host)
+			}
 		}
 	}
 
diff --git a/utils/networking.go b/utils/networking.go
index 9bfd179..a1151e5 100644
--- a/utils/networking.go
+++ b/utils/networking.go
@@ -10,6 +10,8 @@ import (
 func GetNetworkConnection(onionService string, port int, proxy string, timeout time.Duration) (net.Conn, error) {
 	portNumber := strconv.Itoa(port)
 	conn, err := socks.DialSocksProxy(socks.SOCKS5, proxy)("", onionService+":"+portNumber)
-	conn.SetDeadline(time.Now().Add(timeout))
+	if err == nil {
+		conn.SetDeadline(time.Now().Add(timeout))
+	}
 	return conn, err
 }
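
The main.go hunks above implement a simple throttling scheme: start an initial batch of at most 100 scans, then launch one new scan for each report that comes back, so the SOCKS proxy never has more than 100 requests in flight. Below is a minimal standalone sketch of that pattern; the scan function, the report struct and the sample .onion names are placeholders for onionScan.Scan and the real report type, not part of the patch.

package main

// Sketch of the batching pattern from main.go: launch up to "max" scans
// immediately, then one in, one out as reports arrive, so the number of
// in-flight scans never exceeds the initial batch size. "scan", "report"
// and the target names are placeholders, not OnionScan code.

import "log"

type report struct{ target string }

func scan(target string, reports chan<- report) {
	// Real scanning work would happen here.
	reports <- report{target: target}
}

func main() {
	onionsToScan := []string{"a.onion", "b.onion", "c.onion"}
	reports := make(chan report)

	max := 100
	if max > len(onionsToScan) {
		max = len(onionsToScan)
	}

	// Run an initial batch of up to 100 scans.
	count := 0
	for count < max {
		go scan(onionsToScan[count], reports)
		count++
	}

	received := 0
	for received < len(onionsToScan) {
		r := <-reports
		// After the initial batch, only start another scan once a report arrives.
		if count < len(onionsToScan) {
			go scan(onionsToScan[count], reports)
			count++
		}
		received++
		log.Printf("Finished %s (%d/%d)\n", r.target, received, len(onionsToScan))
	}
}

Because the reports channel is unbuffered, each worker blocks until the main loop receives its result, which is what keeps the number of concurrent requests bounded.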
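The regular expression in the new scans/bitcoin_content_scan.go is a loose first pass: inside a character class | is a literal, so [1|3] also accepts a leading |, and [A-Za-z0-9] admits characters (0, O, I, l) that never occur in a Base58Check address. The sketch below, which is not part of the patch, compares the committed pattern with a stricter base58-style class on two well-known example addresses; it does pattern matching only, with no checksum validation.

package main

// Compare the committed Bitcoin address pattern with a stricter base58-style one.
// Neither validates the Base58Check checksum; this is pattern matching only.

import (
	"fmt"
	"regexp"
)

func main() {
	content := "Donate: 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2 or 3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy"

	// Pattern as committed in scans/bitcoin_content_scan.go.
	committed := regexp.MustCompile("[1|3][A-Za-z0-9]{25,34}")

	// Stricter sketch: base58 alphabet only (no 0, O, I or l) and no literal "|".
	stricter := regexp.MustCompile("[13][a-km-zA-HJ-NP-Z1-9]{25,34}")

	fmt.Println(committed.FindAllString(content, -1))
	fmt.Println(stricter.FindAllString(content, -1))
}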