Fixing Bugs, Bitcoin Address Extraction Framework
parent 46996dc77f
commit 6bad842e1d
main.go | 16
@@ -50,8 +50,10 @@ func main() {
 			log.Fatalf("Could not read onion file %s\n", *list)
 		}
 		onions := strings.Split(string(content), "\n")
-		onionsToScan = append(onionsToScan, onions...)
-		log.Printf("Starting Scan of %d onion services\n", len(onionsToScan)-1)
+		for _, onion := range onions[0 : len(onions)-1] {
+			onionsToScan = append(onionsToScan, onion)
+		}
+		log.Printf("Starting Scan of %d onion services\n", len(onionsToScan))
 	}
 	log.Printf("This might take a few minutes..\n\n")
 
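Note on this hunk: strings.Split on a file that ends with a newline yields a trailing empty string, so the new loop stops at len(onions)-1 and the printed count no longer needs the -1 correction. A minimal standalone sketch of that behaviour, with hypothetical input (not taken from the commit):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical file contents ending in a newline.
	content := "aaaa.onion\nbbbb.onion\n"

	onions := strings.Split(content, "\n")
	// onions == ["aaaa.onion", "bbbb.onion", ""] -- note the trailing empty entry.

	var onionsToScan []string
	for _, onion := range onions[0 : len(onions)-1] {
		onionsToScan = append(onionsToScan, onion)
	}
	fmt.Printf("Starting Scan of %d onion services\n", len(onionsToScan)) // prints 2
}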
@@ -66,8 +68,8 @@ func main() {
 	count := 0
 	max := 100
-	if max > len(onionsToScan)-1 {
-		max = len(onionsToScan) - 1
+	if max > len(onionsToScan) {
+		max = len(onionsToScan)
 	}
 
 	// Run an initial batch of 100 requests (or less...)
@@ -77,13 +79,15 @@ func main() {
 	}
 
 	received := 0
-	for received < len(onionsToScan)-1 {
+	for received < len(onionsToScan) {
 		scanReport := <-reports
 
 		// After the initial batch, it's one in one out to prevent proxy overload.
-		if count < len(onionsToScan)-1 {
+		if count < len(onionsToScan) {
			go onionScan.Scan(onionsToScan[count], reports)
			count++
 		}
 
 		received++
 
 		if *jsonReport {
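These two hunks remove the off-by-one bookkeeping around the scan batch: an initial batch of up to max scans is launched, and then each report received over the channel triggers exactly one new scan ("one in, one out"), so at most max requests are in flight against the SOCKS proxy. A minimal, self-contained sketch of that pattern, with a placeholder scan function standing in for onionScan.Scan (names here are illustrative, not from the commit):

package main

import (
	"fmt"
	"time"
)

// scan is a stand-in for onionScan.Scan: it "scans" a target and
// sends a report string back on the channel.
func scan(target string, reports chan<- string) {
	time.Sleep(10 * time.Millisecond) // pretend work
	reports <- "report for " + target
}

func main() {
	targets := []string{"a.onion", "b.onion", "c.onion", "d.onion"}
	reports := make(chan string, len(targets))

	// Cap the initial batch (2 here instead of 100, for brevity).
	max := 2
	if max > len(targets) {
		max = len(targets)
	}

	// Launch the initial batch.
	count := 0
	for count < max {
		go scan(targets[count], reports)
		count++
	}

	// One in, one out: every received report frees a slot for one more scan.
	received := 0
	for received < len(targets) {
		report := <-reports
		if count < len(targets) {
			go scan(targets[count], reports)
			count++
		}
		received++
		fmt.Println(report)
	}
}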
@@ -40,7 +40,7 @@ func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, onionscanConf
 	transportConfig := &http.Transport{
 		Dial: dialSocksProxy,
 	}
-	hps.Client = &http.Client{Transport: transportConfig, Timeout: onionscanConfig.Timeout}
+	hps.Client = &http.Client{Transport: transportConfig}
 	// FIXME This should probably be moved to it's own file now.
 	response, err := hps.Client.Get("http://" + hiddenService)
 
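This hunk drops the overall Timeout from the http.Client, leaving only the SOCKS Dial on the Transport. If a bound on slow hidden services is still wanted, one option (a sketch only, not what this commit does) is to wrap the dial function so every connection carries a deadline, in the same spirit as the GetNetworkConnection change further down; net.Dial stands in here for the SOCKS dialer:

package main

import (
	"net"
	"net/http"
	"time"
)

// withDeadline wraps a dial function so every connection it returns carries
// a read/write deadline, bounding slow hosts at the socket level instead of
// with an overall http.Client timeout.
func withDeadline(dial func(network, addr string) (net.Conn, error), d time.Duration) func(network, addr string) (net.Conn, error) {
	return func(network, addr string) (net.Conn, error) {
		conn, err := dial(network, addr)
		if err == nil {
			conn.SetDeadline(time.Now().Add(d))
		}
		return conn, err
	}
}

func main() {
	// net.Dial stands in for the SOCKS proxy dialer used by the scanner.
	client := &http.Client{Transport: &http.Transport{
		Dial: withDeadline(net.Dial, 2*time.Minute),
	}}
	_ = client
}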
@@ -56,6 +56,9 @@ type OnionScanReport struct {
 	PageTitle       string            `json:"pageTitle"`
 	ResponseHeaders map[string]string `json:"responseHeaders"`
 
+	//Bitcoin
+	BitcoinAddresses []string `json:"bitcoinAddresses"`
+
 	// SSH
 	SSHKey string `json:"sshKey"`
 
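With the new field, a JSON report (as produced when the jsonReport flag is set) gains a bitcoinAddresses array. A tiny illustration of how the struct tag maps to the output, using a trimmed-down stand-in struct and hypothetical values rather than the full OnionScanReport:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down stand-in for report.OnionScanReport.
type miniReport struct {
	PageTitle        string   `json:"pageTitle"`
	BitcoinAddresses []string `json:"bitcoinAddresses"`
}

func main() {
	r := miniReport{
		PageTitle:        "Example Hidden Service",
		BitcoinAddresses: []string{"1ExampleAddressxxxxxxxxxxxxxxxxxxx"},
	}
	out, _ := json.MarshalIndent(r, "", "  ")
	fmt.Println(string(out))
	// Output:
	// {
	//   "pageTitle": "Example Hidden Service",
	//   "bitcoinAddresses": [
	//     "1ExampleAddressxxxxxxxxxxxxxxxxxxx"
	//   ]
	// }
}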
@@ -0,0 +1,20 @@
+package scans
+
+import (
+	"github.com/s-rah/onionscan/report"
+	"log"
+	"regexp"
+)
+
+type BitcoinContentScan struct {
+}
+
+func (cs *BitcoinContentScan) ScanContent(content string, report *report.OnionScanReport) {
+	log.Printf("Scanning for Bitcoin Address\n")
+	bitcoinAddressRegexp := regexp.MustCompile("[1|3][A-Za-z0-9]{25,34}")
+	foundBitcoinAddress := bitcoinAddressRegexp.FindAllString(content, -1)
+	for _, ba := range foundBitcoinAddress {
+		log.Printf("Found Bitcoin Address: %s", ba)
+		report.BitcoinAddresses = append(report.BitcoinAddresses, ba)
+	}
+}
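A note on the new pattern: inside a character class the pipe is literal, so [1|3] also matches "|", and [A-Za-z0-9] admits characters (0, O, I, l) that never appear in Base58 Bitcoin addresses, so the scan will over-match. A tighter variant, offered here only as a sketch and not part of the commit, restricts the body to the Base58 alphabet and anchors on word boundaries:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Base58 alphabet: digits 1-9 and letters excluding 0, O, I and l.
	// \b anchors keep the match from starting or ending inside a longer token.
	tightened := regexp.MustCompile(`\b[13][a-km-zA-HJ-NP-Z1-9]{25,34}\b`)

	content := `donate: 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2 | not-an-address: 1OOOOOOOOOOOOOOOOOOOOOOOOOOOOO`
	fmt.Println(tightened.FindAllString(content, -1))
	// [1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2]
}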
@@ -32,6 +32,7 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 	}
 
 	new(PGPContentScan).ScanContent(contents, report)
+	//new(BitcoinContentScan).ScanContent(contents, report)
 
 	log.Printf("\tScanning for Images\n")
 	var domains []string
@@ -68,7 +69,8 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 		if t.Data == "img" {
 			imageUrl := utils.GetAttribute(t, "src")
 
-			baseUrl, _ := url.Parse(imageUrl)
+			baseUrl, err := url.Parse(imageUrl)
+			if err == nil {
 			if utils.WithoutSubdomains(baseUrl.Host) == utils.WithoutSubdomains(report.HiddenService) {
 				scan.ScanPage(report.HiddenService, utils.WithoutProtocol(imageUrl), report, CheckExif)
 				log.Printf("\t Found internal image %s\n", imageUrl)
@@ -77,20 +79,22 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 				}
 			}
 		}
+		}
 
 	log.Printf("\tScanning for CSS Fonts and Background Images\n")
 	utils.RemoveDuplicates(&cssLinks)
 	for _, cssUrl := range cssLinks {
 		log.Printf("\tScanning CSS file: %s\n", cssUrl)
 		_, cssContents, _ := scan.ScrapePage(report.HiddenService, utils.WithoutProtocol(cssUrl))
-		domains = append(domains, utils.ExtractDomains(string(cssContents))[0:]...)
+		domains = append(domains, utils.ExtractDomains(string(cssContents))[:]...)
 	}
 
 	log.Printf("\tScanning for Links\n")
 	domains = append(domains, utils.ExtractDomains(contents)...)
 	utils.RemoveDuplicates(&domains)
 	for _, domain := range domains {
-		baseUrl, _ := url.Parse(domain)
+		baseUrl, err := url.Parse(domain)
+		if err == nil {
 		if baseUrl.Host != "" && utils.WithoutSubdomains(baseUrl.Host) != utils.WithoutSubdomains(report.HiddenService) {
 			log.Printf("Found Related URL %s\n", domain)
 			// TODO: Lots of information here which needs to be processed.
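Incidentally, the CSS line above only changes the slice expression: for a slice s, the expressions s[0:], s[:], and s itself all denote the same slice, so the appended domains are identical either way; the slicing is redundant rather than wrong. A two-line illustration with hypothetical values:

package main

import "fmt"

func main() {
	s := []string{"example.onion", "example.com"}
	// All three expressions refer to the same underlying slice.
	fmt.Println(len(s[0:]), len(s[:]), len(s)) // 2 2 2
}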
@@ -104,6 +108,7 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 			report.AddInternalPage(baseUrl.Host)
 		}
 		}
+		}
 
 	log.Printf("\tScanning for Referenced Directories\n")
 	r := regexp.MustCompile("(src|href)=\"([^\"]*)\"")
@@ -10,6 +10,8 @@ import (
 func GetNetworkConnection(onionService string, port int, proxy string, timeout time.Duration) (net.Conn, error) {
 	portNumber := strconv.Itoa(port)
 	conn, err := socks.DialSocksProxy(socks.SOCKS5, proxy)("", onionService+":"+portNumber)
+	if err == nil {
 		conn.SetDeadline(time.Now().Add(timeout))
+	}
 	return conn, err
 }
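The guard matters because a failed SOCKS dial returns a nil connection, and calling SetDeadline on it would panic before the error ever reached the caller. A hedged usage sketch; the proxy address and onion name are placeholders, and the assumption that GetNetworkConnection lives in the utils package is mine, not stated in the diff:

package main

import (
	"log"
	"time"

	"github.com/s-rah/onionscan/utils" // assumption: GetNetworkConnection is exported from utils
)

func main() {
	// Placeholder hidden service, SSH port, and the default Tor SOCKS address.
	conn, err := utils.GetNetworkConnection("examplexxxxxxxxx.onion", 22, "127.0.0.1:9050", 30*time.Second)
	if err != nil {
		// With the fix, the error path is reached instead of panicking on a nil conn.
		log.Printf("connection failed: %v", err)
		return
	}
	defer conn.Close()
	// ... read the banner, etc.
}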