Merge pull request #28 from dballard/directory-scan
Pull referenced directories from the page scan and scan them, along with common directories, in the HTTP scanner.
Commit 93c56a44ed
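The merge touches three files. In the protocol package, the HTTP scanner's hard-coded directory probes move into a package-level CommonDirectories list, and the scan loop now also covers directories referenced by scanned pages, de-duplicated with utils.RemoveDuplicates. In the report package, OnionScanReport gains a PageReferencedDirectories field and an AddPageReferencedDirectory helper so the page scan can hand those paths to the HTTP scanner. In the scans package, StandardPageScan harvests candidate directories from src and href attributes. The branch also appears to include a gofmt pass, which accounts for the import reordering, struct-field realignment, and the whitespace-only changes that render below as paired removals and additions.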
@@ -1,19 +1,28 @@
 package protocol
 
 import (
-	"github.com/s-rah/onionscan/scans"
 	"github.com/s-rah/onionscan/report"
-	"net/http"
-	"io/ioutil"
+	"github.com/s-rah/onionscan/scans"
+	"github.com/s-rah/onionscan/utils"
 	"h12.me/socks"
+	"io/ioutil"
 	"log"
+	"net/http"
 )
 
 type HTTPProtocolScanner struct {
 	Client *http.Client
 }
 
-func (hps * HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress string, report *report.OnionScanReport) {
+var (
+	CommonDirectories = []string{"/style", "/styles", "/css", "/uploads", "/images", "/img", "/static",
+		// Lots of Wordpress installs which don't lock down directory listings
+		"/wp-content/uploads",
+		// Common with torshops created onions
+		"/products", "/products/cat"}
+)
+
+func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress string, report *report.OnionScanReport) {
 
 	// HTTP
 	log.Printf("Checking %s http(80)\n", hiddenService)
@@ -45,25 +54,17 @@ func (hps * HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress
 		hps.ScanPage(hiddenService, "/server-status", report, scans.ApacheModStatus)
 		hps.ScanPage(hiddenService, "/", report, scans.StandardPageScan)
 
-		hps.ScanPage(hiddenService, "/style", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/styles", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/css", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/uploads", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/images", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/img", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/static", report, scans.CheckDirectoryListing)
-
-		// Lots of Wordpress installs which don't lock down directory listings
-		hps.ScanPage(hiddenService, "/wp-content/uploads", report, scans.CheckDirectoryListing)
-
-		// Common with torshops created onions
-		hps.ScanPage(hiddenService, "/products", report, scans.CheckDirectoryListing)
-		hps.ScanPage(hiddenService, "/products/cat", report, scans.CheckDirectoryListing)
+		log.Printf("\tScanning Common and Referenced Directories\n")
+		directories := append(CommonDirectories, report.PageReferencedDirectories...)
+		utils.RemoveDuplicates(&directories)
+		for _, directory := range directories {
+			hps.ScanPage(hiddenService, directory, report, scans.CheckDirectoryListing)
+		}
 	}
 	log.Printf("\n")
 }
 
-func (hps * HTTPProtocolScanner) ScanPage(hiddenService string, page string, report *report.OnionScanReport, f func(scans.Scanner, string, int, string, *report.OnionScanReport)) {
+func (hps *HTTPProtocolScanner) ScanPage(hiddenService string, page string, report *report.OnionScanReport, f func(scans.Scanner, string, int, string, *report.OnionScanReport)) {
 	response, err := hps.Client.Get("http://" + hiddenService + page)
 	if err != nil {
 		log.Printf("Error connecting to %s%s %s\n", hiddenService, page, err)
@@ -2,58 +2,58 @@ package report
 
 import (
 	"encoding/json"
+	"github.com/s-rah/onionscan/utils"
 	"io/ioutil"
 )
 
 type ExifTag struct {
-	Name string `json:"name"`
-	Value string`json:"value"`
+	Name  string `json:"name"`
+	Value string `json:"value"`
 }
 
 type ExifImage struct {
-	Location string `json:"location"`
-	ExifTags []ExifTag `json:"exifTags"`
+	Location string    `json:"location"`
+	ExifTags []ExifTag `json:"exifTags"`
 }
 
 type OnionScanReport struct {
-	WebDetected bool `json:"webDetected"`
-	SSHDetected bool `json:"sshDetected"`
-	RicochetDetected bool `json:"ricochetDetected"`
-	IRCDetected bool `json:"ircDetected"`
-	FTPDetected bool `json:"ftpDetected"`
-	SMTPDetected bool `json:"smtpDetected"`
-	BitcoinDetected bool `json:"bitcoinDetected"`
+	WebDetected      bool `json:"webDetected"`
+	SSHDetected      bool `json:"sshDetected"`
+	RicochetDetected bool `json:"ricochetDetected"`
+	IRCDetected      bool `json:"ircDetected"`
+	FTPDetected      bool `json:"ftpDetected"`
+	SMTPDetected     bool `json:"smtpDetected"`
+	BitcoinDetected  bool `json:"bitcoinDetected"`
 
-	HiddenService string `json:"hiddenService"`
-	ServerPoweredBy string `json:"serverPoweredBy"`
-	ServerVersion string `json:"serverVersion"`
-	FoundApacheModStatus bool `json:"foundApacheModStatus"`
-	RelatedOnionServices []string `json:"relatedOnionServices"`
-	RelatedClearnetDomains []string `json:"relatedOnionDomains"`
-	LinkedSites []string `json:"linkedSites"`
-	IP []string `json:"ipAddresses"`
-	OpenDirectories []string `json:"openDirectories"`
-	ExifImages []ExifImage `json:"exifImages"`
-	InterestingFiles []string `json:"interestingFiles"`
+	HiddenService             string      `json:"hiddenService"`
+	ServerPoweredBy           string      `json:"serverPoweredBy"`
+	ServerVersion             string      `json:"serverVersion"`
+	FoundApacheModStatus      bool        `json:"foundApacheModStatus"`
+	RelatedOnionServices      []string    `json:"relatedOnionServices"`
+	RelatedClearnetDomains    []string    `json:"relatedOnionDomains"`
+	LinkedSites               []string    `json:"linkedSites"`
+	IP                        []string    `json:"ipAddresses"`
+	OpenDirectories           []string    `json:"openDirectories"`
+	ExifImages                []ExifImage `json:"exifImages"`
+	InterestingFiles          []string    `json:"interestingFiles"`
+	PageReferencedDirectories []string    `json:"pageReferencedDirectories"`
 
-	Hashes []string `json:"hashes"`
-	SSHKey string `json:"sshKey"`
-	Snapshot string `json:"snapshot"`
+	Hashes   []string `json:"hashes"`
+	SSHKey   string   `json:"sshKey"`
+	Snapshot string   `json:"snapshot"`
 }
 
 func LoadReportFromFile(filename string) (OnionScanReport, error) {
 	dat, err := ioutil.ReadFile(filename)
 	if err != nil {
 		return OnionScanReport{}, err
 	}
 	res := OnionScanReport{}
 	err = json.Unmarshal(dat, &res)
 	return res, err
 }
 
 func NewOnionScanReport(hiddenService string) *OnionScanReport {
 	return &OnionScanReport{HiddenService: hiddenService}
 }
@@ -63,15 +63,15 @@ func (osr *OnionScanReport) AddOpenDirectory(dir string) {
 }
 
 func (osr *OnionScanReport) AddRelatedOnionService(service string) {
-	osr.RelatedOnionServices = append(osr.RelatedOnionServices, service)
+	osr.RelatedOnionServices = append(osr.RelatedOnionServices, service)
 }
 
 func (osr *OnionScanReport) AddRelatedClearnetDomain(domain string) {
-	osr.RelatedClearnetDomains = append(osr.RelatedClearnetDomains, domain)
+	osr.RelatedClearnetDomains = append(osr.RelatedClearnetDomains, domain)
 }
 
 func (osr *OnionScanReport) AddInterestingFile(file string) {
-	osr.InterestingFiles = append(osr.InterestingFiles, file)
+	osr.InterestingFiles = append(osr.InterestingFiles, file)
 }
 
 func (osr *OnionScanReport) AddIPAddress(ip string) {
@@ -84,17 +84,21 @@ func (osr *OnionScanReport) AddLinkedSite(site string) {
 }
 
 func (osr *OnionScanReport) Serialize() (string, error) {
-	report,err := json.Marshal(osr)
+	report, err := json.Marshal(osr)
 	if err != nil {
 		return "", err
 	}
 	return string(report), nil
 }
 
 func (osr *OnionScanReport) AddExifImage(location string) {
 	osr.ExifImages = append(osr.ExifImages, ExifImage{location, []ExifTag{}})
 }
 
 func (osr *OnionScanReport) AddExifTag(name string, value string) {
-	osr.ExifImages[len(osr.ExifImages)-1].ExifTags = append(osr.ExifImages[len(osr.ExifImages)-1].ExifTags , ExifTag{name, value})
+	osr.ExifImages[len(osr.ExifImages)-1].ExifTags = append(osr.ExifImages[len(osr.ExifImages)-1].ExifTags, ExifTag{name, value})
 }
+
+func (osr *OnionScanReport) AddPageReferencedDirectory(directory string) {
+	osr.PageReferencedDirectories = append(osr.PageReferencedDirectories, directory)
+}
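Taken together, the report changes let a scan record referenced directories and have them serialized with the rest of the report. A hypothetical caller is sketched below; the onion address and path are made up, and only functions shown in this diff are used.

	package main

	import (
		"fmt"

		"github.com/s-rah/onionscan/report"
	)

	func main() {
		osr := report.NewOnionScanReport("example.onion")
		osr.AddPageReferencedDirectory("/images/gallery")

		// Serialize now emits the new field alongside the existing ones:
		// ..."pageReferencedDirectories":["/images/gallery"]...
		out, err := osr.Serialize()
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println(out)
	}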
@@ -1,41 +1,41 @@
 package scans
 
 import (
-	"github.com/s-rah/onionscan/report"
-	"github.com/s-rah/onionscan/utils"
-	"net/url"
-	"log"
-	"regexp"
-	"strings"
+	"crypto/sha1"
+	"encoding/hex"
+	"github.com/s-rah/onionscan/report"
+	"github.com/s-rah/onionscan/utils"
+	"log"
+	"net/url"
+	"regexp"
+	"strings"
 )
 
 func StandardPageScan(scan Scanner, page string, status int, contents string, report *report.OnionScanReport) {
 	log.Printf("Scanning %s%s\n", report.HiddenService, page)
 	if status == 200 {
 		log.Printf("\tPage %s%s is Accessible\n", report.HiddenService, page)
 
+		hash := sha1.Sum([]byte(contents))
+		report.Hashes = append(report.Hashes, hex.EncodeToString(hash[:]))
+		report.Snapshot = contents
+
 		domains := utils.ExtractDomains(contents)
 
-		for _,domain := range domains {
+		for _, domain := range domains {
 			if !strings.HasPrefix(domain, "http://"+report.HiddenService) {
 				log.Printf("Found Related URL %s\n", domain)
 				// TODO: Lots of information here which needs to be processed.
 				// * Links to standard sites - google / bitpay etc.
 				// * Links to other onion sites
 				// * Links to obscure clearnet sites.
-				baseUrl,_ := url.Parse(domain)
+				baseUrl, _ := url.Parse(domain)
 				report.AddLinkedSite(baseUrl.Host)
 			} else {
 				// * Process Internal links
 				// * Process FQDN internal links (unlikly)
 				log.Printf("Found Internal URL %s\n", domain)
 			}
 		}
 
 		log.Printf("\tScanning for Images\n")
 		r := regexp.MustCompile("src=\"(" + "http://" + report.HiddenService + "/)?((.*?\\.jpg)|(.*?\\.png)|(.*?\\.jpeg)|(.*?\\.gif))\"")
@@ -44,11 +44,25 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
 			log.Printf("\t Found image %s\n", image[2])
 			scan.ScanPage(report.HiddenService, "/"+image[2], report, CheckExif)
 		}
+
+		log.Printf("\tScanning for Referenced Directories\n")
+		r = regexp.MustCompile("(src|href)=\"([^\"]*)\"")
+		foundPaths := r.FindAllStringSubmatch(string(contents), -1)
+		for _, regexpResults := range foundPaths {
+			path := regexpResults[2]
+			if strings.HasPrefix(path, "http") {
+				continue
+			}
+
+			term := strings.LastIndex(path, "/")
+			if term > 0 {
+				log.Printf("\t Found Referenced Directory %s\n", path[:term])
+				report.AddPageReferencedDirectory(path[:term])
+			}
+		}
 	} else if status == 403 {
 		log.Printf("\tPage %s%s is Forbidden\n", report.HiddenService, page)
 	} else if status == 404 {
 		log.Printf("\tPage %s%s is Does Not Exist\n", report.HiddenService, page)
 	}
 }
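For clarity, here is the extraction logic from the hunk above as a standalone program, run against a made-up page. Only relative URLs with a directory component survive the two filters: absolute URLs are skipped (the domain scan handles those), and bare filenames with no "/" beyond position zero are ignored.

	package main

	import (
		"fmt"
		"regexp"
		"strings"
	)

	func main() {
		contents := `<link href="/css/main.css"><img src="/images/gallery/photo.jpg">
	<a href="http://example.com/external">x</a>`

		// Same regexp as the diff: capture the quoted value of src/href attributes.
		r := regexp.MustCompile("(src|href)=\"([^\"]*)\"")
		for _, m := range r.FindAllStringSubmatch(contents, -1) {
			path := m[2]
			if strings.HasPrefix(path, "http") {
				continue // absolute URLs are handled by the domain scan instead
			}
			// Trim the filename, keeping everything up to the last "/".
			if term := strings.LastIndex(path, "/"); term > 0 {
				fmt.Println(path[:term]) // prints /css, then /images/gallery
			}
		}
	}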