Merge pull request #28 from dballard/directory-scan

Pull referenced directories from page scan and scan them along with common directories in http scanner
This commit is contained in:
Sarah Jamie Lewis 2016-04-26 17:34:00 -07:00
commit 93c56a44ed
3 changed files with 94 additions and 75 deletions

View File

@ -1,18 +1,27 @@
package protocol package protocol
import ( import (
"github.com/s-rah/onionscan/scans"
"github.com/s-rah/onionscan/report" "github.com/s-rah/onionscan/report"
"net/http" "github.com/s-rah/onionscan/scans"
"io/ioutil" "github.com/s-rah/onionscan/utils"
"h12.me/socks" "h12.me/socks"
"io/ioutil"
"log" "log"
"net/http"
) )
type HTTPProtocolScanner struct { type HTTPProtocolScanner struct {
Client *http.Client Client *http.Client
} }
var (
CommonDirectories = []string{"/style", "/styles", "/css", "/uploads", "/images", "/img", "/static",
// Lots of Wordpress installs which don't lock down directory listings
"/wp-content/uploads",
// Common with torshops created onions
"/products", "/products/cat"}
)
func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress string, report *report.OnionScanReport) { func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress string, report *report.OnionScanReport) {
// HTTP // HTTP
@ -45,20 +54,12 @@ func (hps * HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress
hps.ScanPage(hiddenService, "/server-status", report, scans.ApacheModStatus) hps.ScanPage(hiddenService, "/server-status", report, scans.ApacheModStatus)
hps.ScanPage(hiddenService, "/", report, scans.StandardPageScan) hps.ScanPage(hiddenService, "/", report, scans.StandardPageScan)
hps.ScanPage(hiddenService, "/style", report, scans.CheckDirectoryListing) log.Printf("\tScanning Common and Referenced Directories\n")
hps.ScanPage(hiddenService, "/styles", report, scans.CheckDirectoryListing) directories := append(CommonDirectories, report.PageReferencedDirectories...)
hps.ScanPage(hiddenService, "/css", report, scans.CheckDirectoryListing) utils.RemoveDuplicates(&directories)
hps.ScanPage(hiddenService, "/uploads", report, scans.CheckDirectoryListing) for _, directory := range directories {
hps.ScanPage(hiddenService, "/images", report, scans.CheckDirectoryListing) hps.ScanPage(hiddenService, directory, report, scans.CheckDirectoryListing)
hps.ScanPage(hiddenService, "/img", report, scans.CheckDirectoryListing) }
hps.ScanPage(hiddenService, "/static", report, scans.CheckDirectoryListing)
// Lots of Wordpress installs which don't lock down directory listings
hps.ScanPage(hiddenService, "/wp-content/uploads", report, scans.CheckDirectoryListing)
// Common with torshops created onions
hps.ScanPage(hiddenService, "/products", report, scans.CheckDirectoryListing)
hps.ScanPage(hiddenService, "/products/cat", report, scans.CheckDirectoryListing)
} }
log.Printf("\n") log.Printf("\n")
} }

View File

@ -2,8 +2,8 @@ package report
import ( import (
"encoding/json" "encoding/json"
"io/ioutil"
"github.com/s-rah/onionscan/utils" "github.com/s-rah/onionscan/utils"
"io/ioutil"
) )
type ExifTag struct { type ExifTag struct {
@ -17,7 +17,6 @@ type ExifImage struct {
} }
type OnionScanReport struct { type OnionScanReport struct {
WebDetected bool `json:"webDetected"` WebDetected bool `json:"webDetected"`
SSHDetected bool `json:"sshDetected"` SSHDetected bool `json:"sshDetected"`
RicochetDetected bool `json:"ricochetDetected"` RicochetDetected bool `json:"ricochetDetected"`
@ -38,6 +37,8 @@ type OnionScanReport struct {
OpenDirectories []string `json:"openDirectories"` OpenDirectories []string `json:"openDirectories"`
ExifImages []ExifImage `json:"exifImages"` ExifImages []ExifImage `json:"exifImages"`
InterestingFiles []string `json:"interestingFiles"` InterestingFiles []string `json:"interestingFiles"`
PageReferencedDirectories []string `json:"pageReferencedDirectories"`
Hashes []string `json:"hashes"` Hashes []string `json:"hashes"`
SSHKey string `json:"sshKey"` SSHKey string `json:"sshKey"`
Snapshot string `json:"snapshot"` Snapshot string `json:"snapshot"`
@ -53,7 +54,6 @@ func LoadReportFromFile(filename string) (OnionScanReport, error) {
return res, err return res, err
} }
func NewOnionScanReport(hiddenService string) *OnionScanReport { func NewOnionScanReport(hiddenService string) *OnionScanReport {
return &OnionScanReport{HiddenService: hiddenService} return &OnionScanReport{HiddenService: hiddenService}
} }
@ -98,3 +98,7 @@ func (osr *OnionScanReport) AddExifImage(location string) {
func (osr *OnionScanReport) AddExifTag(name string, value string) { func (osr *OnionScanReport) AddExifTag(name string, value string) {
osr.ExifImages[len(osr.ExifImages)-1].ExifTags = append(osr.ExifImages[len(osr.ExifImages)-1].ExifTags, ExifTag{name, value}) osr.ExifImages[len(osr.ExifImages)-1].ExifTags = append(osr.ExifImages[len(osr.ExifImages)-1].ExifTags, ExifTag{name, value})
} }
func (osr *OnionScanReport) AddPageReferencedDirectory(directory string) {
osr.PageReferencedDirectories = append(osr.PageReferencedDirectories, directory)
}

View File

@ -1,14 +1,14 @@
package scans package scans
import ( import (
"github.com/s-rah/onionscan/report"
"github.com/s-rah/onionscan/utils"
"net/url"
"log"
"regexp"
"strings"
"crypto/sha1" "crypto/sha1"
"encoding/hex" "encoding/hex"
"github.com/s-rah/onionscan/report"
"github.com/s-rah/onionscan/utils"
"log"
"net/url"
"regexp"
"strings"
) )
func StandardPageScan(scan Scanner, page string, status int, contents string, report *report.OnionScanReport) { func StandardPageScan(scan Scanner, page string, status int, contents string, report *report.OnionScanReport) {
@ -32,7 +32,7 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
baseUrl, _ := url.Parse(domain) baseUrl, _ := url.Parse(domain)
report.AddLinkedSite(baseUrl.Host) report.AddLinkedSite(baseUrl.Host)
} else { } else {
// * Process Internal links // * Process FQDN internal links (unlikely)
log.Printf("Found Internal URL %s\n", domain) log.Printf("Found Internal URL %s\n", domain)
} }
} }
@ -44,11 +44,25 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
log.Printf("\t Found image %s\n", image[2]) log.Printf("\t Found image %s\n", image[2])
scan.ScanPage(report.HiddenService, "/"+image[2], report, CheckExif) scan.ScanPage(report.HiddenService, "/"+image[2], report, CheckExif)
} }
log.Printf("\tScanning for Referenced Directories\n")
r = regexp.MustCompile("(src|href)=\"([^\"]*)\"")
foundPaths := r.FindAllStringSubmatch(string(contents), -1)
for _, regexpResults := range foundPaths {
path := regexpResults[2]
if strings.HasPrefix(path, "http") {
continue
}
term := strings.LastIndex(path, "/")
if term > 0 {
log.Printf("\t Found Referenced Directory %s\n", path[:term])
report.AddPageReferencedDirectory(path[:term])
}
}
} else if status == 403 { } else if status == 403 {
log.Printf("\tPage %s%s is Forbidden\n", report.HiddenService, page) log.Printf("\tPage %s%s is Forbidden\n", report.HiddenService, page)
} else if status == 404 { } else if status == 404 {
log.Printf("\tPage %s%s Does Not Exist\n", report.HiddenService, page) log.Printf("\tPage %s%s Does Not Exist\n", report.HiddenService, page)
} }
} }