Improving Standard Page Scan

Authored by antoniaklja on 2016-04-25 11:29:27 +02:00, committed by Sarah Jamie Lewis
parent 44e6d5c955
commit 2ff00f6a6d
4 changed files with 80 additions and 21 deletions

.gitignore

@@ -1 +1,36 @@
# common files
*~
*.log
*.bak
*.tmp
*.swp
*.lock
# Eclipse
.classpath
.project
.settings
.metadata
.factorypath
# IDEA IntelliJ
*.ipr
*.iws
*.iml
.idea
.idea_modules
out
atlassian-ide-plugin.xml
# Windows
Thumbs.db
ehthumbs.db
Desktop.ini
# KDE directory preferences
.directory
# OS X
.DS_Store
.Trashes


@@ -36,10 +36,16 @@ func (hps *HTTPProtocolScanner) ScanProtocol(hiddenService string, proxyAddress
return
}
// Initial Attempt at Resolving Server Type
log.Printf("Attempting to Derive Server Type from Headers..\n")
report.ServerVersion = response.Header.Get("Server")
log.Printf("\tServer Version: %s\n", report.ServerVersion)
// Reading all http headers
log.Printf("HTTP response headers: %s\n", report.ServerVersion)
responseHeaders := response.Header
for key := range responseHeaders {
value := responseHeaders.Get(key)
report.AddResponseHeader(key, value)
log.Printf("\t%s : %s\n", key, value)
}
report.ServerVersion = responseHeaders.Get("Server")
// Apache mod-status Check
hps.ScanPage(hiddenService, "/server-status", report, scans.ApacheModStatus)
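The loop added above walks every key in response.Header and records a flattened "key : value" string on the report. Below is a minimal standalone sketch of the same idea, using a plain clearnet request instead of the scanner's Tor/SOCKS proxy setup; the helper name and URL are illustrative assumptions, not part of this commit.

package main

import (
	"fmt"
	"log"
	"net/http"
)

// collectHeaders mirrors the loop above: one flattened "key : value"
// string per response header. Header.Get returns only the first value
// for a key; multi-valued headers would need resp.Header[key] instead.
func collectHeaders(resp *http.Response) []string {
	var headers []string
	for key := range resp.Header {
		value := resp.Header.Get(key)
		headers = append(headers, fmt.Sprintf("%s : %s", key, value))
		log.Printf("\t%s : %s\n", key, value)
	}
	return headers
}

func main() {
	resp, err := http.Get("http://example.com/")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	for _, h := range collectHeaders(resp) {
		fmt.Println(h)
	}
}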


@@ -4,6 +4,7 @@ import (
"encoding/json"
"io/ioutil"
"github.com/s-rah/onionscan/utils"
"fmt"
)
type ExifTag struct {
@@ -18,29 +19,31 @@ type ExifImage struct {
type OnionScanReport struct {
WebDetected bool `json:"webDetected"`
SSHDetected bool `json:"sshDetected"`
RicochetDetected bool `json:"ricochetDetected"`
IRCDetected bool `json:"ircDetected"`
FTPDetected bool `json:"ftpDetected"`
SMTPDetected bool `json:"smtpDetected"`
BitcoinDetected bool `json:"bitcoinDetected"`
HiddenService string `json:"hiddenService"`
PageTitle string `json:"pageTitle"`
ResponseHeaders []string `json:"responseHeaders"`
ServerPoweredBy string `json:"serverPoweredBy"`
ServerVersion string `json:"serverVersion"`
FoundApacheModStatus bool `json:"foundApacheModStatus"`
RelatedOnionServices []string `json:"relatedOnionServices"`
RelatedClearnetDomains []string `json:"relatedOnionDomains"`
LinkedSites []string `json:"linkedSites"`
IP []string `json:"ipAddresses"`
OpenDirectories []string `json:"openDirectories"`
ExifImages []ExifImage `json:"exifImages"`
InterestingFiles []string `json:"interestingFiles"`
Hashes []string `json:"hashes"`
SSHKey string `json:"sshKey"`
Snapshot string `json:"snapshot"`
}
func LoadReportFromFile(filename string) (OnionScanReport, error) {
@@ -83,6 +86,11 @@ func (osr *OnionScanReport) AddLinkedSite(site string) {
utils.RemoveDuplicates(&osr.LinkedSites)
}
func (osr *OnionScanReport) AddResponseHeader(name string, value string) {
header := fmt.Sprintf("%s : %s ", name, value)
osr.ResponseHeaders = append(osr.ResponseHeaders, header)
}
func (osr *OnionScanReport) Serialize() (string, error) {
report, err := json.Marshal(osr)
if err != nil {
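For a sense of how the two new report fields come out of Serialize, here is a trimmed-down stand-in for OnionScanReport that keeps only pageTitle and responseHeaders and reuses the same Sprintf formatting as AddResponseHeader above. The struct name and sample values are illustrative, not the real report type.

package main

import (
	"encoding/json"
	"fmt"
)

// miniReport is a stand-in carrying only the fields added in this commit.
type miniReport struct {
	PageTitle       string   `json:"pageTitle"`
	ResponseHeaders []string `json:"responseHeaders"`
}

// AddResponseHeader flattens a header into "name : value ", matching the
// format string used in the commit (including its trailing space).
func (r *miniReport) AddResponseHeader(name string, value string) {
	r.ResponseHeaders = append(r.ResponseHeaders, fmt.Sprintf("%s : %s ", name, value))
}

func main() {
	r := &miniReport{PageTitle: "Example Onion Service"}
	r.AddResponseHeader("Server", "nginx")
	r.AddResponseHeader("Content-Type", "text/html")

	out, err := json.Marshal(r)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// Prints:
	// {"pageTitle":"Example Onion Service","responseHeaders":["Server : nginx ","Content-Type : text/html "]}
}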


@@ -20,6 +20,16 @@ func StandardPageScan(scan Scanner, page string, status int, contents string, re
report.Hashes = append(report.Hashes, hex.EncodeToString(hash[:]))
report.Snapshot = contents
// Try to resolve the page title if present
isTitlePresent := strings.Contains(contents, "<title>")
if isTitlePresent {
var startIndex = strings.Index(contents, "<title>")
var endIndex = strings.Index(contents, "</title>")
var pageTitle = contents[startIndex+len("<title>"):endIndex]
log.Printf("\tPage Title: %s\n", pageTitle)
report.PageTitle = pageTitle
}
domains := utils.ExtractDomains(contents)
for _, domain := range domains {
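The title extraction above slices between the first <title> and </title> occurrences. Here is a standalone sketch of the same approach with a guard for a missing closing tag, which the slice above assumes is present; the helper name and sample page are illustrative, not part of this commit.

package main

import (
	"fmt"
	"strings"
)

// extractPageTitle returns the text between <title> and </title>, or
// false if either tag is missing. Searching for the closing tag only
// after the opening one avoids a negative slice index when </title>
// is absent or appears first.
func extractPageTitle(contents string) (string, bool) {
	start := strings.Index(contents, "<title>")
	if start < 0 {
		return "", false
	}
	rest := contents[start+len("<title>"):]
	end := strings.Index(rest, "</title>")
	if end < 0 {
		return "", false
	}
	return strings.TrimSpace(rest[:end]), true
}

func main() {
	page := "<html><head><title> Example Onion Service </title></head></html>"
	if title, ok := extractPageTitle(page); ok {
		fmt.Printf("\tPage Title: %s\n", title)
	}
}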