commit
46092ee767
|
@ -18,10 +18,10 @@ This project began its life as a Bug Bounty World slack channel discussion. We
|
|||
- Simple and modular code base making it easy to contribute.
|
||||
- Fast And Powerful Bruteforcing Module
|
||||
- Powerful Permutation generation engine. (In Development)
|
||||
- Many Passive Data Sources (24 At Present)
|
||||
- Many Passive Data Sources (27 At Present)
|
||||
- Multiple Output formats
|
||||
|
||||
> Ask, Archive.is, Baidu, Bing, Censys, CertDB, CertSpotter, CrtSH, DnsDB, DNSDumpster, Entrust CT-Search, FindSubdomains, Hackertarget, IPv4Info, Netcraft, PassiveTotal, PTRArchive, Riddler, SecurityTrails, SiteDossier, ThreatCrowd, ThreatMiner, Virustotal, WaybackArchive
|
||||
> Ask, Archive.is, Baidu, Bing, Censys, CertDB, CertSpotter, CrtSH, DnsDB, DNSDumpster, Dogpile, Entrust CT-Search, Exalead, FindSubdomains, Hackertarget, IPv4Info, Netcraft, PassiveTotal, PTRArchive, Riddler, SecurityTrails, SiteDossier, ThreatCrowd, ThreatMiner, Virustotal, WaybackArchive, Yahoo
|
||||
|
||||
# Usage
|
||||
|
||||
|
|
|
@ -36,6 +36,8 @@ import (
|
|||
"github.com/Ice3man543/subfinder/libsubfinder/sources/crtsh"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/dnsdb"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/dnsdumpster"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/dogpile"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/exalead"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/findsubdomains"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/hackertarget"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/ipv4info"
|
||||
|
@ -49,6 +51,7 @@ import (
|
|||
"github.com/Ice3man543/subfinder/libsubfinder/sources/threatminer"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/virustotal"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/waybackarchive"
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/sources/yahoo"
|
||||
)
|
||||
|
||||
//DomainList contain the list of domains
|
||||
|
@ -81,6 +84,9 @@ type Source struct {
|
|||
Waybackarchive bool
|
||||
CertificateTransparency bool
|
||||
Ipv4Info bool
|
||||
Yahoo bool
|
||||
Dogpile bool
|
||||
Exalead bool
|
||||
}
|
||||
|
||||
func (s *Source) enableAll() {
|
||||
|
@ -108,6 +114,9 @@ func (s *Source) enableAll() {
|
|||
s.Waybackarchive = true
|
||||
s.CertificateTransparency = true
|
||||
s.Ipv4Info = true
|
||||
s.Yahoo = true
|
||||
s.Dogpile = true
|
||||
s.Exalead = true
|
||||
}
|
||||
|
||||
func (s *Source) enable(dataSources []string) {
|
||||
|
@ -161,6 +170,12 @@ func (s *Source) enable(dataSources []string) {
|
|||
s.CertificateTransparency = true
|
||||
case "ipv4info":
|
||||
s.Ipv4Info = true
|
||||
case "yahoo":
|
||||
s.Yahoo = true
|
||||
case "dogpile":
|
||||
s.Dogpile = true
|
||||
case "exalead":
|
||||
s.Exalead = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -216,6 +231,12 @@ func (s *Source) disable(dataSources []string) {
|
|||
s.CertificateTransparency = false
|
||||
case "ipv4info":
|
||||
s.Ipv4Info = false
|
||||
case "yahoo":
|
||||
s.Yahoo = false
|
||||
case "dogpile":
|
||||
s.Dogpile = false
|
||||
case "exalead":
|
||||
s.Dogpile = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -291,7 +312,16 @@ func (s *Source) printSummary() {
|
|||
fmt.Printf("\nRunning Source: %sCertificateTransparency%s", helper.Info, helper.Reset)
|
||||
}
|
||||
if s.Ipv4Info {
|
||||
fmt.Printf("\nRunning Source: %sIpv4Info%s\n", helper.Info, helper.Reset)
|
||||
fmt.Printf("\nRunning Source: %sIpv4Info%s", helper.Info, helper.Reset)
|
||||
}
|
||||
if s.Yahoo {
|
||||
fmt.Printf("\nRunning Source: %sYahoo%s", helper.Info, helper.Reset)
|
||||
}
|
||||
if s.Dogpile {
|
||||
fmt.Printf("\nRunning Source: %sDogpile%s", helper.Info, helper.Reset)
|
||||
}
|
||||
if s.Exalead {
|
||||
fmt.Printf("\nRunning Source: %sExalead%s\n", helper.Info, helper.Reset)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -412,6 +442,15 @@ func discover(state *helper.State, domain string, sourceConfig *Source) (subdoma
|
|||
if sourceConfig.Sitedossier == true {
|
||||
go sitedossier.Query(domain, state, ch)
|
||||
}
|
||||
if sourceConfig.Yahoo == true {
|
||||
go yahoo.Query(domain, state, ch)
|
||||
}
|
||||
if sourceConfig.Dogpile == true {
|
||||
go dogpile.Query(domain, state, ch)
|
||||
}
|
||||
if sourceConfig.Exalead == true {
|
||||
go exalead.Query(domain, state, ch)
|
||||
}
|
||||
|
||||
// Receive data from all goroutines running
|
||||
for i := 0; i < sourceConfig.nbrActive(); i++ {
|
||||
|
|
|
@ -66,7 +66,8 @@ type Setting struct {
|
|||
AskPages string // Ask search pages to check
|
||||
BaiduPages string // Ask search pages to check
|
||||
BingPages string // Ask search pages to check
|
||||
DogpilePages string
|
||||
DogpilePages string // Dogpile search pages to check
|
||||
YahooPages string // Yahoo search pages to check
|
||||
}
|
||||
|
||||
func InitializeSettings() (setting *Setting) {
|
||||
|
@ -78,6 +79,7 @@ func InitializeSettings() (setting *Setting) {
|
|||
settings.BaiduPages = "5"
|
||||
settings.BingPages = "50"
|
||||
settings.DogpilePages = "16"
|
||||
settings.YahooPages = "10"
|
||||
return &settings
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
//
|
||||
// Written By : @Mzack9999
|
||||
//
|
||||
// Distributed Under MIT License
|
||||
// Copyrights (C) 2018 Ice3man
|
||||
//
|
||||
|
||||
// A golang client for Dogpile Subdomain Discovery
|
||||
package dogpile
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/helper"
|
||||
)
|
||||
|
||||
// all subdomains found
|
||||
var subdomains []string
|
||||
|
||||
// Query function returns all subdomains found using the service.
|
||||
func Query(domain string, state *helper.State, ch chan helper.Result) {
|
||||
|
||||
var result helper.Result
|
||||
result.Subdomains = subdomains
|
||||
maxPages, _ := strconv.Atoi(state.CurrentSettings.DogpilePages)
|
||||
for currentPage := 0; currentPage <= maxPages; currentPage++ {
|
||||
url := "http://www.dogpile.com/search/web?q=" + domain + "&qsi=" + strconv.Itoa(currentPage*15+1)
|
||||
|
||||
resp, err := helper.GetHTTPResponse(url, state.Timeout)
|
||||
if err != nil {
|
||||
result.Error = err
|
||||
ch <- result
|
||||
return
|
||||
}
|
||||
|
||||
// Get the response body
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
result.Error = err
|
||||
ch <- result
|
||||
return
|
||||
}
|
||||
|
||||
reSub := regexp.MustCompile(`%.{4}`)
|
||||
src := reSub.ReplaceAllLiteralString(string(body), " ")
|
||||
|
||||
re := helper.SubdomainRegex(domain)
|
||||
match := re.FindAllString(src, -1)
|
||||
|
||||
for _, subdomain := range match {
|
||||
if state.Verbose == true {
|
||||
if state.Color == true {
|
||||
fmt.Printf("\n[%sDogpile%s] %s", helper.Red, helper.Reset, subdomain)
|
||||
} else {
|
||||
fmt.Printf("\n[Dogpile] %s", subdomain)
|
||||
}
|
||||
}
|
||||
|
||||
subdomains = append(subdomains, subdomain)
|
||||
}
|
||||
time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
|
||||
}
|
||||
|
||||
result.Subdomains = subdomains
|
||||
result.Error = nil
|
||||
ch <- result
|
||||
}
|
|
@ -0,0 +1,65 @@
|
|||
//
|
||||
// Written By : @Mzack9999
|
||||
//
|
||||
// Distributed Under MIT License
|
||||
// Copyrights (C) 2018 Ice3man
|
||||
//
|
||||
|
||||
// A golang client for Exalead Subdomain Discovery
|
||||
package exalead
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"regexp"
|
||||
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/helper"
|
||||
)
|
||||
|
||||
// all subdomains found
|
||||
var subdomains []string
|
||||
|
||||
// Query function returns all subdomains found using the service.
|
||||
func Query(domain string, state *helper.State, ch chan helper.Result) {
|
||||
|
||||
var result helper.Result
|
||||
result.Subdomains = subdomains
|
||||
|
||||
url := "http://www.exalead.com/search/web/results/?q=site:" + domain + "+-www?elements_per_page=50"
|
||||
resp, err := helper.GetHTTPResponse(url, state.Timeout)
|
||||
if err != nil {
|
||||
result.Error = err
|
||||
ch <- result
|
||||
return
|
||||
}
|
||||
|
||||
// Get the response body
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
result.Error = err
|
||||
ch <- result
|
||||
return
|
||||
}
|
||||
|
||||
reSub := regexp.MustCompile(`%.{2}`)
|
||||
src := reSub.ReplaceAllLiteralString(string(body), " ")
|
||||
|
||||
re := helper.SubdomainRegex(domain)
|
||||
match := re.FindAllString(src, -1)
|
||||
|
||||
for _, subdomain := range match {
|
||||
if state.Verbose == true {
|
||||
if state.Color == true {
|
||||
fmt.Printf("\n[%sExalead%s] %s", helper.Red, helper.Reset, subdomain)
|
||||
} else {
|
||||
fmt.Printf("\n[Exalead] %s", subdomain)
|
||||
}
|
||||
}
|
||||
|
||||
subdomains = append(subdomains, subdomain)
|
||||
}
|
||||
|
||||
result.Subdomains = subdomains
|
||||
result.Error = nil
|
||||
ch <- result
|
||||
}
|
|
@ -0,0 +1,71 @@
|
|||
//
|
||||
// Written By : @Mzack9999
|
||||
//
|
||||
// Distributed Under MIT License
|
||||
// Copyrights (C) 2018 Ice3man
|
||||
//
|
||||
|
||||
// A golang client for Yahoo Subdomain Discovery
|
||||
package yahoo
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/Ice3man543/subfinder/libsubfinder/helper"
|
||||
)
|
||||
|
||||
// all subdomains found
|
||||
var subdomains []string
|
||||
|
||||
// Query function returns all subdomains found using the service.
|
||||
func Query(domain string, state *helper.State, ch chan helper.Result) {
|
||||
|
||||
var result helper.Result
|
||||
result.Subdomains = subdomains
|
||||
maxPages, _ := strconv.Atoi(state.CurrentSettings.YahooPages)
|
||||
for currentPage := 0; currentPage <= maxPages; currentPage++ {
|
||||
url := "https://search.yahoo.com/search?p=site:" + domain + "&b=" + strconv.Itoa(currentPage*10) + "&pz=10&bct=0&xargs=0"
|
||||
resp, err := helper.GetHTTPResponse(url, state.Timeout)
|
||||
if err != nil {
|
||||
result.Error = err
|
||||
ch <- result
|
||||
return
|
||||
}
|
||||
|
||||
// Get the response body
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
result.Error = err
|
||||
ch <- result
|
||||
return
|
||||
}
|
||||
|
||||
reSub := regexp.MustCompile(`%.{2}`)
|
||||
src := reSub.ReplaceAllLiteralString(string(body), " ")
|
||||
|
||||
re := helper.SubdomainRegex(domain)
|
||||
match := re.FindAllString(src, -1)
|
||||
|
||||
for _, subdomain := range match {
|
||||
if state.Verbose == true {
|
||||
if state.Color == true {
|
||||
fmt.Printf("\n[%sYahoo%s] %s", helper.Red, helper.Reset, subdomain)
|
||||
} else {
|
||||
fmt.Printf("\n[Yahoo] %s", subdomain)
|
||||
}
|
||||
}
|
||||
|
||||
subdomains = append(subdomains, subdomain)
|
||||
}
|
||||
time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
|
||||
}
|
||||
|
||||
result.Subdomains = subdomains
|
||||
result.Error = nil
|
||||
ch <- result
|
||||
}
|
Loading…
Reference in New Issue