parent d2b81e5cbf
commit 2ca7d5b145
@@ -36,6 +36,8 @@ import (
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/crtsh"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/dnsdb"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/dnsdumpster"
+	"github.com/Ice3man543/subfinder/libsubfinder/sources/dogpile"
+	"github.com/Ice3man543/subfinder/libsubfinder/sources/exalead"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/findsubdomains"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/hackertarget"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/ipv4info"
@@ -49,6 +51,7 @@ import (
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/threatminer"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/virustotal"
 	"github.com/Ice3man543/subfinder/libsubfinder/sources/waybackarchive"
+	"github.com/Ice3man543/subfinder/libsubfinder/sources/yahoo"
 )

 //DomainList contains the list of domains
@@ -81,6 +84,9 @@ type Source struct {
 	Waybackarchive          bool
 	CertificateTransparency bool
 	Ipv4Info                bool
+	Yahoo                   bool
+	Dogpile                 bool
+	Exalead                 bool
 }

 func (s *Source) enableAll() {
@@ -108,6 +114,9 @@ func (s *Source) enableAll() {
 	s.Waybackarchive = true
 	s.CertificateTransparency = true
 	s.Ipv4Info = true
+	s.Yahoo = true
+	s.Dogpile = true
+	s.Exalead = true
 }

 func (s *Source) enable(dataSources []string) {
@@ -161,6 +170,12 @@ func (s *Source) enable(dataSources []string) {
 			s.CertificateTransparency = true
 		case "ipv4info":
 			s.Ipv4Info = true
+		case "yahoo":
+			s.Yahoo = true
+		case "dogpile":
+			s.Dogpile = true
+		case "exalead":
+			s.Exalead = true
 		}
 	}
 }
@@ -216,6 +231,12 @@ func (s *Source) disable(dataSources []string) {
 			s.CertificateTransparency = false
 		case "ipv4info":
 			s.Ipv4Info = false
+		case "yahoo":
+			s.Yahoo = false
+		case "dogpile":
+			s.Dogpile = false
+		case "exalead":
+			s.Exalead = false
 		}
 	}
 }
@@ -293,6 +314,15 @@ func (s *Source) printSummary() {
 	if s.Ipv4Info {
 		fmt.Printf("\nRunning Source: %sIpv4Info%s\n", helper.Info, helper.Reset)
 	}
+	if s.Yahoo {
+		fmt.Printf("\nRunning Source: %sYahoo%s\n", helper.Info, helper.Reset)
+	}
+	if s.Dogpile {
+		fmt.Printf("\nRunning Source: %sDogpile%s\n", helper.Info, helper.Reset)
+	}
+	if s.Exalead {
+		fmt.Printf("\nRunning Source: %sExalead%s\n", helper.Info, helper.Reset)
+	}
 }

 //nbrActive uses reflection to get the number of active searches
@@ -412,6 +442,15 @@ func discover(state *helper.State, domain string, sourceConfig *Source) (subdoma
 	if sourceConfig.Sitedossier == true {
 		go sitedossier.Query(domain, state, ch)
 	}
+	if sourceConfig.Yahoo == true {
+		go yahoo.Query(domain, state, ch)
+	}
+	if sourceConfig.Dogpile == true {
+		go dogpile.Query(domain, state, ch)
+	}
+	if sourceConfig.Exalead == true {
+		go exalead.Query(domain, state, ch)
+	}

 	// Receive data from all goroutines running
 	for i := 0; i < sourceConfig.nbrActive(); i++ {
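The discover hunk above starts one goroutine per enabled source and then receives exactly nbrActive() results from the shared channel ch. A minimal, self-contained sketch of that fan-in pattern, using a simplified Result type and a stub query function rather than the project's actual helper package:

package main

import "fmt"

// Result is a simplified stand-in for helper.Result.
type Result struct {
	Subdomains []string
	Error      error
}

// stubQuery stands in for a source's Query function (e.g. yahoo.Query):
// it does its work and sends exactly one Result on the shared channel.
func stubQuery(name string, ch chan Result) {
	ch <- Result{Subdomains: []string{name + ".example.com"}}
}

func main() {
	ch := make(chan Result)
	sources := []string{"yahoo", "dogpile", "exalead"}

	// Launch one goroutine per enabled source, as discover() does.
	for _, s := range sources {
		go stubQuery(s, ch)
	}

	// Receive one Result per launched source, mirroring the
	// sourceConfig.nbrActive() receive loop above.
	var all []string
	for i := 0; i < len(sources); i++ {
		r := <-ch
		if r.Error != nil {
			continue
		}
		all = append(all, r.Subdomains...)
	}
	fmt.Println(all)
}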
@@ -66,7 +66,8 @@ type Setting struct {
 	AskPages     string // Ask search pages to check
 	BaiduPages   string // Baidu search pages to check
 	BingPages    string // Bing search pages to check
-	DogpilePages string
+	DogpilePages string // Dogpile search pages to check
+	YahooPages   string // Yahoo search pages to check
 }

 func InitializeSettings() (setting *Setting) {
@@ -78,6 +79,7 @@ func InitializeSettings() (setting *Setting) {
 	settings.BaiduPages = "5"
 	settings.BingPages = "50"
 	settings.DogpilePages = "16"
+	settings.YahooPages = "10"
 	return &settings
 }
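These page-count settings are parsed with strconv.Atoi in the new sources below and turned into result offsets: Dogpile's qsi query parameter advances in steps of 15 starting at 1, and Yahoo's b parameter in steps of 10 starting at 0. A minimal standalone sketch of that arithmetic, using literal values in place of the project's Setting struct and only printing the offsets each page would request:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Literal stand-ins for settings.DogpilePages = "16" and settings.YahooPages = "10".
	dogpilePages, _ := strconv.Atoi("16")
	yahooPages, _ := strconv.Atoi("10")

	// Dogpile: 15 results per page; qsi is the index of the first result, starting at 1.
	for page := 0; page <= dogpilePages; page++ {
		fmt.Println("dogpile &qsi=", page*15+1)
	}

	// Yahoo: 10 results per page; b is the offset of the first result, starting at 0.
	for page := 0; page <= yahooPages; page++ {
		fmt.Println("yahoo &b=", page*10)
	}
}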
@@ -0,0 +1,73 @@
+//
+// Written By : @Mzack9999
+//
+// Distributed Under MIT License
+// Copyrights (C) 2018 Ice3man
+//
+
+// A golang client for Dogpile Subdomain Discovery
+package dogpile
+
+import (
+	"fmt"
+	"io/ioutil"
+	"log"
+	"math/rand"
+	"regexp"
+	"strconv"
+	"time"
+
+	"github.com/Ice3man543/subfinder/libsubfinder/helper"
+)
+
+// all subdomains found
+var subdomains []string
+
+// Query function returns all subdomains found using the service.
+func Query(domain string, state *helper.State, ch chan helper.Result) {
+
+	var result helper.Result
+	result.Subdomains = subdomains
+
+	maxPages, _ := strconv.Atoi(state.CurrentSettings.DogpilePages)
+	for currentPage := 0; currentPage <= maxPages; currentPage++ {
+		url := "http://www.dogpile.com/search/web?q=" + domain + "&qsi=" + strconv.Itoa(currentPage*15+1)
+		log.Print(url)
+		resp, err := helper.GetHTTPResponse(url, state.Timeout)
+		if err != nil {
+			result.Error = err
+			ch <- result
+			return
+		}
+
+		// Get the response body
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			result.Error = err
+			ch <- result
+			return
+		}
+
+		reSub := regexp.MustCompile(`%.{4}`)
+		src := reSub.ReplaceAllLiteralString(string(body), " ")
+
+		re := helper.SubdomainRegex(domain)
+		match := re.FindAllString(src, -1)
+
+		for _, subdomain := range match {
+			if state.Verbose == true {
+				if state.Color == true {
+					fmt.Printf("\n[%sDogpile%s] %s", helper.Red, helper.Reset, subdomain)
+				} else {
+					fmt.Printf("\n[Dogpile] %s", subdomain)
+				}
+			}
+
+			subdomains = append(subdomains, subdomain)
+		}
+		time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
+	}
+
+	result.Subdomains = subdomains
+	result.Error = nil
+	ch <- result
+}
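In the Dogpile client above, the `%.{4}` replacement blanks out each percent sign together with the four characters that follow it, breaking up URL-encoded runs in result links before the subdomain regex scans the page. A standalone sketch of that cleanup-then-match step; the domain pattern here is a simplified approximation, not the project's actual helper.SubdomainRegex:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	domain := "example.com"

	// A fragment like what a Dogpile results page might contain; %2F and
	// similar sequences are URL-encoded characters inside redirect links.
	body := `<a href="//r.dogpile.com/redir?u=blog.example.com%2Fsome%2Fpath">blog.example.com - Blog</a>`

	// Replace each percent sign plus the next four characters with a space,
	// mirroring regexp.MustCompile(`%.{4}`) in the client above.
	reSub := regexp.MustCompile(`%.{4}`)
	src := reSub.ReplaceAllLiteralString(body, " ")

	// Simplified approximation of helper.SubdomainRegex(domain): one or more
	// hostname labels followed by the target domain.
	re := regexp.MustCompile(`[a-zA-Z0-9*_.-]+\.` + regexp.QuoteMeta(domain))
	fmt.Println(re.FindAllString(src, -1)) // [blog.example.com blog.example.com]
}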
@@ -0,0 +1,65 @@
+//
+// Written By : @Mzack9999
+//
+// Distributed Under MIT License
+// Copyrights (C) 2018 Ice3man
+//
+
+// A golang client for Exalead Subdomain Discovery
+package exalead
+
+import (
+	"fmt"
+	"io/ioutil"
+	"regexp"
+
+	"github.com/Ice3man543/subfinder/libsubfinder/helper"
+)
+
+// all subdomains found
+var subdomains []string
+
+// Query function returns all subdomains found using the service.
+func Query(domain string, state *helper.State, ch chan helper.Result) {
+
+	var result helper.Result
+	result.Subdomains = subdomains
+
+	url := "http://www.exalead.com/search/web/results/?q=site:" + domain + "+-www?elements_per_page=50"
+	resp, err := helper.GetHTTPResponse(url, state.Timeout)
+	if err != nil {
+		result.Error = err
+		ch <- result
+		return
+	}
+
+	// Get the response body
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		result.Error = err
+		ch <- result
+		return
+	}
+
+	reSub := regexp.MustCompile(`%.{2}`)
+	src := reSub.ReplaceAllLiteralString(string(body), " ")
+
+	re := helper.SubdomainRegex(domain)
+	match := re.FindAllString(src, -1)
+
+	for _, subdomain := range match {
+		if state.Verbose == true {
+			if state.Color == true {
+				fmt.Printf("\n[%sExalead%s] %s", helper.Red, helper.Reset, subdomain)
+			} else {
+				fmt.Printf("\n[Exalead] %s", subdomain)
+			}
+		}
+
+		subdomains = append(subdomains, subdomain)
+	}
+
+	result.Subdomains = subdomains
+	result.Error = nil
+	ch <- result
+}
@@ -0,0 +1,71 @@
+//
+// Written By : @Mzack9999
+//
+// Distributed Under MIT License
+// Copyrights (C) 2018 Ice3man
+//
+
+// A golang client for Yahoo Subdomain Discovery
+package yahoo
+
+import (
+	"fmt"
+	"io/ioutil"
+	"math/rand"
+	"regexp"
+	"strconv"
+	"time"
+
+	"github.com/Ice3man543/subfinder/libsubfinder/helper"
+)
+
+// all subdomains found
+var subdomains []string
+
+// Query function returns all subdomains found using the service.
+func Query(domain string, state *helper.State, ch chan helper.Result) {
+
+	var result helper.Result
+	result.Subdomains = subdomains
+
+	maxPages, _ := strconv.Atoi(state.CurrentSettings.YahooPages)
+	for currentPage := 0; currentPage <= maxPages; currentPage++ {
+		url := "https://search.yahoo.com/search?p=site:" + domain + "&b=" + strconv.Itoa(currentPage*10) + "&pz=10&bct=0&xargs=0"
+		resp, err := helper.GetHTTPResponse(url, state.Timeout)
+		if err != nil {
+			result.Error = err
+			ch <- result
+			return
+		}
+
+		// Get the response body
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			result.Error = err
+			ch <- result
+			return
+		}
+
+		reSub := regexp.MustCompile(`%.{2}`)
+		src := reSub.ReplaceAllLiteralString(string(body), " ")
+
+		re := helper.SubdomainRegex(domain)
+		match := re.FindAllString(src, -1)
+
+		for _, subdomain := range match {
+			if state.Verbose == true {
+				if state.Color == true {
+					fmt.Printf("\n[%sYahoo%s] %s", helper.Red, helper.Reset, subdomain)
+				} else {
+					fmt.Printf("\n[Yahoo] %s", subdomain)
+				}
+			}
+
+			subdomains = append(subdomains, subdomain)
+		}
+		time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
+	}
+
+	result.Subdomains = subdomains
+	result.Error = nil
+	ch <- result
+}
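Both paginated clients (Dogpile and Yahoo) sleep for a random 3 to 7 seconds between result pages, presumably to stay under the search engines' rate limits. A tiny standalone sketch of that jittered delay, with the page fetch stubbed out:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

func main() {
	for page := 0; page < 3; page++ {
		fmt.Println("fetching results page", page)
		// Wait between 3 and 7 seconds before the next page, mirroring
		// time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second) above.
		time.Sleep(time.Duration(3+rand.Intn(5)) * time.Second)
	}
}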