Added custom pages flags for search engines

master
ice3man 2018-05-05 15:10:56 +05:30
parent 303734c46e
commit 98518d5782
4 changed files with 32 additions and 26 deletions

View File

@ -52,6 +52,9 @@ type Config struct {
type Setting struct {
CensysPages string // Censys pages to check. For All, use "all"
AskPages string // Ask search pages to check
BaiduPages string // Baidu search pages to check
BingPages string // Bing search pages to check
}
func InitializeSettings() (setting *Setting) {
@ -59,6 +62,9 @@ func InitializeSettings() (setting *Setting) {
settings.CensysPages = "10" // Default is 10 pages. Strikes a fine balance
settings.AskPages = "15"
settings.BaiduPages = "5"
settings.BingPages = "50"
return &settings
}

View File

@ -11,10 +11,10 @@ package ask
import (
"fmt"
"io/ioutil"
"regexp"
"strconv"
"sort"
"net/url"
"regexp"
"sort"
"strconv"
"github.com/Ice3man543/subfinder/libsubfinder/helper"
)
@ -27,7 +27,7 @@ func Query(state *helper.State, ch chan helper.Result) {
var result helper.Result
result.Subdomains = subdomains
min_iterations := 15
min_iterations, _ := strconv.Atoi(state.CurrentSettings.AskPages)
max_iterations := 760
search_query := ""
current_page := 0
@ -41,8 +41,8 @@ func Query(state *helper.State, ch chan helper.Result) {
current_page = 0
search_query = new_search_query
}
resp, err := helper.GetHTTPResponse("http://www.ask.com/web?q=" + search_query + "&page=" + strconv.Itoa(current_page) + "&qid=8D6EE6BF52E0C04527E51F64F22C4534&o=0&l=dir&qsrc=998&qo=pagination", state.Timeout)
resp, err := helper.GetHTTPResponse("http://www.ask.com/web?q="+search_query+"&page="+strconv.Itoa(current_page)+"&qid=8D6EE6BF52E0C04527E51F64F22C4534&o=0&l=dir&qsrc=998&qo=pagination", state.Timeout)
if err != nil {
result.Error = err
ch <- result
@ -57,7 +57,7 @@ func Query(state *helper.State, ch chan helper.Result) {
return
}
src := string(body)
re := regexp.MustCompile(`([a-z0-9]+\.)+` + state.Domain)
match := re.FindAllString(src, -1)
@ -66,7 +66,7 @@ func Query(state *helper.State, ch chan helper.Result) {
if sort.StringsAreSorted(subdomains) == false {
sort.Strings(subdomains)
}
insert_index := sort.SearchStrings(subdomains, subdomain)
if insert_index < len(subdomains) && subdomains[insert_index] == subdomain {
continue
@ -93,4 +93,4 @@ func Query(state *helper.State, ch chan helper.Result) {
result.Subdomains = subdomains
result.Error = nil
ch <- result
}
}

View File

@ -11,12 +11,12 @@ package baidu
import (
"fmt"
"io/ioutil"
"regexp"
"time"
"math/rand"
"strconv"
"sort"
"net/url"
"regexp"
"sort"
"strconv"
"time"
"github.com/Ice3man543/subfinder/libsubfinder/helper"
)
@ -29,7 +29,7 @@ func Query(state *helper.State, ch chan helper.Result) {
var result helper.Result
result.Subdomains = subdomains
min_iterations := 5
min_iterations, _ := strconv.Atoi(state.CurrentSettings.BaiduPages)
max_iterations := 760
search_query := ""
current_page := 0
@ -44,7 +44,7 @@ func Query(state *helper.State, ch chan helper.Result) {
search_query = new_search_query
}
resp, err := helper.GetHTTPResponse("https://www.baidu.com/s?rn=100&pn=" + strconv.Itoa(current_page) + "&wd=" + search_query +"&oq=" + search_query, state.Timeout)
resp, err := helper.GetHTTPResponse("https://www.baidu.com/s?rn=100&pn="+strconv.Itoa(current_page)+"&wd="+search_query+"&oq="+search_query, state.Timeout)
if err != nil {
result.Error = err
ch <- result
@ -59,7 +59,7 @@ func Query(state *helper.State, ch chan helper.Result) {
return
}
src := string(body)
re := regexp.MustCompile(`([a-z0-9]+\.)+` + state.Domain)
match := re.FindAllString(src, -1)
@ -68,7 +68,7 @@ func Query(state *helper.State, ch chan helper.Result) {
if sort.StringsAreSorted(subdomains) == false {
sort.Strings(subdomains)
}
insert_index := sort.SearchStrings(subdomains, subdomain)
if insert_index < len(subdomains) && subdomains[insert_index] == subdomain {
continue
@ -90,10 +90,10 @@ func Query(state *helper.State, ch chan helper.Result) {
break
}
current_page++
time.Sleep(time.Duration((3 +rand.Intn(5))) * time.Second)
time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
}
result.Subdomains = subdomains
result.Error = nil
ch <- result
}
}

View File

@ -11,10 +11,10 @@ package bing
import (
"fmt"
"io/ioutil"
"regexp"
"strconv"
"sort"
"net/url"
"regexp"
"sort"
"strconv"
"github.com/Ice3man543/subfinder/libsubfinder/helper"
)
@ -27,7 +27,7 @@ func Query(state *helper.State, ch chan helper.Result) {
var result helper.Result
result.Subdomains = subdomains
min_iterations := 50
min_iterations, _ := strconv.Atoi(state.CurrentSettings.BingPages)
max_iterations := 760
search_query := ""
current_page := 0
@ -42,7 +42,7 @@ func Query(state *helper.State, ch chan helper.Result) {
search_query = new_search_query
}
resp, err := helper.GetHTTPResponse("https://www.bing.com/search?q=" + search_query + "&go=Submit&first=" + strconv.Itoa(current_page), state.Timeout)
resp, err := helper.GetHTTPResponse("https://www.bing.com/search?q="+search_query+"&go=Submit&first="+strconv.Itoa(current_page), state.Timeout)
if err != nil {
result.Error = err
ch <- result
@ -69,7 +69,7 @@ func Query(state *helper.State, ch chan helper.Result) {
if sort.StringsAreSorted(subdomains) == false {
sort.Strings(subdomains)
}
insert_index := sort.SearchStrings(subdomains, subdomain)
if insert_index < len(subdomains) && subdomains[insert_index] == subdomain {
continue
@ -96,4 +96,4 @@ func Query(state *helper.State, ch chan helper.Result) {
result.Subdomains = subdomains
result.Error = nil
ch <- result
}
}