Massive linting pass, along with fixes for many bugs that were leading to multiple errors
parent d397687aa2
commit e8a673710a
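Most of the changes below are mechanical golint/gofmt fixes: snake_case names renamed to camelCase, initialisms such as IP, URL and API capitalised, unexported types renamed (for example crtsh_object becomes crtshObject), and comments rewritten so they begin with the identifier they document. A minimal, self-contained sketch of the convention follows; this file is illustrative only and is not part of the repository.

// Package example illustrates the naming and comment style enforced in this commit.
package example

// crtshObject mirrors one crt.sh result entry; it was previously crtsh_object
// with a Name_value field.
type crtshObject struct {
	NameValue string `json:"name_value"`
}

// checkWildcard reports whether any resolved IP matches a recorded wildcard IP.
// (Simplified from helper.CheckWildcard in the diff below; note the comment
// now starts with the function name, as golint expects.)
func checkWildcard(ips, wildcardIPs []string) bool {
	for _, ip := range ips {
		for _, wildcardIP := range wildcardIPs {
			if ip == wildcardIP {
				return true
			}
		}
	}
	return false
}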
@@ -22,3 +22,6 @@ main
# VIM swap files
*.swp
*.swo

# VSCode
.vscode/

@@ -1,11 +1,10 @@
//
// bruteforce.go : A DNS Bruteforcer in Golang
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// All Rights Reserved
// Package bruteforce is a fast bruteforce implementation in golang
package bruteforce
import (

@@ -27,24 +26,24 @@ func consume(args ...interface{}) interface{} {
if len(ips) <= 0 {
// We didn't find any ips
return ""
} else {
if state.IsWildcard == true {
result := helper.CheckWildcard(state, ips)
if result == true {
// We have a wildcard ip
return ""
}
}
if !state.Silent {
if state.Verbose {
fmt.Printf("\n[%sBRUTE%s] %s : %s", helper.Info, helper.Reset, host, ips[0])
}
}
return ips[0]
}
if state.IsWildcard == true {
result := helper.CheckWildcard(state, ips)
if result == true {
// We have a wildcard ip
return ""
}
}
if !state.Silent {
if state.Verbose {
fmt.Printf("\n[%sBRUTE%s] %s : %s", helper.Info, helper.Reset, host, ips[0])
}
}
return ips[0]
}
// Brute ... Resolve handle a list of subdomains to resolve
// Brute handle a list of subdomains to resolve
func Brute(state *helper.State, list []string, domain string) (subdomains []helper.Domain) {
brutePool := helper.NewPool(state.Threads)

@@ -3,11 +3,8 @@
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// All Rights Reserved
// Passive Subdomain Discovery Helper method
// Calls all the functions and also manages error handling
// Package passive is the main core of the program
package passive
import (

@@ -389,7 +386,7 @@ func discover(state *helper.State, domain string, sourceConfig *Source) (subdoma
domainDiscoverPool.Wait()
if state.Silent != true {
fmt.Printf("\nRunning enumeration on %s", domain)
fmt.Printf("\nRunning enumeration on %s\n", domain)
}
// Create goroutines for added speed and recieve data via channels

@@ -1,11 +1,10 @@
//
// resolver.go : A Resolving package in golang
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// All Rights Reserved
// Package resolver is a fast dns resolver
package resolver
import (

@@ -10,6 +10,7 @@ package helper
// Usage: fmt.Printf("[%sCRTSH%s] %s", r, rs, subdomain)
// Different Colours for use
var (
Red = "\033[31;1;4m"
Cyan = "\033[36;6;2m"

@@ -16,7 +16,7 @@ import (
"os/user"
)
// GetHomeDir ... Gets current user directory
// GetHomeDir gets current user directory
func GetHomeDir() string {
usr, err := user.Current()
if err != nil {

@@ -27,7 +27,7 @@ func GetHomeDir() string {
return usr.HomeDir
}
// Exists ... exists returns whether the given file or directory exists or not
// Exists returns whether the given file or directory exists or not
func Exists(path string) (bool, error) {
_, err := os.Stat(path)
if err == nil {

@@ -39,7 +39,7 @@ func Exists(path string) (bool, error) {
return true, err
}
// CreateDirIfNotExist ... Create config directory if it does not exists
// CreateDirIfNotExist creates config directory if it does not exists
func CreateDirIfNotExist(dir string) {
if _, err := os.Stat(dir); os.IsNotExist(err) {
err = os.MkdirAll(dir, 0755)

@@ -50,7 +50,7 @@ func CreateDirIfNotExist(dir string) {
}
}
// ReadConfigFile ... Reads a config file from disk and returns Configuration structure
// ReadConfigFile Reads a config file from disk and returns Configuration structure
// If not exists, create one and then return
func ReadConfigFile() (configuration *Config, err error) {

@@ -9,74 +9,75 @@
package helper
import (
"fmt"
"os"
"fmt"
"os"
"github.com/bogdanovich/dns_resolver"
"github.com/bogdanovich/dns_resolver"
)
// Resolver is a global dns_resolver object
var Resolver *dns_resolver.DnsResolver
// Resolve a host using dns_resolver lib
// ResolveHost resolves a host using dns_resolver lib
func ResolveHost(host string) (ips []string, err error) {
// In case of i/o timeout
Resolver.RetryTimes = 5
// In case of i/o timeout
Resolver.RetryTimes = 5
ip, err := Resolver.LookupHost(host)
if err != nil {
return []string{}, err
}
ip, err := Resolver.LookupHost(host)
if err != nil {
return []string{}, err
}
var retIPs []string
for _, host := range ip {
retIPs = append(retIPs, host.String())
}
var retIPs []string
for _, host := range ip {
retIPs = append(retIPs, host.String())
}
return retIPs, nil
return retIPs, nil
}
// Check if a ip result contains wildcards
// CheckWildcard checks if a ip result contains wildcards
func CheckWildcard(state *State, ips []string) (result bool) {
for _, ip := range ips {
for _, wildcardIp := range state.WildcardIP {
if ip == wildcardIp {
return true
}
}
}
for _, ip := range ips {
for _, wildcardIP := range state.WildcardIP {
if ip == wildcardIP {
return true
}
}
}
// Not wildcard
return false
// Not wildcard
return false
}
// Checks if a host returns wildcard ips and returns status with ips returned
// InitWildcard checks if a host returns wildcard ips and returns status with ips returned
func InitWildcard(domain string) (result bool, ips []string) {
UUIDs := make([]string, 4)
UUIDs := make([]string, 4)
// Generate 4 random UUIDs
for i := 0; i < 4; i++ {
uuid, err := NewUUID()
if err != nil {
fmt.Printf("\nerror: %v\n", err)
os.Exit(1)
}
UUIDs[i] = uuid
}
// Generate 4 random UUIDs
for i := 0; i < 4; i++ {
uuid, err := NewUUID()
if err != nil {
fmt.Printf("\nerror: %v\n", err)
os.Exit(1)
}
UUIDs[i] = uuid
}
for _, uid := range UUIDs {
attempt := fmt.Sprintf("%s.%s", uid, domain)
for _, uid := range UUIDs {
attempt := fmt.Sprintf("%s.%s", uid, domain)
// Currently we check only A records. GoBuster also does that
// I don't think checking both A and CNAME checking is necessary
ips, err := ResolveHost(attempt)
if err != nil {
continue
}
// Currently we check only A records. GoBuster also does that
// I don't think checking both A and CNAME checking is necessary
ips, err := ResolveHost(attempt)
if err != nil {
continue
}
if len(ips) > 0 {
return true, ips
}
}
if len(ips) > 0 {
return true, ips
}
}
return false, ips
return false, ips
}

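For context, the two wildcard helpers above are used together on the bruteforce path: InitWildcard probes a few random UUID labels once per domain, and CheckWildcard then filters every resolved candidate against the recorded wildcard IPs. A rough sketch of that wiring (the wrapper function name is an assumption for illustration, and it presumes helper.Resolver has already been initialised):

package example

import "github.com/subfinder/subfinder/libsubfinder/helper"

// filterWildcard is a sketch of how InitWildcard and CheckWildcard fit together.
func filterWildcard(state *helper.State, domain string, ips []string) []string {
	// Run once per domain: detect wildcard DNS and remember its IPs.
	if wildcard, wildcardIPs := helper.InitWildcard(domain); wildcard {
		state.IsWildcard = true
		state.WildcardIP = wildcardIPs
	}
	// Run per resolved candidate: drop answers that match a wildcard IP.
	if state.IsWildcard && helper.CheckWildcard(state, ips) {
		return nil
	}
	return ips
}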
@@ -17,13 +17,13 @@ import (
"github.com/Mzack9999/xurls"
)
// Current result structure
// Result is the Current result structure
type Result struct {
Subdomains []string // Subdomains found
Error error // Any error that has occured
}
//Domain structure
// Domain structure
type Domain struct {
IP string
Fqdn string

@@ -46,7 +46,7 @@ func NewUUID() (string, error) {
return fmt.Sprintf("%x-%x-%x-%x-%x", uuid[0:4], uuid[4:6], uuid[6:8], uuid[8:10], uuid[10:]), nil
}
// Returns unique items in a slice
// Unique Returns unique items in a slice
// Adapted from http://www.golangprograms.com/remove-duplicate-values-from-slice.html
func Unique(elements []string) []string {
// Use map to record duplicates as we find them.

@@ -67,6 +67,7 @@ func Unique(elements []string) []string {
return result
}
// SubdomainExists checks if a key exists in an array
func SubdomainExists(key string, values []string) bool {
for _, data := range values {
if key == data {

@@ -76,6 +77,7 @@ func SubdomainExists(key string, values []string) bool {
return false
}
// ExtractSubdomains extracts a subdomain from a big blob of text
func ExtractSubdomains(text, domain string) (urls []string) {
allUrls := xurls.ExtractSubdomains(text, domain)

@@ -22,8 +22,8 @@ type Job struct {
added chan bool // used by Pool.Add to wait for the supervisor
}
// stats is a structure holding statistical data about the pool.
type stats struct {
// Stats is a structure holding statistical data about the pool.
type Stats struct {
Submitted int
Running int
Completed int

@@ -45,7 +45,7 @@ type Pool struct {
jobsCompleted *list.List
interval time.Duration // for sleeping, in ms
workingWantedPipe chan chan bool
statsWantedPipe chan chan stats
statsWantedPipe chan chan Stats
workerKillPipe chan bool
supervisorKillPipe chan bool
workerWg sync.WaitGroup

@@ -87,7 +87,7 @@ WORKER_LOOP:
pool.workerWg.Done()
}
// New creates a new Pool.
// NewPool creates a new Pool
func NewPool(workers int) (pool *Pool) {
pool = new(Pool)
pool.numWorkers = workers

@@ -98,7 +98,7 @@ func NewPool(workers int) (pool *Pool) {
pool.jobsReadyToRun = list.New()
pool.jobsCompleted = list.New()
pool.workingWantedPipe = make(chan chan bool)
pool.statsWantedPipe = make(chan chan stats)
pool.statsWantedPipe = make(chan chan Stats)
pool.workerKillPipe = make(chan bool)
pool.supervisorKillPipe = make(chan bool)
pool.interval = 1

@@ -159,7 +159,7 @@ SUPERVISOR_LOOP:
workingPipe <- working
// stats
case statsPipe := <-pool.statsWantedPipe:
poolStats := stats{pool.numJobsSubmitted, pool.numJobsRunning, pool.numJobsCompleted}
poolStats := Stats{pool.numJobsSubmitted, pool.numJobsRunning, pool.numJobsCompleted}
statsPipe <- poolStats
// stopping
case <-pool.supervisorKillPipe:

@@ -276,12 +276,12 @@ func (pool *Pool) WaitForJob() *Job {
}
// Status returns a "stats" instance.
func (pool *Pool) Status() stats {
statsPipe := make(chan stats)
func (pool *Pool) Status() Stats {
statsPipe := make(chan Stats)
if pool.supervisorStarted {
pool.statsWantedPipe <- statsPipe
return <-statsPipe
}
// the supervisor wasn't started so we return a zeroed structure
return stats{}
return Stats{}
}

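One behavioural consequence of the pool changes above: the stats type is now the exported Stats, so code outside the helper package can inspect the value returned by Pool.Status(). A short sketch of what that enables (reportPool is an illustrative name, not part of the codebase):

package example

import (
	"fmt"

	"github.com/subfinder/subfinder/libsubfinder/helper"
)

// reportPool prints the pool counters; s holds a helper.Stats value, which is
// only nameable outside the helper package now that the type is exported.
func reportPool(pool *helper.Pool) {
	s := pool.Status()
	fmt.Printf("pool: submitted=%d running=%d completed=%d\n", s.Submitted, s.Running, s.Completed)
}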
@@ -1,44 +0,0 @@
//
// search.go : Contains helper functions for search engine logic
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
// Configuration Structure which contains configuration for each
// search engine.
type BaseSearchConfiguration struct {
MaxDomains int // Max subdomains per page
MaxPages int // Max pages we should query
CurrentPageNo int // Current page we are checking
CurrentRetries int // Retries we have already made
PrevLinksFound []string // Links we have previously found
CurrentSubdomains []string // Subdomains we have already found on a page
AllSubdomains []string // All Subdomains found so far
}
// CheckMaxSubdomains checks if maximum number of domains was found.
func CheckMaxSubdomains(config *BaseSearchConfiguration) bool {
// If we have no limit on max domains on pages
if config.MaxDomains == 0 {
return false
}
return len(config.CurrentSubdomains) >= config.MaxDomains
}
// CheckMaxPages checks if maximum number of pages per service was found.
func CheckMaxPages(config *BaseSearchConfiguration) bool {
// If we have no limit on max pages
if config.MaxPages == 0 {
return false
}
return config.CurrentPageNo >= config.MaxPages
}

@@ -46,7 +46,7 @@ func GetHTTPResponse(url string, timeout int) (resp *http.Response, err error) {
return resp, nil
}
// GetHTTPResponse : Returns a HTTP Response object
// GetHTTPCookieResponse returns a HTTP Response object
// It needs URL To Visit and a cookie array to send with request.
// Note, It needs full url with scheme and a timeout value.
// It returns a HTTP Response object with a cookie array.

@@ -13,7 +13,7 @@ import (
"os"
)
// Holds the State read in from the CLI
// State holds the State read in from the CLI
type State struct {
Color bool // Whether to use color or not
Threads int // Number of threads to use

@@ -46,6 +46,7 @@ type State struct {
ConfigState Config // Current configuration file state
}
// Config contains api keys for different sources
type Config struct {
VirustotalAPIKey string `json:"virustotalApikey"` // Virustotal API Key

@@ -63,6 +64,7 @@ type Config struct {
ShodanAPIKey string `json:"shodanApiKey"` // Shodan API Key
}
// Setting contains settings for sources
type Setting struct {
CensysPages string // Censys pages to check. For All, use "all"
AskPages string // Ask search pages to check

@@ -73,6 +75,7 @@ type Setting struct {
ShodanPages string // Shodan search pages to check
}
// InitializeSettings sets default settings value
func InitializeSettings() (setting *Setting) {
var settings Setting

@@ -87,6 +90,7 @@ func InitializeSettings() (setting *Setting) {
return &settings
}
// InitState initializes the default state
func InitState() (state State, err error) {
// Read the configuration file and ignore errors

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Package output ... Output ... Contains different functions for reporting
// Package output Contains different functions for reporting
package output
import (

@@ -17,7 +17,7 @@ import (
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// WriteOutputText ... Write output to a normal text file
// WriteOutputText writes a single subdomain output to a normal text file
func WriteOutputText(state *helper.State, subdomain string) error {
_, err := state.OutputHandle.WriteString(subdomain + "\n")
if err != nil {

@@ -27,7 +27,7 @@ func WriteOutputText(state *helper.State, subdomain string) error {
return nil
}
// WriteOutputTextArray ... Write output as a text array
// WriteOutputTextArray writes a list of subdomains output to a normal text file
func WriteOutputTextArray(state *helper.State, subdomains []string) error {
for _, subdomain := range subdomains {
_, err := state.OutputHandle.WriteString(subdomain + "\n")

@@ -39,7 +39,7 @@ func WriteOutputTextArray(state *helper.State, subdomains []string) error {
return nil
}
// WriteOutputJSON ... Writes subdomains output to a json file
// WriteOutputJSON writes subdomain output to a json file
func WriteOutputJSON(state *helper.State, subdomains []string) error {
_, err := os.Create(state.Output)

@@ -58,7 +58,7 @@ func WriteOutputJSON(state *helper.State, subdomains []string) error {
return nil
}
// WriteOutputAquatoneJSON ... Writes subdomains output to a json file
// WriteOutputAquatoneJSON writes aquatone-style subdomains output to a json file
func WriteOutputAquatoneJSON(state *helper.State, subdomains []helper.Domain) error {
m := make(map[string]string)
_, err := os.Create(state.Output)

@@ -83,7 +83,7 @@ func WriteOutputAquatoneJSON(state *helper.State, subdomains []helper.Domain) er
return nil
}
// WriteOutputToDir ... Write output state into a directory
// WriteOutputToDir writes output state into a directory
func WriteOutputToDir(state *helper.State, subdomains []string, domain string) (err error) {
if state.OutputDir != "" {
if state.IsJSON == false {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Package archiveis ... Archiveis Scraping Engine in Golang
// Package archiveis is a Archiveis Scraping Engine in Golang
package archiveis
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Package ask ... A golang client for Ask Subdomain Discovery
// Package ask is a golang client for Ask Subdomain Discovery
package ask
import (

@@ -30,19 +30,19 @@ func Query(args ...interface{}) (i interface{}) {
minIterations, _ := strconv.Atoi(state.CurrentSettings.AskPages)
maxIterations := 760
searchQuery := ""
current_page := 0
for current_iteration := 0; current_iteration <= maxIterations; current_iteration++ {
new_searchQuery := "site:" + domain
currentPage := 0
for currentIteration := 0; currentIteration <= maxIterations; currentIteration++ {
newSearchQuery := "site:" + domain
if len(subdomains) > 0 {
new_searchQuery += " -www." + domain
newSearchQuery += " -www." + domain
}
new_searchQuery = url.QueryEscape(new_searchQuery)
if searchQuery != new_searchQuery {
current_page = 0
searchQuery = new_searchQuery
newSearchQuery = url.QueryEscape(newSearchQuery)
if searchQuery != newSearchQuery {
currentPage = 0
searchQuery = newSearchQuery
}
resp, err := helper.GetHTTPResponse("http://www.ask.com/web?q="+searchQuery+"&page="+strconv.Itoa(current_page)+"&qid=8D6EE6BF52E0C04527E51F64F22C4534&o=0&l=dir&qsrc=998&qo=pagination", state.Timeout)
resp, err := helper.GetHTTPResponse("http://www.ask.com/web?q="+searchQuery+"&page="+strconv.Itoa(currentPage)+"&qid=8D6EE6BF52E0C04527E51F64F22C4534&o=0&l=dir&qsrc=998&qo=pagination", state.Timeout)
if err != nil {
if !state.Silent {
fmt.Printf("\nask: %v\n", err)

@@ -62,14 +62,14 @@ func Query(args ...interface{}) (i interface{}) {
match := helper.ExtractSubdomains(src, domain)
new_subdomains_found := 0
newSubdomainsFound := 0
for _, subdomain := range match {
if sort.StringsAreSorted(subdomains) == false {
sort.Strings(subdomains)
}
insert_index := sort.SearchStrings(subdomains, subdomain)
if insert_index < len(subdomains) && subdomains[insert_index] == subdomain {
insertIndex := sort.SearchStrings(subdomains, subdomain)
if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
continue
}

@@ -82,13 +82,13 @@ func Query(args ...interface{}) (i interface{}) {
}
subdomains = append(subdomains, subdomain)
new_subdomains_found++
newSubdomainsFound++
}
// If no new subdomains are found exits after minIterations
if new_subdomains_found == 0 && current_iteration > minIterations {
if newSubdomainsFound == 0 && currentIteration > minIterations {
break
}
current_page++
currentPage++
}
return subdomains

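The ask source above and the baidu and bing sources below share the same dedup step inside their pagination loops: keep the subdomain slice sorted, binary-search it with sort.SearchStrings before inserting, and count how many new entries a page produced so the loop can stop early. A stripped-down sketch of just that step (addIfNew is an illustrative name, not part of the codebase):

package example

import "sort"

// addIfNew appends subdomain only if it is not already present, mirroring the
// sort.SearchStrings pattern used by the ask, baidu and bing sources. It
// returns the (possibly grown) slice and whether anything was added.
func addIfNew(subdomains []string, subdomain string) ([]string, bool) {
	if !sort.StringsAreSorted(subdomains) {
		sort.Strings(subdomains)
	}
	i := sort.SearchStrings(subdomains, subdomain)
	if i < len(subdomains) && subdomains[i] == subdomain {
		return subdomains, false
	}
	return append(subdomains, subdomain), true
}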
@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Baidu Subdomain Discovery
// Package baidu is a golang client for Baidu Subdomain Discovery
package baidu
import (

@@ -29,22 +29,22 @@ func Query(args ...interface{}) interface{} {
domain := args[0].(string)
state := args[1].(*helper.State)
min_iterations, _ := strconv.Atoi(state.CurrentSettings.BaiduPages)
max_iterations := 760
search_query := ""
current_page := 0
for current_iteration := 0; current_iteration <= max_iterations; current_iteration++ {
new_search_query := "site:" + domain
minIterations, _ := strconv.Atoi(state.CurrentSettings.BaiduPages)
maxIterations := 760
searchQuery := ""
currentPage := 0
for currentIteration := 0; currentIteration <= maxIterations; currentIteration++ {
newSearchQuery := "site:" + domain
if len(subdomains) > 0 {
new_search_query += " -site:www." + domain
newSearchQuery += " -site:www." + domain
}
new_search_query = url.QueryEscape(new_search_query)
if search_query != new_search_query {
current_page = 0
search_query = new_search_query
newSearchQuery = url.QueryEscape(newSearchQuery)
if searchQuery != newSearchQuery {
currentPage = 0
searchQuery = newSearchQuery
}
resp, err := helper.GetHTTPResponse("https://www.baidu.com/s?rn=100&pn="+strconv.Itoa(current_page)+"&wd="+search_query+"&oq="+search_query, state.Timeout)
resp, err := helper.GetHTTPResponse("https://www.baidu.com/s?rn=100&pn="+strconv.Itoa(currentPage)+"&wd="+searchQuery+"&oq="+searchQuery, state.Timeout)
if err != nil {
if !state.Silent {
fmt.Printf("\nbaidu: %v\n", err)

@@ -64,14 +64,14 @@ func Query(args ...interface{}) interface{} {
match := helper.ExtractSubdomains(src, domain)
new_subdomains_found := 0
newSubdomainsFound := 0
for _, subdomain := range match {
if sort.StringsAreSorted(subdomains) == false {
sort.Strings(subdomains)
}
insert_index := sort.SearchStrings(subdomains, subdomain)
if insert_index < len(subdomains) && subdomains[insert_index] == subdomain {
insertIndex := sort.SearchStrings(subdomains, subdomain)
if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
continue
}

@@ -84,13 +84,13 @@ func Query(args ...interface{}) interface{} {
}
subdomains = append(subdomains, subdomain)
new_subdomains_found++
newSubdomainsFound++
}
// If no new subdomains are found exits after min_iterations
if new_subdomains_found == 0 && current_iteration > min_iterations {
if newSubdomainsFound == 0 && currentIteration > minIterations {
break
}
current_page++
currentPage++
time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
}

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Bing Subdomain Discovery
// Package bing is a golang client for Bing Subdomain Discovery
package bing
import (

@@ -28,22 +28,22 @@ func Query(args ...interface{}) interface{} {
domain := args[0].(string)
state := args[1].(*helper.State)
min_iterations, _ := strconv.Atoi(state.CurrentSettings.BingPages)
max_iterations := 760
search_query := ""
current_page := 0
for current_iteration := 0; current_iteration <= max_iterations; current_iteration++ {
new_search_query := "domain:" + domain
minIterations, _ := strconv.Atoi(state.CurrentSettings.BingPages)
maxIterations := 760
searchQuery := ""
currentPage := 0
for currentIteration := 0; currentIteration <= maxIterations; currentIteration++ {
newSearchQuery := "domain:" + domain
if len(subdomains) > 0 {
new_search_query += " -www." + domain
newSearchQuery += " -www." + domain
}
new_search_query = url.QueryEscape(new_search_query)
if search_query != new_search_query {
current_page = 0
search_query = new_search_query
newSearchQuery = url.QueryEscape(newSearchQuery)
if searchQuery != newSearchQuery {
currentPage = 0
searchQuery = newSearchQuery
}
resp, err := helper.GetHTTPResponse("https://www.bing.com/search?q="+search_query+"&go=Submit&first="+strconv.Itoa(current_page), state.Timeout)
resp, err := helper.GetHTTPResponse("https://www.bing.com/search?q="+searchQuery+"&go=Submit&first="+strconv.Itoa(currentPage), state.Timeout)
if err != nil {
if !state.Silent {
fmt.Printf("\nbing: %v\n", err)

@@ -59,19 +59,19 @@ func Query(args ...interface{}) interface{} {
}
// suppress all %xx sequences with a space
re_sub := regexp.MustCompile(`%.{2}`)
src := re_sub.ReplaceAllLiteralString(string(body), " ")
reSub := regexp.MustCompile(`%.{2}`)
src := reSub.ReplaceAllLiteralString(string(body), " ")
match := helper.ExtractSubdomains(src, domain)
new_subdomains_found := 0
newSubdomainsFound := 0
for _, subdomain := range match {
if sort.StringsAreSorted(subdomains) == false {
sort.Strings(subdomains)
}
insert_index := sort.SearchStrings(subdomains, subdomain)
if insert_index < len(subdomains) && subdomains[insert_index] == subdomain {
insertIndex := sort.SearchStrings(subdomains, subdomain)
if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
continue
}

@@ -84,13 +84,13 @@ func Query(args ...interface{}) interface{} {
}
subdomains = append(subdomains, subdomain)
new_subdomains_found++
newSubdomainsFound++
}
// If no new subdomains are found exits after min_iterations
if new_subdomains_found == 0 && current_iteration > min_iterations {
if newSubdomainsFound == 0 && currentIteration > minIterations {
break
}
current_page++
currentPage++
}
return subdomains

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Censys Subdomain Discovery
// Package censys is a golang client for Censys Subdomain Discovery
package censys
import (

@@ -17,7 +17,7 @@ import (
"strconv"
"strings"
"github.com/Ice3man543/subfinder/libsubfinder/helper"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
type resultsq struct {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A CertDB Subdomain parser in golang
// Package certdb is a CertDB Subdomain parser in golang
package certdb
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Google Transparency Report
// Package certificatetransparency is a golang client for Entrust Certificate Transparency
package certificatetransparency
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A Golang based client for Certspotter Parsing
// Package certspotter is a Golang based client for Certspotter Parsing
package certspotter
import (

@@ -18,12 +18,12 @@ import (
)
// Structure of a single dictionary of output by crt.sh
type certspotter_object struct {
Dns_names []string `json:"dns_names"`
type certspotterObject struct {
DNSNames []string `json:"dns_names"`
}
// array of all results returned
var certspotter_data []certspotter_object
var certspotterData []certspotterObject
// all subdomains found
var subdomains []string

@@ -45,7 +45,7 @@ func Query(args ...interface{}) interface{} {
}
// Get the response body
resp_body, err := ioutil.ReadAll(resp.Body)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !state.Silent {
fmt.Printf("\ncertspotter: %v\n", err)

@@ -54,7 +54,7 @@ func Query(args ...interface{}) interface{} {
}
// Decode the json format
err = json.Unmarshal([]byte(resp_body), &certspotter_data)
err = json.Unmarshal([]byte(respBody), &certspotterData)
if err != nil {
if !state.Silent {
fmt.Printf("\ncertspotter: %v\n", err)

@@ -63,23 +63,23 @@ func Query(args ...interface{}) interface{} {
}
// Append each subdomain found to subdomains array
for _, block := range certspotter_data {
for _, dns_name := range block.Dns_names {
for _, block := range certspotterData {
for _, dnsName := range block.DNSNames {
// Fix Wildcard subdomains containg asterisk before them
if strings.Contains(dns_name, "*.") {
dns_name = strings.Split(dns_name, "*.")[1]
if strings.Contains(dnsName, "*.") {
dnsName = strings.Split(dnsName, "*.")[1]
}
if state.Verbose == true {
if state.Color == true {
fmt.Printf("\n[%sCERTSPOTTER%s] %s", helper.Red, helper.Reset, dns_name)
fmt.Printf("\n[%sCERTSPOTTER%s] %s", helper.Red, helper.Reset, dnsName)
} else {
fmt.Printf("\n[CERTSPOTTER] %s", dns_name)
fmt.Printf("\n[CERTSPOTTER] %s", dnsName)
}
}
subdomains = append(subdomains, dns_name)
subdomains = append(subdomains, dnsName)
}
}

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A Golang based client for CRT.SH Parsing
// Package crtsh is a Golang based client for CRT.SH Parsing
package crtsh
import (

@@ -17,14 +17,11 @@ import (
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// Structure of a single dictionary of output by crt.sh
// We only need name_value object hence this :-)
type crtsh_object struct {
Name_value string `json:"name_value"`
type crtshObject struct {
NameValue string `json:"name_value"`
}
// array of all results returned
var crtsh_data []crtsh_object
var crtshData []crtshObject
// all subdomains found
var subdomains []string

@@ -35,9 +32,6 @@ func Query(args ...interface{}) interface{} {
domain := args[0].(string)
state := args[1].(*helper.State)
// Make a http request to CRT.SH server and request output in JSON
// format.
// I Think 5 minutes would be more than enough for CRT.SH :-)
resp, err := helper.GetHTTPResponse("https://crt.sh/?q=%25."+domain+"&output=json", state.Timeout)
if err != nil {
if !state.Silent {

@@ -47,7 +41,7 @@ func Query(args ...interface{}) interface{} {
}
// Get the response body
resp_body, err := ioutil.ReadAll(resp.Body)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !state.Silent {
fmt.Printf("\ncrtsh: %v\n", err)

@@ -55,9 +49,7 @@ func Query(args ...interface{}) interface{} {
return subdomains
}
if strings.Contains(string(resp_body), "The requested URL / was not found on this server.") {
// crt.sh is not showing subdomains for some reason
// move back
if strings.Contains(string(respBody), "The requested URL / was not found on this server.") {
if !state.Silent {
fmt.Printf("\ncrtsh: %v\n", err)
}

@@ -67,17 +59,17 @@ func Query(args ...interface{}) interface{} {
// Convert Response Body to string and then replace }{ to },{
// This is done in order to enable parsing of JSON format employed by
// crt.sh
correct_format := strings.Replace(string(resp_body), "}{", "},{", -1)
correctFormat := strings.Replace(string(respBody), "}{", "},{", -1)
// Now convert it into a json array like this
// [
// {abc},
// {abc}
// ]
json_output := "[" + correct_format + "]"
jsonOutput := "[" + correctFormat + "]"
// Decode the json format
err = json.Unmarshal([]byte(json_output), &crtsh_data)
err = json.Unmarshal([]byte(jsonOutput), &crtshData)
if err != nil {
if !state.Silent {
fmt.Printf("\ncrtsh: %v\n", err)

@@ -86,22 +78,22 @@ func Query(args ...interface{}) interface{} {
}
// Append each subdomain found to subdomains array
for _, subdomain := range crtsh_data {
for _, subdomain := range crtshData {
// Fix Wildcard subdomains containg asterisk before them
if strings.Contains(subdomain.Name_value, "*.") {
subdomain.Name_value = strings.Split(subdomain.Name_value, "*.")[1]
if strings.Contains(subdomain.NameValue, "*.") {
subdomain.NameValue = strings.Split(subdomain.NameValue, "*.")[1]
}
if state.Verbose == true {
if state.Color == true {
fmt.Printf("\n[%sCRT.SH%s] %s", helper.Red, helper.Reset, subdomain.Name_value)
fmt.Printf("\n[%sCRT.SH%s] %s", helper.Red, helper.Reset, subdomain.NameValue)
} else {
fmt.Printf("\n[CRT.SH] %s", subdomain.Name_value)
fmt.Printf("\n[CRT.SH] %s", subdomain.NameValue)
}
}
subdomains = append(subdomains, subdomain.Name_value)
subdomains = append(subdomains, subdomain.NameValue)
}
return subdomains

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Golang driver from dnsdb.org
// Package dnsdb is a Golang driver for dnsdb.org
package dnsdb
import (

@@ -50,8 +50,8 @@ func Query(args ...interface{}) interface{} {
match := re.FindAllStringSubmatch(src, -1)
for _, subdomain := range match {
string_split := strings.Split(subdomain[0], "\">")[1]
finishedSub := strings.TrimRight(string_split, "</a>")
stringSplit := strings.Split(subdomain[0], "\">")[1]
finishedSub := strings.TrimRight(stringSplit, "</a>")
if state.Verbose == true {
if state.Color == true {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A Parser for subdomains from DNSDumpster
// Package dnsdumpster is a Parser for subdomains from DNSDumpster
package dnsdumpster
import (

@@ -17,7 +17,7 @@ import (
"regexp"
"strings"
"github.com/Ice3man543/subfinder/libsubfinder/helper"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Dogpile Subdomain Discovery
// Package dogpile is a golang client for Dogpile Subdomain Discovery
package dogpile
import (

@@ -54,15 +54,17 @@ func Query(args ...interface{}) interface{} {
match := helper.ExtractSubdomains(src, domain)
for _, subdomain := range match {
if state.Verbose == true {
if state.Color == true {
fmt.Printf("\n[%sDogpile%s] %s", helper.Red, helper.Reset, subdomain)
} else {
fmt.Printf("\n[Dogpile] %s", subdomain)
if helper.SubdomainExists(subdomain, subdomains) == false {
if state.Verbose == true {
if state.Color == true {
fmt.Printf("\n[%sDogpile%s] %s", helper.Red, helper.Reset, subdomain)
} else {
fmt.Printf("\n[Dogpile] %s", subdomain)
}
}
}
subdomains = append(subdomains, subdomain)
subdomains = append(subdomains, subdomain)
}
}
time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
}

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Exalead Subdomain Discovery
// Package exalead is a golang client for Exalead Subdomain Discovery
package exalead
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Golang driver from findsubdomains.com
// Package findsubdomains is a Golang driver for findsubdomains.com
package findsubdomains
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang based Hackertarget subdomains search client
// Package hackertarget is a golang based Hackertarget subdomains search client
package hackertarget
import (

@@ -38,7 +38,7 @@ func Query(args ...interface{}) interface{} {
}
// Get the response body
resp_body, err := ioutil.ReadAll(resp.Body)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !state.Silent {
fmt.Printf("\nhackertarget: %v\n", err)

@@ -46,7 +46,7 @@ func Query(args ...interface{}) interface{} {
return subdomains
}
scanner := bufio.NewScanner(strings.NewReader(string(resp_body)))
scanner := bufio.NewScanner(strings.NewReader(string(respBody)))
for scanner.Scan() {
subdomain := strings.Split(scanner.Text(), ",")[0]
subdomains = append(subdomains, subdomain)

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Ipv4Info
// Package ipv4info is a golang client for Ipv4Info
package ipv4info
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Netcraft Scraping Engine in Golang
// Package netcraft is a Netcraft Scraping Engine in Golang
package netcraft
import (

@@ -29,10 +29,10 @@ var gCookies []*http.Cookie
// Local function to recursively enumerate subdomains until no subdomains
// are left
func enumerate(state *helper.State, baseUrl string) (err error) {
func enumerate(state *helper.State, baseURL string) (err error) {
// Make a http request to Netcraft
resp, gCookies, err := helper.GetHTTPCookieResponse(baseUrl, gCookies, state.Timeout)
resp, gCookies, err := helper.GetHTTPCookieResponse(baseURL, gCookies, state.Timeout)
if err != nil {
if !state.Silent {
fmt.Printf("\nnetcraft: %v\n", err)

@@ -42,7 +42,7 @@ func enumerate(state *helper.State, baseUrl string) (err error) {
// Check all cookies for netcraft_js_verification_challenge
for i := 0; i < len(gCookies); i++ {
var curCookie *http.Cookie = gCookies[i]
var curCookie = gCookies[i]
if curCookie.Name == "netcraft_js_verification_challenge" {
// Get the current challenge string
challenge := url.QueryEscape(curCookie.Value)

@@ -94,8 +94,8 @@ func enumerate(state *helper.State, baseUrl string) (err error) {
// we have another page full of juicy subdomains
if strings.Contains(src, "Next page") {
// Checkout the link for the next page
re_next := regexp.MustCompile("<A href=\"(.*?)\"><b>Next page</b></a>")
match := re_next.FindStringSubmatch(src)
reNext := regexp.MustCompile("<A href=\"(.*?)\"><b>Next page</b></a>")
match := reNext.FindStringSubmatch(src)
// Replace spaces with + characters in URL Query since they don't allow request to happen
finalQuery := strings.Replace(match[1], " ", "+", -1)

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Passive total Subdomain Discovery
// Package passivetotal is a golang client for Passive total Subdomain Discovery
package passivetotal
import (

@@ -18,11 +18,11 @@ import (
"github.com/subfinder/subfinder/libsubfinder/helper"
)
type passivetotal_object struct {
type passivetotalObject struct {
Subdomains []string `json:"subdomains"`
}
var passivetotal_data passivetotal_object
var passivetotalData passivetotalObject
// all subdomains found
var subdomains []string

@@ -69,7 +69,7 @@ func Query(args ...interface{}) interface{} {
}
// Decode the json format
err = json.Unmarshal([]byte(body), &passivetotal_data)
err = json.Unmarshal([]byte(body), &passivetotalData)
if err != nil {
if !state.Silent {
fmt.Printf("\npassivetotal: %v\n", err)

@@ -78,7 +78,7 @@ func Query(args ...interface{}) interface{} {
}
// Append each subdomain found to subdomains array
for _, subdomain := range passivetotal_data.Subdomains {
for _, subdomain := range passivetotalData.Subdomains {
finalSubdomain := subdomain + "." + domain
if state.Verbose == true {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A PTRArchive subdomain parser in golang
// Package ptrarchive is a PTRArchive subdomain parser in golang
package ptrarchive
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A Parser for subdomains from Riddler
// Package riddler is a Parser for subdomains from Riddler
package riddler
import (

@@ -21,7 +21,7 @@ import (
type authentication struct {
Response struct {
User struct {
Authentication_token string `json:"authentication_token"`
AuthenticationToken string `json:"authentication_token"`
} `json:"user"`
} `json:"response"`
}

@@ -70,7 +70,7 @@ func Query(args ...interface{}) interface{} {
return subdomains
}
if auth.Response.User.Authentication_token == "" {
if auth.Response.User.AuthenticationToken == "" {
if !state.Silent {
fmt.Printf("\nriddler: %v\n", "failed to get authentication token")
}

@@ -81,7 +81,7 @@ func Query(args ...interface{}) interface{} {
req, err = http.NewRequest("POST", "https://riddler.io/api/search", bytes.NewBuffer(data))
req.Header.Add("Content-Type", "application/json")
req.Header.Add("Authentication-Token", auth.Response.User.Authentication_token)
req.Header.Add("Authentication-Token", auth.Response.User.AuthenticationToken)
resp, err = hc.Do(req)

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang SecurityTrails API client for subdomain discovery.
// Package securitytrails is a golang SecurityTrails API client for subdomain discovery.
package securitytrails
import (

@@ -17,11 +17,11 @@ import (
"github.com/subfinder/subfinder/libsubfinder/helper"
)
type securitytrails_object struct {
type securitytrailsObject struct {
Subdomains []string `json:"subdomains"`
}
var securitytrails_data securitytrails_object
var securitytrailsData securitytrailsObject
// all subdomains found
var subdomains []string

@@ -61,7 +61,7 @@ func Query(args ...interface{}) interface{} {
}
// Decode the json format
err = json.Unmarshal([]byte(body), &securitytrails_data)
err = json.Unmarshal([]byte(body), &securitytrailsData)
if err != nil {
if !state.Silent {
fmt.Printf("\nsecuritytrails: %v\n", err)

@@ -70,7 +70,7 @@ func Query(args ...interface{}) interface{} {
}
// Append each subdomain found to subdomains array
for _, subdomain := range securitytrails_data.Subdomains {
for _, subdomain := range securitytrailsData.Subdomains {
finalSubdomain := subdomain + "." + domain
if state.Verbose == true {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Shodan.io
// Package shodan is a golang client for Shodan.io
package shodan
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// Sitedossier Scraping Engine in Golang
// Package sitedossier is a Sitedossier Scraping Engine in Golang
package sitedossier
import (

@@ -21,8 +21,8 @@ import (
// Contains all subdomains found
var globalSubdomains []string
func enumerate(state *helper.State, baseUrl string, domain string) (err error) {
resp, err := helper.GetHTTPResponse(baseUrl, state.Timeout)
func enumerate(state *helper.State, baseURL string, domain string) (err error) {
resp, err := helper.GetHTTPResponse(baseURL, state.Timeout)
if err != nil {
return err
}

@@ -55,8 +55,8 @@ func enumerate(state *helper.State, baseUrl string, domain string) (err error) {
time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
re_next := regexp.MustCompile("<a href=\"(.*)\"><b>.*</b></a><br>")
match1 := re_next.FindStringSubmatch(src)
reNext := regexp.MustCompile("<a href=\"(.*)\"><b>.*</b></a><br>")
match1 := reNext.FindStringSubmatch(src)
if len(match1) > 0 {
enumerate(state, "http://www.sitedossier.com"+match1[1], domain)

@@ -5,7 +5,8 @@
// Copyrights (C) 2018 Ice3man
//
// A Golang based client
// Package sslcertificates is a Golang based client for SAN discovery
// TODO: Replace with a better one parsing SAN's from SSLMate or something else.
package sslcertificates
import (

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A Golang based client for Threatcrowd API
// Package threatcrowd is a Golang based client for Threatcrowd API
package threatcrowd
import (

@@ -18,12 +18,12 @@ import (
)
// Struct containing json data we actually need
type threatcrowd_object struct {
type threatcrowdObject struct {
Subdomains []string `json:"subdomains"`
}
// array of all results returned
var threatcrowd_data threatcrowd_object
var threatcrowdData threatcrowdObject
// all subdomains found
var subdomains []string

@@ -44,7 +44,7 @@ func Query(args ...interface{}) interface{} {
}
// Get the response body
resp_body, err := ioutil.ReadAll(resp.Body)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !state.Silent {
fmt.Printf("\nthreatcrowd: %v\n", err)

@@ -53,7 +53,7 @@ func Query(args ...interface{}) interface{} {
}
// Decode the json format
err = json.Unmarshal([]byte(resp_body), &threatcrowd_data)
err = json.Unmarshal([]byte(respBody), &threatcrowdData)
if err != nil {
if !state.Silent {
fmt.Printf("\nthreatcrowd: %v\n", err)

@@ -62,7 +62,7 @@ func Query(args ...interface{}) interface{} {
}
// Append each subdomain found to subdomains array
for _, subdomain := range threatcrowd_data.Subdomains {
for _, subdomain := range threatcrowdData.Subdomains {
// Fix Wildcard subdomains containg asterisk before them
if strings.Contains(subdomain, "*.") {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A ThreatMiner subdomain parser in golang
// Package threatminer is a Threatminer subdomain parser in golang
package threatminer
import (

@@ -5,11 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// NOTE : We are using Virustotal API here Since we wanted to eliminate the
// rate limiting performed by Virustotal on scraping.
// Direct queries and parsing can be also done :-)
// A Virustotal Client for Subdomain Enumeration
// Package virustotal is a golang Client for Subdomain Enumeration
package virustotal
import (

@@ -21,15 +17,15 @@ import (
"github.com/subfinder/subfinder/libsubfinder/helper"
)
type virustotalapi_object struct {
type virustotalapiObject struct {
Subdomains []string `json:"subdomains"`
}
var virustotalapi_data virustotalapi_object
var virustotalapiData virustotalapiObject
// Local function to query virustotal API
// Requires an API key
func queryVirustotalApi(domain string, state *helper.State) (subdomains []string, err error) {
func queryVirustotalAPI(domain string, state *helper.State) (subdomains []string, err error) {
// Make a search for a domain name and get HTTP Response
resp, err := helper.GetHTTPResponse("https://www.virustotal.com/vtapi/v2/domain/report?apikey="+state.ConfigState.VirustotalAPIKey+"&domain="+domain, state.Timeout)

@@ -38,19 +34,19 @@ func queryVirustotalApi(domain string, state *helper.State) (subdomains []string
}
// Get the response body
resp_body, err := ioutil.ReadAll(resp.Body)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
return subdomains, err
}
// Decode the json format
err = json.Unmarshal([]byte(resp_body), &virustotalapi_data)
err = json.Unmarshal([]byte(respBody), &virustotalapiData)
if err != nil {
return subdomains, err
}
// Append each subdomain found to subdomains array
for _, subdomain := range virustotalapi_data.Subdomains {
for _, subdomain := range virustotalapiData.Subdomains {
// Fix Wildcard subdomains containg asterisk before them
if strings.Contains(subdomain, "*.") {

@@ -84,7 +80,7 @@ func Query(args ...interface{}) interface{} {
if state.ConfigState.VirustotalAPIKey != "" {
// Get subdomains via API
subdomains, err := queryVirustotalApi(domain, state)
subdomains, err := queryVirustotalAPI(domain, state)
if err != nil {
if !state.Silent {

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A Golang based client for Parsing Subdomains from Waybackarchive
// Package waybackarchive is a Golang based client for Parsing Subdomains from Waybackarchive
package waybackarchive
import (

@@ -36,7 +36,7 @@ func Query(args ...interface{}) interface{} {
}
// Get the response body
resp_body, err := ioutil.ReadAll(resp.Body)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
if !state.Silent {
fmt.Printf("\nwaybackarchive: %v\n", err)

@@ -47,7 +47,7 @@ func Query(args ...interface{}) interface{} {
var urls [][]string
// Decode the json format
err = json.Unmarshal([]byte(resp_body), &urls)
err = json.Unmarshal([]byte(respBody), &urls)
if err != nil {
if !state.Silent {
fmt.Printf("\nwaybackarchive: %v\n", err)

@@ -5,7 +5,7 @@
// Copyrights (C) 2018 Ice3man
//
// A golang client for Yahoo Subdomain Discovery
// Package yahoo is a golang client for Yahoo Subdomain Discovery
package yahoo
import (