Merge pull request #186 from projectdiscovery/v2.0

New subfinder V2.0
master
Ice3man 2019-12-05 02:46:22 -08:00 committed by GitHub
commit 93f5b8d6b7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
97 changed files with 3230 additions and 5556 deletions

36
.gitignore vendored
View File

@ -1,34 +1,2 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# JSON Configuration data
config.json
# Executable File
process
subfinder
main
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# VIM swap files
*.swp
*.swo
# VSCode
.vscode/
# Build Contents
*.zip
checksums.txt
build/
vendor/
.DS_Store
cmd/subfinder/subfinder

View File

@ -1,45 +0,0 @@
language: go
# Only the last two Go releases are supported by the Go team with security
# updates. Any versions older than that should be considered deprecated.
# Don't bother testing with them. tip builds your code with the latest
# development version of Go. This can warn you that your code will break
# in the next version of Go. Don't worry! Later we declare that test runs
# are allowed to fail on Go tip.
go:
- 1.10.2
- master
# Skip the install step. Don't `go get` dependencies. Only build with the
# code in vendor/
install: false
matrix:
# It's ok if our code fails on unstable development versions of Go.
allow_failures:
- go: master
# Don't wait for tip tests to finish. Mark the test run green if the
# tests pass on the stable versions of Go.
fast_finish: true
# Don't email me the results of the test runs.
notifications:
email: false
# Anything in before_script that returns a nonzero exit code will
# flunk the build and immediately stop. It's sorta like having
# set -e enabled in bash.
before_script:
- GO_FILES=$(find . -iname '*.go' -type f | grep -v /vendor/) # All the .go files, excluding vendor/
- go get golang.org/x/lint/golint # Linter
- go get honnef.co/go/tools/cmd/megacheck # Badass static analyzer/linter
# - go get github.com/fzipp/gocyclo
# script always run to completion (set +e). All of these code checks are must haves
# in a modern Go project.
script:
# - test -z $(gofmt -s -l $GO_FILES) # Fail if a .go file hasn't been formatted with gofmt
# - go test -v -race ./... # Run all the tests with the race detector enabled
- go vet ./... # go vet is the official Go static analyzer
# - megacheck ./... # "go vet on steroids" + linter
# - gocyclo -over 19 $GO_FILES # forbid code with huge functions

View File

@ -9,6 +9,6 @@ COPY Gopkg.toml Gopkg.lock ./
RUN dep ensure -vendor-only -v
# Install
RUN go get -u github.com/subfinder/subfinder
RUN go get -u github.com/projectdiscovery/subfinder
ENTRYPOINT ["subfinder"]

150
Gopkg.lock generated
View File

@ -1,150 +0,0 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
branch = "master"
digest = "1:fecb89fd185fd281d6fd0fcd698c8d6bfb628cbb355808901500b414232af60d"
name = "github.com/bogdanovich/dns_resolver"
packages = ["."]
pruneopts = "UT"
revision = "a8e42bc6a5b6c9a93be01ca204be7e17f7ba4cd2"
[[projects]]
digest = "1:1b46adc9e3d878cdf38a164cfdac2e19340f4d2662aa5bee88062f6ee08ac9df"
name = "github.com/miekg/dns"
packages = ["."]
pruneopts = "UT"
revision = "8fc2e5773bbd308ca2fcc962fd8d25c1bd0f6743"
version = "v1.1.4"
[[projects]]
digest = "1:d9bf2a9b88c0e37e5871a9bce498acd918ff7ff136fb0b2b9c943a3ffffc8103"
name = "github.com/subfinder/subfinder"
packages = [
"libsubfinder/engines/bruteforce",
"libsubfinder/engines/passive",
"libsubfinder/engines/resolver",
"libsubfinder/helper",
"libsubfinder/output",
"libsubfinder/sources/archiveis",
"libsubfinder/sources/ask",
"libsubfinder/sources/baidu",
"libsubfinder/sources/bing",
"libsubfinder/sources/censys",
"libsubfinder/sources/certdb",
"libsubfinder/sources/certificatetransparency",
"libsubfinder/sources/certspotter",
"libsubfinder/sources/commoncrawl",
"libsubfinder/sources/crtsh",
"libsubfinder/sources/dnsdb",
"libsubfinder/sources/dnsdumpster",
"libsubfinder/sources/dnstable",
"libsubfinder/sources/dogpile",
"libsubfinder/sources/exalead",
"libsubfinder/sources/findsubdomains",
"libsubfinder/sources/googleter",
"libsubfinder/sources/hackertarget",
"libsubfinder/sources/ipv4info",
"libsubfinder/sources/passivetotal",
"libsubfinder/sources/ptrarchive",
"libsubfinder/sources/riddler",
"libsubfinder/sources/securitytrails",
"libsubfinder/sources/shodan",
"libsubfinder/sources/sitedossier",
"libsubfinder/sources/threatcrowd",
"libsubfinder/sources/threatminer",
"libsubfinder/sources/virustotal",
"libsubfinder/sources/waybackarchive",
"libsubfinder/sources/yahoo",
"subf",
]
pruneopts = "UT"
revision = "b89ce975d1a578eddd14271542a5ed2296a06bb8"
version = "1.2"
[[projects]]
branch = "master"
digest = "1:a551fb43ccfdc565b3749b218904c728eab00c99a7417a0860b5e58fe5a9c9db"
name = "github.com/subfinder/urlx"
packages = ["."]
pruneopts = "UT"
revision = "8e731c8be06edbae81cab15937cd3c291c2a7680"
[[projects]]
branch = "master"
digest = "1:d5891c5bca9c62e5d394ca26491d2b710a1dc08cedeb0ca8f9ac4c3305120b02"
name = "golang.org/x/crypto"
packages = [
"ed25519",
"ed25519/internal/edwards25519",
]
pruneopts = "UT"
revision = "7f87c0fbb88b590338857bcb720678c2583d4dea"
[[projects]]
branch = "master"
digest = "1:19beed19e4246df7aff387a2bcd4519a386e3fe9637690031c4d4b0cf75f7215"
name = "golang.org/x/net"
packages = [
"bpf",
"internal/iana",
"internal/socket",
"ipv4",
"ipv6",
]
pruneopts = "UT"
revision = "fe579d43d83210096a79b46dcca0e3721058393a"
[[projects]]
branch = "master"
digest = "1:d0e9a312c4610a508569ab25e54d34600b1a96d19b1866ece104ffdf1c1b9d2c"
name = "golang.org/x/sys"
packages = ["unix"]
pruneopts = "UT"
revision = "cc5685c2db1239775905f3911f0067c0fa74762f"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
"github.com/bogdanovich/dns_resolver",
"github.com/subfinder/subfinder/libsubfinder/engines/bruteforce",
"github.com/subfinder/subfinder/libsubfinder/engines/passive",
"github.com/subfinder/subfinder/libsubfinder/engines/resolver",
"github.com/subfinder/subfinder/libsubfinder/helper",
"github.com/subfinder/subfinder/libsubfinder/output",
"github.com/subfinder/subfinder/libsubfinder/sources/archiveis",
"github.com/subfinder/subfinder/libsubfinder/sources/ask",
"github.com/subfinder/subfinder/libsubfinder/sources/baidu",
"github.com/subfinder/subfinder/libsubfinder/sources/bing",
"github.com/subfinder/subfinder/libsubfinder/sources/censys",
"github.com/subfinder/subfinder/libsubfinder/sources/certdb",
"github.com/subfinder/subfinder/libsubfinder/sources/certificatetransparency",
"github.com/subfinder/subfinder/libsubfinder/sources/certspotter",
"github.com/subfinder/subfinder/libsubfinder/sources/commoncrawl",
"github.com/subfinder/subfinder/libsubfinder/sources/crtsh",
"github.com/subfinder/subfinder/libsubfinder/sources/dnsdb",
"github.com/subfinder/subfinder/libsubfinder/sources/dnsdumpster",
"github.com/subfinder/subfinder/libsubfinder/sources/dnstable",
"github.com/subfinder/subfinder/libsubfinder/sources/dogpile",
"github.com/subfinder/subfinder/libsubfinder/sources/exalead",
"github.com/subfinder/subfinder/libsubfinder/sources/findsubdomains",
"github.com/subfinder/subfinder/libsubfinder/sources/googleter",
"github.com/subfinder/subfinder/libsubfinder/sources/hackertarget",
"github.com/subfinder/subfinder/libsubfinder/sources/ipv4info",
"github.com/subfinder/subfinder/libsubfinder/sources/passivetotal",
"github.com/subfinder/subfinder/libsubfinder/sources/ptrarchive",
"github.com/subfinder/subfinder/libsubfinder/sources/riddler",
"github.com/subfinder/subfinder/libsubfinder/sources/securitytrails",
"github.com/subfinder/subfinder/libsubfinder/sources/shodan",
"github.com/subfinder/subfinder/libsubfinder/sources/sitedossier",
"github.com/subfinder/subfinder/libsubfinder/sources/threatcrowd",
"github.com/subfinder/subfinder/libsubfinder/sources/threatminer",
"github.com/subfinder/subfinder/libsubfinder/sources/virustotal",
"github.com/subfinder/subfinder/libsubfinder/sources/waybackarchive",
"github.com/subfinder/subfinder/libsubfinder/sources/yahoo",
"github.com/subfinder/subfinder/subf",
"github.com/subfinder/urlx",
]
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -1,42 +0,0 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
#
# [prune]
# non-go = false
# go-tests = true
# unused-packages = true
[[constraint]]
branch = "master"
name = "github.com/bogdanovich/dns_resolver"
[[constraint]]
name = "github.com/subfinder/subfinder"
version = "1.2.0"
[[constraint]]
branch = "master"
name = "github.com/subfinder/urlx"
[prune]
go-tests = true
unused-packages = true

View File

@ -1,8 +1,7 @@
# SubFinder
[![License](https://img.shields.io/badge/license-MIT-_red.svg)](https://opensource.org/licenses/MIT)
[![Build Status](https://travis-ci.org/subfinder/subfinder.svg?branch=master)](https://travis-ci.org/subfinder/subfinder)
[![Go Report Card](https://goreportcard.com/badge/github.com/subfinder/subfinder)](https://goreportcard.com/report/github.com/subfinder/subfinder)
[![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/subfinder/subfinder/issues)
[![Go Report Card](https://goreportcard.com/badge/github.com/projectdiscovery/subfinder)](https://goreportcard.com/report/github.com/projectdiscovery/subfinder)
[![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/projectdiscovery/subfinder/issues)
SubFinder is a subdomain discovery tool that discovers valid subdomains for websites by using passive online sources. It has a simple modular architecture and has been aimed as a successor to sublist3r project. SubFinder uses Passive Sources, Search Engines, Pastebins, Internet Archives, etc to find subdomains and then it uses a permutation module inspired by altdns to generate permutations and resolve them quickly using a powerful bruteforcing engine. It can also perform plain bruteforce if needed. The tool is highly customizable, and the code is built with a modular approach in mind making it easy to add functionalities and remove errors.
@ -74,19 +73,19 @@ This will display help for the tool. Here are all the switches it supports.
The installation is easy. Git clone the repo and run go build.
```bash
go get github.com/subfinder/subfinder
go get github.com/projectdiscovery/subfinder
```
## Upgrading
If you wish to upgrade the package you can use:
```bash
go get -u github.com/subfinder/subfinder
go get -u github.com/projectdiscovery/subfinder
```
## Running in a Docker Container
Git clone the repo, then build and run subfinder in a container with the following commands
- Clone the repo using `git clone https://github.com/subfinder/subfinder.git`
- Clone the repo using `git clone https://github.com/projectdiscovery/subfinder.git`
- Build your docker container
```bash
docker build -t subfinder .

View File

@ -4,8 +4,8 @@ Many people have contributed to subfinder making it a wonderful tool either by m
donations to support the further development of this tool. Here, we recognize these persons and thank them.
### Donations
- @infosec-au - A really generous donation
- @infosec-au - Donating to the project
### Code
- @FranticFerret - For Working on initial docker support.
- @himanshudas - For Fixing DnsDB regex
- @himanshudas - For Fixing DnsDB regex

153
build.sh
View File

@ -1,153 +0,0 @@
#!/bin/bash
#
# Cross-compilation build script for subfinder. Builds the binary for
# several OS/architecture combinations, zips each build together with
# the README and LICENSE into $BUILD_FOLDER, and writes a SHA-256
# checksums.txt covering the produced archives.

BUILD_FOLDER=build
VERSION="1.0"

# bin_dep BIN - abort the whole script unless BIN is available in PATH.
bin_dep() {
    BIN=$1
    which $BIN > /dev/null || { echo "@ Dependency $BIN not found !"; exit 1; }
}

# host_dep HOST - abort unless the remote build VM HOST answers a ping.
host_dep() {
    HOST=$1
    ping -c 1 $HOST > /dev/null || { echo "@ Virtual machine host $HOST not visible !"; exit 1; }
}

# create_exe_archive OUTPUT - zip the Windows build (subfinder.exe)
# plus README/LICENSE into OUTPUT, then remove the raw binaries.
create_exe_archive() {
    bin_dep 'zip'
    OUTPUT=$1
    echo "@ Creating archive $OUTPUT ..."
    zip -j "$OUTPUT" subfinder.exe ../README.md ../LICENSE > /dev/null
    rm -rf subfinder subfinder.exe
}

# create_archive OUTPUT - zip the Unix build (subfinder) plus
# README/LICENSE into OUTPUT, then remove the raw binaries.
# NOTE(review): references ../LICENSE.md while create_exe_archive uses
# ../LICENSE - confirm which file actually exists in the repo.
create_archive() {
    bin_dep 'zip'
    OUTPUT=$1
    echo "@ Creating archive $OUTPUT ..."
    zip -j "$OUTPUT" subfinder ../README.md ../LICENSE.md > /dev/null
    rm -rf subfinder subfinder.exe
}

# Native build for linux/amd64; runs inside $BUILD_FOLDER with the
# sources one directory up.
build_linux_amd64() {
    echo "@ Building linux/amd64 ..."
    go build -o subfinder ..
}

# Static cross-builds for ARM/MIPS variants below. Each requires the
# matching cross toolchain (CC=...) to be installed on the host.
# NOTE(review): in all of them, OLD=$(pwd) followed immediately by
# cd "$OLD" is a no-op - presumably leftover from an earlier version.
build_linux_arm7_static() {
    OLD=$(pwd)
    echo "@ Building linux/arm7 ..."
    cd "$OLD"
    env CC=arm-linux-gnueabi-gcc CGO_ENABLED=1 GOOS=linux GOARCH=arm GOARM=7 CGO_LDFLAGS="$CROSS_LIB" go build -o subfinder ..
}

build_linux_arm7hf_static() {
    OLD=$(pwd)
    echo "@ Building linux/arm7hf ..."
    cd "$OLD"
    env CC=arm-linux-gnueabihf-gcc CGO_ENABLED=1 GOOS=linux GOARCH=arm GOARM=7 CGO_LDFLAGS="$CROSS_LIB" go build -o subfinder ..
}

build_linux_mips_static() {
    OLD=$(pwd)
    echo "@ Building linux/mips ..."
    cd "$OLD"
    env CC=mips-linux-gnu-gcc CGO_ENABLED=1 GOOS=linux GOARCH=mips CGO_LDFLAGS="$CROSS_LIB" go build -o subfinder ..
}

build_linux_mipsle_static() {
    OLD=$(pwd)
    echo "@ Building linux/mipsle ..."
    cd "$OLD"
    env CC=mipsel-linux-gnu-gcc CGO_ENABLED=1 GOOS=linux GOARCH=mipsle CGO_LDFLAGS="$CROSS_LIB" go build -o subfinder ..
}

build_linux_mips64_static() {
    OLD=$(pwd)
    echo "@ Building linux/mips64 ..."
    cd "$OLD"
    env CC=mips64-linux-gnuabi64-gcc CGO_ENABLED=1 GOOS=linux GOARCH=mips64 CGO_LDFLAGS="$CROSS_LIB" go build -o subfinder ..
}

build_linux_mips64le_static() {
    OLD=$(pwd)
    echo "@ Building linux/mips64le ..."
    cd "$OLD"
    env CC=mips64el-linux-gnuabi64-gcc CGO_ENABLED=1 GOOS=linux GOARCH=mips64le CGO_LDFLAGS="$CROSS_LIB" go build -o subfinder ..
}

# Remote build on a macOS VM reachable over ssh as "osxvm"; the binary
# is copied back with scp.
build_macos_amd64() {
    host_dep 'osxvm'
    DIR=/root/go/src/github.com/Ice3man543/subfinder
    echo "@ Updating repo on MacOS VM ..."
    ssh osxvm "cd $DIR && rm -rf '$OUTPUT' && git pull" > /dev/null
    echo "@ Building darwin/amd64 ..."
    ssh osxvm "export GOPATH=/Users/evilsocket/gocode && cd '$DIR' && PATH=$PATH:/usr/local/bin && go get ./... && go build -o subfinder ." > /dev/null
    scp -C osxvm:$DIR/subfinder . > /dev/null
}

# Remote build on a Windows VM reachable over ssh as "winvm".
build_windows_amd64() {
    host_dep 'winvm'
    DIR=c:/Users/codingo/gopath/src/github.com/subfinder/subfinder
    echo "@ Updating repo on Windows VM ..."
    ssh winvm "cd $DIR && git pull && go get ./..." > /dev/null
    echo "@ Building windows/amd64 ..."
    ssh winvm "cd $DIR && go build -o subfinder.exe ." > /dev/null
    scp -C winvm:$DIR/subfinder.exe . > /dev/null
}

# Remote build on an Android device (Termux) reachable as "shield"
# over ssh on port 8022.
build_android_arm() {
    host_dep 'shield'
    DIR=/data/data/com.termux/files/home/go/src/github.com/subfinder/subfinder
    echo "@ Updating repo on Android host ..."
    ssh -p 8022 root@shield "cd "$DIR" && rm -rf subfinder* && git pull && go get ./..."
    echo "@ Building android/arm ..."
    ssh -p 8022 root@shield "cd $DIR && go build -o subfinder ."
    echo "@ Downloading subfinder ..."
    scp -C -P 8022 root@shield:$DIR/subfinder .
}

# Main build sequence: start from a clean build folder, build each
# enabled target and archive it (remote targets are commented out),
# then checksum everything and report archive sizes.
rm -rf $BUILD_FOLDER
mkdir $BUILD_FOLDER
cd $BUILD_FOLDER

build_linux_amd64 && create_archive subfinder_linux_amd64_$VERSION.zip
#build_macos_amd64 && create_archive subfinder_macos_amd64_$VERSION.zip
#build_android_arm && create_archive subfinder_android_arm_$VERSION.zip
#build_windows_amd64 && create_exe_archive subfinder_windows_amd64_$VERSION.zip
build_linux_arm7_static && create_archive subfinder_linux_arm7_$VERSION.zip
# build_linux_arm7hf_static && create_archive subfinder_linux_arm7hf_$VERSION.zip
build_linux_mips_static && create_archive subfinder_linux_mips_$VERSION.zip
build_linux_mipsle_static && create_archive subfinder_linux_mipsle_$VERSION.zip
build_linux_mips64_static && create_archive subfinder_linux_mips64_$VERSION.zip
build_linux_mips64le_static && create_archive subfinder_linux_mips64le_$VERSION.zip

sha256sum * > checksums.txt

echo
echo
du -sh *
# NOTE(review): "cd --" changes to $HOME; "cd -" (return to previous
# directory) was probably intended here.
cd --

21
cmd/subfinder/main.go Normal file
View File

@ -0,0 +1,21 @@
package main
import (
"github.com/projectdiscovery/subfinder/pkg/log"
"github.com/projectdiscovery/subfinder/pkg/runner"
)
// main is the entry point for subfinder: it parses the command line
// options and config files, builds a runner from them and starts the
// enumeration, exiting with a fatal log message on any error.
func main() {
	// Parse the command line flags and read config files
	options := runner.ParseOptions()

	// Name the instance "instance" rather than "runner" so the local
	// variable does not shadow the imported runner package.
	instance, err := runner.NewRunner(options)
	if err != nil {
		log.Fatalf("Could not create runner: %s\n", err)
	}

	err = instance.RunEnumeration()
	if err != nil {
		log.Fatalf("Could not run enumeration: %s\n", err)
	}
}

View File

@ -1,11 +0,0 @@
{
"virustotalApikey": "",
"passivetotalUsername": "",
"passivetotalKey": "",
"securitytrailsKey": "",
"riddlerEmail": "",
"riddlerPassword": "",
"censysUsername": "",
"censysSecret": "",
"shodanApiKey": ""
}

View File

@ -1,76 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// Package bruteforce is a fast bruteforce implementation in golang
package bruteforce
import (
"fmt"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// consume resolves a single bruteforce candidate. It expects three
// arguments: the candidate word, the shared enumeration state and the
// target domain. It returns the first resolved IP address for
// "<word>.<domain>", or an empty string when the host does not resolve
// or matches a known wildcard IP.
func consume(args ...interface{}) interface{} {
	target := args[0].(string)
	state := args[1].(*helper.State)
	domain := args[2].(string)

	host := fmt.Sprintf("%s.%s", target, domain)
	ips, err := helper.ResolveHost(host)
	if err != nil {
		return ""
	}
	if len(ips) == 0 {
		// We didn't find any ips
		return ""
	}

	// Discard hosts that resolve to the domain's wildcard IPs.
	if state.IsWildcard && helper.CheckWildcard(state, ips) {
		return ""
	}

	// Flattened from the original nested if-statements: output only in
	// verbose, non-silent mode.
	if !state.Silent && state.Verbose {
		fmt.Printf("\n[%sBRUTE%s] %s : %s", helper.Info, helper.Reset, host, ips[0])
	}
	return ips[0]
}
// Brute resolves a wordlist of subdomain candidates for domain using a
// worker pool sized by state.Threads, and returns the candidates that
// resolved to at least one IP address.
func Brute(state *helper.State, list []string, domain string) (subdomains []helper.Domain) {
	brutePool := helper.NewPool(state.Threads)
	brutePool.Run()

	// Queue one resolution job per candidate word.
	for _, target := range list {
		brutePool.Add(consume, target, state, domain)
	}
	brutePool.Wait()

	// Collect the successfully resolved hosts. (Renamed from the
	// exported-looking local "ValidSubdomains" to follow Go naming.)
	var validSubdomains []helper.Domain
	for _, job := range brutePool.Results() {
		if job.Result != "" {
			fqdn := job.Args[0].(string)
			ip := job.Result.(string)
			validSubdomains = append(validSubdomains, helper.Domain{IP: ip, Fqdn: fmt.Sprintf("%s.%s", fqdn, domain)})
		}
	}
	brutePool.Stop()
	return validSubdomains
}

View File

@ -1,780 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// Package passive is the main core of the program
package passive
import (
"bufio"
"fmt"
"os"
"reflect"
"sort"
"strings"
"github.com/bogdanovich/dns_resolver"
"github.com/subfinder/subfinder/libsubfinder/engines/bruteforce"
"github.com/subfinder/subfinder/libsubfinder/engines/resolver"
"github.com/subfinder/subfinder/libsubfinder/helper"
"github.com/subfinder/subfinder/libsubfinder/output"
// Load different Passive data sources
"github.com/subfinder/subfinder/libsubfinder/sources/archiveis"
"github.com/subfinder/subfinder/libsubfinder/sources/ask"
"github.com/subfinder/subfinder/libsubfinder/sources/baidu"
"github.com/subfinder/subfinder/libsubfinder/sources/bing"
"github.com/subfinder/subfinder/libsubfinder/sources/censys"
"github.com/subfinder/subfinder/libsubfinder/sources/certdb"
"github.com/subfinder/subfinder/libsubfinder/sources/certificatetransparency"
"github.com/subfinder/subfinder/libsubfinder/sources/certspotter"
"github.com/subfinder/subfinder/libsubfinder/sources/commoncrawl"
"github.com/subfinder/subfinder/libsubfinder/sources/crtsh"
"github.com/subfinder/subfinder/libsubfinder/sources/dnsdb"
"github.com/subfinder/subfinder/libsubfinder/sources/dnsdumpster"
"github.com/subfinder/subfinder/libsubfinder/sources/dnstable"
"github.com/subfinder/subfinder/libsubfinder/sources/dogpile"
"github.com/subfinder/subfinder/libsubfinder/sources/exalead"
"github.com/subfinder/subfinder/libsubfinder/sources/findsubdomains"
"github.com/subfinder/subfinder/libsubfinder/sources/googleter"
"github.com/subfinder/subfinder/libsubfinder/sources/hackertarget"
"github.com/subfinder/subfinder/libsubfinder/sources/ipv4info"
"github.com/subfinder/subfinder/libsubfinder/sources/passivetotal"
"github.com/subfinder/subfinder/libsubfinder/sources/ptrarchive"
"github.com/subfinder/subfinder/libsubfinder/sources/riddler"
"github.com/subfinder/subfinder/libsubfinder/sources/securitytrails"
"github.com/subfinder/subfinder/libsubfinder/sources/shodan"
"github.com/subfinder/subfinder/libsubfinder/sources/sitedossier"
"github.com/subfinder/subfinder/libsubfinder/sources/threatcrowd"
"github.com/subfinder/subfinder/libsubfinder/sources/threatminer"
"github.com/subfinder/subfinder/libsubfinder/sources/virustotal"
"github.com/subfinder/subfinder/libsubfinder/sources/waybackarchive"
"github.com/subfinder/subfinder/libsubfinder/sources/yahoo"
)
// DomainList contains the list of domains to enumerate.
var DomainList []string
// Source is the configuration structure specifying which passive data
// sources should be used for subdomain discovery. Each boolean field
// enables or disables a single source; the lowercase string names
// accepted on the command line map one-to-one onto these fields.
type Source struct {
	Ask                     bool
	Archiveis               bool
	Baidu                   bool
	Bing                    bool
	Censys                  bool
	Certdb                  bool
	Commoncrawl             bool
	Crtsh                   bool
	Certspotter             bool
	Dnsdb                   bool
	Dnstable                bool
	Dnsdumpster             bool
	Findsubdomains          bool
	Googleter               bool
	Hackertarget            bool
	Passivetotal            bool
	Ptrarchive              bool
	Riddler                 bool
	Securitytrails          bool
	Sitedossier             bool
	Threatcrowd             bool
	Threatminer             bool
	Virustotal              bool
	Waybackarchive          bool
	CertificateTransparency bool
	Ipv4Info                bool
	Yahoo                   bool
	Dogpile                 bool
	Exalead                 bool
	Shodan                  bool
}
// enableAll turns on every passive data source. Since every field of
// Source is a bool flag, iterating the struct via reflection sets all
// of them without having to list each field by name.
func (s *Source) enableAll() {
	v := reflect.ValueOf(s).Elem()
	for i := 0; i < v.NumField(); i++ {
		if field := v.Field(i); field.Kind() == reflect.Bool {
			field.SetBool(true)
		}
	}
}
// enable turns on the sources whose lowercase names appear in
// dataSources. Unknown source names are silently ignored, exactly as
// the original switch-based implementation did.
func (s *Source) enable(dataSources []string) {
	flags := map[string]*bool{
		"ask":                     &s.Ask,
		"archiveis":               &s.Archiveis,
		"baidu":                   &s.Baidu,
		"bing":                    &s.Bing,
		"censys":                  &s.Censys,
		"certdb":                  &s.Certdb,
		"certspotter":             &s.Certspotter,
		"commoncrawl":             &s.Commoncrawl,
		"crtsh":                   &s.Crtsh,
		"dnsdb":                   &s.Dnsdb,
		"dnsdumpster":             &s.Dnsdumpster,
		"dnstable":                &s.Dnstable,
		"findsubdomains":          &s.Findsubdomains,
		"googleter":               &s.Googleter,
		"hackertarget":            &s.Hackertarget,
		"passivetotal":            &s.Passivetotal,
		"ptrarchive":              &s.Ptrarchive,
		"riddler":                 &s.Riddler,
		"securitytrails":          &s.Securitytrails,
		"sitedossier":             &s.Sitedossier,
		"threatcrowd":             &s.Threatcrowd,
		"threatminer":             &s.Threatminer,
		"virustotal":              &s.Virustotal,
		"waybackarchive":          &s.Waybackarchive,
		"certificatetransparency": &s.CertificateTransparency,
		"ipv4info":                &s.Ipv4Info,
		"yahoo":                   &s.Yahoo,
		"dogpile":                 &s.Dogpile,
		"exalead":                 &s.Exalead,
		"shodan":                  &s.Shodan,
	}
	for _, source := range dataSources {
		if flag, ok := flags[source]; ok {
			*flag = true
		}
	}
}
// disable turns off the sources whose lowercase names appear in
// dataSources. The special name "all" disables every source at once.
// Unknown source names are silently ignored.
//
// Fixes a bug in the original switch: the "exalead" case cleared
// s.Dogpile instead of s.Exalead, so exalead could never be disabled
// individually (and dogpile was disabled by mistake). The "all" branch
// also set s.Dogpile twice; the map covers each field exactly once.
func (s *Source) disable(dataSources []string) {
	flags := map[string]*bool{
		"ask":                     &s.Ask,
		"archiveis":               &s.Archiveis,
		"baidu":                   &s.Baidu,
		"bing":                    &s.Bing,
		"censys":                  &s.Censys,
		"certdb":                  &s.Certdb,
		"certspotter":             &s.Certspotter,
		"commoncrawl":             &s.Commoncrawl,
		"crtsh":                   &s.Crtsh,
		"dnsdb":                   &s.Dnsdb,
		"dnsdumpster":             &s.Dnsdumpster,
		"dnstable":                &s.Dnstable,
		"findsubdomains":          &s.Findsubdomains,
		"googleter":               &s.Googleter,
		"hackertarget":            &s.Hackertarget,
		"passivetotal":            &s.Passivetotal,
		"ptrarchive":              &s.Ptrarchive,
		"riddler":                 &s.Riddler,
		"securitytrails":          &s.Securitytrails,
		"sitedossier":             &s.Sitedossier,
		"threatcrowd":             &s.Threatcrowd,
		"threatminer":             &s.Threatminer,
		"virustotal":              &s.Virustotal,
		"waybackarchive":          &s.Waybackarchive,
		"certificatetransparency": &s.CertificateTransparency,
		"ipv4info":                &s.Ipv4Info,
		"yahoo":                   &s.Yahoo,
		"dogpile":                 &s.Dogpile,
		"exalead":                 &s.Exalead,
		"shodan":                  &s.Shodan,
	}
	for _, source := range dataSources {
		if source == "all" {
			for _, flag := range flags {
				*flag = false
			}
			continue
		}
		if flag, ok := flags[source]; ok {
			*flag = false
		}
	}
}
// printSummary prints one "Running Source: <name>" line for every
// enabled source, in the same fixed order and with the same display
// names as before. The Yahoo line keeps its original trailing newline.
func (s *Source) printSummary() {
	sources := []struct {
		enabled bool
		label   string
	}{
		{s.Ask, "Ask"},
		{s.Archiveis, "Archive.is"},
		{s.Baidu, "Baidu"},
		{s.Bing, "Bing"},
		{s.Censys, "Censys"},
		{s.Certdb, "CertDB"},
		{s.CertificateTransparency, "CertificateTransparency"},
		{s.Certspotter, "Certspotter"},
		{s.Commoncrawl, "Commoncrawl"},
		{s.Crtsh, "Crt.sh"},
		{s.Dnsdb, "Dnsdb"},
		{s.Dnsdumpster, "DNSDumpster"},
		{s.Dnstable, "DNSTable"},
		{s.Dogpile, "Dogpile"},
		{s.Exalead, "Exalead"},
		{s.Findsubdomains, "Findsubdomains"},
		{s.Googleter, "Googleter"},
		{s.Hackertarget, "Hackertarget"},
		{s.Ipv4Info, "Ipv4Info"},
		{s.Passivetotal, "PassiveTotal"},
		{s.Ptrarchive, "PTRArchive"},
		{s.Riddler, "Riddler"},
		{s.Securitytrails, "Securitytrails"},
		{s.Shodan, "Shodan"},
		{s.Sitedossier, "Sitedossier"},
		{s.Threatcrowd, "Threatcrowd"},
		{s.Threatminer, "ThreatMiner"},
		{s.Virustotal, "Virustotal"},
		{s.Waybackarchive, "WaybackArchive"},
	}
	for _, src := range sources {
		if src.enabled {
			fmt.Printf("\nRunning Source: %s%s%s", helper.Info, src.label, helper.Reset)
		}
	}
	// Yahoo is printed last and, as in the original, terminates the
	// summary with a newline.
	if s.Yahoo {
		fmt.Printf("\nRunning Source: %sYahoo%s\n", helper.Info, helper.Reset)
	}
}
// parseAPIKeys disables every source whose required credentials are
// missing from the loaded configuration, so that API-key-backed
// sources are only queried when the user has supplied keys for them.
func (s *Source) parseAPIKeys(state *helper.State) {
	if state.ConfigState.CensysUsername == "" || state.ConfigState.CensysSecret == "" {
		s.Censys = false
	}
	if state.ConfigState.PassivetotalUsername == "" || state.ConfigState.PassivetotalKey == "" {
		s.Passivetotal = false
	}
	if state.ConfigState.RiddlerEmail == "" || state.ConfigState.RiddlerPassword == "" {
		s.Riddler = false
	}
	if state.ConfigState.SecurityTrailsKey == "" {
		s.Securitytrails = false
	}
	if state.ConfigState.ShodanAPIKey == "" {
		s.Shodan = false
	}
	// The original duplicated this Virustotal check verbatim; a single
	// check is sufficient and behaviorally identical.
	if state.ConfigState.VirustotalAPIKey == "" {
		s.Virustotal = false
	}
}
// nbrActive uses reflection to count how many source flags are
// currently enabled on the configuration.
func (s Source) nbrActive() int {
	count := 0
	v := reflect.ValueOf(s)
	for i := 0; i < v.NumField(); i++ {
		if field := v.Field(i); field.Kind() == reflect.Bool && field.Bool() {
			count++
		}
	}
	return count
}
func discover(state *helper.State, domain string, sourceConfig *Source) (subdomains []string) {
var finalPassiveSubdomains []string
if strings.Contains(domain, "*.") {
domain = strings.Split(domain, "*.")[1]
}
// Now, perform checks for wildcard ip
// Must make a copy of state.LoadResolver to pass to dns_resolver.New because
// dns_resolver.New modifies the list of servers passed to it, which modifies
// state.LoadResolver. When using -dL the call to dns_resolver.New is made
// multiple times and each time state.LoadResolver is modified resulting in
// the following resolver lists:
// First call: [1.1.1.1:53 8.8.8.8:53 8.8.4.4:53].
// Second call: [[1.1.1.1:53]:53 [8.8.8.8:53]:53 [8.8.4.4:53]:53].
// Third call: [[[1.1.1.1:53]:53]:53 [[8.8.8.8:53]:53]:53 [[8.8.4.4:53]:53]:53]
var resolvers []string
resolvers = append(resolvers, state.LoadResolver...)
helper.Resolver = dns_resolver.New(resolvers)
// Initialize Wildcard Subdomains
state.IsWildcard, state.WildcardIP = helper.InitWildcard(domain)
if state.IsWildcard {
if !state.Silent {
fmt.Printf("\nFound Wildcard DNS at %s", domain)
for _, ip := range state.WildcardIP {
fmt.Printf("\n - %s", ip)
}
}
}
domainDiscoverPool := helper.NewPool(sourceConfig.nbrActive())
domainDiscoverPool.Run()
domainDiscoverPool.Wait()
if !state.Silent {
fmt.Printf("\nRunning enumeration on %s\n", domain)
}
// Create goroutines for added speed and receive data via channels
// Check if we the user has specified custom sources and if yes, run them
// via if statements.
if sourceConfig.Crtsh {
domainDiscoverPool.Add(crtsh.Query, domain, state)
}
if sourceConfig.Certdb {
domainDiscoverPool.Add(certdb.Query, domain, state)
}
if sourceConfig.Certspotter {
domainDiscoverPool.Add(certspotter.Query, domain, state)
}
if sourceConfig.Dnstable {
domainDiscoverPool.Add(dnstable.Query, domain, state)
}
if sourceConfig.Threatcrowd {
domainDiscoverPool.Add(threatcrowd.Query, domain, state)
}
if sourceConfig.Findsubdomains {
domainDiscoverPool.Add(findsubdomains.Query, domain, state)
}
if sourceConfig.Dnsdumpster {
domainDiscoverPool.Add(dnsdumpster.Query, domain, state)
}
if sourceConfig.Passivetotal {
domainDiscoverPool.Add(passivetotal.Query, domain, state)
}
if sourceConfig.Ptrarchive {
domainDiscoverPool.Add(ptrarchive.Query, domain, state)
}
if sourceConfig.Hackertarget {
domainDiscoverPool.Add(hackertarget.Query, domain, state)
}
if sourceConfig.Virustotal {
domainDiscoverPool.Add(virustotal.Query, domain, state)
}
if sourceConfig.Securitytrails {
domainDiscoverPool.Add(securitytrails.Query, domain, state)
}
if sourceConfig.Waybackarchive {
domainDiscoverPool.Add(waybackarchive.Query, domain, state)
}
if sourceConfig.Threatminer {
domainDiscoverPool.Add(threatminer.Query, domain, state)
}
if sourceConfig.Riddler {
domainDiscoverPool.Add(riddler.Query, domain, state)
}
if sourceConfig.Censys {
domainDiscoverPool.Add(censys.Query, domain, state)
}
if sourceConfig.Dnsdb {
domainDiscoverPool.Add(dnsdb.Query, domain, state)
}
if sourceConfig.Baidu {
domainDiscoverPool.Add(baidu.Query, domain, state)
}
if sourceConfig.Bing {
domainDiscoverPool.Add(bing.Query, domain, state)
}
if sourceConfig.Ask {
domainDiscoverPool.Add(ask.Query, domain, state)
}
if sourceConfig.CertificateTransparency {
domainDiscoverPool.Add(certificatetransparency.Query, domain, state)
}
if sourceConfig.Ipv4Info {
domainDiscoverPool.Add(ipv4info.Query, domain, state)
}
if sourceConfig.Archiveis {
domainDiscoverPool.Add(archiveis.Query, domain, state)
}
if sourceConfig.Sitedossier {
domainDiscoverPool.Add(sitedossier.Query, domain, state)
}
if sourceConfig.Yahoo {
domainDiscoverPool.Add(yahoo.Query, domain, state)
}
if sourceConfig.Dogpile {
domainDiscoverPool.Add(dogpile.Query, domain, state)
}
if sourceConfig.Exalead {
domainDiscoverPool.Add(exalead.Query, domain, state)
}
if sourceConfig.Shodan {
domainDiscoverPool.Add(shodan.Query, domain, state)
}
if sourceConfig.Googleter {
domainDiscoverPool.Add(googleter.Query, domain, state)
}
if sourceConfig.Commoncrawl {
domainDiscoverPool.Add(commoncrawl.Query, domain, state)
}
domainDiscoverPool.Wait()
completedJobs := domainDiscoverPool.Results()
for _, job := range completedJobs {
if job.Err != nil {
// an error occurred
if !state.Silent {
fmt.Printf("\nerror: %v\n", job.Err)
}
}
results := job.Result.([]string)
finalPassiveSubdomains = append(finalPassiveSubdomains, results...)
}
domainDiscoverPool.Stop()
// Now remove duplicate items from the slice
uniquePassiveSubdomains := helper.Unique(finalPassiveSubdomains)
// Now, validate all subdomains found
validPassiveSubdomains := helper.Validate(domain, uniquePassiveSubdomains)
var words []string
var BruteforceSubdomainList []string
// Start the bruteforcing workflow if the user has asked for it
if state.Bruteforce && state.Wordlist != "" {
file, err := os.Open(state.Wordlist)
if err != nil {
fmt.Fprintf(os.Stderr, "\nerror: %v\n", err)
os.Exit(1)
}
defer file.Close()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
// Send the job to the channel
words = append(words, scanner.Text())
}
if !state.Silent {
fmt.Printf("\n\nStarting Bruteforcing of %s%s%s with %s%d%s words", helper.Info, domain, helper.Reset, helper.Info, len(words), helper.Reset)
}
BruteforceSubdomainsArray := bruteforce.Brute(state, words, domain)
for _, subdomain := range BruteforceSubdomainsArray {
BruteforceSubdomainList = append(BruteforceSubdomainList, subdomain.Fqdn)
}
}
// Append bruteforced subdomains to validPassiveSubdomains
validPassiveSubdomains = append(validPassiveSubdomains, BruteforceSubdomainList...)
var PassiveSubdomains []string
var passiveSubdomainsArray []helper.Domain
if state.Alive || state.AquatoneJSON {
// Nove remove all wildcard subdomains
if !state.Silent {
fmt.Printf("\n\nResolving %s%d%s Unique Hosts found", helper.Info, len(validPassiveSubdomains), helper.Reset)
}
passiveSubdomainsArray = resolver.Resolve(state, validPassiveSubdomains)
for _, subdomain := range passiveSubdomainsArray {
PassiveSubdomains = append(PassiveSubdomains, subdomain.Fqdn)
}
} else {
PassiveSubdomains = validPassiveSubdomains
}
if state.AquatoneJSON {
if !state.Silent {
fmt.Printf("\n\nWriting Resolved Enumeration Output To %s", state.Output)
}
output.WriteOutputAquatoneJSON(state, passiveSubdomainsArray)
}
// Sort the subdomains found alphabetically
sort.Strings(PassiveSubdomains)
if !state.Silent {
fmt.Printf("\n\nTotal %s%d%s Unique subdomains found for %s\n\n", helper.Info, len(PassiveSubdomains), helper.Reset, domain)
}
if state.Alive || state.AquatoneJSON {
for _, subdomain := range passiveSubdomainsArray {
if !state.Silent {
fmt.Printf("\n%s\t\t%s", subdomain.IP, subdomain.Fqdn)
} else {
fmt.Printf("\n%s", subdomain.Fqdn)
}
}
} else {
for _, subdomain := range PassiveSubdomains {
fmt.Println(subdomain)
}
}
return PassiveSubdomains
}
// Enumerate executes passive analysis for every target domain and returns
// the aggregated list of subdomains found.
//
// Flow: build the enabled-source configuration from CLI state, collect the
// target domains (from a file or the single -d flag), fan each domain out
// to a worker pool running analyzeDomain, then merge per-domain results
// (writing JSON output per job when requested).
func Enumerate(state *helper.State) []string {
	sourceConfig := new(Source)
	fmt.Printf("\n")
	if state.Sources == "all" {
		sourceConfig.enableAll()
	} else {
		// Check data sources and create a source configuration structure
		dataSources := strings.Split(state.Sources, ",")
		sourceConfig.enable(dataSources)
	}
	if state.ExcludeSource != "" {
		dataSources := strings.Split(state.ExcludeSource, ",")
		sourceConfig.disable(dataSources)
	}
	// Do not perform passive enumeration
	if state.NoPassive {
		sourceConfig.disable([]string{"all"})
	}
	// Remove sources having no API keys present for them
	sourceConfig.parseAPIKeys(state)
	if !state.Silent {
		sourceConfig.printSummary()
	}
	if state.DomainList != "" {
		// Open the wordlist file
		// NOTE(review): the open error is silently swallowed (caller only
		// sees nil), and wordfile is never closed — confirm intent.
		wordfile, err := os.Open(state.DomainList)
		if err != nil {
			return nil
		}
		scanner := bufio.NewScanner(wordfile)
		for scanner.Scan() {
			// DomainList is a package-level slice declared elsewhere in this file.
			DomainList = append(DomainList, scanner.Text())
		}
	} else {
		DomainList = append(DomainList, state.Domain)
	}
	passivePool := helper.NewPool(state.Threads)
	passivePool.Run()
	// add jobs: one analyzeDomain job per target domain
	for _, domain := range DomainList {
		passivePool.Add(analyzeDomain, domain, state, sourceConfig, passivePool)
	}
	passivePool.Wait()
	var allSubdomains []string
	completedJobs := passivePool.Results()
	for _, job := range completedJobs {
		if job.Result == nil {
			continue
		}
		results := job.Result.([]string)
		if state.Output != "" && state.IsJSON {
			err := output.WriteOutputJSON(state, results)
			if err != nil && state.Silent {
				fmt.Printf("\n%s-> %v%s\n", helper.Bad, err, helper.Reset)
			}
		}
		allSubdomains = append(allSubdomains, results...)
	}
	passivePool.Stop()
	// Write the output to individual files in a directory
	// TODO: group results by domain and write to directory
	// if state.OutputDir != "" {
	// 	output.WriteOutputToDir(state, allSubdomains, Domain)
	// }
	return allSubdomains
}
// analyzeDomain is the pool job that enumerates a single domain: it runs
// discovery, writes plain-text output when configured, and (when recursive
// mode is on) re-queues every found subdomain for another pass.
func analyzeDomain(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)
	sourceConfig := args[2].(*Source)
	passivePool := args[3].(*helper.Pool)

	foundSubdomains := discover(state, domain, sourceConfig)

	// Plain-text output is written per domain; JSON and aquatone formats
	// are handled elsewhere.
	if state.Output != "" && !state.IsJSON && !state.AquatoneJSON {
		if err := output.WriteOutputTextArray(state, foundSubdomains); err != nil {
			if state.Silent {
				fmt.Printf("\n%s-> %v%s\n", helper.Bad, err, helper.Reset)
			}
		}
	}
	if state.Recursive {
		for _, subdomain := range foundSubdomains {
			// Results will be written in next recursive iteration
			passivePool.Add(analyzeDomain, subdomain, state, sourceConfig, passivePool)
		}
	}
	return foundSubdomains
}

View File

@ -1,80 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// Package resolver is a fast dns resolver
package resolver
import (
"fmt"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// consume is the resolver pool job: it resolves one hostname and returns
// its first IP as a string, or "" when resolution fails, yields no IPs, or
// matches a known wildcard address. args: [0] = hostname, [1] = state.
func consume(args ...interface{}) interface{} {
	target := args[0].(string)
	state := args[1].(*helper.State)
	ips, err := helper.ResolveHost(target)
	if err != nil {
		// Resolution failed; the "" sentinel makes Resolve skip this job.
		return ""
	}
	if len(ips) <= 0 {
		// We didn't find any ips — treat like a failed resolution.
		return ""
	}
	if state.IsWildcard {
		result := helper.CheckWildcard(state, ips)
		if result {
			// We have a wildcard ip: discard this host.
			return ""
		}
	}
	if !state.Silent {
		if state.Verbose {
			fmt.Printf("\n[%sRESOLVED%s] %s : %s", helper.Info, helper.Reset, target, ips[0])
		}
	}
	// Only the first resolved IP is reported.
	return ips[0]
}
// Resolve handles a list of subdomains to resolve, returning those that
// resolved successfully (and are not wildcard matches) paired with their
// first IP address.
func Resolve(state *helper.State, list []string) (subdomains []helper.Domain) {
	resolverPool := helper.NewPool(state.Threads)
	resolverPool.Run()
	// Queue one resolution job per target.
	for _, target := range list {
		resolverPool.Add(consume, target, state)
	}
	resolverPool.Wait()
	var ValidSubdomains []helper.Domain
	completedJobs := resolverPool.Results()
	for _, job := range completedJobs {
		// consume returns "" for failed/wildcard hosts, first IP otherwise.
		if job.Result != "" {
			fqdn := job.Args[0].(string)
			ip := job.Result.(string)
			// FIX: consume already prints the "[RESOLVED]" line in verbose
			// mode as each host resolves; printing it again here produced
			// every result twice, so the duplicate print was removed.
			ValidSubdomains = append(ValidSubdomains, helper.Domain{IP: ip, Fqdn: fqdn})
		}
	}
	resolverPool.Stop()
	return ValidSubdomains
}

View File

@ -1,28 +0,0 @@
//
// Contains color constants for printing
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
// Usage: fmt.Printf("[%sCRTSH%s] %s", r, rs, subdomain)
// ANSI escape sequences used for colorized terminal output.
var (
	Bold      = "\033[1m"
	Underline = "\033[4m"
	Red       = "\033[31;1;4m"
	Cyan      = "\033[36;6;2m"
	Green     = "\033[32;6;3m"
	Yellow    = "\033[0;33m"
	Reset     = "\033[0m" // restores the default terminal style
	Info      = "\033[33;1;1m"
	Que       = "\033[34;1;1m"
	Bad       = "\033[31;1;1m"
	Good      = "\033[32;1;1m"
	Run       = "\033[97;1;1m"
)

View File

@ -1,89 +0,0 @@
//
// Contains helper functions for dealing with configuration files
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"os/user"
)
// GetHomeDir returns the home directory of the current user, exiting the
// process when the user lookup fails.
func GetHomeDir() string {
	usr, err := user.Current()
	if err == nil {
		return usr.HomeDir
	}
	fmt.Printf("\n\n[!] Error : %v\n", err)
	os.Exit(1)
	return "" // unreachable; satisfies the compiler
}
// Exists reports whether the given file or directory is present on disk.
// Any stat error other than "does not exist" is treated as present, so
// callers err on the side of not clobbering unknown paths.
func Exists(path string) bool {
	if _, err := os.Stat(path); os.IsNotExist(err) {
		return false
	}
	return true
}
// CreateDirIfNotExist creates config directory if it does not exists
func CreateDirIfNotExist(dir string) {
if _, err := os.Stat(dir); os.IsNotExist(err) {
err = os.MkdirAll(dir, 0755)
if err != nil {
fmt.Printf("\n\n[!] Error : %v\n", err)
os.Exit(1)
}
}
}
// ReadConfigFile Reads a config file from disk and returns Configuration structure
// If not exists, create one and then return
//
// The file lives at ~/.config/subfinder/config.json. When it is missing, a
// zero-valued Config is serialized there as a template and the user is told
// to edit it; on template-write failure the process exits.
func ReadConfigFile() (configuration *Config, err error) {
	var config Config
	// Get current path
	home := GetHomeDir()
	path := home + "/.config/subfinder/config.json"
	if Exists(path) {
		raw, err := ioutil.ReadFile(path)
		if err != nil {
			return &config, err
		}
		err = json.Unmarshal(raw, &config)
		if err != nil {
			return &config, err
		}
		return &config, nil
	}
	// First run: create the config directory and write an empty template.
	CreateDirIfNotExist(home + "/.config/subfinder/")
	configJSON, _ := json.MarshalIndent(config, "", " ")
	err = ioutil.WriteFile(path, configJSON, 0644)
	if err != nil {
		fmt.Printf("\n\n[!] Error : %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("\n[NOTE] Edit %s with your options !", path)
	return &config, nil
}

View File

@ -1,83 +0,0 @@
//
// dns.go : DNS helper functions for subfinder
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
import (
"fmt"
"os"
"github.com/bogdanovich/dns_resolver"
)
// Resolver is a global dns_resolver object shared by all lookups; it must
// be initialized elsewhere before ResolveHost is called.
var Resolver *dns_resolver.DnsResolver

// ResolveHost resolves a host using dns_resolver lib and returns every IP
// found as a string slice.
func ResolveHost(host string) (ips []string, err error) {
	// In case of i/o timeout
	Resolver.RetryTimes = 5
	ip, err := Resolver.LookupHost(host)
	if err != nil {
		return []string{}, err
	}
	var retIPs []string
	for _, host := range ip {
		retIPs = append(retIPs, host.String())
	}
	return retIPs, nil
}
// CheckWildcard reports whether any of the resolved ips matches one of the
// wildcard IPs recorded in the program state.
func CheckWildcard(state *State, ips []string) (result bool) {
	for _, candidate := range ips {
		for _, wildcard := range state.WildcardIP {
			if candidate == wildcard {
				return true
			}
		}
	}
	// No candidate matched a wildcard address.
	return false
}
// InitWildcard checks if a host returns wildcard ips and returns status with ips returned
//
// It resolves four random UUID-based labels under domain; any successful
// resolution means the domain has wildcard DNS, and the IPs returned for
// the first hit are reported so later results can be filtered against them.
func InitWildcard(domain string) (result bool, ips []string) {
	UUIDs := make([]string, 4)
	// Generate 4 random UUIDs
	for i := 0; i < 4; i++ {
		uuid, err := NewUUID()
		if err != nil {
			fmt.Printf("\nerror: %v\n", err)
			os.Exit(1)
		}
		UUIDs[i] = uuid
	}
	for _, uid := range UUIDs {
		attempt := fmt.Sprintf("%s.%s", uid, domain)
		// Currently we check only A records. GoBuster also does that
		// I don't think checking both A and CNAME checking is necessary
		// NOTE: ips here shadows the named return; the shadowed value is
		// what gets returned on the wildcard-found path below.
		ips, err := ResolveHost(attempt)
		if err != nil {
			continue
		}
		if len(ips) > 0 {
			return true, ips
		}
	}
	// No random label resolved: not a wildcard domain (ips is the zero value).
	return false, ips
}

View File

@ -1,96 +0,0 @@
//
// misc.go : contains misc helper function
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
import (
"crypto/rand"
"fmt"
"io"
"strings"
"github.com/subfinder/urlx"
)
// Result is the Current result structure
type Result struct {
	Subdomains []string // Subdomains found
	Error      error    // Any error that has occurred
}

// Domain pairs a resolved fully-qualified domain name with its first IP.
type Domain struct {
	IP   string
	Fqdn string
}
// NewUUID generates a random version-4 UUID according to RFC 4122.
// Taken from : https://play.golang.org/p/4FkNSiUDMg
//
// Used for bruteforcing and detection of Wildcard Subdomains :-)
func NewUUID() (string, error) {
	var raw [16]byte
	// io.ReadFull guarantees a non-nil error whenever fewer than 16 bytes
	// were read, so a single error check suffices.
	if _, err := io.ReadFull(rand.Reader, raw[:]); err != nil {
		return "", err
	}
	// variant bits; see section 4.1.1
	raw[8] = raw[8]&^0xc0 | 0x80
	// version 4 (pseudo-random); see section 4.1.3
	raw[6] = raw[6]&^0xf0 | 0x40
	return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:]), nil
}
// Unique returns the input slice with duplicates removed, keeping the
// first occurrence of each element in its original order.
// Adapted from http://www.golangprograms.com/remove-duplicate-values-from-slice.html
func Unique(elements []string) []string {
	seen := map[string]bool{}
	result := []string{}
	for _, element := range elements {
		if seen[element] {
			continue
		}
		seen[element] = true
		result = append(result, element)
	}
	return result
}
// SubdomainExists reports whether key is present in values.
func SubdomainExists(key string, values []string) bool {
	for _, value := range values {
		if value == key {
			return true
		}
	}
	return false
}
// ExtractSubdomains extracts a subdomain from a big blob of text using the
// urlx extractor, then keeps only entries that are true subdomains of
// domain (see Validate).
func ExtractSubdomains(text, domain string) (urls []string) {
	allUrls := urlx.ExtractSubdomains(text, domain)
	return Validate(domain, allUrls)
}
// Validate filters strslice down to entries that are true subdomains of
// domain, i.e. that end with "." + domain.
func Validate(domain string, strslice []string) (subdomains []string) {
	suffix := "." + domain
	for _, candidate := range strslice {
		if strings.HasSuffix(candidate, suffix) {
			subdomains = append(subdomains, candidate)
		}
	}
	return subdomains
}

View File

@ -1,287 +0,0 @@
//
// pool.go : contains pool helper function
// heavily based on: github.com/stefantalpalaru/pool
//
package helper
import (
"container/list"
"fmt"
"log"
"sync"
"time"
)
// Job holds all the data related to a worker's instance.
type Job struct {
	F      func(...interface{}) interface{} // the work function
	Args   []interface{}                    // arguments passed to F
	Result interface{}                      // value returned by F (nil when F panicked)
	Err    error                            // panic converted to an error, if any
	added  chan bool                        // used by Pool.Add to wait for the supervisor
}

// Stats is a structure holding statistical data about the pool.
type Stats struct {
	Submitted int
	Running   int
	Completed int
}

// Pool is the main data structure. The bookkeeping fields are owned by the
// supervisor goroutine; the channels below are the only way workers and
// callers interact with it, which is why no mutex is needed.
type Pool struct {
	workersStarted     bool
	supervisorStarted  bool
	numWorkers         int
	jobWantedPipe      chan chan *Job
	donePipe           chan *Job
	addPipe            chan *Job
	resultWantedPipe   chan chan *Job
	jobsReadyToRun     *list.List
	numJobsSubmitted   int
	numJobsRunning     int
	numJobsCompleted   int
	jobsCompleted      *list.List
	interval           time.Duration // for sleeping, in ms
	workingWantedPipe  chan chan bool
	statsWantedPipe    chan chan Stats
	workerKillPipe     chan bool
	supervisorKillPipe chan bool
	workerWg           sync.WaitGroup
	supervisorWg       sync.WaitGroup
}
// subworker runs a single job and catches any panic raised by it,
// recording the panic as the job's Err and clearing its Result.
func (pool *Pool) subworker(job *Job) {
	defer func() {
		if err := recover(); err != nil {
			log.Println("panic while running job:", err)
			job.Result = nil
			// FIX: the old code did fmt.Errorf(err.(string)), which itself
			// panics inside the recover handler whenever the panic value is
			// not a string (runtime errors like nil dereference carry a
			// runtime.Error), and also treated the panic message as a printf
			// format string. %v handles any panic value safely.
			job.Err = fmt.Errorf("%v", err)
		}
	}()
	job.Result = job.F(job.Args...)
}
// worker gets a job from the job_pipe, passes it to a
// subworker and puts the job in the done_pipe when finished.
// It exits when a value arrives on workerKillPipe.
func (pool *Pool) worker(num int) {
	jobPipe := make(chan *Job)
WORKER_LOOP:
	for {
		// Advertise readiness; the supervisor replies with a job, or nil
		// when the ready queue is empty.
		pool.jobWantedPipe <- jobPipe
		job := <-jobPipe
		if job == nil {
			// Nothing queued right now; back off briefly before retrying.
			time.Sleep(pool.interval * time.Millisecond)
		} else {
			pool.subworker(job)
			pool.donePipe <- job
		}
		// Non-blocking check for the stop signal between jobs.
		select {
		case <-pool.workerKillPipe:
			break WORKER_LOOP
		default:
		}
	}
	pool.workerWg.Done()
}
// NewPool creates a new Pool with the given number of workers. The
// supervisor goroutine is started immediately so jobs can be queued with
// Add before Run is called.
func NewPool(workers int) (pool *Pool) {
	pool = new(Pool)
	pool.numWorkers = workers
	pool.jobWantedPipe = make(chan chan *Job)
	pool.donePipe = make(chan *Job)
	pool.addPipe = make(chan *Job)
	pool.resultWantedPipe = make(chan chan *Job)
	pool.jobsReadyToRun = list.New()
	pool.jobsCompleted = list.New()
	pool.workingWantedPipe = make(chan chan bool)
	pool.statsWantedPipe = make(chan chan Stats)
	pool.workerKillPipe = make(chan bool)
	pool.supervisorKillPipe = make(chan bool)
	pool.interval = 1 // multiplied by time.Millisecond wherever it is used
	// start the supervisor here so we can accept jobs before a Run call
	pool.startSupervisor()
	return
}
// supervisor feeds jobs to workers and keeps track of them.
// It is the sole owner of the pool's queues and counters: every mutation
// happens inside this single goroutine, serialized by the select below.
func (pool *Pool) supervisor() {
SUPERVISOR_LOOP:
	for {
		select {
		// new job submitted via Add
		case job := <-pool.addPipe:
			pool.jobsReadyToRun.PushBack(job)
			pool.numJobsSubmitted++
			job.added <- true
		// send jobs to the workers (nil when the ready queue is empty)
		case jobPipe := <-pool.jobWantedPipe:
			element := pool.jobsReadyToRun.Front()
			var job *Job
			if element != nil {
				job = element.Value.(*Job)
				pool.numJobsRunning++
				pool.jobsReadyToRun.Remove(element)
			}
			jobPipe <- job
		// job completed
		case job := <-pool.donePipe:
			pool.numJobsRunning--
			pool.jobsCompleted.PushBack(job)
			pool.numJobsCompleted++
		// wait for job (services WaitForJob)
		case resultPipe := <-pool.resultWantedPipe:
			closePipe := false
			job := (*Job)(nil)
			element := pool.jobsCompleted.Front()
			if element != nil {
				job = element.Value.(*Job)
				pool.jobsCompleted.Remove(element)
			} else {
				// Closing the pipe signals "all submitted jobs are done".
				if pool.numJobsRunning == 0 && pool.numJobsCompleted == pool.numJobsSubmitted {
					closePipe = true
				}
			}
			if closePipe {
				close(resultPipe)
			} else {
				resultPipe <- job
			}
		// is the pool working or just lazing on a Sunday afternoon?
		case workingPipe := <-pool.workingWantedPipe:
			working := true
			if pool.jobsReadyToRun.Len() == 0 && pool.numJobsRunning == 0 {
				working = false
			}
			workingPipe <- working
		// stats snapshot
		case statsPipe := <-pool.statsWantedPipe:
			poolStats := Stats{pool.numJobsSubmitted, pool.numJobsRunning, pool.numJobsCompleted}
			statsPipe <- poolStats
		// stopping
		case <-pool.supervisorKillPipe:
			break SUPERVISOR_LOOP
		}
	}
	pool.supervisorWg.Done()
}
// Run starts the Pool by launching the workers.
// It's OK to start an empty Pool. The jobs will be fed to the workers as soon
// as they become available. Calling Run on a pool whose workers are already
// running panics.
func (pool *Pool) Run() {
	if pool.workersStarted {
		panic("trying to start a pool that's already running")
	}
	for i := 0; i < pool.numWorkers; i++ {
		pool.workerWg.Add(1)
		go pool.worker(i)
	}
	pool.workersStarted = true
	// handle the supervisor (it may already be running from NewPool)
	if !pool.supervisorStarted {
		pool.startSupervisor()
	}
}
// Stop will signal the workers to exit and wait for them to actually do that.
// It also releases any other resources (e.g.: it stops the supervisor goroutine)
// so call this method when you're done with the Pool instance to allow the GC
// to do its job. Stopping an already-stopped pool panics.
func (pool *Pool) Stop() {
	if !pool.workersStarted {
		panic("trying to stop a pool that's already stopped")
	}
	// stop the workers: one kill token per worker
	for i := 0; i < pool.numWorkers; i++ {
		pool.workerKillPipe <- true
	}
	pool.workerWg.Wait()
	// set the flag
	pool.workersStarted = false
	// handle the supervisor
	if pool.supervisorStarted {
		pool.stopSupervisor()
	}
}
// startSupervisor launches the supervisor goroutine and marks it running.
func (pool *Pool) startSupervisor() {
	pool.supervisorWg.Add(1)
	go pool.supervisor()
	pool.supervisorStarted = true
}

// stopSupervisor signals the supervisor to exit and waits for it to finish.
func (pool *Pool) stopSupervisor() {
	pool.supervisorKillPipe <- true
	pool.supervisorWg.Wait()
	pool.supervisorStarted = false
}
// Add creates a Job from the given function and args and
// adds it to the Pool. It blocks until the supervisor has queued the job.
func (pool *Pool) Add(f func(...interface{}) interface{}, args ...interface{}) {
	job := &Job{f, args, nil, nil, make(chan bool)}
	pool.addPipe <- job
	<-job.added
}
// Wait blocks until all the jobs in the Pool are done, polling the
// supervisor's "working" status at the pool's sleep interval.
func (pool *Pool) Wait() {
	workingPipe := make(chan bool)
	for {
		pool.workingWantedPipe <- workingPipe
		if !<-workingPipe {
			break
		}
		time.Sleep(pool.interval * time.Millisecond)
	}
}
// Results retrieves the completed jobs and resets the completed-job list,
// so each call only returns jobs finished since the previous call.
//
// NOTE(review): this reads and replaces jobsCompleted without going through
// the supervisor; it is only safe when no jobs are running concurrently
// (i.e. after Wait) — confirm callers respect that.
func (pool *Pool) Results() (res []*Job) {
	res = make([]*Job, pool.jobsCompleted.Len())
	i := 0
	for e := pool.jobsCompleted.Front(); e != nil; e = e.Next() {
		res[i] = e.Value.(*Job)
		i++
	}
	pool.jobsCompleted = list.New()
	return
}
// WaitForJob blocks until a completed job is available and returns it.
// If there are no jobs running, it returns nil.
func (pool *Pool) WaitForJob() *Job {
	resultPipe := make(chan *Job)
	var job *Job
	var ok bool
	for {
		pool.resultWantedPipe <- resultPipe
		job, ok = <-resultPipe
		if !ok {
			// no more results available: the supervisor closed the pipe
			// because every submitted job has completed and been consumed.
			return nil
		}
		if job == (*Job)(nil) {
			// no result available right now but there are jobs running
			time.Sleep(pool.interval * time.Millisecond)
		} else {
			break
		}
	}
	return job
}
// Status returns a "stats" instance with the pool's submitted / running /
// completed job counters.
func (pool *Pool) Status() Stats {
	statsPipe := make(chan Stats)
	if pool.supervisorStarted {
		pool.statsWantedPipe <- statsPipe
		return <-statsPipe
	}
	// the supervisor wasn't started so we return a zeroed structure
	return Stats{}
}

View File

@ -1,89 +0,0 @@
//
// helper.go : Main sources driver. Contains helper functions for other sources.
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
import (
"crypto/tls"
"net/http"
"net/http/cookiejar"
"net/url"
"time"
)
// GetHTTPResponse performs a GET request against url (full scheme required)
// with the given timeout in seconds and returns the raw *http.Response.
// TLS certificate errors are deliberately ignored. The caller is
// responsible for closing the response body.
func GetHTTPResponse(url string, timeout int) (resp *http.Response, err error) {
	client := &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
		Timeout: time.Duration(timeout) * time.Second,
	}
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return resp, err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1")
	req.Header.Add("Connection", "close")
	return client.Do(req)
}
// GetHTTPCookieResponse returns a HTTP Response object
// It needs URL To Visit and a cookie array to send with request.
// Note, It needs full url with scheme and a timeout value.
// It returns a HTTP Response object with a cookie array — the cookies the
// jar holds for the request URL after the exchange. TLS certificate errors
// are ignored; the caller must close the response body.
func GetHTTPCookieResponse(urls string, cookies []*http.Cookie, timeout int) (resp *http.Response, cookie []*http.Cookie, err error) {
	var curCookieJar *cookiejar.Jar
	curCookieJar, _ = cookiejar.New(nil)
	// Add the cookies received via request params
	u, _ := url.Parse(urls)
	curCookieJar.SetCookies(u, cookies)
	tr := &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: tr,
		Jar:       curCookieJar,
		Timeout:   time.Duration(timeout) * time.Second,
	}
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return resp, cookie, err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1")
	req.Header.Add("Connection", "close")
	resp, err = client.Do(req)
	if err != nil {
		return resp, cookie, err
	}
	cookie = curCookieJar.Cookies(req.URL)
	return resp, cookie, nil
}

View File

@ -1,105 +0,0 @@
//
// state.go : Contains current program state
// Written By : @codingo (Michael)
// @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
package helper
import (
"os"
)
// State holds the State read in from the CLI
type State struct {
	Color           bool     // Whether to use color or not
	Threads         int      // Number of threads to use
	Timeout         int      // Timeout for requests to different passive sources
	Verbose         bool     // Show verbose information
	Domain          string   // Domain name to find subdomains for
	Recursive       bool     // Whether perform recursive subdomain discovery or not
	Output          string   // Name of output file
	Alive           bool     // Get only alive subdomains (x - no wildcards :-))
	IsJSON          bool     // Provide JSON output file
	Wordlist        string   // Wordlist file for subdomains bruteforcing
	Bruteforce      bool     // Flag to decide whether to bruteforce or not
	WildcardIP      []string // Wildcard IP Structure
	IsWildcard      bool     // Does the host has wildcard subdomains, if yes parse them carefully
	Sources         string   // Comma separated list of sources to use
	Silent          bool     // Show only silent output or not
	FinalResults    []string // Contains final bruteforcing results
	SetConfig       string   // Used for changing the current configuration file details
	SetSetting      string   // Used for passing custom configuration to the application
	DomainList      string   // List of domains to find subdomains for
	OutputDir       string   // Directory to output results to if domain list is used
	LoadResolver    []string // Slice of resolvers to use
	ComResolver     string   // Comma-separated list of resolvers to use
	ListResolver    string   // File to load resolvers from
	AquatoneJSON    bool     // Use aquatone style json format
	ExcludeSource   string   // Sources to exclude
	NoPassive       bool     // Do not perform passive enumeration
	OutputHandle    *os.File // Handle to the output file used for output buffering
	CurrentSettings Setting  // Current application settings
	ConfigState     Config   // Current configuration file state
}

// Config contains api keys for different sources
type Config struct {
	VirustotalAPIKey     string `json:"virustotalApikey"`     // Virustotal API Key
	PassivetotalUsername string `json:"passivetotalUsername"` // PassiveTotal Username (Email Address)
	PassivetotalKey      string `json:"passivetotalKey"`      // PassiveTotal api key
	SecurityTrailsKey    string `json:"securitytrailsKey"`    // SecurityTrails api key
	RiddlerEmail         string `json:"riddlerEmail"`         // Riddler Email
	RiddlerPassword      string `json:"riddlerPassword"`      // Riddler Password
	CensysUsername       string `json:"censysUsername"`       // Censys Username
	CensysSecret         string `json:"censysSecret"`         // Censys API Key
	ShodanAPIKey         string `json:"shodanApiKey"`         // Shodan API Key
}

// Setting contains settings for sources
type Setting struct {
	CensysPages    string // Censys pages to check. For All, use "all"
	AskPages       string // Ask search pages to check
	BaiduPages     string // Baidu search pages to check
	BingPages      string // Bing search pages to check
	DogpilePages   string // Dogpile search pages to check
	YahooPages     string // Yahoo search pages to check
	ShodanPages    string // Shodan search pages to check
	GoogleterPages string // Googleter pages to check
}
// InitializeSettings sets default settings value
func InitializeSettings() (setting *Setting) {
	return &Setting{
		CensysPages:    "10", // Default is 10 pages. Strikes a fine balance
		AskPages:       "15",
		BaiduPages:     "5",
		BingPages:      "50",
		DogpilePages:   "16",
		YahooPages:     "10",
		ShodanPages:    "10",
		GoogleterPages: "30",
	}
}
// InitState initializes the default state
//
// FIX: the previous implementation used a 29-value positional struct
// literal, which silently mis-assigns fields whenever State gains or
// reorders a field. Named fields keep the exact same defaults (all omitted
// fields take their zero value, matching the old literal) while being
// safe against future struct changes.
func InitState() (state *State) {
	// Read the configuration file and ignore errors
	config, _ := ReadConfigFile()
	setting := InitializeSettings()
	return &State{
		Color:   true,
		Threads: 10,
		Timeout: 180,
		// Non-nil empty slices, as in the original positional literal.
		WildcardIP:   []string{},
		FinalResults: []string{},
		LoadResolver: []string{},
		// NOTE(review): IsWildcard defaulted to true in the original
		// positional literal — preserved here, but confirm it is intended.
		IsWildcard:      true,
		CurrentSettings: *setting,
		ConfigState:     *config,
	}
}

View File

@ -1,136 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package output Contains different functions for reporting
package output
import (
"encoding/json"
"io"
"io/ioutil"
"os"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// WriteOutputText writes a single subdomain output to a normal text file
// through the already-open handle in state.OutputHandle.
func WriteOutputText(state *helper.State, subdomain string) error {
	_, err := state.OutputHandle.WriteString(subdomain + "\n")
	if err != nil {
		return err
	}
	return nil
}
// WriteOutputTextArray writes a list of subdomains output to a normal text
// file, one per line, through the already-open handle in state.OutputHandle.
// It stops and returns on the first write error.
func WriteOutputTextArray(state *helper.State, subdomains []string) error {
	for _, subdomain := range subdomains {
		_, err := state.OutputHandle.WriteString(subdomain + "\n")
		if err != nil {
			return err
		}
	}
	return nil
}
// WriteOutputJSON writes the subdomain list to state.Output as an indented
// JSON array.
func WriteOutputJSON(state *helper.State, subdomains []string) error {
	data, err := json.MarshalIndent(subdomains, "", " ")
	if err != nil {
		return err
	}
	// FIX: ioutil.WriteFile creates (or truncates) the file itself, so the
	// former os.Create call was redundant and leaked one open file
	// descriptor per invocation — it has been removed.
	return ioutil.WriteFile(state.Output, data, 0644)
}
// WriteOutputAquatoneJSON writes aquatone-style subdomains output to a
// json file: an indented JSON object mapping FQDN -> IP.
func WriteOutputAquatoneJSON(state *helper.State, subdomains []helper.Domain) error {
	m := make(map[string]string)
	for _, subdomain := range subdomains {
		// Set correct values
		m[subdomain.Fqdn] = subdomain.IP
	}
	data, err := json.MarshalIndent(m, "", " ")
	if err != nil {
		return err
	}
	// FIX: ioutil.WriteFile creates/truncates the file itself; the former
	// os.Create call only leaked an open file descriptor and was removed.
	return ioutil.WriteFile(state.Output, data, 0644)
}
// WriteOutputToDir writes enumeration results for one domain into
// state.OutputDir, as <domain>_hosts.txt or <domain>_hosts.json depending
// on state.IsJSON. It is a no-op when OutputDir is empty.
//
// NOTE(review): OutputDir is concatenated directly with the domain, so it
// is assumed to end with a path separator — confirm against flag parsing.
func WriteOutputToDir(state *helper.State, subdomains []string, domain string) (err error) {
	if state.OutputDir == "" {
		return nil
	}
	if !state.IsJSON {
		file, err := os.Create(state.OutputDir + domain + "_hosts.txt")
		if err != nil {
			return err
		}
		// FIX: defer guarantees the handle is released even when a write
		// fails; the old code leaked the descriptor on the error path.
		defer file.Close()
		for _, subdomain := range subdomains {
			if _, err := io.WriteString(file, subdomain+"\n"); err != nil {
				return err
			}
		}
		return nil
	}
	data, err := json.MarshalIndent(subdomains, "", " ")
	if err != nil {
		return err
	}
	// ioutil.WriteFile creates the file itself; the old separate os.Create
	// call leaked a file descriptor and was removed.
	return ioutil.WriteFile(state.OutputDir+domain+"_hosts.json", data, 0644)
}

View File

@ -1,80 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package archiveis is a Archiveis Scraping Engine in Golang
package archiveis
import (
"fmt"
"io/ioutil"
"regexp"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// globalSubdomains accumulates every unique subdomain found across the
// (recursively fetched) archive.is result pages.
var globalSubdomains []string
// enumerate scrapes one archive.is result page for subdomains of domain,
// appending new findings to globalSubdomains, and follows the "next page"
// link recursively until no further page exists.
func enumerate(state *helper.State, baseURL string, domain string) (err error) {
	resp, err := helper.GetHTTPResponse(baseURL, state.Timeout)
	if err != nil {
		return err
	}
	// Get the response body
	body, err := ioutil.ReadAll(resp.Body)
	// FIX: the body was never closed, leaking one connection/descriptor
	// per page fetched; close it as soon as it is fully consumed.
	resp.Body.Close()
	if err != nil {
		return err
	}
	src := string(body)
	match := helper.ExtractSubdomains(src, domain)
	for _, subdomain := range match {
		finishedSub := subdomain
		if !helper.SubdomainExists(finishedSub, globalSubdomains) {
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sARCHIVE.IS%s] %s", helper.Red, helper.Reset, finishedSub)
				} else {
					fmt.Printf("\n[ARCHIVE.IS] %s", finishedSub)
				}
			}
			globalSubdomains = append(globalSubdomains, finishedSub)
		}
	}
	reNext := regexp.MustCompile("<a id=\"next\" style=\".*\" href=\"(.*)\">&rarr;</a>")
	match1 := reNext.FindStringSubmatch(src)
	if len(match1) > 0 {
		// FIX: propagate pagination errors to the caller instead of
		// silently discarding the recursive call's return value.
		return enumerate(state, match1[1], domain)
	}
	return nil
}
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) (i interface{}) {
	domain := args[0].(string)
	state := args[1].(*helper.State)
	// Query using first page. Everything from there would be recursive
	if err := enumerate(state, "http://archive.is/*."+domain, domain); err != nil && !state.Silent {
		fmt.Printf("\narchiveis: %v\n", err)
	}
	return globalSubdomains
}

View File

@ -1,95 +0,0 @@
//
// Written By : @Mzack9999 (Marco Rivoli)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package ask is a golang client for Ask Subdomain Discovery
package ask
import (
"fmt"
"io/ioutil"
"net/url"
"sort"
"strconv"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every unique subdomain discovered across Ask
// result pages (kept sorted by the dedup logic in Query).
var subdomains []string
// Query function returns all subdomains found using the service.
//
// It pages through Ask search results for "site:domain" until either the
// hard page cap is reached or no new subdomains have appeared after the
// configured minimum number of iterations.
func Query(args ...interface{}) (i interface{}) {
	domain := args[0].(string)
	state := args[1].(*helper.State)
	minIterations, _ := strconv.Atoi(state.CurrentSettings.AskPages)
	maxIterations := 760
	searchQuery := ""
	currentPage := 0
	for currentIteration := 0; currentIteration <= maxIterations; currentIteration++ {
		newSearchQuery := "site:" + domain
		if len(subdomains) > 0 {
			newSearchQuery += " -www." + domain
		}
		newSearchQuery = url.QueryEscape(newSearchQuery)
		if searchQuery != newSearchQuery {
			currentPage = 0
			searchQuery = newSearchQuery
		}
		resp, err := helper.GetHTTPResponse("http://www.ask.com/web?q="+searchQuery+"&page="+strconv.Itoa(currentPage)+"&qid=8D6EE6BF52E0C04527E51F64F22C4534&o=0&l=dir&qsrc=998&qo=pagination", state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nask: %v\n", err)
			}
			return subdomains
		}
		// Get the response body
		body, err := ioutil.ReadAll(resp.Body)
		// FIX: the body was never closed, leaking one connection/file
		// descriptor per result page; close it once fully consumed.
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nask: %v\n", err)
			}
			return subdomains
		}
		src := string(body)
		match := helper.ExtractSubdomains(src, domain)
		newSubdomainsFound := 0
		for _, subdomain := range match {
			// Keep the slice sorted so the binary search below is valid.
			if !sort.StringsAreSorted(subdomains) {
				sort.Strings(subdomains)
			}
			insertIndex := sort.SearchStrings(subdomains, subdomain)
			if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
				continue
			}
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sAsk%s] %s", helper.Red, helper.Reset, subdomain)
				} else {
					fmt.Printf("\n[Ask] %s", subdomain)
				}
			}
			subdomains = append(subdomains, subdomain)
			newSubdomainsFound++
		}
		// If no new subdomains are found exits after minIterations
		if newSubdomainsFound == 0 && currentIteration > minIterations {
			break
		}
		currentPage++
	}
	return subdomains
}

View File

@ -1,98 +0,0 @@
//
// Written By : @Mzack9999 (Marco Rivoli)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package baidu is a golang client for Baidu Subdomain Discovery
package baidu
import (
"fmt"
"io/ioutil"
"math/rand"
"net/url"
"sort"
"strconv"
"time"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using the Baidu search engine.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Keep paginating until no new subdomains appear after minIterations
	// pages; maxIterations is a hard safety cap on total requests.
	minIterations, _ := strconv.Atoi(state.CurrentSettings.BaiduPages)
	maxIterations := 760

	searchQuery := ""
	currentPage := 0
	for currentIteration := 0; currentIteration <= maxIterations; currentIteration++ {
		newSearchQuery := "site:" + domain
		if len(subdomains) > 0 {
			newSearchQuery += " -site:www." + domain
		}
		newSearchQuery = url.QueryEscape(newSearchQuery)
		// Restart pagination whenever the effective query changes.
		if searchQuery != newSearchQuery {
			currentPage = 0
			searchQuery = newSearchQuery
		}

		resp, err := helper.GetHTTPResponse("https://www.baidu.com/s?rn=100&pn="+strconv.Itoa(currentPage)+"&wd="+searchQuery+"&oq="+searchQuery, state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nbaidu: %v\n", err)
			}
			return subdomains
		}

		// Read the response body and close it so the connection can be
		// reused (the original leaked it on every iteration).
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nbaidu: %v\n", err)
			}
			return subdomains
		}
		src := string(body)

		match := helper.ExtractSubdomains(src, domain)

		newSubdomainsFound := 0
		for _, subdomain := range match {
			// Keep the slice sorted so the duplicate check below is a
			// binary search rather than a linear scan.
			if !sort.StringsAreSorted(subdomains) {
				sort.Strings(subdomains)
			}
			insertIndex := sort.SearchStrings(subdomains, subdomain)
			if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
				continue
			}
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sBaidu%s] %s", helper.Red, helper.Reset, subdomain)
				} else {
					fmt.Printf("\n[Baidu] %s", subdomain)
				}
			}
			subdomains = append(subdomains, subdomain)
			newSubdomainsFound++
		}
		// If no new subdomains are found, exit after minIterations pages.
		if newSubdomainsFound == 0 && currentIteration > minIterations {
			break
		}
		currentPage++
		// Randomized delay between requests, presumably to avoid rate
		// limiting by Baidu.
		time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
	}
	return subdomains
}

View File

@ -1,97 +0,0 @@
//
// Written By : @Mzack9999 (Marco Rivoli)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package bing is a golang client for Bing Subdomain Discovery
package bing
import (
"fmt"
"io/ioutil"
"net/url"
"regexp"
"sort"
"strconv"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using the Bing search engine.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Keep paginating until no new subdomains appear after minIterations
	// pages; maxIterations is a hard safety cap on total requests.
	minIterations, _ := strconv.Atoi(state.CurrentSettings.BingPages)
	maxIterations := 760

	// Blank out "%xx" escape sequences so they cannot glue names together.
	// Compiled once, outside the pagination loop.
	reSub := regexp.MustCompile(`%.{2}`)

	searchQuery := ""
	currentPage := 0
	for currentIteration := 0; currentIteration <= maxIterations; currentIteration++ {
		newSearchQuery := "domain:" + domain
		if len(subdomains) > 0 {
			newSearchQuery += " -www." + domain
		}
		newSearchQuery = url.QueryEscape(newSearchQuery)
		// Restart pagination whenever the effective query changes.
		if searchQuery != newSearchQuery {
			currentPage = 0
			searchQuery = newSearchQuery
		}

		resp, err := helper.GetHTTPResponse("https://www.bing.com/search?q="+searchQuery+"&go=Submit&first="+strconv.Itoa(currentPage), state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nbing: %v\n", err)
			}
			return subdomains
		}

		// Read the response body and close it so the connection can be
		// reused (the original leaked it on every iteration).
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			// BUG FIX: this path printed unconditionally, ignoring
			// state.Silent unlike every other error path in the package.
			if !state.Silent {
				fmt.Printf("\nbing: %v\n", err)
			}
			return subdomains
		}

		src := reSub.ReplaceAllLiteralString(string(body), " ")

		match := helper.ExtractSubdomains(src, domain)

		newSubdomainsFound := 0
		for _, subdomain := range match {
			// Keep the slice sorted so the duplicate check below is a
			// binary search rather than a linear scan.
			if !sort.StringsAreSorted(subdomains) {
				sort.Strings(subdomains)
			}
			insertIndex := sort.SearchStrings(subdomains, subdomain)
			if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
				continue
			}
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sBing%s] %s", helper.Red, helper.Reset, subdomain)
				} else {
					fmt.Printf("\n[Bing] %s", subdomain)
				}
			}
			subdomains = append(subdomains, subdomain)
			newSubdomainsFound++
		}
		// If no new subdomains are found, exit after minIterations pages.
		if newSubdomainsFound == 0 && currentIteration > minIterations {
			break
		}
		currentPage++
	}
	return subdomains
}

View File

@ -1,277 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package censys is a golang client for Censys Subdomain Discovery
package censys
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"strconv"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
type resultsq struct {
Data []string `json:"parsed.extensions.subject_alt_name.dns_names"`
Data1 []string `json:"parsed.names"`
}
type response struct {
Results []resultsq `json:"results"`
Metadata struct {
Pages int `json:"pages"`
} `json:"metadata"`
}
// all subdomains found
var subdomains []string
// queryPage performs a single authenticated POST to the Censys certificate
// search API for the given result page. It returns all DNS names found on
// that page together with the total page count reported by the API metadata.
func queryPage(domain string, page int, username, key string) ([]string, int, error) {
	// JSON body selecting just the name fields, flattened for easy decoding.
	request := []byte(`{"query":"` + domain + `", "page":` + strconv.Itoa(page) + `, "fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"], "flatten":true}`)

	client := &http.Client{}
	req, err := http.NewRequest("POST", "https://www.censys.io/api/v1/search/certificates", bytes.NewBuffer(request))
	if err != nil {
		return nil, 0, err
	}
	// The Censys API authenticates with HTTP Basic Auth (API ID / secret).
	req.SetBasicAuth(username, key)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Accept", "application/json")

	resp, err := client.Do(req)
	if err != nil {
		return nil, 0, err
	}
	// Read the body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		return nil, 0, err
	}

	var hostResponse response
	if err = json.Unmarshal(body, &hostResponse); err != nil {
		return nil, 0, err
	}

	var names []string
	for _, res := range hostResponse.Results {
		names = append(names, res.Data...)
		names = append(names, res.Data1...)
	}
	return names, hostResponse.Metadata.Pages, nil
}

// recordSubdomains validates and deduplicates the collected names, strips
// wildcard labels, optionally prints each result, and appends them to the
// package-level subdomains slice.
func recordSubdomains(domain string, state *helper.State, collected []string) {
	validSubdomains := helper.Validate(domain, collected)
	uniqueSubdomains := helper.Unique(validSubdomains)
	for _, subdomain := range uniqueSubdomains {
		// Strip a leading wildcard label ("*.") if present.
		if strings.Contains(subdomain, "*.") {
			subdomain = strings.Split(subdomain, "*.")[1]
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sCENSYS%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				fmt.Printf("\n[CENSYS] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
}

// Query function returns all subdomains found using the Censys certificate
// search API. args[0] is the target domain (string), args[1] is the shared
// *helper.State. Requires CensysUsername and CensysSecret to be configured;
// CensysPages controls how many result pages are fetched ("all" follows the
// page count reported by the API).
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Get credentials for performing HTTP Basic Auth.
	username := state.ConfigState.CensysUsername
	key := state.ConfigState.CensysSecret
	if username == "" || key == "" {
		// No credentials configured; this source cannot be used.
		return subdomains
	}

	var collected []string

	if state.CurrentSettings.CensysPages != "all" {
		censysPages, _ := strconv.Atoi(state.CurrentSettings.CensysPages)
		for i := 1; i <= censysPages; i++ {
			names, _, err := queryPage(domain, i, username, key)
			if err != nil {
				if !state.Silent {
					fmt.Printf("\ncensys: %v\n", err)
				}
				return subdomains
			}
			collected = append(collected, names...)
		}
		recordSubdomains(domain, state, collected)
	} else {
		// "all": fetch page 1 first to learn the total page count.
		names, totalPages, err := queryPage(domain, 1, username, key)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\ncensys: %v\n", err)
			}
			return subdomains
		}
		collected = append(collected, names...)
		recordSubdomains(domain, state, collected)

		for i := 2; i <= totalPages; i++ {
			names, _, err := queryPage(domain, i, username, key)
			if err != nil {
				// BUG FIX: the original panicked on this one error path;
				// report and bail out like every other error path instead.
				if !state.Silent {
					fmt.Printf("\ncensys: %v\n", err)
				}
				return subdomains
			}
			collected = append(collected, names...)
			// NOTE(review): validation runs over the cumulative list each
			// page, so names from earlier pages are appended again —
			// preserved from the original implementation.
			recordSubdomains(domain, state, collected)
		}
	}
	return subdomains
}

View File

@ -1,114 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package certdb is a CertDB Subdomain parser in golang
package certdb
import (
"fmt"
"io/ioutil"
"regexp"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// findSubdomains parses subdomains from a single CertDB SSL certificate
// information page (https://certdb.com + link) and sends whatever it found
// — possibly an empty slice — on channel exactly once, on every path.
func findSubdomains(link string, state *helper.State, channel chan []string) {
	var subdomainsfound []string

	resp, err := helper.GetHTTPResponse("https://certdb.com"+link, state.Timeout)
	if err != nil {
		channel <- subdomainsfound
		return
	}

	// Read the response body and close it so the connection can be reused
	// (the original leaked it).
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		channel <- subdomainsfound
		return
	}
	src := string(body)

	SubdomainRegex, err := regexp.Compile("<a href=\"https://certdb.com/domain/(.*)\" target='_blank' class='link-underlined' >")
	if err != nil {
		channel <- subdomainsfound
		return
	}

	// Renamed the loop variable; the original shadowed the link parameter.
	match := SubdomainRegex.FindAllStringSubmatch(src, -1)
	for _, m := range match {
		subdomainsfound = append(subdomainsfound, m[1])
	}
	channel <- subdomainsfound
}
// Query function returns all subdomains found using CertDB certificate data.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to CertDB
	resp, err := helper.GetHTTPResponse("https://certdb.com/domain/"+domain, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncertdb: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncertdb: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	// Get links for all the SSL certificates found on the page.
	Regex, _ := regexp.Compile("<a href=\"(.*)\" class=\"see-more-link\">See more →</a>")
	match := Regex.FindAllStringSubmatch(src, -1)

	var initialSubs []string
	var subsReturned []string

	// Fetch every certificate page concurrently and gather the results;
	// findSubdomains sends on the channel exactly once per goroutine.
	channel := make(chan []string, len(match))
	for _, link := range match {
		go findSubdomains(link[1], state, channel)
	}
	for i := 0; i < len(match); i++ {
		subsReturned = <-channel
		initialSubs = append(initialSubs, subsReturned...)
	}

	for _, subdomain := range initialSubs {
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sCERTDB%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				// BUG FIX: the original printed the whole subdomains slice
				// here instead of the current subdomain.
				fmt.Printf("\n[CERTDB] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,73 +0,0 @@
//
// Written By : @Mzack9999 (Marco Rivoli)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package certificatetransparency is a golang client for Entrust Certificate Transparency
package certificatetransparency
import (
"fmt"
"io/ioutil"
"sort"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using the Entrust Certificate
// Transparency search API. args[0] is the target domain (string), args[1] is
// the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	resp, err := helper.GetHTTPResponse("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed,cert&domain="+domain+"&includeExpired=true&exactMatch=false&limit=5000", state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncertificatetransparency: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncertificatetransparency: %v\n", err)
		}
		return subdomains
	}

	// Replace "u003d" (the tail of the JSON escape \u003d, i.e. "=") with a
	// space so escaped separators cannot glue names together. The original
	// comment wrongly claimed this suppressed %xx sequences.
	src := strings.Replace(string(body), "u003d", " ", -1)

	match := helper.ExtractSubdomains(src, domain)
	for _, subdomain := range match {
		// Keep the slice sorted so the duplicate check below is a binary
		// search rather than a linear scan.
		if !sort.StringsAreSorted(subdomains) {
			sort.Strings(subdomains)
		}
		insertIndex := sort.SearchStrings(subdomains, subdomain)
		if insertIndex < len(subdomains) && subdomains[insertIndex] == subdomain {
			continue
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sEntrust-CTSearch%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				fmt.Printf("\n[Entrust-CTSearch] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,77 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package certspotter is a Golang based client for Certspotter Parsing
package certspotter
import (
"encoding/json"
"fmt"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// Structure of a single dictionary of output by crt.sh
type certspotterObject struct {
DNSNames []string `json:"dns_names"`
}
// array of all results returned
var certspotterData []certspotterObject
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using the Certspotter API.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to Certspotter.
	resp, err := helper.GetHTTPResponse("https://certspotter.com/api/v0/certs?domain="+domain, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncertspotter: %v\n", err)
		}
		return subdomains
	}
	// Close the body once decoding is done so the connection can be reused
	// (the original leaked it).
	defer resp.Body.Close()

	// Decode the JSON response directly from the stream.
	err = json.NewDecoder(resp.Body).Decode(&certspotterData)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncertspotter: %v\n", err)
		}
		return subdomains
	}

	// Append each subdomain found to the subdomains array.
	for _, block := range certspotterData {
		for _, dnsName := range block.DNSNames {
			// Strip the leading wildcard label ("*.") if present.
			if strings.Contains(dnsName, "*.") {
				dnsName = strings.Split(dnsName, "*.")[1]
			}
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sCERTSPOTTER%s] %s", helper.Red, helper.Reset, dnsName)
				} else {
					fmt.Printf("\n[CERTSPOTTER] %s", dnsName)
				}
			}
			subdomains = append(subdomains, dnsName)
		}
	}
	return subdomains
}

View File

@ -1,85 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package commoncrawl is a Golang based client for Parsing Subdomains from Commoncrawl
package commoncrawl
import (
"fmt"
"io/ioutil"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
type commoncrawlObject struct {
NameValue string `json:"url"`
}
// array of all results returned
var commoncrawlData []string
var commonCrawlIndexes = []string{
"CC-MAIN-2016-18",
"CC-MAIN-2016-26",
"CC-MAIN-2016-44",
"CC-MAIN-2017-04",
"CC-MAIN-2017-17",
"CC-MAIN-2017-26",
"CC-MAIN-2017-43",
"CC-MAIN-2018-05",
"CC-MAIN-2018-17",
"CC-MAIN-2018-26",
}
// Query function returns all subdomains found in Commoncrawl index data.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
// Each configured index is queried in turn; the first error aborts the scan
// and returns whatever has been collected so far.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	for _, index := range commonCrawlIndexes {
		// Query one Commoncrawl index for URLs under the target domain.
		// (The original comment wrongly said "Threatcrowd".)
		resp, err := helper.GetHTTPResponse(fmt.Sprintf("http://index.commoncrawl.org/%s-index?url=*.%s&output=json", index, domain), state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\ncommoncrawl: %v\n", err)
			}
			return subdomains
		}

		// Read the response body and close it so the connection can be
		// reused (the original leaked it on every iteration).
		respBody, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\ncommoncrawl: %v\n", err)
			}
			return subdomains
		}

		// Local renamed from commoncrawlData, which shadowed the
		// package-level variable of the same name.
		found := helper.ExtractSubdomains(string(respBody), domain)

		for _, subdomain := range found {
			if !helper.SubdomainExists(subdomain, subdomains) {
				if state.Verbose {
					if state.Color {
						fmt.Printf("\n[%sCommoncrawl%s] %s", helper.Red, helper.Reset, subdomain)
					} else {
						fmt.Printf("\n[Commoncrawl] %s", subdomain)
					}
				}
				subdomains = append(subdomains, subdomain)
			}
		}
	}
	return subdomains
}

View File

@ -1,88 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package crtsh is a Golang based client for CRT.SH Parsing
package crtsh
import (
"encoding/json"
"fmt"
"io/ioutil"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
type crtshObject struct {
NameValue string `json:"name_value"`
}
var crtshData []crtshObject
// all subdomains found
var subdomains []string
// Query function returns all subdomains found in crt.sh certificate logs.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// %25 is a URL-encoded "%", so this asks crt.sh for %.domain (wildcard).
	resp, err := helper.GetHTTPResponse("https://crt.sh/?q=%25."+domain+"&output=json", state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncrtsh: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	respBody, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncrtsh: %v\n", err)
		}
		return subdomains
	}

	// crt.sh answers with an HTML error page when there are no results.
	if strings.Contains(string(respBody), "The requested URL / was not found on this server.") {
		if !state.Silent {
			// BUG FIX: the original printed err here, which is always nil
			// on this path (showing "<nil>"); report the real condition.
			fmt.Printf("\ncrtsh: no results found\n")
		}
		return subdomains
	}

	// Decode the JSON payload.
	err = json.Unmarshal(respBody, &crtshData)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ncrtsh: %v\n", err)
		}
		return subdomains
	}

	// Append each subdomain found to the subdomains array.
	for _, subdomain := range crtshData {
		// Strip the leading wildcard label ("*.") if present.
		if strings.Contains(subdomain.NameValue, "*.") {
			subdomain.NameValue = strings.Split(subdomain.NameValue, "*.")[1]
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sCRT.SH%s] %s", helper.Red, helper.Reset, subdomain.NameValue)
			} else {
				fmt.Printf("\n[CRT.SH] %s", subdomain.NameValue)
			}
		}
		subdomains = append(subdomains, subdomain.NameValue)
	}
	return subdomains
}

View File

@ -1,73 +0,0 @@
//
// Written By : @mehimansu (Himanshu Das)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package dnsdb is a Golang driver for dnsdb.org
package dnsdb
import (
"fmt"
"io/ioutil"
"regexp"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using dnsdb.org.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to DnsDB.
	resp, err := helper.GetHTTPResponse("http://www.dnsdb.org/f/"+domain+".dnsdb.org/", state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdb: %v\n", err)
		}
		return subdomains
	}
	// Close the body so the connection can be reused (the original leaked it).
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		if !state.Silent {
			fmt.Printf("\ndnsdb: Unexpected return status %d\n", resp.StatusCode)
		}
		return subdomains
	}

	// Get the response body.
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdb: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	// Subdomains appear as anchor text in the listing page.
	re := regexp.MustCompile("<a[^>]*?[^>]*>(.*?)</a>")
	match := re.FindAllStringSubmatch(src, -1)

	for _, subdomain := range match {
		stringSplit := strings.Split(subdomain[0], "\">")[1]
		// BUG FIX: TrimRight takes a cutset — any trailing '<', '/', 'a' or
		// '>' characters would be eaten (e.g. "data</a>" -> "dat").
		// TrimSuffix removes exactly the "</a>" suffix.
		finishedSub := strings.TrimSuffix(stringSplit, "</a>")
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sDNSDB%s] %s", helper.Red, helper.Reset, finishedSub)
			} else {
				fmt.Printf("\n[DNSDB] %s", finishedSub)
			}
		}
		subdomains = append(subdomains, finishedSub)
	}
	return subdomains
}

View File

@ -1,130 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package dnsdumpster is a Parser for subdomains from DNSDumpster
package dnsdumpster
import (
"fmt"
"io/ioutil"
"net/http"
"net/http/cookiejar"
"net/url"
"regexp"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
var gCookies []*http.Cookie
// Query function returns all subdomains found using DNSDumpster.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
//
// DNSDumpster requires a CSRF token scraped from a GET of the landing page,
// which is then replayed (together with its cookie) in a form POST.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// CookieJar to hold the csrf cookie between the GET and the POST.
	curCookieJar, _ := cookiejar.New(nil)

	// Make a http request to DNSDumpster. The result cookies go into a
	// local (the original := silently shadowed the package-level gCookies).
	resp, cookies, err := helper.GetHTTPCookieResponse("https://dnsdumpster.com", gCookies, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdumpster: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdumpster: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	re := regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")
	match := re.FindAllStringSubmatch(src, -1)
	// BUG FIX: the original indexed match[0] unguarded and panicked when
	// the page layout changed or the request was blocked.
	if len(match) == 0 {
		if !state.Silent {
			fmt.Printf("\ndnsdumpster: could not find csrf token\n")
		}
		return subdomains
	}
	// CSRF middleware token for the POST request.
	csrfmiddlewaretoken := match[0]

	// Set cookiejar values.
	u, _ := url.Parse("https://dnsdumpster.com")
	curCookieJar.SetCookies(u, cookies)

	hc := http.Client{Jar: curCookieJar}

	form := url.Values{}
	form.Add("csrfmiddlewaretoken", csrfmiddlewaretoken[1])
	form.Add("targetip", domain)

	// Create a post request to get subdomain data.
	req, err := http.NewRequest("POST", "https://dnsdumpster.com", strings.NewReader(form.Encode()))
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdumpster: %v\n", err)
		}
		return subdomains
	}

	req.PostForm = form
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Add("Referer", "https://dnsdumpster.com")
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1")

	resp, err = hc.Do(req)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdumpster: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err = ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnsdumpster: %v\n", err)
		}
		return subdomains
	}
	src = string(body)

	// Find the table cells holding host records.
	Regex, _ := regexp.Compile("<td class=\"col-md-4\">(.*\\..*\\..*)<br>")
	match = Regex.FindAllStringSubmatch(src, -1)

	// Slice to hold candidate subdomains before validation.
	var initialSubs []string
	for _, data := range match {
		initialSubs = append(initialSubs, data[1])
	}

	validSubdomains := helper.Validate(domain, initialSubs)
	for _, subdomain := range validSubdomains {
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sDNSDUMPSTER%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				// BUG FIX: the original printed the whole subdomains slice
				// here instead of the current subdomain.
				fmt.Printf("\n[DNSDUMPSTER] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,62 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package dnstable is a Golang driver for dnstable.com
package dnstable
import (
"fmt"
"io/ioutil"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using dnstable.com.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to DNSTable (the old comment wrongly said Netcraft).
	resp, err := helper.GetHTTPResponse("https://dnstable.com/domain/"+domain, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnstable: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ndnstable: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	match := helper.ExtractSubdomains(src, domain)
	for _, subdomain := range match {
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sDNSTABLE%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				fmt.Printf("\n[DNSTABLE] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,73 +0,0 @@
//
// Written By : @Mzack9999
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package dogpile is a golang client for Dogpile Subdomain Discovery
package dogpile
import (
"fmt"
"io/ioutil"
"math/rand"
"regexp"
"strconv"
"time"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using the Dogpile search
// engine. args[0] is the target domain (string), args[1] is the shared
// *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	maxPages, _ := strconv.Atoi(state.CurrentSettings.DogpilePages)

	// Blank out "%xxxx" escape sequences so they cannot glue names together.
	// Compiled once, outside the pagination loop (the original recompiled it
	// on every iteration).
	reSub := regexp.MustCompile(`%.{4}`)

	for currentPage := 0; currentPage <= maxPages; currentPage++ {
		// Dogpile paginates with qsi = 1, 16, 31, ... (15 results per page).
		url := "http://www.dogpile.com/search/web?q=" + domain + "&qsi=" + strconv.Itoa(currentPage*15+1)
		resp, err := helper.GetHTTPResponse(url, state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\ndogpile: %v\n", err)
			}
			return subdomains
		}

		// Read the response body and close it so the connection can be
		// reused (the original leaked it on every iteration).
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\ndogpile: %v\n", err)
			}
			return subdomains
		}

		src := reSub.ReplaceAllLiteralString(string(body), " ")

		match := helper.ExtractSubdomains(src, domain)
		for _, subdomain := range match {
			if !helper.SubdomainExists(subdomain, subdomains) {
				if state.Verbose {
					if state.Color {
						fmt.Printf("\n[%sDogpile%s] %s", helper.Red, helper.Reset, subdomain)
					} else {
						fmt.Printf("\n[Dogpile] %s", subdomain)
					}
				}
				subdomains = append(subdomains, subdomain)
			}
		}
		// Randomized delay between requests, presumably to avoid rate
		// limiting by Dogpile.
		time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
	}
	return subdomains
}

View File

@ -1,67 +0,0 @@
//
// Written By : @Mzack9999
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package exalead is a golang client for Exalead Subdomain Discovery
package exalead
import (
"fmt"
"io/ioutil"
"regexp"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using the Exalead search
// engine. args[0] is the target domain (string), args[1] is the shared
// *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// BUG FIX: elements_per_page was introduced with a second "?" instead
	// of "&", producing a malformed query string.
	url := "http://www.exalead.com/search/web/results/?q=site:" + domain + "+-www&elements_per_page=50"

	resp, err := helper.GetHTTPResponse(url, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nexalead: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nexalead: %v\n", err)
		}
		return subdomains
	}

	// Blank out "%xx" escape sequences so they cannot glue names together.
	reSub := regexp.MustCompile(`%.{2}`)
	src := reSub.ReplaceAllLiteralString(string(body), " ")

	match := helper.ExtractSubdomains(src, domain)
	for _, subdomain := range match {
		if !helper.SubdomainExists(subdomain, subdomains) {
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sExalead%s] %s", helper.Red, helper.Reset, subdomain)
				} else {
					fmt.Printf("\n[Exalead] %s", subdomain)
				}
			}
			subdomains = append(subdomains, subdomain)
		}
	}
	return subdomains
}

View File

@ -1,68 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package findsubdomains is a Golang driver for findsubdomains.com
package findsubdomains
import (
"fmt"
"io/ioutil"
"regexp"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// Query function returns all subdomains found using findsubdomains.com.
// args[0] is the target domain (string), args[1] is the shared *helper.State.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to FindSubdomains (the old comment wrongly said
	// Netcraft).
	resp, err := helper.GetHTTPResponse("https://findsubdomains.com/subdomains-of/"+domain, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nfindsubdomains: %v\n", err)
		}
		return subdomains
	}

	// Read the response body and close it so the connection can be reused.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nfindsubdomains: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	re := regexp.MustCompile("<a class=\"aggregated-link\" rel=\"nofollow\" href=\"(.*)\" target=\"_blank\">")
	match := re.FindAllStringSubmatch(src, -1)

	for _, subdomain := range match {
		// Each href looks like "<scheme>://<subdomain>"; keep the part
		// after "//".
		finishedSub := strings.Split(subdomain[1], "//")[1]
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sFINDSUBDOMAINS%s] %s", helper.Red, helper.Reset, finishedSub)
			} else {
				fmt.Printf("\n[FINDSUBDOMAINS] %s", finishedSub)
			}
		}
		subdomains = append(subdomains, finishedSub)
	}
	return subdomains
}

View File

@ -1,161 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package googleter is a Golang based client for GoogleTER Parsing
package googleter
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
"regexp"
"strconv"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// all subdomains found
var subdomains []string
// makeRequest fetches one page of Google Transparency Report certificate
// search results for domain. An empty token requests the first page; a
// non-empty token requests the next page. Error reporting is left to the
// caller, which already prints failures.
//
// BUG FIX: this function previously returned a nil error on every failure
// path, so callers could never detect a failure. It also never closed the
// response body, leaking the connection.
func makeRequest(token string, domain string, state *helper.State) (respBody []byte, err error) {
	requestURI := ""
	if token == "" {
		requestURI = "https://www.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?domain=" + url.QueryEscape(domain) + "&include_expired=true&include_subdomains=true"
	} else {
		requestURI = "https://www.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch/page?domain=" + url.QueryEscape(domain) + "&include_expired=true&include_subdomains=true&p=" + url.QueryEscape(token)
	}

	client := &http.Client{}
	req, err := http.NewRequest("GET", requestURI, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1")
	req.Header.Add("Connection", "close")
	req.Header.Set("Referer", "https://transparencyreport.google.com/https/certificates")

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	respBody, err = ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	return respBody, nil
}
// printSubdomains records every not-yet-seen subdomain from match into the
// package-level subdomains slice, echoing each one when verbose output is
// enabled. Wildcard entries ("*.example.com") are stripped of their prefix.
func printSubdomains(match [][]string, state *helper.State) {
	for _, entry := range match {
		if helper.SubdomainExists(entry[0], subdomains) {
			continue
		}
		candidate := entry[0]
		if strings.Contains(candidate, "*.") {
			candidate = strings.Split(candidate, "*.")[1]
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sGoogleTER%s] %s", helper.Red, helper.Reset, candidate)
			} else {
				fmt.Printf("\n[GoogleTER] %s", candidate)
			}
		}
		subdomains = append(subdomains, candidate)
	}
}
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	respBody, err := makeRequest("", domain, state)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\ngoogleter: %v\n", err)
		}
		return subdomains
	}

	var Token string

	// Page metadata looks like [null,"<token>",null,<page>,<maxpage>].
	metaRegex := regexp.MustCompile(`\[null,"(.*)",null,(.*),(.*)]`)
	matches := metaRegex.FindStringSubmatch(string(respBody))
	if len(matches) <= 1 {
		return subdomains
	}

	subdomainRegex := regexp.MustCompile(`([A-Za-z0-9]+\.)+` + domain)
	match := subdomainRegex.FindAllStringSubmatch(string(respBody), -1)
	printSubdomains(match, state)

	// In some weird cases, googleter returns specific hashes embedded in the
	// results. We fall back to these regexes in order to find that token.
	cryptoSHA2 := regexp.MustCompile(`\[null,"B05DFBBD58ECDB8D18100E6AA4DA0C64AECA148D41971942A8E2068375063759",null,(.*),(.*)]`)
	mcryptoSHA2 := cryptoSHA2.FindStringSubmatch(string(respBody))
	if len(mcryptoSHA2) >= 1 {
		Token = mcryptoSHA2[0]
	} else {
		cryptoSHA2CBC := regexp.MustCompile(`\[null,"A0315234520886D17581376D876B44FE0FADAD26CA47A9A0A1F4BA9BD8735947",null,(.*),(.*)]`)
		mcryptoSHA2CBC := cryptoSHA2CBC.FindStringSubmatch(string(respBody))
		if len(mcryptoSHA2CBC) >= 1 {
			// BUG FIX: this branch previously read mcryptoSHA2[0], which is
			// empty here (we only reach this branch when the first regex did
			// NOT match) and would panic with an index out of range.
			Token = mcryptoSHA2CBC[0]
		} else {
			Token = matches[1]
		}
	}

	// Compiled once, outside the paging loop (was re-compiled per page).
	metaRegex2 := regexp.MustCompile(`\["(.*)",".*",null,(.*),(.*)]`)

	MaxPages, _ := strconv.Atoi(matches[3])
	for i := 1; i <= MaxPages; i++ {
		respBody, err = makeRequest(Token, domain, state)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\ngoogleter: %v\n", err)
			}
			return subdomains
		}

		match := subdomainRegex.FindAllStringSubmatch(string(respBody), -1)
		printSubdomains(match, state)

		// Pull the continuation token from either of the two metadata shapes
		// the endpoint emits; the second form wins when both match.
		matches := metaRegex2.FindStringSubmatch(string(respBody))
		matches2 := metaRegex.FindStringSubmatch(string(respBody))
		if len(matches2) > 1 {
			Token = matches2[1]
		}
		if len(matches) > 1 {
			Token = matches[1]
		}

		// Honor the user-configured page limit.
		maxPages, _ := strconv.Atoi(state.CurrentSettings.GoogleterPages)
		if i > maxPages {
			break
		}
	}
	return subdomains
}

View File

@ -1,56 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana) and @picatz (Kent Gruber)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package hackertarget is a golang based Hackertarget subdomains search client
package hackertarget
import (
"bufio"
"fmt"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// NOTE: removed the dead "var result helper.Result" pair — it was
	// assigned once and never read.
	resp, err := helper.GetHTTPResponse("https://api.hackertarget.com/hostsearch/?q="+domain, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nhackertarget: %v\n", err)
		}
		return subdomains
	}
	defer resp.Body.Close()

	// Each response line is "hostname,ip"; only the hostname is kept.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		subdomain := strings.Split(scanner.Text(), ",")[0]
		subdomains = append(subdomains, subdomain)
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sHACKERTARGET%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				fmt.Printf("\n[HACKERTARGET] %s", subdomain)
			}
		}
	}
	// BUG FIX: a scan error (e.g. a truncated response) was previously
	// silently ignored; report it like every other failure.
	if err := scanner.Err(); err != nil && !state.Silent {
		fmt.Printf("\nhackertarget: %v\n", err)
	}
	return subdomains
}

View File

@ -1,194 +0,0 @@
//
// Written By : @Mzack9999
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package ipv4info is a golang client for Ipv4Info
package ipv4info
import (
"fmt"
"io/ioutil"
"regexp"
"strconv"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// fetchPage performs a GET request against the given ipv4info URL and
// returns the response body as a string, closing the body in all cases.
func fetchPage(pageURL string, state *helper.State) (string, error) {
	resp, err := helper.GetHTTPResponse(pageURL, state.Timeout)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	return string(body), nil
}

// Query function returns all subdomains found using the service.
//
// The flow follows ipv4info.com's page structure: search page -> ip-address
// page -> dns page -> subdomains pages (followed until no "pageN" link is
// advertised).
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// printError reports a failure unless silent mode is active.
	printError := func(v interface{}) {
		if !state.Silent {
			fmt.Printf("\nipv4info: %v\n", v)
		}
	}

	src, err := fetchPage("http://ipv4info.com/search/"+domain, state)
	if err != nil {
		printError(err)
		return subdomains
	}

	// Get IP address page token
	regxTokens := regexp.MustCompile("/ip-address/(.*)/" + domain)
	matchTokens := regxTokens.FindAllString(src, -1)
	if len(matchTokens) == 0 {
		// BUG FIX: the original printed the (nil) err value here, producing
		// a meaningless "<nil>" message.
		printError("no ip-address token found")
		return subdomains
	}
	token := matchTokens[0]

	src, err = fetchPage("http://ipv4info.com"+token, state)
	if err != nil {
		printError(err)
		return subdomains
	}

	// Get DNS address page token
	regxTokens = regexp.MustCompile("/dns/(.*?)/" + domain)
	matchTokens = regxTokens.FindAllString(src, -1)
	if len(matchTokens) == 0 {
		printError("no dns token found")
		return subdomains
	}
	token = matchTokens[0]

	// BUG FIX: in the original, the "return subdomains" for this request was
	// nested inside the !state.Silent check, so in silent mode a failed
	// request was ignored and processing continued on stale data.
	src, err = fetchPage("http://ipv4info.com"+token, state)
	if err != nil {
		printError(err)
		return subdomains
	}

	// Get first subdomains page token
	regxTokens = regexp.MustCompile("/subdomains/(.*?)/" + domain)
	matchTokens = regxTokens.FindAllString(src, -1)
	if len(matchTokens) == 0 {
		printError("no subdomains token found")
		return subdomains
	}
	token = matchTokens[0]

	// Get first subdomains page
	src, err = fetchPage("http://ipv4info.com"+token, state)
	if err != nil {
		printError(err)
		return subdomains
	}
	subdomains = append(subdomains, extractSubdomains(domain, src, state)...)

	// Follow numbered "pageN" links until no further page is advertised.
	for nextPage := 1; ; nextPage++ {
		pageRegex := regexp.MustCompile("/subdomains/.*/page" + strconv.Itoa(nextPage) + "/" + domain + ".html")
		pageTokens := pageRegex.FindAllString(src, -1)
		if len(pageTokens) == 0 {
			break
		}
		src, err = fetchPage("http://ipv4info.com"+pageTokens[0], state)
		if err != nil {
			printError(err)
			return subdomains
		}
		subdomains = append(subdomains, extractSubdomains(domain, src, state)...)
	}
	return subdomains
}
// extractSubdomains pulls every subdomain of domain out of text, skipping
// duplicates within this call and echoing each new entry when verbose
// output is enabled.
func extractSubdomains(domain, text string, state *helper.State) (subdomains []string) {
	for _, candidate := range helper.ExtractSubdomains(text, domain) {
		if helper.SubdomainExists(candidate, subdomains) {
			continue
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sIpv4Info%s] %s", helper.Red, helper.Reset, candidate)
			} else {
				fmt.Printf("\n[Ipv4Info] %s", candidate)
			}
		}
		subdomains = append(subdomains, candidate)
	}
	return subdomains
}

View File

@ -1,103 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package passivetotal is a golang client for Passive total Subdomain Discovery
package passivetotal
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// passivetotalObject mirrors the relevant part of the PassiveTotal
// enrichment API response: a list of bare subdomain labels.
type passivetotalObject struct {
Subdomains []string `json:"subdomains"`
}
// passivetotalData is the package-level decode target reused by Query.
var passivetotalData passivetotalObject
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Get credentials for performing HTTP Basic Auth
	username := state.ConfigState.PassivetotalUsername
	key := state.ConfigState.PassivetotalKey
	if username == "" || key == "" {
		// Without credentials this source cannot be queried.
		return subdomains
	}

	// The API expects the query as a JSON body, even on a GET request.
	var request = []byte(`{"query":"` + domain + `"}`)

	client := &http.Client{}
	req, err := http.NewRequest("GET", "https://api.passivetotal.org/v2/enrichment/subdomains", bytes.NewBuffer(request))
	if err != nil {
		if !state.Silent {
			fmt.Printf("\npassivetotal: %v\n", err)
		}
		return subdomains
	}
	req.SetBasicAuth(username, key)
	// Set content type as application/json
	req.Header.Set("Content-Type", "application/json")

	resp, err := client.Do(req)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\npassivetotal: %v\n", err)
		}
		return subdomains
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\npassivetotal: %v\n", err)
		}
		return subdomains
	}

	// Decode the json format
	err = json.Unmarshal(body, &passivetotalData)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\npassivetotal: %v\n", err)
		}
		return subdomains
	}

	// The API returns bare labels; qualify each with the parent domain.
	for _, subdomain := range passivetotalData.Subdomains {
		finalSubdomain := subdomain + "." + domain
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sPASSIVETOTAL%s] %s", helper.Red, helper.Reset, finalSubdomain)
			} else {
				fmt.Printf("\n[PASSIVETOTAL] %s", finalSubdomain)
			}
		}
		subdomains = append(subdomains, finalSubdomain)
	}
	return subdomains
}

View File

@ -1,74 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package ptrarchive is a PTRArchive subdomain parser in golang
package ptrarchive
import (
"fmt"
"io/ioutil"
"regexp"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to PTRArchive. (The old comment said "CertDB",
	// a copy-paste leftover.)
	resp, err := helper.GetHTTPResponse("http://ptrarchive.com/tools/search3.htm?label="+domain+"&date=ALL", state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nptrarchive: %v\n", err)
		}
		return subdomains
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	// Get the response body
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nptrarchive: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	// Parse subdomains found; entries appear as "] name [" in the page.
	Regex, _ := regexp.Compile("] (.*) \\[")
	match := Regex.FindAllStringSubmatch(src, -1)

	// Collect raw candidates, then keep only validated subdomains.
	var initialSubs []string
	for _, data := range match {
		initialSubs = append(initialSubs, data[1])
	}
	validSubdomains := helper.Validate(domain, initialSubs)

	for _, subdomain := range validSubdomains {
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sPTRARCHIVE%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				// BUG FIX: this previously printed the whole subdomains
				// slice instead of the current subdomain.
				fmt.Printf("\n[PTRARCHIVE] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,150 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package riddler is a Parser for subdomains from Riddler
package riddler
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// authentication mirrors the relevant part of riddler.io's login response.
type authentication struct {
Response struct {
User struct {
AuthenticationToken string `json:"authentication_token"`
} `json:"user"`
} `json:"response"`
}
// host is a single search hit; only the host name is used.
type host struct {
Host string `json:"host"`
}
// Package-level decode targets reused by Query.
var hostResponse []host
var auth authentication
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Both credentials are required for this source.
	if state.ConfigState.RiddlerEmail == "" || state.ConfigState.RiddlerPassword == "" {
		return subdomains
	}

	hc := http.Client{}

	// Log in to obtain an authentication token.
	var data = []byte(`{"email":"` + state.ConfigState.RiddlerEmail + `", "password":"` + state.ConfigState.RiddlerPassword + `"}`)
	req, err := http.NewRequest("POST", "https://riddler.io/auth/login", bytes.NewBuffer(data))
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	req.Header.Add("Content-Type", "application/json")

	resp, err := hc.Do(req)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	body, err := ioutil.ReadAll(resp.Body)
	// BUG FIX: close the login response body (previously leaked).
	resp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	err = json.Unmarshal(body, &auth)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	if auth.Response.User.AuthenticationToken == "" {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", "failed to get authentication token")
		}
		return subdomains
	}

	// Search for hosts under the target domain using the token.
	data = []byte(`{"query":"pld:` + domain + `", "output":"host", "limit":500}`)
	req, err = http.NewRequest("POST", "https://riddler.io/api/search", bytes.NewBuffer(data))
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authentication-Token", auth.Response.User.AuthenticationToken)

	resp, err = hc.Do(req)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	body, err = ioutil.ReadAll(resp.Body)
	// BUG FIX: close the search response body (previously leaked).
	resp.Body.Close()
	if err != nil {
		// BUG FIX: this branch previously printed a misleading
		// "failed to get authentication token" message instead of err.
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}
	err = json.Unmarshal(body, &hostResponse)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nriddler: %v\n", err)
		}
		return subdomains
	}

	for _, host := range hostResponse {
		subdomain := host.Host
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sRIDDLER%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				// BUG FIX: this previously printed the whole subdomains
				// slice instead of the current subdomain.
				fmt.Printf("\n[RIDDLER] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,95 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package securitytrails is a golang SecurityTrails API client for subdomain discovery.
package securitytrails
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// securitytrailsObject mirrors the relevant part of the SecurityTrails
// subdomains API response: a list of bare subdomain labels.
type securitytrailsObject struct {
Subdomains []string `json:"subdomains"`
}
// securitytrailsData is the package-level decode target reused by Query.
var securitytrailsData securitytrailsObject
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// An API key is required for this source.
	securitytrailsKey := state.ConfigState.SecurityTrailsKey
	if securitytrailsKey == "" {
		return subdomains
	}

	client := &http.Client{}
	req, err := http.NewRequest("GET", "https://api.securitytrails.com/v1/domain/"+domain+"/subdomains", nil)
	if err != nil {
		// BUG FIX: this message previously said "passivetotal" — a
		// copy-paste leftover from another source.
		if !state.Silent {
			fmt.Printf("\nsecuritytrails: %v\n", err)
		}
		return subdomains
	}
	req.Header.Add("APIKEY", securitytrailsKey)

	resp, err := client.Do(req)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nsecuritytrails: %v\n", err)
		}
		return subdomains
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nsecuritytrails: %v\n", err)
		}
		return subdomains
	}

	// Decode the json format
	err = json.Unmarshal(body, &securitytrailsData)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nsecuritytrails: %v\n", err)
		}
		return subdomains
	}

	// The API returns bare labels; qualify each with the parent domain.
	for _, subdomain := range securitytrailsData.Subdomains {
		finalSubdomain := subdomain + "." + domain
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sSECURITYTRAILS%s] %s", helper.Red, helper.Reset, finalSubdomain)
			} else {
				fmt.Printf("\n[SECURITYTRAILS] %s", finalSubdomain)
			}
		}
		subdomains = append(subdomains, finalSubdomain)
	}
	return subdomains
}

View File

@ -1,104 +0,0 @@
//
// Written By : @Mzack9999
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package shodan is a golang client for Shodan.io
package shodan
import (
"encoding/json"
"fmt"
"io/ioutil"
"strconv"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// ShodanResult is the top-level JSON envelope returned by the Shodan host
// search API: a list of matches plus an optional error string.
type ShodanResult struct {
Matches []shodanObject `json:"matches"`
Result int `json:"result"`
Error string `json:"error"`
}
// shodanObject holds the host names attached to a single match.
// (The original comment referencing crt.sh was a copy-paste leftover.)
type shodanObject struct {
Hostnames []string `json:"hostnames"`
}
// shodanResult is the package-level decode target reused across pages.
var shodanResult ShodanResult
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// An API key is required for this source.
	shodanAPIKey := state.ConfigState.ShodanAPIKey
	if shodanAPIKey == "" {
		return subdomains
	}

	// Page through results up to the configured page limit.
	maxPages, _ := strconv.Atoi(state.CurrentSettings.ShodanPages)
	for currentPage := 0; currentPage <= maxPages; currentPage++ {
		resp, err := helper.GetHTTPResponse("https://api.shodan.io/shodan/host/search?query=hostname:"+domain+"&page="+strconv.Itoa(currentPage)+"&key="+shodanAPIKey, state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nshodan: %v\n", err)
			}
			return subdomains
		}
		respBody, err := ioutil.ReadAll(resp.Body)
		// BUG FIX: close each page's body explicitly (deferring inside the
		// loop would pile closes up until the function returns; not closing
		// at all — the original behavior — leaks the connection).
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nshodan: %v\n", err)
			}
			return subdomains
		}

		// BUG FIX: reset the shared decode target so a page whose JSON lacks
		// "matches" does not re-process the previous page's data.
		shodanResult = ShodanResult{}
		err = json.Unmarshal(respBody, &shodanResult)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nshodan: %v\n", err)
			}
			return subdomains
		}
		if shodanResult.Error != "" {
			return subdomains
		}

		// Append each subdomain found to subdomains array
		for _, block := range shodanResult.Matches {
			for _, hostname := range block.Hostnames {
				// Fix wildcard subdomains containing an asterisk prefix.
				if strings.Contains(hostname, "*.") {
					hostname = strings.Split(hostname, "*.")[1]
				}
				if state.Verbose {
					if state.Color {
						fmt.Printf("\n[%sSHODAN%s] %s", helper.Red, helper.Reset, hostname)
					} else {
						fmt.Printf("\n[SHODAN] %s", hostname)
					}
				}
				subdomains = append(subdomains, hostname)
			}
		}
	}
	return subdomains
}

View File

@ -1,84 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package sitedossier is a Sitedossier Scraping Engine in Golang
package sitedossier
import (
"fmt"
"io/ioutil"
"math/rand"
"regexp"
"time"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// globalSubdomains accumulates every result found by this source across the
// recursive enumerate calls. It is package-level state shared by Query.
var globalSubdomains []string
// enumerate fetches one sitedossier result page, records every new subdomain
// it contains into globalSubdomains, and follows the "next page" link
// recursively until none is advertised.
func enumerate(state *helper.State, baseURL string, domain string) (err error) {
	resp, err := helper.GetHTTPResponse(baseURL, state.Timeout)
	if err != nil {
		return err
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	src := string(body)

	match := helper.ExtractSubdomains(src, domain)
	for _, subdomain := range match {
		if !helper.SubdomainExists(subdomain, globalSubdomains) {
			if state.Verbose {
				if state.Color {
					fmt.Printf("\n[%sSITEDOSSIER%s] %s", helper.Red, helper.Reset, subdomain)
				} else {
					fmt.Printf("\n[SITEDOSSIER] %s", subdomain)
				}
			}
			globalSubdomains = append(globalSubdomains, subdomain)
		}
	}

	// Random delay between page fetches to avoid hammering the service.
	time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)

	reNext := regexp.MustCompile("<a href=\"(.*)\"><b>.*</b></a><br>")
	match1 := reNext.FindStringSubmatch(src)
	if len(match1) > 0 {
		// BUG FIX: the recursive call's error was previously discarded.
		return enumerate(state, "http://www.sitedossier.com"+match1[1], domain)
	}
	return nil
}
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Start at the parent-domain listing; enumerate recurses through any
	// further result pages on its own.
	if err := enumerate(state, "http://www.sitedossier.com/parentdomain/"+domain, domain); err != nil && !state.Silent {
		fmt.Printf("\nsitedossier: %v\n", err)
	}
	return globalSubdomains
}

View File

@ -1,84 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package threatcrowd is a Golang based client for Threatcrowd API
package threatcrowd
import (
"encoding/json"
"fmt"
"io/ioutil"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// threatcrowdObject mirrors the relevant part of the Threatcrowd domain
// report response: a list of subdomain names.
type threatcrowdObject struct {
Subdomains []string `json:"subdomains"`
}
// threatcrowdData is the package-level decode target reused by Query.
var threatcrowdData threatcrowdObject
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to Threatcrowd
	resp, err := helper.GetHTTPResponse("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain="+domain, state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nthreatcrowd: %v\n", err)
		}
		return subdomains
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	// Get the response body
	respBody, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nthreatcrowd: %v\n", err)
		}
		return subdomains
	}

	// Decode the json format
	err = json.Unmarshal(respBody, &threatcrowdData)
	if err != nil {
		// BUG FIX: the return was previously nested inside the !state.Silent
		// check, so in silent mode a decode failure fell through to the loop
		// instead of returning.
		if !state.Silent {
			fmt.Printf("\nthreatcrowd: %v\n", err)
		}
		return subdomains
	}

	// Append each subdomain found to subdomains array
	for _, subdomain := range threatcrowdData.Subdomains {
		// Fix wildcard subdomains containing an asterisk prefix.
		if strings.Contains(subdomain, "*.") {
			subdomain = strings.Split(subdomain, "*.")[1]
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sTHREATCROWD%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				fmt.Printf("\n[THREATCROWD] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,69 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package threatminer is a Threatminer subdomain parser in golang
package threatminer
import (
"fmt"
"io/ioutil"
"regexp"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Make a http request to ThreatMiner. (The old comment said "CertDB",
	// a copy-paste leftover.)
	resp, err := helper.GetHTTPResponse("https://www.threatminer.org/getData.php?e=subdomains_container&q="+domain+"&t=0&rt=10&p=1", state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nthreatminer: %v\n", err)
		}
		return subdomains
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	// Get the response body
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nthreatminer: %v\n", err)
		}
		return subdomains
	}
	src := string(body)

	// Subdomains appear as domain.php?q=<name> links in the page.
	Regex, _ := regexp.Compile("\"domain\\.php\\?q=([a-zA-Z0-9\\*_.-]+\\." + domain + ")")
	match := Regex.FindAllStringSubmatch(src, -1)
	for _, m := range match {
		// First capturing group is the subdomain itself.
		subdomain := m[1]
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sTHREATMINER%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				// BUG FIX: this previously printed the whole subdomains
				// slice instead of the current subdomain.
				fmt.Printf("\n[THREATMINER] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains
}

View File

@ -1,93 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package virustotal is a golang Client for Subdomain Enumeration
package virustotal
import (
"encoding/json"
"fmt"
"io/ioutil"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// virustotalapiObject mirrors the relevant part of the VirusTotal v2 domain
// report response: a list of subdomain names.
type virustotalapiObject struct {
Subdomains []string `json:"subdomains"`
}
// virustotalapiData is the package-level decode target reused per query.
var virustotalapiData virustotalapiObject
// queryVirustotalAPI fetches subdomains of domain from the VirusTotal v2
// domain report API. It requires an API key in the configuration; on error
// it returns whatever subdomains were gathered so far plus the error.
func queryVirustotalAPI(domain string, state *helper.State) (subdomains []string, err error) {
	// Make a search for a domain name and get HTTP Response
	resp, err := helper.GetHTTPResponse("https://www.virustotal.com/vtapi/v2/domain/report?apikey="+state.ConfigState.VirustotalAPIKey+"&domain="+domain, state.Timeout)
	if err != nil {
		return subdomains, err
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	respBody, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return subdomains, err
	}

	// Decode the json format
	err = json.Unmarshal(respBody, &virustotalapiData)
	if err != nil {
		return subdomains, err
	}

	for _, subdomain := range virustotalapiData.Subdomains {
		// Fix wildcard subdomains containing an asterisk prefix.
		if strings.Contains(subdomain, "*.") {
			subdomain = strings.Split(subdomain, "*.")[1]
		}
		if state.Verbose {
			if state.Color {
				fmt.Printf("\n[%sVIRUSTOTAL%s] %s", helper.Red, helper.Reset, subdomain)
			} else {
				fmt.Printf("\n[VIRUSTOTAL] %s", subdomain)
			}
		}
		subdomains = append(subdomains, subdomain)
	}
	return subdomains, nil
}
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// This source is API-key gated; bail out early without one.
	if state.ConfigState.VirustotalAPIKey == "" {
		var none []string
		return none
	}

	// Get subdomains via API
	found, err := queryVirustotalAPI(domain, state)
	if err != nil && !state.Silent {
		fmt.Printf("\nvirustotal: %v\n", err)
	}
	return found
}

View File

@ -1,90 +0,0 @@
//
// Written By : @ice3man (Nizamul Rana)
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package waybackarchive is a Golang based client for Parsing Subdomains from Waybackarchive
package waybackarchive
import (
"fmt"
"io/ioutil"
"strconv"
"strings"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// First ask the CDX API how many result pages exist.
	pagesResp, err := helper.GetHTTPResponse("http://web.archive.org/cdx/search/cdx?url=*."+domain+"&showNumPages=true", state.Timeout)
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nwaybackarchive: %v\n", err)
		}
		return subdomains
	}
	b, err := ioutil.ReadAll(pagesResp.Body)
	// BUG FIX: the page-count response body was never closed.
	pagesResp.Body.Close()
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nwaybackarchive: %v\n", err)
		}
		return subdomains
	}
	numPages, err := strconv.Atoi(strings.Split(string(b), "\n")[0])
	if err != nil {
		if !state.Silent {
			fmt.Printf("\nwaybackarchive: %v\n", err)
		}
		return subdomains
	}

	for i := 0; i <= numPages; i++ {
		resp, err := helper.GetHTTPResponse("http://web.archive.org/cdx/search/cdx?url=*."+domain+"/*&output=json&fl=original&collapse=urlkey&page="+strconv.Itoa(i), state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nwaybackarchive: %v\n", err)
			}
			return subdomains
		}
		respBody, err := ioutil.ReadAll(resp.Body)
		// BUG FIX: close each page's body explicitly; it was previously
		// leaked for every page fetched.
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nwaybackarchive: %v\n", err)
			}
			return subdomains
		}

		initialSubs := helper.ExtractSubdomains(string(respBody), domain)
		validSubdomains := helper.Unique(initialSubs)
		for _, subdomain := range validSubdomains {
			if !helper.SubdomainExists(subdomain, subdomains) {
				if state.Verbose {
					if state.Color {
						fmt.Printf("\n[%sWAYBACKARCHIVE%s] %s", helper.Red, helper.Reset, subdomain)
					} else {
						fmt.Printf("\n[WAYBACKARCHIVE] %s", subdomain)
					}
				}
				subdomains = append(subdomains, subdomain)
			}
		}
	}
	return subdomains
}

View File

@ -1,73 +0,0 @@
//
// Written By : @Mzack9999
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
//
// Package yahoo is a golang client for Yahoo Subdomain Discovery
package yahoo
import (
"fmt"
"io/ioutil"
"math/rand"
"regexp"
"strconv"
"time"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// subdomains accumulates every result found by this source. It is
// package-level state, so repeated Query calls keep appending to it.
var subdomains []string
// Query function returns all subdomains found using the service.
func Query(args ...interface{}) interface{} {
	domain := args[0].(string)
	state := args[1].(*helper.State)

	// Replace percent-encoded escapes with spaces so host names embedded in
	// result URLs tokenise cleanly. Compiled once, outside the loop (was
	// previously re-compiled on every page).
	reSub := regexp.MustCompile(`%.{2}`)

	maxPages, _ := strconv.Atoi(state.CurrentSettings.YahooPages)
	for currentPage := 0; currentPage <= maxPages; currentPage++ {
		url := "https://search.yahoo.com/search?p=site:" + domain + "&b=" + strconv.Itoa(currentPage*10) + "&pz=10&bct=0&xargs=0"
		resp, err := helper.GetHTTPResponse(url, state.Timeout)
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nyahoo: %v\n", err)
			}
			return subdomains
		}
		body, err := ioutil.ReadAll(resp.Body)
		// BUG FIX: close each page's body explicitly; it was previously
		// leaked for every page fetched.
		resp.Body.Close()
		if err != nil {
			if !state.Silent {
				fmt.Printf("\nyahoo: %v\n", err)
			}
			return subdomains
		}

		src := reSub.ReplaceAllLiteralString(string(body), " ")
		match := helper.ExtractSubdomains(src, domain)
		for _, subdomain := range match {
			if !helper.SubdomainExists(subdomain, subdomains) {
				if state.Verbose {
					if state.Color {
						fmt.Printf("\n[%sYahoo%s] %s", helper.Red, helper.Reset, subdomain)
					} else {
						fmt.Printf("\n[Yahoo] %s", subdomain)
					}
				}
				subdomains = append(subdomains, subdomain)
			}
		}
		// Random delay between pages to avoid rate limiting.
		time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
	}
	return subdomains
}

67
main.go
View File

@ -1,67 +0,0 @@
// subfinder : Subdomain discovery tool in golang
// Written By : @codingo
// @ice3man
//
// Distributed Under MIT License
// Copyrights (C) 2018 Ice3man
// Contains main driver classes for the tool
package main
import (
"flag"
"fmt"
"github.com/subfinder/subfinder/libsubfinder/helper"
"github.com/subfinder/subfinder/subf"
)
// ParseCmdLine ... Parses command line arguments into a setting structure
// (the duplicated copy of this comment line was removed).
func ParseCmdLine() (s *subf.Subfinder) {
s = subf.NewSubfinder()
flag.BoolVar(&s.State.Verbose, "v", false, "Verbose output")
flag.BoolVar(&s.State.Color, "no-color", true, "Don't Use colors in output")
flag.IntVar(&s.State.Threads, "t", 10, "Number of concurrent threads")
flag.IntVar(&s.State.Timeout, "timeout", 180, "Timeout for passive discovery services")
flag.StringVar(&s.State.Domain, "d", "", "Domain to find subdomains for")
flag.StringVar(&s.State.Output, "o", "", "Name of the output file (optional)")
flag.BoolVar(&s.State.IsJSON, "oJ", false, "Write output in JSON Format")
flag.BoolVar(&s.State.Alive, "nW", false, "Remove Wildcard Subdomains from output")
flag.BoolVar(&s.State.NoPassive, "no-passive", false, "Do not perform passive subdomain enumeration")
flag.BoolVar(&s.State.Silent, "silent", false, "Show only subdomains in output")
flag.BoolVar(&s.State.Recursive, "recursive", false, "Use recursion to find subdomains")
flag.StringVar(&s.State.Wordlist, "w", "", "Wordlist for doing subdomain bruteforcing")
flag.StringVar(&s.State.Sources, "sources", "all", "Comma separated list of sources to use")
flag.BoolVar(&s.State.Bruteforce, "b", false, "Use bruteforcing to find subdomains")
flag.StringVar(&s.State.SetConfig, "set-config", "none", "Comma separated list of configuration details")
flag.StringVar(&s.State.SetSetting, "set-settings", "none", "Comma separated list of settings")
flag.StringVar(&s.State.DomainList, "dL", "", "List of domains to find subdomains for")
flag.StringVar(&s.State.OutputDir, "oD", "", "Directory to output results to ")
flag.StringVar(&s.State.ComResolver, "r", "", "Comma-separated list of resolvers to use")
flag.StringVar(&s.State.ListResolver, "rL", "", "Text file containing list of resolvers to use")
flag.StringVar(&s.State.ExcludeSource, "exclude-sources", "", "List of sources to exclude from enumeration")
flag.BoolVar(&s.State.AquatoneJSON, "oT", false, "Use aquatone style json output format")
flag.Parse()
return s
}
// main is the entry point: parse flags, print the banner unless the
// silent flag is set, then run passive enumeration.
func main() {
	subfinder := ParseCmdLine()

	if !subfinder.State.Silent {
		divider := "==============================================="
		fmt.Println(divider)
		fmt.Printf("%s%s-=Subfinder%s v1.1.3 github.com/subfinder/subfinder\n", helper.Info, helper.Cyan, helper.Reset)
		fmt.Println(divider)
	}

	subfinder.Init()
	// Enumeration result is ignored here; outputs are written elsewhere.
	_ = subfinder.PassiveEnumeration()
	fmt.Printf("\n")
}

2
pkg/log/doc.go Normal file
View File

@ -0,0 +1,2 @@
// Package log provides a simple layer for leveled logging in go.
package log

168
pkg/log/log.go Normal file
View File

@ -0,0 +1,168 @@
package log
import (
"fmt"
"os"
"strings"
"sync"
"github.com/logrusorgru/aurora"
)
// Level defines all the available levels we can log at
type Level int

// Available logging levels. A message is emitted only when its level
// is <= MaxLevel (see log()).
const (
	Null Level = iota
	Fatal
	Silent
	Label
	Misc
	Error
	Info
	Warning
	Verbose
)

var (
	// UseColors can be used to control coloring of the output
	UseColors = true
	// MaxLevel is the maximum level to log at. By default, logging
	// is done at Info level. Using verbose will display all the errors too,
	// Using silent will display only the most relevant information.
	MaxLevel = Info

	// labels maps a level to the three-letter tag printed in brackets.
	// NOTE(review): Label deliberately reuses the "WRN" tag — confirm.
	labels = map[Level]string{
		Warning: "WRN",
		Error:   "ERR",
		Label:   "WRN",
		Fatal:   "FTL",
		Info:    "INF",
	}
)

// stringBuilderPool recycles strings.Builder instances so each log call
// avoids allocating a fresh builder.
var stringBuilderPool = &sync.Pool{New: func() interface{} {
	return new(strings.Builder)
}}
// wrap applies the level-appropriate terminal color to label.
// When coloring is disabled, or the level has no color mapping
// (including Silent), the label is returned untouched.
func wrap(label string, level Level) string {
	if !UseColors {
		return label
	}

	colored := label
	switch level {
	case Info, Verbose:
		colored = aurora.Blue(label).String()
	case Fatal:
		colored = aurora.Bold(aurora.Red(label)).String()
	case Error:
		colored = aurora.Red(label).String()
	case Warning, Label:
		colored = aurora.Yellow(label).String()
	}
	return colored
}
// getLabel writes the bracketed label prefix for a message into sb.
// Fixed-tag levels use the labels map; Verbose uses the caller-supplied
// label; Silent, Misc and unknown levels get no prefix at all.
func getLabel(level Level, label string, sb *strings.Builder) {
	var tag string
	switch level {
	case Error, Fatal, Info, Warning, Label:
		tag = labels[level]
	case Verbose:
		tag = label
	default:
		return
	}
	sb.WriteString("[")
	sb.WriteString(wrap(tag, level))
	sb.WriteString("] ")
}
// log builds and writes a single message at the given level. Messages
// at or below MaxLevel are emitted; Silent goes to stdout, everything
// else to stderr. A trailing newline is appended when missing.
func log(level Level, label string, format string, args ...interface{}) {
	// Don't log if the level is null
	if level == Null {
		return
	}
	if level <= MaxLevel {
		// Build the log message using the string builder pool
		sb := stringBuilderPool.Get().(*strings.Builder)
		// Get the label and append it to string builder
		getLabel(level, label, sb)

		message := fmt.Sprintf(format, args...)
		sb.WriteString(message)
		if !strings.HasSuffix(message, "\n") {
			sb.WriteString("\n")
		}

		// Use Fprint, not Fprintf: the assembled message may contain
		// user-supplied '%' characters and must not be re-interpreted
		// as a format string (go vet printf violation in the original).
		switch level {
		case Silent:
			fmt.Fprint(os.Stdout, sb.String())
		default:
			fmt.Fprint(os.Stderr, sb.String())
		}
		sb.Reset()
		stringBuilderPool.Put(sb)
	}
}
// Infof writes an info message on the screen with the default label.
func Infof(format string, args ...interface{}) {
	log(Info, "", format, args...)
}
// Warningf writes a warning message on the screen with the default label.
func Warningf(format string, args ...interface{}) {
	log(Warning, "", format, args...)
}
// Errorf writes an error message on the screen with the default label.
func Errorf(format string, args ...interface{}) {
	log(Error, "", format, args...)
}
// Verbosef writes a verbose message on the screen with a custom label.
// Note the parameter order: format comes before label.
func Verbosef(format string, label string, args ...interface{}) {
	log(Verbose, label, format, args...)
}
// Silentf writes a message to stdout with no label.
func Silentf(format string, args ...interface{}) {
	log(Silent, "", format, args...)
}
// Fatalf logs a fatal error and exits the program with status 1.
// Note: os.Exit skips deferred functions in the caller.
func Fatalf(format string, args ...interface{}) {
	log(Fatal, "", format, args...)
	os.Exit(1)
}
// Printf prints a string on screen without any label prefix.
func Printf(format string, args ...interface{}) {
	log(Misc, "", format, args...)
}
// Labelf prints a string on screen with the Label-level tag.
func Labelf(format string, args ...interface{}) {
	log(Label, "", format, args...)
}

34
pkg/log/log_test.go Normal file
View File

@ -0,0 +1,34 @@
package log
import (
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/logrusorgru/aurora"
)
// TestGetLabel verifies the label prefix generated for each level.
func TestGetLabel(t *testing.T) {
	tests := []struct {
		level    Level
		label    string
		expected string
	}{
		{Fatal, "", fmt.Sprintf("[%s] ", aurora.Bold(aurora.Red(labels[Fatal])).String())},
		{Silent, "hello", ""},
		{Error, "error", fmt.Sprintf("[%s] ", aurora.Red(labels[Error]).String())},
		{Info, "", fmt.Sprintf("[%s] ", aurora.Blue(labels[Info]).String())},
		{Warning, "", fmt.Sprintf("[%s] ", aurora.Yellow(labels[Warning]).String())},
		{Verbose, "dns", fmt.Sprintf("[%s] ", aurora.Blue("dns").String())},
	}

	sb := &strings.Builder{}
	for _, test := range tests {
		sb.Reset()
		getLabel(test.level, test.label, sb)
		data := sb.String()
		// assert.Equal takes (t, expected, actual); the original had the
		// arguments swapped, producing misleading failure messages.
		assert.Equal(t, test.expected, data, "Expected message and generated message don't match")
	}
}

4
pkg/passive/doc.go Normal file
View File

@ -0,0 +1,4 @@
// Package passive provides capability for doing passive subdomain
// enumeration on targets.
package passive

43
pkg/passive/passive.go Normal file
View File

@ -0,0 +1,43 @@
package passive
import (
"context"
"fmt"
"sync"
"time"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// EnumerateSubdomains enumerates all the subdomains for a given domain.
// It runs every configured source concurrently against a shared session
// and streams results on the returned channel, which is always closed
// when enumeration completes (or fails to start).
func (a *Agent) EnumerateSubdomains(domain string, keys subscraping.Keys, timeout int, maxEnumTime time.Duration) chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		// Guarantee the channel is closed on every path; the original
		// never closed it when session creation failed.
		defer close(results)

		session, err := subscraping.NewSession(domain, keys, timeout)
		if err != nil {
			results <- subscraping.Result{Type: subscraping.Error, Error: fmt.Errorf("could not init passive session for %s: %s", domain, err)}
			// Without a session no source can run; the original fell
			// through and handed every source a nil session.
			return
		}

		ctx, cancel := context.WithTimeout(context.Background(), maxEnumTime)
		defer cancel()

		wg := &sync.WaitGroup{}
		// Run each source in parallel on the target domain
		for source, runner := range a.sources {
			wg.Add(1)
			go func(source string, runner subscraping.Source) {
				defer wg.Done()
				for resp := range runner.Run(ctx, domain, session) {
					results <- resp
				}
			}(source, runner)
		}
		wg.Wait()
	}()

	return results
}

134
pkg/passive/sources.go Normal file
View File

@ -0,0 +1,134 @@
package passive
import (
"github.com/projectdiscovery/subfinder/pkg/subscraping"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/archiveis"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/binaryedge"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/bufferover"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/censys"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotter"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotterold"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/commoncrawl"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/crtsh"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/digicert"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdumpster"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/entrust"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/googleter"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/hackertarget"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/ipv4info"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/passivetotal"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/securitytrails"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/shodan"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sitedossier"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatcrowd"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatminer"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/urlscan"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/virustotal"
"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/waybackarchive"
)
// DefaultSources contains the list of sources used by default.
// Each name must match a case handled in addSources.
var DefaultSources = []string{
	"archiveis",
	"binaryedge",
	"bufferover",
	"censys",
	"certspotter",
	"certspotterold",
	"commoncrawl",
	"crtsh",
	"digicert",
	"dnsdumpster",
	"entrust",
	"googleter",
	"hackertarget",
	"ipv4info",
	"passivetotal",
	"securitytrails",
	"shodan",
	"sitedossier",
	"threatcrowd",
	"threatminer",
	"urlscan",
	"virustotal",
	"waybackarchive",
}
// Agent is a struct for running passive subdomain enumeration
// against a given host. It wraps subscraping package and provides
// a layer to build upon.
type Agent struct {
	// sources maps source name -> source implementation; populated by
	// New via addSources/removeSources.
	sources map[string]subscraping.Source
}
// New creates a new agent for passive subdomain discovery configured
// with the requested sources minus the exclusions.
func New(sources []string, exclusions []string) *Agent {
	agent := &Agent{sources: map[string]subscraping.Source{}}
	// Register what was asked for, then drop anything excluded.
	agent.addSources(sources)
	agent.removeSources(exclusions)
	return agent
}
// addSources registers each named source in the agent's source map.
// Unrecognized names are silently ignored, matching the original switch.
func (a *Agent) addSources(sources []string) {
	available := map[string]subscraping.Source{
		"archiveis":      &archiveis.Source{},
		"binaryedge":     &binaryedge.Source{},
		"bufferover":     &bufferover.Source{},
		"censys":         &censys.Source{},
		"certspotter":    &certspotter.Source{},
		"certspotterold": &certspotterold.Source{},
		"commoncrawl":    &commoncrawl.Source{},
		"crtsh":          &crtsh.Source{},
		"digicert":       &digicert.Source{},
		"dnsdumpster":    &dnsdumpster.Source{},
		"entrust":        &entrust.Source{},
		"googleter":      &googleter.Source{},
		"hackertarget":   &hackertarget.Source{},
		"ipv4info":       &ipv4info.Source{},
		"passivetotal":   &passivetotal.Source{},
		"securitytrails": &securitytrails.Source{},
		"shodan":         &shodan.Source{},
		"sitedossier":    &sitedossier.Source{},
		"threatcrowd":    &threatcrowd.Source{},
		"threatminer":    &threatminer.Source{},
		"urlscan":        &urlscan.Source{},
		"virustotal":     &virustotal.Source{},
		"waybackarchive": &waybackarchive.Source{},
	}
	for _, source := range sources {
		if impl, ok := available[source]; ok {
			a.sources[source] = impl
		}
	}
}
// removeSources drops every named source from the agent's source map.
// Deleting a name that was never registered is a harmless no-op.
func (a *Agent) removeSources(sources []string) {
	for _, excluded := range sources {
		delete(a.sources, excluded)
	}
}

59
pkg/resolve/client.go Normal file
View File

@ -0,0 +1,59 @@
package resolve
import (
"bufio"
"math/rand"
"os"
"time"
)
// DefaultResolvers contains the default list of resolvers known to be good
var DefaultResolvers = []string{
	"1.1.1.1",        // Cloudflare primary
	"1.0.0.1",        // Cloudflare secondary
	"8.8.8.8",        // Google primary
	"8.8.4.4",        // Google secondary
	"9.9.9.9",        // Quad9 Primary
	"9.9.9.10",       // Quad9 Secondary
	"77.88.8.8",      // Yandex Primary
	"77.88.8.1",      // Yandex Secondary
	"208.67.222.222", // OpenDNS Primary
	"208.67.220.220", // OpenDNS Secondary
}
// Resolver is a struct for resolving DNS names
type Resolver struct {
	// resolvers holds the IPs queried on port 53; picked at random per query.
	resolvers []string
	// rand is a private source so resolver selection does not depend on
	// the global math/rand state.
	rand *rand.Rand
}
// New creates a resolver with an empty resolver list and a
// time-seeded private random source.
func New() *Resolver {
	seed := rand.NewSource(time.Now().UnixNano())
	return &Resolver{
		resolvers: make([]string, 0),
		rand:      rand.New(seed),
	}
}
// AppendResolversFromFile appends the resolvers read from a file, one
// per line, to the list of resolvers. Empty lines are skipped.
func (r *Resolver) AppendResolversFromFile(file string) error {
	f, err := os.Open(file)
	if err != nil {
		return err
	}
	defer f.Close()

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		if line := scanner.Text(); line != "" {
			r.resolvers = append(r.resolvers, line)
		}
	}
	return scanner.Err()
}
// AppendResolversFromSlice adds every entry of list to the resolvers.
func (r *Resolver) AppendResolversFromSlice(list []string) {
	for _, resolver := range list {
		r.resolvers = append(r.resolvers, resolver)
	}
}

3
pkg/resolve/doc.go Normal file
View File

@ -0,0 +1,3 @@
// Package resolve is used to handle resolving records
// It also handles wildcard subdomains and rotating resolvers.
package resolve

150
pkg/resolve/resolve.go Normal file
View File

@ -0,0 +1,150 @@
package resolve
import (
"sync"
"github.com/miekg/dns"
"github.com/rs/xid"
)
const (
	// maxResolveRetries is the number of additional attempts made after
	// a failed DNS exchange before giving up on a host.
	maxResolveRetries = 5
	// maxWildcardChecks is how many random probes are resolved per
	// domain to collect wildcard IPs.
	maxWildcardChecks = 3
)
// ResolutionPool is a pool of resolvers created for resolving subdomains
// for a given host. Feed hostnames into Tasks and read from Results;
// Results is closed after Tasks is closed and all workers drain.
type ResolutionPool struct {
	*Resolver
	Tasks   chan string
	Results chan Result
	wg      *sync.WaitGroup
	// removeWildcard enables DNS resolution and wildcard filtering;
	// when false, hosts are passed through unresolved.
	removeWildcard bool
	// wildcardIPs holds IPs known to belong to wildcard DNS entries.
	wildcardIPs map[string]struct{}
}
// Result contains the result for a host resolution
type Result struct {
	Type  ResultType // Subdomain or Error
	Host  string     // the hostname that was processed
	IP    string     // first A record; empty when wildcard removal is off
	Error error      // set only when Type is Error
}
// ResultType is the type of result found
type ResultType int

// Types of data a result can carry: a resolved subdomain or an error.
const (
	Subdomain ResultType = iota
	Error
)
// NewResolutionPool creates a pool of resolvers for resolving subdomains
// of a given domain. It starts the requested number of workers and
// closes Results once all of them finish draining Tasks.
func (r *Resolver) NewResolutionPool(workers int, removeWildcard bool) *ResolutionPool {
	pool := &ResolutionPool{
		Resolver:       r,
		Tasks:          make(chan string),
		Results:        make(chan Result),
		wg:             &sync.WaitGroup{},
		removeWildcard: removeWildcard,
		wildcardIPs:    map[string]struct{}{},
	}

	go func() {
		for i := 0; i < workers; i++ {
			pool.wg.Add(1)
			go pool.resolveWorker()
		}
		pool.wg.Wait()
		close(pool.Results)
	}()

	return pool
}
// InitWildcards populates the wildcard-IP set for domain by resolving
// a few random, non-existent labels: any A records returned can only
// come from a wildcard DNS entry.
func (r *ResolutionPool) InitWildcards(domain string) error {
	for attempt := 0; attempt < maxWildcardChecks; attempt++ {
		probe := xid.New().String() + "." + domain
		ips, err := r.getARecords(probe)
		if err != nil {
			return err
		}
		for _, ip := range ips {
			r.wildcardIPs[ip] = struct{}{}
		}
	}
	return nil
}
// resolveWorker consumes hostnames from Tasks and emits Results.
// With removeWildcard off it passes hosts through unresolved; otherwise
// it resolves each host and drops any whose A records match a known
// wildcard IP.
func (r *ResolutionPool) resolveWorker() {
	for task := range r.Tasks {
		if !r.removeWildcard {
			r.Results <- Result{Type: Subdomain, Host: task, IP: ""}
			continue
		}

		hosts, err := r.getARecords(task)
		if err != nil {
			r.Results <- Result{Type: Error, Error: err}
			continue
		}
		if len(hosts) == 0 {
			continue
		}

		// BUG FIX: the original's `continue` inside this loop only
		// skipped the inner iteration, so wildcard hosts were never
		// actually filtered. Skip the whole task instead.
		isWildcard := false
		for _, host := range hosts {
			if _, ok := r.wildcardIPs[host]; ok {
				isWildcard = true
				break
			}
		}
		if isWildcard {
			continue
		}

		r.Results <- Result{Type: Subdomain, Host: task, IP: hosts[0]}
	}
	r.wg.Done()
}
// getARecords queries a randomly chosen resolver for the A records of
// host, retrying failed exchanges up to maxResolveRetries extra times.
// A non-success response code yields (nil, nil) rather than an error.
func (r *ResolutionPool) getARecords(host string) ([]string, error) {
	msg := new(dns.Msg)
	msg.Id = dns.Id()
	msg.RecursionDesired = true
	msg.Question = []dns.Question{{
		Name:   dns.Fqdn(host),
		Qtype:  dns.TypeA,
		Qclass: dns.ClassINET,
	}}

	var in *dns.Msg
	var err error
	// Replaces the original goto-based retry with an equivalent loop:
	// up to maxResolveRetries+1 total attempts.
	for attempt := 0; ; attempt++ {
		target := r.resolvers[r.rand.Intn(len(r.resolvers))] + ":53"
		in, err = dns.Exchange(msg, target)
		if err == nil {
			break
		}
		if attempt >= maxResolveRetries {
			return nil, err
		}
	}

	// Ignore the error in case we have a bad result code.
	if in != nil && in.Rcode != dns.RcodeSuccess {
		return nil, nil
	}

	var hosts []string
	for _, record := range in.Answer {
		if a, ok := record.(*dns.A); ok {
			hosts = append(hosts, a.A.String())
		}
	}
	return hosts, nil
}

54
pkg/runner/banners.go Normal file
View File

@ -0,0 +1,54 @@
package runner
import (
"github.com/projectdiscovery/subfinder/pkg/log"
"github.com/projectdiscovery/subfinder/pkg/passive"
"github.com/projectdiscovery/subfinder/pkg/resolve"
)
// banner is the ASCII-art program name printed at startup by showBanner.
const banner = `
 _ __ _ _
 ____ _| |__ / _(_)_ _ __| |___ _ _
 (_-< || | '_ \ _| | ' \/ _ / -_) '_|
 /__/\_,_|_.__/_| |_|_||_\__,_\___|_| v2
`
// showBanner is used to show the banner, project URL and usage
// disclaimers to the user at startup.
func showBanner() {
	log.Printf("%s\n", banner)
	log.Printf("\t\tprojectdiscovery.io\n\n")
	log.Labelf("Use with caution. You are responsible for your actions\n")
	log.Labelf("Developers assume no liability and are not responsible for any misuse or damage.\n")
	log.Labelf("By using subfinder, you also agree to the terms of the APIs used.\n\n")
}
// normalRunTasks runs the normal (non-first-run) startup tasks:
// it loads the existing yaml configuration file into options,
// exiting fatally if the file cannot be read or parsed.
func (options *Options) normalRunTasks() {
	configFile, err := UnmarshalRead(options.ConfigFile)
	if err != nil {
		log.Fatalf("Could not read configuration file %s: %s\n", options.ConfigFile, err)
	}
	options.YAMLConfig = configFile
}
// firstRunTasks runs housekeeping for the program's first execution:
// it writes a default configuration file (default resolvers and all
// default passive sources) and keeps it in memory for this run.
func (options *Options) firstRunTasks() {
	defaults := ConfigFile{
		// Use the default list of resolvers by marshalling it to the config
		Resolvers: resolve.DefaultResolvers,
		// Use the default list of passive sources
		Sources: passive.DefaultSources,
	}

	if err := defaults.MarshalWrite(options.ConfigFile); err != nil {
		log.Fatalf("Could not write configuration file to %s: %s\n", options.ConfigFile, err)
	}
	options.YAMLConfig = defaults

	log.Infof("Configuration file saved to %s\n", options.ConfigFile)
}

133
pkg/runner/config.go Normal file
View File

@ -0,0 +1,133 @@
package runner
import (
"math/rand"
"os"
"strings"
"time"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
"gopkg.in/yaml.v3"
)
// ConfigFile contains the fields stored in the configuration file
type ConfigFile struct {
	// Resolvers contains the list of resolvers to use while resolving
	Resolvers []string `yaml:"resolvers,omitempty"`
	// Sources contains a list of sources to use for enumeration
	Sources []string `yaml:"sources,omitempty"`
	// ExcludeSources contains the sources to not include in the enumeration process
	ExcludeSources []string `yaml:"exclude-sources,omitempty"`

	// API keys for different sources. Each entry is a list; GetKeys
	// picks one at random. Two-part keys use "token:secret" form.
	Binaryedge     []string `yaml:"binaryedge"`
	Censys         []string `yaml:"censys"`
	Certspotter    []string `yaml:"certspotter"`
	PassiveTotal   []string `yaml:"passivetotal"`
	SecurityTrails []string `yaml:"securitytrails"`
	Shodan         []string `yaml:"shodan"`
	URLScan        []string `yaml:"urlscan"`
	Virustotal     []string `yaml:"virustotal"`
}
// GetConfigDirectory gets the subfinder config directory for a user,
// creating it (and parents) if missing. It also seeds math/rand, which
// GetKeys later relies on for random key selection.
func GetConfigDirectory() (string, error) {
	// Seed the random number generator
	rand.Seed(time.Now().UnixNano())

	var config string

	directory, err := os.UserHomeDir()
	if err != nil {
		return config, err
	}
	config = directory + "/.config/subfinder"

	// Create the directory tree for subfinder even if it exists.
	// The original ignored this error; surface it so an unwritable
	// home fails here instead of later on config write.
	if err := os.MkdirAll(config, os.ModePerm); err != nil {
		return config, err
	}
	return config, nil
}
// CheckConfigExists checks if the config file exists at the given path.
// Any Stat error (not-exist, permission, ...) is treated as "absent",
// which matches the original's fall-through behavior; the redundant
// else-if branch (both paths returned false) has been collapsed.
func CheckConfigExists(configPath string) bool {
	_, err := os.Stat(configPath)
	return err == nil
}
// MarshalWrite writes the marshalled yaml config to disk.
// O_TRUNC is required: without it (as in the original), writing a
// shorter config over a longer existing file leaves stale trailing
// bytes that corrupt the yaml on the next read.
func (c ConfigFile) MarshalWrite(file string) error {
	f, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
	if err != nil {
		return err
	}
	// Indent the spaces too
	enc := yaml.NewEncoder(f)
	enc.SetIndent(4)
	err = enc.Encode(&c)
	f.Close()
	return err
}
// UnmarshalRead reads and decodes the yaml config file from disk.
// On open failure the zero-value ConfigFile is returned with the error.
func UnmarshalRead(file string) (ConfigFile, error) {
	var cfg ConfigFile

	f, err := os.Open(file)
	if err != nil {
		return cfg, err
	}
	decodeErr := yaml.NewDecoder(f).Decode(&cfg)
	f.Close()
	return cfg, decodeErr
}
// GetKeys gets the API keys from the config file and builds a Keys
// struct. One key is picked at random from each configured list.
// Keys that need two parts are stored as "token:secret".
func (c ConfigFile) GetKeys() subscraping.Keys {
	keys := subscraping.Keys{}

	// pick returns a random entry from a non-empty key list.
	pick := func(list []string) string {
		return list[rand.Intn(len(list))]
	}

	if len(c.Binaryedge) > 0 {
		keys.Binaryedge = pick(c.Binaryedge)
	}
	if len(c.Censys) > 0 {
		if parts := strings.Split(pick(c.Censys), ":"); len(parts) == 2 {
			keys.CensysToken = parts[0]
			keys.CensysSecret = parts[1]
		}
	}
	if len(c.Certspotter) > 0 {
		keys.Certspotter = pick(c.Certspotter)
	}
	if len(c.PassiveTotal) > 0 {
		if parts := strings.Split(pick(c.PassiveTotal), ":"); len(parts) == 2 {
			keys.PassiveTotalUsername = parts[0]
			keys.PassiveTotalPassword = parts[1]
		}
	}
	if len(c.SecurityTrails) > 0 {
		keys.Securitytrails = pick(c.SecurityTrails)
	}
	if len(c.Shodan) > 0 {
		keys.Shodan = pick(c.Shodan)
	}
	if len(c.URLScan) > 0 {
		keys.URLScan = pick(c.URLScan)
	}
	if len(c.Virustotal) > 0 {
		keys.Virustotal = pick(c.Virustotal)
	}
	return keys
}

22
pkg/runner/config_test.go Normal file
View File

@ -0,0 +1,22 @@
package runner
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
)
// TestConfigGetDirectory verifies the config directory resolves to
// $HOME/.config/subfinder.
func TestConfigGetDirectory(t *testing.T) {
	directory, err := GetConfigDirectory()
	if err != nil {
		t.Fatalf("Expected nil got %v while getting home\n", err)
	}
	home, err := os.UserHomeDir()
	if err != nil {
		t.Fatalf("Expected nil got %v while getting dir\n", err)
	}
	config := home + "/.config/subfinder"
	// assert.Equal takes (t, expected, actual); the original had the
	// arguments swapped, producing misleading failure output.
	assert.Equal(t, config, directory, "Directory and config should be equal")
}

3
pkg/runner/doc.go Normal file
View File

@ -0,0 +1,3 @@
// Package runner implements the mechanism to drive the
// subdomain enumeration process
package runner

52
pkg/runner/initialize.go Normal file
View File

@ -0,0 +1,52 @@
package runner
import (
"strings"
"github.com/projectdiscovery/subfinder/pkg/passive"
"github.com/projectdiscovery/subfinder/pkg/resolve"
)
// initializePassiveEngine creates the passive enumeration engine.
// CLI-provided source/exclusion lists take precedence over the yaml
// configuration; whichever wins is handed to passive.New.
func (r *Runner) initializePassiveEngine() {
	var sources []string
	if r.options.Sources != "" {
		sources = strings.Split(r.options.Sources, ",")
	} else {
		sources = r.options.YAMLConfig.Sources
	}

	var exclusions []string
	if r.options.ExcludeSources != "" {
		exclusions = strings.Split(r.options.ExcludeSources, ",")
	} else {
		exclusions = r.options.YAMLConfig.ExcludeSources
	}

	r.passiveAgent = passive.New(sources, exclusions)
}
// initializeActiveEngine creates the resolver used to resolve found
// subdomains. Resolvers from a file (if given) are loaded first; then
// CLI resolvers take precedence over the yaml configuration's list.
func (r *Runner) initializeActiveEngine() error {
	r.resolverClient = resolve.New()

	if r.options.ResolverList != "" {
		if err := r.resolverClient.AppendResolversFromFile(r.options.ResolverList); err != nil {
			return err
		}
	}

	if r.options.Resolvers != "" {
		r.resolverClient.AppendResolversFromSlice(strings.Split(r.options.Resolvers, ","))
	} else {
		r.resolverClient.AppendResolversFromSlice(r.options.YAMLConfig.Resolvers)
	}
	return nil
}

104
pkg/runner/options.go Normal file
View File

@ -0,0 +1,104 @@
package runner
import (
"flag"
"os"
"path"
"github.com/projectdiscovery/subfinder/pkg/log"
)
// Options contains the configuration options for tuning
// the subdomain enumeration process. It is populated by ParseOptions
// from command line flags and the yaml configuration file.
type Options struct {
	Verbose            bool       // Verbose flag indicates whether to show verbose output or not
	NoColor            bool       // No-Color disables the colored output
	Threads            int        // Thread controls the number of threads to use for active enumerations
	Timeout            int        // Timeout is the seconds to wait for sources to respond
	MaxEnumerationTime int        // MaxEnumerationTime is the maximum amount of time in mins to wait for enumeration
	Domain             string     // Domain is the domain to find subdomains for
	DomainsFile        string     // DomainsFile is the file containing list of domains to find subdomains for
	Output             string     // Output is the file to write found subdomains to.
	OutputDirectory    string     // OutputDirectory is the directory to write results to in case list of domains is given
	JSON               bool       // JSON specifies whether to use json for output format or text file
	HostIP             bool       // HostIP specifies whether to write subdomains in host:ip format
	Silent             bool       // Silent suppresses any extra text and only writes subdomains to screen
	Sources            string     // Sources contains a comma-separated list of sources to use for enumeration
	ExcludeSources     string     // ExcludeSources contains the comma-separated sources to not include in the enumeration process
	Resolvers          string     // Resolvers is the comma-separated resolvers to use for enumeration
	ResolverList       string     // ResolverList is a text file containing list of resolvers to use for enumeration
	RemoveWildcard     bool       // RemoveWildcard specifies whether to remove potential wildcard or dead subdomains from the results.
	ConfigFile         string     // ConfigFile contains the location of the config file
	Stdin              bool       // Stdin specifies whether stdin input was given to the process
	YAMLConfig         ConfigFile // YAMLConfig contains the unmarshalled yaml config file
}
// ParseOptions parses the command line flags provided by a user,
// configures output/logging, shows the banner, and loads (or creates
// on first run) the yaml configuration file. Exits fatally on invalid
// options.
func ParseOptions() *Options {
	options := &Options{}

	config, err := GetConfigDirectory()
	if err != nil {
		// This should never be reached
		log.Fatalf("Could not get user home: %s\n", err)
	}

	flag.BoolVar(&options.Verbose, "v", false, "Show Verbose output")
	flag.BoolVar(&options.NoColor, "nC", false, "Don't Use colors in output")
	flag.IntVar(&options.Threads, "t", 10, "Number of concurrent threads for active enumeration")
	flag.IntVar(&options.Timeout, "timeout", 30, "Seconds to wait before timing out")
	flag.IntVar(&options.MaxEnumerationTime, "max-time", 10, "Minutes to wait for enumeration results")
	flag.StringVar(&options.Domain, "d", "", "Domain to find subdomains for")
	flag.StringVar(&options.DomainsFile, "dL", "", "File containing list of domains to enumerate")
	flag.StringVar(&options.Output, "o", "", "File to write output to (optional)")
	flag.StringVar(&options.OutputDirectory, "oD", "", "Directory to write enumeration results to (optional)")
	flag.BoolVar(&options.JSON, "oJ", false, "Write output in JSON lines Format")
	flag.BoolVar(&options.HostIP, "oI", false, "Write output in Host,IP format")
	flag.BoolVar(&options.Silent, "silent", false, "Show only subdomains in output")
	flag.StringVar(&options.Sources, "sources", "", "Comma separated list of sources to use")
	flag.StringVar(&options.ExcludeSources, "exclude-sources", "", "List of sources to exclude from enumeration")
	flag.StringVar(&options.Resolvers, "r", "", "Comma-separated list of resolvers to use")
	flag.StringVar(&options.ResolverList, "rL", "", "Text file containing list of resolvers to use")
	flag.BoolVar(&options.RemoveWildcard, "nW", false, "Remove Wildcard & Dead Subdomains from output")
	flag.StringVar(&options.ConfigFile, "config", path.Join(config, "config.yaml"), "Configuration file for API Keys, etc")
	flag.Parse()

	// Check if stdin pipe was given
	options.Stdin = hasStdin()

	// Read the inputs and configure the logging
	options.configureOutput()

	// Show the user the banner
	showBanner()

	// Check if the config file exists. If not, it means this is the
	// first run of the program. Show the first run notices and initialize the config file.
	// Else show the normal banners and read the yaml file to the config
	if !CheckConfigExists(options.ConfigFile) {
		options.firstRunTasks()
	} else {
		options.normalRunTasks()
	}

	// Validate the options passed by the user and if any
	// invalid options have been used, exit.
	err = options.validateOptions()
	if err != nil {
		log.Fatalf("Program exiting: %s\n", err)
	}

	return options
}
// hasStdin reports whether data is being piped to the process on
// standard input. A Stat failure is treated as "no stdin".
func hasStdin() bool {
	fi, err := os.Stdin.Stat()
	if err != nil {
		return false
	}
	// A set named-pipe mode bit means stdin is attached to a pipe.
	return fi.Mode()&os.ModeNamedPipe != 0
}

196
pkg/runner/runner.go Normal file
View File

@ -0,0 +1,196 @@
package runner
import (
"bufio"
"io"
"os"
"path"
"strings"
"sync"
"time"
"github.com/projectdiscovery/subfinder/pkg/log"
"github.com/projectdiscovery/subfinder/pkg/passive"
"github.com/projectdiscovery/subfinder/pkg/resolve"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Runner is an instance of the subdomain enumeration
// client used to orchestrate the whole process.
type Runner struct {
	options        *Options          // parsed CLI + yaml configuration
	passiveAgent   *passive.Agent    // passive source scraper
	resolverClient *resolve.Resolver // active DNS resolution client
}
// NewRunner creates a new runner struct instance by parsing
// the configuration options, configuring sources, reading lists
// and setting up loggers, etc.
func NewRunner(options *Options) (*Runner, error) {
	r := &Runner{options: options}

	// Passive engine setup cannot fail; active engine setup can
	// (the resolver list file may be unreadable).
	r.initializePassiveEngine()
	if err := r.initializeActiveEngine(); err != nil {
		return nil, err
	}
	return r, nil
}
// RunEnumeration runs the subdomain enumeration flow on the targets
// specified: a single -d domain, a -dL domains file, or stdin input —
// checked in that order of precedence.
func (r *Runner) RunEnumeration() error {
	switch {
	case r.options.Domain != "":
		// Single target given on the command line.
		return r.EnumerateSingleDomain(r.options.Domain, r.options.Output)
	case r.options.DomainsFile != "":
		// A file containing one domain per line.
		f, err := os.Open(r.options.DomainsFile)
		if err != nil {
			return err
		}
		defer f.Close()
		return r.EnumerateMultipleDomains(f)
	case r.options.Stdin:
		// Domains piped on standard input.
		return r.EnumerateMultipleDomains(os.Stdin)
	}
	return nil
}
// EnumerateMultipleDomains enumerates subdomains for domains read
// line-by-line from reader, writing per-domain output files into the
// configured output directory. It stops at the first enumeration error
// and, unlike the original, also surfaces reader/scanner errors.
func (r *Runner) EnumerateMultipleDomains(reader io.Reader) error {
	scanner := bufio.NewScanner(reader)
	for scanner.Scan() {
		domain := scanner.Text()
		if domain == "" {
			continue
		}
		outputFile := path.Join(r.options.OutputDirectory, domain)
		if err := r.EnumerateSingleDomain(domain, outputFile); err != nil {
			return err
		}
	}
	// The original dropped scanner errors, silently truncating input.
	return scanner.Err()
}
// EnumerateSingleDomain performs subdomain enumeration against a single domain.
// Passive results are filtered to the target domain, deduplicated, sent for
// resolution, and finally printed and/or written to the given output file.
func (r *Runner) EnumerateSingleDomain(domain, output string) error {
	log.Infof("Enumerating subdomains for %s\n", domain)

	// Get the API keys for sources from the configuration
	// and also create the active resolving engine for the domain.
	keys := r.options.YAMLConfig.GetKeys()
	resolutionPool := r.resolverClient.NewResolutionPool(r.options.Threads, r.options.RemoveWildcard)
	err := resolutionPool.InitWildcards(domain)
	if err != nil {
		// Log the error but don't quit.
		log.Warningf("Could not get wildcards for domain %s: %s\n", domain, err)
	}

	// Run the passive subdomain enumeration; MaxEnumerationTime bounds
	// the whole passive phase.
	passiveResults := r.passiveAgent.EnumerateSubdomains(domain, keys, r.options.Timeout, time.Duration(r.options.MaxEnumerationTime)*time.Minute)

	wg := &sync.WaitGroup{}
	// FIX: Add must happen before the goroutine is launched. The original
	// called wg.Add(1) inside the goroutine, which races against the
	// wg.Wait() below.
	wg.Add(1)
	// Process the passive results in a separate goroutine.
	go func() {
		defer wg.Done()
		// Unique map for filtering duplicate subdomains out.
		uniqueMap := make(map[string]struct{})
		for result := range passiveResults {
			switch result.Type {
			case subscraping.Error:
				log.Warningf("Could not run source %s: %s\n", result.Source, result.Error)
			case subscraping.Subdomain:
				// Validate the subdomain found and remove wildcards from it.
				if !strings.HasSuffix(result.Value, "."+domain) {
					continue
				}
				subdomain := strings.ReplaceAll(strings.ToLower(result.Value), "*.", "")
				// Skip duplicates; otherwise send the subdomain for resolution.
				if _, ok := uniqueMap[subdomain]; ok {
					continue
				}
				uniqueMap[subdomain] = struct{}{}
				// NOTE(review): Verbosef appears to take the source name as
				// a separate argument in this project's log package — confirm.
				log.Verbosef("%s\n", result.Source, subdomain)
				resolutionPool.Tasks <- subdomain
			}
		}
		close(resolutionPool.Tasks)
	}()

	foundResults := make(map[string]string)
	// Process the results coming from the resolutions pool.
	for result := range resolutionPool.Results {
		switch result.Type {
		case resolve.Error:
			log.Warningf("Could not resolve host: %s\n", result.Error)
		case resolve.Subdomain:
			// Record the first IP seen for each host.
			if _, ok := foundResults[result.Host]; !ok {
				foundResults[result.Host] = result.IP
			}
		}
	}
	wg.Wait()

	// Print all the found subdomains on the screen.
	for result := range foundResults {
		log.Silentf("%s\n", result)
	}

	// In case the user has given an output file, write all the found
	// subdomains to the output file.
	if output != "" {
		// When writing into an output directory, pick the extension
		// based on the requested format.
		if r.options.OutputDirectory != "" {
			if r.options.JSON {
				output = output + ".json"
			} else {
				output = output + ".txt"
			}
		}
		file, err := os.Create(output)
		if err != nil {
			log.Errorf("Could not create file %s for %s: %s\n", output, domain, err)
			return err
		}
		// Write the output to the file depending upon user requirement.
		if r.options.HostIP {
			err = WriteHostIPOutput(foundResults, file)
		} else if r.options.JSON {
			err = WriteJSONOutput(foundResults, file)
		} else {
			err = WriteHostOutput(foundResults, file)
		}
		if err != nil {
			log.Errorf("Could not write results to file %s for %s: %s\n", output, domain, err)
		}
		file.Close()
		return err
	}
	return nil
}

73
pkg/runner/utils.go Normal file
View File

@ -0,0 +1,73 @@
package runner
import (
"bufio"
"io"
"strings"
jsoniter "github.com/json-iterator/go"
)
// JSONResult contains the result for a host in JSON format
type JSONResult struct {
	Host string `json:"host"` // the resolved subdomain
	IP   string `json:"ip"`   // the IP the subdomain resolved to
}
// WriteHostOutput writes the output list of subdomain to an io.Writer
func WriteHostOutput(results map[string]string, writer io.Writer) error {
bufwriter := bufio.NewWriter(writer)
sb := &strings.Builder{}
for host := range results {
sb.WriteString(host)
sb.WriteString("\n")
_, err := bufwriter.WriteString(sb.String())
if err != nil {
bufwriter.Flush()
return err
}
sb.Reset()
}
return bufwriter.Flush()
}
// WriteJSONOutput writes the output list of subdomain in JSON to an io.Writer
func WriteJSONOutput(results map[string]string, writer io.Writer) error {
	encoder := jsoniter.NewEncoder(writer)
	// One JSON object per line, in map iteration order.
	for host, ip := range results {
		record := JSONResult{Host: host, IP: ip}
		if err := encoder.Encode(&record); err != nil {
			return err
		}
	}
	return nil
}
// WriteHostIPOutput writes the output list of subdomain to an io.Writer
func WriteHostIPOutput(results map[string]string, writer io.Writer) error {
bufwriter := bufio.NewWriter(writer)
sb := &strings.Builder{}
for host, ip := range results {
sb.WriteString(host)
sb.WriteString(",")
sb.WriteString(ip)
sb.WriteString("\n")
_, err := bufwriter.WriteString(sb.String())
if err != nil {
bufwriter.Flush()
return err
}
sb.Reset()
}
return bufwriter.Flush()
}

58
pkg/runner/validate.go Normal file
View File

@ -0,0 +1,58 @@
package runner
import (
"errors"
"github.com/projectdiscovery/subfinder/pkg/log"
)
// validateOptions validates the configuration options passed
func (options *Options) validateOptions() error {
	switch {
	case options.Domain == "" && options.DomainsFile == "" && !options.Stdin:
		// No domain, list, or stdin input — nothing to enumerate.
		return errors.New("no input list provided")
	case options.Verbose && options.Silent:
		// Both verbose and silent flags were used.
		return errors.New("both verbose and silent mode specified")
	case options.Threads == 0:
		return errors.New("threads cannot be zero")
	case options.Timeout == 0:
		return errors.New("timeout cannot be zero")
	case options.JSON && options.HostIP:
		// JSON cannot be used with hostIP.
		return errors.New("hostip flag cannot be used with json flag")
	case options.HostIP && !options.RemoveWildcard:
		// Wildcard removal is mandatory with hostip and json output.
		return errors.New("hostip flag must be used with RemoveWildcard option")
	case options.JSON && !options.RemoveWildcard:
		return errors.New("JSON flag must be used with RemoveWildcard option")
	}
	return nil
}
// configureOutput configures the output on the screen by mapping the
// user-facing flags onto the global log package settings.
func (options *Options) configureOutput() {
	// If the user desires verbose output, show verbose output
	if options.Verbose {
		log.MaxLevel = log.Verbose
	}
	if options.NoColor {
		log.UseColors = false
	}
	// Silent wins when both are set, though validateOptions normally
	// rejects that combination.
	if options.Silent {
		log.MaxLevel = log.Silent
	}
}

81
pkg/subscraping/agent.go Executable file
View File

@ -0,0 +1,81 @@
package subscraping
import (
"crypto/tls"
"net/http"
"time"
)
// NewSession creates a new session object for a domain. The session
// carries a shared HTTP client, the configured API keys and a compiled
// subdomain extractor for the domain.
func NewSession(domain string, keys Keys, timeout int) (*Session, error) {
	client := &http.Client{
		Transport: &http.Transport{
			MaxIdleConns:        100,
			MaxIdleConnsPerHost: 100,
			TLSClientConfig: &tls.Config{
				// NOTE(review): verification is deliberately skipped so
				// scraping works against hosts with broken TLS.
				InsecureSkipVerify: true,
			},
		},
		Timeout: time.Duration(timeout) * time.Second,
	}

	session := &Session{
		Client: client,
		Keys:   keys,
	}

	// Create a new extractor object for the current domain.
	// FIX: return nil instead of a partially-initialized session on
	// error, so callers cannot accidentally use a session without an
	// extractor.
	extractor, err := NewSubdomainExtractor(domain)
	if err != nil {
		return nil, err
	}
	session.Extractor = extractor
	return session, nil
}
// NormalGet makes a normal GET request to a URL
func (s *Session) NormalGet(url string) (*http.Response, error) {
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	// Don't randomize user agents, as they cause issues sometimes
	request.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
	request.Header.Set("Accept", "*/*")
	request.Header.Set("Accept-Language", "en")
	return s.Client.Do(request)
}
// Get makes a GET request to a URL with optional cookies and extra headers.
func (s *Session) Get(url string, cookies string, headers map[string]string) (*http.Response, error) {
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	request.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
	request.Header.Set("Accept", "*/*")
	request.Header.Set("Accept-Language", "en")
	if cookies != "" {
		request.Header.Set("Cookie", cookies)
	}
	// Ranging over a nil map is a no-op, so no explicit nil check is needed.
	for key, value := range headers {
		request.Header.Set(key, value)
	}
	return s.Client.Do(request)
}

View File

@ -0,0 +1,86 @@
// Package archiveis is a Archiveis Scraping Engine in Golang
package archiveis
import (
"context"
"io/ioutil"
"regexp"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// ArchiveIs is a struct for archiveurlsagent
type ArchiveIs struct {
	Results chan subscraping.Result // stream of found subdomains and errors
	Session *subscraping.Session    // shared HTTP session and extractor
	closed  bool                    // guards against closing Results twice during recursion
}
var reNext = regexp.MustCompile("<a id=\"next\" style=\".*\" href=\"(.*)\">&rarr;</a>")
// enumerate crawls archive.is result pages starting at baseURL, streaming
// every extracted subdomain into a.Results and recursing into the "next
// page" link until no further page exists.
func (a *ArchiveIs) enumerate(ctx context.Context, baseURL string) {
	for {
		select {
		case <-ctx.Done():
			// NOTE(review): this close (and the error-path closes below)
			// do not set a.closed, so a recursive caller's own close guard
			// can still fire afterwards — potential double close; confirm.
			close(a.Results)
			return
		default:
			resp, err := a.Session.NormalGet(baseURL)
			if err != nil {
				a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
				close(a.Results)
				return
			}
			// Get the response body
			body, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
				close(a.Results)
				return
			}
			resp.Body.Close()
			src := string(body)
			for _, subdomain := range a.Session.Extractor.FindAllString(src, -1) {
				a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Subdomain, Value: subdomain}
			}
			// Follow the pagination link recursively, if present.
			match1 := reNext.FindStringSubmatch(src)
			if len(match1) > 0 {
				a.enumerate(ctx, match1[1])
			}
			// Guard channel closing during recursion
			if !a.closed {
				close(a.Results)
				a.closed = true
			}
			return
		}
	}
}
// Source is the passive scraping agent for the archive.is service.
// It is stateless; per-run state lives in the ArchiveIs agent.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	agent := ArchiveIs{
		Results: results,
		Session: session,
	}
	// Crawl the wildcard query pages asynchronously; the agent closes
	// the channel when finished.
	go agent.enumerate(ctx, "http://archive.is/*."+domain)
	return agent.Results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "archiveis"
}

View File

@ -0,0 +1,101 @@
package binaryedge
import (
"context"
"fmt"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// binaryedgeResponse models the BinaryEdge subdomain API payload.
type binaryedgeResponse struct {
	Subdomains []string `json:"events"` // subdomains in the current page
	Total      int      `json:"total"`  // total events across all pages
}
// Source is the passive scraping agent for the BinaryEdge API.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service. It fetches
// the first page, then pages through the remaining results via
// getSubdomains. The channel is closed when enumeration finishes.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// The API requires a key; silently produce nothing without one.
		if session.Keys.Binaryedge == "" {
			close(results)
			return
		}
		resp, err := session.Get(fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s", domain), "", map[string]string{"X-Key": session.Keys.Binaryedge})
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		response := new(binaryedgeResponse)
		err = jsoniter.NewDecoder(resp.Body).Decode(response)
		// FIX: close the body unconditionally — the original leaked the
		// connection when decoding failed.
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		for _, subdomain := range response.Subdomains {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
		// The first request returns up to 100 events; page through the rest.
		remaining := response.Total - 100
		currentPage := 2
		for {
			further := s.getSubdomains(ctx, domain, &remaining, &currentPage, session, results)
			if !further {
				break
			}
		}
		close(results)
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "binaryedge"
}
// getSubdomains fetches a single result page and streams its subdomains
// into results. It returns true when another page should be fetched and
// false on cancellation, error, or when no results remain.
func (s *Source) getSubdomains(ctx context.Context, domain string, remaining, currentPage *int, session *subscraping.Session, results chan subscraping.Result) bool {
	for {
		select {
		case <-ctx.Done():
			return false
		default:
			resp, err := session.Get(fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s?page=%d", domain, *currentPage), "", map[string]string{"X-Key": session.Keys.Binaryedge})
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return false
			}
			response := binaryedgeResponse{}
			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
			// FIX: always release the body — the original leaked the
			// connection when decoding failed.
			resp.Body.Close()
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return false
			}
			for _, subdomain := range response.Subdomains {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
			}
			// Each page holds at most 100 events.
			*remaining = *remaining - 100
			if *remaining <= 0 {
				return false
			}
			*currentPage++
			return true
		}
	}
}

View File

@ -0,0 +1,57 @@
// Package bufferover is a bufferover Scraping Engine in Golang
package bufferover
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the bufferover.run service.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service. Both the
// DNS (historical SONAR) and TLS datasets are queried into the same
// channel, which is closed here once both finish.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// Run enumeration on subdomain dataset for historical SONAR datasets
		s.getData(fmt.Sprintf("https://dns.bufferover.run/dns?q=.%s", domain), session, results)
		s.getData(fmt.Sprintf("https://tls.bufferover.run/dns?q=.%s", domain), session, results)
		close(results)
	}()
	return results
}
// getData queries one bufferover endpoint and streams matching subdomains
// into results. It intentionally does NOT close results: Run queries two
// endpoints over the same channel and closes it once afterwards. The
// original closed the channel here on error, so a failing first endpoint
// made the second call send on (and re-close) a closed channel — a panic.
func (s *Source) getData(URL string, session *subscraping.Session, results chan subscraping.Result) {
	resp, err := session.NormalGet(URL)
	if err != nil {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
		return
	}
	body, err := ioutil.ReadAll(resp.Body)
	// Always release the body, even when the read failed.
	resp.Body.Close()
	if err != nil {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
		return
	}
	for _, subdomain := range session.Extractor.FindAllString(string(body), -1) {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
	}
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "bufferover"
}

View File

@ -0,0 +1,97 @@
package censys
import (
"bytes"
"context"
"fmt"
"net/http"
"strconv"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// maxCensysPages caps how many result pages are fetched per query.
const maxCensysPages = 10

// resultsq mirrors the flattened certificate-search fields returned by
// the Censys API.
type resultsq struct {
	Data  []string `json:"parsed.extensions.subject_alt_name.dns_names"`
	Data1 []string `json:"parsed.names"`
}

// response is the top-level Censys certificate-search response.
type response struct {
	Results  []resultsq `json:"results"`
	Metadata struct {
		Pages int `json:"pages"` // total number of result pages
	} `json:"metadata"`
}
// Source is the passive scraping agent for the Censys certificate API.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service. It pages
// through the Censys certificate search API (basic-auth with token and
// secret), emitting every SAN / parsed name as a subdomain result.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		if session.Keys.CensysToken == "" || session.Keys.CensysSecret == "" {
			close(results)
			return
		}
		var response response
		currentPage := 1
		for {
			var request = []byte(fmt.Sprintf(`{"query":"%s", "page":%d, "fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"], "flatten":true}`, domain, currentPage))
			req, err := http.NewRequest("POST", "https://www.censys.io/api/v1/search/certificates", bytes.NewReader(request))
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				close(results)
				return
			}
			// FIX: the original fmt.Printf'ed the API token and secret to
			// stdout here — credential leak removed.
			req.SetBasicAuth(session.Keys.CensysToken, session.Keys.CensysSecret)
			req.Header.Set("Content-Type", "application/json")
			req.Header.Set("Accept", "application/json")
			resp, err := session.Client.Do(req)
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				close(results)
				return
			}
			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
			// FIX: always release the body, even when decoding failed.
			resp.Body.Close()
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				close(results)
				return
			}
			// FIX: emit this page's results BEFORE deciding whether to
			// stop. The original broke first and dropped the final page —
			// single-page responses produced no results at all.
			for _, res := range response.Results {
				for _, part := range res.Data {
					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
				}
				for _, part := range res.Data1 {
					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
				}
			}
			// Exit the censys enumeration once all (or max) pages are read.
			if currentPage >= response.Metadata.Pages || currentPage >= maxCensysPages {
				break
			}
			currentPage++
		}
		close(results)
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "censys"
}

View File

@ -0,0 +1,92 @@
package certspotter
import (
"context"
"fmt"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// certspotterObject is a single issuance record; ID doubles as the
// pagination cursor for the "after" query parameter.
type certspotterObject struct {
	ID       string   `json:"id"`
	DNSNames []string `json:"dns_names"`
}
// Source is the passive scraping agent for the CertSpotter v1 API.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service. It fetches
// the first page of issuances, then follows the id-cursor pagination
// until an empty page is returned.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		defer close(results)
		// The API requires a key; silently produce nothing without one.
		if session.Keys.Certspotter == "" {
			return
		}
		resp, err := session.Get(fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names", domain), "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		response := []certspotterObject{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
		// FIX: always release the body, even when decoding failed.
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		for _, cert := range response {
			for _, subdomain := range cert.DNSNames {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
			}
		}
		// FIX: the original indexed response[len(response)-1] without a
		// length check and panicked when the API returned no issuances.
		if len(response) == 0 {
			return
		}
		id := response[len(response)-1].ID
		for {
			reqURL := fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names&after=%s", domain, id)
			resp, err := session.Get(reqURL, "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return
			}
			response := []certspotterObject{}
			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
			resp.Body.Close()
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return
			}
			// An empty page means pagination is exhausted.
			if len(response) == 0 {
				break
			}
			for _, cert := range response {
				for _, subdomain := range cert.DNSNames {
					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
				}
			}
			id = response[len(response)-1].ID
		}
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "certspotter"
}

View File

@ -0,0 +1,48 @@
package certspotterold
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the legacy CertSpotter v0 API.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// The channel is closed on every exit path.
		defer close(results)
		resp, err := session.NormalGet(fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		resp.Body.Close()
		// Extract every subdomain mention from the raw JSON text.
		for _, subdomain := range session.Extractor.FindAllString(string(body), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "certspotterold"
}

View File

@ -0,0 +1,112 @@
package commoncrawl
import (
"context"
"errors"
"fmt"
"io/ioutil"
"strings"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// indexURL lists all available commoncrawl index collections.
const indexURL = "https://index.commoncrawl.org/collinfo.json"

// indexResponse describes one commoncrawl collection entry.
type indexResponse struct {
	ID     string `json:"id"`      // collection identifier (contains the year)
	APIURL string `json:"cdx-api"` // CDX query endpoint for the collection
}
// Source is the passive scraping agent for the commoncrawl CDX indexes.
// It is stateless; all per-run state lives inside Run.
type Source struct{}

// years restricts which commoncrawl collections are searched, newest first.
var years = [...]string{"2019", "2018", "2017", "2016"}
// Run function returns all subdomains found with the service. It fetches
// the collection index, picks one CDX API URL per year of interest, and
// queries each via getSubdomains.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		resp, err := session.NormalGet(indexURL)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		// NOTE(review): resp.Body is not closed on this path or on the
		// decode-error path below — possible connection leak; confirm.
		if resp.StatusCode == 500 {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("internal server error")}
			close(results)
			return
		}
		indexes := []indexResponse{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		resp.Body.Close()
		// Pick at most one index API URL per year of interest.
		searchIndexes := make(map[string]string)
		for _, year := range years {
			for _, index := range indexes {
				if strings.Contains(index.ID, year) {
					if _, ok := searchIndexes[year]; !ok {
						searchIndexes[year] = index.APIURL
						break
					}
				}
			}
		}
		// Query each selected index; stop early if one reports failure.
		for _, url := range searchIndexes {
			further := s.getSubdomains(ctx, url, domain, session, results)
			if !further {
				break
			}
		}
		close(results)
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "commoncrawl"
}
// getSubdomains queries one commoncrawl index for URLs matching *.domain
// and streams extracted subdomains into results. It returns false when
// the context is cancelled or the request/read failed, true otherwise.
func (s *Source) getSubdomains(ctx context.Context, url string, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
	for {
		select {
		case <-ctx.Done():
			return false
		default:
			resp, err := session.NormalGet(fmt.Sprintf("%s?url=*.%s&output=json", url, domain))
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return false
			}
			body, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return false
			}
			resp.Body.Close()
			src := string(body)
			for _, subdomain := range session.Extractor.FindAllString(src, -1) {
				// Strips "25"/"2F" prefixes — presumably percent-encoding
				// residue ("%25"/"%2F" with the '%' consumed by the
				// extractor); TODO confirm.
				subdomain = strings.TrimPrefix(subdomain, "25")
				subdomain = strings.TrimPrefix(subdomain, "2F")
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
			}
			// The outer loop never iterates twice: every branch returns.
			return true
		}
	}
}

View File

@ -0,0 +1,48 @@
package crtsh
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the crt.sh CT-log service.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// The channel is closed on every exit path.
		defer close(results)
		resp, err := session.NormalGet(fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		resp.Body.Close()
		// Extract every subdomain mention from the raw JSON text.
		for _, subdomain := range session.Extractor.FindAllString(string(body), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "crtsh"
}

View File

@ -0,0 +1,49 @@
package digicert
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the DigiCert CT search service.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// The channel is closed on every exit path.
		defer close(results)
		resp, err := session.Get(fmt.Sprintf("https://ssltools.digicert.com/chainTester/webservice/ctsearch/search?keyword=%s", domain), "", nil)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		resp.Body.Close()
		// Extract every subdomain mention from the raw response text.
		for _, subdomain := range session.Extractor.FindAllString(string(body), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "digicert"
}

View File

@ -0,0 +1,111 @@
package dnsdumpster
import (
"context"
"io/ioutil"
"net"
"net/http"
"net/url"
"regexp"
"strings"
"time"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// re extracts the value of the csrfmiddlewaretoken hidden input field;
// capture group 1 holds the token itself.
var re = regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")

// getCSRFToken gets the CSRF Token from the page
func getCSRFToken(page string) string {
	subs := re.FindStringSubmatch(page)
	if len(subs) != 2 {
		// No (or malformed) token field on the page.
		return ""
	}
	return strings.TrimSpace(subs[1])
}
// postForm posts the dnsdumpster search form for a domain, using the
// given CSRF token (sent both as a cookie and a header), and returns the
// raw response page.
func postForm(token, domain string) (string, error) {
	dial := net.Dialer{}
	client := &http.Client{
		Transport: &http.Transport{
			DialContext:         dial.DialContext,
			TLSHandshakeTimeout: 10 * time.Second,
		},
	}
	params := url.Values{
		"csrfmiddlewaretoken": {token},
		"targetip":            {domain},
	}
	req, err := http.NewRequest("POST", "https://dnsdumpster.com/", strings.NewReader(params.Encode()))
	if err != nil {
		return "", err
	}
	// The CSRF token needs to be sent as a cookie
	cookie := &http.Cookie{
		Name:   "csrftoken",
		Domain: "dnsdumpster.com",
		Value:  token,
	}
	req.AddCookie(cookie)
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Referer", "https://dnsdumpster.com")
	req.Header.Set("X-CSRF-Token", token)
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	// Now, grab the entire page.
	// FIX: the original discarded the read error and always returned nil,
	// so a truncated read was indistinguishable from success.
	in, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	return string(in), err
}
// Source is the passive scraping agent for the dnsdumpster.com service.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service. It loads
// the landing page to harvest a CSRF token, posts the search form, and
// extracts subdomains from the resulting page.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// Load the landing page to obtain a fresh CSRF token.
		resp, err := session.NormalGet("https://dnsdumpster.com/")
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		resp.Body.Close()
		// NOTE(review): an empty token (regex miss) is still posted and
		// the server will reject it — consider erroring out early.
		csrfToken := getCSRFToken(string(body))
		data, err := postForm(csrfToken, domain)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		for _, subdomain := range session.Extractor.FindAllString(data, -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
		close(results)
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "dnsdumpster"
}

View File

@ -0,0 +1,51 @@
package entrust
import (
"context"
"fmt"
"io/ioutil"
"strings"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the Entrust CT search service.
// It is stateless; all per-run state lives inside Run.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		// The channel is closed on every exit path.
		defer close(results)
		resp, err := session.NormalGet(fmt.Sprintf("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed,cert&domain=%s&includeExpired=true&exactMatch=false&limit=5000", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		resp.Body.Close()
		for _, subdomain := range session.Extractor.FindAllString(string(body), -1) {
			// Strips a "u003d" prefix — JSON unicode-escape residue
			// ("\u003d" with the backslash consumed by the extractor).
			subdomain = strings.TrimPrefix(subdomain, "u003d")
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "entrust"
}

View File

@ -0,0 +1,118 @@
package googleter
import (
"context"
"io/ioutil"
"net/url"
"regexp"
"strconv"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
var (
	// metaRegex and metaRegex2 pull pagination metadata out of the
	// transparency-report payload: capture group 1 is the continuation
	// token; group 3 is parsed as the page count in Run.
	metaRegex  = regexp.MustCompile(`\[null,"(.*)",null,(.*),(.*)]`)
	metaRegex2 = regexp.MustCompile(`\["(.*)",".*",null,(.*),(.*)]`)
)
// agent couples the shared HTTP session with the results channel used
// while paging through the transparency report.
type agent struct {
	subdomains chan subscraping.Result
	session    *subscraping.Session
}
// makeRequest fetches one certificate-search page from the Google
// transparency report. An empty token requests the first page; otherwise
// the token selects the continuation page. It returns the raw body text.
func (a *agent) makeRequest(token string, domain string) (string, error) {
	requestURI := ""
	if token == "" {
		requestURI = "https://www.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?domain=" + url.QueryEscape(domain) + "&include_expired=true&include_subdomains=true"
	} else {
		requestURI = "https://www.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch/page?domain=" + url.QueryEscape(domain) + "&include_expired=true&include_subdomains=true&p=" + url.QueryEscape(token)
	}
	resp, err := a.session.Get(requestURI, "", map[string]string{
		"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36",
		"Referer":    "https://transparencyreport.google.com/https/certificates",
	})
	if err != nil {
		return "", err
	}
	// FIX: close the body on every path — the original leaked the
	// connection when reading failed.
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	return string(body), nil
}
// Source is the passive scraping agent for the Google transparency report.
// It is stateless; per-run state lives in the agent struct.
type Source struct{}
// Run function returns all subdomains found with the service. The first
// (tokenless) request yields page one plus pagination metadata; the
// remaining pages are walked via getSubdomains.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	a := agent{
		session:    session,
		subdomains: results,
	}
	go func() {
		respBody, err := a.makeRequest("", domain)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			close(results)
			return
		}
		var Token string
		// matches[1] is the continuation token; matches[3] the page count.
		matches := metaRegex.FindStringSubmatch(string(respBody))
		if len(matches) <= 1 {
			// No pagination metadata — nothing to enumerate.
			close(results)
			return
		}
		for _, sub := range session.Extractor.FindAllString(respBody, -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: sub}
		}
		Token = matches[1]
		MaxPages, _ := strconv.Atoi(matches[3])
		for i := 1; i <= MaxPages; i++ {
			further := a.getSubdomains(ctx, &Token, domain, session, s, results)
			if !further {
				break
			}
		}
		close(results)
	}()
	return results
}
// Name returns the name of the source, the identifier attached to
// every result this scraper emits.
func (s *Source) Name() string {
	return "googleter"
}
// getSubdomains fetches the continuation page identified by Token, emits
// any subdomains found on it and advances Token for the next call. It
// reports whether pagination should continue.
func (a *agent) getSubdomains(ctx context.Context, Token *string, domain string, session *subscraping.Session, s *Source, results chan subscraping.Result) bool {
	// Honor cancellation between pages; the original accepted ctx but
	// never checked it.
	select {
	case <-ctx.Done():
		return false
	default:
	}

	respBody, err := a.makeRequest(*Token, domain)
	if err != nil {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
		return false
	}

	for _, sub := range session.Extractor.FindAllString(respBody, -1) {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: sub}
	}

	// Both token regexes are tried; metaRegex2's token wins because it
	// is assigned last, matching the original's precedence.
	matches := metaRegex2.FindStringSubmatch(respBody)
	matches2 := metaRegex.FindStringSubmatch(respBody)
	if len(matches2) > 1 {
		*Token = matches2[1]
	}
	if len(matches) > 1 {
		*Token = matches[1]
	}
	return true
}

View File

@ -0,0 +1,48 @@
package hackertarget
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the hackertarget.com
// hostsearch API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		resp, err := session.NormalGet(fmt.Sprintf("http://api.hackertarget.com/hostsearch/?q=%s", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// Get the response body. Close it unconditionally; the original
		// leaked the connection when ReadAll failed.
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		for _, match := range session.Extractor.FindAllString(string(body), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "hackertarget"
	return sourceName
}

View File

@ -0,0 +1,167 @@
package ipv4info
import (
"context"
"errors"
"io/ioutil"
"regexp"
"strconv"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for ipv4info.com.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		src, err := s.fetch(session, "http://ipv4info.com/search/"+domain)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// Walk the chain of intermediate pages (search -> ip-address ->
		// dns -> subdomains); each page embeds the token that links to
		// the next one. The original repeated this fetch block verbatim.
		for _, pattern := range []string{
			"/ip-address/(.*)/" + domain,
			"/dns/(.*?)/" + domain,
			"/subdomains/(.*?)/" + domain,
		} {
			token := regexp.MustCompile(pattern).FindString(src)
			if token == "" {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("Could not get ip tokens")}
				return
			}
			src, err = s.fetch(session, "http://ipv4info.com"+token)
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return
			}
		}

		for _, match := range session.Extractor.FindAllString(src, -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
		}

		nextPage := 1
		for s.getSubdomains(ctx, domain, &nextPage, src, session, results) {
		}
	}()

	return results
}

// fetch retrieves url via the session client and returns the body as a
// string, closing the response body even when reading fails (the
// original leaked it on every ReadAll-error path).
func (s *Source) fetch(session *subscraping.Session, url string) (string, error) {
	resp, err := session.NormalGet(url)
	if err != nil {
		return "", err
	}
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		return "", err
	}
	return string(body), nil
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "ipv4info"
	return sourceName
}
// getSubdomains fetches one "pageN" subdomain listing, emits its
// matches and bumps nextPage. It reports whether another page should be
// requested.
//
// NOTE(review): src is searched for the pageN link but is passed by
// value, so the caller keeps probing the original page's HTML for every
// page number — confirm whether pagination past the first linked page
// actually works.
func (s *Source) getSubdomains(ctx context.Context, domain string, nextPage *int, src string, session *subscraping.Session, results chan subscraping.Result) bool {
	// Stop early once the caller cancels.
	select {
	case <-ctx.Done():
		return false
	default:
	}

	regxTokens := regexp.MustCompile("/subdomains/.*/page" + strconv.Itoa(*nextPage) + "/" + domain + ".html")
	token := regxTokens.FindString(src)
	if token == "" {
		return false
	}

	resp, err := session.NormalGet("http://ipv4info.com" + token)
	if err != nil {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
		return false
	}

	// Close unconditionally; the original leaked the body when ReadAll
	// failed.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
		return false
	}

	for _, match := range session.Extractor.FindAllString(string(body), -1) {
		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
	}
	*nextPage++
	return true
}

View File

@ -0,0 +1,71 @@
package passivetotal
import (
"bytes"
"context"
"net/http"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// response models the JSON body returned by the PassiveTotal enrichment
// endpoint; only the subdomain labels are consumed.
type response struct {
	Subdomains []string `json:"subdomains"`
}
// Source is the passive scraping agent for the PassiveTotal enrichment
// API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		// The source requires basic-auth credentials; skip silently
		// when they are not configured.
		if session.Keys.PassiveTotalUsername == "" || session.Keys.PassiveTotalPassword == "" {
			return
		}

		// Create JSON Get body
		var request = []byte(`{"query":"` + domain + `"}`)

		req, err := http.NewRequest("GET", "https://api.passivetotal.org/v2/enrichment/subdomains", bytes.NewBuffer(request))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}
		// Tie the request to the caller's context so cancellation
		// aborts it; the original ignored ctx entirely.
		req = req.WithContext(ctx)
		req.SetBasicAuth(session.Keys.PassiveTotalUsername, session.Keys.PassiveTotalPassword)
		req.Header.Set("Content-Type", "application/json")

		resp, err := session.Client.Do(req)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		data := response{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
		// Close unconditionally; the original leaked the body when
		// decoding failed.
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// The API returns bare labels; qualify each with the domain.
		for _, subdomain := range data.Subdomains {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain + "." + domain}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "passivetotal"
	return sourceName
}

View File

@ -0,0 +1,63 @@
package securitytrails
import (
"context"
"fmt"
"strings"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// response models the JSON body of the SecurityTrails subdomains
// endpoint; entries are bare labels (sometimes dot-terminated).
type response struct {
	Subdomains []string `json:"subdomains"`
}
// Source is the passive scraping agent for the SecurityTrails API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		if session.Keys.Securitytrails == "" {
			return
		}

		resp, err := session.Get(fmt.Sprintf("https://api.securitytrails.com/v1/domain/%s/subdomains", domain), "", map[string]string{"APIKEY": session.Keys.Securitytrails})
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// "data" avoids shadowing the response type as the original did.
		data := response{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
		// Close unconditionally; the original leaked the body when
		// decoding failed.
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		for _, subdomain := range data.Subdomains {
			// Normalize both "label." and "label" forms to label.domain.
			if strings.HasSuffix(subdomain, ".") {
				subdomain += domain
			} else {
				subdomain += "." + domain
			}
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "securitytrails"
	return sourceName
}

View File

@ -0,0 +1,71 @@
package shodan
import (
"context"
"strconv"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// shodanResult is the envelope returned by the Shodan host-search API;
// Error is non-empty when the API reports a failure in-band.
type shodanResult struct {
	Matches []shodanObject `json:"matches"`
	Result  int            `json:"result"`
	Error   string         `json:"error"`
}
// shodanObject is a single search hit; only its hostnames are consumed.
type shodanObject struct {
	Hostnames []string `json:"hostnames"`
}
// Source is the passive scraping agent for the Shodan host-search API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		if session.Keys.Shodan == "" {
			return
		}

		// Pagination is capped at pages 0-10, as in the original.
		for currentPage := 0; currentPage <= 10; currentPage++ {
			resp, err := session.NormalGet("https://api.shodan.io/shodan/host/search?query=hostname:" + domain + "&page=" + strconv.Itoa(currentPage) + "&key=" + session.Keys.Shodan)
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return
			}

			var response shodanResult
			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
			// Close unconditionally; the original leaked the body when
			// decoding failed.
			resp.Body.Close()
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				return
			}

			// Stop on an in-band API error or an exhausted result set.
			if response.Error != "" || len(response.Matches) == 0 {
				return
			}

			for _, block := range response.Matches {
				for _, hostname := range block.Hostnames {
					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname}
				}
			}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "shodan"
	return sourceName
}

View File

@ -0,0 +1,82 @@
package sitedossier
import (
"context"
"fmt"
"io/ioutil"
"math/rand"
"regexp"
"time"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// reNext matches the "next page" anchor on a sitedossier listing page;
// capture group 1 holds the relative URL of the following page.
var reNext = regexp.MustCompile("<a href=\"([A-Za-z0-9\\/.]+)\"><b>")
// agent bundles the state shared across sitedossier page fetches:
// the result channel and the shared scraping session.
type agent struct {
	results chan subscraping.Result
	session *subscraping.Session
}
// enumerate scrapes one sitedossier listing page, emits its subdomains
// and recurses into the next page when one is linked. On error it
// closes a.results itself and returns the error; on success it returns
// nil and leaves the channel open for the caller to close.
func (a *agent) enumerate(ctx context.Context, baseURL string) error {
	select {
	case <-ctx.Done():
		return nil
	default:
	}

	resp, err := a.session.NormalGet(baseURL)
	if err != nil {
		a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
		close(a.results)
		return err
	}

	// Close unconditionally; the original leaked the body when ReadAll
	// failed.
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
		close(a.results)
		return err
	}
	src := string(body)

	for _, match := range a.session.Extractor.FindAllString(src, -1) {
		a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Subdomain, Value: match}
	}

	match1 := reNext.FindStringSubmatch(src)
	// Randomized delay between page fetches.
	time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)

	if len(match1) > 0 {
		// Propagate the recursive result. The original discarded it,
		// so after a nested error (which closes a.results) the caller
		// saw nil and closed the channel a second time — a panic.
		return a.enumerate(ctx, "http://www.sitedossier.com"+match1[1])
	}
	return nil
}
// Source is the passive scraping agent for sitedossier.com.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	a := agent{session: session, results: results}

	go func() {
		// enumerate closes the channel itself when it returns an error,
		// so the channel is only closed here on success.
		if err := a.enumerate(ctx, fmt.Sprintf("http://www.sitedossier.com/parentdomain/%s", domain)); err == nil {
			close(a.results)
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "sitedossier"
	return sourceName
}

View File

@ -0,0 +1,49 @@
package threatcrowd
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the ThreatCrowd API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		resp, err := session.NormalGet(fmt.Sprintf("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=%s", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// Get the response body. Close it unconditionally; the original
		// leaked the connection when ReadAll failed.
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		for _, match := range session.Extractor.FindAllString(string(body), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "threatcrowd"
	return sourceName
}

View File

@ -0,0 +1,49 @@
package threatminer
import (
"context"
"fmt"
"io/ioutil"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the ThreatMiner API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		resp, err := session.NormalGet(fmt.Sprintf("https://api.threatminer.org/v2/domain.php?q=%s&rt=5", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// Get the response body. Close it unconditionally; the original
		// leaked the connection when ReadAll failed.
		body, err := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		for _, match := range session.Extractor.FindAllString(string(body), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "threatminer"
	return sourceName
}

View File

@ -0,0 +1,60 @@
package urlscan
import (
"context"
"fmt"
jsoniter "github.com/json-iterator/go"
"github.com/m-mizutani/urlscan-go/urlscan"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for urlscan.io.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		if session.Keys.URLScan == "" {
			return
		}

		// Shared error-reporting helper for the failure paths below.
		reportError := func(err error) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
		}

		// Submit a scan for the domain and wait for it to complete.
		client := urlscan.NewClient(session.Keys.URLScan)
		task, err := client.Submit(urlscan.SubmitArguments{URL: fmt.Sprintf("https://%s", domain)})
		if err != nil {
			reportError(err)
			return
		}
		if err = task.Wait(); err != nil {
			reportError(err)
			return
		}

		// Serialize the scan result and extract subdomains from it.
		data, err := jsoniter.Marshal(task.Result.Data)
		if err != nil {
			reportError(err)
			return
		}
		for _, m := range session.Extractor.FindAllString(string(data), -1) {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: m}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "urlscan"
	return sourceName
}

View File

@ -0,0 +1,56 @@
package virustotal
import (
"context"
"fmt"
jsoniter "github.com/json-iterator/go"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// response models the VirusTotal domain-report JSON; only the
// subdomains list is consumed.
type response struct {
	Subdomains []string `json:"subdomains"`
}
// Source is the passive scraping agent for the VirusTotal API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		if session.Keys.Virustotal == "" {
			return
		}

		resp, err := session.NormalGet(fmt.Sprintf("https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s", session.Keys.Virustotal, domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		data := response{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
		// Close unconditionally; the original leaked the body when
		// decoding failed.
		resp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// The API returns fully-qualified subdomains; emit them as-is.
		for _, subdomain := range data.Subdomains {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "virustotal"
	return sourceName
}

View File

@ -0,0 +1,51 @@
package waybackarchive
import (
"context"
"fmt"
"io/ioutil"
"strings"
"github.com/projectdiscovery/subfinder/pkg/subscraping"
)
// Source is the passive scraping agent for the Wayback Machine CDX API.
type Source struct{}
// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)

		pagesResp, err := session.NormalGet(fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=json&fl=original&collapse=urlkey", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		// Close unconditionally; the original leaked the body when
		// ReadAll failed.
		body, err := ioutil.ReadAll(pagesResp.Body)
		pagesResp.Body.Close()
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			return
		}

		for _, subdomain := range session.Extractor.FindAllString(string(body), -1) {
			// Strip "25"/"2F" prefixes — presumably percent-encoding
			// residue ("%25", "%2F") left in archived URLs once the "%"
			// itself is gone; confirm against the extractor's output.
			subdomain = strings.TrimPrefix(subdomain, "25")
			subdomain = strings.TrimPrefix(subdomain, "2F")
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}
	}()

	return results
}
// Name returns the name of the source
func (s *Source) Name() string {
	const sourceName = "waybackarchive"
	return sourceName
}

59
pkg/subscraping/types.go Executable file
View File

@ -0,0 +1,59 @@
package subscraping
import (
"context"
"net/http"
"regexp"
)
// Source is an interface inherited by each passive source.
type Source interface {
	// Run takes a domain as argument and a session object
	// which contains the extractor for subdomains, http client
	// and other stuff. It returns a channel of results that is
	// closed by the source when enumeration finishes.
	Run(context.Context, string, *Session) <-chan Result

	// Name returns the name of the source.
	Name() string
}
// Session is the option passed to the source, an option is created
// uniquely for each source.
type Session struct {
	// Extractor is the regex for subdomains created for each domain
	Extractor *regexp.Regexp
	// Keys is the API keys for the application
	Keys Keys
	// Client is the current http client
	Client *http.Client
}
// Keys contains the current API Keys we have in store
type Keys struct {
	Binaryedge string `json:"binaryedge"`
	// NOTE(review): the Censys field names and their JSON tags disagree
	// (Token/Secret vs censysUsername/censysPassword) — confirm which
	// naming is canonical before renaming either side.
	CensysToken          string `json:"censysUsername"`
	CensysSecret         string `json:"censysPassword"`
	Certspotter          string `json:"certspotter"`
	PassiveTotalUsername string `json:"passivetotal_username"`
	PassiveTotalPassword string `json:"passivetotal_password"`
	Securitytrails       string `json:"securitytrails"`
	Shodan               string `json:"shodan"`
	URLScan              string `json:"urlscan"`
	Virustotal           string `json:"virustotal"`
}
// Result is a result structure returned by a source.
type Result struct {
	// Type discriminates which of Value (Subdomain) or Error (Error)
	// carries the payload.
	Type   ResultType
	Source string
	Value  string
	Error  error
}
// ResultType is the type of result returned by the source
type ResultType int

// Types of results returned by the source
const (
	// Subdomain is a discovered subdomain, carried in Result.Value.
	Subdomain ResultType = iota
	// Error is a source failure, carried in Result.Error.
	Error
)

30
pkg/subscraping/utils.go Executable file
View File

@ -0,0 +1,30 @@
package subscraping
import (
"regexp"
"sync"
)
// subdomainExtractorMutex serializes extractor creation across
// concurrently-enumerated domains.
var subdomainExtractorMutex = &sync.Mutex{}

// NewSubdomainExtractor creates a new regular expression to extract
// subdomains from text based on the given domain. The domain is quoted
// so that its dots (and any other metacharacters) match literally; the
// original interpolated it raw, letting "." match any byte.
func NewSubdomainExtractor(domain string) (*regexp.Regexp, error) {
	subdomainExtractorMutex.Lock()
	defer subdomainExtractorMutex.Unlock()
	extractor, err := regexp.Compile(`[a-zA-Z0-9\*_.-]+\.` + regexp.QuoteMeta(domain))
	if err != nil {
		return nil, err
	}
	return extractor, nil
}
// Exists check if a key exist in a slice
func Exists(values []string, key string) bool {
	found := false
	for i := range values {
		if values[i] == key {
			found = true
			break
		}
	}
	return found
}

View File

@ -1,209 +0,0 @@
package subf
import (
"bufio"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"strings"
"github.com/subfinder/subfinder/libsubfinder/engines/passive"
"github.com/subfinder/subfinder/libsubfinder/helper"
)
// Subfinder represent a subdomain enumerator instance.
type Subfinder struct {
	// State holds all runtime configuration shared by the engines.
	State *helper.State
}
// NewSubfinder instantiate a new subfinder
func NewSubfinder() *Subfinder {
	instance := &Subfinder{State: helper.InitState()}
	return instance
}
// Init setup the instance: applies -set-config and -set-settings pairs,
// gathers resolvers from flags or file, validates bruteforce options,
// then prepares output targets and the domain arguments. Order matters:
// the resolver flags are parsed before the common-resolver fallback.
func (s *Subfinder) Init() {
	s.parseConfig()
	s.parseSetting()
	s.parseComResolver()
	s.parseListResolver()
	s.parseBruteForce()
	s.setCommonResolver()
	s.setOutput()
	s.setDomain()
}
// parseConfig applies "key=value" pairs from the -set-config flag to
// the configuration state and persists the configuration file after
// each applied pair, exiting on a write failure.
func (s *Subfinder) parseConfig() {
	if s.State.SetConfig == "none" {
		return
	}

	setConfig := strings.Split(s.State.SetConfig, ",")

	// Build Configuration path
	home := helper.GetHomeDir()
	path := home + "/.config/subfinder/config.json"

	// Accepted (case-insensitive) option names mapped to the struct
	// field they populate; replaces the original nine-branch chain of
	// strings.EqualFold comparisons.
	fields := map[string]string{
		"virustotalapikey":     "VirustotalAPIKey",
		"passivetotalusername": "PassivetotalUsername",
		"passivetotalkey":      "PassivetotalKey",
		"securitytrailskey":    "SecurityTrailsKey",
		"riddleremail":         "RiddlerEmail",
		"riddlerpassword":      "RiddlerPassword",
		"censysusername":       "CensysUsername",
		"censyssecret":         "CensysSecret",
		"shodanapikey":         "ShodanAPIKey",
	}

	for _, config := range setConfig {
		object := strings.Split(config, "=")

		// Change value dynamically using reflect package
		if field, ok := fields[strings.ToLower(object[0])]; ok {
			reflect.ValueOf(&s.State.ConfigState).Elem().FieldByName(field).SetString(object[1])
		}

		configJSON, _ := json.MarshalIndent(s.State.ConfigState, "", " ")

		err := ioutil.WriteFile(path, configJSON, 0644)
		if err != nil {
			fmt.Printf("\n\n[!] Error : %v\n", err)
			os.Exit(1)
		}

		fmt.Printf("Successfully configured %s%s%s=>%s\n", helper.Info, object[0], helper.Reset, object[1])
	}
}
// parseSetting applies "Name=value" pairs from the -set-settings flag
// directly onto CurrentSettings fields via reflection.
func (s *Subfinder) parseSetting() {
	if s.State.SetSetting == "none" {
		return
	}

	setSetting := strings.Split(s.State.SetSetting, ",")

	for _, setting := range setSetting {
		object := strings.Split(setting, "=")

		// Change value dynamically using reflect package
		// NOTE(review): an unknown field name (zero reflect.Value) or a
		// pair without "=" (object[1] out of range) panics here —
		// confirm inputs are validated upstream.
		reflect.ValueOf(&s.State.CurrentSettings).Elem().FieldByName(object[0]).SetString(object[1])

		if !s.State.Silent && s.State.Verbose {
			fmt.Printf("Successfully Set %s%s%s=>%s\n", helper.Info, object[0], helper.Reset, object[1])
		}
	}
}
// parseComResolver splits the comma-separated -r flag and appends each
// resolver to the pool.
func (s *Subfinder) parseComResolver() {
	if s.State.ComResolver == "" {
		return
	}
	for _, resolver := range strings.Split(s.State.ComResolver, ",") {
		s.State.LoadResolver = append(s.State.LoadResolver, resolver)
	}
}
// parseListResolver loads resolver addresses, one per line, from the
// file named by the list-resolver flag and appends them to the pool.
func (s *Subfinder) parseListResolver() {
	if s.State.ListResolver == "" {
		return
	}

	// Load the resolvers from file
	file, err := os.Open(s.State.ListResolver)
	if err != nil {
		fmt.Fprintf(os.Stderr, "\nerror: %v\n", err)
		os.Exit(1)
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		s.State.LoadResolver = append(s.State.LoadResolver, scanner.Text())
	}
	// Surface read failures instead of silently truncating the list;
	// the original never checked scanner.Err().
	if err := scanner.Err(); err != nil {
		fmt.Fprintf(os.Stderr, "\nerror: %v\n", err)
		os.Exit(1)
	}
}
// parseBruteForce validates the bruteforce configuration: when -b is
// enabled, a wordlist must be supplied and must exist on disk.
func (s *Subfinder) parseBruteForce() {
	// Idiomatic boolean test; the original compared "== true".
	if !s.State.Bruteforce {
		return
	}
	if s.State.Wordlist == "" {
		if !s.State.Silent {
			fmt.Printf("%s-> Must provide a wordlist when bruteforce is enabled.%s\nTry %s'./subfinder -h'%s for more information\n", helper.Bad, helper.Reset, helper.Info, helper.Reset)
		}
		os.Exit(1)
	}
	if !helper.Exists(s.State.Wordlist) {
		if !s.State.Silent {
			fmt.Printf("%s-> The wordlist file '%s' does not exist.%s\n", helper.Bad, s.State.Wordlist, helper.Reset)
		}
		os.Exit(1)
	}
}
// setCommonResolver falls back to a default set of public resolvers
// when the user supplied none via -r or -rL.
func (s *Subfinder) setCommonResolver() {
	// Use the default resolvers
	if s.State.ComResolver != "" || s.State.ListResolver != "" {
		return
	}
	// One variadic append replaces the original's eight repeated calls.
	s.State.LoadResolver = append(s.State.LoadResolver,
		"1.1.1.1",
		"1.0.0.1",
		"8.8.8.8",
		"8.8.4.4",
		"9.9.9.9",
		"9.9.9.10",
		"208.67.222.222",
		"208.67.220.220",
	)
}
// setOutput prepares the output destination. For a single output file
// (non-JSON formats) it opens an append-mode handle; for a directory
// target it only warns when the directory is missing.
func (s *Subfinder) setOutput() {
	if s.State.Output != "" {
		dir := filepath.Dir(s.State.Output)
		if !helper.Exists(dir) {
			fmt.Printf("\n%s-> The specified output directory does not exists !%s\n", helper.Yellow, helper.Reset)
		} else {
			// Get a handle to the out file if it is not json
			if !s.State.AquatoneJSON && !s.State.IsJSON {
				var err error
				s.State.OutputHandle, err = os.OpenFile(s.State.Output, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
				if err != nil {
					// NOTE(review): the open error is silently swallowed;
					// callers are left with a nil OutputHandle — consider
					// reporting the failure.
					return
				}
			}
		}
	} else if s.State.OutputDir != "" {
		if !helper.Exists(s.State.OutputDir) {
			fmt.Printf("\n%s-> The specified output directory does not exists !%s\n", helper.Yellow, helper.Reset)
		}
	}
}
// setDomain validates that a target domain or a domain-list file was
// supplied, and that the list file exists on disk.
func (s *Subfinder) setDomain() {
	noTarget := s.State.Domain == "" && s.State.DomainList == ""
	if noTarget {
		if !s.State.Silent {
			fmt.Printf("%s-> Missing \"domain\" argument %s\nTry %s'./subfinder -h'%s for more information\n", helper.Bad, helper.Reset, helper.Info, helper.Reset)
		}
		os.Exit(1)
	}

	if s.State.DomainList != "" && !helper.Exists(s.State.DomainList) {
		if !s.State.Silent {
			fmt.Printf("%s-> The domainlist file '%s' does not exist.%s\n", helper.Bad, s.State.DomainList, helper.Reset)
		}
		os.Exit(1)
	}
}
// PassiveEnumeration execute a passive enumeration using the current
// state and returns the discovered subdomains.
func (s *Subfinder) PassiveEnumeration() []string {
	return passive.Enumerate(s.State)
}