Merge branch 'master' into read-ignoref-from-cwd

Mzack9999 2020-10-13 02:13:04 +02:00 committed by GitHub
commit 1f2959708b
11 changed files with 267 additions and 96 deletions

View File

@@ -39,6 +39,7 @@ type Options struct {
TemplateList bool // List available templates
Stdin bool // Stdin specifies whether stdin input was given to the process
StopAtFirstMatch bool // Stop processing template at first full match (this may break chained requests)
BulkSize int // Number of targets analyzed in parallel for each template
}
type multiStringFlag []string
@@ -80,6 +81,7 @@ func ParseOptions() *Options {
flag.BoolVar(&options.TemplateList, "tl", false, "List available templates")
flag.IntVar(&options.RateLimit, "rate-limit", -1, "Per Target Rate-Limit")
flag.BoolVar(&options.StopAtFirstMatch, "stop-at-first-match", false, "Stop processing http requests at first match (this may break template/workflow logic)")
flag.IntVar(&options.BulkSize, "bulk-size", 150, "Number of hosts analyzed in parallel per template")
flag.Parse()
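
The default of 150 matches the worker count previously hard-coded in the pipelining executer (see below). Assuming the usual -t/-l flags, a run capping per-template concurrency at 25 hosts might look like:

nuclei -t cves/ -l urls.txt -bulk-size 25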

View File

@@ -9,7 +9,6 @@ import (
"path"
"path/filepath"
"strings"
"sync"
tengo "github.com/d5/tengo/v2"
"github.com/d5/tengo/v2/stdlib"
@@ -21,6 +20,7 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/requests"
"github.com/projectdiscovery/nuclei/v2/pkg/templates"
"github.com/projectdiscovery/nuclei/v2/pkg/workflows"
"github.com/remeh/sizedwaitgroup"
)
// workflowTemplates contains the initialized workflow templates per template group
@@ -79,12 +79,12 @@ func (r *Runner) processTemplateWithList(p progress.IProgress, template *templat
var globalresult atomicboolean.AtomBool
var wg sync.WaitGroup
wg := sizedwaitgroup.New(r.options.BulkSize)
scanner := bufio.NewScanner(strings.NewReader(r.input))
for scanner.Scan() {
URL := scanner.Text()
wg.Add(1)
wg.Add()
go func(URL string) {
defer wg.Done()
@@ -125,12 +125,12 @@ func (r *Runner) processWorkflowWithList(p progress.IProgress, workflow *workflo
logicBytes := []byte(workflow.Logic)
var wg sync.WaitGroup
wg := sizedwaitgroup.New(r.options.BulkSize)
scanner := bufio.NewScanner(strings.NewReader(r.input))
for scanner.Scan() {
targetURL := scanner.Text()
wg.Add(1)
wg.Add()
go func(targetURL string) {
defer wg.Done()
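
Both loops swap sync.WaitGroup for github.com/remeh/sizedwaitgroup, whose zero-argument Add() blocks once BulkSize goroutines are in flight — this is what bounds per-template parallelism. A minimal, self-contained sketch of the pattern (values made up, not from this diff):

package main

import (
	"fmt"

	"github.com/remeh/sizedwaitgroup"
)

func main() {
	// At most 3 goroutines run at once; Add() blocks until a slot
	// frees up, unlike sync.WaitGroup's non-blocking Add(1).
	swg := sizedwaitgroup.New(3)
	for i := 0; i < 10; i++ {
		swg.Add()
		go func(n int) {
			defer swg.Done()
			fmt.Println("scanning target", n)
		}(i)
	}
	swg.Wait()
}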

View File

@@ -203,17 +203,20 @@ func (e *HTTPExecuter) ExecuteTurboHTTP(p progress.IProgress, reqURL string) *Re
pipeOptions := rawhttp.DefaultPipelineOptions
pipeOptions.Host = URL.Host
pipeOptions.MaxConnections = 1
if e.bulkHTTPRequest.PipelineMaxWorkers > 0 {
pipeOptions.MaxConnections = e.bulkHTTPRequest.PipelineMaxWorkers
if e.bulkHTTPRequest.PipelineConcurrentConnections > 0 {
pipeOptions.MaxConnections = e.bulkHTTPRequest.PipelineConcurrentConnections
}
if e.bulkHTTPRequest.PipelineRequestsPerConnection > 0 {
pipeOptions.MaxPendingRequests = e.bulkHTTPRequest.PipelineRequestsPerConnection
}
pipeclient := rawhttp.NewPipelineClient(pipeOptions)
// Workers that keep enqueuing new requests
// 150 should be a sufficient value to keep queues always full
maxWorkers := 150
if e.bulkHTTPRequest.PipelineMaxWorkers > 0 {
maxWorkers = e.bulkHTTPRequest.PipelineMaxWorkers
// in case the queue is bigger, increase the workers
if pipeOptions.MaxPendingRequests > maxWorkers {
maxWorkers = pipeOptions.MaxPendingRequests
}
swg := sizedwaitgroup.New(maxWorkers)
for e.bulkHTTPRequest.Next(reqURL) && !result.Done {
request, err := e.bulkHTTPRequest.MakeHTTPRequest(reqURL, dynamicvalues, e.bulkHTTPRequest.Current(reqURL))
@@ -226,8 +229,8 @@ func (e *HTTPExecuter) ExecuteTurboHTTP(p progress.IProgress, reqURL string) *Re
defer swg.Done()
// HTTP pipelining ignores rate limit
// If the request was built correctly then execute it
request.Pipeline = true
request.PipelineClient = pipeclient
err = e.handleHTTP(reqURL, httpRequest, dynamicvalues, result)
if err != nil {
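
The reworked sizing ties the producer pool to the renamed YAML options: pipeline-concurrent-connections sets the client's MaxConnections, pipeline-requests-per-connection sets MaxPendingRequests, and the default of 150 enqueuing workers is raised only when the pending-request queue is deeper than the pool. A sketch of that arithmetic with hypothetical option values:

package main

import "fmt"

func main() {
	maxWorkers := 150         // default producer count, normally enough to keep queues full
	maxPendingRequests := 500 // hypothetical pipeline-requests-per-connection value
	if maxPendingRequests > maxWorkers {
		// a queue deeper than the producer pool would sit underfed,
		// so grow the pool to match
		maxWorkers = maxPendingRequests
	}
	fmt.Println("workers:", maxWorkers) // workers: 500
}
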
@@ -263,6 +266,7 @@ func (e *HTTPExecuter) ExecuteHTTP(p progress.IProgress, reqURL string) *Result
}
dynamicvalues := make(map[string]interface{})
_ = dynamicvalues
// verify if the URL is already being processed
if e.bulkHTTPRequest.HasGenerator(reqURL) {
@@ -413,7 +417,7 @@ func (e *HTTPExecuter) handleHTTP(reqURL string, request *requests.HTTPRequest,
result.Meta = request.Meta
result.GotResults = true
result.Unlock()
e.writeOutputHTTP(request, resp, body, matcher, nil)
e.writeOutputHTTP(request, resp, body, matcher, nil, result.Meta)
}
}
}
@@ -444,7 +448,7 @@ func (e *HTTPExecuter) handleHTTP(reqURL string, request *requests.HTTPRequest,
// Write a final string of output if matcher type is
// AND or if we have extractors for the mechanism too.
if len(outputExtractorResults) > 0 || matcherCondition == matchers.ANDCondition {
e.writeOutputHTTP(request, resp, body, nil, outputExtractorResults)
e.writeOutputHTTP(request, resp, body, nil, outputExtractorResults, result.Meta)
result.Lock()
result.GotResults = true
result.Unlock()

View File

@@ -7,17 +7,18 @@ import (
)
type jsonOutput struct {
Template string `json:"template"`
Type string `json:"type"`
Matched string `json:"matched"`
MatcherName string `json:"matcher_name,omitempty"`
ExtractedResults []string `json:"extracted_results,omitempty"`
Name string `json:"name"`
Severity string `json:"severity"`
Author string `json:"author"`
Description string `json:"description"`
Request string `json:"request,omitempty"`
Response string `json:"response,omitempty"`
Template string `json:"template"`
Type string `json:"type"`
Matched string `json:"matched"`
MatcherName string `json:"matcher_name,omitempty"`
ExtractedResults []string `json:"extracted_results,omitempty"`
Name string `json:"name"`
Severity string `json:"severity"`
Author string `json:"author"`
Description string `json:"description"`
Request string `json:"request,omitempty"`
Response string `json:"response,omitempty"`
Meta map[string]interface{} `json:"meta,omitempty"`
}
// unsafeToString converts byte slice to string with zero allocations
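
The only functional change here is the new Meta field (the other fields are merely re-aligned); omitempty keeps it out of results that carry no payload metadata. A trimmed-down illustration of what a marshaled result could look like (field values are hypothetical):

package main

import (
	"encoding/json"
	"fmt"
)

// result is a cut-down stand-in for jsonOutput, for illustration only.
type result struct {
	Template string                 `json:"template"`
	Matched  string                 `json:"matched"`
	Meta     map[string]interface{} `json:"meta,omitempty"`
}

func main() {
	r := result{
		Template: "basic-auth-bruteforce", // hypothetical template name
		Matched:  "https://example.com/login",
		Meta:     map[string]interface{}{"username": "admin", "password": "admin"},
	}
	b, _ := json.Marshal(r)
	fmt.Println(string(b))
	// {"template":"basic-auth-bruteforce","matched":"https://example.com/login","meta":{"password":"admin","username":"admin"}}
}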

View File

@@ -12,7 +12,7 @@ import (
)
// writeOutputHTTP writes http output to streams
func (e *HTTPExecuter) writeOutputHTTP(req *requests.HTTPRequest, resp *http.Response, body string, matcher *matchers.Matcher, extractorResults []string) {
func (e *HTTPExecuter) writeOutputHTTP(req *requests.HTTPRequest, resp *http.Response, body string, matcher *matchers.Matcher, extractorResults []string, meta map[string]interface{}) {
var URL string
// rawhttp
if req.RawRequest != nil {
@@ -32,6 +32,7 @@ func (e *HTTPExecuter) writeOutputHTTP(req *requests.HTTPRequest, resp *http.Res
Severity: e.template.Info.Severity,
Author: e.template.Info.Author,
Description: e.template.Info.Description,
Meta: meta,
}
if matcher != nil && len(matcher.Name) > 0 {
@@ -99,9 +100,7 @@ func (e *HTTPExecuter) writeOutputHTTP(req *requests.HTTPRequest, resp *http.Res
builder.WriteString("] ")
}
// Escape the URL by replacing all % with %%
escapedURL := strings.ReplaceAll(URL, "%", "%%")
builder.WriteString(escapedURL)
builder.WriteString(URL)
// If any extractors, write the results
if len(extractorResults) > 0 {
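
The dropped %-escaping suggests the built string is no longer passed through a printf-style formatter downstream; written straight into the strings.Builder, a literal % needs no doubling. A minimal illustration of the distinction:

package main

import (
	"fmt"
	"strings"
)

func main() {
	url := "https://example.com/?q=100%25" // %25 is a literal '%'
	var b strings.Builder
	b.WriteString(url) // direct write: '%' is just a byte, no escaping needed
	fmt.Println(b.String())        // https://example.com/?q=100%25
	fmt.Printf("%s\n", b.String()) // also safe: the URL is an argument, not the format string
}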

View File

@@ -7,6 +7,8 @@ import (
"encoding/base64"
"encoding/hex"
"html"
"math"
"math/rand"
"net/url"
"regexp"
"strings"
@@ -14,6 +16,9 @@ import (
"github.com/Knetic/govaluate"
)
var letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
var numbers = "1234567890"
// HelperFunctions contains the dsl functions
func HelperFunctions() (functions map[string]govaluate.ExpressionFunction) {
functions = make(map[string]govaluate.ExpressionFunction)
@@ -144,5 +149,106 @@ func HelperFunctions() (functions map[string]govaluate.ExpressionFunction) {
return compiled.MatchString(args[1].(string)), nil
}
// random generators
functions["rand_char"] = func(args ...interface{}) (interface{}, error) {
chars := letters + numbers
bad := ""
if len(args) >= 1 {
chars = args[0].(string)
}
if len(args) >= 2 {
bad = args[1].(string)
}
chars = TrimAll(chars, bad)
return chars[rand.Intn(len(chars))], nil
}
functions["rand_base"] = func(args ...interface{}) (interface{}, error) {
l := 0
bad := ""
base := letters + numbers
if len(args) >= 1 {
l = args[0].(int)
}
if len(args) >= 2 {
bad = args[1].(string)
}
if len(args) >= 3 {
base = args[2].(string)
}
base = TrimAll(base, bad)
return RandSeq(base, l), nil
}
functions["rand_text_alphanumeric"] = func(args ...interface{}) (interface{}, error) {
l := 0
bad := ""
chars := letters + numbers
if len(args) >= 1 {
l = args[0].(int)
}
if len(args) >= 2 {
bad = args[1].(string)
}
chars = TrimAll(chars, bad)
return RandSeq(chars, l), nil
}
functions["rand_text_alpha"] = func(args ...interface{}) (interface{}, error) {
l := 0
bad := ""
chars := letters
if len(args) >= 1 {
l = args[0].(int)
}
if len(args) >= 2 {
bad = args[1].(string)
}
chars = TrimAll(chars, bad)
return RandSeq(chars, l), nil
}
functions["rand_text_numeric"] = func(args ...interface{}) (interface{}, error) {
l := 0
bad := ""
chars := numbers
if len(args) >= 1 {
l = args[0].(int)
}
if len(args) >= 2 {
bad = args[1].(string)
}
chars = TrimAll(chars, bad)
return RandSeq(chars, l), nil
}
functions["rand_int"] = func(args ...interface{}) (interface{}, error) {
min := 0
max := math.MaxInt32
if len(args) >= 1 {
min = args[0].(int)
}
if len(args) >= 2 {
max = args[1].(int)
}
return rand.Intn(max-min) + min, nil
}
return functions
}
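
All the rand_* helpers funnel through TrimAll/RandSeq (added in the next file); rand_int is plain arithmetic over a half-open range. Also note rand_char returns a byte (indexing a string yields uint8), not a one-character string. A standalone sketch of the rand_int arithmetic (like the helper itself, it panics when max <= min):

package main

import (
	"fmt"
	"math/rand"
)

// randInt mirrors the helper's expression: Intn(max-min)+min draws
// uniformly from the half-open interval [min, max).
func randInt(min, max int) int {
	return rand.Intn(max-min) + min
}

func main() {
	for i := 0; i < 3; i++ {
		fmt.Println(randInt(10, 20)) // always >= 10 and < 20
	}
}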

View File

@@ -3,6 +3,7 @@ package generators
import (
"bufio"
"fmt"
"math/rand"
"os"
"strings"
)
@@ -158,3 +159,28 @@ func FileExists(filename string) bool {
return !info.IsDir()
}
// TrimDelimiters removes trailing brackets
func SliceContins(s []string, k string) bool {
for _, a := range s {
if a == k {
return true
}
}
return false
}
func TrimAll(s string, cutset string) string {
for _, c := range cutset {
s = strings.ReplaceAll(s, string(c), "")
}
return s
}
func RandSeq(base string, n int) string {
b := make([]rune, n)
for i := range b {
b[i] = rune(base[rand.Intn(len(base))])
}
return string(b)
}
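
Usage of the two string helpers above, copied locally so the example runs standalone: TrimAll strips every rune of the cutset, and RandSeq samples n characters from whatever remains.

package main

import (
	"fmt"
	"math/rand"
	"strings"
)

// trimAll and randSeq are copies of the helpers in the diff above.
func trimAll(s, cutset string) string {
	for _, c := range cutset {
		s = strings.ReplaceAll(s, string(c), "")
	}
	return s
}

func randSeq(base string, n int) string {
	b := make([]rune, n)
	for i := range b {
		b[i] = rune(base[rand.Intn(len(base))])
	}
	return string(b)
}

func main() {
	base := trimAll("abcdef", "bd") // "acef": the "bad" characters are stripped first
	fmt.Println(randSeq(base, 8))   // e.g. "cafeacfe"
}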

View File

@@ -26,10 +26,16 @@ func Add(k string, rateLimit int) {
}
func Take(k string) {
rl := take(k)
rl.Take()
}
func take(k string) ratelimit.Limiter {
defaultrwmutex.RLock()
defer defaultrwmutex.RUnlock()
defaultGlobalRateLimiter.ratesLimiters[k].Take()
return defaultGlobalRateLimiter.ratesLimiters[k]
}
func Del(k string, rateLimit int) {
@@ -56,11 +62,16 @@ func (grl *GlobalRateLimiter) Add(k string, rateLimit int) {
}
}
func (grl *GlobalRateLimiter) Take(k string) {
func (grl *GlobalRateLimiter) take(k string) ratelimit.Limiter {
grl.RLock()
defer grl.RUnlock()
grl.ratesLimiters[k].Take()
return grl.ratesLimiters[k]
}
func (grl *GlobalRateLimiter) Take(k string) {
rl := grl.take(k)
rl.Take()
}
func (grl *GlobalRateLimiter) Del(k string, rateLimit int) {
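
The refactor splits lookup from blocking: previously Take() ran while the read lock was held, so a limiter waiting for its next slot would also stall any writer contending for the mutex; now the limiter is fetched under RLock and Take() blocks only after release. A minimal sketch of the pattern, assuming go.uber.org/ratelimit (which matches the ratelimit.Limiter type in the diff):

package main

import (
	"sync"

	"go.uber.org/ratelimit"
)

type limiters struct {
	sync.RWMutex
	m map[string]ratelimit.Limiter
}

// take only looks up the limiter; the lock is released before anyone blocks.
func (l *limiters) take(k string) ratelimit.Limiter {
	l.RLock()
	defer l.RUnlock()
	return l.m[k]
}

// Take blocks outside the lock, so a slow limiter can't stall Add/Del.
func (l *limiters) Take(k string) {
	rl := l.take(k)
	rl.Take()
}

func main() {
	l := &limiters{m: map[string]ratelimit.Limiter{
		"example.com": ratelimit.New(10), // 10 ops/second
	}}
	for i := 0; i < 3; i++ {
		l.Take("example.com")
	}
}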

View File

@@ -28,56 +28,56 @@ var urlWithPortRgx = regexp.MustCompile(`{{BaseURL}}:(\d+)`)
// BulkHTTPRequest contains a request to be made from a template
type BulkHTTPRequest struct {
// Path contains the path/s for the request
Path []string `yaml:"path"`
// Matchers contains the detection mechanism for the request to identify
// whether the request was successful
Matchers []*matchers.Matcher `yaml:"matchers,omitempty"`
// Extractors contains the extraction mechanism for the request to identify
// and extract parts of the response.
Extractors []*extractors.Extractor `yaml:"extractors,omitempty"`
// Raw contains raw requests
Raw []string `yaml:"raw,omitempty"`
Name string `yaml:"Name,omitempty"`
// AttackType is the attack type
// Sniper, PitchFork and ClusterBomb. Default is Sniper
AttackType string `yaml:"attack,omitempty"`
// Method is the request method, whether GET, POST, PUT, etc
Method string `yaml:"method"`
// Body is an optional parameter which contains the request body for POST methods, etc
Body string `yaml:"body,omitempty"`
// MatchersCondition is the condition of the matchers
// whether to use AND or OR. Default is OR.
MatchersCondition string `yaml:"matchers-condition,omitempty"`
// MaxRedirects is the maximum number of redirects that should be followed.
MaxRedirects int `yaml:"max-redirects,omitempty"`
PipelineMaxConnections int `yaml:"pipeline-max-connections,omitempty"`
PipelineMaxWorkers int `yaml:"pipeline-max-workers,omitempty"`
Threads int `yaml:"threads,omitempty"`
RateLimit int `yaml:"rate-limit,omitempty"`
// Internal Finite State Machine keeping track of scan process
gsfm *GeneratorFSM
// CookieReuse is an optional setting that makes cookies shared within requests
CookieReuse bool `yaml:"cookie-reuse,omitempty"`
// Redirects specifies whether redirects should be followed.
Redirects bool `yaml:"redirects,omitempty"`
Redirects bool `yaml:"redirects,omitempty"`
Name string `yaml:"Name,omitempty"`
// AttackType is the attack type
// Sniper, PitchFork and ClusterBomb. Default is Sniper
AttackType string `yaml:"attack,omitempty"`
// attackType is internal attack type
attackType generators.Type
// Path contains the path/s for the request variables
Payloads map[string]interface{} `yaml:"payloads,omitempty"`
// Method is the request method, whether GET, POST, PUT, etc
Method string `yaml:"method"`
// Path contains the path/s for the request
Path []string `yaml:"path"`
// Headers contains headers to send with the request
Headers map[string]string `yaml:"headers,omitempty"`
// Body is an optional parameter which contains the request body for POST methods, etc
Body string `yaml:"body,omitempty"`
// Matchers contains the detection mechanism for the request to identify
// whether the request was successful
Matchers []*matchers.Matcher `yaml:"matchers,omitempty"`
// MatchersCondition is the condition of the matchers
// whether to use AND or OR. Default is OR.
MatchersCondition string `yaml:"matchers-condition,omitempty"`
// matchersCondition is internal condition for the matchers.
matchersCondition matchers.ConditionType
// Extractors contains the extraction mechanism for the request to identify
// and extract parts of the response.
Extractors []*extractors.Extractor `yaml:"extractors,omitempty"`
// MaxRedirects is the maximum number of redirects that should be followed.
MaxRedirects int `yaml:"max-redirects,omitempty"`
// Raw contains raw requests
Raw []string `yaml:"raw,omitempty"`
// Pipeline defines if the attack should be performed with HTTP 1.1 Pipelining (race conditions/billions of requests)
// All requests must be idempotent (GET/POST)
Pipeline bool `yaml:"pipeline,omitempty"`
Pipeline bool `yaml:"pipeline,omitempty"`
PipelineConcurrentConnections int `yaml:"pipeline-concurrent-connections,omitempty"`
PipelineRequestsPerConnection int `yaml:"pipeline-requests-per-connection,omitempty"`
// Specify in order to skip request RFC normalization
Unsafe bool `yaml:"unsafe,omitempty"`
// DisableAutoHostname Enable/Disable Host header for unsafe raw requests
DisableAutoHostname bool `yaml:"disable-automatic-host-header,omitempty"`
// DisableAutoContentLength Enable/Disable Content-Length header for unsafe raw requests
DisableAutoContentLength bool `yaml:"disable-automatic-content-length-header,omitempty"`
// attackType is internal attack type
attackType generators.Type
// Path contains the path/s for the request variables
Payloads map[string]interface{} `yaml:"payloads,omitempty"`
// Headers contains headers to send with the request
Headers map[string]string `yaml:"headers,omitempty"`
// matchersCondition is internal condition for the matchers.
matchersCondition matchers.ConditionType
Threads int `yaml:"threads,omitempty"`
// Internal Finite State Machine keeping track of scan process
gsfm *GeneratorFSM
}
// GetMatchersCondition returns the condition for the matcher
@@ -102,7 +102,7 @@ func (r *BulkHTTPRequest) SetAttackType(attack generators.Type) {
// GetRequestCount returns the total number of requests the YAML rule will perform
func (r *BulkHTTPRequest) GetRequestCount() int64 {
return int64(len(r.Raw) | len(r.Path))
return int64(r.gsfm.Total())
}
// MakeHTTPRequest makes the HTTP request
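
Worth spelling out: the removed expression used | (bitwise OR), not +, so the old count was wrong whenever a template carried both raw and path requests; delegating to gsfm.Total() also folds payload combinations into the estimate (see the FSM change below). The arithmetic difference:

package main

import "fmt"

func main() {
	raw, path := 3, 5
	fmt.Println(raw | path) // 7: bitwise OR of 011 and 101 is 111
	fmt.Println(raw + path) // 8: the intended request count
}
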
@@ -247,31 +247,23 @@ func (r *BulkHTTPRequest) handleRawWithPaylods(ctx context.Context, raw, baseURL
}
func (r *BulkHTTPRequest) fillRequest(req *http.Request, values map[string]interface{}) (*retryablehttp.Request, error) {
// In case of multiple threads the underlying connection should remain open to allow reuse
if r.Threads <= 0 {
setHeader(req, "Connection", "close")
req.Close = true
}
replacer := newReplacer(values)
// Check if the user requested a request body
if r.Body != "" {
req.Body = ioutil.NopCloser(strings.NewReader(r.Body))
}
// Set the header values requested
for header, value := range r.Headers {
req.Header[header] = []string{replacer.Replace(value)}
}
// if the user specified a Connection header we don't alter it
if req.Header.Get("Connection") == "" {
// Otherwise we set it to "Connection: close" - The instruction is redundant, but it ensures that internally net/http doesn't miss the header/internal flag
// In case of multiple threads the underlying connection should remain open to allow reuse
if r.Threads <= 0 && req.Header.Get("Connection") == "" {
setHeader(req, "Connection", "close")
req.Close = true
}
// Check if the user requested a request body
if r.Body != "" {
req.Body = ioutil.NopCloser(strings.NewReader(r.Body))
}
setHeader(req, "User-Agent", "Nuclei - Open-source project (github.com/projectdiscovery/nuclei)")
// raw requests are left untouched
@@ -313,10 +305,13 @@ func setHeader(req *http.Request, name, value string) {
// the template port and path preference
func baseURLWithTemplatePrefs(data string, parsedURL *url.URL) string {
// template port preference over input URL port
// template has port
hasPort := len(urlWithPortRgx.FindStringSubmatch(data)) > 0
if hasPort {
hostname, _, _ := net.SplitHostPort(parsedURL.Host)
parsedURL.Host = hostname
// if the input URL also contains a port, extract just the hostname
if hostname, _, err := net.SplitHostPort(parsedURL.Host); err == nil {
parsedURL.Host = hostname
}
}
return parsedURL.String()
@@ -457,7 +452,7 @@ func (r *BulkHTTPRequest) Current(reqURL string) string {
// Total is the total number of requests
func (r *BulkHTTPRequest) Total() int {
return len(r.Path) + len(r.Raw)
return r.gsfm.Total()
}
// Increment increments the processed request

View File

@@ -41,6 +41,7 @@ func NewGeneratorFSM(typ generators.Type, payloads map[string]interface{}, paths
gsfm.payloads = payloads
gsfm.Paths = paths
gsfm.Raws = raws
gsfm.Type = typ
if len(gsfm.payloads) > 0 {
// load payloads if not already done
@@ -175,10 +176,6 @@ func (gfsm *GeneratorFSM) Next(key string) bool {
return false
}
if gfsm.hasPayloads() && g.state == done {
return false
}
if g.positionPath+g.positionRaw >= len(gfsm.Paths)+len(gfsm.Raws) {
return false
}
@@ -231,7 +228,30 @@ func (gfsm *GeneratorFSM) Current(key string) string {
return gfsm.Raws[g.positionRaw]
}
func (gfsm *GeneratorFSM) Total() int {
return len(gfsm.Paths) + len(gfsm.Raws)
estimatedRequestsWithPayload := 0
if len(gfsm.basePayloads) > 0 {
switch gfsm.Type {
case generators.Sniper:
for _, kv := range gfsm.basePayloads {
estimatedRequestsWithPayload += len(kv)
}
case generators.PitchFork:
// Positional so it's equal to the length of one list
for _, kv := range gfsm.basePayloads {
estimatedRequestsWithPayload += len(kv)
break
}
case generators.ClusterBomb:
// Total of combinations => rule of product
prod := 1
for _, kv := range gfsm.basePayloads {
prod = prod * len(kv)
}
estimatedRequestsWithPayload += prod
}
}
return len(gfsm.Paths) + len(gfsm.Raws) + estimatedRequestsWithPayload
}
func (gfsm *GeneratorFSM) Increment(key string) {
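
A worked example of the three estimates, assuming two payload lists user (3 values) and pass (4 values): Sniper fires each value alone, so it sums the lists (3 + 4 = 7); PitchFork pairs lists positionally, so it takes the length of a single list (the break after the first map entry means the pick depends on Go's randomized map order when lists differ in length); ClusterBomb tries every combination — the rule of product (3 × 4 = 12). In sketch form:

package main

import "fmt"

func main() {
	basePayloads := map[string][]string{
		"user": {"admin", "guest", "root"},
		"pass": {"admin", "guest", "root", "toor"},
	}
	sniper, clusterBomb := 0, 1
	for _, kv := range basePayloads {
		sniper += len(kv)      // 7: each value fired on its own
		clusterBomb *= len(kv) // 12: rule of product over all lists
	}
	fmt.Println(sniper, clusterBomb) // 7 12
}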

View File

@@ -8,10 +8,17 @@ import (
"strings"
)
const (
MARKER_PARENTHESIS_OPEN = "{{"
MARKER_PARENTHESIS_CLOSE = "}}"
MARKER_GENERAL = "§"
)
func newReplacer(values map[string]interface{}) *strings.Replacer {
var replacerItems []string
for k, v := range values {
replacerItems = append(replacerItems, fmt.Sprintf("{{%s}}", k), fmt.Sprintf("%s", v), k, fmt.Sprintf("%s", v))
replacerItems = append(replacerItems, fmt.Sprintf("%s%s%s", MARKER_PARENTHESIS_OPEN, k, MARKER_PARENTHESIS_CLOSE), fmt.Sprintf("%s", v))
replacerItems = append(replacerItems, fmt.Sprintf("%s%s%s", MARKER_GENERAL, k, MARKER_GENERAL), fmt.Sprintf("%s", v))
}
return strings.NewReplacer(replacerItems...)
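
The rewrite also drops the old bare-key pair (k, v), so a plain occurrence of a variable name is no longer substituted; only the {{key}} and §key§ forms are. A runnable demonstration with a hypothetical values map:

package main

import (
	"fmt"
	"strings"
)

func main() {
	values := map[string]interface{}{"port": "8080"}
	var items []string
	for k, v := range values {
		// same pairs the diff builds: {{key}} -> value and §key§ -> value
		items = append(items, fmt.Sprintf("{{%s}}", k), fmt.Sprintf("%s", v))
		items = append(items, fmt.Sprintf("§%s§", k), fmt.Sprintf("%s", v))
	}
	r := strings.NewReplacer(items...)
	fmt.Println(r.Replace("{{BaseURL}}:{{port}}/health on port §port§"))
	// {{BaseURL}}:8080/health on port 8080 -- unknown keys are left untouched
}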