mirror of https://github.com/daffainfo/nuclei.git
Bug fix with url in port
parent f1ed74328a
commit 27391a4b76
@@ -81,7 +81,7 @@ func (r *requestGenerator) Total() int {
 func baseURLWithTemplatePrefs(data string, parsedURL *url.URL) string {
 	// template port preference over input URL port
 	// template has port
-	if strings.Contains(data, ":") {
+	if urlWithPortRegex.MatchString(data) {
 		if _, port, err := net.SplitHostPort(data); err == nil {
 			parsedURL.Host = net.JoinHostPort(parsedURL.Hostname(), port)
 		}
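For context on the fix above: strings.Contains(data, ":") treats any colon in the template URL (for example one inside a query string) as a port marker, while a port-specific pattern only matches an explicit ":port" after the base URL. The sketch below illustrates the difference; the regex used here is only an assumed stand-in for urlWithPortRegex, whose definition is not part of this diff, and the host names are placeholders.

package main

import (
	"fmt"
	"net"
	"net/url"
	"regexp"
	"strings"
)

// Assumed stand-in for nuclei's urlWithPortRegex; the real pattern may differ.
var urlWithPortRegex = regexp.MustCompile(`{{BaseURL}}:(\d+)`)

func main() {
	inputs := []string{
		"{{BaseURL}}:8443/admin",                  // explicit port in the template
		"{{BaseURL}}/redirect?to=https://example", // colon only inside the query
	}
	for _, data := range inputs {
		fmt.Printf("%-45q colon=%-5v portRegex=%v\n",
			data, strings.Contains(data, ":"), urlWithPortRegex.MatchString(data))
	}

	// Only an explicit template port should override the input URL port.
	parsedURL, _ := url.Parse("https://scanme.example:443/")
	if m := urlWithPortRegex.FindStringSubmatch(inputs[0]); m != nil {
		parsedURL.Host = net.JoinHostPort(parsedURL.Hostname(), m[1])
	}
	fmt.Println("host after template preference:", parsedURL.Host) // scanme.example:8443
}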
@@ -133,14 +133,14 @@ func TestHTTPOperatorExtract(t *testing.T) {

 	resp := &http.Response{}
 	resp.Header = make(http.Header)
-	resp.Header.Set("Test", "Test-Response")
+	resp.Header.Set("Test-Header", "Test-Response")
 	host := "http://example.com/test/"
 	matched := "http://example.com/test/?test=1"

 	event := request.responseToDSLMap(resp, host, matched, exampleRawRequest, exampleRawResponse, exampleResponseBody, exampleResponseHeader, 1*time.Second, map[string]interface{}{})
 	require.Len(t, event, 12, "could not get correct number of items in dsl map")
 	require.Equal(t, exampleRawResponse, event["response"], "could not get correct resp")
-	require.Equal(t, "Test-Response", event["test"], "could not get correct resp for header")
+	require.Equal(t, "Test-Response", event["test-header"], "could not get correct resp for header")

 	t.Run("extract", func(t *testing.T) {
 		extractor := &extractors.Extractor{
@@ -159,7 +159,7 @@ func TestHTTPOperatorExtract(t *testing.T) {
 	t.Run("kval", func(t *testing.T) {
 		extractor := &extractors.Extractor{
 			Type: "kval",
-			KVal: []string{"test"},
+			KVal: []string{"test-header"},
 		}
 		err = extractor.CompileExtractors()
 		require.Nil(t, err, "could not compile kval extractor")
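The test changes above rename the header to Test-Header and look it up as event["test-header"], so the kval extractor key is simply the lowercased header name. Below is a minimal sketch of that lookup flow; flattenHeaders and kvalExtract are simplified stand-ins written for illustration, not nuclei's actual responseToDSLMap or extractor implementation.

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// flattenHeaders lowercases header names into a DSL-style map, mirroring how the
// test expects "Test-Header" to show up as event["test-header"].
func flattenHeaders(h http.Header) map[string]interface{} {
	event := make(map[string]interface{})
	for name, values := range h {
		event[strings.ToLower(name)] = strings.Join(values, "; ")
	}
	return event
}

// kvalExtract looks up the requested keys, roughly what a kval extractor does.
func kvalExtract(event map[string]interface{}, keys []string) []string {
	var out []string
	for _, key := range keys {
		if value, ok := event[key]; ok {
			out = append(out, fmt.Sprint(value))
		}
	}
	return out
}

func main() {
	header := make(http.Header)
	header.Set("Test-Header", "Test-Response")
	event := flattenHeaders(header)
	fmt.Println(kvalExtract(event, []string{"test-header"})) // [Test-Response]
}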
@@ -216,17 +216,14 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, dynam
 	r.setCustomHeaders(request)

 	var (
-		resp          *http.Response
-		err           error
-		dumpedRequest []byte
-		fromcache     bool
+		resp      *http.Response
+		fromcache bool
 	)
-	if r.options.Options.Debug || r.options.ProjectFile != nil || r.options.Options.DebugRequests {
-		dumpedRequest, err = dump(request, reqURL)
-		if err != nil {
-			return err
-		}
+	dumpedRequest, err := dump(request, reqURL)
+	if err != nil {
+		return err
+	}

 	if r.options.Options.Debug || r.options.Options.DebugRequests {
 		gologger.Info().Msgf("[%s] Dumped HTTP request for %s\n\n", r.options.TemplateID, reqURL)
 		gologger.Print().Msgf("%s", string(dumpedRequest))
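In the hunk above, dump(request, reqURL) is a helper whose implementation is not shown in this diff. For a standalone illustration of what dumping an outgoing request looks like, the sketch below uses net/http/httputil.DumpRequestOut, which is assumed here to be roughly what such a helper does for plain (non-raw) requests; the URL and header are placeholders.

package main

import (
	"fmt"
	"net/http"
	"net/http/httputil"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "https://example.com/test/", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Example", "nuclei-debug")

	// Render the request as it would be written to the wire (request line,
	// headers, body), similar in spirit to the debug output printed above.
	dumped, err := httputil.DumpRequestOut(req, true)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s", dumped)
}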
@@ -278,18 +275,25 @@ func (r *Request) executeRequest(reqURL string, request *generatedRequest, dynam
 		r.options.Progress.DecrementRequests(1)
 		return err
 	}

+	// redirectChain := &strings.Builder{}
+	// redirectReq := resp.Request.Response
+	// for redirectResp != nil {
+	// 	dumpedRequest, err = dump(redirectResp, reqURL)
+	// 	if err != nil {
+	// 		return err
+	// 	}
+	// 	redirectReq = redirectReq.Response.Request
+	// }
+
 	gologger.Verbose().Msgf("[%s] Sent HTTP request to %s", r.options.TemplateID, formedURL)
 	r.options.Output.Request(r.options.TemplateID, reqURL, "http", err)

 	duration := time.Since(timeStart)
-	// Dump response - Step 1 - Decompression not yet handled
-	var dumpedResponse []byte
-	if r.options.Options.Debug || r.options.Options.DebugResponse {
-		var dumpErr error
-		dumpedResponse, dumpErr = httputil.DumpResponse(resp, true)
-		if dumpErr != nil {
-			return errors.Wrap(dumpErr, "could not dump http response")
-		}
+
+	dumpedResponse, err := httputil.DumpResponse(resp, true)
+	if err != nil {
+		return errors.Wrap(err, "could not dump http response")
+	}

 	var bodyReader io.Reader
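The commented-out block above sketches walking the redirect chain. In net/http, once the client has followed redirects, each resp.Request.Response points at the response that triggered that request, so the chain can be recovered by walking that field. The snippet below is a minimal, self-contained version of that idea combined with the httputil.DumpResponse call used in the hunk; the URL is only a placeholder.

package main

import (
	"fmt"
	"net/http"
	"net/http/httputil"
)

func main() {
	resp, err := http.Get("https://example.com/") // placeholder URL
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// After the client follows redirects, each resp.Request.Response is the
	// response that caused that request, so walking it recovers the chain.
	for prev := resp.Request.Response; prev != nil; prev = prev.Request.Response {
		fmt.Println("redirected via:", prev.Request.URL, "->", prev.StatusCode)
	}

	// Dump the final response (headers and body), as in the hunk above.
	dumped, err := httputil.DumpResponse(resp, true)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s", dumped)
}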