Close the response bodies

master
Ice3man543 2019-12-06 17:27:26 +05:30
parent 2cb3074e9c
commit f311374d7e
22 changed files with 33 additions and 1 deletion

View File

@ -37,6 +37,7 @@ func (a *ArchiveIs) enumerate(ctx context.Context, baseURL string) {
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
resp.Body.Close()
close(a.Results)
return
}

View File

@ -37,6 +37,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -82,6 +83,7 @@ func (s *Source) getSubdomains(ctx context.Context, domain string, remaining, cu
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
return false
}
resp.Body.Close()

View File

@ -37,6 +37,7 @@ func (s *Source) getData(URL string, session *subscraping.Session, results chan
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
return
}
resp.Body.Close()

View File

@ -64,6 +64,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -37,6 +37,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -63,6 +64,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -26,6 +26,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -4,6 +4,7 @@ import (
"context"
"errors"
"fmt"
"io"
"io/ioutil"
"strings"
@ -37,6 +38,8 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
if resp.StatusCode == 500 {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("internal server error")}
io.Copy(ioutil.Discard, resp.Body)
resp.Body.Close()
close(results)
return
}
@ -45,6 +48,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -94,6 +98,7 @@ func (s *Source) getSubdomains(ctx context.Context, url string, domain string, s
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
return false
}
resp.Body.Close()

View File

@ -26,6 +26,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -26,6 +26,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -62,7 +62,7 @@ func postForm(token, domain string) (string, error) {
// Now, grab the entire page
in, err := ioutil.ReadAll(resp.Body)
resp.Body.Close()
return string(in), nil
return string(in), err
}
// Source is the passive scraping agent
@ -83,6 +83,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -27,6 +27,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -38,6 +38,7 @@ func (a *agent) makeRequest(token string, domain string) (string, error) {
}
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
resp.Body.Close()
return "", err
}
resp.Body.Close()

View File

@ -27,6 +27,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -28,6 +28,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -54,6 +55,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err = ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -79,6 +81,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err = ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -104,6 +107,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err = ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}
@ -153,6 +157,7 @@ func (s *Source) getSubdomains(ctx context.Context, domain string, nextPage *int
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
return false
}
resp.Body.Close()

View File

@ -50,6 +50,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&data)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -37,6 +37,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -43,6 +43,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&response)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -34,6 +34,7 @@ func (a *agent) enumerate(ctx context.Context, baseURL string) error {
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
resp.Body.Close()
close(a.results)
return err
}

View File

@ -27,6 +27,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -27,6 +27,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -36,6 +36,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
err = jsoniter.NewDecoder(resp.Body).Decode(&data)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
resp.Body.Close()
close(results)
return
}

View File

@ -27,6 +27,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
body, err := ioutil.ReadAll(pagesResp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
pagesResp.Body.Close()
close(results)
return
}