feat: nuclei result upload (#4343)

* feat: add pdcp api key support

* add '-auth' cli option

* fix creds test

* results auto upload to pdcp

* fix upload on empty file

* dashboard env + fix test

* purge old cloud cli options

* misc updates in runner package

* fix headless integration test

* misc update

* add disable cloud upload env

---------

Co-authored-by: sandeep <8293321+ehsandeep@users.noreply.github.com>
dev
Tarun Koyalwar 2023-11-18 16:25:37 +05:30 committed by GitHub
parent dbba3074d8
commit 87aeb57b0d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 768 additions and 1825 deletions

View File

@ -174,6 +174,7 @@ func execute(testCase testutils.TestCase, templatePath string) (string, error) {
}
func expectResultsCount(results []string, expectedNumbers ...int) error {
results = filterHeadlessLogs(results)
match := sliceutil.Contains(expectedNumbers, len(results))
if !match {
return fmt.Errorf("incorrect number of results: %d (actual) vs %v (expected) \nResults:\n\t%s\n", len(results), expectedNumbers, strings.Join(results, "\n\t")) // nolint:all
@ -186,3 +187,17 @@ func normalizeSplit(str string) []string {
return r == ','
})
}
// if chromium is not installed go-rod installs it in .cache directory
// this function filters out the logs from download and installation
func filterHeadlessLogs(results []string) []string {
	// example of a dropped line:
	// [launcher.Browser] 2021/09/23 15:24:05 [launcher] [info] Starting browser
	filtered := make([]string, 0, len(results)) // pre-size: usually nothing is filtered out
	for _, result := range results {
		if strings.Contains(result, "[launcher.Browser]") {
			// go-rod launcher/download noise, not a scan result
			continue
		}
		filtered = append(filtered, result)
	}
	return filtered
}

View File

@ -164,6 +164,7 @@ func readConfig() *goflags.FlagSet {
// when true updates nuclei binary to latest version
var updateNucleiBinary bool
var pdcpauth bool
flagSet := goflags.NewFlagSet()
flagSet.CaseSensitive = true
@ -365,27 +366,8 @@ on extensive configurability, massive extensibility and ease of use.`)
)
flagSet.CreateGroup("cloud", "Cloud",
flagSet.BoolVar(&options.Cloud, "cloud", false, "run scan on nuclei cloud"),
flagSet.StringVarP(&options.AddDatasource, "add-datasource", "ads", "", "add specified data source (s3,github)"),
flagSet.StringVarP(&options.AddTarget, "add-target", "atr", "", "add target(s) to cloud"),
flagSet.StringVarP(&options.AddTemplate, "add-template", "atm", "", "add template(s) to cloud"),
flagSet.BoolVarP(&options.ScanList, "list-scan", "lsn", false, "list previous cloud scans"),
flagSet.StringVarP(&options.ScanOutput, "list-output", "lso", "", "list scan output by scan id"),
flagSet.BoolVarP(&options.ListTargets, "list-target", "ltr", false, "list cloud target by id"),
flagSet.BoolVarP(&options.ListTemplates, "list-template", "ltm", false, "list cloud template by id"),
flagSet.BoolVarP(&options.ListDatasources, "list-datasource", "lds", false, "list cloud datasource by id"),
flagSet.BoolVarP(&options.ListReportingSources, "list-reportsource", "lrs", false, "list reporting sources"),
flagSet.StringVarP(&options.DeleteScan, "delete-scan", "dsn", "", "delete cloud scan by id"),
flagSet.StringVarP(&options.RemoveTarget, "delete-target", "dtr", "", "delete target(s) from cloud"),
flagSet.StringVarP(&options.RemoveTemplate, "delete-template", "dtm", "", "delete template(s) from cloud"),
flagSet.StringVarP(&options.RemoveDatasource, "delete-datasource", "dds", "", "delete specified data source"),
flagSet.StringVarP(&options.DisableReportingSource, "disable-reportsource", "drs", "", "disable specified reporting source"),
flagSet.StringVarP(&options.EnableReportingSource, "enable-reportsource", "ers", "", "enable specified reporting source"),
flagSet.StringVarP(&options.GetTarget, "get-target", "gtr", "", "get target content by id"),
flagSet.StringVarP(&options.GetTemplate, "get-template", "gtm", "", "get template content by id"),
flagSet.BoolVarP(&options.NoStore, "no-store", "nos", false, "disable scan/output storage on cloud"),
flagSet.BoolVar(&options.NoTables, "no-tables", false, "do not display pretty-printed tables"),
flagSet.IntVar(&options.OutputLimit, "limit", 100, "limit the number of output to display"),
flagSet.BoolVar(&pdcpauth, "auth", false, "configure projectdiscovery cloud (pdcp) api key"),
flagSet.BoolVarP(&options.DisableCloudUpload, "disable-cloud-upload", "dcu", false, "disable uploading scan results to pdcp"),
)
flagSet.SetCustomHelpText(`EXAMPLES:
@ -414,6 +396,10 @@ Additional documentation is available at: https://docs.nuclei.sh/getting-started
goflags.DisableAutoConfigMigration = true
_ = flagSet.Parse()
if pdcpauth {
runner.AuthWithPDCP()
}
gologger.DefaultLogger.SetTimestamp(options.Timestamp, levels.LevelDebug)
if options.VerboseVerbose {

2
go.mod
View File

@ -71,7 +71,6 @@ require (
github.com/go-sql-driver/mysql v1.6.0
github.com/h2non/filetype v1.1.3
github.com/hirochachacha/go-smb2 v1.1.0
github.com/klauspost/compress v1.16.7
github.com/labstack/echo/v4 v4.10.2
github.com/lib/pq v1.10.1
github.com/mholt/archiver v3.1.1+incompatible
@ -163,6 +162,7 @@ require (
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/kataras/jwt v0.1.10 // indirect
github.com/klauspost/compress v1.16.7 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mackerelio/go-osstat v0.2.4 // indirect

76
internal/pdcp/auth.go Normal file
View File

@ -0,0 +1,76 @@
// pdcp contains projectdiscovery cloud platform (PDCP) related features
// such as result upload, dashboard integration, etc.
package pdcp
import (
"fmt"
"os"
"strings"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/utils/env"
"golang.org/x/term"
)
var (
DashBoardURL = "https://cloud.projectdiscovery.io"
DefaultApiServer = "https://api.projectdiscovery.io"
)
// CheckNValidateCredentials checks if credentials exist on filesystem
// if not waits for user to enter credentials and validates them
// and saves them to filesystem
// when validate is true any existing credentials are validated
// Note: this is meant to be used in cli only (interactive mode)
//
// On success (existing valid creds, or a freshly validated key) the process
// terminates via os.Exit(0); an invalid fresh key terminates via gologger.Fatal.
func CheckNValidateCredentials(toolName string) {
	h := &PDCPCredHandler{}
	creds, err := h.GetCreds()
	if err == nil {
		// validate by fetching user profile
		// NOTE(review): this path passes config.BinaryName while the fresh-key
		// path below passes the toolName parameter — likely equivalent in
		// practice, but confirm the intent
		gotCreds, err := h.ValidateAPIKey(creds.APIKey, creds.Server, config.BinaryName)
		if err == nil {
			gologger.Info().Msgf("You are logged in as (@%v)", gotCreds.Username)
			os.Exit(0)
		}
		gologger.Error().Msgf("Invalid API key found in file, please recheck or recreate your API key and retry.")
	}
	if err != nil && err != ErrNoCreds {
		// this is unexpected error log it
		gologger.Error().Msgf("Could not read credentials from file: %s\n", err)
	}
	// if we are here, we need to get credentials from user
	gologger.Info().Msgf("Get your free api key by signing up at %v", DashBoardURL)
	fmt.Printf("[*] Enter PDCP API Key (exit to abort): ")
	// read without echo so the key is not leaked to the terminal/scrollback
	bin, err := term.ReadPassword(int(os.Stdin.Fd()))
	if err != nil {
		gologger.Fatal().Msgf("Could not read input from terminal: %s\n", err)
	}
	apiKey := string(bin)
	if strings.EqualFold(apiKey, "exit") {
		os.Exit(0)
	}
	fmt.Println()
	// if env variable is set use that for validating api key
	apiServer := env.GetEnvOrDefault(apiServerEnv, DefaultApiServer)
	// validate by fetching user profile
	validatedCreds, err := h.ValidateAPIKey(apiKey, apiServer, toolName)
	if err == nil {
		gologger.Info().Msgf("Successfully logged in as (@%v)", validatedCreds.Username)
		if saveErr := h.SaveCreds(validatedCreds); saveErr != nil {
			// non-fatal: the key is valid even if persisting it failed
			gologger.Warning().Msgf("Could not save credentials to file: %s\n", saveErr)
		}
		os.Exit(0)
	}
	gologger.Error().Msgf("Invalid API key '%v' got error: %v", maskKey(apiKey), err)
	gologger.Fatal().Msgf("please recheck or recreate your API key and retry")
}
// maskKey redacts an api key for logging: the first three characters are
// kept visible and the remainder is replaced with '*'. Keys shorter than
// six characters are returned untouched (they cannot be valid keys).
func maskKey(key string) string {
	const visible = 3
	if len(key) < 6 {
		// this is invalid key
		return key
	}
	stars := strings.Repeat("*", len(key)-visible)
	return fmt.Sprintf("%s%s", key[:visible], stars)
}

139
internal/pdcp/creds.go Normal file
View File

@ -0,0 +1,139 @@
package pdcp
import (
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"github.com/projectdiscovery/retryablehttp-go"
"github.com/projectdiscovery/utils/env"
fileutil "github.com/projectdiscovery/utils/file"
folderutil "github.com/projectdiscovery/utils/folder"
urlutil "github.com/projectdiscovery/utils/url"
"gopkg.in/yaml.v3"
)
var (
	// PDCPDir is the directory (~/.pdcp) where pdcp credentials are stored
	PDCPDir = filepath.Join(folderutil.HomeDirOrDefault(""), ".pdcp")
	// PDCPCredFile is the on-disk yaml credentials file
	PDCPCredFile = filepath.Join(PDCPDir, "credentials.yaml")
	// ErrNoCreds is returned when no credentials exist on disk or in env
	ErrNoCreds = fmt.Errorf("no credentials found in %s", PDCPDir)
)

const (
	// userProfileURL is the profile endpoint template: host, then tool name as utm_source
	userProfileURL = "https://%s/v1/template/user/profile?utm_source=%s"
	// environment variable names used to provide/override credentials
	apiKeyEnv    = "PDCP_API_KEY"
	apiServerEnv = "PDCP_API_SERVER"
	// ApiKeyHeaderName is the header carrying the api key to pdcp servers
	ApiKeyHeaderName = "X-Api-Key"
	dashBoardEnv     = "PDCP_DASHBOARD_URL"
)

// PDCPCredentials holds a validated set of pdcp credentials
type PDCPCredentials struct {
	Username string `yaml:"username"` // pdcp username (informational)
	APIKey   string `yaml:"api-key"`  // api key used for authentication
	Server   string `yaml:"server"`   // api server the key was validated against
}

// PDCPUserProfileResponse is the (partial) response of the user profile endpoint
type PDCPUserProfileResponse struct {
	UserName string `json:"name"`
	// there are more fields but we don't need them
	// below fields are added later on and not part of the response
}

// PDCPCredHandler is interface for adding / retrieving pdcp credentials
// from file system
type PDCPCredHandler struct{}
// GetCreds retrieves the credentials from the file system or environment variables.
// Environment variables take precedence over the on-disk file; ErrNoCreds is
// returned when neither source is configured.
func (p *PDCPCredHandler) GetCreds() (*PDCPCredentials, error) {
	credsFromEnv := p.getCredsFromEnv()
	if credsFromEnv != nil {
		return credsFromEnv, nil
	}
	if !fileutil.FolderExists(PDCPDir) || !fileutil.FileExists(PDCPCredFile) {
		return nil, ErrNoCreds
	}
	bin, err := os.Open(PDCPCredFile)
	if err != nil {
		return nil, err
	}
	// fix: the file handle was previously never closed (fd leak)
	defer bin.Close()
	// the file stores a list of credentials for future use-cases;
	// only the first entry is used today
	var creds []PDCPCredentials
	err = yaml.NewDecoder(bin).Decode(&creds)
	if err != nil {
		return nil, err
	}
	if len(creds) == 0 {
		return nil, ErrNoCreds
	}
	return &creds[0], nil
}
// getCredsFromEnv retrieves the credentials from the environment.
// It returns nil when either variable is unset or empty, so callers
// fall back to the on-disk credentials file.
func (p *PDCPCredHandler) getCredsFromEnv() *PDCPCredentials {
	key := env.GetEnvOrDefault(apiKeyEnv, "")
	server := env.GetEnvOrDefault(apiServerEnv, "")
	if key != "" && server != "" {
		return &PDCPCredentials{APIKey: key, Server: server}
	}
	// missing or incomplete credentials
	return nil
}
// SaveCreds saves the credentials to the file system as a yaml list
// (a list for future multi-profile support).
func (p *PDCPCredHandler) SaveCreds(resp *PDCPCredentials) error {
	if resp == nil {
		return fmt.Errorf("invalid response")
	}
	if !fileutil.FolderExists(PDCPDir) {
		// fix: surface folder-creation failures instead of silently continuing
		// into a more confusing WriteFile error
		if err := fileutil.CreateFolder(PDCPDir); err != nil {
			return err
		}
	}
	bin, err := yaml.Marshal([]*PDCPCredentials{resp})
	if err != nil {
		return err
	}
	// 0600: the credentials file must be readable by the owner only
	return os.WriteFile(PDCPCredFile, bin, 0600)
}
// ValidateAPIKey validates the api key against the given api server/host by
// fetching the user profile, and returns the associated credentials
// (username, key, server) on success.
func (p *PDCPCredHandler) ValidateAPIKey(key string, host string, toolName string) (*PDCPCredentials, error) {
	// only the host portion of the configured server url is used
	parsed, err := urlutil.Parse(host)
	if err != nil {
		return nil, err
	}
	endpoint := fmt.Sprintf(userProfileURL, parsed.Host, toolName)
	req, err := retryablehttp.NewRequest("GET", endpoint, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set(ApiKeyHeaderName, key)
	resp, err := retryablehttp.DefaultHTTPClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// drain the body so the underlying connection can be reused
		_, _ = io.Copy(io.Discard, resp.Body)
		return nil, fmt.Errorf("invalid status code: %d", resp.StatusCode)
	}
	bin, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	var profile PDCPUserProfileResponse
	if err := json.Unmarshal(bin, &profile); err != nil {
		return nil, err
	}
	if profile.UserName == "" {
		return nil, fmt.Errorf("invalid response from server got %v", string(bin))
	}
	return &PDCPCredentials{Username: profile.UserName, APIKey: key, Server: host}, nil
}
// init allows overriding the dashboard url via the PDCP_DASHBOARD_URL
// environment variable. Uses the dashBoardEnv constant declared above
// instead of repeating the literal, so the two cannot drift apart.
func init() {
	DashBoardURL = env.GetEnvOrDefault(dashBoardEnv, DashBoardURL)
}

View File

@ -0,0 +1,33 @@
package pdcp
import (
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
var exampleCred = `
- username: test
api-key: testpassword
server: https://scanme.sh
`
// TestLoadCreds verifies that credentials round-trip correctly from a
// yaml credentials file on disk via PDCPCredHandler.GetCreds.
func TestLoadCreds(t *testing.T) {
	// temporarily change PDCP file location for testing
	f, err := os.CreateTemp("", "creds-test-*")
	require.Nil(t, err)
	_, _ = f.WriteString(strings.TrimSpace(exampleCred))
	// fix: close the handle so the write is flushed and the fd is not leaked
	require.Nil(t, f.Close())
	defer os.Remove(f.Name())
	// fix: restore the package-level paths afterwards so this test does not
	// leak state into other tests in the package
	oldFile, oldDir := PDCPCredFile, PDCPDir
	defer func() {
		PDCPCredFile = oldFile
		PDCPDir = oldDir
	}()
	PDCPCredFile = f.Name()
	PDCPDir = filepath.Dir(f.Name())
	h := &PDCPCredHandler{}
	value, err := h.GetCreds()
	require.Nil(t, err)
	require.NotNil(t, value)
	require.Equal(t, "test", value.Username)
	require.Equal(t, "testpassword", value.APIKey)
	require.Equal(t, "https://scanme.sh", value.Server)
}

157
internal/pdcp/writer.go Normal file
View File

@ -0,0 +1,157 @@
package pdcp
import (
"bufio"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"sync/atomic"
"time"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/retryablehttp-go"
errorutil "github.com/projectdiscovery/utils/errors"
fileutil "github.com/projectdiscovery/utils/file"
folderutil "github.com/projectdiscovery/utils/folder"
urlutil "github.com/projectdiscovery/utils/url"
)
const (
	// uploadEndpoint is the pdcp scan-import api path
	uploadEndpoint = "/v1/scans/import"
)

// compile-time check that UploadWriter satisfies output.Writer
var _ output.Writer = &UploadWriter{}

// UploadWriter is a writer that uploads its output to pdcp
// server to enable web dashboard and more
type UploadWriter struct {
	*output.StandardWriter          // results are first written as jsonl into tempFile
	creds     *PDCPCredentials      // validated pdcp credentials used for auth
	tempFile  *os.File              // scratch file holding results until upload
	done      atomic.Bool           // set once Upload ran; guards double upload on Close
	uploadURL *url.URL              // fully resolved upload endpoint
}
// NewUploadWriter creates a new upload writer.
// Results are buffered into a temporary jsonl file inside the app cache
// directory and uploaded in one shot via Upload()/Close().
func NewUploadWriter(creds *PDCPCredentials) (*UploadWriter, error) {
	if creds == nil {
		return nil, fmt.Errorf("no credentials provided")
	}
	u := &UploadWriter{creds: creds}
	// create a temporary file in cache directory
	cacheDir := folderutil.AppCacheDirOrDefault("", config.BinaryName)
	if !fileutil.FolderExists(cacheDir) {
		_ = fileutil.CreateFolder(cacheDir)
	}
	var err error
	// tempfile is created in nuclei-results-<unix-timestamp>.json format
	u.tempFile, err = os.OpenFile(filepath.Join(cacheDir, "nuclei-results-"+strconv.Itoa(int(time.Now().Unix()))+".json"), os.O_RDWR|os.O_CREATE, 0600)
	if err != nil {
		return nil, errorutil.NewWithErr(err).Msgf("could not create temporary file")
	}
	// fix: on any later failure do not leak the fd or leave the scratch
	// file behind (previously both were leaked)
	cleanup := func() {
		_ = u.tempFile.Close()
		_ = os.Remove(u.tempFile.Name())
	}
	u.StandardWriter, err = output.NewWriter(
		output.WithWriter(u.tempFile),
		output.WithJson(true, false),
	)
	if err != nil {
		cleanup()
		return nil, errorutil.NewWithErr(err).Msgf("could not create output writer")
	}
	tmp, err := urlutil.Parse(creds.Server)
	if err != nil {
		cleanup()
		return nil, errorutil.NewWithErr(err).Msgf("could not parse server url")
	}
	tmp.Path = uploadEndpoint
	tmp.Update()
	u.uploadURL = tmp.URL
	return u, nil
}
// uploadResponse is the response returned by the scan import endpoint
type uploadResponse struct {
	ID      string `json:"id"`      // id of the created scan; used to build the dashboard url
	Message string `json:"message"` // server-provided message (currently unused here)
}
// Upload uploads the results to pdcp server.
// The upload is skipped (with a verbose log) when the buffered results
// file contains no non-blank line.
func (u *UploadWriter) Upload() {
	defer u.done.Store(true)
	// start from beginning
	_, _ = u.tempFile.Seek(0, 0)
	// fix: the previous check consumed two scanner lines and skipped the
	// upload whenever the *second* line was blank, even when the first line
	// held a result; instead, look for any non-blank line
	hasResults := false
	scanner := bufio.NewScanner(u.tempFile)
	for scanner.Scan() {
		if strings.TrimSpace(scanner.Text()) != "" {
			hasResults = true
			break
		}
	}
	if !hasResults {
		gologger.Verbose().Msgf("Scan results upload to cloud skipped, no results found to upload")
		return
	}
	_, _ = u.tempFile.Seek(0, 0)
	id, err := u.upload()
	if err != nil {
		gologger.Error().Msgf("Failed to upload scan results on cloud: %v", err)
		return
	}
	gologger.Info().Msgf("Scan results uploaded! View them at %v", getScanDashBoardURL(id))
}
// upload POSTs the buffered jsonl results file to the pdcp import endpoint
// and returns the id of the created scan.
func (u *UploadWriter) upload() (string, error) {
	req, err := retryablehttp.NewRequest(http.MethodPost, u.uploadURL.String(), u.tempFile)
	if err != nil {
		return "", errorutil.NewWithErr(err).Msgf("could not create cloud upload request")
	}
	req.Header.Set(ApiKeyHeaderName, u.creds.APIKey)
	req.Header.Set("Content-Type", "application/octet-stream")
	req.Header.Set("Accept", "application/json")

	// we are uploading nuclei results which can be large
	// server has a size limit of ~20ish MB
	opts := retryablehttp.DefaultOptionsSingle
	opts.Timeout = time.Duration(3) * time.Minute
	client := retryablehttp.NewClient(opts)

	resp, err := client.Do(req)
	if err != nil {
		return "", errorutil.NewWithErr(err).Msgf("could not upload results")
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", errorutil.NewWithErr(err).Msgf("could not get id from response")
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("could not upload results got status code %v", resp.StatusCode)
	}
	var result uploadResponse
	if err := json.Unmarshal(body, &result); err != nil {
		return "", errorutil.NewWithErr(err).Msgf("could not unmarshal response got %v", string(body))
	}
	// the scratch file is only removed after a successful upload
	u.removeTempFile()
	return result.ID, nil
}
// removeTempFile removes the temporary file
// (best effort; only called after the results were uploaded successfully)
func (u *UploadWriter) removeTempFile() {
	_ = os.Remove(u.tempFile.Name())
}
// Close closes the upload writer.
// It triggers the upload if it has not already happened and then releases
// the temporary file handle (previously the fd was never closed).
func (u *UploadWriter) Close() {
	if !u.done.Load() {
		u.Upload()
	}
	// release the fd; the file itself is removed by upload() on success
	_ = u.tempFile.Close()
}
// getScanDashBoardURL returns the dashboard permalink for the given scan id.
func getScanDashBoardURL(id string) string {
	// DashBoardURL is package-controlled; parsing it cannot realistically fail
	dash, _ := urlutil.Parse(DashBoardURL)
	dash.Path = "/scans/" + id
	dash.Update()
	return dash.String()
}

View File

@ -1,9 +1,11 @@
// Package runner executes the enumeration process.
package runner
import (
"fmt"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
updateutils "github.com/projectdiscovery/utils/update"
)
@ -25,5 +27,11 @@ func showBanner() {
// NucleiToolUpdateCallback updates nuclei binary/tool to latest version
func NucleiToolUpdateCallback() {
showBanner()
updateutils.GetUpdateToolCallback("nuclei", config.Version)()
updateutils.GetUpdateToolCallback(config.BinaryName, config.Version)()
}
// AuthWithPDCP is used to authenticate with PDCP
// it prints the banner, then runs the interactive credential check/validate
// flow which terminates the process when finished
func AuthWithPDCP() {
	showBanner()
	pdcp.CheckNValidateCredentials(config.BinaryName)
}

View File

@ -1,438 +0,0 @@
package runner
import (
"io"
"io/fs"
"os"
"path"
"path/filepath"
"strconv"
"strings"
jsoniter "github.com/json-iterator/go"
"github.com/olekukonko/tablewriter"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/runner/nucleicloud"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/extensions"
)
// Get all the scan lists for a user/apikey.
func (r *Runner) getScanList(limit int) error {
	// "far future" sentinel: the api returns scans created before lastTime,
	// so pagination walks from the newest scan backwards
	lastTime := "2099-01-02 15:04:05 +0000 UTC"
	header := []string{"ID", "Timestamp", "Targets", "Templates", "Matched", "Duration", "Status"}
	var (
		values [][]string
		count  int
	)
	// paginate until the server returns an empty page
	// NOTE(review): assumes each page moves lastTime strictly backwards;
	// confirm the server contract, otherwise this could loop forever
	for {
		items, err := r.cloudClient.GetScans(limit, lastTime)
		if err != nil {
			return err
		}
		if len(items) == 0 {
			break
		}
		for _, v := range items {
			count++
			lastTime = v.CreatedAt.String()
			res := nucleicloud.PrepareScanListOutput(v)
			// output precedence: jsonl > table > plain text
			if r.options.JSONL {
				_ = jsoniter.NewEncoder(os.Stdout).Encode(res)
			} else if !r.options.NoTables {
				values = append(values, []string{strconv.FormatInt(res.ScanID, 10), res.Timestamp, strconv.Itoa(res.Target), strconv.Itoa(res.Template), strconv.Itoa(res.ScanResult), res.ScanTime, res.ScanStatus})
			} else {
				gologger.Silent().Msgf("%d. [%s] [TARGETS: %d] [TEMPLATES: %d] [MATCHED: %d] [DURATION: %s] [STATUS: %s]\n", res.ScanID, res.Timestamp, res.Target, res.Template, res.ScanResult, res.ScanTime, strings.ToUpper(res.ScanStatus))
			}
		}
	}
	if count == 0 {
		return errors.New("no scan found")
	}
	if !r.options.NoTables {
		r.prettyPrintTable(header, values)
	}
	return nil
}
// listDatasources prints all cloud datasources in the configured output
// format (jsonl, table or plain text).
func (r *Runner) listDatasources() error {
	datasources, err := r.cloudClient.ListDatasources()
	if err != nil {
		return err
	}
	if len(datasources) == 0 {
		return errors.New("no cloud datasource found")
	}
	header := []string{"ID", "UpdatedAt", "Type", "Repo", "Path"}
	rows := make([][]string, 0, len(datasources))
	for _, ds := range datasources {
		switch {
		case r.options.JSONL:
			_ = jsoniter.NewEncoder(os.Stdout).Encode(ds)
		case !r.options.NoTables:
			rows = append(rows, []string{strconv.FormatInt(ds.ID, 10), ds.Updatedat.Format(nucleicloud.DDMMYYYYhhmmss), ds.Type, ds.Repo, ds.Path})
		default:
			gologger.Silent().Msgf("%d. [%s] [%s] [%s] %s", ds.ID, ds.Updatedat.Format(nucleicloud.DDMMYYYYhhmmss), ds.Type, ds.Repo, ds.Path)
		}
	}
	if !r.options.NoTables {
		r.prettyPrintTable(header, rows)
	}
	return nil
}
// listReportingSources prints all reporting sources in the configured
// output format (jsonl, table or plain text).
func (r *Runner) listReportingSources() error {
	items, err := r.cloudClient.ListReportingSources()
	if err != nil {
		return err
	}
	if len(items) == 0 {
		return errors.New("no reporting source found")
	}
	header := []string{"ID", "Type", "ProjectName", "Enabled"}
	rows := make([][]string, 0, len(items))
	for _, src := range items {
		switch {
		case r.options.JSONL:
			_ = jsoniter.NewEncoder(os.Stdout).Encode(src)
		case !r.options.NoTables:
			rows = append(rows, []string{strconv.FormatInt(src.ID, 10), src.Type, src.ProjectName, strconv.FormatBool(src.Enabled)})
		default:
			gologger.Silent().Msgf("%d. [%s] [%s] [%t]", src.ID, src.Type, src.ProjectName, src.Enabled)
		}
	}
	if !r.options.NoTables {
		r.prettyPrintTable(header, rows)
	}
	return nil
}
// listTargets prints all cloud targets in the configured output format.
func (r *Runner) listTargets() error {
	items, err := r.cloudClient.ListTargets("")
	if err != nil {
		return err
	}
	if len(items) == 0 {
		return errors.New("no target found")
	}
	header := []string{"ID", "Reference", "Count"}
	rows := make([][]string, 0, len(items))
	for _, tgt := range items {
		switch {
		case r.options.JSONL:
			_ = jsoniter.NewEncoder(os.Stdout).Encode(tgt)
		case !r.options.NoTables:
			rows = append(rows, []string{strconv.FormatInt(tgt.ID, 10), tgt.Reference, strconv.FormatInt(tgt.Count, 10)})
		default:
			gologger.Silent().Msgf("%d. %s (%d)", tgt.ID, tgt.Reference, tgt.Count)
		}
	}
	if !r.options.NoTables {
		r.prettyPrintTable(header, rows)
	}
	return nil
}
// listTemplates prints all cloud templates in the configured output format.
func (r *Runner) listTemplates() error {
	items, err := r.cloudClient.ListTemplates("")
	if err != nil {
		return err
	}
	if len(items) == 0 {
		return errors.New("no template found")
	}
	header := []string{"ID", "Reference"}
	rows := make([][]string, 0, len(items))
	for _, tpl := range items {
		switch {
		case r.options.JSONL:
			_ = jsoniter.NewEncoder(os.Stdout).Encode(tpl)
		case !r.options.NoTables:
			rows = append(rows, []string{strconv.FormatInt(tpl.ID, 10), tpl.Reference})
		default:
			gologger.Silent().Msgf("%d. %s", tpl.ID, tpl.Reference)
		}
	}
	if !r.options.NoTables {
		r.prettyPrintTable(header, rows)
	}
	return nil
}
// prettyPrintTable renders the given header and rows as an ascii table on stdout
func (r *Runner) prettyPrintTable(header []string, values [][]string) {
	writer := tablewriter.NewWriter(os.Stdout)
	writer.SetHeader(header)
	writer.AppendBulk(values)
	writer.Render()
}
// deleteScan deletes a cloud scan given its numeric id.
func (r *Runner) deleteScan(id string) error {
	scanID, parseErr := strconv.ParseInt(id, 10, 64)
	if parseErr != nil {
		return errors.Wrap(parseErr, "could not parse scan id")
	}
	deleted, err := r.cloudClient.DeleteScan(scanID)
	if err != nil {
		return errors.Wrap(err, "could not delete scan")
	}
	if deleted.OK {
		gologger.Info().Msgf("Scan deleted %s.", id)
	} else {
		gologger.Error().Msgf("Error in deleting the scan %s.", id)
	}
	return nil
}
// getResults streams results of the given scan id into the configured
// output writer (up to limit results).
func (r *Runner) getResults(id string, limit int) error {
	// fix: the parse error was previously ignored, silently querying scan 0;
	// fail fast like deleteScan does
	ID, parseErr := strconv.ParseInt(id, 10, 64)
	if parseErr != nil {
		return errors.Wrap(parseErr, "could not parse scan id")
	}
	err := r.cloudClient.GetResults(ID, false, limit, func(re *output.ResultEvent) {
		if outputErr := r.output.Write(re); outputErr != nil {
			gologger.Warning().Msgf("Could not write output: %s", outputErr)
		}
	})
	return err
}
// getTarget writes the contents of a cloud target to stdout.
// the argument is treated as a numeric id when it parses, otherwise as a name.
func (r *Runner) getTarget(id string) error {
	var name string
	ID, parseErr := strconv.ParseInt(id, 10, 64)
	if parseErr != nil {
		// non-numeric: look the target up by name instead (ID stays 0)
		name = id
	}
	reader, err := r.cloudClient.GetTarget(ID, name)
	if err != nil {
		return errors.Wrap(err, "could not get target")
	}
	defer reader.Close()
	_, _ = io.Copy(os.Stdout, reader)
	return nil
}
// getTemplate writes the contents of a cloud template to stdout.
// the argument is treated as a numeric id when it parses, otherwise as a name.
func (r *Runner) getTemplate(id string) error {
	var name string
	ID, parseErr := strconv.ParseInt(id, 10, 64)
	if parseErr != nil {
		// non-numeric: look the template up by name instead (ID stays 0)
		name = id
	}
	reader, err := r.cloudClient.GetTemplate(ID, name)
	if err != nil {
		return errors.Wrap(err, "could not get template")
	}
	defer reader.Close()
	_, _ = io.Copy(os.Stdout, reader)
	return nil
}
// removeDatasource deletes a cloud datasource by numeric id or by name.
func (r *Runner) removeDatasource(datasource string) error {
	var source string
	id, parseErr := strconv.ParseInt(datasource, 10, 64)
	if parseErr != nil {
		// not numeric: delete by source name instead (id stays 0)
		source = datasource
	}
	err := r.cloudClient.RemoveDatasource(id, source)
	if err == nil {
		gologger.Info().Msgf("Datasource deleted %s", datasource)
	}
	return err
}
// toggleReportingSource enables or disables a reporting source by numeric id.
func (r *Runner) toggleReportingSource(source string, status bool) error {
	id, parseErr := strconv.ParseInt(source, 10, 64)
	if parseErr != nil {
		return errors.Wrap(parseErr, "could not parse reporting source id")
	}
	if err := r.cloudClient.ToggleReportingSource(id, status); err != nil {
		return err
	}
	state := "disabled"
	if status {
		state = "enabled"
	}
	gologger.Info().Msgf("Reporting source %s %s", state, source)
	return nil
}
// addTemplate uploads every yaml template found under location to the cloud.
func (r *Runner) addTemplate(location string) error {
	return filepath.WalkDir(location, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		// only regular yaml files are treated as templates
		if d.IsDir() || !strings.EqualFold(filepath.Ext(path), extensions.YAML) {
			return nil
		}
		base := filepath.Base(path)
		reference, templateErr := r.cloudClient.AddTemplate(base, path)
		if templateErr != nil {
			gologger.Error().Msgf("Could not upload %s: %s", path, templateErr)
		} else if reference != "" {
			gologger.Info().Msgf("Uploaded template %s: %s", base, reference)
		}
		return nil
	})
}
// addTarget uploads every .txt target list found under location to the cloud.
func (r *Runner) addTarget(location string) error {
	walkErr := filepath.WalkDir(location, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() || !strings.EqualFold(filepath.Ext(path), ".txt") {
			return nil
		}
		base := filepath.Base(path)
		reference, targetErr := r.cloudClient.AddTarget(base, path)
		if targetErr != nil {
			// fix: log the failing file (path), not the walk root (location),
			// consistent with addTemplate
			gologger.Error().Msgf("Could not upload %s: %s", path, targetErr)
		} else if reference != "" {
			gologger.Info().Msgf("Uploaded target %s: %s", base, reference)
		}
		return nil
	})
	return walkErr
}
// removeTarget deletes cloud target(s); the argument may be a numeric id,
// an exact .txt file name, or (anything else) a name prefix.
func (r *Runner) removeTarget(item string) error {
	var err error
	if ID, parseErr := strconv.ParseInt(item, 10, 64); parseErr == nil {
		// numeric id: delete that single target
		err = r.cloudClient.RemoveTarget(ID, "")
	} else if strings.EqualFold(path.Ext(item), ".txt") {
		// exact file name
		err = r.cloudClient.RemoveTarget(0, item)
	} else {
		// prefix: delete all matching targets
		return r.removeTargetPrefix(item)
	}
	if err != nil {
		gologger.Error().Msgf("Error in deleting target %s: %s", item, err)
	} else {
		gologger.Info().Msgf("Target deleted %s", item)
	}
	return nil
}
// removeTargetPrefix deletes every cloud target whose name matches the prefix.
func (r *Runner) removeTargetPrefix(item string) error {
	matches, err := r.cloudClient.ListTargets(item)
	if err != nil {
		return errors.Wrap(err, "could not list targets")
	}
	for _, match := range matches {
		if delErr := r.cloudClient.RemoveTarget(match.ID, ""); delErr != nil {
			gologger.Error().Msgf("Error in deleting target %s: %s", match.Reference, delErr)
		} else {
			gologger.Info().Msgf("Target deleted %s", match.Reference)
		}
	}
	return nil
}
// removeTemplate deletes cloud template(s); the argument may be a numeric id,
// an exact yaml file name, or (anything else) a name prefix.
func (r *Runner) removeTemplate(item string) error {
	var err error
	if ID, parseErr := strconv.ParseInt(item, 10, 64); parseErr == nil {
		// numeric id: delete that single template
		err = r.cloudClient.RemoveTemplate(ID, "")
	} else if strings.EqualFold(path.Ext(item), extensions.YAML) {
		// exact file name
		err = r.cloudClient.RemoveTemplate(0, item)
	} else {
		// prefix: delete all matching templates
		return r.removeTemplatePrefix(item)
	}
	if err != nil {
		gologger.Error().Msgf("Error in deleting template %s: %s", item, err)
	} else {
		gologger.Info().Msgf("Template deleted %s", item)
	}
	return nil
}
// removeTemplatePrefix deletes every cloud template whose name matches the prefix.
func (r *Runner) removeTemplatePrefix(item string) error {
	matches, err := r.cloudClient.ListTemplates(item)
	if err != nil {
		return errors.Wrap(err, "could not list templates")
	}
	for _, match := range matches {
		if delErr := r.cloudClient.RemoveTemplate(match.ID, ""); delErr != nil {
			gologger.Error().Msgf("Error in deleting template %s: %s", match.Reference, delErr)
		} else {
			gologger.Info().Msgf("Template deleted %s", match.Reference)
		}
	}
	return nil
}
// addCloudDataSource adds (and syncs) a cloud data source of the given
// kind ("s3" or "github"); unknown kinds are a no-op
func (r *Runner) addCloudDataSource(source string) error {
	switch source {
	case "s3":
		// s3 token is access-key:secret-key:region joined with ':'
		token := strings.Join([]string{r.options.AwsAccessKey, r.options.AwsSecretKey, r.options.AwsRegion}, ":")
		if _, err := r.processDataSourceItem(r.options.AwsBucketName, token, "s3"); err != nil {
			return err
		}
	case "github":
		// each configured repo becomes its own datasource
		for _, repo := range r.options.GitHubTemplateRepo {
			if _, err := r.processDataSourceItem(repo, r.options.GitHubToken, "github"); err != nil {
				return err
			}
		}
	}
	return nil
}
// processDataSourceItem ensures the given datasource exists on the cloud:
// it is added and synced when missing, and re-synced when -update-templates
// is set. Returns the datasource id.
func (r *Runner) processDataSourceItem(repo, token, Type string) (int64, error) {
	ID, err := r.cloudClient.StatusDataSource(nucleicloud.StatusDataSourceRequest{Repo: repo, Token: token})
	if err != nil {
		// "no rows in result set" marks a datasource that does not exist yet;
		// every other error is fatal
		if !strings.Contains(err.Error(), "no rows in result set") {
			return 0, errors.Wrap(err, "could not get data source status")
		}
		gologger.Info().Msgf("Adding new data source + syncing: %s\n", repo)
		// note: err is intentionally shadowed inside this block
		resp, err := r.cloudClient.AddDataSource(nucleicloud.AddDataSourceRequest{Type: Type, Repo: repo, Token: token})
		if err != nil {
			return 0, errors.Wrap(err, "could not add data source")
		}
		ID = resp.ID
		if err = r.cloudClient.SyncDataSource(resp.ID); err != nil {
			return 0, errors.Wrap(err, "could not sync data source")
		}
		// presumably only set when the source supports webhooks — confirm
		if resp.Secret != "" {
			gologger.Info().Msgf("Webhook URL for added source: %s/datasources/%s/webhook", r.options.CloudURL, resp.Hash)
			gologger.Info().Msgf("Secret for webhook: %s", resp.Secret)
		}
	}
	if r.options.UpdateTemplates {
		gologger.Info().Msgf("Syncing data source: %s (%d)\n", repo, ID)
		if err = r.cloudClient.SyncDataSource(ID); err != nil {
			return 0, errors.Wrap(err, "could not sync data source")
		}
	}
	return ID, nil
}
// addCloudReportingSource mirrors the local jira reporting configuration
// onto the cloud as a reporting source.
func (r *Runner) addCloudReportingSource() error {
	rcOptions := r.issuesClient.GetReportingOptions()
	if rcOptions == nil {
		return nil
	}
	if rcOptions.Jira == nil {
		// only jira is supported here; nothing to do
		return nil
	}
	payload, err := jsoniter.Marshal(rcOptions.Jira)
	if err != nil {
		return err
	}
	request := nucleicloud.AddReportingSourceRequest{
		Type:    "jira",
		Payload: payload,
	}
	if _, err := r.cloudClient.AddReportingSource(request); err != nil {
		return errors.Wrap(err, "could not add reporting source")
	}
	gologger.Info().Msgf("Reporting source and webhook added for %s: %s", "jira", r.options.CloudURL)
	return nil
}

View File

@ -1,6 +0,0 @@
package runner
const (
	// DefaultDumpTrafficOutputFolder is the default directory used to save
	// protocols traffic when traffic dumping is enabled
	DefaultDumpTrafficOutputFolder = "output"
)

View File

@ -1,2 +0,0 @@
// Package runner executes the enumeration process.
package runner

View File

@ -1,166 +0,0 @@
package runner
import (
"bytes"
"context"
"crypto/sha1"
"encoding/base64"
"encoding/hex"
"io"
_ "net/http/pprof"
"os"
"path/filepath"
"strings"
"sync/atomic"
"time"
"github.com/klauspost/compress/zlib"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/internal/runner/nucleicloud"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
)
// runStandardEnumeration runs standard enumeration: smart-workflow driven
// when -automatic-scan is set, plain template execution otherwise.
func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
	if !r.options.AutomaticScan {
		return r.executeTemplatesInput(store, engine)
	}
	return r.executeSmartWorkflowInput(executerOpts, store, engine)
}
// runCloudEnumeration runs cloud based enumeration:
// targets and templates are collected locally, public catalog templates are
// referenced by relative path while modified/private ones are uploaded inline,
// then the scan is created on the cloud and results are streamed back.
func (r *Runner) runCloudEnumeration(store *loader.Store, cloudTemplates, cloudTargets []string, nostore bool, limit int) (*atomic.Bool, error) {
	count := &atomic.Int64{}
	now := time.Now()
	defer func() {
		gologger.Info().Msgf("Scan execution took %s and found %d results", time.Since(now), count.Load())
	}()
	results := &atomic.Bool{}
	// TODO: Add payload file and workflow support for private templates
	catalogChecksums := nucleicloud.ReadCatalogChecksum()

	targets := make([]string, 0, r.hmapInputProvider.Count())
	r.hmapInputProvider.Scan(func(value *contextargs.MetaInput) bool {
		targets = append(targets, value.Input)
		return true
	})
	templates := make([]string, 0, len(store.Templates()))
	privateTemplates := make(map[string]string)
	for _, template := range store.Templates() {
		data, _ := os.ReadFile(template.Path)
		h := sha1.New()
		_, _ = io.Copy(h, bytes.NewReader(data))
		newhash := hex.EncodeToString(h.Sum(nil))
		templateRelativePath := getTemplateRelativePath(template.Path)
		// fix: a template is public only when it is both present in the catalog
		// AND unmodified (checksum matches); the previous 'ok ||' condition
		// treated locally modified catalog templates as public ('hash' is ""
		// when !ok, making the second arm dead)
		if hash, ok := catalogChecksums[templateRelativePath]; ok && newhash == hash {
			templates = append(templates, templateRelativePath)
		} else {
			privateTemplates[filepath.Base(template.Path)] = gzipBase64EncodeData(data)
		}
	}
	taskID, err := r.cloudClient.AddScan(&nucleicloud.AddScanRequest{
		RawTargets:       targets,
		PublicTemplates:  templates,
		CloudTargets:     cloudTargets,
		CloudTemplates:   cloudTemplates,
		PrivateTemplates: privateTemplates,
		IsTemporary:      nostore,
		Filtering:        getCloudFilteringFromOptions(r.options),
	})
	if err != nil {
		return results, err
	}
	gologger.Info().Msgf("Created task with ID: %d", taskID)
	if nostore {
		gologger.Info().Msgf("Cloud scan storage: disabled")
	}
	// give the scan a moment to start before polling its status
	time.Sleep(3 * time.Second)
	scanResponse, err := r.cloudClient.GetScan(taskID)
	if err != nil {
		return results, errors.Wrap(err, "could not get scan status")
	}
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	// Start progress logging for the created scan
	if r.progress != nil {
		ticker := time.NewTicker(time.Duration(r.options.StatsInterval) * time.Second)
		r.progress.Init(r.hmapInputProvider.Count(), int(scanResponse.Templates), int64(scanResponse.Total))
		go func() {
			defer ticker.Stop()
			for {
				select {
				case <-ctx.Done():
					return
				case <-ticker.C:
					if scanResponse, err := r.cloudClient.GetScan(taskID); err == nil {
						r.progress.SetRequests(uint64(scanResponse.Current))
					}
				}
			}
		}()
	}
	err = r.cloudClient.GetResults(taskID, true, limit, func(re *output.ResultEvent) {
		r.progress.IncrementMatched()
		results.CompareAndSwap(false, true)
		_ = count.Add(1)
		// fix: log the actual write error (outputErr), not the stale outer err
		if outputErr := r.output.Write(re); outputErr != nil {
			gologger.Warning().Msgf("Could not write output: %s", outputErr)
		}
		if r.issuesClient != nil {
			if err := r.issuesClient.CreateIssue(re); err != nil {
				gologger.Warning().Msgf("Could not create issue on tracker: %s", err)
			}
		}
	})
	return results, err
}
// getTemplateRelativePath returns the portion of templatePath that follows
// the first occurrence of "nuclei-templates", with any leading slash
// stripped. It returns an empty string when the path does not contain a
// "nuclei-templates" component.
func getTemplateRelativePath(templatePath string) string {
	_, after, found := strings.Cut(templatePath, "nuclei-templates")
	if !found {
		return ""
	}
	return strings.TrimPrefix(after, "/")
}
// gzipBase64EncodeData compresses data with zlib at the best compression
// level and returns the result encoded as standard base64.
//
// NOTE(review): despite the name this emits zlib, not gzip, framing — the
// decoding side must use a zlib reader.
func gzipBase64EncodeData(data []byte) string {
	var compressed bytes.Buffer
	w, _ := zlib.NewWriterLevel(&compressed, zlib.BestCompression)
	_, _ = w.Write(data)
	_ = w.Close()
	return base64.StdEncoding.EncodeToString(compressed.Bytes())
}
// getCloudFilteringFromOptions maps the local CLI template-filtering options
// one-to-one onto the filtering configuration sent with a cloud scan
// request, so the server applies the same author/tag/id/severity/protocol
// filters the user specified locally.
func getCloudFilteringFromOptions(options *types.Options) *nucleicloud.AddScanRequestConfiguration {
	return &nucleicloud.AddScanRequestConfiguration{
		Authors:           options.Authors,
		Tags:              options.Tags,
		ExcludeTags:       options.ExcludeTags,
		IncludeTags:       options.IncludeTags,
		IncludeIds:        options.IncludeIds,
		ExcludeIds:        options.ExcludeIds,
		IncludeTemplates:  options.IncludeTemplates,
		ExcludedTemplates: options.ExcludedTemplates,
		ExcludeMatchers:   options.ExcludeMatchers,
		Severities:        options.Severities,
		ExcludeSeverities: options.ExcludeSeverities,
		Protocols:         options.Protocols,
		ExcludeProtocols:  options.ExcludeProtocols,
		IncludeConditions: options.IncludeConditions,
	}
}

View File

@ -1,666 +0,0 @@
package nucleicloud
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"time"
jsoniter "github.com/json-iterator/go"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/retryablehttp-go"
)
// Client is a client for result retrieval from nuclei-cloud API
type Client struct {
	baseURL    string                // server base URL, without trailing slash
	apiKey     string                // key sent in the X-API-Key header on every request
	httpclient *retryablehttp.Client // retrying HTTP client used for all calls
}

const (
	// pollInterval is the wait between successive result-page polls.
	pollInterval = 3 * time.Second
	// resultSize is the default result page size.
	// NOTE(review): not referenced in this file — confirm callers use it.
	resultSize = 100
	// defaultBaseURL is used when no server URL is configured.
	defaultBaseURL = "https://cloud-dev.nuclei.sh"
)
// HTTPErrorRetryPolicy is to retry for HTTPCodes >= 500.
func HTTPErrorRetryPolicy() func(ctx context.Context, resp *http.Response, err error) (bool, error) {
return func(ctx context.Context, resp *http.Response, err error) (bool, error) {
if resp != nil && resp.StatusCode >= http.StatusInternalServerError {
return true, errors.New(resp.Status)
}
return retryablehttp.CheckRecoverableErrors(ctx, resp, err)
}
}
// New returns a nuclei-cloud API client for the given server URL and API
// key. When baseURL is empty the default cloud endpoint is used.
func New(baseURL, apiKey string) *Client {
	opts := retryablehttp.DefaultOptionsSingle
	opts.NoAdjustTimeout = true
	opts.Timeout = 60 * time.Second
	opts.CheckRetry = HTTPErrorRetryPolicy()

	server := baseURL
	if server == "" {
		server = defaultBaseURL
	}
	return &Client{
		httpclient: retryablehttp.NewClient(opts),
		baseURL:    server,
		apiKey:     apiKey,
	}
}
// AddScan submits a new scan for the given templates and targets to the
// nuclei server and returns the identifier of the created scan.
func (c *Client) AddScan(req *AddScanRequest) (int64, error) {
	var body bytes.Buffer
	if err := jsoniter.NewEncoder(&body).Encode(req); err != nil {
		return 0, errors.Wrap(err, "could not encode request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/scan", c.baseURL), bytes.NewReader(body.Bytes()))
	if err != nil {
		return 0, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return 0, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var data map[string]int64
	if err := jsoniter.NewDecoder(resp.Body).Decode(&data); err != nil {
		return 0, errors.Wrap(err, "could not decode resp")
	}
	return data["id"], nil
}
// GetResults gets results from nuclei server for an ID
// until there are no more results left to retrieve.
//
// Results are fetched in pages of `limit` items, using the last seen result
// id as the pagination cursor; each decoded result is passed to callback.
// When checkProgress is true, polling continues until the server reports
// the scan finished AND an empty page is returned; otherwise the first
// empty page ends the loop.
func (c *Client) GetResults(ID int64, checkProgress bool, limit int, callback func(*output.ResultEvent)) error {
	// lastID is the pagination cursor: highest result id seen so far
	lastID := int64(0)
	for {
		uri := fmt.Sprintf("%s/results?id=%d&from=%d&size=%d", c.baseURL, ID, lastID, limit)
		httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
		if err != nil {
			return errors.Wrap(err, "could not make request")
		}
		resp, err := c.sendRequest(httpReq)
		if err != nil {
			return errors.Wrap(err, "could not do request")
		}
		var items GetResultsResponse
		// body is closed explicitly (not deferred) because this loops
		// indefinitely and a deferred close would leak until return
		if err := jsoniter.NewDecoder(resp.Body).Decode(&items); err != nil {
			resp.Body.Close()
			return errors.Wrap(err, "could not decode results")
		}
		resp.Body.Close()
		for _, item := range items.Items {
			// advance the cursor so the next page starts after this item
			lastID = item.ID
			var result output.ResultEvent
			if err := jsoniter.NewDecoder(strings.NewReader(item.Raw)).Decode(&result); err != nil {
				return errors.Wrap(err, "could not decode result item")
			}
			callback(&result)
		}
		// This is checked during scan is added else if no item found break out of loop.
		if checkProgress {
			if items.Finished && len(items.Items) == 0 {
				break
			}
		} else if len(items.Items) == 0 {
			break
		}
		time.Sleep(pollInterval)
	}
	return nil
}
// GetScans returns a page of scans from the server, starting at the given
// cursor value and containing at most limit entries.
func (c *Client) GetScans(limit int, from string) ([]GetScanRequest, error) {
	uri := fmt.Sprintf("%s/scan?from=%s&size=%d", c.baseURL, url.QueryEscape(from), limit)
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var scans []GetScanRequest
	if err := jsoniter.NewDecoder(resp.Body).Decode(&scans); err != nil {
		return scans, errors.Wrap(err, "could not decode results")
	}
	return scans, nil
}
// GetScan returns the current status of a single scan by its id.
func (c *Client) GetScan(id int64) (GetScanRequest, error) {
	var scan GetScanRequest
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, fmt.Sprintf("%s/scan/%d", c.baseURL, id), nil)
	if err != nil {
		return scan, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return scan, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	if err := jsoniter.NewDecoder(resp.Body).Decode(&scan); err != nil {
		return scan, errors.Wrap(err, "could not decode results")
	}
	return scan, nil
}
// DeleteScan deletes a scan and its issues by the scan id, returning the
// server's acknowledgement.
func (c *Client) DeleteScan(id int64) (DeleteScanResults, error) {
	var result DeleteScanResults
	httpReq, err := retryablehttp.NewRequest(http.MethodDelete, fmt.Sprintf("%s/scan?id=%d", c.baseURL, id), nil)
	if err != nil {
		return result, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return result, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	if err := jsoniter.NewDecoder(resp.Body).Decode(&result); err != nil {
		return result, errors.Wrap(err, "could not delete scan")
	}
	return result, nil
}
// StatusDataSource queries the status for a data source and returns its id.
func (c *Client) StatusDataSource(statusRequest StatusDataSourceRequest) (int64, error) {
	var body bytes.Buffer
	if err := jsoniter.NewEncoder(&body).Encode(statusRequest); err != nil {
		return 0, errors.Wrap(err, "could not encode request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/datasources/status", c.baseURL), bytes.NewReader(body.Bytes()))
	if err != nil {
		return 0, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return 0, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var status StatusDataSourceResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&status); err != nil {
		return 0, errors.Wrap(err, "could not decode resp")
	}
	return status.ID, nil
}
// AddDataSource registers a new data source with the server and returns
// the created source's details.
func (c *Client) AddDataSource(req AddDataSourceRequest) (*AddDataSourceResponse, error) {
	var body bytes.Buffer
	if err := jsoniter.NewEncoder(&body).Encode(req); err != nil {
		return nil, errors.Wrap(err, "could not encode request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/datasources", c.baseURL), bytes.NewReader(body.Bytes()))
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var created AddDataSourceResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&created); err != nil {
		return nil, errors.Wrap(err, "could not decode resp")
	}
	return &created, nil
}
// SyncDataSource triggers a content synchronization for a data source.
// The call blocks until the server completes the update.
func (c *Client) SyncDataSource(ID int64) error {
	uri := fmt.Sprintf("%s/datasources/%d/sync", c.baseURL, ID)
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
	if err != nil {
		return errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()
	// drain the body so the underlying connection can be reused
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
// ExistsDataSourceItem asks the server whether a data source item exists;
// a non-error return means the request succeeded.
func (c *Client) ExistsDataSourceItem(req ExistsDataSourceItemRequest) error {
	var body bytes.Buffer
	if err := jsoniter.NewEncoder(&body).Encode(req); err != nil {
		return errors.Wrap(err, "could not encode request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/datasources/exists", c.baseURL), bytes.NewReader(body.Bytes()))
	if err != nil {
		return errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()
	// drain the body so the underlying connection can be reused
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
// ListDatasources returns all data sources configured on the server.
func (c *Client) ListDatasources() ([]GetDataSourceResponse, error) {
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, fmt.Sprintf("%s/datasources", c.baseURL), nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var sources []GetDataSourceResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&sources); err != nil {
		return sources, errors.Wrap(err, "could not decode results")
	}
	return sources, nil
}
// ListReportingSources returns all reporting sources configured on the server.
func (c *Client) ListReportingSources() ([]GetReportingSourceResponse, error) {
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, fmt.Sprintf("%s/reporting", c.baseURL), nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var sources []GetReportingSourceResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&sources); err != nil {
		return sources, errors.Wrap(err, "could not decode results")
	}
	return sources, nil
}
// ToggleReportingSource enables or disables a reporting source by its id.
func (c *Client) ToggleReportingSource(ID int64, status bool) error {
	var body bytes.Buffer
	if err := jsoniter.NewEncoder(&body).Encode(ReportingSourceStatus{Enabled: status}); err != nil {
		return errors.Wrap(err, "could not encode request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPut, fmt.Sprintf("%s/reporting/%d", c.baseURL, ID), bytes.NewReader(body.Bytes()))
	if err != nil {
		return errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()
	// drain the body so the underlying connection can be reused
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
// ListTargets returns the targets stored on the server, optionally filtered
// by a search query.
func (c *Client) ListTargets(query string) ([]GetTargetResponse, error) {
	uri := c.baseURL + "/targets"
	if query != "" {
		uri += "?query=" + url.QueryEscape(query)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var targets []GetTargetResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&targets); err != nil {
		return targets, errors.Wrap(err, "could not decode results")
	}
	return targets, nil
}
// ListTemplates returns the templates stored on the server, optionally
// filtered by a search query.
func (c *Client) ListTemplates(query string) ([]GetTemplatesResponse, error) {
	uri := c.baseURL + "/templates"
	if query != "" {
		uri += "?query=" + url.QueryEscape(query)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var templates []GetTemplatesResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&templates); err != nil {
		return templates, errors.Wrap(err, "could not decode results")
	}
	return templates, nil
}
// RemoveDatasource deletes a data source identified by name (preferred) or,
// when name is empty, by its non-zero id.
func (c *Client) RemoveDatasource(datasource int64, name string) error {
	uri := c.baseURL + "/datasources"
	switch {
	case name != "":
		uri += "?name=" + name
	case datasource != 0:
		uri += "?id=" + strconv.FormatInt(datasource, 10)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodDelete, uri, nil)
	if err != nil {
		return errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()
	// drain the body so the underlying connection can be reused
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
// AddTemplate uploads a template file to the server as multipart form data
// and returns the server's acknowledgement message.
//
// name is the display name for the template; contents is the path of the
// template file on disk.
func (c *Client) AddTemplate(name, contents string) (string, error) {
	file, err := os.Open(contents)
	if err != nil {
		return "", errors.Wrap(err, "could not open contents")
	}
	defer file.Close()

	var buf bytes.Buffer
	writer := multipart.NewWriter(&buf)
	_ = writer.WriteField("name", name)
	// errors were previously discarded here, which could silently upload a
	// truncated or malformed multipart body
	fileWriter, err := writer.CreateFormFile("file", filepath.Base(contents))
	if err != nil {
		return "", errors.Wrap(err, "could not create form file")
	}
	if _, err := io.Copy(fileWriter, file); err != nil {
		return "", errors.Wrap(err, "could not copy file contents")
	}
	if err := writer.Close(); err != nil {
		return "", errors.Wrap(err, "could not close multipart writer")
	}

	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/templates", c.baseURL), &buf)
	if err != nil {
		return "", errors.Wrap(err, "could not make request")
	}
	httpReq.Header.Set("Content-Type", writer.FormDataContentType())

	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return "", errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var item AddItemResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&item); err != nil {
		return "", errors.Wrap(err, "could not decode results")
	}
	return item.Ok, nil
}
// AddTarget uploads a target list file to the server as multipart form data
// and returns the server's acknowledgement message.
//
// name is the display name for the target list; contents is the path of the
// file on disk.
func (c *Client) AddTarget(name, contents string) (string, error) {
	file, err := os.Open(contents)
	if err != nil {
		return "", errors.Wrap(err, "could not open contents")
	}
	defer file.Close()

	var buf bytes.Buffer
	writer := multipart.NewWriter(&buf)
	_ = writer.WriteField("name", name)
	// errors were previously discarded here, which could silently upload a
	// truncated or malformed multipart body
	fileWriter, err := writer.CreateFormFile("file", filepath.Base(contents))
	if err != nil {
		return "", errors.Wrap(err, "could not create form file")
	}
	if _, err := io.Copy(fileWriter, file); err != nil {
		return "", errors.Wrap(err, "could not copy file contents")
	}
	if err := writer.Close(); err != nil {
		return "", errors.Wrap(err, "could not close multipart writer")
	}

	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/targets", c.baseURL), &buf)
	if err != nil {
		return "", errors.Wrap(err, "could not make request")
	}
	httpReq.Header.Set("Content-Type", writer.FormDataContentType())

	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return "", errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var item AddItemResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&item); err != nil {
		return "", errors.Wrap(err, "could not decode results")
	}
	return item.Ok, nil
}
// RemoveTemplate deletes a template identified by name (preferred) or, when
// name is empty, by its non-zero id.
func (c *Client) RemoveTemplate(ID int64, name string) error {
	uri := c.baseURL + "/templates"
	switch {
	case name != "":
		uri += "?name=" + name
	case ID != 0:
		uri += "?id=" + strconv.FormatInt(ID, 10)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodDelete, uri, nil)
	if err != nil {
		return errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()
	// drain the body so the underlying connection can be reused
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
// RemoveTarget deletes a target list identified by name (preferred) or,
// when name is empty, by its non-zero id.
func (c *Client) RemoveTarget(ID int64, name string) error {
	uri := c.baseURL + "/targets"
	switch {
	case name != "":
		uri += "?name=" + name
	case ID != 0:
		uri += "?id=" + strconv.FormatInt(ID, 10)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodDelete, uri, nil)
	if err != nil {
		return errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()
	// drain the body so the underlying connection can be reused
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
// GetTarget downloads the contents of a target list identified by name
// (preferred) or, when name is empty, by its non-zero id. The caller owns
// the returned ReadCloser and must close it.
func (c *Client) GetTarget(ID int64, name string) (io.ReadCloser, error) {
	uri := c.baseURL + "/targets/get"
	switch {
	case name != "":
		uri += "?name=" + name
	case ID != 0:
		uri += "?id=" + strconv.FormatInt(ID, 10)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	return resp.Body, nil
}
// GetTemplate downloads the contents of a template identified by name
// (preferred) or, when name is empty, by its non-zero id. The caller owns
// the returned ReadCloser and must close it.
func (c *Client) GetTemplate(ID int64, name string) (io.ReadCloser, error) {
	uri := c.baseURL + "/templates/get"
	switch {
	case name != "":
		uri += "?name=" + name
	case ID != 0:
		uri += "?id=" + strconv.FormatInt(ID, 10)
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, uri, nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	return resp.Body, nil
}
// ExistsTarget checks whether a target with the given id exists on the
// server and returns its reference.
func (c *Client) ExistsTarget(id int64) (ExistsInputResponse, error) {
	var response ExistsInputResponse
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, fmt.Sprintf("%s/targets/%d/exists", c.baseURL, id), nil)
	if err != nil {
		return response, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return response, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	if err := jsoniter.NewDecoder(resp.Body).Decode(&response); err != nil {
		return response, errors.Wrap(err, "could not decode results")
	}
	return response, nil
}
// ExistsTemplate checks whether a template with the given id exists on the
// server and returns its reference.
func (c *Client) ExistsTemplate(id int64) (ExistsInputResponse, error) {
	var response ExistsInputResponse
	httpReq, err := retryablehttp.NewRequest(http.MethodGet, fmt.Sprintf("%s/templates/%d/exists", c.baseURL, id), nil)
	if err != nil {
		return response, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return response, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	if err := jsoniter.NewDecoder(resp.Body).Decode(&response); err != nil {
		return response, errors.Wrap(err, "could not decode results")
	}
	return response, nil
}
// apiKeyParameter is the HTTP header used to authenticate API requests.
const apiKeyParameter = "X-API-Key"

// errorResponse is the JSON error payload returned by the API on failure.
type errorResponse struct {
	Message string `json:"message"`
}
// sendRequest attaches the API key header, executes the request and
// validates the response status. For a non-success status (outside
// [200, 400)) the body is consumed and converted into an error; otherwise
// the response is returned with its body open and the caller is
// responsible for closing it.
func (c *Client) sendRequest(req *retryablehttp.Request) (*http.Response, error) {
	req.Header.Set(apiKeyParameter, c.apiKey)

	resp, err := c.httpclient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusBadRequest {
		data, _ := io.ReadAll(resp.Body)
		resp.Body.Close()

		var errRes errorResponse
		// only trust the decoded message when it is non-empty; previously a
		// valid-but-empty JSON body produced an empty error string that hid
		// the status code entirely
		if err = json.NewDecoder(bytes.NewReader(data)).Decode(&errRes); err == nil && errRes.Message != "" {
			return nil, errors.New(errRes.Message)
		}
		return nil, fmt.Errorf("unknown error, status code: %d=%s", resp.StatusCode, string(data))
	}
	return resp, nil
}
// AddReportingSource registers a new reporting source with the server and
// returns the server's acknowledgement.
func (c *Client) AddReportingSource(req AddReportingSourceRequest) (*AddReportingSourceResponse, error) {
	var body bytes.Buffer
	if err := jsoniter.NewEncoder(&body).Encode(req); err != nil {
		return nil, errors.Wrap(err, "could not encode request")
	}
	httpReq, err := retryablehttp.NewRequest(http.MethodPost, fmt.Sprintf("%s/reporting/add-source", c.baseURL), bytes.NewReader(body.Bytes()))
	if err != nil {
		return nil, errors.Wrap(err, "could not make request")
	}
	resp, err := c.sendRequest(httpReq)
	if err != nil {
		return nil, errors.Wrap(err, "could not do request")
	}
	defer resp.Body.Close()

	var created AddReportingSourceResponse
	if err := jsoniter.NewDecoder(resp.Body).Decode(&created); err != nil {
		return nil, errors.Wrap(err, "could not decode resp")
	}
	return &created, nil
}

View File

@ -1,178 +0,0 @@
package nucleicloud
import (
"encoding/json"
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
)
// AddScanRequest is a nuclei scan input item.
type AddScanRequest struct {
	// RawTargets is a list of raw target URLs for the scan.
	RawTargets []string `json:"raw_targets,omitempty"`
	// PublicTemplates is a list of public templates for the scan,
	// referenced by their path relative to the templates catalog.
	PublicTemplates []string `json:"public_templates,omitempty"`
	// PrivateTemplates is a map of template-name->contents that
	// are private to the user executing the scan. (TODO: TBD)
	// Contents are zlib-compressed and base64-encoded.
	PrivateTemplates map[string]string `json:"private_templates,omitempty"`
	// CloudTargets is a list of cloud targets for the scan
	CloudTargets []string `json:"cloud_targets,omitempty"`
	// CloudTemplates is a list of cloud templates for the scan
	CloudTemplates []string `json:"cloud_templates,omitempty"`
	// Filtering contains optional filtering options for scan additions
	Filtering *AddScanRequestConfiguration `json:"filtering"`
	// IsTemporary disables server-side storage of the scan when true.
	IsTemporary bool `json:"is_temporary"`
}
// AddScanRequestConfiguration contains filtering options for scan addition.
// Fields mirror the local CLI template-filtering flags so the server can
// apply the same author/tag/id/severity/protocol filters.
type AddScanRequestConfiguration struct {
	Authors           []string            `json:"author,omitempty"`
	Tags              []string            `json:"tags,omitempty"`
	ExcludeTags       []string            `json:"exclude-tags,omitempty"`
	IncludeTags       []string            `json:"include-tags,omitempty"`
	IncludeIds        []string            `json:"include-ids,omitempty"`
	ExcludeIds        []string            `json:"exclude-ids,omitempty"`
	IncludeTemplates  []string            `json:"include-templates,omitempty"`
	ExcludedTemplates []string            `json:"exclude-templates,omitempty"`
	ExcludeMatchers   []string            `json:"exclude-matchers,omitempty"`
	Severities        severity.Severities `json:"severities,omitempty"`
	ExcludeSeverities severity.Severities `json:"exclude-severities,omitempty"`
	Protocols         types.ProtocolTypes `json:"protocols,omitempty"`
	ExcludeProtocols  types.ProtocolTypes `json:"exclude-protocols,omitempty"`
	IncludeConditions []string            `json:"include-conditions,omitempty"`
}
// GetResultsResponse is a single page of scan results returned by the server.
type GetResultsResponse struct {
	// Finished reports whether the scan has completed on the server.
	Finished bool                     `json:"finished"`
	Items    []GetResultsResponseItem `json:"items"`
}

// GetScanRequest is the status of a single scan as reported by the server.
type GetScanRequest struct {
	Id int64 `json:"id"`
	// Total and Current are used as the total/completed request counters
	// for progress display.
	Total      int32     `json:"total"`
	Current    int32     `json:"current"`
	Finished   bool      `json:"finished"`
	CreatedAt  time.Time `json:"created_at"`
	FinishedAt time.Time `json:"finished_at"`
	Targets    int32     `json:"targets"`
	Templates  int32     `json:"templates"`
	Matches    int64     `json:"matches"`
}

// AddDataSourceResponse is a add data source response item.
type AddDataSourceResponse struct {
	ID     int64  `json:"id"`
	Hash   string `json:"hash"`
	Secret string `json:"secret,omitempty"`
}

// GetResultsResponseItem is one result row within a results page.
type GetResultsResponseItem struct {
	ID int64 `json:"id"`
	// Raw is a JSON-encoded output.ResultEvent.
	Raw string `json:"raw"`
}

// DeleteScanResults is the server acknowledgement for a scan deletion.
type DeleteScanResults struct {
	OK bool `json:"ok"`
}

// StatusDataSourceRequest is a add data source request item.
type StatusDataSourceRequest struct {
	Repo  string `json:"repo"`
	Token string `json:"token"`
}

// StatusDataSourceResponse is a add data source response item.
type StatusDataSourceResponse struct {
	ID int64 `json:"id"`
}

// AddDataSourceRequest is a add data source request item.
type AddDataSourceRequest struct {
	Type  string `json:"type"`
	Repo  string `json:"repo"`
	Token string `json:"token"`
	Sync  bool   `json:"sync"`
}

// ExistsDataSourceItemRequest is a request to identify whether a data
// source item exists.
type ExistsDataSourceItemRequest struct {
	Type     string `json:"type"`
	Contents string `json:"contents"`
}
// GetDataSourceResponse is response for a get data source request
type GetDataSourceResponse struct {
	ID        int64     `json:"id"`
	Hash      string    `json:"hash"`
	Type      string    `json:"type"`
	Path      string    `json:"path"`
	Repo      string    `json:"repo"`
	Updatedat time.Time `json:"updated_at"`
}

// GetTargetResponse is the response for a get target request
type GetTargetResponse struct {
	ID         int64  `json:"id"`
	DataSource int64  `json:"data_source"` // id of the owning data source
	Name       string `json:"name"`
	Reference  string `json:"reference"`
	Count      int64  `json:"count"`
	Hash       string `json:"hash"`
	Type       string `json:"type"`
}

// GetTemplatesResponse is the response for a get templates request
type GetTemplatesResponse struct {
	ID         int64  `json:"id,omitempty"`
	DataSource int64  `json:"data_source,omitempty"` // id of the owning data source
	Name       string `json:"name,omitempty"`
	Reference  string `json:"reference,omitempty"`
	Hash       string `json:"hash,omitempty"`
	Type       string `json:"type,omitempty"`
}

// GetReportingSourceResponse describes one reporting source on the server.
type GetReportingSourceResponse struct {
	ID          int64     `json:"id"`
	Type        string    `json:"type"`
	ProjectName string    `json:"project_name"`
	Enabled     bool      `json:"enabled"`
	Updatedat   time.Time `json:"updated_at"`
}

// ReportingSourceStatus is the payload for enabling/disabling a
// reporting source.
type ReportingSourceStatus struct {
	Enabled bool `json:"enabled"`
}

// AddItemResponse is the response to add item request
type AddItemResponse struct {
	Ok string `json:"ok"`
}

// ListScanOutput is the display structure for one row of scan-list output.
type ListScanOutput struct {
	Timestamp  string `json:"timestamp"`
	ScanID     int64  `json:"scan_id"`
	ScanTime   string `json:"scan_time"`
	ScanResult int    `json:"scan_result"`
	ScanStatus string `json:"scan_status"`
	Target     int    `json:"target"`
	Template   int    `json:"template"`
}

// ExistsInputResponse is the response to a target/template exists check.
type ExistsInputResponse struct {
	Reference string `json:"reference"`
}

// AddReportingSourceRequest is a add reporting source request item.
type AddReportingSourceRequest struct {
	Type    string          `json:"type"`
	Payload json.RawMessage `json:"payload"`
}

// AddReportingSourceResponse is a add reporting source response item.
type AddReportingSourceResponse struct {
	Ok string `json:"ok"`
}

View File

@ -1,66 +0,0 @@
package nucleicloud
import (
"bufio"
"os"
"path/filepath"
"strings"
"time"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
)
const DDMMYYYYhhmmss = "2006-01-02 15:04:05"
// ReadCatalogChecksum reads the templates-checksum.txt manifest from the
// installed nuclei-templates directory and returns a map of template path
// (relative to the repository root) to checksum. It returns nil when the
// manifest cannot be opened.
func ReadCatalogChecksum() map[string]string {
	cfg := config.DefaultConfig
	manifest := filepath.Join(cfg.TemplatesDirectory, "templates-checksum.txt")

	file, err := os.Open(manifest)
	if err != nil {
		return nil
	}
	defer file.Close()

	checksums := make(map[string]string)
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		path, checksum, found := strings.Cut(scanner.Text(), ":")
		if !found {
			continue
		}
		path = strings.TrimPrefix(path, "nuclei-templates/")
		// skip dot-prefixed entries (hidden files/directories)
		if strings.HasPrefix(path, ".") {
			continue
		}
		checksums[path] = checksum
	}
	return checksums
}
// PrepareScanListOutput converts a scan status response into the structure
// used for scan-list CLI output: local-time creation timestamp, run
// duration, status string and result/target/template counts.
func PrepareScanListOutput(v GetScanRequest) ListScanOutput {
	status := "finished"
	end := v.FinishedAt
	duration := end.Sub(v.CreatedAt)
	if !v.Finished {
		// still running: measure elapsed time up to now, rounded to the
		// nearest minute since there is no finish timestamp yet
		status = "running"
		end = time.Now().UTC()
		duration = end.Sub(v.CreatedAt).Round(60 * time.Second)
	}
	return ListScanOutput{
		// time.Local replaces the previous time.LoadLocation("Local")
		// call, whose error was silently discarded
		Timestamp:  v.CreatedAt.In(time.Local).Format(DDMMYYYYhhmmss),
		ScanID:     v.Id,
		ScanTime:   duration.String(),
		ScanResult: int(v.Matches),
		ScanStatus: status,
		Target:     int(v.Targets),
		Template:   int(v.Templates),
	}
}

View File

@ -20,14 +20,24 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolinit"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/vardump"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
protocoltypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/jsonexporter"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/jsonl"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/markdown"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/sarif"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/yaml"
fileutil "github.com/projectdiscovery/utils/file"
"github.com/projectdiscovery/utils/generic"
logutil "github.com/projectdiscovery/utils/log"
stringsutil "github.com/projectdiscovery/utils/strings"
)
const (
	// DefaultDumpTrafficOutputFolder is the default directory used to save
	// dumped protocol traffic.
	DefaultDumpTrafficOutputFolder = "output"
)
func ConfigureOptions() error {
// with FileStringSliceOptions, FileNormalizedStringSliceOptions, FileCommaSeparatedStringSliceOptions
// if file has the extension `.yaml` or `.json` we consider those as strings and not files to be read
@ -184,37 +194,6 @@ func ValidateOptions(options *types.Options) error {
if !useIPV4 && !useIPV6 {
return errors.New("ipv4 and/or ipv6 must be selected")
}
// Validate cloud option
if err := validateCloudOptions(options); err != nil {
return err
}
return nil
}
// validateCloudOptions ensures cloud-related flags are coherent: cloud
// specific flags require cloud mode, cloud mode requires an API key, and
// any datasource being added must have its required env variables set.
func validateCloudOptions(options *types.Options) error {
	if options.HasCloudOptions() && !options.Cloud {
		return errors.New("cloud flags cannot be used without cloud option")
	}
	if !options.Cloud {
		return nil
	}
	if options.CloudAPIKey == "" {
		return errors.New("missing NUCLEI_CLOUD_API env variable")
	}
	var missing []string
	switch options.AddDatasource {
	case "s3":
		missing = validateMissingS3Options(options)
	case "github":
		missing = validateMissingGitHubOptions(options)
	case "gitlab":
		missing = validateMissingGitLabOptions(options)
	case "azure":
		missing = validateMissingAzureOptions(options)
	}
	if len(missing) > 0 {
		return fmt.Errorf("missing %v env variables", strings.Join(missing, ", "))
	}
	return nil
}
@ -255,17 +234,6 @@ func validateMissingAzureOptions(options *types.Options) []string {
return missing
}
// validateMissingGitHubOptions returns the names of the GitHub related
// environment variables that are required for a github datasource but are
// currently unset. An empty result means all requirements are satisfied.
func validateMissingGitHubOptions(options *types.Options) []string {
	var missing []string
	if options.GitHubToken == "" {
		missing = append(missing, "GITHUB_TOKEN")
	}
	if len(options.GitHubTemplateRepo) == 0 {
		missing = append(missing, "GITHUB_TEMPLATE_REPO")
	}
	return missing
}
func validateMissingGitLabOptions(options *types.Options) []string {
var missing []string
if options.GitLabToken == "" {
@ -278,6 +246,46 @@ func validateMissingGitLabOptions(options *types.Options) []string {
return missing
}
// createReportingOptions assembles the reporting configuration for a scan.
// It first merges the optional YAML reporting config file (with env-var
// expansion), then layers the markdown, sarif, json and jsonl exporters
// selected on the command line on top. Returns an error when the config
// file cannot be opened or parsed.
func createReportingOptions(options *types.Options) (*reporting.Options, error) {
	reportingOptions := &reporting.Options{}

	if options.ReportingConfig != "" {
		file, err := os.Open(options.ReportingConfig)
		if err != nil {
			return nil, errors.Wrap(err, "could not open reporting config file")
		}
		defer file.Close()

		if err := yaml.DecodeAndValidate(file, reportingOptions); err != nil {
			return nil, errors.Wrap(err, "could not parse reporting config file")
		}
		// Expand environment-variable references inside the parsed config.
		Walk(reportingOptions, expandEndVars)
	}

	if dir := options.MarkdownExportDirectory; dir != "" {
		reportingOptions.MarkdownExporter = &markdown.Options{
			Directory:         dir,
			IncludeRawPayload: !options.OmitRawRequests,
			SortMode:          options.MarkdownExportSortMode,
		}
	}
	if file := options.SarifExport; file != "" {
		reportingOptions.SarifExporter = &sarif.Options{File: file}
	}
	if file := options.JSONExport; file != "" {
		reportingOptions.JSONExporter = &jsonexporter.Options{
			File:              file,
			IncludeRawPayload: !options.OmitRawRequests,
		}
	}
	if file := options.JSONLExport; file != "" {
		reportingOptions.JSONLExporter = &jsonl.Options{
			File:              file,
			IncludeRawPayload: !options.OmitRawRequests,
		}
	}
	return reportingOptions, nil
}
// configureOutput configures the output logging levels to be displayed on the screen
func configureOutput(options *types.Options) {
// If the user desires verbose output, show verbose output
@ -354,17 +362,6 @@ func validateCertificatePaths(certificatePaths ...string) {
// Read the input from env and set options
func readEnvInputVars(options *types.Options) {
if strings.EqualFold(os.Getenv("NUCLEI_CLOUD"), "true") {
options.Cloud = true
// TODO: disable files, offlinehttp, code
options.ExcludeProtocols = append(options.ExcludeProtocols, protocoltypes.CodeProtocol, protocoltypes.FileProtocol, protocoltypes.OfflineHTTPProtocol)
}
if options.CloudURL = os.Getenv("NUCLEI_CLOUD_SERVER"); options.CloudURL == "" {
options.CloudURL = "https://cloud-dev.nuclei.sh"
}
options.CloudAPIKey = os.Getenv("NUCLEI_CLOUD_API")
options.GitHubToken = os.Getenv("GITHUB_TOKEN")
repolist := os.Getenv("GITHUB_TEMPLATE_REPO")
if repolist != "" {

View File

@ -3,18 +3,19 @@ package runner
import (
"context"
"encoding/json"
"fmt"
"net/http"
_ "net/http/pprof"
"os"
"reflect"
"strconv"
"strings"
"sync/atomic"
"time"
"github.com/projectdiscovery/nuclei/v3/internal/runner/nucleicloud"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
"github.com/projectdiscovery/nuclei/v3/pkg/installer"
uncoverlib "github.com/projectdiscovery/uncover"
"github.com/projectdiscovery/utils/env"
permissionutil "github.com/projectdiscovery/utils/permission"
updateutils "github.com/projectdiscovery/utils/update"
@ -47,10 +48,6 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/http/httpclientpool"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/jsonexporter"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/jsonl"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/markdown"
"github.com/projectdiscovery/nuclei/v3/pkg/reporting/exporters/sarif"
"github.com/projectdiscovery/nuclei/v3/pkg/templates"
"github.com/projectdiscovery/nuclei/v3/pkg/types"
"github.com/projectdiscovery/nuclei/v3/pkg/utils"
@ -60,6 +57,13 @@ import (
ptrutil "github.com/projectdiscovery/utils/ptr"
)
var (
// HideAutoSaveMsg is a global variable to hide the auto-save message
HideAutoSaveMsg = false
// DisableCloudUpload is a global variable to disable cloud upload
DisableCloudUpload = false
)
// Runner is a client for running the enumeration process.
type Runner struct {
output output.Writer
@ -76,8 +80,8 @@ type Runner struct {
hostErrors hosterrorscache.CacheInterface
resumeCfg *types.ResumeCfg
pprofServer *http.Server
cloudClient *nucleicloud.Client
cloudTargets []string
// pdcp auto-save options
pdcpUploadErrMsg string
}
const pprofServerAddress = "127.0.0.1:8086"
@ -93,10 +97,6 @@ func New(options *types.Options) (*Runner, error) {
os.Exit(0)
}
if options.Cloud {
runner.cloudClient = nucleicloud.New(options.CloudURL, options.CloudAPIKey)
}
// Version check by default
if config.DefaultConfig.CanCheckForUpdates() {
if err := installer.NucleiVersionCheck(); err != nil {
@ -210,31 +210,13 @@ func New(options *types.Options) (*Runner, error) {
}()
}
if (len(options.Templates) == 0 || !options.NewTemplates || (options.TargetsFilePath == "" && !options.Stdin && len(options.Targets) == 0)) && (options.UpdateTemplates && !options.Cloud) {
if (len(options.Templates) == 0 || !options.NewTemplates || (options.TargetsFilePath == "" && !options.Stdin && len(options.Targets) == 0)) && options.UpdateTemplates {
os.Exit(0)
}
// Initialize the input source
hmapInput, err := hybrid.New(&hybrid.Options{
Options: options,
NotFoundCallback: func(target string) bool {
if !options.Cloud {
return false
}
parsed, parseErr := strconv.ParseInt(target, 10, 64)
if parseErr != nil {
if err := runner.cloudClient.ExistsDataSourceItem(nucleicloud.ExistsDataSourceItemRequest{Contents: target, Type: "targets"}); err == nil {
runner.cloudTargets = append(runner.cloudTargets, target)
return true
}
return false
}
if exists, err := runner.cloudClient.ExistsTarget(parsed); err == nil {
runner.cloudTargets = append(runner.cloudTargets, exists.Reference)
return true
}
return false
},
})
if err != nil {
return nil, errors.Wrap(err, "could not create input provider")
@ -246,7 +228,8 @@ func New(options *types.Options) (*Runner, error) {
if err != nil {
return nil, errors.Wrap(err, "could not create output file")
}
runner.output = outputWriter
// setup a proxy writer to automatically upload results to PDCP
runner.output = runner.setupPDCPUpload(outputWriter)
if options.JSONL && options.EnableProgressBar {
options.StatsJSON = true
@ -257,11 +240,7 @@ func New(options *types.Options) (*Runner, error) {
// Creates the progress tracking object
var progressErr error
statsInterval := options.StatsInterval
if options.Cloud && !options.EnableProgressBar {
statsInterval = -1
options.EnableProgressBar = true
}
runner.progress, progressErr = progress.NewStatsTicker(statsInterval, options.EnableProgressBar, options.StatsJSON, options.Cloud, options.MetricsPort)
runner.progress, progressErr = progress.NewStatsTicker(statsInterval, options.EnableProgressBar, options.StatsJSON, false, options.MetricsPort)
if progressErr != nil {
return nil, progressErr
}
@ -336,44 +315,12 @@ func New(options *types.Options) (*Runner, error) {
return runner, nil
}
func createReportingOptions(options *types.Options) (*reporting.Options, error) {
var reportingOptions = &reporting.Options{}
if options.ReportingConfig != "" {
file, err := os.Open(options.ReportingConfig)
if err != nil {
return nil, errors.Wrap(err, "could not open reporting config file")
}
defer file.Close()
if err := yaml.DecodeAndValidate(file, reportingOptions); err != nil {
return nil, errors.Wrap(err, "could not parse reporting config file")
}
Walk(reportingOptions, expandEndVars)
// runStandardEnumeration runs standard enumeration
func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions, store *loader.Store, engine *core.Engine) (*atomic.Bool, error) {
if r.options.AutomaticScan {
return r.executeSmartWorkflowInput(executerOpts, store, engine)
}
if options.MarkdownExportDirectory != "" {
reportingOptions.MarkdownExporter = &markdown.Options{
Directory: options.MarkdownExportDirectory,
IncludeRawPayload: !options.OmitRawRequests,
SortMode: options.MarkdownExportSortMode,
}
}
if options.SarifExport != "" {
reportingOptions.SarifExporter = &sarif.Options{File: options.SarifExport}
}
if options.JSONExport != "" {
reportingOptions.JSONExporter = &jsonexporter.Options{
File: options.JSONExport,
IncludeRawPayload: !options.OmitRawRequests,
}
}
if options.JSONLExport != "" {
reportingOptions.JSONLExporter = &jsonl.Options{
File: options.JSONLExport,
IncludeRawPayload: !options.OmitRawRequests,
}
}
return reportingOptions, nil
return r.executeTemplatesInput(store, engine)
}
// Close releases all the resources and cleans up
@ -394,6 +341,31 @@ func (r *Runner) Close() {
}
}
// setupPDCPUpload wraps the given writer so that results are additionally
// uploaded to the ProjectDiscovery Cloud Platform (PDCP) dashboard.
// When uploading is disabled or credentials are unavailable, the original
// writer is returned unchanged and a warning is stashed in r.pdcpUploadErrMsg
// for later display by displayExecutionInfo.
func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
	// Honor both the CLI flag and the package-level (env-driven) kill switch.
	if r.options.DisableCloudUpload || DisableCloudUpload {
		r.pdcpUploadErrMsg = fmt.Sprintf("[%v] Scan results upload to cloud is disabled.", aurora.BrightYellow("WRN"))
		return writer
	}

	au := aurora.NewAurora(!r.options.NoColor)

	credHandler := &pdcp.PDCPCredHandler{}
	creds, err := credHandler.GetCreds()
	if err != nil {
		// Missing creds is the normal unauthenticated case; only log other errors.
		if err != pdcp.ErrNoCreds && !HideAutoSaveMsg {
			gologger.Verbose().Msgf("Could not get credentials for cloud upload: %s\n", err)
		}
		r.pdcpUploadErrMsg = fmt.Sprintf("[%v] To view results on Cloud Dashboard, Configure API key from %v", au.BrightYellow("WRN"), pdcp.DashBoardURL)
		return writer
	}

	uploadWriter, err := pdcp.NewUploadWriter(creds)
	if err != nil {
		r.pdcpUploadErrMsg = fmt.Sprintf("[%v] PDCP (%v) Auto-Save Failed: %s\n", au.BrightYellow("WRN"), pdcp.DashBoardURL, err)
		return writer
	}

	// Fan results out to both the local writer and the cloud upload writer.
	return output.NewMultiWriter(writer, uploadWriter)
}
// RunEnumeration sets up the input layer for giving input nuclei.
// binary and runs the actual enumeration
func (r *Runner) RunEnumeration() error {
@ -454,25 +426,6 @@ func (r *Runner) RunEnumeration() error {
return errors.Wrap(err, "could not load templates from config")
}
var cloudTemplates []string
if r.options.Cloud {
// hook template loading
store.NotFoundCallback = func(template string) bool {
parsed, parseErr := strconv.ParseInt(template, 10, 64)
if parseErr != nil {
if err := r.cloudClient.ExistsDataSourceItem(nucleicloud.ExistsDataSourceItemRequest{Type: "templates", Contents: template}); err == nil {
cloudTemplates = append(cloudTemplates, template)
return true
}
return false
}
if exists, err := r.cloudClient.ExistsTemplate(parsed); err == nil {
cloudTemplates = append(cloudTemplates, exists.Reference)
return true
}
return false
}
}
if r.options.Validate {
if err := store.ValidateTemplates(); err != nil {
return err
@ -525,55 +478,8 @@ func (r *Runner) RunEnumeration() error {
enumeration := false
var results *atomic.Bool
if r.options.Cloud {
if r.options.ScanList {
err = r.getScanList(r.options.OutputLimit)
} else if r.options.DeleteScan != "" {
err = r.deleteScan(r.options.DeleteScan)
} else if r.options.ScanOutput != "" {
err = r.getResults(r.options.ScanOutput, r.options.OutputLimit)
} else if r.options.ListDatasources {
err = r.listDatasources()
} else if r.options.ListTargets {
err = r.listTargets()
} else if r.options.ListTemplates {
err = r.listTemplates()
} else if r.options.ListReportingSources {
err = r.listReportingSources()
} else if r.options.AddDatasource != "" {
err = r.addCloudDataSource(r.options.AddDatasource)
} else if r.options.RemoveDatasource != "" {
err = r.removeDatasource(r.options.RemoveDatasource)
} else if r.options.DisableReportingSource != "" {
err = r.toggleReportingSource(r.options.DisableReportingSource, false)
} else if r.options.EnableReportingSource != "" {
err = r.toggleReportingSource(r.options.EnableReportingSource, true)
} else if r.options.AddTarget != "" {
err = r.addTarget(r.options.AddTarget)
} else if r.options.AddTemplate != "" {
err = r.addTemplate(r.options.AddTemplate)
} else if r.options.GetTarget != "" {
err = r.getTarget(r.options.GetTarget)
} else if r.options.GetTemplate != "" {
err = r.getTemplate(r.options.GetTemplate)
} else if r.options.RemoveTarget != "" {
err = r.removeTarget(r.options.RemoveTarget)
} else if r.options.RemoveTemplate != "" {
err = r.removeTemplate(r.options.RemoveTemplate)
} else if r.options.ReportingConfig != "" {
err = r.addCloudReportingSource()
} else {
if len(store.Templates())+len(store.Workflows())+len(cloudTemplates) == 0 {
return errors.New("no templates provided for scan")
}
gologger.Info().Msgf("Running scan on cloud with URL %s", r.options.CloudURL)
results, err = r.runCloudEnumeration(store, cloudTemplates, r.cloudTargets, r.options.NoStore, r.options.OutputLimit)
enumeration = true
}
} else {
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
enumeration = true
}
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
enumeration = true
if !enumeration {
return err
@ -680,6 +586,13 @@ func (r *Runner) displayExecutionInfo(store *loader.Store) {
gologger.Info().Msgf("Current nuclei version: %v %v", config.Version, updateutils.GetVersionDescription(config.Version, cfg.LatestNucleiVersion))
gologger.Info().Msgf("Current nuclei-templates version: %v %v", cfg.TemplateVersion, updateutils.GetVersionDescription(cfg.TemplateVersion, cfg.LatestNucleiTemplatesVersion))
if !HideAutoSaveMsg {
if r.pdcpUploadErrMsg != "" {
gologger.Print().Msgf("%s", r.pdcpUploadErrMsg)
} else {
gologger.Info().Msgf("To view results on cloud dashboard, visit %v/scans upon scan completion.", pdcp.DashBoardURL)
}
}
if len(store.Templates()) > 0 {
gologger.Info().Msgf("New templates added in latest release: %d", len(config.DefaultConfig.GetNewAdditions()))
@ -759,3 +672,8 @@ func expandEndVars(f reflect.Value, fieldType reflect.StructField) {
}
}
}
// init seeds the upload toggles from the environment so that CI and
// integration tests can opt out of cloud result uploads ("DISABLE_CLOUD_UPLOAD")
// and the associated warning message ("DISABLE_CLOUD_UPLOAD_WRN").
func init() {
	DisableCloudUpload = env.GetEnvOrDefault("DISABLE_CLOUD_UPLOAD", false)
	HideAutoSaveMsg = env.GetEnvOrDefault("DISABLE_CLOUD_UPLOAD_WRN", false)
}

View File

@ -35,11 +35,6 @@ func (c *CustomTemplatesManager) Update(ctx context.Context) {
func NewCustomTemplatesManager(options *types.Options) (*CustomTemplatesManager, error) {
ctm := &CustomTemplatesManager{providers: []Provider{}}
if options.Cloud {
// if cloud is enabled, custom templates are Nop
return ctm, nil
}
// Add GitHub providers
githubProviders, err := NewGitHubProviders(options)
if err != nil {

View File

@ -0,0 +1,59 @@
package output
import (
"github.com/logrusorgru/aurora"
)
// MultiWriter is an output Writer that fans every call out to a set of
// underlying writers, in the order they were supplied.
type MultiWriter struct {
	writers []Writer
}

// NewMultiWriter returns a MultiWriter wrapping the given writers.
func NewMultiWriter(writers ...Writer) *MultiWriter {
	return &MultiWriter{writers: writers}
}

// Close closes every underlying writer.
func (m *MultiWriter) Close() {
	for _, w := range m.writers {
		w.Close()
	}
}

// Colorizer returns the colorizer of the first underlying writer, or a
// disabled (no-op) colorizer when no writers are configured.
func (m *MultiWriter) Colorizer() aurora.Aurora {
	if len(m.writers) == 0 {
		return aurora.NewAurora(false)
	}
	return m.writers[0].Colorizer()
}

// Write forwards the result event to each writer, stopping at the first error.
func (m *MultiWriter) Write(event *ResultEvent) error {
	for _, w := range m.writers {
		if err := w.Write(event); err != nil {
			return err
		}
	}
	return nil
}

// WriteFailure forwards the failure event to each writer, stopping at the
// first error.
func (m *MultiWriter) WriteFailure(event *InternalWrappedEvent) error {
	for _, w := range m.writers {
		if err := w.WriteFailure(event); err != nil {
			return err
		}
	}
	return nil
}

// Request forwards request tracing information to every writer.
func (m *MultiWriter) Request(templateID, url, requestType string, err error) {
	for _, w := range m.writers {
		w.Request(templateID, url, requestType, err)
	}
}

// WriteStoreDebugData forwards stored debug data to every writer.
func (m *MultiWriter) WriteStoreDebugData(host, templateID, eventType string, data string) {
	for _, w := range m.writers {
		w.WriteStoreDebugData(host, templateID, eventType, data)
	}
}

View File

@ -49,20 +49,22 @@ type Writer interface {
// StandardWriter is a writer writing output to file and screen for results.
type StandardWriter struct {
json bool
jsonReqResp bool
timestamp bool
noMetadata bool
matcherStatus bool
mutex *sync.Mutex
aurora aurora.Aurora
outputFile io.WriteCloser
traceFile io.WriteCloser
errorFile io.WriteCloser
severityColors func(severity.Severity) string
storeResponse bool
storeResponseDir string
json bool
jsonReqResp bool
timestamp bool
noMetadata bool
matcherStatus bool
mutex *sync.Mutex
aurora aurora.Aurora
outputFile io.WriteCloser
traceFile io.WriteCloser
errorFile io.WriteCloser
severityColors func(severity.Severity) string
storeResponse bool
storeResponseDir string
omitTemplate bool
DisableStdout bool
AddNewLinesOutputFile bool // by default this is only done for stdout
}
var decolorizerRegex = regexp.MustCompile(`\x1B\[[0-9;]*[a-zA-Z]`)
@ -243,8 +245,10 @@ func (w *StandardWriter) Write(event *ResultEvent) error {
w.mutex.Lock()
defer w.mutex.Unlock()
_, _ = os.Stdout.Write(data)
_, _ = os.Stdout.Write([]byte("\n"))
if !w.DisableStdout {
_, _ = os.Stdout.Write(data)
_, _ = os.Stdout.Write([]byte("\n"))
}
if w.outputFile != nil {
if !w.json {
@ -253,6 +257,9 @@ func (w *StandardWriter) Write(event *ResultEvent) error {
if _, writeErr := w.outputFile.Write(data); writeErr != nil {
return errors.Wrap(err, "could not write to output")
}
if w.AddNewLinesOutputFile && w.json {
_, _ = w.outputFile.Write([]byte("\n"))
}
}
return nil
}

View File

@ -0,0 +1,124 @@
package output
import (
"io"
"os"
"sync"
"github.com/logrusorgru/aurora"
"github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity"
fileutil "github.com/projectdiscovery/utils/file"
)
// WriterOptions contains configuration options for a writer
type WriterOptions func(s *StandardWriter) error
// WithJson enables JSON-formatted output; dumpReqResp additionally embeds
// the raw request/response pair in each result line.
func WithJson(json bool, dumpReqResp bool) WriterOptions {
	return func(w *StandardWriter) error {
		w.json = json
		w.jsonReqResp = dumpReqResp
		return nil
	}
}
// WithTimestamp toggles whether each result line carries a timestamp.
func WithTimestamp(timestamp bool) WriterOptions {
	return func(w *StandardWriter) error {
		w.timestamp = timestamp
		return nil
	}
}
// WithNoMetadata toggles suppression of result metadata in the output.
func WithNoMetadata(noMetadata bool) WriterOptions {
	return func(w *StandardWriter) error {
		w.noMetadata = noMetadata
		return nil
	}
}
// WithMatcherStatus toggles inclusion of the matcher status in the output.
func WithMatcherStatus(matcherStatus bool) WriterOptions {
	return func(w *StandardWriter) error {
		w.matcherStatus = matcherStatus
		return nil
	}
}
// WithAurora sets the colorizer instance used for terminal output.
func WithAurora(aurora aurora.Aurora) WriterOptions {
	return func(w *StandardWriter) error {
		w.aurora = aurora
		return nil
	}
}
// WithWriter sets the sink that result output is written to.
func WithWriter(outputFile io.WriteCloser) WriterOptions {
	return func(w *StandardWriter) error {
		w.outputFile = outputFile
		return nil
	}
}
// WithTraceSink sets the sink that request trace output is written to.
func WithTraceSink(traceFile io.WriteCloser) WriterOptions {
	return func(w *StandardWriter) error {
		w.traceFile = traceFile
		return nil
	}
}
// WithErrorSink sets the sink that error output is written to.
func WithErrorSink(errorFile io.WriteCloser) WriterOptions {
	return func(w *StandardWriter) error {
		w.errorFile = errorFile
		return nil
	}
}
// WithSeverityColors sets the function used to colorize severity labels.
func WithSeverityColors(severityColors func(severity.Severity) string) WriterOptions {
	return func(w *StandardWriter) error {
		w.severityColors = severityColors
		return nil
	}
}
// WithStoreResponse enables storing raw responses under the given directory.
func WithStoreResponse(storeResponse bool, respDir string) WriterOptions {
	return func(w *StandardWriter) error {
		w.storeResponse = storeResponse
		w.storeResponseDir = respDir
		return nil
	}
}
// NewWriter creates a StandardWriter configured by the given options.
// Stdout echoing is disabled by default and JSON lines written to the output
// file are newline-terminated; when no sink is supplied, results go to stdout.
func NewWriter(opts ...WriterOptions) (*StandardWriter, error) {
	writer := &StandardWriter{
		mutex:                 &sync.Mutex{},
		DisableStdout:         true,
		AddNewLinesOutputFile: true,
	}
	for _, opt := range opts {
		if err := opt(writer); err != nil {
			return nil, err
		}
	}
	// Fall back to sane defaults for anything the options did not set.
	if writer.aurora == nil {
		writer.aurora = aurora.NewAurora(false)
	}
	if writer.outputFile == nil {
		writer.outputFile = os.Stdout
	}
	// Make sure the response-store directory exists before first use.
	if writer.storeResponse && !fileutil.FolderExists(writer.storeResponseDir) {
		if err := fileutil.CreateFolder(writer.storeResponseDir); err != nil {
			return nil, err
		}
	}
	return writer, nil
}

View File

@ -17,7 +17,13 @@ import (
)
// ExtraDebugArgs and ExtraEnvVars are applied to every nuclei invocation made by the test helpers below
var ExtraDebugArgs = []string{}
var (
ExtraDebugArgs = []string{}
ExtraEnvVars = []string{
"DISABLE_CLOUD_UPLOAD_WRN=true",
"DISABLE_CLOUD_UPLOAD=true",
}
)
// RunNucleiTemplateAndGetResults returns a list of results for a template
func RunNucleiTemplateAndGetResults(template, url string, debug bool, extra ...string) ([]string, error) {
@ -56,6 +62,7 @@ func RunNucleiBareArgsAndGetResults(debug bool, env []string, extra ...string) (
if env != nil {
cmd.Env = append(os.Environ(), env...)
}
cmd.Env = append(cmd.Env, ExtraEnvVars...)
if debug {
cmd.Args = append(cmd.Args, "-debug")
cmd.Stderr = os.Stderr
@ -83,6 +90,7 @@ func RunNucleiBareArgsAndGetResults(debug bool, env []string, extra ...string) (
// RunNucleiArgsAndGetResults returns result,and runtime errors
func RunNucleiWithArgsAndGetResults(debug bool, args ...string) ([]string, error) {
cmd := exec.Command("./nuclei", args...)
cmd.Env = append(cmd.Env, ExtraEnvVars...)
if debug {
cmd.Args = append(cmd.Args, "-debug")
cmd.Stderr = os.Stderr
@ -118,6 +126,7 @@ func RunNucleiArgsAndGetErrors(debug bool, env []string, extra ...string) ([]str
cmd.Args = append(cmd.Args, "-interactions-cooldown-period", "10")
cmd.Args = append(cmd.Args, "-allow-local-file-access")
cmd.Args = append(cmd.Args, "-nc") // disable color
cmd.Env = append(cmd.Env, ExtraEnvVars...)
data, err := cmd.CombinedOutput()
if debug {
fmt.Println(string(data))
@ -142,6 +151,7 @@ func RunNucleiArgsWithEnvAndGetResults(debug bool, env []string, extra ...string
cmd := exec.Command("./nuclei")
extra = append(extra, ExtraDebugArgs...)
cmd.Env = append(os.Environ(), env...)
cmd.Env = append(cmd.Env, ExtraEnvVars...)
cmd.Args = append(cmd.Args, extra...)
cmd.Args = append(cmd.Args, "-duc") // disable auto updates
cmd.Args = append(cmd.Args, "-interactions-poll-duration", "5")
@ -176,6 +186,7 @@ var templateLoaded = regexp.MustCompile(`(?:Templates|Workflows) loaded[^:]*: (\
// RunNucleiBinaryAndGetLoadedTemplates returns a list of results for a template
func RunNucleiBinaryAndGetLoadedTemplates(nucleiBinary string, debug bool, args []string) (string, error) {
cmd := exec.Command(nucleiBinary, args...)
cmd.Env = append(cmd.Env, ExtraEnvVars...)
cmd.Args = append(cmd.Args, "-duc") // disable auto updates
if debug {
cmd.Args = append(cmd.Args, "-debug")
@ -197,6 +208,7 @@ func RunNucleiBinaryAndGetLoadedTemplates(nucleiBinary string, debug bool, args
func RunNucleiBinaryAndGetCombinedOutput(debug bool, args []string) (string, error) {
args = append(args, "-interactions-cooldown-period", "10", "-interactions-poll-duration", "1")
cmd := exec.Command("./nuclei", args...)
cmd.Env = append(cmd.Env, ExtraEnvVars...)
if debug {
cmd.Args = append(cmd.Args, "-debug")
fmt.Println(cmd.String())

View File

@ -104,48 +104,6 @@ type Options struct {
MarkdownExportSortMode string
// SarifExport is the file to export sarif output format to
SarifExport string
// CloudURL is the URL for the nuclei cloud endpoint
CloudURL string
// CloudAPIKey is the api-key for the nuclei cloud endpoint
CloudAPIKey string
// ScanList feature to get all the scan ids for a user
ScanList bool
// ListDatasources enables listing of datasources for user
ListDatasources bool
// ListTargets enables listing of targets for user
ListTargets bool
// ListTemplates enables listing of templates for user
ListTemplates bool
// ListReportingSources enables listing of reporting source
ListReportingSources bool
// DisableReportingSource disables a reporting source
DisableReportingSource string
// EnableReportingSource enables a reporting source
EnableReportingSource string
// Limit the number of items at a time
OutputLimit int
// Nostore
NoStore bool
// Delete scan
DeleteScan string
// AddDatasource adds a datasource to cloud storage
AddDatasource string
// RemoveDatasource deletes a datasource from cloud storage
RemoveDatasource string
// AddTemplate adds a list of templates to custom datasource
AddTemplate string
// AddTarget adds a list of targets to custom datasource
AddTarget string
// GetTemplate gets a template by id
GetTemplate string
// GetTarget gets a target by id
GetTarget string
// RemoveTemplate removes a list of templates
RemoveTemplate string
// RemoveTarget removes a list of targets
RemoveTarget string
// Get issues for a scan
ScanOutput string
// ResolversFile is a file containing resolvers for nuclei.
ResolversFile string
// StatsInterval is the number of seconds to display stats after
@ -206,8 +164,6 @@ type Options struct {
ShowBrowser bool
// HeadlessOptionalArguments specifies optional arguments to pass to Chrome
HeadlessOptionalArguments goflags.StringSlice
// NoTables disables pretty printing of cloud results in tables
NoTables bool
// DisableClustering disables clustering of templates
DisableClustering bool
// UseInstalledChrome skips chrome install and use local instance
@ -258,8 +214,6 @@ type Options struct {
JSONExport string
// JSONLExport is the file to export JSONL output format to
JSONLExport string
// Cloud enables nuclei cloud scan execution
Cloud bool
// EnableProgressBar enables progress bar
EnableProgressBar bool
// TemplateDisplay displays the template contents
@ -402,6 +356,8 @@ type Options struct {
SignTemplates bool
// EnableCodeTemplates enables code templates
EnableCodeTemplates bool
// Disables cloud upload
DisableCloudUpload bool
}
// ShouldLoadResume resume file
@ -440,23 +396,6 @@ func DefaultOptions() *Options {
}
}
// HasCloudOptions returns true if cloud options have been specified
func (options *Options) HasCloudOptions() bool {
	// All operands are plain field reads, so eager evaluation here is
	// equivalent to the short-circuiting || chain it replaces.
	cloudFlags := []bool{
		options.ScanList,
		options.DeleteScan != "",
		options.ScanOutput != "",
		options.ListDatasources,
		options.ListTargets,
		options.ListTemplates,
		options.RemoveDatasource != "",
		options.AddTarget != "",
		options.AddTemplate != "",
		options.RemoveTarget != "",
		options.RemoveTemplate != "",
		options.GetTarget != "",
		options.GetTemplate != "",
	}
	for _, set := range cloudFlags {
		if set {
			return true
		}
	}
	return false
}
// ShouldUseHostError reports whether host error tracking is active:
// a positive MaxHostError threshold with host-error skipping not disabled.
func (options *Options) ShouldUseHostError() bool {
	if options.NoHostErrors {
		return false
	}
	return options.MaxHostError > 0
}