mirror of https://github.com/daffainfo/nuclei.git
commit d4e6aac3be
@@ -90,6 +90,13 @@ on extensive configurability, massive extensibility and ease of use.`)
		flagSet.StringVarP(&options.DiskExportDirectory, "markdown-export", "me", "", "directory to export results in markdown format"),
		flagSet.StringVarP(&options.SarifExport, "sarif-export", "se", "", "file to export results in SARIF format"),
		flagSet.StringVar(&options.ESExport, "es-ip", "", "the ip address for elasticsearch. Ex: 127.0.0.1"),
		flagSet.IntVar(&options.ESPort, "es-port", 9200, "the port number on which elasticsearch is running"),
		flagSet.BoolVar(&options.ESSSL, "es-ssl", true, "set to false if plain http is to be used"),
		flagSet.BoolVar(&options.ESSSLVerificaiton, "es-ssl-verify", true, "set to false if ssl verification needs to be disabled"),
		flagSet.StringVar(&options.ESUsername, "es-user", "", "the username for elasticsearch"),
		flagSet.StringVar(&options.ESPassword, "es-password", "", "the password for elasticsearch"),
	)

	createGroup(flagSet, "configs", "Configurations",

@@ -35,6 +35,7 @@ import (
	"github.com/projectdiscovery/nuclei/v2/pkg/protocols/headless/engine"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/disk"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/es"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/sarif"
	"github.com/projectdiscovery/nuclei/v2/pkg/templates"
	"github.com/projectdiscovery/nuclei/v2/pkg/types"

@@ -105,6 +106,22 @@ func New(options *types.Options) (*Runner, error) {
			reportingOptions.DiskExporter = &disk.Options{Directory: options.DiskExportDirectory}
		}
	}
	if options.ESExport != "" {
		es_temp := &es.Options{
			ESIP:              options.ESExport,
			ESPort:            options.ESPort,
			ESSSL:             options.ESSSL,
			ESSSLVerificaiton: options.ESSSLVerificaiton,
			ESUsername:        options.ESUsername,
			ESPassword:        options.ESPassword,
		}
		if reportingOptions != nil {
			reportingOptions.ElasticsearchExporter = es_temp
		} else {
			reportingOptions = &reporting.Options{}
			reportingOptions.ElasticsearchExporter = es_temp
		}
	}
	if options.SarifExport != "" {
		if reportingOptions != nil {
			reportingOptions.SarifExporter = &sarif.Options{File: options.SarifExport}

@@ -0,0 +1,122 @@
package es

import (
	"crypto/tls"
	"errors"
	"fmt"
	"io"
	"net/http"
	"strings"
	"sync"
	"time"

	b64 "encoding/base64"
	"encoding/json"

	"github.com/projectdiscovery/nuclei/v2/pkg/output"
)

// Options contains necessary options required for elasticsearch communication
type Options struct {
	// IP address of the elasticsearch instance
	ESIP string
	// Port of the elasticsearch instance
	ESPort int
	// Enable/Disable SSL
	ESSSL bool
	// Enable/Disable SSL verification
	ESSSLVerificaiton bool
	// Elasticsearch username
	ESUsername string
	// Elasticsearch password
	ESPassword string
}

type data struct {
	Event     *output.ResultEvent `json:"event"`
	Timestamp string              `json:"@timestamp"`
}

// Exporter type for elasticsearch
type Exporter struct {
	elasticsearch *http.Client
	req           *http.Request
	// wg guards the shared request between concurrent Export calls
	wg *sync.Mutex
}

// New creates and returns a new exporter for elasticsearch
func New(option *Options) (*Exporter, error) {
	var ei *Exporter

	tr := &http.Transport{
		// certificate verification is skipped only when it has been explicitly disabled
		TLSClientConfig: &tls.Config{InsecureSkipVerify: !option.ESSSLVerificaiton},
	}
	c := &http.Client{
		Timeout:   5 * time.Second,
		Transport: tr,
	}

	// preparing url for elasticsearch
	url := `http://`
	if option.ESSSL {
		url = `https://`
	}
	url = fmt.Sprintf(url+"%s:%d/nuclei-export/_doc", option.ESIP, option.ESPort)

	// crafting a request
	req2, err := http.NewRequest(http.MethodPost, url, nil)
	if err != nil {
		return nil, err
	}

	// if authentication is required
	if len(option.ESUsername) != 0 && len(option.ESPassword) != 0 {
		auth := b64.StdEncoding.EncodeToString([]byte(option.ESUsername + ":" + option.ESPassword))
		auth = "Basic " + auth
		req2.Header.Add("Authorization", auth)
	}
	req2.Header.Add("Content-Type", "application/json")

	ei = &Exporter{
		elasticsearch: c,
		req:           req2,
		wg:            &sync.Mutex{},
	}

	return ei, nil
}

// Export exports a passed result event to elasticsearch
func (i *Exporter) Export(event *output.ResultEvent) error {
	i.wg.Lock()
	defer i.wg.Unlock()
	defer func() { i.req.Body = nil }()

	d := data{
		Event:     event,
		Timestamp: time.Now().Format(time.RFC3339),
	}
	b, err := json.Marshal(&d)
	if err != nil {
		return err
	}
	i.req.Body = io.NopCloser(strings.NewReader(string(b)))

	res, err := i.elasticsearch.Do(i.req)
	if err != nil {
		return errors.New("error thrown by elasticsearch: " + err.Error())
	}
	defer res.Body.Close()

	b, _ = io.ReadAll(res.Body)
	if res.StatusCode >= 300 {
		return errors.New("elasticsearch responded with an error: " + string(b))
	}

	return nil
}

// Close closes the exporter after operation
func (i *Exporter) Close() error {
	return nil
}
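
For reference, a minimal sketch of how this exporter might be exercised on its own, assuming a reachable elasticsearch instance (127.0.0.1:9200 is only an illustrative value) and using an empty output.ResultEvent as a placeholder payload:

package main

import (
	"log"

	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/es"
)

func main() {
	// illustrative connection values; a real run needs a reachable elasticsearch instance
	exporter, err := es.New(&es.Options{
		ESIP:   "127.0.0.1",
		ESPort: 9200,
		ESSSL:  false,
	})
	if err != nil {
		log.Fatal(err)
	}
	// an empty event as a placeholder; nuclei fills this from actual scan results
	if err := exporter.Export(&output.ResultEvent{}); err != nil {
		log.Fatal(err)
	}
	_ = exporter.Close()
}
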
@@ -11,6 +11,7 @@ import (
	"github.com/projectdiscovery/nuclei/v2/pkg/output"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/dedupe"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/disk"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/es"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/sarif"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/github"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/trackers/gitlab"

@@ -32,7 +33,8 @@ type Options struct {
	// DiskExporter contains configuration options for Disk Exporter Module
	DiskExporter *disk.Options `yaml:"disk"`
	// SarifExporter contains configuration options for Sarif Exporter Module
	SarifExporter *sarif.Options `yaml:"sarif"`
	ElasticsearchExporter *es.Options `yaml:"elasticsearch"`
}

// Filter filters the received event and decides whether to perform

@@ -139,6 +141,13 @@ func New(options *Options, db string) (*Client, error) {
		}
		client.exporters = append(client.exporters, exporter)
	}
	if options.ElasticsearchExporter != nil {
		exporter, err := es.New(options.ElasticsearchExporter)
		if err != nil {
			return nil, errors.Wrap(err, "could not create exporting client")
		}
		client.exporters = append(client.exporters, exporter)
	}

	storage, err := dedupe.New(db)
	if err != nil {

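A rough sketch of how the new ElasticsearchExporter field feeds into the reporting client constructor shown in the hunk above; the dedupe database path used here is a hypothetical example value:

package main

import (
	"log"

	"github.com/projectdiscovery/nuclei/v2/pkg/reporting"
	"github.com/projectdiscovery/nuclei/v2/pkg/reporting/exporters/es"
)

func main() {
	// only the elasticsearch exporter is configured; trackers are left nil
	options := &reporting.Options{
		ElasticsearchExporter: &es.Options{
			ESIP:   "127.0.0.1",
			ESPort: 9200,
		},
	}
	// "/tmp/nuclei-report-db" is a hypothetical dedupe database path
	client, err := reporting.New(options, "/tmp/nuclei-report-db")
	if err != nil {
		log.Fatal(err)
	}
	_ = client
}
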
@@ -152,4 +152,16 @@ type Options struct {
	NoUpdateTemplates bool
	// EnvironmentVariables enables support for environment variables
	EnvironmentVariables bool
	// IP address of the elasticsearch instance to export results to
	ESExport string
	// Port of the elasticsearch instance
	ESPort int
	// Enable/Disable SSL
	ESSSL bool
	// Enable/Disable SSL verification
	ESSSLVerificaiton bool
	// Elasticsearch username
	ESUsername string
	// Elasticsearch password
	ESPassword string
}