Merge branch 'dev' of https://github.com/projectdiscovery/nuclei into update-custom-dir

dev
sandeep 2022-03-09 01:52:08 +05:30
commit 8096737e1a
48 changed files with 485 additions and 290 deletions

View File

@ -18,7 +18,7 @@ jobs:
go-version: 1.17
- name: Check out code
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Build
run: go build .

View File

@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Git Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Get Github tag
id: meta

View File

@ -19,11 +19,12 @@ jobs:
go-version: 1.17
- name: Check out code
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Functional Tests
env:
GH_ACTION: true
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
chmod +x run.sh
bash run.sh ${{ matrix.os }}

View File

@ -1,4 +1,5 @@
name: 🙏🏻 Lint Test
on:
push:
pull_request:
@ -9,10 +10,14 @@ jobs:
name: Lint Test
runs-on: ubuntu-latest
steps:
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: 1.17
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v2
uses: golangci/golangci-lint-action@v3.1.0
with:
version: latest
args: --timeout 5m

View File

@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@master
uses: actions/checkout@v3
with:
persist-credentials: false
fetch-depth: 0

View File

@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
fetch-depth: 0

View File

@ -13,7 +13,7 @@ jobs:
name: SonarCloud
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis

View File

@ -6,7 +6,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: actions/checkout@v3
- uses: actions/setup-go@v2
with:
go-version: 1.17

View File

@ -1,4 +1,4 @@
FROM golang:1.17.7-alpine as build-env
FROM golang:1.17.8-alpine as build-env
RUN go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest
FROM alpine:3.15.0

View File

@ -95,7 +95,7 @@ Flags:
TARGET:
-u, -target string[] target URLs/hosts to scan
-l, -list string path to file containing a list of target URLs/hosts to scan (one per line)
-resume Resume scan using resume.cfg (clustering will be disabled)
-resume string Resume scan using resume.cfg (clustering will be disabled)
TEMPLATES:
-nt, -new-templates run only new templates added in latest nuclei-templates release

View File

@ -6,12 +6,18 @@ cd ../v2/cmd/nuclei
go build
mv nuclei ../../../integration_tests/nuclei
echo "::endgroup::"
echo "::group::Build nuclei integration-test"
cd ../integration-test
go build
mv integration-test ../../../integration_tests/integration-test
cd ../../../integration_tests
echo "::endgroup::"
echo "::group::Installing nuclei templates"
./nuclei -update-templates
echo "::endgroup::"
./integration-test
if [ $? -eq 0 ]
then

View File

@ -49,7 +49,7 @@ func process() error {
return err
}
for _, path := range paths {
data, err := ioutil.ReadFile(path)
data, err := os.ReadFile(path)
if err != nil {
return err
}

View File

@ -15,6 +15,10 @@ echo "::group::Building Nuclei binary from current branch"
go build -o nuclei_dev$extension ../nuclei
echo "::endgroup::"
echo "::group::Installing nuclei templates"
./nuclei_dev$extension -update-templates
echo "::endgroup::"
echo "::group::Building latest release of nuclei"
go build -o nuclei$extension -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei
echo "::endgroup::"

View File

@ -47,8 +47,10 @@
# token: test-token
# # project-name is the name of the project.
# project-name: test-project-name
# # issue-type is the name of the created issue type
# issue-type: bug
# # issue-type is the name of the created issue type (case sensitive)
# issue-type: Bug
# # SeverityAsLabel (optional) sends the severity as the label of the created issue
# severity-as-label: true
#
# elasticsearch contains configuration options for elasticsearch exporter
#elasticsearch:

View File

@ -60,7 +60,11 @@ func main() {
}()
if err := nucleiRunner.RunEnumeration(); err != nil {
gologger.Fatal().Msgf("Could not run nuclei: %s\n", err)
if options.Validate {
gologger.Fatal().Msgf("Could not validate templates: %s\n", err)
} else {
gologger.Fatal().Msgf("Could not run nuclei: %s\n", err)
}
}
nucleiRunner.Close()
// on successful execution remove the resume file in case it exists
@ -188,6 +192,7 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.BoolVar(&options.Version, "version", false, "show nuclei version"),
flagSet.BoolVarP(&options.Verbose, "verbose", "v", false, "show verbose output"),
flagSet.BoolVar(&options.VerboseVerbose, "vv", false, "display templates loaded for scan"),
flagSet.BoolVar(&options.EnablePprof, "enable-pprof", false, "enable pprof debugging server"),
flagSet.BoolVarP(&options.TemplatesVersion, "templates-version", "tv", false, "shows the version of the installed nuclei-templates"),
)

View File

@ -12,7 +12,7 @@ require (
github.com/bluele/gcache v0.0.2
github.com/corpix/uarand v0.1.1
github.com/go-playground/validator/v10 v10.10.0
github.com/go-rod/rod v0.102.0
github.com/go-rod/rod v0.103.0
github.com/gobwas/ws v1.1.0
github.com/google/go-github v17.0.0+incompatible
github.com/itchyny/gojq v0.12.7
@ -41,6 +41,7 @@ require (
github.com/projectdiscovery/stringsutil v0.0.0-20220119085121-22513a958700
github.com/projectdiscovery/yamldoc-go v1.0.3-0.20211126104922-00d2c6bb43b6
github.com/remeh/sizedwaitgroup v1.0.0
github.com/rs/xid v1.3.0
github.com/segmentio/ksuid v1.0.4
github.com/shirou/gopsutil/v3 v3.22.2
github.com/spaolacci/murmur3 v1.1.0
@ -49,7 +50,7 @@ require (
github.com/tj/go-update v2.2.5-0.20200519121640-62b4b798fd68+incompatible
github.com/valyala/fasttemplate v1.2.1
github.com/weppos/publicsuffix-go v0.15.1-0.20210928183822-5ee35905bd95
github.com/xanzy/go-gitlab v0.55.1
github.com/xanzy/go-gitlab v0.58.0
github.com/ysmood/gson v0.6.4 // indirect
github.com/ysmood/leakless v0.7.0 // indirect
go.uber.org/atomic v1.9.0
@ -62,15 +63,17 @@ require (
moul.io/http2curl v1.0.0
)
require github.com/aws/aws-sdk-go v1.43.9
require github.com/aws/aws-sdk-go v1.43.12
require github.com/projectdiscovery/folderutil v0.0.0-20211206150108-b4e7ea80f36e
require (
github.com/Ice3man543/nvd v1.0.8
github.com/docker/go-units v0.4.0
github.com/mholt/archiver v3.1.1+incompatible
github.com/openrdap/rdap v0.9.1-0.20191017185644-af93e7ef17b7
github.com/projectdiscovery/iputil v0.0.0-20210804143329-3a30fcde43f3
github.com/rs/xid v1.3.0
github.com/projectdiscovery/sliceutil v0.0.0-20220225084130-8392ac12fa6d
github.com/stretchr/testify v1.7.0
github.com/zmap/zcrypto v0.0.0-20211005224000-2d0ffdec8a9b
)
@ -94,6 +97,7 @@ require (
github.com/dimchansky/utfbom v1.1.1 // indirect
github.com/dsnet/compress v0.0.1 // indirect
github.com/fatih/structs v1.1.0 // indirect
github.com/frankban/quicktest v1.14.2 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
@ -126,6 +130,8 @@ require (
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/nwaples/rardecode v1.1.2 // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/projectdiscovery/blackrock v0.0.0-20210415162320-b38689ae3a2e // indirect
@ -138,6 +144,7 @@ require (
github.com/ulikunitz/xz v0.5.10 // indirect
github.com/ulule/deepcopier v0.0.0-20200430083143-45decc6639b6 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
github.com/yl2chen/cidranger v1.0.2 // indirect
github.com/ysmood/goob v0.3.1 // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect

View File

@ -83,8 +83,8 @@ github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3st
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.43.9 h1:k1S/29Bp2QD5ZopnGzIn0Sp63yyt3WH1JRE2OOU3Aig=
github.com/aws/aws-sdk-go v1.43.9/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/aws/aws-sdk-go v1.43.12 h1:wOdx6+reSDpUBFEuJDA6edCrojzy8rOtMzhS2rD9+7M=
github.com/aws/aws-sdk-go v1.43.12/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
@ -134,6 +134,8 @@ github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUn
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
@ -146,6 +148,8 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/frankban/quicktest v1.14.2 h1:SPb1KFFmM+ybpEjPUhCCkZOM5xlovT5UbrMvWnXyBns=
github.com/frankban/quicktest v1.14.2/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
@ -166,8 +170,8 @@ github.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh
github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
github.com/go-redis/redis v6.15.5+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-rod/rod v0.91.1/go.mod h1:/W4lcZiCALPD603MnJGIvhtywP3R6yRB9EDfFfsHiiI=
github.com/go-rod/rod v0.102.0 h1:jmfb9YYOQJF9jDCzwH3/rX0M0ZKobXbrAGz75MK7x4s=
github.com/go-rod/rod v0.102.0/go.mod h1:RXSLAlPodTFOmZnwaAQJIcOJ1i835r0uuTGPLO09t/M=
github.com/go-rod/rod v0.103.0 h1:pJPhdZPdbY75iyWMNSqiTYQnHHfuGUj0QY6WJ8B7ot4=
github.com/go-rod/rod v0.103.0/go.mod h1:RXSLAlPodTFOmZnwaAQJIcOJ1i835r0uuTGPLO09t/M=
github.com/goburrow/cache v0.1.4 h1:As4KzO3hgmzPlnaMniZU9+VmoNYseUhuELbxy9mRBfw=
github.com/goburrow/cache v0.1.4/go.mod h1:cDFesZDnIlrHoNlMYqqMpCRawuXulgx+y7mXU8HZ+/c=
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
@ -355,6 +359,8 @@ github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/mholt/acmez v1.0.1 h1:J7uquHOKEmo71UDnVApy1sSLA0oF/r+NtVrNzMKKA9I=
github.com/mholt/acmez v1.0.1/go.mod h1:8qnn8QA/Ewx8E3ZSsmscqsIjhhpxuy9vqdgbX2ceceM=
github.com/mholt/archiver v3.1.1+incompatible h1:1dCVxuqs0dJseYEhi5pl7MYPH9zDa1wBi7mF09cbNkU=
github.com/mholt/archiver v3.1.1+incompatible/go.mod h1:Dh2dOXnSdiLxRiPoVfIr/fI1TwETms9B8CTWfeh7ROU=
github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4=
@ -375,6 +381,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mreiferson/go-httpclient v0.0.0-20160630210159-31f0106b4474/go.mod h1:OQA4XLvDbMgS8P0CevmM4m9Q3Jq4phKUzcocxuGJ5m8=
github.com/ngdinhtoan/glide-cleanup v0.2.0/go.mod h1:UQzsmiDOb8YV3nOsCxK/c9zPpCZVNoHScRE3EO9pVMM=
github.com/nwaples/rardecode v1.1.2 h1:Cj0yZY6T1Zx1R7AhTbyGSALm44/Mmq+BAPc4B/p/d3M=
github.com/nwaples/rardecode v1.1.2/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
@ -400,6 +408,8 @@ github.com/owenrumney/go-sarif v1.0.11/go.mod h1:hTBFbxU7GuVRUvwMx+eStp9M/Oun4xH
github.com/owenrumney/go-sarif v1.1.1 h1:QNObu6YX1igyFKhdzd7vgzmw7XsWN3/6NMGuDzBgXmE=
github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM=
github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@ -466,6 +476,8 @@ github.com/projectdiscovery/retryabledns v1.0.13-0.20211109182249-43d38df59660/g
github.com/projectdiscovery/retryablehttp-go v1.0.1/go.mod h1:SrN6iLZilNG1X4neq1D+SBxoqfAF4nyzvmevkTkWsek=
github.com/projectdiscovery/retryablehttp-go v1.0.2 h1:LV1/KAQU+yeWhNVlvveaYFsjBYRwXlNEq0PvrezMV0U=
github.com/projectdiscovery/retryablehttp-go v1.0.2/go.mod h1:dx//aY9V247qHdsRf0vdWHTBZuBQ2vm6Dq5dagxrDYI=
github.com/projectdiscovery/sliceutil v0.0.0-20220225084130-8392ac12fa6d h1:wIQPYRZEwTeJuoZLv3NT9r+il2fAv1ObRzTdHkNgOxk=
github.com/projectdiscovery/sliceutil v0.0.0-20220225084130-8392ac12fa6d/go.mod h1:QHXvznfPfA5f0AZUIBkbLapoUJJlsIDgUlkKva6dOr4=
github.com/projectdiscovery/stringsutil v0.0.0-20210804142656-fd3c28dbaafe/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
github.com/projectdiscovery/stringsutil v0.0.0-20210823090203-2f5f137e8e1d/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
github.com/projectdiscovery/stringsutil v0.0.0-20210830151154-f567170afdd9/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
@ -561,8 +573,10 @@ github.com/weppos/publicsuffix-go v0.15.1-0.20210928183822-5ee35905bd95/go.mod h
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0 h1:3UeQBvD0TFrlVjOeLOBz+CPAI8dnbqNSVwUwRrkp7vQ=
github.com/wsxiaoys/terminal v0.0.0-20160513160801-0940f3fc43a0/go.mod h1:IXCdmsXIht47RaVFLEdVnh1t+pgYtTAhQGj73kz+2DM=
github.com/xanzy/go-gitlab v0.50.3/go.mod h1:Q+hQhV508bDPoBijv7YjK/Lvlb4PhVhJdKqXVQrUoAE=
github.com/xanzy/go-gitlab v0.55.1 h1:IgX/DS9buV0AUz8fuJPQkdl0fQGfBiAsAHxpun8sNhg=
github.com/xanzy/go-gitlab v0.55.1/go.mod h1:F0QEXwmqiBUxCgJm8fE9S+1veX4XC9Z4cfaAbqwk4YM=
github.com/xanzy/go-gitlab v0.58.0 h1:Entnl8GrVDlc1jd1BlOWhNR0QVQgiO3WDom5DJbT+1s=
github.com/xanzy/go-gitlab v0.58.0/go.mod h1:F0QEXwmqiBUxCgJm8fE9S+1veX4XC9Z4cfaAbqwk4YM=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yl2chen/cidranger v1.0.2 h1:lbOWZVCG1tCRX4u24kuM1Tb4nHqWkDxwLdoS+SevawU=
github.com/yl2chen/cidranger v1.0.2/go.mod h1:9U1yz7WPYDwf0vpNWFaeRh0bjwz5RVgRy/9UEQfHl0g=

View File

@ -2,8 +2,10 @@ package runner
import (
"bufio"
"context"
"encoding/json"
"io/ioutil"
"net/http"
_ "net/http/pprof"
"os"
"path/filepath"
"strings"
@ -58,8 +60,11 @@ type Runner struct {
ratelimiter ratelimit.Limiter
hostErrors *hosterrorscache.Cache
resumeCfg *types.ResumeCfg
pprofServer *http.Server
}
const pprofServerAddress = "127.0.0.1:8086"
// New creates a new client for running enumeration process.
func New(options *types.Options) (*Runner, error) {
runner := &Runner{
@ -113,6 +118,17 @@ func New(options *types.Options) (*Runner, error) {
runner.listAvailableTemplates()
os.Exit(0)
}
if options.EnablePprof {
server := &http.Server{
Addr: pprofServerAddress,
Handler: http.DefaultServeMux,
}
gologger.Info().Msgf("Listening pprof debug server on: %s", pprofServerAddress)
runner.pprofServer = server
go func() {
_ = server.ListenAndServe()
}()
}
if (len(options.Templates) == 0 || !options.NewTemplates || (options.TargetsFilePath == "" && !options.Stdin && len(options.Targets) == 0)) && options.UpdateTemplates {
os.Exit(0)
@ -158,7 +174,7 @@ func New(options *types.Options) (*Runner, error) {
resumeCfg := types.NewResumeCfg()
if runner.options.ShouldLoadResume() {
gologger.Info().Msg("Resuming from save checkpoint")
file, err := ioutil.ReadFile(runner.options.Resume)
file, err := os.ReadFile(runner.options.Resume)
if err != nil {
return nil, err
}
@ -247,6 +263,9 @@ func (r *Runner) Close() {
}
r.hmapInputProvider.Close()
protocolinit.Close()
if r.pprofServer != nil {
_ = r.pprofServer.Shutdown(context.Background())
}
}
// RunEnumeration sets up the input layer for giving input nuclei.

View File

@ -23,6 +23,7 @@ import (
"github.com/google/go-github/github"
"github.com/olekukonko/tablewriter"
"github.com/pkg/errors"
"golang.org/x/oauth2"
"github.com/projectdiscovery/folderutil"
"github.com/projectdiscovery/gologger"
@ -225,7 +226,16 @@ func (r *Runner) checkNucleiIgnoreFileUpdates(configDir string) bool {
// getLatestReleaseFromGithub returns the latest release from GitHub
func (r *Runner) getLatestReleaseFromGithub(latestTag string) (*github.RepositoryRelease, error) {
gitHubClient := github.NewClient(nil)
var tc *http.Client
if token, ok := os.LookupEnv("GITHUB_TOKEN"); ok {
ctx := context.Background()
ts := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: token},
)
tc = oauth2.NewClient(ctx, ts)
}
gitHubClient := github.NewClient(tc)
release, _, err := gitHubClient.Repositories.GetReleaseByTag(context.Background(), userName, repoName, "v"+latestTag)
if err != nil {
@ -253,7 +263,7 @@ func (r *Runner) downloadReleaseAndUnzip(ctx context.Context, version, downloadU
return nil, fmt.Errorf("failed to download a release file from %s: Not successful status %d", downloadURL, res.StatusCode)
}
buf, err := ioutil.ReadAll(res.Body)
buf, err := io.ReadAll(res.Body)
if err != nil {
return nil, fmt.Errorf("failed to create buffer for zip file: %w", err)
}

View File

@ -28,7 +28,7 @@ type Config struct {
const nucleiConfigFilename = ".templates-config.json"
// Version is the current version of nuclei
const Version = `2.6.3-dev`
const Version = `2.6.4-dev`
func getConfigDetails() (string, error) {
configDir, err := GetConfigDir()

View File

@ -149,6 +149,12 @@ func (store *Store) Load() {
store.workflows = store.LoadWorkflows(store.finalWorkflows)
}
var templateIDPathMap map[string]string
func init() {
templateIDPathMap = make(map[string]string)
}
// ValidateTemplates takes a list of templates and validates them
// erroring out on discovering any faulty templates.
func (store *Store) ValidateTemplates(templatesList, workflowsList []string) error {
@ -165,7 +171,7 @@ func (store *Store) ValidateTemplates(templatesList, workflowsList []string) err
if areTemplatesValid(store, filteredTemplatePaths) && areWorkflowsValid(store, filteredWorkflowPaths) {
return nil
}
return errors.New("an error occurred during templates validation")
return errors.New("errors occured during template validation")
}
func areWorkflowsValid(store *Store, filteredWorkflowPaths map[string]struct{}) bool {
@ -182,6 +188,7 @@ func areTemplatesValid(store *Store, filteredTemplatePaths map[string]struct{})
func areWorkflowOrTemplatesValid(store *Store, filteredTemplatePaths map[string]struct{}, isWorkflow bool, load func(templatePath string, tagFilter *filter.TagFilter) (bool, error)) bool {
areTemplatesValid := true
for templatePath := range filteredTemplatePaths {
if _, err := load(templatePath, store.tagFilter); err != nil {
if isParsingError("Error occurred loading template %s: %s\n", templatePath, err) {
@ -196,8 +203,14 @@ func areWorkflowOrTemplatesValid(store *Store, filteredTemplatePaths map[string]
areTemplatesValid = false
}
} else {
if existingTemplatePath, found := templateIDPathMap[template.ID]; !found {
templateIDPathMap[template.ID] = templatePath
} else {
areTemplatesValid = false
gologger.Warning().Msgf("Found duplicate template ID during validation '%s' => '%s': %s\n", templatePath, existingTemplatePath, template.ID)
}
if !isWorkflow && len(template.Workflows) > 0 {
return true
continue
}
}
if isWorkflow {

View File

@ -7,6 +7,7 @@ import (
"github.com/projectdiscovery/nuclei/v2/pkg/operators/extractors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators/matchers"
"github.com/projectdiscovery/sliceutil"
)
// Operators contains the operators that can be applied on protocols
@ -145,10 +146,10 @@ func (r *Result) Merge(result *Result) {
}
for k, v := range result.Matches {
r.Matches[k] = v
r.Matches[k] = sliceutil.Dedupe(append(r.Matches[k], v...))
}
for k, v := range result.Extracts {
r.Extracts[k] = v
r.Extracts[k] = sliceutil.Dedupe(append(r.Extracts[k], v...))
}
r.outputUnique = make(map[string]struct{})
@ -166,12 +167,6 @@ func (r *Result) Merge(result *Result) {
r.OutputExtracts = append(r.OutputExtracts, v)
}
}
for _, v := range result.OutputExtracts {
if _, ok := r.outputUnique[v]; !ok {
r.outputUnique[v] = struct{}{}
r.OutputExtracts = append(r.OutputExtracts, v)
}
}
for k, v := range result.DynamicValues {
r.DynamicValues[k] = v
}
@ -201,7 +196,6 @@ func (operators *Operators) Execute(data map[string]interface{}, match MatchFunc
// Start with the extractors first and evaluate them.
for _, extractor := range operators.Extractors {
var extractorResults []string
for match := range extract(data, extractor) {
extractorResults = append(extractorResults, match)

View File

@ -8,10 +8,10 @@ import (
func TestMakeDynamicValuesCallback(t *testing.T) {
input := map[string][]string{
"a": []string{"1", "2"},
"b": []string{"3"},
"c": []string{},
"d": []string{"A", "B", "C"},
"a": {"1", "2"},
"b": {"3"},
"c": {},
"d": {"A", "B", "C"},
}
count := 0
@ -24,9 +24,9 @@ func TestMakeDynamicValuesCallback(t *testing.T) {
t.Run("all", func(t *testing.T) {
input := map[string][]string{
"a": []string{"1"},
"b": []string{"2"},
"c": []string{"3"},
"a": {"1"},
"b": {"2"},
"c": {"3"},
}
count := 0
@ -40,10 +40,10 @@ func TestMakeDynamicValuesCallback(t *testing.T) {
t.Run("first", func(t *testing.T) {
input := map[string][]string{
"a": []string{"1", "2"},
"b": []string{"3"},
"c": []string{},
"d": []string{"A", "B", "C"},
"a": {"1", "2"},
"b": {"3"},
"c": {},
"d": {"A", "B", "C"},
}
count := 0

View File

@ -65,13 +65,13 @@ func (w *StandardWriter) formatScreen(output *ResultEvent) []byte {
builder.WriteString("]")
}
if len(output.LineCount) > 0 {
if len(output.Lines) > 0 {
builder.WriteString(" [LN: ")
for i, line := range output.LineCount {
for i, line := range output.Lines {
builder.WriteString(strconv.Itoa(line))
if i != len(output.LineCount)-1 {
if i != len(output.Lines)-1 {
builder.WriteString(",")
}
}

View File

@ -105,8 +105,8 @@ type ResultEvent struct {
CURLCommand string `json:"curl-command,omitempty"`
// MatcherStatus is the status of the match
MatcherStatus bool `json:"matcher-status"`
// LineCount is the line count for the specified match
LineCount []int `json:"matched-line"`
// Lines is the line count for the specified match
Lines []int `json:"matched-line"`
FileToIndexPosition map[string]int `json:"-"`
}

View File

@ -1,16 +1,18 @@
package eventcreator
import (
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
)
// CreateEvent wraps the outputEvent with the result of the operators defined on the request
func CreateEvent(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool) *output.InternalWrappedEvent {
return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, func(internalWrappedEvent *output.InternalWrappedEvent) {})
return CreateEventWithAdditionalOptions(request, outputEvent, isResponseDebug, nil)
}
// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request and enables extending the resulting event with additional attributes or values.
// CreateEventWithAdditionalOptions wraps the outputEvent with the result of the operators defined on the request
// and enables extending the resulting event with additional attributes or values.
func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent output.InternalEvent, isResponseDebug bool,
addAdditionalOptions func(internalWrappedEvent *output.InternalWrappedEvent)) *output.InternalWrappedEvent {
event := &output.InternalWrappedEvent{InternalEvent: outputEvent}
@ -19,10 +21,19 @@ func CreateEventWithAdditionalOptions(request protocols.Request, outputEvent out
result, ok := compiledOperator.Execute(outputEvent, request.Match, request.Extract, isResponseDebug)
if ok && result != nil {
event.OperatorsResult = result
addAdditionalOptions(event)
if addAdditionalOptions != nil {
addAdditionalOptions(event)
}
event.Results = append(event.Results, request.MakeResultEvent(event)...)
}
}
}
return event
}
func CreateEventWithOperatorResults(request protocols.Request, internalEvent output.InternalEvent, operatorResult *operators.Result) *output.InternalWrappedEvent {
event := &output.InternalWrappedEvent{InternalEvent: internalEvent}
event.OperatorsResult = operatorResult
event.Results = append(event.Results, request.MakeResultEvent(event)...)
return event
}

View File

@ -4,12 +4,18 @@ import (
"path/filepath"
"strings"
"github.com/docker/go-units"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
)
var (
defaultMaxReadSize, _ = units.FromHumanSize("1Gb")
chunkSize, _ = units.FromHumanSize("100Mb")
)
// Request contains a File matching mechanism for local disk operations.
type Request struct {
// Operators for the current request go here.
@ -34,11 +40,18 @@ type Request struct {
// description: |
// MaxSize is the maximum size of the file to run request on.
//
// By default, nuclei will process 5 MB files and not go more than that.
// By default, nuclei will process 1 GB of content and not go more than that.
// It can be set to much lower or higher depending on use.
// If set to "no" then all content will be processed
// examples:
// - value: 2048
MaxSize int `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
// - value: 5Mb
MaxSize string `yaml:"max-size,omitempty" jsonschema:"title=max size data to run request on,description=Maximum size of the file to run request on"`
maxSize int64
// description: |
// elaborates archives
Archive bool
CompiledOperators *operators.Operators `yaml:"-"`
// cache any variables that may be needed for operation.
@ -66,8 +79,11 @@ var RequestPartDefinitions = map[string]string{
"raw,body,all,data": "Raw contains the raw file contents",
}
// defaultDenylist is the default list of extensions to be denied
var defaultDenylist = []string{".3g2", ".3gp", ".7z", ".apk", ".arj", ".avi", ".axd", ".bmp", ".css", ".csv", ".deb", ".dll", ".doc", ".drv", ".eot", ".exe", ".flv", ".gif", ".gifv", ".gz", ".h264", ".ico", ".iso", ".jar", ".jpeg", ".jpg", ".lock", ".m4a", ".m4v", ".map", ".mkv", ".mov", ".mp3", ".mp4", ".mpeg", ".mpg", ".msi", ".ogg", ".ogm", ".ogv", ".otf", ".pdf", ".pkg", ".png", ".ppt", ".psd", ".rar", ".rm", ".rpm", ".svg", ".swf", ".sys", ".tar.gz", ".tar", ".tif", ".tiff", ".ttf", ".vob", ".wav", ".webm", ".wmv", ".woff", ".woff2", ".xcf", ".xls", ".xlsx", ".zip"}
// defaultDenylist contains common extensions to exclude
var defaultDenylist = []string{".3g2", ".3gp", ".arj", ".avi", ".axd", ".bmp", ".css", ".csv", ".deb", ".dll", ".doc", ".drv", ".eot", ".exe", ".flv", ".gif", ".gifv", ".h264", ".ico", ".iso", ".jar", ".jpeg", ".jpg", ".lock", ".m4a", ".m4v", ".map", ".mkv", ".mov", ".mp3", ".mp4", ".mpeg", ".mpg", ".msi", ".ogg", ".ogm", ".ogv", ".otf", ".pdf", ".pkg", ".png", ".ppt", ".psd", ".rm", ".rpm", ".svg", ".swf", ".sys", ".tif", ".tiff", ".ttf", ".vob", ".wav", ".webm", ".wmv", ".woff", ".woff2", ".xcf", ".xls", ".xlsx"}
// defaultArchiveDenyList contains common archive extensions to exclude
var defaultArchiveDenyList = []string{".7z", ".apk", ".gz", ".rar", ".tar.gz", ".tar", ".zip"}
// GetID returns the unique ID of the request if any.
func (request *Request) GetID() string {
@ -83,10 +99,21 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
}
request.CompiledOperators = compiled
}
// By default, use 1GB (1024 MB) as max size to read.
if request.MaxSize == 0 {
request.MaxSize = 1024 * 1024 * 1024
// By default, use default max size if not defined
switch {
case request.MaxSize != "":
maxSize, err := units.FromHumanSize(request.MaxSize)
if err != nil {
return errors.Wrap(err, "could not compile operators")
}
request.maxSize = maxSize
case request.MaxSize == "no":
request.maxSize = -1
default:
request.maxSize = defaultMaxReadSize
}
request.options = options
request.extensions = make(map[string]struct{})
@ -103,7 +130,13 @@ func (request *Request) Compile(options *protocols.ExecuterOptions) error {
}
}
// process default denylist (extensions)
for _, excludeItem := range defaultDenylist {
var denyList []string
if !request.Archive {
denyList = append(defaultDenylist, defaultArchiveDenyList...)
} else {
denyList = defaultDenylist
}
for _, excludeItem := range denyList {
if !strings.HasPrefix(excludeItem, ".") {
excludeItem = "." + excludeItem
}

View File

@ -1,35 +0,0 @@
package file
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
"github.com/projectdiscovery/nuclei/v2/pkg/model/types/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/testutils"
)
func TestFileCompile(t *testing.T) {
options := testutils.DefaultOptions
testutils.Init(options)
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
NoRecursive: false,
Extensions: []string{"all", ".lock"},
DenyList: []string{".go"},
}
executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{
ID: templateID,
Info: model.Info{SeverityHolder: severity.Holder{Severity: severity.Low}, Name: "test"},
})
err := request.Compile(executerOpts)
require.Nil(t, err, "could not compile file request")
require.Contains(t, request.denyList, ".go", "could not get .go in denylist")
require.NotContains(t, request.extensions, ".go", "could get .go in allowlist")
require.True(t, request.allExtensions, "could not get correct allExtensions")
}

View File

@ -20,7 +20,7 @@ func TestFindInputPaths(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"all", ".lock"},
DenyList: []string{".go"},

View File

@ -1,8 +1,6 @@
package file
import (
"bufio"
"strings"
"time"
"github.com/projectdiscovery/nuclei/v2/pkg/model"
@ -67,7 +65,7 @@ func (request *Request) getMatchPart(part string, data output.InternalEvent) (st
return itemStr, true
}
// responseToDSLMap converts a file response to a map for use in DSL matching
// responseToDSLMap converts a file chunk elaboration to a map for use in DSL matching
func (request *Request) responseToDSLMap(raw, inputFilePath, matchedFileName string) output.InternalEvent {
return output.InternalEvent{
"path": inputFilePath,
@ -81,62 +79,17 @@ func (request *Request) responseToDSLMap(raw, inputFilePath, matchedFileName str
}
// MakeResultEvent creates a result event from internal wrapped event
// Deprecated: unused in stream mode, must be present for interface compatibility
func (request *Request) MakeResultEvent(wrapped *output.InternalWrappedEvent) []*output.ResultEvent {
results := protocols.MakeDefaultResultEvent(request, wrapped)
raw, ok := wrapped.InternalEvent["raw"]
if !ok {
return results
}
rawStr, ok := raw.(string)
if !ok {
return results
}
for _, result := range results {
lineWords := make(map[string]struct{})
if wrapped.OperatorsResult != nil {
for _, value := range wrapped.OperatorsResult.Matches {
for _, v := range value {
lineWords[v] = struct{}{}
}
}
}
if len(result.ExtractedResults) > 0 {
for _, v := range result.ExtractedResults {
lineWords[v] = struct{}{}
}
}
result.LineCount = calculateLineFunc(rawStr, lineWords)
}
// Identify the position of match in file using a dirty hack.
for _, result := range results {
for _, extraction := range result.ExtractedResults {
scanner := bufio.NewScanner(strings.NewReader(rawStr))
line := 1
for scanner.Scan() {
if strings.Contains(scanner.Text(), extraction) {
if result.FileToIndexPosition == nil {
result.FileToIndexPosition = make(map[string]int)
}
result.FileToIndexPosition[result.Matched] = line
continue
}
line++
}
}
}
return results
return protocols.MakeDefaultResultEvent(request, wrapped)
}
// GetCompiledOperators returns the compiled operators for this request as a
// single-element slice, satisfying the protocols.Request interface.
func (request *Request) GetCompiledOperators() []*operators.Operators {
	compiled := []*operators.Operators{request.CompiledOperators}
	return compiled
}
// MakeResultEventItem
// Deprecated: unused in stream mode, must be present for interface compatibility
func (request *Request) MakeResultEventItem(wrapped *output.InternalWrappedEvent) *output.ResultEvent {
data := &output.ResultEvent{
MatcherStatus: true,

View File

@ -21,7 +21,7 @@ func TestResponseToDSLMap(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -46,7 +46,7 @@ func TestFileOperatorMatch(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -134,7 +134,7 @@ func TestFileOperatorExtract(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},
@ -241,7 +241,7 @@ func testFileMakeResult(t *testing.T, matchers []*matchers.Matcher, matcherCondi
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"*", ".lock"},
DenyList: []string{".go"},

View File

@ -1,22 +1,26 @@
package file
import (
"bufio"
"encoding/hex"
"io/ioutil"
"io"
"os"
"sort"
"path/filepath"
"strings"
"github.com/docker/go-units"
"github.com/mholt/archiver"
"github.com/pkg/errors"
"github.com/remeh/sizedwaitgroup"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v2/pkg/operators"
"github.com/projectdiscovery/nuclei/v2/pkg/output"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/eventcreator"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/helpers/responsehighlighter"
"github.com/projectdiscovery/nuclei/v2/pkg/protocols/common/tostring"
templateTypes "github.com/projectdiscovery/nuclei/v2/pkg/templates/types"
"github.com/projectdiscovery/sliceutil"
)
var _ protocols.Request = &Request{}
@ -26,55 +30,102 @@ func (request *Request) Type() templateTypes.ProtocolType {
return templateTypes.FileProtocol
}
// ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (request *Request) ExecuteWithResults(input string, metadata /*TODO review unused parameter*/, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
wg := sizedwaitgroup.New(request.options.Options.BulkSize)
// FileMatch records a single matcher or extractor hit found while scanning a
// file line by line, together with its location and the raw line content.
type FileMatch struct {
	Data      string // matched or extracted value
	Line      int    // 1-based line number where the hit occurred
	ByteIndex int    // byte offset of the start of the matched line
	Match     bool   // true when produced by a matcher
	Extract   bool   // true when produced by an extractor
	Expr      string // matcher/extractor expression (or output extract) that produced Data
	Raw       string // raw content of the line that produced the hit
}
err := request.getInputPaths(input, func(data string) {
var emptyResultErr = errors.New("Empty result")
// ExecuteWithResults executes the protocol requests and returns results instead of writing them.
func (request *Request) ExecuteWithResults(input string, metadata, previous output.InternalEvent, callback protocols.OutputEventCallback) error {
wg := sizedwaitgroup.New(request.options.Options.BulkSize)
err := request.getInputPaths(input, func(filePath string) {
request.options.Progress.AddToTotal(1)
wg.Add()
go func(filePath string) {
func(filePath string) {
defer wg.Done()
file, err := os.Open(filePath)
if err != nil {
gologger.Error().Msgf("Could not open file path %s: %s\n", filePath, err)
return
archiveReader, _ := archiver.ByExtension(filePath)
switch {
case archiveReader != nil:
switch archiveInstance := archiveReader.(type) {
case archiver.Walker:
err := archiveInstance.Walk(filePath, func(file archiver.File) error {
if !request.validatePath("/", file.Name()) {
return nil
}
archiveFileName := filepath.Join(filePath, file.Name())
event, fileMatches, err := request.processReader(file.ReadCloser, archiveFileName, input, file.Size(), previous)
if err != nil {
if errors.Is(err, emptyResultErr) {
return nil
}
return err
}
defer file.Close()
dumpResponse(event, request.options, fileMatches, filePath)
callback(event)
request.options.Progress.IncrementRequests()
return nil
})
if err != nil {
gologger.Error().Msgf("%s\n", err)
return
}
case archiver.Decompressor:
file, err := os.Open(filePath)
if err != nil {
gologger.Error().Msgf("%s\n", err)
return
}
defer file.Close()
fileStat, _ := file.Stat()
tmpFileOut, err := os.CreateTemp("", "")
if err != nil {
gologger.Error().Msgf("%s\n", err)
return
}
defer tmpFileOut.Close()
defer os.RemoveAll(tmpFileOut.Name())
if err := archiveInstance.Decompress(file, tmpFileOut); err != nil {
gologger.Error().Msgf("%s\n", err)
return
}
_ = tmpFileOut.Sync()
// rewind the file
_, _ = tmpFileOut.Seek(0, 0)
event, fileMatches, err := request.processReader(tmpFileOut, filePath, input, fileStat.Size(), previous)
if err != nil {
if !errors.Is(err, emptyResultErr) {
gologger.Error().Msgf("%s\n", err)
}
return
}
dumpResponse(event, request.options, fileMatches, filePath)
callback(event)
request.options.Progress.IncrementRequests()
}
default:
// normal file
event, fileMatches, err := request.processFile(filePath, input, previous)
if err != nil {
if !errors.Is(err, emptyResultErr) {
gologger.Error().Msgf("%s\n", err)
}
return
}
dumpResponse(event, request.options, fileMatches, filePath)
callback(event)
request.options.Progress.IncrementRequests()
}
defer file.Close()
stat, err := file.Stat()
if err != nil {
gologger.Error().Msgf("Could not stat file path %s: %s\n", filePath, err)
return
}
if stat.Size() >= int64(request.MaxSize) {
gologger.Verbose().Msgf("Could not process path %s: exceeded max size\n", filePath)
return
}
buffer, err := ioutil.ReadAll(file)
if err != nil {
gologger.Error().Msgf("Could not read file path %s: %s\n", filePath, err)
return
}
fileContent := tostring.UnsafeToString(buffer)
gologger.Verbose().Msgf("[%s] Sent FILE request to %s", request.options.TemplateID, filePath)
outputEvent := request.responseToDSLMap(fileContent, input, filePath)
for k, v := range previous {
outputEvent[k] = v
}
event := eventcreator.CreateEvent(request, outputEvent, request.options.Options.Debug || request.options.Options.DebugResponse)
dumpResponse(event, request.options, fileContent, filePath)
callback(event)
request.options.Progress.IncrementRequests()
}(data)
}(filePath)
})
wg.Wait()
if err != nil {
request.options.Output.Request(request.options.TemplatePath, input, request.Type().String(), err)
@ -84,52 +135,159 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return nil
}
func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, fileContent string, filePath string) {
cliOptions := requestOptions.Options
if cliOptions.Debug || cliOptions.DebugResponse {
hexDump := false
if responsehighlighter.HasBinaryContent(fileContent) {
hexDump = true
fileContent = hex.Dump([]byte(fileContent))
}
highlightedResponse := responsehighlighter.Highlight(event.OperatorsResult, fileContent, cliOptions.NoColor, hexDump)
gologger.Debug().Msgf("[%s] Dumped file request for %s\n\n%s", requestOptions.TemplateID, filePath, highlightedResponse)
// processFile opens the file at filePath and delegates match/extract
// processing to processReader, which caps reads at request.maxSize bytes.
// Returns emptyResultErr (via processReader) when nothing matches.
func (request *Request) processFile(filePath, input string, previousInternalEvent output.InternalEvent) (*output.InternalWrappedEvent, []FileMatch, error) {
	file, err := os.Open(filePath)
	if err != nil {
		return nil, nil, errors.Errorf("Could not open file path %s: %s\n", filePath, err)
	}
	defer file.Close()

	stat, err := file.Stat()
	if err != nil {
		return nil, nil, errors.Errorf("Could not stat file path %s: %s\n", filePath, err)
	}
	// Warn only when a positive size cap is configured: maxSize is -1 when the
	// template requests no limit ("no"), and stat.Size() >= -1 always holds,
	// which previously produced a spurious warning for every file.
	if request.maxSize > 0 && stat.Size() >= request.maxSize {
		maxSizeString := units.HumanSize(float64(request.maxSize))
		gologger.Verbose().Msgf("Limiting %s processed data to %s bytes: exceeded max size\n", filePath, maxSizeString)
	}
	return request.processReader(file, filePath, input, stat.Size(), previousInternalEvent)
}
func getAllStringSubmatchIndex(content string, word string) []int {
indexes := []int{}
start := 0
for {
v := strings.Index(content[start:], word)
if v == -1 {
break
}
indexes = append(indexes, v+start)
start += len(word) + v
func (request *Request) processReader(reader io.Reader, filePath, input string, totalBytes int64, previousInternalEvent output.InternalEvent) (*output.InternalWrappedEvent, []FileMatch, error) {
fileReader := io.LimitReader(reader, request.maxSize)
fileMatches, opResult := request.findMatchesWithReader(fileReader, input, filePath, totalBytes, previousInternalEvent)
if opResult == nil && len(fileMatches) == 0 {
return nil, nil, emptyResultErr
}
return indexes
// build event structure to interface with internal logic
return request.buildEvent(input, filePath, fileMatches, opResult, previousInternalEvent), fileMatches, nil
}
func calculateLineFunc(contents string, words map[string]struct{}) []int {
var lines []int
func (request *Request) findMatchesWithReader(reader io.Reader, input, filePath string, totalBytes int64, previous output.InternalEvent) ([]FileMatch, *operators.Result) {
var bytesCount, linesCount, wordsCount int
isResponseDebug := request.options.Options.Debug || request.options.Options.DebugResponse
totalBytesString := units.BytesSize(float64(totalBytes))
for word := range words {
matches := getAllStringSubmatchIndex(contents, word)
scanner := bufio.NewScanner(reader)
buffer := []byte{}
scanner.Buffer(buffer, int(chunkSize))
for _, index := range matches {
lineCount := int(0)
for _, c := range contents[:index] {
if c == '\n' {
lineCount++
var fileMatches []FileMatch
var opResult *operators.Result
for scanner.Scan() {
lineContent := scanner.Text()
n := len(lineContent)
// update counters
currentBytes := bytesCount + n
processedBytes := units.BytesSize(float64(currentBytes))
gologger.Verbose().Msgf("[%s] Processing file %s chunk %s/%s", request.options.TemplateID, filePath, processedBytes, totalBytesString)
dslMap := request.responseToDSLMap(lineContent, input, filePath)
for k, v := range previous {
dslMap[k] = v
}
discardEvent := eventcreator.CreateEvent(request, dslMap, isResponseDebug)
newOpResult := discardEvent.OperatorsResult
if newOpResult != nil {
if opResult == nil {
opResult = newOpResult
} else {
opResult.Merge(newOpResult)
}
if newOpResult.Matched || newOpResult.Extracted {
if newOpResult.Extracts != nil {
for expr, extracts := range newOpResult.Extracts {
for _, extract := range extracts {
fileMatches = append(fileMatches, FileMatch{
Data: extract,
Extract: true,
Line: linesCount + 1,
ByteIndex: bytesCount,
Expr: expr,
Raw: lineContent,
})
}
}
}
if newOpResult.Matches != nil {
for expr, matches := range newOpResult.Matches {
for _, match := range matches {
fileMatches = append(fileMatches, FileMatch{
Data: match,
Match: true,
Line: linesCount + 1,
ByteIndex: bytesCount,
Expr: expr,
Raw: lineContent,
})
}
}
}
for _, outputExtract := range newOpResult.OutputExtracts {
fileMatches = append(fileMatches, FileMatch{
Data: outputExtract,
Match: true,
Line: linesCount + 1,
ByteIndex: bytesCount,
Expr: outputExtract,
Raw: lineContent,
})
}
}
if lineCount > 0 {
lines = append(lines, lineCount+1)
}
currentLinesCount := 1 + strings.Count(lineContent, "\n")
linesCount += currentLinesCount
wordsCount += strings.Count(lineContent, " ")
bytesCount = currentBytes
}
return fileMatches, opResult
}
// buildEvent assembles an output event for the file protocol from the
// collected file matches and the merged operator result, enriching each
// result with the deduplicated line numbers its matcher/extractor fired on.
func (request *Request) buildEvent(input, filePath string, fileMatches []FileMatch, operatorResult *operators.Result, previous output.InternalEvent) *output.InternalWrappedEvent {
	// Map each matcher/extractor expression to the lines it matched on.
	// (A byte-offset map was previously built here too but never read.)
	exprLines := make(map[string][]int)
	internalEvent := request.responseToDSLMap("", input, filePath)
	for k, v := range previous {
		internalEvent[k] = v
	}
	for _, fileMatch := range fileMatches {
		exprLines[fileMatch.Expr] = append(exprLines[fileMatch.Expr], fileMatch.Line)
	}
	event := eventcreator.CreateEventWithOperatorResults(request, internalEvent, operatorResult)
	for _, result := range event.Results {
		switch {
		case result.MatcherName != "":
			result.Lines = exprLines[result.MatcherName]
		case result.ExtractorName != "":
			result.Lines = exprLines[result.ExtractorName]
		default:
			// No named matcher/extractor: attribute lines through the
			// extracted values themselves (keyed by expression above).
			for _, extractedResult := range result.ExtractedResults {
				result.Lines = append(result.Lines, exprLines[extractedResult]...)
			}
		}
		result.Lines = sliceutil.DedupeInt(result.Lines)
	}
	return event
}
// dumpResponse debug-prints each matched/extracted file snippet (hex-dumped
// when binary) with operator-result highlighting, when -debug or
// -debug-response is enabled. The trailing statements left over from the
// removed calculateLineFunc (sort.Ints/return) made this span invalid Go
// and are dropped.
func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, filematches []FileMatch, filePath string) {
	cliOptions := requestOptions.Options
	if cliOptions.Debug || cliOptions.DebugResponse {
		for _, fileMatch := range filematches {
			lineContent := fileMatch.Raw
			hexDump := false
			if responsehighlighter.HasBinaryContent(lineContent) {
				hexDump = true
				lineContent = hex.Dump([]byte(lineContent))
			}
			highlightedResponse := responsehighlighter.Highlight(event.OperatorsResult, lineContent, cliOptions.NoColor, hexDump)
			gologger.Debug().Msgf("[%s] Dumped match/extract file snippet for %s at line %d\n\n%s", requestOptions.TemplateID, filePath, fileMatch.Line, highlightedResponse)
		}
	}
}

View File

@ -24,7 +24,7 @@ func TestFileExecuteWithResults(t *testing.T) {
templateID := "testing-file"
request := &Request{
ID: templateID,
MaxSize: 1024,
MaxSize: "1Gb",
NoRecursive: false,
Extensions: []string{"all"},
DenyList: []string{".go"},
@ -77,17 +77,3 @@ func TestFileExecuteWithResults(t *testing.T) {
require.Equal(t, "1.1.1.1", finalEvent.Results[0].ExtractedResults[0], "could not get correct extracted results")
finalEvent = nil
}
// TestGenerateNewLineIndexes verifies that calculateLineFunc reports the
// 1-based line numbers on which any of the supplied words occur.
func TestGenerateNewLineIndexes(t *testing.T) {
	words := map[string]struct{}{
		"SuspiciousOperation": {},
		"RequestDataTooBig":   {},
	}
	content := `aaa
bbb
ccc
RequestDataTooBig
dddd
eeee
RequestDataTooBig
dd
RequestDataTooBig3
SuspiciousOperation`
	got := calculateLineFunc(content, words)
	require.ElementsMatch(t, []int{4, 7, 9, 10}, got, "could not calculate correct lines")
}

View File

@ -2,7 +2,7 @@ package engine
import (
"fmt"
"io/ioutil"
"io"
"net/http"
"net/http/httptest"
"net/url"
@ -414,7 +414,7 @@ func TestActionSetBody(t *testing.T) {
}
handler := func(w http.ResponseWriter, r *http.Request) {
body, _ := ioutil.ReadAll(r.Body)
body, _ := io.ReadAll(r.Body)
_, _ = fmt.Fprintln(w, string(body))
}

View File

@ -3,7 +3,6 @@ package race
import (
"fmt"
"io"
"io/ioutil"
"time"
)
@ -23,7 +22,7 @@ func NewSyncedReadCloser(r io.ReadCloser) *SyncedReadCloser {
s SyncedReadCloser
err error
)
s.data, err = ioutil.ReadAll(r)
s.data, err = io.ReadAll(r)
if err != nil {
return nil
}

View File

@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"io"
"io/ioutil"
"net/url"
"path"
"strings"
@ -134,7 +133,7 @@ func Parse(request, baseURL string, unsafe bool) (*Request, error) {
}
// Set the request body
b, err := ioutil.ReadAll(reader)
b, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("could not read request body: %w", err)
}

View File

@ -487,7 +487,7 @@ func (request *Request) executeRequest(reqURL string, generatedRequest *generate
} else {
bodyReader = resp.Body
}
data, err := ioutil.ReadAll(bodyReader)
data, err := io.ReadAll(bodyReader)
if err != nil {
// Ignore body read due to server misconfiguration errors
if stringsutil.ContainsAny(err.Error(), "gzip: invalid header") {
@ -564,7 +564,8 @@ func (request *Request) executeRequest(reqURL string, generatedRequest *generate
}
responseContentType := resp.Header.Get("Content-Type")
dumpResponse(event, request.options, response.fullResponse, formedURL, responseContentType)
isResponseTruncated := len(gotData) >= request.MaxSize
dumpResponse(event, request.options, response.fullResponse, formedURL, responseContentType, isResponseTruncated)
callback(event)
}
@ -622,7 +623,7 @@ func (request *Request) setCustomHeaders(req *generatedRequest) {
const CRLF = "\r\n"
func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, redirectedResponse []byte, formedURL string, responseContentType string) {
func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.ExecuterOptions, redirectedResponse []byte, formedURL string, responseContentType string, isResponseTruncated bool) {
cliOptions := requestOptions.Options
if cliOptions.Debug || cliOptions.DebugResponse {
response := string(redirectedResponse)
@ -634,7 +635,12 @@ func dumpResponse(event *output.InternalWrappedEvent, requestOptions *protocols.
highlightedResult = responsehighlighter.Highlight(event.OperatorsResult, response, cliOptions.NoColor, false)
}
gologger.Debug().Msgf("[%s] Dumped HTTP response for %s\n\n%s", requestOptions.TemplateID, formedURL, highlightedResult)
msg := "[%s] Dumped HTTP response %s\n\n%s"
if isResponseTruncated {
msg = "[%s] Dumped HTTP response (Truncated) %s\n\n%s"
}
gologger.Debug().Msgf(msg, requestOptions.TemplateID, formedURL, highlightedResult)
}
}

View File

@ -4,7 +4,7 @@ import (
"bytes"
"context"
"errors"
"io/ioutil"
"io"
"net/http"
"time"
@ -89,7 +89,7 @@ func (awsSigner *AwsSigner) SignHTTP(request *http.Request, args interface{}) er
awsSigner.prepareRequest(request)
var body *bytes.Reader
if request.Body != nil {
bodyBytes, err := ioutil.ReadAll(request.Body)
bodyBytes, err := io.ReadAll(request.Body)
if err != nil {
return err
}

View File

@ -63,7 +63,7 @@ func dumpResponseWithRedirectChain(resp *http.Response, body []byte) ([]redirect
break
}
if redirectResp.Body != nil {
body, _ = ioutil.ReadAll(redirectResp.Body)
body, _ = io.ReadAll(redirectResp.Body)
}
respObj := redirectedResponse{
headers: respData,
@ -162,7 +162,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
}
defer reader.Close()
bodyDec, err = ioutil.ReadAll(reader)
bodyDec, err = io.ReadAll(reader)
if err != nil {
return bodyOrig, err
}
@ -173,7 +173,7 @@ func handleDecompression(resp *http.Response, bodyOrig []byte) (bodyDec []byte,
func decodeGBK(s []byte) ([]byte, error) {
I := bytes.NewReader(s)
O := transform.NewReader(I, simplifiedchinese.GBK.NewDecoder())
d, e := ioutil.ReadAll(O)
d, e := io.ReadAll(O)
if e != nil {
return nil, e
}

View File

@ -2,7 +2,7 @@ package offlinehttp
import (
"fmt"
"io/ioutil"
"io"
"net/http"
"net/http/httptest"
"net/http/httputil"
@ -149,7 +149,7 @@ Server: Google Frontend
resp, err := readResponseFromString(tt.data)
require.Nil(t, err, "could not read response from string")
respData, err := ioutil.ReadAll(resp.Body)
respData, err := io.ReadAll(resp.Body)
require.Nil(t, err, "could not read response body")
require.Equal(t, expectedBody, string(respData), "could not get correct parsed body")
require.Equal(t, "Google Frontend", resp.Header.Get("Server"), "could not get correct headers")
@ -190,7 +190,7 @@ Server: Google Frontend
respData, err := readResponseFromString(string(b))
require.Nil(t, err, "could not read response from string")
_, err = ioutil.ReadAll(respData.Body)
_, err = io.ReadAll(respData.Body)
require.Nil(t, err, "could not read response body")
require.Equal(t, "Google Frontend", respData.Header.Get("Server"), "could not get correct headers")

View File

@ -1,7 +1,7 @@
package offlinehttp
import (
"io/ioutil"
"io"
"net/http"
"net/http/httputil"
"os"
@ -54,7 +54,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return
}
buffer, err := ioutil.ReadAll(file)
buffer, err := io.ReadAll(file)
if err != nil {
gologger.Error().Msgf("Could not read file path %s: %s\n", data, err)
return
@ -79,7 +79,7 @@ func (request *Request) ExecuteWithResults(input string, metadata /*TODO review
return
}
body, err := ioutil.ReadAll(resp.Body)
body, err := io.ReadAll(resp.Body)
if err != nil {
gologger.Error().Msgf("Could not read raw http response body %s: %s\n", data, err)
return

View File

@ -3,7 +3,7 @@ package utils
import (
"crypto/tls"
"crypto/x509"
"io/ioutil"
"os"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
@ -22,7 +22,7 @@ func AddConfiguredClientCertToRequest(tlsConfig *tls.Config, options *types.Opti
tlsConfig.Certificates = []tls.Certificate{cert}
// Load the certificate authority PEM certificate into the TLS configuration
caCert, err := ioutil.ReadFile(options.ClientCAFile)
caCert, err := os.ReadFile(options.ClientCAFile)
if err != nil {
return nil, err
}

View File

@ -6,6 +6,7 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"time"
@ -108,7 +109,7 @@ func (exporter *Exporter) Export(event *output.ResultEvent) error {
return err
}
b, err = ioutil.ReadAll(res.Body)
b, err = io.ReadAll(res.Body)
if err != nil {
return errors.New(err.Error() + "error thrown by elasticsearch " + string(b))
}

View File

@ -3,7 +3,7 @@ package jira
import (
"bytes"
"fmt"
"io/ioutil"
"io"
"strings"
"github.com/andygrunwald/go-jira"
@ -65,7 +65,7 @@ func New(options *Options) (*Integration, error) {
func (i *Integration) CreateNewIssue(event *output.ResultEvent) error {
summary := format.Summary(event)
labels := []string{}
severityLabel := fmt.Sprintf("Severity: %s", event.Info.SeverityHolder.Severity.String())
severityLabel := fmt.Sprintf("Severity:%s", event.Info.SeverityHolder.Severity.String())
if i.options.SeverityAsLabel && severityLabel != "" {
labels = append(labels, severityLabel)
}
@ -90,7 +90,7 @@ func (i *Integration) CreateNewIssue(event *output.ResultEvent) error {
Type: jira.IssueType{Name: i.options.IssueType},
Project: jira.Project{Key: i.options.ProjectName},
Summary: summary,
Labels: []string{severityLabel},
Labels: labels,
}
}
@ -101,7 +101,7 @@ func (i *Integration) CreateNewIssue(event *output.ResultEvent) error {
if err != nil {
var data string
if resp != nil && resp.Body != nil {
d, _ := ioutil.ReadAll(resp.Body)
d, _ := io.ReadAll(resp.Body)
data = string(d)
}
return fmt.Errorf("%w => %s", err, data)
@ -138,7 +138,7 @@ func (i *Integration) FindExistingIssue(event *output.ResultEvent) (string, erro
if err != nil {
var data string
if resp != nil && resp.Body != nil {
d, _ := ioutil.ReadAll(resp.Body)
d, _ := io.ReadAll(resp.Body)
data = string(d)
}
return "", fmt.Errorf("%w => %s", err, data)

View File

@ -45,6 +45,7 @@ func RunNucleiAndGetResults(isTemplate bool, template, url string, debug bool, e
func RunNucleiBareArgsAndGetResults(debug bool, extra ...string) ([]string, error) {
cmd := exec.Command("./nuclei")
cmd.Args = append(cmd.Args, extra...)
cmd.Args = append(cmd.Args, "-duc") // disable auto updates
if debug {
cmd.Args = append(cmd.Args, "-debug")
cmd.Stderr = os.Stderr
@ -74,6 +75,7 @@ var templateLoaded = regexp.MustCompile(`(?:Templates|Workflows) loaded[^:]*: (\
// RunNucleiBinaryAndGetLoadedTemplates returns a list of results for a template
func RunNucleiBinaryAndGetLoadedTemplates(nucleiBinary string, debug bool, args []string) (string, error) {
cmd := exec.Command(nucleiBinary, args...)
cmd.Args = append(cmd.Args, "-duc") // disable auto updates
if debug {
cmd.Args = append(cmd.Args, "-debug")
fmt.Println(cmd.String())

View File

@ -198,6 +198,8 @@ type Options struct {
ClientCAFile string
// Use ZTLS library
ZTLS bool
// EnablePprof enables exposing pprof runtime information with a webserver.
EnablePprof bool
}
func (options *Options) AddVarPayload(key string, value interface{}) {

View File

@ -2,7 +2,7 @@ package utils
import (
"errors"
"io/ioutil"
"io"
"net/http"
"net/url"
"os"
@ -51,7 +51,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) {
return nil, err
}
defer resp.Body.Close()
data, err = ioutil.ReadAll(resp.Body)
data, err = io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
@ -61,7 +61,7 @@ func ReadFromPathOrURL(templatePath string) (data []byte, err error) {
return nil, err
}
defer f.Close()
data, err = ioutil.ReadAll(f)
data, err = io.ReadAll(f)
if err != nil {
return nil, err
}