Testing Environment

Config API Keys and Tokens

GitHub access token: ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx (use your own token; never publish a real one)
DOM-fear.sh
#!/bin/bash

figlet -f slant  -c "Start Hacking" | lolcat && figlet -f digital  -c "Hack to Learn" | lolcat && figlet -f mini -c "DOM Fear" | lolcat

# Path to the file containing URLs
url_file="js.txt"

# Check if the file exists
if [ ! -f "$url_file" ]; then
  echo "File $url_file not found!"
  exit 1
fi

# Customize grep highlight color (e.g., green)
export GREP_COLORS='mt=01;32'

# Read each URL from the file
while IFS= read -r url; do
  echo "Scanning $url..."
  
  # Fetch the page and grep for common DOM-XSS sources and sinks with highlighting
  curl -s -X GET "$url" | grep -i --color=always -E 'location\.href|location\.search|location\.hash|location\.pathname|window\.location|document\.URL|parameter|innerHTML|outerHTML|document\.write|document\.writeln|var ='
  
  echo "----------------------------------------"
done < "$url_file"
js-fear.sh
#!/bin/bash

figlet -f slant  -c "Start Hacking" | lolcat && figlet -f digital  -c "Hack to Learn" | lolcat && figlet -f mini -c "JS Fear" | lolcat

# Path to the file containing URLs
url_file="js.txt"

# Check if the file exists
if [ ! -f "$url_file" ]; then
  echo "File $url_file not found!"
  exit 1
fi

# Customize grep highlight color (e.g., green)
export GREP_COLORS='mt=01;32'

# Read each URL from the file
while IFS= read -r url; do
  echo "Scanning $url..."
  
  # Run the curl command and grep for sensitive keywords with highlighting
  curl -s -X GET "$url" | grep -i --color=always -E 'password|pwd|pass|passphrase|credentials|encryptKey|appKey|token|secret|Authorization|Key|private'
  
  echo "----------------------------------------"
done < "$url_file"
subdomain.sh
#!/bin/bash

figlet -f slant  -c "Start Hacking" | lolcat && figlet -f digital  -c "Hack to Learn" | lolcat && figlet -f mini -c "Sub Fear" | lolcat

# Step 1: Accept the domain name from the user
echo -e "\033[1;34mEnter the domain name:\033[0m"
read -r domain

# Step 2: Recreate the subdomains_output directory
output_dir="subdomains_output"
rm -rf "$output_dir"
mkdir -p "$output_dir"

# Step 3: Run My Passive and Active Sources and save output in subdomains_output Directory
echo "Running Script with passive sources (subfinder, assetfinder, findomain, github-subdomains, crt.sh, web.archive)..."
figlet -f small -c "Passive Subdomain Enum with subfinder" | lolcat
subfinder -d "$domain" -all -recursive -t 200 -o subfinder.txt
figlet -f small -c "Passive Subdomain Enum with Assetfinder" | lolcat
assetfinder --subs-only "$domain" > assetfinder.txt
figlet -f small -c "Passive Subdomain Enum with Findomain" | lolcat
findomain --quiet -t "$domain" -u findomain.txt
export GITHUB_TOKEN="ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"  # use your own token; never hardcode a real one
figlet -f small -c "Passive Subdomain Enum with Github-Subdomains" | lolcat
github-subdomains -d "$domain" -o github-subdomains.txt
figlet -f small -c "Passive Subdomain Enum with Amass" | lolcat
amass enum -d "$domain" -o domains-amass.txt -timeout 12 -v
cat domains-amass.txt | grep "$domain" | grep -oP '^\S+' | sort -u > raw-amass.txt
cat raw-amass.txt | sed 's/\x1b\[[0-9;]*m//g' > amass.txt  # strip ANSI color codes
figlet -f small -c "Passive Subdomain Enum with Web-Archive" | lolcat
curl -s "https://web.archive.org/cdx/search/cdx?url=*.$domain&fl=original&collapse=urlkey" | awk -F/ '{print $3}' | sort -u | tee archive.txt
figlet -f small -c "Passive Subdomain Enum with Crt.sh" | lolcat
curl -s "https://crt.sh/?q=%25.$domain&output=json" | jq -r '.[].name_value' | tee crt.txt

echo "Running Script with active sources (puredns, dnsx, alterx, knockpy)..."
knockpy -d "$domain" --recon --bruteforce | grep -oP 'https?://[a-zA-Z0-9.-]+(:[0-9]+)?' | tee knockpy.txt
cat knockpy.txt | alterx | dnsx -t 1000 | tee brute-subs.txt
puredns resolve brute-subs.txt --threads 250 --resolvers resolvers.txt --resolvers-trusted trusted.txt --rate-limit 1000 | tee alterx.txt
puredns bruteforce /home/kali/xss_test/wordlist/2m-subdomains.txt "$domain" | grep -oE '[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}' | tee puredns.txt
cat knockpy.txt alterx.txt puredns.txt | sort -u | tee brute-subs.txt
cat brute-subs.txt github-subdomains.txt crt.txt archive.txt assetfinder.txt subfinder.txt findomain.txt amass.txt > subdomains.txt 
sort -u subdomains.txt > sort.txt 
cat sort.txt | httpx -silent -threads 50 | tee "$output_dir/livesubdomains.txt" 
cat "$output_dir/livesubdomains.txt" | wc -l  

# Step 4: Extract domains that return HTTP 200
echo "Extracting domains that return HTTP 200..."
cat "$output_dir/livesubdomains.txt"  | httpx -mc 200 > "$output_dir/200_livesubdomains.txt"
cat "$output_dir/200_livesubdomains.txt" | wc -l  

# Step 5: Port-scan all subdomains to identify anomalies and potential internal development services
echo "Port scanning to identify anomalies and potential internal development services..."
naabu -tp 1000 -l sort.txt -o "$output_dir/sub-with-ports.txt"
cat "$output_dir/sub-with-ports.txt" | httpx -title -sc -location -ip -td -follow-redirects | tee "$output_dir/exposed-services.txt"

# Step 6: Reverse-DNS the resolved IPs and scan all their ports to expose internal development services
echo "Reverse DNS IP scanning to expose internal development services..."
dnsx -l "$output_dir/livesubdomains.txt" -silent -a -resp-only -o all-ip.txt && cat all-ip.txt | sort -u | tee "$output_dir/ip.txt" 
cat "$output_dir/ip.txt" | dnsx -ptr -resp-only | tee "$output_dir/dnsx.txt"
echo "Find Origin IP's"
> origin-ip.txt  # Clear the output file at the beginning
while read -r host; do
    echo "Querying: $host"
    ip=$(nslookup "$host" | grep "Address" | tail -n +2 | awk '{print $2}')
    if [[ -n "$ip" ]]; then
        echo "$ip" >> origin-ip.txt
    else
        echo "No IP found for $host" >&2
    fi
done < "$output_dir/dnsx.txt" 
cat origin-ip.txt | sort -u | tee -a "$output_dir/ip.txt"
sort -u "$output_dir/ip.txt" | tee "$output_dir/sort-origin-ip.txt"
naabu -tp 1000 -l "$output_dir/sort-origin-ip.txt" -o "$output_dir/ip-with-ports.txt"
cat "$output_dir/dnsx.txt" | grep $domain | grep -oP '^\S+' | sort -u | httpx | tee -a "$output_dir/livesubdomains.txt" 
cat "$output_dir/ip-with-ports.txt" | httpx -title -sc -location -td -follow-redirects | tee -a "$output_dir/exposed-services.txt"

# Step 7: Filter domains by keyword
echo "Filtering domains by keyword..."
grep -E 'api|prod|test|dev|staging|secure|login|admin|beta|support|private|internal|demo|management|dashboard|config|service|analytics|auth' "$output_dir/livesubdomains.txt" > "$output_dir/important_subs.txt"

# Step 8: Filter domains by technology
echo "Filtering domains by technology..."
cat "$output_dir/livesubdomains.txt" | httpx -td -title -sc -ip > "$output_dir/httpx_domains.txt"

# Step 9: Filter domains by language
echo "Filtering domains by language..."
cat "$output_dir/httpx_domains.txt" | grep -i php | awk '{print $1}' > "$output_dir/php-html_domains.txt"
cat "$output_dir/httpx_domains.txt" | grep -i asp | awk '{print $1}' > "$output_dir/asp-aspx_domains.txt"
cat "$output_dir/httpx_domains.txt" | grep -i java | awk '{print $1}' > "$output_dir/jsp-jspx-htm-do-actiom_domains.txt"
cat "$output_dir/httpx_domains.txt" | grep -i CFML | awk '{print $1}' > "$output_dir/cfm-html-htm_domains.txt"
cat "$output_dir/httpx_domains.txt" | grep -i perl | awk '{print $1}' > "$output_dir/pl-html-htm_domains.txt"

# Step 10: Filter domains by server
echo "Filtering domains by server..."
cat "$output_dir/httpx_domains.txt" | grep -i apache | awk '{print $1}' > "$output_dir/Apache_Server.txt"
cat "$output_dir/httpx_domains.txt" | grep -i Nginx | awk '{print $1}' > "$output_dir/Nginx_Server.txt"
cat "$output_dir/httpx_domains.txt" | grep -i IIS | awk '{print $1}' > "$output_dir/IIS-Windows_Server.txt"
cat "$output_dir/httpx_domains.txt" | grep -i oracle | awk '{print $1}' > "$output_dir/Oracle-weblogic_Server.txt"
cat "$output_dir/httpx_domains.txt" | grep -i Tomcat | awk '{print $1}' > "$output_dir/Apache_Tomcat_Server.txt"
cat "$output_dir/httpx_domains.txt" | grep -i httpd | awk '{print $1}' > "$output_dir/httpd_Tomcat_Server.txt"
cat "$output_dir/httpx_domains.txt" | grep -i Adobe ColdFusion | awk '{print $1}' > "$output_dir/Adobe-ColdFusion.txt"

# Step 11: Filter domains by WAF
echo "Filtering domains by WAF..."
cat "$output_dir/httpx_domains.txt" | grep -i Cloudflare | awk '{print $1}' > "$output_dir/Cloudflare_WAF.txt"
cat "$output_dir/httpx_domains.txt" | grep -i Akamai | awk '{print $1}' > "$output_dir/Akamai_WAF.txt"
cat "$output_dir/httpx_domains.txt" | grep -i Amazon CloudFront | awk '{print $1}' > "$output_dir/Amazon_CloudFront_WAF.txt"
cat "$output_dir/httpx_domains.txt" | grep -i imperva | awk '{print $1}' > "$output_dir/imperva_WAF.txt"

# Step 12: Capture screenshots of the live domains
echo "Capturing screenshots of the live domains..."
gowitness scan file -f "$output_dir/livesubdomains.txt" --delay 4 --screenshot-fullpage --screenshot-path screenshots/ --write-csv-file 100sshost.csv --write-db 
gowitness report generate --screenshot-path screenshots/ --zip-name screenshots-report.zip 
unzip screenshots-report.zip -d "$output_dir/sc-report"
rm -r screenshots

# Step 13: Remove the intermediate files
echo "Removing the intermediate files..."
rm -f github-subdomains.txt knockpy.txt puredns.txt crt.txt archive.txt assetfinder.txt subfinder.txt findomain.txt amass.txt raw-amass.txt domains-amass.txt subdomains.txt all-ip.txt brute-subs.txt alterx.txt origin-ip.txt gowitness.sqlite3


# Step 14: Run CVE scans with Nuclei on all subdomains
echo "Running Nuclei CVE scans..."
cat "$output_dir/livesubdomains.txt" | nuclei -t /home/kali/target/nuclei-templates -o "$output_dir/swagger-xss.txt"
cat "$output_dir/livesubdomains.txt" | nuclei -tags tech,cve,xss,lfi,panel -severity info,medium,high,critical -es unknown -cloud-upload -c 30 -stats -headless -o "$output_dir/Domain-CVE.txt"
cat "$output_dir/sort-origin-ip.txt" | nuclei -tags tech,cve,xss,lfi,panel -severity info,medium,high,critical -es unknown -cloud-upload -c 30 -stats -headless -o "$output_dir/IP-CVE.txt"
cat "$output_dir/ip-with-ports.txt" | nuclei -tags tech,cve,xss,lfi,panel -severity info,medium,high,critical -es unknown -cloud-upload -c 30 -stats -headless -o "$output_dir/IP-with-ports-CVE.txt"

# Step 15: Find Hidden Parameters on all Subdomains
arjun -i "$output_dir/livesubdomains.txt" -oT "$output_dir/arjun.txt"

echo "All Tasks Subdomains Related are Completed..."
dork-fear.sh
#!/bin/bash

figlet -f slant  -c "Start Hacking" | lolcat && figlet -f digital  -c "Hack to Learn" | lolcat && figlet -f mini -c "Dork Fear" | lolcat

# Set color codes for user prompts (optional, for styling)
GREENBOLD="\e[1;32m"
NC="\e[0m"  # No Color

# Ask the user for the website URL or domain
echo -e "${GREENBOLD}Enter the website URL or domain (without protocol, e.g., example.com): ${NC}"
read -r website_input

# Normalize the input: Add "https://" if the input is just a domain without protocol
if [[ ! $website_input =~ ^https?:// ]]; then
    website_url="https://$website_input"
    website_without_protocol="$website_input"  # No protocol for checking subdomains
else
    website_url="$website_input"
    website_without_protocol="${website_input#https://}"  # Remove https://
    website_without_protocol="${website_without_protocol#http://}"  # Remove http://
fi
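# Illustrative behavior of the normalization above (example values only):
#   "example.com"        -> website_url=https://example.com, website_without_protocol=example.com
#   "http://example.com" -> website_url=http://example.com,  website_without_protocol=example.com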

# Define lists of extensions, keywords, and parameters
file_extensions=("xlsx" "xls" "csv" "doc" "docx" "pdf" "txt" "odt" "odf" "ppt" "pptx")
keywords=("confidential" "PRIVATE ASSET" "COMPANY SENSITIVE" "SENSITIVE" "STRICTLY CONFIDENTIAL" "HIGHLY CONFIDENTIAL" "ONLY FOR" "Not for Public Release" "internal use only" "do not distribute" "PRIVATE AND CONFIDENTIAL")
datasite=("drive.google.com" "onedrive.live.com" "docs.google.com" "groups.google.com" "googleapis.com" "dropbox.com/s" "box.com/s" "dev.azure.com" "sharepoint.com" "blob.core.windows.net" "digitaloceanspaces.com" "firebaseio.com" "jfrog.io" "s3.amazonaws.com")
intextkeywords=("choose file" "choose files" "No file chosen" "Submit Content")
inurlkeywords=("uploadform")
web_extensions=("php" "html" "xhtml" "htm" "asp" "aspx" "jsp" "jspx" "jsf" "do" "action")
parameters=("=https" "%2F" "redir=" "redirect=" "return_to=" "redirect_url=" "redirect_uri=" "redirect_to=" "url=" ".=https")
shodan_extensions=("php" "java" "ASP.NET" "perl" "CFML" "Adobe ColdFusion" "Swagger UI")
paths=("phpinfo()" "Swagger UI" "admin" "dashboard" "signin" "login" "panel" "Check Point" "Ivanti Connect" "HugeGraph")

# Output search queries
for ext in "${file_extensions[@]}"; do
    echo "site:.$website_without_protocol ext:$ext"
done

echo ""
for ext in "${file_extensions[@]}"; do
    echo "site:.$website_without_protocol ext:$ext name @gmail.com phone"
done

echo ""
for data in "${datasite[@]}"; do
    echo "site:.$data .$website_without_protocol"
done

echo ""
for words in "${keywords[@]}"; do
    for ext in "${file_extensions[@]}"; do
        echo "site:.$website_without_protocol intext:$words ext:$ext"
    done
done

echo ""
for textkeywords in "${intextkeywords[@]}"; do
    echo "site:.$website_without_protocol intext:$textkeywords"
done

echo ""
for urlkeywords in "${inurlkeywords[@]}"; do
    echo "site:.$website_without_protocol inurl:$urlkeywords"
done

echo ""
for ext in "${web_extensions[@]}"; do
    for urlkeywords in "${inurlkeywords[@]}"; do
        echo "site:.$website_without_protocol inurl:$urlkeywords filetype:$ext"
    done
done

echo ""
for param in "${parameters[@]}"; do
    echo "site:.$website_without_protocol inurl:\"$param\""
done

echo ""
for ext in "${web_extensions[@]}"; do
    echo "site:.$website_without_protocol ext:$ext"
done

echo ""
for ext in "${web_extensions[@]}"; do
    echo "site:.$website_without_protocol ext:$ext inurl:& | inurl:? | inurl:="
done

echo ""
for ext in "${shodan_extensions[@]}"; do
    echo "hostname:.$website_without_protocol http.component:$ext"
done

echo ""
for path in "${paths[@]}"; do
    echo "hostname:.$website_without_protocol http.title:\"$path\""
done
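
A hedged usage sketch: dork-fear.sh only prints queries, so capturing them to a file for pasting into Google or Shodan is natural (dorks.txt is an arbitrary name):

chmod +x dork-fear.sh
./dork-fear.sh | tee dorks.txt
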
recon-fear.sh
#!/bin/bash

# Color definitions
REDCOLOR="\e[31m"
GREENBOLD="\e[1;32m"
WELCOMCOLOR="\e[1;3;33m"
CYANBOLD="\e[1;36m"
NC="\e[0m"

# Function to display the welcome message with cool ASCII art
function show_welcome {
    clear
    # Display ASCII art for title and user message
    figlet -f slant "Recon Fear" | lolcat -a -s 100
    echo -e "${CYANBOLD}------------------------------- Created by Muhammad Asad -------------------------------\n"
    echo -e "${REDCOLOR}------------------------------------------------------------------------------------------"
    echo -e "${WELCOMCOLOR}=================================== Recon Fear ================================\n"
}

# Main script execution
show_welcome

# Sample random quote or message
arr=("Knowledge is power, let’s hack it right!" "Prepare yourself for an awesome recon journey..." "Hacking is an art, not a crime.")
random=$((RANDOM % ${#arr[@]}))

# Display random message
echo -e "${GREENBOLD}${arr[$random]}\n"

# Stylish separator
echo -e "${REDCOLOR}------------------------------------------------------------------------------------------$NC\n"

# Ask the user for the Webhook.site URL for SSRF or Open-Redirect checks
echo -e "\033[1;34mEnter the Webhook.site_URL for Check SSRF OR Open-Redirect:\033[0m"
read -r URL

# Initialize counter for naming subdomain directories
counter=1

# Loop through the scope.txt file
while read -r subdomain; do
    # Skip empty lines and comments
    if [[ -z "$subdomain" || "$subdomain" =~ ^# ]]; then
        continue
    fi

    # Create the output directory for the subdomain as subdomain-<counter> in the recon-output folder
    subdomain_dir="recon-output/subdomain-$counter"
    mkdir -p "$subdomain_dir"
    
    # Print the current subdomain in use
    echo -e "${GREENBOLD}Processing Subdomain: $subdomain${NC}"

    # Step 1: Normalize the input (Add https:// if missing)
    if [[ ! $subdomain =~ ^https?:// ]]; then
        subdomain_url="https://$subdomain"
        subdomain_without_protocol="$subdomain"
    else
        subdomain_url="$subdomain"
        subdomain_without_protocol="${subdomain#https://}"
        subdomain_without_protocol="${subdomain_without_protocol#http://}"
    fi

    echo -e "${REDCOLOR}Normalized URL with protocol: $subdomain_url"
    echo -e "${REDCOLOR}Website URL without protocol: $subdomain_without_protocol"

    # Create an output directory specific to the subdomain (under the recon-output folder)
    output_dir="$subdomain_dir/endpoints_output"
    mkdir -p "$output_dir"

    # Step 2: Run katana with passive sources and save output to a unified file ("all_endpoints.txt")
    echo -e "${REDCOLOR}Running katana with passive sources (waybackarchive, commoncrawl, alienvault)...\n"
    echo "$subdomain_url" | katana -ps -pss waybackarchive,commoncrawl,alienvault -f qurl | tee "$output_dir/all_endpoints.txt"

    # Step 3: Run waybackurl passively and append results to "all_endpoints.txt"
    echo -e "${REDCOLOR}Running waybackurl passively..."
    waybackurls "$subdomain_url" | tee -a "$output_dir/all_endpoints.txt"

    # Step 4: Run gau passively and append results to "all_endpoints.txt"
    echo -e "${REDCOLOR}Running gau passively..."
    gau "$subdomain_url" | tee -a "$output_dir/all_endpoints.txt"

    # Step 5: Run katana actively with depth 2 and append results to "all_endpoints.txt"
    echo -e "${REDCOLOR}Running katana actively with depth 2..."
    katana -u "$subdomain_url" -d 2 -jc -f qurl | tee -a "$output_dir/all_endpoints.txt"

    # Step 6: Filter "all_endpoints.txt" for Unique Endpoints
    echo -e "Filtering Unique Endpoints for Vulnerabilities ..."
    cat "$output_dir/all_endpoints.txt" | uro > "$output_dir/endpoints.txt"

    # Step 7: Filter "endpoints.txt" for parameters to fuzz for different vulnerabilities
    echo -e "Filtering Parameters for Vulnerabilities ..."
    cat "$output_dir/endpoints.txt" | grep "=" | sort -u > "$output_dir/fuzz_parameters.txt"
    
    # Step 8: Parameter discovery using ParamSpider
    echo -e "Performing parameter discovery using ParamSpider..."
    paramspider -d "$subdomain_without_protocol" --stream | grep -oP 'http[s]?://\S+' | tee -a "$output_dir/fuzz_parameters.txt"

    # Step 9: XSS testing on all passive parameters
    echo "Running XSS Testing on All Parameters with Quick Analysis..."
    cat "$output_dir/fuzz_parameters.txt" | Gxss -p '">asad<hacked' | tee "$output_dir/passive-xss.txt"

    # Step 10: Open Redirect / SSRF testing
    echo "Running Open Redirect OR SSRF Testing on All Parameters with Webhook.site_URL..."
    cat "$output_dir/fuzz_parameters.txt" | qsreplace $URL | tee "$output_dir/open-redirect.txt"
    cat "$output_dir/open-redirect.txt" | httpx -status-code -title -location > "$output_dir/check-open-redirect.txt"

    # Step 11: Filter URLs for different vulnerabilities using GF (XSS, Open Redirect, etc.)
    echo -e "${REDCOLOR}Filtering URLs for potential vulnerabilities... (XSS, Open Redirect, LFI, etc.)"
    cat "$output_dir/endpoints.txt" | gf xss | sort -u > "$output_dir/xss_endpoints.txt"
    cat "$output_dir/endpoints.txt" | gf redirect | sort -u > "$output_dir/open_redirect_endpoints.txt"
    cat "$output_dir/endpoints.txt" | gf lfi | sort -u > "$output_dir/lfi_endpoints.txt"
    cat "$output_dir/endpoints.txt" | gf sqli | sort -u > "$output_dir/sqli_endpoints.txt"
    cat "$output_dir/endpoints.txt" | gf ssrf | sort -u > "$output_dir/ssrf_endpoints.txt"
    cat "$output_dir/endpoints.txt" | gf rce | sort -u > "$output_dir/rce_endpoints.txt"

    # Additional steps (Arjun, x8, etc.) would follow the same pattern, appending results to the appropriate subdomain folder

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    # Step 12: Extract extension-based endpoints (ext_endpoints.txt) for hidden-parameter discovery with Arjun
    echo -e "Filtering extension-based endpoints for hidden-parameter discovery with Arjun..."
    cat "$output_dir/endpoints.txt" | grep -E '\.(php|html|shtml|xhtml|xhtm|htm|asp|aspx|ashx|asmx|pl|cfm|jsp|jspx|jsf|do|action)$' | tee "$output_dir/ext_endpoints.txt"

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    # Step 13: Filter unique ext-endpoints
    echo -e "Filtering unique ext-endpoints for parameter fuzzing with Arjun..."
    cat "$output_dir/ext_endpoints.txt" | uro > "$output_dir/arjun_ext-endpoints.txt"

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"
    
    # Step 14: Find hidden parameters on the passive ext-endpoints with Arjun
    echo -e "Running Arjun on the ext-endpoints to find hidden parameters..."
    arjun -i "$output_dir/arjun_ext-endpoints.txt" -oT "$output_dir/arjun_result_ext_endpoints.txt"
    cat "$output_dir/arjun_result_ext_endpoints.txt" | awk -F'[?&]' '{baseUrl=$1; for(i=2; i<=NF; i++) {split($i, param, "="); print baseUrl "?" param[1] "="}}' | tee "$output_dir/arjun-xss.txt"
    cat "$output_dir/arjun-xss.txt" | kxss | tee -a "$output_dir/kxss-result.txt"
    cat "$output_dir/arjun-xss.txt" | Gxss -p '">asad<hacked' | tee -a "$output_dir/Gxss-result.txt"
    
    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    # Step 15: Crawl for a targeted parameter wordlist
    echo -e "Building a targeted parameter wordlist for fuzzing all endpoints with x8..."
    fallparams -u "$subdomain_url" -crawl 5 -headless && cat parameters.txt | tee "$output_dir/parameters.txt"

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    # Step 16: x8 hidden-parameter discovery on the ext-endpoints with the targeted wordlist
    echo -e "Running x8 on the ext-endpoints to find hidden parameters with the targeted wordlist..."
    x8 -u "$output_dir/arjun_ext-endpoints.txt" -w "$output_dir/parameters.txt" -X GET -o "$output_dir/x8-result.txt"
    cat "$output_dir/x8-result.txt" | awk -F' % ' '{baseUrl=$1; params=$2; split(params, paramArray, ", "); for(i=1; i<=length(paramArray); i++) {print baseUrl "?" paramArray[i] "="}}' | sed 's/^GET //' | tee "$output_dir/x8-xss.txt"
    cat "$output_dir/x8-xss.txt" | kxss | tee "$output_dir/kxss-result.txt"
    cat "$output_dir/x8-xss.txt" | Gxss -p '">asad<hacked' | tee "$output_dir/Gxss-result.txt"

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    echo -e "${REDCOLOR}  - Vulnerable XSS Hidden Parameters by find with x8: x8_xss.txt...\n"
    echo -e "${REDCOLOR}  - Vulnerable XSS Hidden Parameters by find with Arjun: arjun_xss.txt...\n"
    echo -e "${REDCOLOR}  - Vulnerable XSS Hidden Parameters by find KXSS: kxss-result.txt...\n"
    echo -e "${REDCOLOR}  - Vulnerable XSS Hidden Parameters by find KXSS: Gxss-result.txtt...\n"

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    # Step 17: Endpoint discovery using JS files
    echo -e "Endpoint discovery using JS files..."
    echo "$subdomain_url" | subjs | python3 /home/kali/tools/JSA/jsa.py | tee "$output_dir/js_endpoints.txt"
    cat "$output_dir/all_endpoints.txt" | grep '\.js$' | tee "$output_dir/js_files.txt"
    cat "$output_dir/js_files.txt" | python3 /home/kali/tools/JSA/jsa.py | tee -a "$output_dir/js_endpoints.txt"
    linkfinder -i "$subdomain_url" -d -o cli | tee -a "$output_dir/js_endpoints.txt"

    echo -e "$GREENBOLD----------------------------------------------------------------------------------------------$NC\n"

    # Final completion message for this subdomain
    echo -e "${REDCOLOR}All tasks for $subdomain are complete...\n"
    
    # Increment counter for next subdomain
    ((counter++))
    
done < scope.txt  # Loop over scope.txt file

# Notify user that all tasks are complete for all subdomains
echo -e "${GREENBOLD}----------------------------------------------------------------------------------------------$NC\n"
echo -e "${REDCOLOR}All tasks are complete for all subdomains...\n"
