@0x3n0
Created November 27, 2024 14:26

Website Crawler for Clickjacking Detection Using X-Frame-Options and CSP

This script is a Go-based website crawler that detects pages potentially vulnerable to clickjacking. It fetches every page it discovers and examines the anti-framing response headers X-Frame-Options and Content-Security-Policy.

Features:

  • Crawling Capability: Starting from the base URL, recursively follows links and scans every reachable URL on the same host.
  • Header Analysis (see the sketch after this list):
    • Detects the presence and values of the X-Frame-Options header.
    • Parses Content-Security-Policy directives for frame-ancestors to evaluate iframe embedding restrictions.
  • Vulnerability Classification:
    • Protected: URLs that send an X-Frame-Options header or a CSP frame-ancestors directive (the header's presence is checked, not its value).
    • Potentially Vulnerable: URLs lacking both protections.
  • Customizable User-Agent: Mimics common browser behavior to reduce blocking during requests.
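
The header check itself is simple. Below is a minimal standalone sketch of what the crawler does per URL, assuming a single hard-coded target (https://example.com is a placeholder) and a plain substring test in place of the script's full frame-ancestors parser:

package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	resp, err := http.Get("https://example.com") // placeholder target
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	resp.Body.Close() // only the headers are needed here

	xfo := resp.Header.Get("X-Frame-Options")
	csp := resp.Header.Get("Content-Security-Policy")
	// Protected if either anti-framing mechanism is present; the full script
	// parses the frame-ancestors source list rather than substring-matching.
	if xfo != "" || strings.Contains(csp, "frame-ancestors") {
		fmt.Println("Protected")
	} else {
		fmt.Println("Potentially Vulnerable")
	}
}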

Usage:

Run the script with the following command:

go run crawl_paths.go <base_url>

Replace <base_url> with the URL you want to crawl.
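
The script imports golang.org/x/net/html, which is not part of the standard library. If you don't already have it, a typical module setup looks like this (the module name is arbitrary):

go mod init crawl_paths
go get golang.org/x/net/html
go run crawl_paths.go https://example.com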

Output Example:

The script classifies each URL as protected or potentially vulnerable and prints the relevant headers as it crawls. Example:

Path: https://example.com
  - X-Frame-Options: SAMEORIGIN
  - CSP: frame-ancestors 'self'
  - Status: Protected
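
A URL that sends neither header is reported like this (hypothetical output):

Path: https://example.com/legacy
  - X-Frame-Options: (not set)
  - Status: Potentially Vulnerable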

Notes:

  • This tool is intended for security research and testing purposes only. Always obtain proper authorization before crawling or testing any website.
  • Results may vary depending on server configuration, redirects, or additional access restrictions.
  • The crawler is recursive with no depth limit or request throttling, so large sites can take a long time to scan.

Feel free to use or adapt this script for your cybersecurity projects!

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"
	"time"

	"golang.org/x/net/html"
)

func main() {
	if len(os.Args) != 2 {
		fmt.Println("Usage: go run crawl_paths.go <base_url>")
		os.Exit(1)
	}

	baseURL := os.Args[1]
	// A bare "http" prefix check would also match hosts like "httpbin.org",
	// so test for an explicit scheme instead.
	if !strings.HasPrefix(baseURL, "http://") && !strings.HasPrefix(baseURL, "https://") {
		baseURL = "http://" + baseURL
	}

	fmt.Printf("Starting crawl on %s...\n\n", baseURL)
	visited := make(map[string]bool)
	vulnerable := []string{}
	protected := []string{}

	crawl(baseURL, baseURL, visited, &vulnerable, &protected)

	fmt.Printf("\n[INFO] Crawling completed.\n")
	fmt.Printf("  - Total URLs found: %d\n", len(visited))
	fmt.Printf("  - Potentially Vulnerable URLs: %d\n", len(vulnerable))
	fmt.Printf("  - Protected URLs: %d\n\n", len(protected))

	if len(vulnerable) > 0 {
		fmt.Println("[VULNERABLE URLS]")
		for _, v := range vulnerable {
			fmt.Println(v)
		}
	}
	if len(protected) > 0 {
		fmt.Println("\n[PROTECTED URLS]")
		for _, p := range protected {
			fmt.Println(p)
		}
	}
}

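// crawl fetches currentURL, classifies it as protected or potentially
// vulnerable based on its anti-framing headers, and recurses into every
// same-host link found in the response body. visited guards against cycles.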
func crawl(baseURL, currentURL string, visited map[string]bool, vulnerable, protected *[]string) {
	if visited[currentURL] {
		return
	}
	visited[currentURL] = true

	req, err := http.NewRequest("GET", currentURL, nil)
	if err != nil {
		fmt.Printf("[ERROR] Failed to create request for %s: %s\n", currentURL, err)
		return
	}
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36")

	// Use a timeout so a single unresponsive server cannot stall the crawl.
	client := &http.Client{Timeout: 15 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		fmt.Printf("[ERROR] Failed to fetch %s: %s\n", currentURL, err)
		return
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		fmt.Printf("[WARNING] Unable to access %s (Status: %d)\n", currentURL, resp.StatusCode)
		return
	}

	// Check headers for potential clickjacking vulnerability
	xFrameOptions := resp.Header.Get("X-Frame-Options")
	csp := resp.Header.Get("Content-Security-Policy")
	cspFrameAncestors := parseCSPFrameAncestors(csp)

	// Presence of either header is treated as protection; header values are
	// not validated, so a non-protective value such as X-Frame-Options:
	// ALLOWALL would still be reported as Protected.
	isVulnerable := xFrameOptions == "" && cspFrameAncestors == ""

	if isVulnerable {
		*vulnerable = append(*vulnerable, currentURL)
	} else {
		*protected = append(*protected, currentURL)
	}

	fmt.Printf("Path: %s\n", currentURL)
	fmt.Printf("  - X-Frame-Options: %s\n", xFrameOptions)
	if csp != "" {
		fmt.Printf("  - CSP: %s\n", csp)
		fmt.Printf("  - CSP frame-ancestors: %s\n", cspFrameAncestors)
	}
	fmt.Printf("  - Status: %s\n\n", map[bool]string{true: "Potentially Vulnerable", false: "Protected"}[isVulnerable])

	doc, err := html.Parse(resp.Body)
	if err != nil {
		fmt.Printf("[ERROR] Failed to parse HTML for %s: %s\n", currentURL, err)
		return
	}
	// The body has been fully read; close it now so the connection is
	// released before recursing rather than at the end of the sub-crawl.
	resp.Body.Close()

	baseParsed, err := url.Parse(baseURL)
	if err != nil {
		return
	}

	for _, link := range extractLinks(doc) {
		parsedLink, err := url.Parse(link)
		if err != nil {
			continue
		}

		// Skip off-host links and non-HTTP schemes such as mailto: or javascript:.
		if parsedLink.Host != "" && parsedLink.Host != baseParsed.Host {
			continue
		}
		if parsedLink.Scheme != "" && parsedLink.Scheme != "http" && parsedLink.Scheme != "https" {
			continue
		}

		resolved := baseParsed.ResolveReference(parsedLink)
		resolved.Fragment = "" // #fragment variants all point at the same document
		crawl(baseURL, resolved.String(), visited, vulnerable, protected)
	}
}

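// extractLinks walks the parsed HTML tree and collects the href attribute of
// every <a> element.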
func extractLinks(n *html.Node) []string {
	var links []string
	if n.Type == html.ElementNode && n.Data == "a" {
		for _, attr := range n.Attr {
			if attr.Key == "href" {
				links = append(links, attr.Val)
			}
		}
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		links = append(links, extractLinks(c)...)
	}
	return links
}

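// parseCSPFrameAncestors returns the source list of the frame-ancestors
// directive in a Content-Security-Policy value, or "" if the directive is
// absent. For example, parseCSPFrameAncestors("default-src 'self'; frame-ancestors 'none'")
// returns "'none'".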
func parseCSPFrameAncestors(csp string) string {
	directives := strings.Split(csp, ";")
	for _, directive := range directives {
		directive = strings.TrimSpace(directive)
		// CSP directive names are ASCII case-insensitive.
		if strings.HasPrefix(strings.ToLower(directive), "frame-ancestors") {
			parts := strings.SplitN(directive, " ", 2)
			if len(parts) == 2 {
				return strings.TrimSpace(parts[1])
			}
		}
	}
	return ""
}