WpBroken.py (forked from ilovefreesw/WpBroken.py)
A script that checks the links in all of a WordPress site's posts and finds the broken ones, then saves a report to a CSV file. It is based on the WordPress REST API and only needs the domain as input.
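Usage (a minimal sketch; the file name, domain, and output path below are placeholders, assuming the code is saved as WpBroken.py):

python WpBroken.py example.com broken-links.csv

Pass the bare domain without a scheme or "www." prefix; the script prepends "https://www." itself when it calls the WordPress REST API.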
import requests
import csv
import concurrent.futures
import sys
import bs4

domain = sys.argv[1]     # bare domain, e.g. example.com (no scheme, no "www.")
csv_file = sys.argv[2]   # path of the CSV report to write
headers = {
    'authority': 'www.' + domain,
    'referer': 'https://' + domain,
    'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Mobile Safari/537.36',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'sec-fetch-dest': 'document',
    'accept-language': 'en-US,en;q=0.9,tr;q=0.8',
}

links404 = []

# Total number of result pages reported by the WordPress REST API.
pages = int(requests.get('https://www.' + domain + '/wp-json/wp/v2/posts', headers=headers).headers['X-WP-TotalPages'])

def getLinks(rendered_content):
    # Extract every href value from the rendered HTML of a post.
    soup = bs4.BeautifulSoup(rendered_content, 'html.parser')
    return [link['href'] for link in soup('a') if 'href' in link.attrs]

def getStatusCode(link, headers, timeout=5):
    # Return the exception name or a non-OK status code for a link, or None if the link looks fine.
    try:
        r = requests.head(link, headers=headers, timeout=timeout)
    except (requests.exceptions.SSLError, requests.exceptions.HTTPError, requests.exceptions.ConnectionError, requests.exceptions.MissingSchema, requests.exceptions.Timeout) as errh:
        print("Error in URL:", link)
        return errh.__class__.__name__
    # 200 is fine; 500 and 503 are treated as temporary server errors rather than broken links.
    if r.status_code not in [200, 500, 503]:
        return str(r.status_code)
    return None

with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    # Map each future back to the post and URL it was submitted for,
    # so results can be matched correctly once the checks complete.
    futures = {}
    for i in range(pages):
        post_data = requests.get('https://www.' + domain + '/wp-json/wp/v2/posts?page=' + str(i + 1), headers=headers).json()
        for data in post_data:
            print(data["link"])
            post_links = getLinks(data["content"]["rendered"])
            for url in post_links:
                future = executor.submit(getStatusCode, url, headers)
                futures[future] = {"Post ID": data['id'], "Post Link": data['link'], "Broken Link": url}
    # Collect results once all checks have been submitted.
    for future in concurrent.futures.as_completed(futures):
        status = future.result()
        if status is not None:  # record only links that failed or returned a bad status
            info = futures[future]
            info["Status Code"] = status
            links404.append(info)

with open(csv_file, 'w', newline='') as file:
    csvwriter = csv.DictWriter(file, fieldnames=["Post ID", "Post Link", "Broken Link", "Status Code"])
    csvwriter.writeheader()
    csvwriter.writerows(links404)
print("Report saved in file:", csv_file)