GitHub wiki link validator
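The script below crawls a GitHub wiki starting from a base URL, recursively follows every link that stays inside that wiki, and reports broken links: links that GitHub marks with the 'absent' class, and #fragment anchors that do not match any name or id attribute on the target page.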
#!/usr/bin/env python3
#
# link-validator.py
# Brandon Amos <http://bamos.io>

import argparse
import re
from functools import lru_cache
from urllib.request import urlopen
from urllib.parse import urljoin, urldefrag

from bs4 import BeautifulSoup

WIKI_PREFIX = "wiki-"

baseUrl = None
validated_urls = set()

@lru_cache(1000)
def get_content(url):
    """Fetch a URL and return its parsed HTML, memoized to avoid re-fetching."""
    try:
        content = urlopen(url).read().decode("utf8")
        return BeautifulSoup(content, "html.parser")
    except (UnicodeDecodeError, OSError):
        # Assume the content is binary or the URL is unreachable
        return BeautifulSoup("", "html.parser")

def validate(url):
    """Recursively validate all wiki links reachable from the given page."""
    if url in validated_urls:
        return
    validated_urls.add(url)
    content = get_content(url)
    wikiUrls = set()
    invalidUrls = []
    for external_link in content.find_all('a', href=True):
        fragment = urldefrag(external_link['href']).fragment
        fullexternal_link = urljoin(url, urldefrag(external_link['href']).url)
        if baseUrl in fullexternal_link and not fullexternal_link.endswith('/_history'):
            if external_link.has_attr('class') and 'absent' in external_link['class']:
                # GitHub marks links to nonexistent wiki pages with the 'absent' class
                invalidUrls.append(external_link)
            else:
                # Check whether the fragment really points to an element on the target page
                if fragment:
                    target_content = get_content(fullexternal_link)
                    # Some fragments have "wiki-" prepended
                    shortened_fragment = (fragment[len(WIKI_PREFIX):]
                                          if fragment.startswith(WIKI_PREFIX) else fragment)
                    pattern = re.compile("^.*" + re.escape(shortened_fragment) + "$")
                    if not (target_content.find(attrs={"name": pattern})
                            or target_content.find(attrs={"id": pattern})):
                        invalidUrls.append(external_link)
                wikiUrls.add(fullexternal_link)
    if invalidUrls:
        print("\nBad links on page " + url)
        for i in invalidUrls:
            print(i)
    for wikiUrl in wikiUrls:
        if wikiUrl not in validated_urls:
            validate(wikiUrl)

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('url', type=str,
                        help='The base link to the GitHub Wiki to scrape. '
                             'Example: http://github.com/bamos/github-wiki-link-validator/wiki')
    args = parser.parse_args()
    baseUrl = args.url
    validate(args.url)
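A minimal invocation sketch, assuming the script is saved as link-validator.py and that beautifulsoup4 (its only third-party dependency) is installed; the Snowplow wiki URL is just an illustrative target:

# Install the HTML-parsing dependency
pip install beautifulsoup4

# Crawl a wiki and print any bad links found on each page
python3 link-validator.py https://github.com/snowplow/snowplow/wiki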
Here's the output when this is run on the Snowplow wiki: