Last active
March 30, 2023 04:04
-
-
Save avipars/0b1c05e4a1f664967a3945bb384e55ea to your computer and use it in GitHub Desktop.
Recover suspended tab URLs from The Marvellous Suspender
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Script: ask the user for a txt file of URLs, clean the suspended ones,
# and save the results into new txt files.
def main():
    """Prompt the user for the input file name and hand it to the parser."""
    # Delegate straight to parse_file; no intermediate variable needed.
    parse_file(input("Enter the txt file name: "))
def parse_file(file_name):
    """Read newline-separated URLs from *file_name*, un-suspend the ones
    wrapped by The Marvellous Suspender, and write three output files.

    Outputs:
        all_urls.txt     - every URL, with suspended ones cleaned in place
        regular_urls.txt - URLs that were not suspended
        cleaned_urls.txt - the cleaned (formerly suspended) URLs
    """
    with open(file_name, "r") as file:
        # One URL per line.
        content = file.read().split("\n")

    suspended_urls = []  # cleaned versions of the suspended URLs
    regular_urls = []    # URLs that were never suspended
    for i, url in enumerate(content):
        # The suspender wraps the real URL inside a chrome-extension:// page.
        if "chrome-extension://" in url:
            cleaned = clean_url(url)  # clean once, reuse the result
            # enumerate gives the true position; the old content.index(url)
            # always hit the FIRST occurrence, corrupting duplicate entries.
            content[i] = cleaned
            suspended_urls.append(cleaned)
        else:
            regular_urls.append(url)

    save_urls("all_urls.txt", content)           # all URLs, suspended ones cleaned
    save_urls("regular_urls.txt", regular_urls)  # never-suspended URLs only
    save_urls("cleaned_urls.txt", suspended_urls)  # formerly suspended URLs only
# gets a single "link", cleans it then returns it
def clean_url(string):
    """Extract the original URL from a Marvellous Suspender link.

    The suspender encodes the real address after an ``&uri=`` marker, e.g.
    ``chrome-extension://<id>/suspended.html#ttl=...&uri=<url>``.

    Returns the input unchanged when the marker is absent, instead of
    raising IndexError as the bare ``split(...)[1]`` did.
    """
    parts = string.split("&uri=")
    # No marker -> nothing to strip; otherwise keep the original
    # split()[1] semantics (segment after the first marker).
    return parts[1] if len(parts) > 1 else string
def save_urls(title, content):
    """Write every URL in *content* to the file *title*, one per line.

    Each entry is followed by a newline, matching the original output
    format exactly.
    """
    with open(title, "w") as out:
        # writelines with a generator avoids the explicit loop.
        out.writelines(f"{url}\n" for url in content)
# Run the interactive flow only when executed as a script, not on import.
# (Also strips the " | |" paste/scrape junk that made these lines invalid.)
if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Use an extension like Session Buddy, or find another way to export your open tab URLs as a .txt file (one URL per line).
Run this script and it exports .txt files (one URL per line); import them into Session Buddy or do with them what you please (e.g. have Python open each URL in a new tab).