@platomav
Created September 6, 2022 22:06

MSI LiveUpdate BIOS Grabber

Description

Parses the MSI LiveUpdate BIOS update catalogs and saves all download links that are new since the last run into a text file. It strips the catalog formatting, ignores false positives, removes duplicate links, and sorts them alphabetically for easy comparison afterwards.
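
For reference, the duplicate removal and alphabetical sorting mentioned above come down to the same dict.fromkeys() plus sorted() pattern that the script below uses; the links in this short sketch are hypothetical placeholders, not real catalog entries:

links = [
    'https://liveupdate.msi.com/autobios/XBIOS/ExampleB/MS-ExampleB.exe',  # hypothetical link
    'https://liveupdate.msi.com/autobios/XBIOS/ExampleA/MS-ExampleA.exe',  # hypothetical link
    'https://liveupdate.msi.com/autobios/XBIOS/ExampleB/MS-ExampleB.exe',  # duplicate
]

unique_sorted = sorted(dict.fromkeys(links))  # drop duplicates, then sort alphabetically

print('\n'.join(unique_sorted))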

Usage

The DB (i.e. the MSI_BIOS_Grab.txt file) contains all MSI LiveUpdate BIOS update catalog download links gathered so far across all runs. Note that some of them may not work (e.g. they return 404), most likely due to MSI mistakes/typos.
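
As a rough sketch that is not part of the gist itself, the stored DB links can be spot-checked for dead entries with one HEAD request per line of MSI_BIOS_Grab.txt (the 30-second timeout is an arbitrary assumption):

import urllib.error
import urllib.request

with open('MSI_BIOS_Grab.txt', 'r', encoding='utf-8') as db:
    links = [line.strip() for line in db if line.strip()]

for link in links:
    try:
        # HEAD request avoids downloading the BIOS executable itself
        request = urllib.request.Request(link, method='HEAD')
        with urllib.request.urlopen(request, timeout=30):
            pass  # Link resolved successfully
    except (urllib.error.HTTPError, urllib.error.URLError) as error:
        print('Dead link: %s (%s)' % (link, error))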

Compatibility

Should work on all Windows, Linux, or macOS systems with Python 3.7 or later.

Prerequisites

No prerequisites are needed to run the Python script; it relies only on the standard library.

#!/usr/bin/env python3
#coding=utf-8

"""
MSI BIOS Grab
MSI LiveUpdate BIOS Grabber
Copyright (C) 2021 Plato Mavropoulos
*** FOR PRIVATE USE ONLY ***
"""

title = 'MSI LiveUpdate BIOS Grabber v1.0'

print('\n' + title)

import sys
import traceback

def show_exception_and_exit(exc_type, exc_value, tb):
    # Pause on any unhandled exception so the console window does not close immediately
    if exc_type is KeyboardInterrupt:
        print('\nNote: Keyboard Interrupt!')
    else:
        print('\nError: %s crashed, please report the following:\n' % title)
        traceback.print_exception(exc_type, exc_value, tb)

    input('\nPress enter to exit')
    sys.exit(1)

sys.excepthook = show_exception_and_exit

import urllib.request

from datetime import datetime

dat_db = 'MSI_BIOS_Grab.txt'  # DB with all links gathered so far (must already exist, may be empty)

mlu_url = 'https://liveupdate.msi.com/autobios/'
mlu_cat = ['DataBase/BIOSList.xml', 'DQATest/BIOSList.xml']

mlu_links = []

# Load the download links which were gathered during previous runs
with open(dat_db, 'r', encoding='utf-8') as dat:
    db_links = [link.strip('\n') for link in dat.readlines()]

# Parse each LiveUpdate catalog and build the download link of every <WebPath> entry
for cat in mlu_cat:
    with urllib.request.urlopen(mlu_url + cat) as response:
        fdata = response.readlines()

    web_paths = [l.decode('utf-8').strip('\n') for l in fdata if l.startswith(b'<WebPath>')]

    for wp in web_paths:
        wp_value = wp[len('<WebPath>'):]  # strip the opening <WebPath> tag
        link = '%sXBIOS/%s/%s.exe' % (mlu_url, wp_value.split('<')[0], wp_value.split('/')[0].replace('Ms', 'MS-'))
        mlu_links.append(link)

if not mlu_links:
    input('\nError: Failed to retrieve Catalog Links!\n\nDone!')
    sys.exit(1)

# Keep only the links which are not already in the DB, deduplicated and in alphabetical order
new_links = sorted(dict.fromkeys([link for link in mlu_links if link not in db_links]))

if new_links:
    print('\nFound %d new link(s)!' % len(new_links))

    # Compact UTC timestamp (YYYYMMDDHHMMSS) for the output file name
    cur_date = datetime.utcnow().isoformat(timespec='seconds').replace('-', '').replace('T', '').replace(':', '')

    with open('MSI_%s.txt' % cur_date, 'w', encoding='utf-8') as lout:
        lout.write('\n'.join(new_links))
else:
    print('\nThere are no new links!')

# Merge the newly gathered links into the DB, deduplicated and sorted
new_db = '\n'.join(sorted(dict.fromkeys(mlu_links + db_links)))

with open(dat_db, 'w', encoding='utf-8') as dbout:
    dbout.write(new_db)

input('\nDone!')