@bacilla-ru
Last active November 13, 2021 18:35
Update installed Zeal (https://github.com/zealdocs/zeal) docsets.
#!/usr/bin/env python3
import json
import os
import os.path
import shutil
import sys
import tarfile
import tempfile
import typing
import xml.etree.ElementTree as ET
from collections import namedtuple
from copy import copy
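
# Third-party dependencies are imported one at a time so that any that are
# missing can be reported together with a distro-appropriate install command.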
missing_packages = []
try:
    import requests
except ImportError:
    missing_packages.append('requests')

try:
    from pkg_resources import parse_version
except ImportError:
    missing_packages.append('setuptools')

try:
    from tabulate import tabulate
except ImportError:
    missing_packages.append('tabulate')

try:
    from tqdm import tqdm
except ImportError:
    missing_packages.append('tqdm')

if missing_packages:
    import platform

    descr = platform.platform().lower()
    if descr.startswith('linux'):
        if 'arch' in descr:
            print('=== Install required packages using command:')
            print('sudo pacman -S ' + ' '.join('python-' + it for it in missing_packages))
            exit(1)
        elif 'debian' in descr or 'ubuntu' in descr:
            print('=== Install required packages using command:')
            print('sudo apt-get install ' + ' '.join('python3-' + it for it in missing_packages))
            exit(1)
    print('=== Install required packages using command:')
    print('pip install ' + ' '.join(missing_packages))
    exit(1)
WIDTH = shutil.get_terminal_size()[0]
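
# Ver bundles a docset version: the version string, its integer revision and a
# combined "version/revision" display name.
# Outdated describes one installed docset whose feed reports a newer release.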
Ver = namedtuple('Ver', 'ver rev name')
Outdated = namedtuple('Outdated', 'name curr_ver next_ver urls meta path')
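
# Build an Outdated record, composing the human-readable "version/revision"
# names for the current and the feed versions.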
def mk_outdated(name, curr_ver, curr_rev, next_ver, next_rev, urls, meta, path):
    curr_name, next_name = '', ''
    if curr_ver or next_ver:
        curr_name = curr_ver
        next_name = next_ver
    if curr_rev or next_rev:
        curr_name += (curr_name and '/') + str(curr_rev)
        next_name += (next_name and '/') + str(next_rev)
    return Outdated(name, Ver(curr_ver, curr_rev, curr_name), Ver(next_ver, next_rev, next_name), urls, meta, path)
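
# A docset is outdated when the feed version is newer, or the version is equal
# but the feed revision is higher.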
def is_outdated(outdated):
    cur, nxt = parse_version(outdated.curr_ver.ver), parse_version(outdated.next_ver.ver)
    return nxt > cur or nxt == cur and outdated.next_ver.rev > outdated.curr_ver.rev
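
# Read the docsets location from Zeal's own configuration file.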
def get_docsets_path():
    import configparser

    config = configparser.ConfigParser()
    config.read(os.path.expanduser('~/.config/Zeal/Zeal.conf'))
    try:
        return config.get('docsets', 'path')
    except configparser.Error:
        return None
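
# Compare an installed docset's meta.json against its feed (the feed_url from
# meta.json, or Kapeli's default feed) and return an Outdated record if the
# feed offers a newer version; return None otherwise.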
def check_docset(path) -> typing.Optional[Outdated]:
    with open(os.path.join(path, 'meta.json'), 'r', encoding='utf8') as meta_json:
        meta = json.load(meta_json)
    name = meta['name']
    if 'feed_url' in meta:
        feed = meta['feed_url']
    else:
        feed = 'https://raw.githubusercontent.com/Kapeli/feeds/master/%s.xml' % name
    r = requests.get(feed)
    if r.ok:
        root = ET.fromstring(r.text)
        full_version = root.findtext('version')
        if '/' in full_version:
            version, revision = full_version.split('/')
            revision = int(revision)
        else:
            version, revision = full_version, 0
        if version.startswith('.'):
            for n in range(len(name) - 1, 0, -1):
                if name[n].isdigit():
                    version = name[n] + version
                else:
                    break
        outdated = mk_outdated(name,
                               meta.get('version', ''), int(meta.get('revision', 0)),
                               version, revision,
                               [it.text for it in root.findall('url')], meta, path)
        if is_outdated(outdated):
            return outdated
    else:
        print('=== Unable to fetch feed: ' + feed)
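
# Stream a URL to a local file with a tqdm progress bar; optionally reject
# responses whose Content-Type does not match the expected one.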
def download_file(url, dst, chunk_size=32*1024, expected_content_type=None):
    assert url
    assert dst

    os.makedirs(os.path.dirname(dst), exist_ok=True)
    r = requests.get(url, stream=True)
    if not r.ok:
        return False
    if expected_content_type and r.headers.get('Content-Type', '') != expected_content_type:
        return False
    total_size = int(r.headers.get('Content-Length', 0))
    with open(dst, 'wb') as f:
        with tqdm(total=total_size, unit='B', unit_scale=True, ncols=WIDTH, file=sys.stdout) as pbar:
            for data in r.iter_content(chunk_size):
                read_size = len(data)
                f.write(data)
                pbar.update(read_size)
    return True
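
# Download an archive for the given outdated docset (preferring a few CDN
# mirrors), unpack it, replace the docset's Contents directory and rewrite
# meta.json with the new version and revision.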
def update_docset(outdated, tmp_dir, preferred_cdns=('frankfurt', 'london', 'singapore')):
    print('=== %s' % outdated.name)

    def update(tgz):
        unpack_to = os.path.join(tmp_dir, outdated.name + '.unpacked')
        with tarfile.open(tgz) as tar:
            tar.extractall(unpack_to)
        for entry in os.scandir(unpack_to):
            if entry.name.endswith('.docset') and entry.is_dir():
                shutil.rmtree(os.path.join(outdated.path, 'Contents'))
                shutil.copytree(os.path.join(entry.path, 'Contents'), os.path.join(outdated.path, 'Contents'))
                meta = copy(outdated.meta)
                if outdated.next_ver.ver:
                    meta['version'] = outdated.next_ver.ver
                else:
                    meta.pop('version', None)
                meta['revision'] = str(outdated.next_ver.rev)
                with open(os.path.join(outdated.path, 'meta.json'), 'w', encoding='utf8') as meta_json:
                    json.dump(meta, meta_json, indent=4)
                return True

    def download_and_update(url):
        tgz = os.path.join(tmp_dir, outdated.name + '.tgz')
        print('=== Downloading %s' % url)
        if download_file(url, tgz, expected_content_type='application/x-tar'):
            print('=== Updating {} ({} -> {})'.format(outdated.name, outdated.curr_ver.name, outdated.next_ver.name))
            return update(tgz)
        return False

    tried_urls = []
    for cdn in preferred_cdns:
        for url in outdated.urls:
            if cdn in url:
                if download_and_update(url):
                    print('=== Success')
                    return
                else:
                    tried_urls.append(url)
    for url in outdated.urls:
        if url not in tried_urls:
            if download_and_update(url):
                print('=== Success')
                return
    print('=== Fail')
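
# Locate the docsets directory, collect everything that is outdated, show a
# summary table and then update each docset, downloading into a temporary
# directory.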
def main():
    docsets_path = get_docsets_path()
    if not docsets_path:
        print('=== Cannot determine docsets path, exiting')
        return
    print('=== Using docsets path: ' + docsets_path)

    all_outdated = []
    for entry in os.scandir(docsets_path):
        if entry.name.endswith('.docset') and entry.is_dir():
            result = check_docset(entry.path)
            if result:
                all_outdated.append(result)

    if all_outdated:
        all_outdated.sort(key=lambda it: it.name)
        print('=== Docsets needing update:')
        print(tabulate(
            [[it.name, it.curr_ver.name, '-> ' + it.next_ver.name] for it in all_outdated], tablefmt='plain'))
        with tempfile.TemporaryDirectory() as tmp_dir:
            for it in all_outdated:
                update_docset(it, tmp_dir)
    else:
        print('=== All docsets are up to date.')
if __name__ == '__main__':
    main()
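
For reference, a minimal dry-run sketch (an illustration, not part of the gist): assuming the script above is saved as zeal_update.py (a hypothetical file name) and the required packages are installed, its helpers can be reused to list outdated docsets without changing anything on disk.

# Dry-run sketch: report outdated docsets without updating them.
# Assumes the gist above is saved next to this file as zeal_update.py
# (hypothetical name) and that requests/setuptools/tabulate/tqdm are installed.
import os

import zeal_update

docsets_path = zeal_update.get_docsets_path()
if docsets_path:
    for entry in os.scandir(docsets_path):
        if entry.name.endswith('.docset') and entry.is_dir():
            outdated = zeal_update.check_docset(entry.path)
            if outdated:
                print(outdated.name, outdated.curr_ver.name, '->', outdated.next_ver.name)
else:
    print('Zeal config not found or it has no docsets path')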