This repository has been archived on 2022-01-23. You can view files and clone it, but cannot push or open issues or pull requests.
relchk/ipxe.py

180 lines
6.4 KiB
Python
Raw Normal View History

2021-01-21 18:15:20 -05:00
#!/usr/bin/env python3
# Example .ipxe.json:
# {
# "date": "Thu, 21 Jan 2021 06:42:21 +0000",
# "variant": "efi",
# "sha512": "b4d2e517c69224bf14f79e155(...)"
# }
# They don't version the ISO, so we use the file date on the mirror listing.
import datetime
import json
import os
import re
##
import requests
from bs4 import BeautifulSoup
##
import _base
try:
import lxml
_has_lxml = True
except ImportError:
_has_lxml = False
class Updater(_base.BaseUpdater):
    """Fetch/refresh the iPXE boot image (ISO or EFI) from the upstream mirror.

    Upstream does not version the image, so change detection works by
    comparing the published checksum file (e.g. ``ipxe.iso.sha512``) against
    the hash recorded locally in the version file (``.ipxe.json``).
    """
    # Matches "ipxe.iso" / "ipxe.efi", optionally prefixed with a path.
    _fname_re = re.compile(r'^(?:.*/)?ipxe\.'
                           r'(?P<variant>(iso|efi))$')
    _allowed_variants = ('iso', 'efi')
    _tpl_file = 'ipxe_grub.conf.j2'

    def __init__(self,
                 # BUGFIX: default was 'full', which is not in _allowed_variants,
                 # so Updater() always raised ValueError. 'iso' matches dest_file.
                 variant = 'iso',
                 dest_dir = '/boot/iso',  # Should be subdir of boot_dir
                 dest_file = 'ipxe.iso',
                 ver_file = '.ipxe.json',
                 lock_path = '/tmp/.ipxe.lck',
                 dl_base = 'https://boot.ipxe.org/',
                 do_grub_cfg = True,
                 boot_dir = '/boot',  # ESP or boot partition mount; where GRUB files are installed *under*
                 grub_cfg = '/etc/grub.d/40_custom_ipxe',
                 # check_gpg = True, # TODO: GPG sig checking
                 # hash_type = 'sha512'):
                 ):
        # BUGFIX: normalize case *before* validating; the original checked the
        # raw value, so e.g. variant='ISO' was rejected.
        variant = variant.lower()
        if variant not in self._allowed_variants:
            raise ValueError('variant must be one of: {0}'.format(', '.join(self._allowed_variants)))
        self.variant = variant
        if self.variant == 'efi':
            dest_file = dest_file.replace('.iso', '.efi')
        super().__init__(dest_dir,
                         dest_file,
                         ver_file,
                         lock_path,
                         do_grub_cfg,
                         boot_dir,
                         grub_cfg,
                         hash_type = 'sha512')
        self.dl_base = dl_base
        self._init_vars()

    def _init_vars(self):
        """Populate current/new version state (no-op if another run holds the lock)."""
        if self.getRunning():
            return(None)
        self.getCurVer()
        self.getNewVer()
        return(None)

    def getCurVer(self):
        """Load the locally recorded version info and decide if a (forced) update is needed.

        Forces an update when the version file, the image itself, or a matching
        on-disk hash is missing.
        """
        if self.getRunning():
            return(None)
        if not os.path.isfile(self.dest_ver):
            # No version file at all; bootstrap from scratch.
            self.do_update = True
            self.force_update = True
            return(None)
        with open(self.dest_ver, 'r') as fh:
            ver_info = json.load(fh)
        self.old_date = datetime.datetime.strptime(ver_info['date'], self._date_fmt)
        self.old_hash = ver_info.get(self.hash_type)
        self.variant = ver_info.get('variant', self.variant)
        self.new_hash = self.old_hash
        self.new_date = self.old_date
        # NOTE(review): assumes BaseUpdater sets self.arch; updateVer() records
        # it so this comparison is stable across runs.
        if ver_info.get('arch') != self.arch:
            self.do_update = True
            self.force_update = True
            return(None)
        if not os.path.isfile(self.dest_iso):
            # Image was deleted out from under us; re-fetch.
            self.do_update = True
            self.force_update = True
            return(None)
        realhash = self.getISOHash()
        if self.old_hash != realhash:
            # On-disk image doesn't match the recorded hash; re-fetch.
            self.do_update = True
            self.force_update = True
            return(None)
        return(None)

    def getNewVer(self):
        """Scrape the mirror listing for the image URL and check its published hash.

        Sets self.iso_url and self.new_hash; sets self.do_update (and a fresh
        self.new_date) when the published hash differs from the recorded one.

        Raises RuntimeError on HTTP errors or if no matching image link is found.
        """
        if self.getRunning():
            return(None)
        req = requests.get(self.dl_base, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, self.dl_base))
        html = BeautifulSoup(req.content.decode('utf-8'), ('lxml' if _has_lxml else 'html.parser'))
        # This is a little hacky: the mirror index is an Apache-style table.
        filelist = html.find('table')
        if filelist is None:
            raise RuntimeError('Could not find file listing table at {0}'.format(self.dl_base))
        self.iso_url = None
        # BUGFIX: the original iterated find_all() (every tag, KeyError on
        # 'href') and referenced an uninitialized "versions" dict and an
        # unassigned "new_ver"; scan the anchor tags for the image link instead.
        for link in filelist.find_all('a'):
            href = link.get('href')
            if not href:
                continue
            fname_r = self._fname_re.search(href)
            if not fname_r:
                continue
            if fname_r.group('variant') != self.variant:
                continue
            self.iso_url = os.path.join(self.dl_base, href.replace(self.dl_base, ''))
            break
        if self.iso_url is None:
            raise RuntimeError('Could not find a {0} image at {1}'.format(self.variant, self.dl_base))
        hash_url = '{0}.{1}'.format(self.iso_url, self.hash_type)
        req = requests.get(hash_url, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, hash_url))
        # Checksum files are "<hex digest>  <filename>"; keep only the digest.
        self.new_hash = req.content.decode('utf-8').lower().split()[0]
        # No upstream versioning: a changed published hash *is* a new release.
        if not all((self.old_hash, self.old_date)) or (self.new_hash != self.old_hash):
            self.do_update = True
            self.new_date = datetime.datetime.now(datetime.timezone.utc)
        return(None)

    def updateVer(self):
        """Write the recorded version info (date, variant, arch, hash) to dest_ver."""
        if self.getRunning():
            return(None)
        d = {
            'date': self.new_date.strftime(self._date_fmt),
            'variant': self.variant,
            # BUGFIX: getCurVer() compares the stored 'arch' against self.arch,
            # but it was never written, which forced a re-fetch on every run.
            'arch': self.arch,
            self.hash_type: self.new_hash}
        j = json.dumps(d, indent = 4)
        with open(self.dest_ver, 'w') as fh:
            fh.write(j)
            fh.write('\n')
        os.chmod(self.dest_ver, 0o0644)
        return(None)
# Script entry point: run a full check/update cycle.
if __name__ == '__main__':
    updater = Updater()
    updater.main()