From 9d018be8aafb22af6dd0faef5b50948db81a784b Mon Sep 17 00:00:00 2001
From: r00t
Date: Mon, 5 Jul 2021 02:50:13 -0400
Subject: [PATCH] checking in before switching things up

---
 bdisk/guests/archlinux.py | 63 +++++++++++++++++++--------------------
 bdisk/utils.py            | 21 +++++++------
 2 files changed, 41 insertions(+), 43 deletions(-)

diff --git a/bdisk/guests/archlinux.py b/bdisk/guests/archlinux.py
index 4c92d82..375fc99 100644
--- a/bdisk/guests/archlinux.py
+++ b/bdisk/guests/archlinux.py
@@ -1,16 +1,7 @@
 #!/usr/bin/env python3
 
-from .. import download  # LOCAL # do i need to escalate two levels up?
 import os
-from .. import utils
-
-# TODO: can this be trimmed down?
-prereqs = ['arch-install-scripts', 'archiso', 'bzip2', 'coreutils', 'customizepkg-scripting', 'cronie', 'dhclient',
-           'dhcp', 'dhcpcd', 'dosfstools', 'dropbear', 'efibootmgr', 'efitools', 'efivar', 'file', 'findutils',
-           'iproute2', 'iputils', 'libisoburn', 'localepurge', 'lz4', 'lzo', 'lzop', 'mkinitcpio-nbd',
-           'mkinitcpio-nfs-utils', 'mkinitcpio-utils', 'nbd', 'ms-sys', 'mtools', 'net-tools', 'netctl',
-           'networkmanager', 'pv', 'python', 'python-pyroute2', 'rsync', 'sed', 'shorewall', 'squashfs-tools',
-           'sudo', 'sysfsutils', 'syslinux', 'traceroute', 'vi']
+from .. import utils  # LOCAL # do i need to escalate two levels up?
 
 class Manifest(object):
     def __init__(self, cfg):
@@ -23,24 +14,46 @@ class Manifest(object):
         self.gpg_authorities = ['4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC']
         self.tarball = None
         self.sig = None
+        self.mirror = None
         self.checksum = {'sha1': None, 'md5': None}
-        self._get_filename()
+        self.verified = False
+        self.arches = ('x86_64', )
+        self.bootsupport = ('uefi', 'bios', 'pxe', 'ipxe', 'iso')
+        self.kernel = '/boot/vmlinuz-linux'
+        self.initrd = '/boot/initramfs-linux.img'
+        # TODO: can this be trimmed down?
+        self.prereqs = ['arch-install-scripts', 'archiso', 'bzip2', 'coreutils', 'customizepkg-scripting', 'cronie',
+                        'dhclient', 'dhcp', 'dhcpcd', 'dosfstools', 'dropbear', 'efibootmgr', 'efitools', 'efivar',
+                        'file', 'findutils', 'iproute2', 'iputils', 'libisoburn', 'localepurge', 'lz4', 'lzo',
+                        'lzop', 'mkinitcpio-nbd', 'mkinitcpio-nfs-utils', 'mkinitcpio-utils', 'nbd', 'ms-sys',
+                        'mtools', 'net-tools', 'netctl', 'networkmanager', 'pv', 'python', 'python-pyroute2',
+                        'rsync', 'sed', 'shorewall', 'squashfs-tools', 'sudo', 'sysfsutils', 'syslinux',
+                        'traceroute', 'vi']
+        self._get_filenames()
 
-    def _get_filename(self):
+    def _get_filenames(self):
         # TODO: cache this info
         webroot = 'iso/latest'
         for m in self.cfg['mirrors']:
             uri = os.path.join(m, webroot)
             try:
-                self.tarball = utils.detect().remote_files(uri, ptrn = ('archlinux-'
-                                                                        'bootstrap-'
-                                                                        '[0-9]{4}\.'
-                                                                        '[0-9]{2}\.'
-                                                                        '[0-9]{2}-'
-                                                                        'x86_64\.tar\.gz$'))[0]
+                self.tarball = utils.detect().remote_files(uri, regex = ('archlinux-'
+                                                                         'bootstrap-'
+                                                                         '[0-9]{4}\.'
+                                                                         '[0-9]{2}\.'
+                                                                         '[0-9]{2}-'
+                                                                         'x86_64\.tar\.gz$'))[0]
+                self.sig = '{0}.sig'.format(self.tarball)
+                for h in self.checksum:
+                    self.checksum[h] = os.path.join(uri, '{0}sums.txt'.format(h))
+                self.mirror = m
+                break
             except Exception as e:
                 pass
+        if not self.tarball:
+            raise ValueError('Could not find the tarball URI. Check your network connection.')
+        return()
 
 
 def extern_prep(cfg, cur_arch = 'x86_64'):
@@ -113,17 +126,3 @@ packager = {'pre_check': False,
                            '{PACKAGE}']
             },
     }
-
-# These are packages *required* to exist on the base guest, no questions asked.
-# TODO: can this be trimmed down?
-prereqs = ['arch-install-scripts', 'archiso', 'bzip2', 'coreutils',
-           'customizepkg-scripting', 'cronie', 'dhclient', 'dhcp', 'dhcpcd',
-           'dosfstools', 'dropbear', 'efibootmgr', 'efitools', 'efivar',
-           'file', 'findutils', 'iproute2', 'iputils', 'libisoburn',
-           'localepurge', 'lz4', 'lzo', 'lzop', 'mkinitcpio-nbd',
-           'mkinitcpio-nfs-utils', 'mkinitcpio-utils', 'nbd', 'ms-sys',
-           'mtools', 'net-tools', 'netctl', 'networkmanager', 'pv',
-           'python', 'python-pyroute2', 'rsync', 'sed', 'shorewall',
-           'squashfs-tools', 'sudo', 'sysfsutils',
-           'syslinux', 'traceroute', 'vi']
-
diff --git a/bdisk/utils.py b/bdisk/utils.py
index d4cb306..36a4c5e 100644
--- a/bdisk/utils.py
+++ b/bdisk/utils.py
@@ -87,34 +87,33 @@ class detect(object):
             salt = _hash_list[2]
         return(salt)
 
-    def remote_files(self, url_base, ptrn = None, flags = []):
+    def remote_files(self, url_base, regex = None, sort = False):
         soup = BeautifulSoup(Download(url_base, progress = False).fetch().decode('utf-8'), 'lxml')
         urls = []
-        if 'regex' in flags:
-            if not isinstance(ptrn, str):
-                raise ValueError('"ptrn" must be a regex pattern to match '
-                                 'against')
-            else:
-                ptrn = re.compile(ptrn)
+        if regex:
+            if not isinstance(regex, str):
+                raise ValueError('"regex" must be a regex pattern to match '
+                                 'against')
+            else:
+                ptrn = re.compile(regex)
         for u in soup.find_all('a'):
             if not u.has_attr('href'):
                 continue
-            if 'regex' in flags:
+            if regex:
                 if not ptrn.search(u.attrs['href']):
                     continue
             if u.has_attr('href'):
+                if re.search('/?\.\./?$', u.attrs['href']):
+                    continue
                 urls.append(u.attrs['href'])
         if not urls:
             return(None)
         # We certainly can't intelligently parse the printed timestamp since it
         # varies so much and that'd be a nightmare to get consistent...
-        # But we CAN sort by filename.
-        if 'latest' in flags:
+        # But we CAN sort by filename. MOST of the time, though, this will already be sorted.
+        if sort:
             urls = sorted(list(set(urls)))
-            # urls = urls[-1]
-        # else:
-            # urls = urls[0]
         return(urls)
 
     def gpgkeyID_from_url(self, url):
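
A minimal usage sketch of the reworked detect().remote_files() signature, assuming a hypothetical
mirror URL and a caller importing the bdisk package from outside; only the regex/sort keyword
arguments, the bootstrap-tarball pattern, and the .sig/checksum derivation come from the diff above.

    import os

    from bdisk import utils

    mirror = 'https://arch.mirror.example.com/archlinux'  # hypothetical placeholder mirror
    uri = os.path.join(mirror, 'iso/latest')

    # 'regex' replaces the old ptrn + flags=['regex'] pair; 'sort' replaces the old
    # 'latest' flag and returns the de-duplicated matches sorted by filename.
    matches = utils.detect().remote_files(uri,
                                          regex = (r'archlinux-bootstrap-'
                                                   r'[0-9]{4}\.[0-9]{2}\.[0-9]{2}-'
                                                   r'x86_64\.tar\.gz$'),
                                          sort = True)
    if matches:
        tarball = matches[0]
        sig = '{0}.sig'.format(tarball)              # mirrors what Manifest._get_filenames() now does
        sha1sums = os.path.join(uri, 'sha1sums.txt') # checksum URLs are derived the same way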