pacman-key initialization done.

This commit is contained in:
brent s. 2020-01-03 03:38:35 -05:00
parent dc70409c8d
commit ec28849f23
7 changed files with 323 additions and 49 deletions

View File

@@ -11,16 +11,31 @@ import gpg
from lxml import etree
##
from . import _common
from . import keyring




_logger = logging.getLogger(__name__)




# TODO: There is some duplication here that we can get rid of in the future. Namely:
# - Mirror URI parsing
# - Unified function for parsing Includes
# - At some point, ideally there should be a MirrorList class that can take (or generate?) a list of Mirrors
# and have a write function to write out a mirror list to a specified location.
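
A minimal sketch of the MirrorList idea described in the TODO above (hypothetical, not part of this commit; it assumes Mirror objects that have already been parse()d so real_uri is set):

import os


class MirrorList(object):
    # Hypothetical sketch only; not part of this commit.
    def __init__(self, mirrors = None):
        # "mirrors" is assumed to be an iterable of parsed Mirror objects (real_uri set).
        self.mirrors = list(mirrors) if mirrors else []

    def writeConf(self, chroot_base):
        # Append the mirrors to <chroot>/etc/pacman.d/mirrorlist, the same file
        # _initMirrors() below appends to.
        dest = os.path.join(chroot_base, 'etc', 'pacman.d', 'mirrorlist')
        with open(dest, 'a') as fh:
            fh.write('\n# Added by AIF-NG.\n')
            for m in self.mirrors:
                fh.write('Server = {0}\n'.format(m.real_uri))
        return(None)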


class Mirror(object):
def __init__(self, mirror_xml):
def __init__(self, mirror_xml, repo = None, arch = None):
self.xml = mirror_xml
_logger.debug('mirror_xml: {0}'.format(etree.tostring(self.xml, with_tail = False).decode('utf-8')))
self.uri = self.xml.text
self.real_uri = None
self.aif_uri = None

def parse(self, chroot_base, repo, arch):
self.real_uri = self.uri.replace('$repo', repo).replace('$arch', arch)
if self.uri.startswith('file://'):
self.aif_uri = os.path.join(chroot_base, re.sub(r'^file:///?', '', self.real_uri))
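
For illustration (hypothetical values; the mirror URI and chroot path are taken from the sample XML later in this commit):

# m = Mirror(mirror_xml)  # e.g. <mirror>http://arch.mirror.square-r00t.net/$repo/os/$arch</mirror>
# m.parse('/mnt/aif', 'core', 'x86_64')
# m.real_uri == 'http://arch.mirror.square-r00t.net/core/os/x86_64'
# # A file:// mirror such as file:///etc/pacman.d/mirrorlist would additionally get
# # m.aif_uri == '/mnt/aif/etc/pacman.d/mirrorlist'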




class Package(object):
@@ -42,6 +57,7 @@ class PackageManager(object):
self.chroot_base = chroot_base
self.pacman_dir = os.path.join(self.chroot_base, 'var', 'lib', 'pacman')
self.configfile = os.path.join(self.chroot_base, 'etc', 'pacman.conf')
self.keyring = keyring.PacmanKey(self.chroot_base)
self.config = None
self.handler = None
self.repos = []
@@ -97,7 +113,7 @@ class PackageManager(object):


def _initMirrors(self):
mirrors = self.xml.find('mirrorList')
if mirrors is not None:
if mirrors:
_mirrorlist = os.path.join(self.chroot_base, 'etc', 'pacman.d', 'mirrorlist')
with open(_mirrorlist, 'a') as fh:
fh.write('\n# Added by AIF-NG.\n')
@@ -114,8 +130,7 @@ class PackageManager(object):
with open(_conf, 'a') as fh:
fh.write('\n# Added by AIF-NG.\n')
for r in repos.findall('repo'):
repo = Repo(r)
repo = Repo(self.chroot_base, r)
self.repos.append(repo)
if repo.enabled:
fh.write('[{0}]\n'.format(repo.name))
if repo.siglevel:
@@ -132,21 +147,41 @@ class PackageManager(object):
fh.write('#Server = {0}\n'.format(repo.uri))
else:
fh.write('#Include = /etc/pacman.d/mirrorlist\n')
self.repos.append(repo)
_logger.info('Appended: {0}'.format(_conf))
return(None)




class Repo(object):
def __init__(self, repo_xml, arch = 'x86_64'):
def __init__(self, chroot_base, repo_xml, arch = 'x86_64'):
# TODO: support Usage? ("REPOSITORY SECTIONS", pacman.conf(5))
self.xml = repo_xml
_logger.debug('repo_xml: {0}'.format(etree.tostring(self.xml, with_tail = False).decode('utf-8')))
# TODO: SigLevels?!
self.name = self.xml.attrib['name']
self.uri = self.xml.attrib.get('mirror') # "server" in pyalpm lingo.
self.mirrors = {}
self.parsed_mirrors = []
_mirrors = self.xml.xpath('mirror|include') # "Server" and "Include" respectively in pyalpm lingo.
if _mirrors:
for m in _mirrors:
k = m.tag.title()
if k == 'Mirror':
k = 'Server'
if k not in self.mirrors.keys():
self.mirrors[k] = []
self.mirrors[k].append(m.text)
if m.tag == 'include':
file_uri = os.path.join(chroot_base, re.sub(r'^file:///?', '', m.text))
if not os.path.isfile(file_uri):
_logger.error('Include file ({0}) does not exist: {1}'.format(m.text, file_uri))
raise FileNotFoundError('Include file does not exist')
with open(file_uri, 'r') as fh:
for line in fh.read().splitlines():
# Assumed reconstruction (the original loop body did not survive extraction):
# collect the Server entries from the included mirrorlist for later parsing.
if re.match(r'^\s*Server\s*=', line):
self.parsed_mirrors.append(re.sub(r'^\s*Server\s*=\s*', '', line.strip()))
else:
# Default (mirrorlist)
self.mirrors['Include'] = ['file:///etc/pacman.d/mirrorlist']
self.enabled = (True if self.xml.attrib.get('enabled', 'true') in ('1', 'true') else False)
self.siglevel = self.xml.attrib.get('sigLevel')
self.real_uri = None
if self.uri:
self.real_uri = self.uri.replace('$repo', self.name).replace('$arch', arch)
return(None)
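
As an illustration (derived from the sample XML later in this commit, not part of the code), self.mirrors ends up keyed by pacman.conf directive:

# <repo name="sqrt" ...><mirror>https://$repo.arch.repo.square-r00t.net</mirror></repo>
#     -> {'Server': ['https://$repo.arch.repo.square-r00t.net']}
# <repo name="extra" .../>   (no mirror/include children)
#     -> {'Include': ['file:///etc/pacman.d/mirrorlist']}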

View File

@@ -3,6 +3,9 @@ import logging
from collections import OrderedDict




# TODO: Add pacman.conf parsing?


_logger = logging.getLogger('pacman:_common')





aif/pacman/keyring.py Normal file
View File

@@ -0,0 +1,229 @@
import csv
import logging
import os
import re
import sqlite3
##
import gpg


# We don't use utils.gpg_handler because this is pretty much all procedural.

_logger = logging.getLogger(__name__)


_createTofuDB = """BEGIN TRANSACTION;
CREATE TABLE IF NOT EXISTS "ultimately_trusted_keys" (
"keyid" TEXT
);
CREATE TABLE IF NOT EXISTS "encryptions" (
"binding" INTEGER NOT NULL,
"time" INTEGER
);
CREATE TABLE IF NOT EXISTS "signatures" (
"binding" INTEGER NOT NULL,
"sig_digest" TEXT,
"origin" TEXT,
"sig_time" INTEGER,
"time" INTEGER,
PRIMARY KEY("binding","sig_digest","origin")
);
CREATE TABLE IF NOT EXISTS "bindings" (
"oid" INTEGER PRIMARY KEY AUTOINCREMENT,
"fingerprint" TEXT,
"email" TEXT,
"user_id" TEXT,
"time" INTEGER,
"policy" INTEGER CHECK(policy in (1,2,3,4,5)),
"conflict" STRING,
"effective_policy" INTEGER DEFAULT 0 CHECK(effective_policy in (0,1,2,3,4,5)),
UNIQUE("fingerprint","email")
);
CREATE TABLE IF NOT EXISTS "version" (
"version" INTEGER
);
INSERT INTO "version" ("version") VALUES (1);
CREATE INDEX IF NOT EXISTS "encryptions_binding" ON "encryptions" (
"binding"
);
CREATE INDEX IF NOT EXISTS "bindings_email" ON "bindings" (
"email"
);
CREATE INDEX IF NOT EXISTS "bindings_fingerprint_email" ON "bindings" (
"fingerprint",
"email"
);
COMMIT;"""


class KeyEditor(object):
def __init__(self, trustlevel = 4):
self.trusted = False
self.revoked = False
self.trustlevel = trustlevel
_logger.info('Key editor instantiated.')

def revoker(self, kw, arg, *args, **kwargs):
# The "save" commands here can also be "quit".
_logger.debug('Key revoker invoked:')
_logger.debug('Command: {0}'.format(kw))
_logger.debug('Argument: {0}'.format(arg))
if args:
_logger.debug('args: {0}'.format(','.join(args)))
if kwargs:
_logger.debug('kwargs: {0}'.format(kwargs))
if kw == 'GET_LINE':
if arg == 'keyedit.prompt':
if not self.revoked:
_logger.debug('Returning: "disable"')
self.revoked = True
return('disable')
else:
_logger.debug('Returning: "save"')
return('save')
else:
_logger.debug('Returning: "save"')
return('save')
return (None)

def truster(self, kw, arg, *args, **kwargs):
_logger.debug('Key trust editor invoked:')
_logger.debug('Command: {0}'.format(kw))
_logger.debug('Argument: {0}'.format(arg))
if args:
_logger.debug('args: {0}'.format(','.join(args)))
if kwargs:
_logger.debug('kwargs: {0}'.format(kwargs))
if kw == 'GET_LINE':
if arg == 'keyedit.prompt':
if not self.trusted:
_logger.debug('Returning: "trust"')
return('trust')
else:
_logger.debug('Returning: "save"')
return('save')
elif arg == 'edit_ownertrust.value' and not self.trusted:
self.trusted = True
_logger.debug('Status changed to trusted')
_logger.debug('Returning: "{0}"'.format(self.trustlevel))
return(str(self.trustlevel))
else:
_logger.debug('Returning: "save"')
return('save')
return(None)
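
Both methods are written as prompt callbacks for gpg.Context.interact(); _initKeyring() below drives them roughly like this (illustrative only; ctx and key stand in for a context and an imported key):

# editor = KeyEditor(trustlevel = 4)
# ctx.interact(key, editor.truster)       # answers the keyedit prompts to set ownertrust
# ctx.interact(key, KeyEditor().revoker)  # answers the prompts to disable the key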


class PacmanKey(object):
def __init__(self, chroot_base):
# We more or less recreate /usr/bin/pacman-key in python.
self.chroot_base = chroot_base
self.home = os.path.join(self.chroot_base, 'etc', 'pacman.d', 'gnupg')
self.conf = os.path.join(self.home, 'gpg.conf')
self.agent_conf = os.path.join(self.home, 'gpg-agent.conf')
self.db = os.path.join(self.home, 'tofu.db')
# ...pacman devs, why do you create the gnupg home with 0755?
os.makedirs(self.home, 0o0755, exist_ok = True)
# Probably not necessary, but...
with open(os.path.join(self.home, '.gpg-v21-migrated'), 'wb') as fh:
fh.write(b'')
_logger.info('Touched/wrote: {0}'.format(os.path.join(self.home, '.gpg-v21-migrated')))
if not os.path.isfile(self.conf):
with open(self.conf, 'w') as fh:
fh.write(('# Generated by AIF-NG.\n'
'no-greeting\n'
'no-permission-warning\n'
'lock-never\n'
'keyserver-options timeout=10\n'))
_logger.info('Wrote: {0}'.format(self.conf))
if not os.path.isfile(self.agent_conf):
with open(self.agent_conf, 'w') as fh:
fh.write(('# Generated by AIF-NG.\n'
'disable-scdaemon\n'))
_logger.info('Wrote: {0}'.format(self.agent_conf))
self.key = None
# ...PROBABLY order-specific.
self._initTofuDB()
self.gpg = gpg.Context(home_dir = self.home)
self._initKey()
self._initPerms()
self._initKeyring()

def _initKey(self):
# These match what is currently used by pacman-key --init.
_keyinfo = {'userid': 'Pacman Keyring Master Key <pacman@localhost>',
'algorithm': 'rsa2048',
'expires_in': 0,
'expires': False,
'sign': True,
'encrypt': False,
'certify': False,
'authenticate': False,
'passphrase': None,
'force': False}
_logger.debug('Creating key with options: {0}'.format(_keyinfo))
genkey = self.gpg.create_key(**_keyinfo)
_logger.info('Created key: {0}'.format(genkey.fpr))
self.key = self.gpg.get_key(genkey.fpr, secret = True)
self.gpg.signers = [self.key]
_logger.debug('Set signer/self key to: {0}'.format(self.key))

def _initKeyring(self):
krdir = os.path.join(self.chroot_base, 'usr', 'share', 'pacman', 'keyrings')
keyrings = [i for i in os.listdir(krdir) if i.endswith('.gpg')]
_logger.info('Importing {0} keyring(s).'.format(len(keyrings)))
for idx, kr in enumerate(keyrings):
krname = re.sub(r'\.gpg$', '', kr)
krfile = os.path.join(krdir, kr)
trustfile = os.path.join(krdir, '{0}-trusted'.format(krname))
revokefile = os.path.join(krdir, '{0}-revoked'.format(krname))
_logger.debug('Importing keyring: {0} ({1}/{2})'.format(krname, (idx + 1), len(keyrings)))
with open(os.path.join(krdir, kr), 'rb') as fh:
imported_keys = self.gpg.key_import(fh.read())
if imported_keys:
_logger.debug('Imported: {0}'.format(imported_keys))
# We also have to sign/trust the keys. I still can't believe there isn't an easier way to do this.
if os.path.isfile(trustfile):
with open(trustfile, 'r') as fh:
for trust in csv.reader(fh, delimiter = ':'):
k_id = trust[0]
k_trust = int(trust[1])
k = self.gpg.get_key(k_id)
self.gpg.key_sign(k, local = True)
editor = KeyEditor(trustlevel = k_trust)
self.gpg.interact(k, editor.truster)
# And revoke keys.
if os.path.isfile(revokefile):
with open(revokefile, 'r') as fh:
for fpr in fh.read().splitlines():
k = self.gpg.get_key(fpr)
editor = KeyEditor()
self.gpg.interact(k, editor.revoker)
return(None)
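
For reference (illustrative values; the fingerprint is borrowed from the sample XML's keyID), the *-trusted files parsed above are colon-delimited, one key per line:

# 4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC:4:
#   -> k_id = '4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC', k_trust = 4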

def _initPerms(self):
# Again, not quite sure why it's so permissive. But pacman-key explicitly does it, so.
filenames = {'pubring': 0o0644,
'trustdb': 0o0644,
'secring': 0o0600}
for fname, filemode in filenames.items():
fpath = os.path.join(self.home, '{0}.gpg'.format(fname))
if not os.path.isfile(fpath):
# TODO: Can we just manually create an empty file, or will GPG not like that?
# I'm fairly certain that the key creation automatically creates these files, so as long as this
# function is run after _initKey() then we should be fine.
# with open(fpath, 'wb') as fh:
# fh.write(b'')
# _logger.info('Wrote: {0}'.format(fpath))
continue
os.chmod(fpath, filemode)
return(None)

def _initTofuDB(self):
# As glad as I am that GnuPG is moving more towards more accessible data structures...
db = sqlite3.connect(self.db)
cur = db.cursor()
cur.executescript(_createTofuDB)
db.commit()
cur.close()
db.close()
return(None)
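
Usage sketch (not part of the commit): PackageManager wires this up as keyring.PacmanKey(self.chroot_base); standalone, against the chrootPath from the sample XML below, that would be:

# from aif.pacman import keyring
# pacman_key = keyring.PacmanKey('/mnt/aif')  # creates the master key, imports/trusts/revokes keyrings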

View File

@@ -120,7 +120,7 @@ class Password(object):
if not self._is_gshadow:
self.disabled = aif.utils.xmlBool(self.xml.attrib.get('locked', 'false'))
self._password_xml = self.xml.xpath('passwordPlain|passwordHash')
if self._password_xml is not None:
if self._password_xml:
self._password_xml = self._password_xml[0]
if self._password_xml.tag == 'passwordPlain':
self.password = self._password_xml.text.strip()
@@ -187,12 +187,12 @@ class User(object):
self.sudoPassword = aif.utils.xmlBool(self.xml.attrib.get('sudoPassword', 'true'))
self.home = self.xml.attrib.get('home', '/home/{0}'.format(self.name))
self.uid = self.xml.attrib.get('uid')
if self.uid is not None:
if self.uid:
self.uid = int(self.uid)
self.primary_group = Group(None)
self.primary_group.name = self.xml.attrib.get('group', self.name)
self.primary_group.gid = self.xml.attrib.get('gid')
if self.primary_group.gid is not None:
if self.primary_group.gid:
self.primary_group.gid = int(self.primary_group.gid)
self.primary_group.create = True
self.primary_group.members.add(self.name)
@@ -204,7 +204,7 @@ class User(object):
self.inactive_period = int(self.xml.attrib.get('inactiveDays', 0))
self.expire_date = self.xml.attrib.get('expireDate')
self.last_change = _since_epoch.days - 1
if self.expire_date is not None:
if self.expire_date:
# https://www.w3.org/TR/xmlschema-2/#dateTime
try:
self.expire_date = datetime.datetime.fromtimestamp(int(self.expire_date)) # It's an Epoch

View File

@@ -45,13 +45,13 @@ class KeyEditor(object):




class GPG(object):
def __init__(self, homedir = None, primary_key = None, *args, **kwargs):
def __init__(self, home = None, primary_key = None, *args, **kwargs):
self.homedir = homedir
self.home = home
self.primary_key = primary_key
self.temporary = None
self.ctx = None
self._imported_keys = []
_logger.debug('Homedir: {0}'.format(self.homedir))
_logger.debug('Homedir: {0}'.format(self.home))
_logger.debug('Primary key: {0}'.format(self.primary_key))
if args:
_logger.debug('args: {0}'.format(','.join(args)))
@@ -61,17 +61,17 @@ class GPG(object):
self._initContext()


def _initContext(self):
if not self.homedir:
if not self.home:
self.homedir = tempfile.mkdtemp(prefix = '.aif.', suffix = '.gpg')
self.home = tempfile.mkdtemp(prefix = '.aif.', suffix = '.gpg')
self.temporary = True
_logger.debug('Set as temporary homedir.')
_logger.debug('Set as temporary home.')
self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
self.home = os.path.abspath(os.path.expanduser(self.home))
_logger.debug('Homedir finalized: {0}'.format(self.homedir))
_logger.debug('Homedir finalized: {0}'.format(self.home))
if not os.path.isdir(self.homedir):
if not os.path.isdir(self.home):
os.makedirs(self.homedir, exist_ok = True)
os.makedirs(self.home, exist_ok = True)
os.chmod(self.homedir, 0o0700)
os.chmod(self.home, 0o0700)
_logger.info('Created {0}'.format(self.homedir))
_logger.info('Created {0}'.format(self.home))
self.ctx = gpg.Context(home_dir = self.homedir)
self.ctx = gpg.Context(home_dir = self.home)
if self.temporary:
self.primary_key = self.createKey('AIF-NG File Verification Key',
sign = True,
@@ -92,12 +92,12 @@ class GPG(object):


def clean(self):
# This is mostly just to cleanup the stuff we did before.
_logger.info('Cleaning GPG homedir.')
_logger.info('Cleaning GPG home.')
self.primary_key = self.primary_key.fpr
if self.temporary:
self.primary_key = None
shutil.rmtree(self.homedir)
shutil.rmtree(self.home)
_logger.info('Deleted temporary GPG homedir: {0}'.format(self.homedir))
_logger.info('Deleted temporary GPG home: {0}'.format(self.home))
self.ctx = None
return(None)


@@ -147,7 +147,7 @@ class GPG(object):
if keys:
_logger.debug('Found keys: {0}'.format(keys))
else:
_logger.warn('Found no keys.')
_logger.warning('Found no keys.')
if keyring_import:
_logger.debug('Importing enabled; importing found keys.')
self.importKeys(keys, native = True)

View File

@@ -106,6 +106,7 @@ class Downloader(object):


def parseGpgVerify(self, results):
pass # TODO? Might not need to.
return(None)


def verify(self, verify_xml, *args, **kwargs):
gpg_xml = verify_xml.find('gpg')
@@ -131,7 +132,7 @@ class Downloader(object):
# This means we can *always* instantiate the GPG handler from scratch.
self.gpg = gpg_handler.GPG()
_logger.info('Established GPG session.')
_logger.debug('GPG home dir: {0}'.format(self.gpg.homedir))
_logger.debug('GPG home dir: {0}'.format(self.gpg.home))
_logger.debug('GPG primary key: {0}'.format(self.gpg.primary_key.fpr))
keys_xml = gpg_xml.find('keys')
if keys_xml is not None:
@@ -217,7 +218,7 @@ class Downloader(object):
checksum_file_xml = hash_xml.findall('checksumFile')
checksums = self.checksum.hashData(self.data.read())
self.data.seek(0, 0)
if checksum_file_xml is not None:
if checksum_file_xml:
for cksum_xml in checksum_file_xml:
_logger.debug('cksum_xml: {0}'.format(etree.tostring(cksum_xml, with_tail = False).decode('utf-8')))
htype = cksum_xml.attrib['hashType'].strip().lower()
@@ -236,7 +237,7 @@ class Downloader(object):
_logger.warning(('Checksum type {0} mismatch: '
'{1} (data) vs. {2} (specified)').format(htype, checksums[htype], cksum))
results.append(result)
if checksum_xml is not None:
if checksum_xml:
for cksum_xml in checksum_xml:
_logger.debug('cksum_xml: {0}'.format(etree.tostring(cksum_xml, with_tail = False).decode('utf-8')))
# Thankfully, this is a LOT easier.
@@ -339,6 +340,9 @@ class HTTPDownloader(Downloader):
def get(self):
self.data.seek(0, 0)
req = requests.get(self.real_uri, auth = self.auth)
if not req.ok:
_logger.error('Could not fetch remote resource: {0}'.format(self.real_uri))
raise RuntimeError('Unable to fetch remote resource')
self.data.write(req.content)
self.data.seek(0, 0)
_logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))

View File

@@ -5,21 +5,21 @@
chrootPath="/mnt/aif"
reboot="false">
<bootstrap>
<!-- <tarball>-->
<!-- https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz-->
<!-- </tarball>-->
<tarball>
file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz
https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2020.01.01-x86_64.tar.gz
</tarball>
<!-- <tarball>-->
<!-- file:///tmp/archlinux-bootstrap-2020.01.01-x86_64.tar.gz-->
<!-- </tarball>-->
<verify>
<gpg>
<sigs>
<!-- <signatureFile>-->
<!-- https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig-->
<!-- </signatureFile>-->
<signatureFile>
file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2020.01.01-x86_64.tar.gz.sig
</signatureFile>
<!-- <signatureFile>-->
<!-- file:///tmp/archlinux-bootstrap-2020.01.01-x86_64.tar.gz.sig-->
<!-- </signatureFile>-->
</sigs>
<keys detect="false">
<keyID>0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC</keyID>
@@ -230,16 +230,6 @@
</services>
</system>
<pacman>
<repos>
<repo name="core" enabled="true" sigLevel="default" mirror="file:///etc/pacman.d/mirrorlist"/>
<repo name="extra" enabled="true" sigLevel="default" mirror="file:///etc/pacman.d/mirrorlist"/>
<repo name="community" enabled="true" sigLevel="default" mirror="file:///etc/pacman.d/mirrorlist"/>
<repo name="multilib" enabled="true" sigLevel="default" mirror="file:///etc/pacman.d/mirrorlist"/>
<repo name="testing" enabled="false" sigLevel="default" mirror="file:///etc/pacman.d/mirrorlist"/>
<repo name="multilib-testing" enabled="false" sigLevel="default" mirror="file:///etc/pacman.d/mirrorlist"/>
<repo name="archlinuxfr" enabled="false" sigLevel="Optional TrustedOnly"
mirror="http://repo.archlinux.fr/$arch"/>
</repos>
<mirrorList>
<mirror>http://arch.mirror.square-r00t.net/$repo/os/$arch</mirror>
<mirror>http://mirror.us.leaseweb.net/archlinux/$repo/os/$arch</mirror>
@@ -248,6 +238,19 @@
<mirror>http://mirrors.gigenet.com/archlinux/$repo/os/$arch</mirror>
<mirror>http://mirror.jmu.edu/pub/archlinux/$repo/os/$arch</mirror>
</mirrorList>
<repos>
<repo name="core" enabled="true" sigLevel="default">
<include>file:///etc/pacman.d/mirrorlist</include>
</repo>
<repo name="extra" enabled="true" sigLevel="default"/>
<repo name="community" enabled="true" sigLevel="default"/>
<repo name="multilib" enabled="true" sigLevel="default"/>
<repo name="testing" enabled="false" sigLevel="default"/>
<repo name="multilib-testing" enabled="false" sigLevel="default"/>
<repo name="sqrt" enabled="false" sigLevel="Required">
<mirror>https://$repo.arch.repo.square-r00t.net</mirror>
</repo>
</repos>
<software>
<package repo="core">sed</package>
<package>python</package>