From b4c9caefbda827a98f42e94794974cd8fd4284d5 Mon Sep 17 00:00:00 2001 From: brent s Date: Thu, 19 Dec 2019 14:04:34 -0500 Subject: [PATCH] fixed the gpg thing. WHEW. what a PITA. also fleshed out some logging. --- .gitignore | 1 + aif/utils/__init__.py | 8 +- aif/utils/gpg_handler.py | 221 +++++++++++++++++++++++++++++++++++---- aif/utils/sources.py | 130 ++++++++++++++++++++--- docs/MANUAL.adoc | 18 +++- docs/TODO | 9 ++ examples/aif.xml | 2 +- 7 files changed, 348 insertions(+), 41 deletions(-) diff --git a/.gitignore b/.gitignore index 7f47a39..2af9f75 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ .idea/ __pycache__/ test.py +test*.py diff --git a/aif/utils/__init__.py b/aif/utils/__init__.py index 7c9f501..8a79986 100644 --- a/aif/utils/__init__.py +++ b/aif/utils/__init__.py @@ -252,9 +252,10 @@ class _Sizer(object): conversion = None base_factors = [] if suffix not in self.valid_bw: - _logger.error('Passed an invalid suffix') + _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(self.valid_bw))) raise ValueError('suffix is not a valid unit notation for this conversion') if target and target not in self.valid_bw: + _logger.error('Target {0} is invalid; must be one of {1}'.format(target, ','.join(self.valid_bw))) raise ValueError('target is not a valid unit notation for this conversion') for (_unit_type, _base) in (('decimal', 10), ('binary', 2)): if target and base_factors: @@ -282,8 +283,10 @@ class _Sizer(object): conversion = None base_factors = [] if suffix not in self.valid_storage: + _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(self.valid_storage))) raise ValueError('suffix is not a valid unit notation for this conversion') if target and target not in self.valid_storage: + _logger.error('Target {0} is invalid; must be one of {1}'.format(target, ','.join(self.valid_storage))) raise ValueError('target is not a valid unit notation for this conversion') for (_unit_type, _base) 
in (('decimal', 10), ('binary', 2)): if target and base_factors: @@ -334,5 +337,6 @@ def convertSizeUnit(pos): _size = int(pos.group('size')) amt_type = pos.group('pct_unit_or_sct').strip() else: - raise ValueError('Invalid size specified: {0}'.format(orig_pos)) + _logger.error('Size {0} is invalid; did not match {1}'.format(orig_pos, _pos_re.pattern)) + raise ValueError('Invalid size specified') return((from_beginning, _size, amt_type)) diff --git a/aif/utils/gpg_handler.py b/aif/utils/gpg_handler.py index 66e945c..ea9cbd4 100644 --- a/aif/utils/gpg_handler.py +++ b/aif/utils/gpg_handler.py @@ -1,5 +1,6 @@ import copy import io +import logging import os import shutil import tempfile @@ -8,21 +9,37 @@ import gpg import gpg.errors +_logger = logging.getLogger(__name__) + + class KeyEditor(object): def __init__(self): self.trusted = False + _logger.info('Key editor instantiated.') def truster(self, kw, arg, *args, **kwargs): + _logger.debug('Key trust editor invoked:') + _logger.debug('Command: {0}'.format(kw)) + _logger.debug('Argument: {0}'.format(arg)) + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) if kw == 'GET_LINE': if arg == 'keyedit.prompt': if not self.trusted: + _logger.debug('Returning: "trust"') return('trust') else: + _logger.debug('Returning: "save"') return('save') elif arg == 'edit_ownertrust.value' and not self.trusted: self.trusted = True + _logger.debug('Status changed to trusted') + _logger.debug('Returning: "4"') return('4') # "Full" else: + _logger.debug('Returning: "save"') return('save') return(None) @@ -34,29 +51,53 @@ class GPG(object): self.temporary = None self.ctx = None self._imported_keys = [] + _logger.debug('Homedir: {0}'.format(self.homedir)) + _logger.debug('Primary key: {0}'.format(self.primary_key)) + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) + _logger.info('Instantiated GPG 
class.') self._initContext() def _initContext(self): if not self.homedir: - self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.') + self.homedir = tempfile.mkdtemp(prefix = '.aif.', suffix = '.gpg') self.temporary = True + _logger.debug('Set as temporary homedir.') self.homedir = os.path.abspath(os.path.expanduser(self.homedir)) + _logger.debug('Homedir finalized: {0}'.format(self.homedir)) if not os.path.isdir(self.homedir): os.makedirs(self.homedir, exist_ok = True) os.chmod(self.homedir, 0o0700) + _logger.info('Created {0}'.format(self.homedir)) self.ctx = gpg.Context(home_dir = self.homedir) if self.temporary: - self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True).fpr + self.primary_key = self.createKey('AIF-NG File Verification Key', + sign = True, + force = True, + certify = True).fpr self.primary_key = self.findKeyByID(self.primary_key, source = 'secret') + if self.primary_key: + _logger.debug('Found primary key in secret keyring: {0}'.format(self.primary_key.fpr)) + else: + _logger.error('Could not find primary key in secret keyring: {0}'.format(self.primary_key)) + raise RuntimeError('Primary key not found in secret keyring') self.ctx.signers = [self.primary_key] + if self.ctx.signers: + _logger.debug('Signers set to: {0}'.format(','.join([k.fpr for k in self.ctx.signers]))) + else: + _logger.error('Could not assign signing keys; signing set empty'); raise RuntimeError('Could not assign signing keys; signing set empty') return(None) def clean(self): # This is mostly just to cleanup the stuff we did before.
+ _logger.info('Cleaning GPG homedir.') self.primary_key = self.primary_key.fpr if self.temporary: self.primary_key = None shutil.rmtree(self.homedir) + _logger.info('Deleted temporary GPG homedir: {0}'.format(self.homedir)) self.ctx = None return(None) @@ -72,10 +113,17 @@ class GPG(object): 'authenticate': kwargs.get('authenticate', False), 'passphrase': kwargs.get('passphrase'), 'force': kwargs.get('force')} + _logger.debug('Key creation parameters: {0}'.format(keyinfo)) + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) if not keyinfo['expires_in']: del(keyinfo['expires_in']) keyinfo['expires'] = False k = self.ctx.create_key(**keyinfo) + _logger.info('Created key: {0}'.format(k.fpr)) + _logger.debug('Key info: {0}'.format(k)) return(k) def findKey(self, searchstr, secret = False, local = True, remote = True, @@ -83,12 +131,25 @@ fltr = 0 if secret: fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET + _logger.debug('Added "secret" to filter; new filter value: {0}'.format(fltr)) if local: fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL + _logger.debug('Added "local" to filter; new filter value: {0}'.format(fltr)) if remote: fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN + _logger.debug('Added "remote" to filter; new filter value: {0}'.format(fltr)) + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) keys = [k for k in self.ctx.keylist(pattern = searchstr, secret = secret_only, mode = fltr)] + _logger.info('Found {0} keys'.format(len(keys))) + if keys: + _logger.debug('Found keys: {0}'.format(keys)) + else: + _logger.warning('Found no keys.') if keyring_import: + _logger.debug('Importing enabled; importing found keys.') self.importKeys(keys, native = True) return(keys) @@ -99,129 +160,243 @@ 'local': gpg.constants.KEYLIST_MODE_LOCAL, 'secret': gpg.constants.KEYLIST_MODE_WITH_SECRET} if
source not in sources.keys(): - raise ValueError('source parameter must be one (and only one) of: {0}'.format(sources.keys())) + _logger.error('Invalid source parameter ({0}); must be one of: {1}'.format(source, sources.keys())) + raise ValueError('Invalid source parameter') + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) orig_mode = self.ctx.get_keylist_mode() + _logger.debug('Original keylist mode: {0}'.format(orig_mode)) self.ctx.set_keylist_mode(sources[source]) + _logger.info('Set keylist mode: {0} ({1})'.format(source, sources[source])) + _logger.debug('Searching for key ID: {0}'.format(key_id)) try: key = self.ctx.get_key(key_id, secret = (True if source == 'secret' else False)) + _logger.info('Found key object for {0}'.format(key_id)) + _logger.debug('Found key: {0}'.format(key)) except gpg.errors.KeyNotFound: key = None + _logger.warning('Found no keys.') self.ctx.set_keylist_mode(orig_mode) + _logger.info('Restored keylist mode ({0})'.format(orig_mode)) if keyring_import and key: + _logger.debug('Importing enabled; importing found keys.') self.importKeys(key, native = True) return(key) def getKey(self, key_id, secret = False, strict = False, *args, **kwargs): + key = None + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) try: getattr(key_id, 'fpr') + _logger.info('Key specified is already a native key object.') + _logger.debug('Key: {0}'.format(key_id)) return(key_id) except AttributeError: if not strict: + _logger.debug('Strict mode disabled; attempting import of {0} first.'.format(key_id)) self.findKeyByID(key_id, keyring_import = True, **kwargs) try: key = self.ctx.get_key(key_id, secret = secret) + _logger.info('Found {0}.'.format(key_id)) + _logger.debug('Key: {0}'.format(key)) except gpg.errors.KeyNotFound: - key = None - return(key) - return(None) - - def getKeyFile(self, keyfile, keyring_import = False, 
*args, **kwargs): - keyfile = os.path.abspath(os.path.expanduser(keyfile)) - with open(keyfile, 'rb') as fh: - rawkey_data = fh.read() - fh.seek(0, 0) - keys = [k for k in self.ctx.keylist(source = fh)] - if keyring_import: - self.importKeys(keys, native = True) - return((keys, rawkey_data)) + _logger.warning('Could not locate {0} in keyring'.format(key_id)) + return(key) def getKeyData(self, keydata, keyring_import = False, *args, **kwargs): orig_keydata = keydata + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) if isinstance(keydata, str): + _logger.debug('String passed as keydata; converting to bytes.') keydata = keydata.encode('utf-8') buf = io.BytesIO(keydata) + _logger.info('Parsed {0} bytes; looking for key(s).'.format(buf.getbuffer().nbytes)) keys = [k for k in self.ctx.keylist(source = buf)] + _logger.info('Found {0} key(s) in data.'.format(len(keys))) + if keys: + _logger.debug('Keys found: {0}'.format(keys)) + else: + _logger.warning('No keys found in data.') buf.close() if keyring_import: + _logger.debug('Importing enabled; importing found keys.') self.importKeys(keys, native = True) return((keys, orig_keydata)) + def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs): + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) + orig_keyfile = keyfile + keyfile = os.path.abspath(os.path.expanduser(keyfile)) + _logger.info('Parsed absolute keyfile path: {0} => {1}'.format(orig_keyfile, keyfile)) + with open(keyfile, 'rb') as fh: + rawkey_data = fh.read() + fh.seek(0, 0) + _logger.debug('Parsed {0} bytes; looking for key(s).'.format(len(rawkey_data))) + keys = [k for k in self.ctx.keylist(source = fh)] + _logger.info('Found {0} key(s) in data.'.format(len(keys))) + if keys: + _logger.debug('Keys found: {0}'.format(keys)) + else: + _logger.warning('No keys found in data.') + if keyring_import: + 
_logger.debug('Importing enabled; importing found keys.') + self.importKeys(keys, native = True) + return((keys, rawkey_data)) + def importKeys(self, keydata, native = False, local = True, remote = True, *args, **kwargs): fltr = 0 orig_km = None keys = [] + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) if local: fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL + _logger.debug('Added "local" to filter; new filter value: {0}'.format(fltr)) if remote: fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN + _logger.debug('Added "remote" to filter; new filter value: {0}'.format(fltr)) if self.ctx.get_keylist_mode() != fltr: orig_km = self.ctx.get_keylist_mode() self.ctx.set_keylist_mode(fltr) + _logger.info(('Current keylist mode ({0}) doesn\'t match filter ({1}); ' + 'set to new mode.').format(orig_km, fltr)) if not native: # It's raw key data (.gpg, .asc, etc.). + _logger.info('Non-native keydata specified; parsing.') formatted_keys = b'' if isinstance(keydata, str): formatted_keys += keydata.encode('utf-8') + _logger.debug('Specified keydata was a string; converted to bytes.') elif isinstance(keydata, list): - for k in keydata: + _logger.debug('Specified keydata was a list/list-like; iterating.') + for idx, k in enumerate(keydata): + _logger.debug('Parsing entry {0} of {1} entries.'.format((idx + 1), len(keydata))) if isinstance(k, str): formatted_keys += k.encode('utf-8') + _logger.debug('Keydata ({0}) was a string; converted to bytes.'.format((idx + 1))) else: + _logger.debug('Keydata ({0}) was already in bytes.'.format((idx + 1))) formatted_keys += k else: - formatted_keys += keydata - for rslt in self.ctx.key_import(formatted_keys).imports: - keys.append(self.ctx.get_key(rslt.fpr)) + _logger.warning('Could not identify keydata reliably; unpredictable results ahead.') + formatted_keys = keydata + rslt = self.ctx.key_import(formatted_keys).imports + _logger.debug('Imported keys: {0}'.format(rslt)) + 
for r in rslt: + k = self.ctx.get_key(r.fpr) + if k: + _logger.debug('Adding key to keylist: {0}'.format(k)) + else: + _logger.warning('Could not find key ID {0}.'.format(r.fpr)) + keys.append(k) else: # It's a native Key() object (or a list of them). + _logger.info('Native keydata specified; parsing.') if not isinstance(keydata, list): + _logger.debug('Specified keydata was not a list/list-like; fixing.') keydata = [keydata] keys = keydata + _logger.debug('Importing keys: {0}'.format(keys)) self.ctx.op_import_keys(keydata) if orig_km: self.ctx.set_keylist_mode(orig_km) + _logger.info('Restored keylist mode to {0}'.format(orig_km)) for k in keys: + _logger.info('Signing {0} with a local signature.'.format(k.fpr)) self.ctx.key_sign(k, local = True) + _logger.debug('Adding trust for {0}.'.format(k.fpr)) trusteditor = KeyEditor() self.ctx.interact(k, trusteditor.truster) return(None) def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs): results = {} + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) if keys: + _logger.info('Keys were specified.') if not isinstance(keys, list): keys = [self.getKey(keys, source = 'local')] else: keys = [self.getKey(k, source = 'local') for k in keys] + _logger.debug('Verifying against keys: {0}'.format(keys)) if isinstance(data, str): data = data.encode('utf-8') + _logger.debug('Specified data was a string; converted to bytes.') + _logger.info('Verifying {0} bytes of data.'.format(len(data))) fnargs = {'signed_data': data} if detached: + _logger.info('Specified a detached signature.') if isinstance(detached, str): detached = detached.encode('utf-8') + _logger.debug('Specified signature was a string; converted to bytes.') if not isinstance(detached, bytes) and not hasattr(detached, 'read'): - raise TypeError('detached must be bytes or a file-like object (make sure the position is correct!)') + _logger.error('Detached signature was 
neither bytes nor a buffer-like object.') + raise TypeError('detached must be bytes or buffer-like object') + if isinstance(detached, bytes): + _logger.info('Signature length: {0} bytes'.format(len(detached))) + else: + _logger.info('Signature length: {0} bytes'.format(detached.getbuffer().nbytes)) fnargs['signature'] = detached if strict: + _logger.debug('Strict mode enabled; data must be signed by ALL specified keys.') fnargs['verify'] = keys + _logger.debug('Verifying with args: {0}'.format(fnargs)) results[None] = self.ctx.verify(**fnargs) else: if keys: + _logger.debug('Keys were specified but running in non-strict; iterating over all.') for k in keys: _fnargs = copy.deepcopy(fnargs) _fnargs['verify'] = [k] + _logger.info('Verifying against key {0}'.format(k.fpr)) try: - print(self.ctx.get_keylist_mode()) + _logger.debug(('Verifying with args (data-stripped): ' + '{0}').format({k: (v if k not in ('signed_data', + 'signature') + else '(stripped)') for k, v in _fnargs.items()})) sigchk = self.ctx.verify(**_fnargs) - results[k.fpr] = (True, sigchk[1].results, None) + _logger.info('Key {0} verification results: {1}'.format(k.fpr, sigchk)) + results[k.fpr] = (True, sigchk[1], None) except gpg.errors.MissingSignatures as e: + _logger.warning('Key {0}: missing signature'.format(k.fpr)) + _logger.debug('Key {0} results: {1}'.format(k.fpr, e.results)) results[k.fpr] = (False, e.results, 'Missing Signature') except gpg.errors.BadSignatures as e: + _logger.warning('Key {0}: bad signature'.format(k.fpr)) + _logger.debug('Key {0} results: {1}'.format(k.fpr, e.results)) results[k.fpr] = (False, e.results, 'Bad Signature') else: + _logger.debug('No keys specified but running in non-strict; accepting any signatures.') + _logger.debug(('Verifying with args (data-stripped): ' + '{0}').format({k: (v if k not in ('signed_data', + 'signature') + else '(stripped)') for k, v in fnargs.items()})) results[None] = self.ctx.verify(**fnargs) + _logger.debug('Results for any/all 
signatures: {0}'.format(results[None])) return(results) def verifyFile(self, filepath, *args, **kwargs): + orig_filepath = filepath filepath = os.path.abspath(os.path.expanduser(filepath)) + _logger.debug('File verification invoked. Transformed filepath: {0} => {1}'.format(orig_filepath, filepath)) + if args: + _logger.debug('args: {0}'.format(','.join(args))) + if kwargs: + _logger.debug('kwargs: {0}'.format(kwargs)) with open(filepath, 'rb') as fh: results = self.verifyData(fh.read(), **kwargs) return(results) diff --git a/aif/utils/sources.py b/aif/utils/sources.py index 6e5d5d3..e9317f9 100644 --- a/aif/utils/sources.py +++ b/aif/utils/sources.py @@ -1,10 +1,12 @@ import ftplib import io +import logging import pathlib import re ## import requests import requests.auth +from lxml import etree ## import aif.constants_fallback from . import gpg_handler @@ -12,13 +14,26 @@ from . import hash_handler from . import parser +_logger = logging.getLogger(__name__) + + class ChecksumFile(object): _bsd_re = re.compile(r'^(?P\(.*\))\s+=\s+(?P.*)$') def __init__(self, checksum_xml, filetype): self.xml = checksum_xml + if self.xml is not None: + _logger.debug('checksum_xml: {0}'.format(etree.tostring(self.xml).decode('utf-8'))) + else: + _logger.error('checksum_xml is required but not specified') + raise ValueError('checksum_xml is required') self.uri = self.xml.text.strip() self.filetype = filetype + if filetype: + _logger.debug('URI and filetype: {{{0}}}{1}'.format(self.uri, self.filetype)) + else: + _logger.error('filetype is required but not specified') + raise ValueError('filetype is required') self.hashes = None downloader = getDLHandler(self.uri) # Recursive objects for the win? 
dl = downloader(self.xml) @@ -28,14 +43,14 @@ class ChecksumFile(object): self._convert() def _convert(self): - data = self.data - if not isinstance(data, str): - data = data.decode('utf-8') - data.strip() + if not isinstance(self.data, str): + self.data = self.data.decode('utf-8') + self.data = self.data.strip() self.hashes = {} if self.filetype not in ('gnu', 'bsd'): + _logger.error('Passed an invalid filetype: {0}'.format(self.filetype)) raise ValueError('filetype attribute must be either "gnu" or "bsd"') - for line in data.splitlines(): + for line in self.data.splitlines(): if self.filetype == 'gnu': hashtype = None # GNU style splits their hash types into separate files by default. h, fname = line.split(None, 1) @@ -48,17 +63,29 @@ if hashtype not in self.hashes: self.hashes[hashtype] = {} self.hashes[hashtype][fname] = h + _logger.debug('Generated hash set: {0}'.format(self.hashes)) return(None) class Downloader(object): def __init__(self, netresource_xml, *args, **kwargs): self.xml = netresource_xml + _logger.info('Instantiated class {0}'.format(type(self).__name__)) + if netresource_xml is not None: + _logger.debug('netresource_xml: {0}'.format(etree.tostring(self.xml).decode('utf-8'))) + else: + _logger.error('netresource_xml is required but not specified') + raise ValueError('netresource_xml is required') + _logger.debug('args: {0}'.format(','.join(args))) + _logger.debug('kwargs: {0}'.format(kwargs)) self.uri = parser.URI(self.xml.text.strip()) + _logger.debug('Parsed URI: {0}'.format(self.uri)) self.user = self.xml.attrib.get('user') if not self.user and self.uri.user: self.user = self.uri.user self.password = self.xml.attrib.get('password') + _logger.debug('Parsed user: {0}'.format(self.user)) + _logger.debug('Parsed password: {0}'.format(self.password)) if not self.password and self.uri.password: self.password = self.uri.password self.real_uri = ('{0}://' @@ -68,6 +95,7 @@ class Downloader(object): (self.uri.base if self.uri.base else
''), (':{0}'.format(self.uri.port) if self.uri.port else ''), self.uri.path) + _logger.debug('Rebuilt URI: {0}'.format(self.real_uri)) self.gpg = None self.checksum = None self.data = io.BytesIO() @@ -77,11 +105,19 @@ class Downloader(object): return(None) def parseGpgVerify(self, results): - pass + pass # TODO? Might not need to. def verify(self, verify_xml, *args, **kwargs): gpg_xml = verify_xml.find('gpg') + if gpg_xml is not None: + _logger.debug('gpg_xml: {0}'.format(etree.tostring(gpg_xml).decode('utf-8'))) + else: + _logger.debug('No in verify_xml') hash_xml = verify_xml.find('hash') + if hash_xml is not None: + _logger.debug('Hash XML: {0}'.format(etree.tostring(hash_xml).decode('utf-8'))) + else: + _logger.debug('No in verify_xml') results = {} if gpg_xml is not None: results['gpg'] = self.verifyGPG(gpg_xml) @@ -94,35 +130,72 @@ class Downloader(object): # We don't allow custom GPG homedirs since this is probably running from a LiveCD/USB/whatever anyways. # This means we can *always* instantiate the GPG handler from scratch. 
self.gpg = gpg_handler.GPG() + _logger.info('Established GPG session.') + _logger.debug('GPG home dir: {0}'.format(self.gpg.homedir)) + _logger.debug('GPG primary key: {0}'.format(self.gpg.primary_key.fpr)) keys_xml = gpg_xml.find('keys') + if keys_xml is not None: + _logger.debug('keys_xml: {0}'.format(etree.tostring(keys_xml).decode('utf-8'))) + else: + _logger.error('No required in gpg_xml') + raise ValueError(' is required in a GPG verification block') sigs_xml = gpg_xml.find('sigs') - fnargs = {'keyring_import': True} + if sigs_xml is not None: + _logger.debug('Keys XML: {0}'.format(etree.tostring(keys_xml).decode('utf-8'))) + else: + _logger.error('No required in gpg_xml') + raise ValueError(' is required in a GPG verification block') + fnargs = {'strict': keys_xml.attrib.get('detect')} + if fnargs['strict']: # We have to manually do this since it's in our parent's __init__ + if fnargs['strict'].lower() in ('true', '1'): + fnargs['strict'] = True + else: + fnargs['strict'] = False + else: + fnargs['strict'] = False fnargs.update(kwargs) if keys_xml is not None: fnargs['keys'] = [] for key_id_xml in keys_xml.findall('keyID'): + _logger.debug('Found : {0}'.format(etree.tostring(key_id_xml).decode('utf-8'))) if key_id_xml.text == 'auto': - k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, **fnargs) - elif key_id_xml.text == 'detect': - fnargs['strict'] = False - continue + _logger.debug('Key ID was set to "auto"; using {0}'.format(aif.constants_fallback.ARCH_RELENG_KEY)) + self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, source = 'remote', + keyring_import = True, **fnargs) + k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, source = 'local', **fnargs) else: - k = self.gpg.findKeyByID(key_id_xml.text.strip(), **fnargs) + _logger.debug('Finding key: {0}'.format(key_id_xml.text.strip())) + self.gpg.findKeyByID(key_id_xml.text.strip(), source = 'remote', keyring_import = True, **fnargs) + k = 
self.gpg.findKeyByID(key_id_xml.text.strip(), source = 'local', **fnargs) + if k: + _logger.debug('Key {0} found'.format(k.fpr)) + else: + _logger.error('Key {0} not found'.format(key_id_xml.text.strip())) + raise RuntimeError('Could not find key ID specified') fnargs['keys'].append(k) for key_file_xml in keys_xml.findall('keyFile'): + _logger.debug('Found : {0}'.format(etree.tostring(key_file_xml).decode('utf-8'))) downloader = getDLHandler(key_file_xml.text.strip()) # Recursive objects for the win? dl = downloader(key_file_xml) dl.get() - k = self.gpg.getKeyData(dl.data.read(), **fnargs)[0] + k = self.gpg.getKeyData(dl.data.read(), keyring_import = True, **fnargs)[0] + if k: + fnargs['keys'].extend(k) + else: + pass # No keys found in key file. We log this in GPG.getKeyData() though. dl.data.seek(0, 0) - fnargs['keys'].extend(k) + if not fnargs['keys']: + _logger.debug('Found no keys in keys_xml') + raise ValueError('Could not find any keys') if sigs_xml is not None: for sig_text_xml in sigs_xml.findall('signature'): + _logger.debug('Found ') sig = sig_text_xml.text.strip() sigchk = self.gpg.verifyData(self.data.read(), detached = sig, **fnargs) self.data.seek(0, 0) results.update(sigchk) for sig_file_xml in sigs_xml.findall('signatureFile'): + _logger.debug('Found : {0}'.format(sig_file_xml.text.strip())) downloader = getDLHandler(sig_file_xml.text.strip()) dl = downloader(sig_file_xml) dl.get() @@ -131,6 +204,7 @@ class Downloader(object): self.data.seek(0, 0) results.update(sigchk) self.gpg.clean() + _logger.debug('Rendered results: {0}'.format(results)) return(results) def verifyHash(self, hash_xml, *args, **kwargs): @@ -144,6 +218,7 @@ class Downloader(object): self.data.seek(0, 0) if checksum_file_xml is not None: for cksum_xml in checksum_file_xml: + _logger.debug('Found : {0}'.format(etree.tostring(cksum_xml).decode('utf-8'))) htype = cksum_xml.attrib['hashType'].strip().lower() ftype = cksum_xml.attrib['fileType'].strip().lower() fname = 
cksum_xml.attrib.get('filePath', @@ -154,14 +229,28 @@ class Downloader(object): elif ftype == 'bsd': cksum = cksum_file.hashes[htype][fname] result = (cksum == checksums[htype]) + if result: + _logger.debug('Checksum type {0} matches ({1})'.format(htype, cksum)) + else: + _logger.warning(('Checksum type {0} mismatch: ' + '{1} (data) vs. {2} (specified)').format(htype, checksums[htype], cksum)) results.append(result) if checksum_xml is not None: for cksum_xml in checksum_xml: + _logger.debug('Found : {0}'.format(etree.tostring(cksum_xml).decode('utf-8'))) # Thankfully, this is a LOT easier. htype = cksum_xml.attrib['hashType'].strip().lower() result = (cksum_xml.text.strip().lower() == checksums[htype]) + if result: + _logger.debug('Checksum type {0} matches ({1})'.format(htype, checksums[htype])) + else: + _logger.warning(('Checksum type {0} mismatch: ' + '{1} (data) vs. {2} (specified)').format(htype, + checksums[htype], + cksum_xml.text.strip().lower())) results.append(result) result = all(results) + _logger.debug('Overall result of checksumming: {0}'.format(result)) return(result) @@ -176,6 +265,7 @@ class FSDownloader(Downloader): with open(self.uri.path, 'rb') as fh: self.data.write(fh.read()) self.data.seek(0, 0) + _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes)) return(None) @@ -188,11 +278,15 @@ class FTPDownloader(Downloader): self.password = '' self.port = (self.uri.port if self.uri.port else 0) self._conn = None + _logger.debug('User: {0}'.format(self.user)) + _logger.debug('Password: {0}'.format(self.password)) + _logger.debug('Port: {0}'.format(self.port)) def _connect(self): self._conn = ftplib.FTP() self._conn.connect(host = self.uri.base, port = self.port) self._conn.login(user = self.user, passwd = self.password) + _logger.info('Connected.') return(None) def get(self): @@ -201,10 +295,12 @@ class FTPDownloader(Downloader): self._conn.retrbinary('RETR {0}'.format(self.uri.path), self.data.write) self.data.seek(0, 0) 
self._close() + _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes)) return(None) def _close(self): self._conn.quit() + _logger.info('Closed connection') return(None) @@ -217,6 +313,7 @@ class FTPSDownloader(FTPDownloader): self._conn.connect(host = self.uri.base, port = self.port) self._conn.login(user = self.user, passwd = self.password) self._conn.prot_p() + _logger.info('Connected.') return(None) @@ -225,6 +322,7 @@ class HTTPDownloader(Downloader): super().__init__(netresource_xml, *args, **kwargs) self.auth = self.xml.attrib.get('authType', 'none').lower() if self.auth == 'none': + _logger.debug('No auth.') self.auth = None self.realm = None self.user = None @@ -232,14 +330,17 @@ class HTTPDownloader(Downloader): else: if self.auth == 'basic': self.auth = requests.auth.HTTPBasicAuth(self.user, self.password) + _logger.info('HTTP basic auth configured.') elif self.auth == 'digest': self.auth = requests.auth.HTTPDigestAuth(self.user, self.password) + _logger.info('HTTP digest auth configured.') def get(self): self.data.seek(0, 0) req = requests.get(self.real_uri, auth = self.auth) self.data.write(req.content) self.data.seek(0, 0) + _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes)) return(None) @@ -254,5 +355,6 @@ def getDLHandler(uri): elif re.search(r'^ftps://', uri, re.IGNORECASE): return(FTPSDownloader) else: + _logger.error('Unable to detect which download handler to instantiate.') raise RuntimeError('Could not detect which download handler to use') return(None) diff --git a/docs/MANUAL.adoc b/docs/MANUAL.adoc index 219e332..a8eac5f 100644 --- a/docs/MANUAL.adoc +++ b/docs/MANUAL.adoc @@ -546,9 +546,10 @@ There are several script types availabe for `execution`. Currently, these are: * pkg * post -*pre* scripts are run (in numerical `order`) before the disks are even formatted. 
*pkg* scripts are run (in numerical `order`) right before the <> are installed (this allows you to configure an <> such as https://aur.archlinux.org/packages/apacman/[apacman^]) -- these are run *inside* the chroot of the new install. *post* scripts are run inside the chroot like *pkg*, but are executed very last thing, just before the reboot. +*pre* scripts are run (in specified order) before the disks are even formatted. *pkg* scripts are run (in specified order) right before the <> are installed (this allows you to configure an <> such as https://aur.archlinux.org/packages/apacman/[apacman^]) -- these are run *inside* the chroot of the new install. *post* scripts are run inside the chroot like *pkg*, but are executed very last thing, just before the reboot. = Further Information + Here you will find further info and other resources relating to AIF-NG. == FAQ @@ -614,6 +615,8 @@ As long as: then it shouldn't try to perform any remote operations. +Note that if you specified a GPG verification, you'll need to use a local exported key file for the public key (`keyFile`); if you use a `keyID`, then AIF-NG will try to fetch the key from keyservers. + === "I specified start sector as 0 for a GPT-labeled disk but it starts at sector 2048 instead. What gives?" GPT requires 33 sectors for the table at the beginning (and 32 sectors at the end) for the actual table. That plus an extra (usually) 512 bytes at the beginning for something called a https://en.wikipedia.org/wiki/GUID_Partition_Table#Protective_MBR_(LBA_0)[Protective MBR^] (this prevents disk utilities from overwriting the GPT label automatically in case they only recognize "msdos" labels and assume the disk is not formatted yet). @@ -662,6 +665,19 @@ You can't, currently; support is only stubbed out for now. If absolutely necessa This hopefully will be changed in the future, however, as I'm interested in adding support. For now, open and WPA/WPA2 PSK only are considered supported. 
+=== "How do I use my own GnuPG homedir instead of letting AIF-NG create one automatically?" +I can pretty easily add support for this -- it's stubbed in already. But there are a couple reasons it doesn't really make sense to do so: + +* Being that most people are probably using this from a LiveCD/LiveUSB/PXE/whatever, it's *highly* unlikely they'll even have a static GnuPG homedir available. +* Even if they did, AIF-NG has no real way of running a passphrase prompt. It's intended to be run automatically, non-interactively, and daemonized. You'd have to have a passphrase-less private key for it to work. +** Why? Because it needs to be able to sign and trust the key ID you specified to get an accurate validity reading of the signature. If the private key has a passphrase, this is required for the operation to complete. If a custom homedir with a passphrased private key was specified, the signature's signer's public key would already need to be imported into the keyring, signed, AND trusted (with a sufficiently high enough level). + +=== "Why do I have to specify a URI or key ID for a GPG key but can include a raw text block for a GPG `signature`?" +Because keys are (generally speaking) intended to be publicly fetchable in some form or another. `signatures` are not (necessarily); they're more geared towards being file objects. I definitely recommend using `signatureFile` instead, though, even if it's just to a local .sig/.asc file. + +=== "Why don't you support WKD for GPG key fetching?" +Because I didn't. If there is interest, I can add support for it but please don't request it unless you plan on actually using it. + == Bug Reports/Feature Requests NOTE: It is possible to submit a bug or feature request without registering in my bugtracker. One of my pet peeves is needing to create an account/register on a bugtracker simply to report a bug! 
The following links only require an email address to file a bug (which is necessary in case I need any further clarification from you or to keep you updated on the status of the bug/feature request -- so please be sure to use a valid email address). diff --git a/docs/TODO b/docs/TODO index 69fdcb3..54de515 100644 --- a/docs/TODO +++ b/docs/TODO @@ -51,3 +51,12 @@ https://msdn.microsoft.com/en-us/library/dd489258.aspx if i ever need a list of GPT GUIDs, maybe to do some fancy GUID-to-name-and-back mapping? https://en.wikipedia.org/wiki/GUID_Partition_Table#Partition_type_GUIDs (mapping can be done via https://stackoverflow.com/questions/483666/reverse-invert-a-dictionary-mapping) + + + +docs todo: +- syntax notation: +bold element/attribute names are required (only specified once). +regular are optional. +italicized means there can be multiple (none, one or many) specified. +italicized and bold means there must be at LEAST one. diff --git a/examples/aif.xml b/examples/aif.xml index 0d33c95..0df2984 100644 --- a/examples/aif.xml +++ b/examples/aif.xml @@ -21,7 +21,7 @@ file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig - + 0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC