Compare commits

...

6 Commits
v1.1 ... master

Author SHA1 Message Date
brent s. faa79046d3 more useful error output 2019-01-21 15:59:18 -05:00
brent s e0ed3346aa Merge branch 'master' of square-r00t.net:podloader 2018-08-07 10:53:24 -04:00
brent s d817c8bc31 use any python3 available 2018-08-07 10:53:19 -04:00
brent s. a613cbe220 fixing 5.29 -> 5.30 python-magic 2018-06-02 13:01:17 -04:00
brent s a137dc395c adding re-signing 2018-03-13 11:14:41 -04:00
brent s. d2d405a36a pretty happy with this now. 2017-04-04 07:54:39 -04:00
3 changed files with 196 additions and 5 deletions

View File

@@ -158,7 +158,8 @@ def confArgs(conf, args):
exit('ERROR: We cannot seem to locate a FLAC to convert. Try using the -f/--file argument.')
magic_file = magic.open(magic.MAGIC_MIME)
magic_file.load()
if not magic_file.file(conf['episode']['raw']) == 'audio/x-flac; charset=binary':
if magic_file.file(conf['episode']['raw']) not in ('audio/x-flac; charset=binary',
'audio/flac; charset=binary'):
exit('ERROR: Your FLAC file does not seem to actually be FLAC.')
conf['flac'] = {}
conf['flac']['samples'] = subprocess.check_output(['metaflac',
@@ -437,9 +438,9 @@ def dbEntry(conf):
cur.execute(query)
cur.close()
conn.close()
except:
print('{0}: There seems to have been some error when inserting into the DB. Check access (or it is a dupe).'.format(
datetime.datetime.now()))
except Exception as e:
print('{0}: There seems to have been some error when inserting into the DB: {1}'.format(
datetime.datetime.now(), e))

def signEp(mediatype, conf):
# No reason to call this for each file. Fix.
@@ -493,8 +494,9 @@ def uploadFile(conf):
# and create if it doesn't exist.
# Also, no reason to call this for each file.
print('{0}: Syncing files to server...'.format(datetime.datetime.now()))
subprocess.call(['rsync',
subprocess.run(['rsync',
'-a',
'--info=progress2',
'{0}'.format(conf['local']['mediadir']),
'{0}@{1}:{2}S{3}/.'.format(conf['rsync']['user'],
conf['rsync']['host'],

83
re_sign.py Executable file
View File

@@ -0,0 +1,83 @@
#!/usr/bin/env python3

# stdlib
import os
import re
# pypi
import gpg
import gpg.constants
import gpg.errors

# Re-sign all episode files that have an invalid signature.

# The GPG home directory (GNUPGHOME) containing the signing keyring.
GNUPGHOME = '~/podcast/gpg'
# The key ID to use to sign/verify. Must exist in the local keyring, trusted, etc.
GPGKEY = '0x63D1CEA387C27A92E0D50AB8343C305F9109D4DC'
# The "parent" path that contains both audio files and detached sigs.
# It is assumed that the sigs for each episode file live in ../gpg/* (relative
# to the media file(s)).
# If you need to change this, check the signer class.
EPPATH = '~/podcast/releases'
# Media-file name suffixes to process (matched with str.endswith — no dot).
FILEEXTS = ('mp3', 'ogg')

class signer(object):
    """Sign and verify detached GPG signatures for episode media files.

    Wraps a ``gpg.Context`` bound to a single signing key. The keyring
    location is taken from *gpg_home* (exported as GNUPGHOME for gpgme).
    *sig_ext* is the detached-signature file extension ("asc" and ".asc"
    are treated the same); *gpg_armor* controls ASCII-armored output.
    """

    def __init__(self, key_id, gpg_home = '~/.gnupg',
                 sig_ext = '.asc', gpg_armor = True):
        # gpgme reads the keyring location from the environment.
        os.environ['GNUPGHOME'] = os.path.abspath(os.path.expanduser(gpg_home))
        self.sig_ext = sig_ext
        self.gpg = gpg.Context()
        # gpg.Context.signers has to be an iterable.
        self.gpg.signers = []
        self.key = self.gpg.get_key(key_id, True)
        if self.key.can_sign:
            self.gpg.signers.append(self.key)
        # BUGFIX: honor the gpg_armor parameter; it was hard-coded to True.
        self.gpg.armor = gpg_armor

    def _sigPath(self, sigpath_base):
        # Build "<base>.<ext>", tolerating a leading dot in self.sig_ext
        # so both ".asc" and "asc" yield "<base>.asc".
        return('.'.join((sigpath_base, re.sub(r'^\.', '', self.sig_ext))))

    def chkSigValid(self, fpath, sigpath_base):
        """Return True if the detached signature for fpath verifies, else False."""
        sigpath = self._sigPath(sigpath_base)
        # BUGFIX: the open() calls must live inside the try — previously a
        # missing sig file raised FileNotFoundError *before* the handler that
        # is clearly meant to report "BAD/MISSING SIGNATURE" could catch it.
        try:
            with open(sigpath, 'rb') as sig, open(fpath, 'rb') as f, \
                    open(os.devnull, 'wb') as DEVNULL:
                self.gpg.verify(f, signature = sig, sink = DEVNULL,
                                verify = self.gpg.signers)
            return(True)
        except (gpg.errors.BadSignatures, gpg.errors.GPGMEError,
                FileNotFoundError):
            print('BAD/MISSING SIGNATURE: {0}'.format(fpath))
            return(False)

    def signEpFile(self, fpath, sigpath_base):
        """Write a fresh detached signature for fpath next to any old one."""
        sigpath = self._sigPath(sigpath_base)
        with open(sigpath, 'wb') as f, open(fpath, 'rb') as s:
            # sign() returns (signed_data, result); we only want the bytes.
            f.write(self.gpg.sign(s, mode = gpg.constants.SIG_MODE_DETACH)[0])
        print('Signed/re-signed {0}'.format(fpath))
        return()

def getEpFiles(path, exts):
    """Recursively collect paths of files under *path* ending with *exts*.

    *exts* is a tuple of suffixes as accepted by str.endswith.
    Returns a list of absolute file paths.
    """
    print('Building list of media files; please wait...')
    basedir = os.path.abspath(os.path.expanduser(path))
    return([os.path.join(dirpath, fname)
            for dirpath, _, fnames in os.walk(basedir)
            for fname in fnames
            if fname.endswith(exts)])

def main(GPGHOME = GNUPGHOME, KEYID = GPGKEY,
         EPSPATH = EPPATH, FILEEXT = FILEEXTS):
    """Verify every episode file's detached signature; re-sign failures."""
    media = getEpFiles(EPSPATH, FILEEXT)
    gpg_ctx = signer(KEYID, gpg_home = GPGHOME)
    print('Verifying files (and signing if necessary)...')
    for fpath in media:
        # Sigs are expected at ../gpg/<filename> relative to the media file.
        sig_base = os.path.abspath(os.path.join(os.path.dirname(fpath),
                                                '..',
                                                'gpg',
                                                os.path.basename(fpath)))
        if not gpg_ctx.chkSigValid(fpath, sig_base):
            gpg_ctx.signEpFile(fpath, sig_base)

# Script entry point: verify (and re-sign as needed) using the defaults above.
if __name__ == '__main__':
    main()

106
verifyfeed.py Executable file
View File

@@ -0,0 +1,106 @@
#!/usr/bin/env python3

# https://sysadministrivia.com/news/every-new-beginning

import hashlib
import argparse
import os
import glob
from urllib.request import urlopen
try:
from lxml import etree
except ImportError:
import xml.etree.ElementTree as etree
# TODO: GPG verification too

# Base URL of the site serving the feeds below.
baseurl = 'https://sysadministrivia.com'

# Feed name -> URL path (appended to baseurl) for each published feed format.
feeds = {'itunes':'/feed/itunes.xml',
         'google':'/feed/google.xml',
         'mp3':'/feed/podcast.xml',
         'ogg':'/feed/oggcast.xml'}

def getXML(baseurl, feeds, args):
    """Fetch and parse each feed selected in args.feedlist.

    Returns a dict mapping feed name -> parsed XML root element.
    """
    print('Fetching feed(s) XML, please wait...')
    parsed = {}
    for name in args.feedlist:
        with urlopen(baseurl + feeds[name]) as resp:
            parsed[name] = etree.fromstring(resp.read())
    return(parsed)

def _hashFile(path):
    # SHA256 hex digest of a local file, read in 4 KiB chunks.
    sha = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            sha.update(chunk)
    return(sha.hexdigest())

def getSums(xml, args):
    """Extract per-episode sums from parsed feed XML and verify them.

    Builds {feed: {episode_id: {'uri', 'guid', 'file'[, 'livesha']}}} from
    each feed's channel/item entries (the GUID carries the SHA256 sum).
    With args.livesums, re-downloads each enclosure and compares its SHA256
    against the GUID. With args.locdir, hashes matching local files and
    compares them against the GUID. With neither, prints the GUID table.
    Returns the sums dict.
    """
    sums = {}
    for feed in args.feedlist:
        sums[feed] = {}
        for episode in xml[feed].findall('channel/item'):
            # Episode ID is the title up to the first colon.
            epID = episode.find('title').text.split(':')[0]
            ep = sums[feed][epID] = {}
            ep['uri'] = episode.find('enclosure').attrib['url']
            ep['guid'] = episode.find('guid').text
            ep['file'] = os.path.basename(ep['uri'])
            if args.livesums:
                livesha = hashlib.sha256()
                print('{0}({1}): Fetching/verifying live sum...'.format(epID, feed))
                with urlopen(ep['uri']) as url:
                    for chunk in iter(lambda: url.read(4096), b''):
                        livesha.update(chunk)
                ep['livesha'] = livesha.hexdigest()
                if ep['livesha'] != ep['guid']:
                    # BUGFIX: the original used {1} for both fields, so the
                    # GUID was never printed (the live sum appeared twice).
                    print('\t\tWARNING: GUID {0} does not match live sum {1}!'.format(
                        ep['guid'], ep['livesha']))
    if args.locdir:
        localdir = os.path.abspath(os.path.expanduser(args.locdir))
        if not os.path.isdir(localdir):
            exit('ERROR: Directory {0} does not exist!'.format(args.locdir))
        # Episode IDs/GUIDs are the same across feeds; use the first feed.
        episodes = sums[args.feedlist[0]]
        print('Checking local files...')
        for episode in episodes.keys():
            filename = episodes[episode]['file']
            guid = episodes[episode]['guid']
            for localfile in glob.iglob('{0}/**/{1}'.format(localdir, filename), recursive = True):
                print('Checking {0}...'.format(localfile))
                localsha = _hashFile(localfile)
                if localsha != guid:
                    print('WARNING: GUID {0} does not match local hash {1}!'.format(guid, localsha))
        print('Finished checking local files.')
    if not args.locdir and not args.livesums:
        # Nothing to verify against; just display the served GUIDs.
        for episode in sums[args.feedlist[0]].keys():
            print(episode + ':')
            for feed in args.feedlist:
                print('\t{0:6}: {1}'.format(feed,
                                            sums[feed][episode]['guid']))
    return(sums)

def parseArgs():
    """Build and return the CLI argument parser for the verifier.

    The caller is responsible for invoking .parse_args() on the result.
    """
    parser = argparse.ArgumentParser(description = 'Sysadministrivia Verifier',
                                     epilog = 'https://git.square-r00t.net/Podloader')
    parser.add_argument('-l', '--live',
                        action = 'store_true',
                        dest = 'livesums',
                        help = 'If specified, calculate the sums live from the site and compare against the GUIDs served. This can take a long time.')
    parser.add_argument('-f', '--feed',
                        nargs = '*',
                        dest = 'feedlist',
                        choices = ['itunes', 'google', 'mp3', 'ogg'],
                        default = ['itunes', 'google', 'mp3', 'ogg'],
                        help = 'Which feed(s) to check. The default is all. Multiple can be specified via "-f itunes google" etc.')
    parser.add_argument('-d', '--directory',
                        metavar = 'path',
                        dest = 'locdir',
                        default = False,
                        help = 'If specified, a directory where local copies of the episodes exist. (e.g. ~/gPodder/Downloads/Sysadministrivia)')
    return(parser)

def main():
    """Entry point: parse CLI options, fetch the feeds, and verify the sums."""
    opts = parseArgs().parse_args()
    feeds_xml = getXML(baseurl, feeds, opts)
    getSums(feeds_xml, opts)

# Script entry point.
if __name__ == '__main__':
    main()