# bdisk/bdisk/prep.py


import os
import shutil
import re
import hashlib
import gnupg
import tarfile
import subprocess
import sys
#import git
import jinja2
import datetime
import humanize
from urllib.request import urlopen
import host # bdisk.host

def dirChk(config_dict):
    # Make dirs if they don't exist
    for d in ('archboot', 'isodir', 'mountpt', 'srcdir', 'tempdir'):
        os.makedirs(config_dict['build'][d], exist_ok = True)
    # Make dirs for sync staging if we need to
    for x in ('http', 'tftp'):
        if config_dict['sync'][x]:
            os.makedirs(config_dict[x]['path'], exist_ok = True)
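
# For reference, a rough (illustrative, not exhaustive) sketch of the config dict
# this module expects -- the key names below are the ones actually read in this
# file, but the values are placeholders rather than real defaults:
#
#   config_dict = {
#       'build': {
#           'archboot': '...', 'isodir': '...', 'mountpt': '...', 'srcdir': '...',
#           'tempdir': '...', 'dlpath': '...', 'chrootdir': '...',
#           'basedir': '/path/to/the/bdisk/repo',
#           'arch': ['x86_64', 'i686'],
#           'mirrorproto': 'https', 'mirror': 'mirror.example.com',
#           'mirrorpath': '...', 'mirrorchksum': '...', 'mirrorgpgsig': '',
#           'gpgkey': '...', 'gpgkeyserver': '...',
#       },
#       'sync': {'http': False, 'tftp': False},
#       'http': {'path': '...'},
#       'tftp': {'path': '...'},
#   }
#
# prepChroot() additionally reads bdisk['name'] plus a 'user' dict that is only
# passed through to the VARS.txt.j2 template.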

def downloadTarball(build):
    dlpath = build['dlpath']
    arch = build['arch']
    #mirror = 'http://mirrors.kernel.org/archlinux'
    mirror = build['mirrorproto'] + '://' + build['mirror']
    rlsdir = mirror + build['mirrorpath']
    sha_in = urlopen(mirror + build['mirrorchksum'])
    # returns path/filename e.g. /some/path/to/file.tar.gz
    # we use .gnupg since we'll need it later.
    os.makedirs(dlpath + '/.gnupg', exist_ok = True)
    tarball_path = {}
    for x in arch:
        tarball_path[x] = dlpath + '/.latest.' + x + '.tar'
    sha1sums = sha_in.read()
    sha_in.close()
    sha_raw = sha1sums.decode("utf-8")
    sha_list = list(filter(None, sha_raw.split('\n')))
    sha_dict = {x.split()[1]: x.split()[0] for x in sha_list}
    # all that lousy work just to get a sha1 sum. okay. so.
    if build['mirrorgpgsig'] != '':
        # we don't want to futz with the user's normal gpg.
        gpg = gnupg.GPG(gnupghome = dlpath + '/.gnupg')
print("\n{0}: Generating a GPG key. Please wait...".format(datetime.datetime.now()))
# python-gnupg 0.3.9 spits this error in Arch. it's harmless, but ugly af.
# TODO: remove this when the error doesn't happen anymore.
print("\t\t\t If you see a \"ValueError: Unknown status message: 'KEY_CONSIDERED'\" error,\n\t\t\t it can be safely ignored.")
print("\t\t\t If this is taking a VERY LONG time, try installing haveged and starting it.\n\t\t\t This can be " +
"done safely in parallel with the build process.\n")
input_data = gpg.gen_key_input(name_email = 'tempuser@nodomain.tld', passphrase = 'placeholder_passphrase')
key = gpg.gen_key(input_data)
keyid = build['gpgkey']
gpg.recv_keys(build['gpgkeyserver'], keyid)
for a in arch:
pattern = re.compile('^.*' + a + '\.tar(\.(gz|bz2|xz))?$')
tarball = [filename.group(0) for l in list(sha_dict.keys()) for filename in [pattern.search(l)] if filename][0]
sha1 = sha_dict[tarball]
if os.path.isfile(tarball_path[a]):
pass
else:
# fetch the tarball...
print("{0}: Fetching the tarball for {1} architecture, please wait...".format(
datetime.datetime.now(),
a))
#dl_file = urllib.URLopener()
tarball_dl = urlopen(rlsdir + tarball)
with open(tarball_path[a], 'wb') as f:
f.write(tarball_dl.read())
tarball_dl.close()
print("{0}: Done fetching {1} ({2}).".format(
datetime.datetime.now(),
tarball_path[a],
humanize.naturalsize(
os.path.getsize(tarball_path[a]))))
print("{0}: Checking that the hash checksum for {1}\n\t\t\t matches {2}, please wait...".format(
2016-12-03 06:07:41 -05:00
datetime.datetime.now(),
tarball_path[a],
sha1))
tarball_hash = hashlib.sha1(open(tarball_path[a], 'rb').read()).hexdigest()
if tarball_hash != sha1:
2016-12-03 06:07:41 -05:00
exit(("{0}: {1} either did not download correctly or a wrong (probably old) version exists on the filesystem.\n" +
"Please delete it and try again.").format(datetime.datetime.now(), tarball))
        elif build['mirrorgpgsig'] != '':
            # okay, so the sha1 matches. let's verify the signature.
            if build['mirrorgpgsig'] == '.sig':
                gpgsig_remote = rlsdir + tarball + '.sig'
            else:
                gpgsig_remote = mirror + build['mirrorgpgsig']
            gpg_sig = tarball + '.sig'
            sig_dl = urlopen(gpgsig_remote)
            sig = tarball_path[a] + '.sig'
            with open(sig, 'wb+') as f:
                f.write(sig_dl.read())
            sig_dl.close()
            tarball_data = open(tarball_path[a], 'rb')
            tarball_data_in = tarball_data.read()
            gpg_verify = gpg.verify_data(sig, tarball_data_in)
            tarball_data.close()
            if not gpg_verify:
exit("{0}: There was a failure checking {1} against {2}. Please investigate.".format(
datetime.datetime.now(),
sig,
tarball_path[a]))
os.remove(sig)
return(tarball_path)
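
# A minimal usage sketch for the downloader (illustrative only -- the mirror,
# paths, and checksum location below are placeholders, not real defaults; the
# real values come from the parsed bdisk configuration):
#
#   build = {'dlpath': '/var/tmp/bdisk',
#            'arch': ['x86_64'],
#            'mirrorproto': 'https',
#            'mirror': 'mirror.example.com',
#            'mirrorpath': '/iso/latest/',
#            'mirrorchksum': '/iso/latest/sha1sums.txt',
#            'mirrorgpgsig': '',   # empty string skips GPG verification
#            'gpgkey': '', 'gpgkeyserver': ''}
#   tarballs = downloadTarball(build)
#   # -> {'x86_64': '/var/tmp/bdisk/.latest.x86_64.tar'}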

def unpackTarball(tarball_path, build, keep = False):
    chrootdir = build['chrootdir']
    if os.path.isdir(chrootdir):
        if not keep:
            # Wipe the existing chroot dir and recreate it empty.
            shutil.rmtree(chrootdir, ignore_errors = True)
            os.makedirs(chrootdir, exist_ok = True)
    else:
        # Make the dir if it doesn't exist.
        os.makedirs(chrootdir, exist_ok = True)
    # Open and extract the tarball
    if not keep:
        for a in build['arch']:
            print("{0}: Extracting tarball {1} ({2}). Please wait...".format(
                datetime.datetime.now(),
                tarball_path[a],
                humanize.naturalsize(
                    os.path.getsize(tarball_path[a]))))
            with tarfile.open(tarball_path[a], 'r:gz') as tar:
                tar.extractall(path = chrootdir)
            print("{0}: Extraction for {1} finished.".format(datetime.datetime.now(), tarball_path[a]))

def buildChroot(build, keep = False):
    dlpath = build['dlpath']
    chrootdir = build['chrootdir']
    arch = build['arch']
    extradir = build['basedir'] + '/extra'
    # download + unpack the release tarball(s) into the chroot dir(s).
    unpackTarball(downloadTarball(build), build, keep)
    # build dict of lists of files and dirs from pre-build.d dir, do the same with arch-specific changes.
    prebuild_overlay = {}
    prebuild_arch_overlay = {}
    for x in arch:
        prebuild_arch_overlay[x] = {}
        for y in ['files', 'dirs']:
            prebuild_overlay[y] = []
            prebuild_arch_overlay[x][y] = []
    for path, dirs, files in os.walk(extradir + '/pre-build.d/'):
        prebuild_overlay['dirs'].append(path + '/')
        for file in files:
            prebuild_overlay['files'].append(os.path.join(path, file))
    for x in prebuild_overlay.keys():
        prebuild_overlay[x][:] = [re.sub('^' + extradir + '/pre-build.d/', '', s) for s in prebuild_overlay[x]]
        prebuild_overlay[x] = list(filter(None, prebuild_overlay[x]))
        for y in prebuild_arch_overlay.keys():
            prebuild_arch_overlay[y][x][:] = [i for i in prebuild_overlay[x] if i.startswith(y)]
            prebuild_arch_overlay[y][x][:] = [re.sub('^' + y + '/', '', s) for s in prebuild_arch_overlay[y][x]]
            prebuild_arch_overlay[y][x] = list(filter(None, prebuild_arch_overlay[y][x]))
        prebuild_overlay[x][:] = [y for y in prebuild_overlay[x] if not y.startswith(('x86_64','i686'))]
    prebuild_overlay['dirs'].remove('/')
    # create the dir structure. these should almost definitely be owned by root.
    for a in arch:
        for dir in prebuild_overlay['dirs']:
            os.makedirs(chrootdir + '/root.' + a + '/' + dir, exist_ok = True)
            os.chown(chrootdir + '/root.' + a + '/' + dir, 0, 0)
        # and copy over the files. again, chown to root.
        for file in prebuild_overlay['files']:
            shutil.copy2(extradir + '/pre-build.d/' + file, chrootdir + '/root.' + a + '/' + file, follow_symlinks = False)
            os.chown(chrootdir + '/root.' + a + '/' + file, 0, 0, follow_symlinks = False)
        # do the same for arch-specific stuff.
        for dir in prebuild_arch_overlay[a]['dirs']:
            os.makedirs(chrootdir + '/root.' + a + '/' + dir, exist_ok = True)
            os.chown(chrootdir + '/root.' + a + '/' + dir, 0, 0)
        for file in prebuild_arch_overlay[a]['files']:
            shutil.copy2(extradir + '/pre-build.d/' + a + '/' + file, chrootdir + '/root.' + a + '/' + file, follow_symlinks = False)
            os.chown(chrootdir + '/root.' + a + '/' + file, 0, 0, follow_symlinks = False)
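
# For clarity, the pre-build.d overlay mapping above works roughly like this
# (the file names are made up for illustration; only the directory layout is real):
#
#   <basedir>/extra/pre-build.d/etc/foo.conf        -> root.x86_64/etc/foo.conf and root.i686/etc/foo.conf
#   <basedir>/extra/pre-build.d/x86_64/etc/bar.conf -> root.x86_64/etc/bar.conf only
#   <basedir>/extra/pre-build.d/i686/etc/bar.conf   -> root.i686/etc/bar.conf only
#
# i.e. top-level x86_64/ and i686/ directories hold arch-specific overrides and
# everything else is copied into every architecture's chroot, owned by root.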

def prepChroot(build, bdisk, user):
    chrootdir = build['chrootdir']
    tempdir = build['tempdir']
    arch = build['arch']
    bdisk_repo_dir = build['basedir']
    dlpath = build['dlpath']
    templates_dir = bdisk_repo_dir + '/extra/templates'
    #build = {} # why was this here?
    ## let's prep some variables to write out the version info.txt
    # and these should be passed in from the args, for the most part.
    build['name'] = bdisk['name']
    build['time'] = datetime.datetime.utcnow().strftime("%a %b %d %H:%M:%S UTC %Y")
    hostname = host.getHostname()
    build['user'] = os.environ['USER']
    if 'SUDO_USER' in os.environ:
        build['realuser'] = os.environ['SUDO_USER']
    # Get the build number...
    # TODO: support tracking builds per version. i.e. in buildnum:
    # v2.51-g7381cc3:0
    # v2.51-gb3bb039:3
    if os.path.isfile(dlpath + '/buildnum'):
        with open(dlpath + '/buildnum', 'r') as f:
            build['buildnum'] = int(f.readlines()[0])
    else:
        build['buildnum'] = 0
    build['buildnum'] += 1
    with open(dlpath + '/buildnum', 'w+') as f:
        f.write(str(build['buildnum']) + "\n")
    # and now that we have that dict, let's write out the VERSION_INFO.txt file.
    loader = jinja2.FileSystemLoader(templates_dir)
    env = jinja2.Environment(loader = loader)
    tpl = env.get_template('VERSION_INFO.txt.j2')
    tpl_out = tpl.render(build = build, bdisk = bdisk, hostname = hostname, distro = host.getOS())
    for a in arch:
        with open('{0}/root.{1}/root/VERSION_INFO.txt'.format(chrootdir, a), 'w+') as f:
            f.write(tpl_out)
    with open(tempdir + '/VERSION_INFO.txt', 'w+') as f:
        f.write(tpl_out)
    tpl = env.get_template('VARS.txt.j2')
    tpl_out = tpl.render(bdisk = bdisk, user = user)
    for a in arch:
        with open('{0}/root.{1}/root/VARS.txt'.format(chrootdir, a), 'w+') as f:
            f.write(tpl_out)
    return(build)
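
# The buildnum file is just a plain-text counter in dlpath that prepChroot()
# bumps on every run, e.g. (the path shown is illustrative):
#
#   $ cat /var/tmp/bdisk/buildnum
#   14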

def postChroot(build):
    dlpath = build['dlpath']
    chrootdir = build['chrootdir']
    arch = build['arch']
    overdir = build['basedir'] + '/overlay/'
    postbuild_overlay = {}
    postbuild_arch_overlay = {}
    for x in arch:
        os.remove('{0}/root.{1}/README'.format(chrootdir, x))
        postbuild_arch_overlay[x] = {}
        for y in ['files', 'dirs']:
            postbuild_overlay[y] = []
            postbuild_arch_overlay[x][y] = []
    for path, dirs, files in os.walk(overdir):
        postbuild_overlay['dirs'].append(path + '/')
        for file in files:
            postbuild_overlay['files'].append(os.path.join(path, file))
    for x in postbuild_overlay.keys():
        postbuild_overlay[x][:] = [re.sub('^' + overdir, '', s) for s in postbuild_overlay[x]]
        postbuild_overlay[x] = list(filter(None, postbuild_overlay[x]))
        for y in postbuild_arch_overlay.keys():
            postbuild_arch_overlay[y][x][:] = [i for i in postbuild_overlay[x] if i.startswith(y)]
            postbuild_arch_overlay[y][x][:] = [re.sub('^' + y + '/', '', s) for s in postbuild_arch_overlay[y][x]]
            postbuild_arch_overlay[y][x] = list(filter(None, postbuild_arch_overlay[y][x]))
        postbuild_overlay[x][:] = [y for y in postbuild_overlay[x] if not y.startswith(('x86_64','i686'))]
    postbuild_overlay['dirs'].remove('/')
    # create the dir structure. these should almost definitely be owned by root.
    for a in arch:
        for dir in postbuild_overlay['dirs']:
            os.makedirs(chrootdir + '/root.' + a + '/' + dir, exist_ok = True)
            os.chown(chrootdir + '/root.' + a + '/' + dir, 0, 0, follow_symlinks = False)
        # and copy over the files. again, chown to root.
        for file in postbuild_overlay['files']:
            shutil.copy2(overdir + file, chrootdir + '/root.' + a + '/' + file, follow_symlinks = False)
            os.chown(chrootdir + '/root.' + a + '/' + file, 0, 0, follow_symlinks = False)
        # do the same for arch-specific stuff.
        for dir in postbuild_arch_overlay[a]['dirs']:
            os.makedirs(chrootdir + '/root.' + a + '/' + dir, exist_ok = True)
            os.chown(chrootdir + '/root.' + a + '/' + dir, 0, 0, follow_symlinks = False)
        for file in postbuild_arch_overlay[a]['files']:
            shutil.copy2(overdir + a + '/' + file, chrootdir + '/root.' + a + '/' + file, follow_symlinks = False)
            os.chown(chrootdir + '/root.' + a + '/' + file, 0, 0, follow_symlinks = False)
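
# A rough end-to-end sketch of how these functions chain together during a build
# (the shape of the top-level config dict is an assumption here -- in bdisk it is
# produced by the config-parsing code, not by this module):
#
#   dirChk(config)
#   buildChroot(config['build'], keep = False)    # download, verify, unpack, apply pre-build.d
#   build = prepChroot(config['build'], config['bdisk'], config['user'])
#   ...                                           # chroot configuration happens elsewhere
#   postChroot(config['build'])                   # apply the overlay/ tree to the built chroots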