#!/usr/bin/python
# Gitian Downloader - download/update and verify a gitian distribution package
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os, subprocess
from os import path
import shutil
import re
import tempfile
import atexit
import urllib2
import libxml2
import argparse
import yaml
from zipfile import ZipFile
inject_config_string = "INJECT" + "CONFIG"
injected_config = """INJECTCONFIG"""
have_injected_config = injected_config != inject_config_string
quiet = 0
def sanitize_path(dir_name, name, where):
    if re.search(r'[^/\w.-]', name):
        raise ValueError, "unsanitary path in %s"%(where)
    full_name = path.normpath(path.join(dir_name, name))
    if full_name.find(dir_name + os.sep) != 0:
        raise ValueError, "unsafe path in %s"%(where)

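# Illustrative calls (paths are made up, not from any real package), showing
# how the two checks above reject hostile zip entries before extraction:
#   sanitize_path('/tmp/unpack', 'gitian/builder.assert', 'zip file')   # passes both checks
#   sanitize_path('/tmp/unpack', '../../etc/passwd', 'zip file')        # normalizes outside dir_name -> "unsafe path"
#   sanitize_path('/tmp/unpack', 'has space.txt', 'zip file')           # space fails the character check -> "unsanitary path"
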
def remove_temp(tdir):
    shutil.rmtree(tdir)

def download(url, dest):
    if quiet == 0:
        print "Downloading from %s"%(url)
    file_name = url.split('/')[-1]
    u = urllib2.urlopen(url)
    f = open(dest, 'w')
    meta = u.info()
    file_size = int(meta.getheaders("Content-Length")[0])
    if quiet == 0:
        print "Downloading: %s Bytes: %s"%(file_name, file_size)
    file_size_dl = 0
    block_sz = 65536
    while True:
        buffer = u.read(block_sz)
        if not buffer:
            break
        file_size_dl += len(buffer)
        f.write(buffer)
        status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
        status = status + chr(8)*(len(status)+1)
        if quiet == 0:
            print status,
    if quiet == 0:
        print
    f.close()

def extract(dir_name, zip_path):
    zipfile = ZipFile(zip_path, 'r')
    files = []
    for name in zipfile.namelist():
        sanitize_path(dir_name, name, "zip file")
    zipfile.extractall(dir_name)
    zipfile.close()
    for name in zipfile.namelist():
        if path.isfile(path.join(dir_name, name)):
            files.append(path.normpath(name))
    return files

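# The verification below assumes a package layout along these lines (inferred
# from the code, shown only as an illustration): metadata files live under
# 'gitian/', each '<name>.assert' is a YAML document with a detached signature
# '<name>.assert.pgp', and its 'out_manifest' field holds sha256sum -b style
# lines such as
#   <64-hex-digit sha256> *some-output-file.tar.gz
# where characters 0-63 are the hash and the file name starts at column 66.
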
def get_assertions(temp_dir, unpack_dir, file_names):
    assertions = {"build" : {}}
    sums = {}
    to_check = {}
    for file_name in file_names:
        shasum = subprocess.Popen(["sha256sum", '-b', os.path.join(unpack_dir, file_name)], stdout=subprocess.PIPE).communicate()[0][0:64]
        sums[file_name] = shasum
        to_check[file_name] = 1
    out_manifest = False
    error = False
    for file_name in file_names:
        if file_name.startswith("gitian"):
            del to_check[file_name]
            if file_name.endswith(".assert"):
                popen = subprocess.Popen(["gpg", '--status-fd', '1', '--homedir', path.join(temp_dir, 'gpg'), '--verify', os.path.join(unpack_dir, file_name + '.pgp'), os.path.join(unpack_dir, file_name)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                gpgout = popen.communicate()[0]
                retcode = popen.wait()
                if retcode != 0:
                    if quiet <= 1:
                        print>>sys.stderr, 'PGP verify failed for %s' %(file_name)
                    error = True
                    continue
                match = re.search(r'^\[GNUPG:\] VALIDSIG ([A-F0-9]+)', gpgout, re.M)
                assertions['build'][match.group(1)] = 1
                f = file(os.path.join(unpack_dir, file_name), 'r')
                assertion = yaml.load(f, OrderedDictYAMLLoader)
                f.close()
                if assertion['out_manifest']:
                    if out_manifest:
                        if out_manifest != assertion['out_manifest']:
                            print>>sys.stderr, 'not all out manifests are identical'
                            error = True
                            continue
                    else:
                        out_manifest = assertion['out_manifest']
    if out_manifest:
        for line in out_manifest.split("\n"):
            if line != "":
                shasum = line[0:64]
                summed_file = line[66:]
                if sums[summed_file] != shasum:
                    print>>sys.stderr, "sha256sum mismatch on %s" %(summed_file)
                    error = True
                del to_check[summed_file]
        if len(to_check) > 0 and quiet == 0:
            print>>sys.stderr, "Some of the files were not checksummed:"
            for key in to_check:
                print>>sys.stderr, " ", key
    else:
        print>>sys.stderr, 'No build assertions found'
        error = True
    return (not error, assertions, sums)

def import_keys(temp_dir, config):
    gpg_dir = path.join(temp_dir, 'gpg')
    os.mkdir(gpg_dir, 0700)
    signers = config['signers']
    for keyid in signers:
        key_path = path.join('gitian', signers[keyid]['key'] + '-key.pgp')
        popen = subprocess.Popen(['gpg', '--status-fd', '1', '--homedir', gpg_dir, '--import', path.join(temp_dir, 'unpack', key_path)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        gpgout = popen.communicate(signers[keyid]['key'])[0]
        if popen.wait() != 0:
            print>>sys.stderr, 'Key %s failed to import'%(keyid)
            continue
        expected_keyid = keyid
        if signers[keyid].has_key('keyid'):
            expected_keyid = signers[keyid]['keyid']
        if gpgout.count(expected_keyid) == 0:
            print>>sys.stderr, 'Key file %s did not contain the key %s'%(key_path, keyid)
        if gpgout.count('IMPORT_OK') != 1 and quiet <= 1:
            print>>sys.stderr, 'Key file %s contained more than one key'%(key_path)

def check_assertions(config, assertions):
    total_weight = 0
    signers = config['signers']
    if quiet == 0:
        print 'Signatures from:'
    for key in assertions['build']:
        if not signers.has_key(key):
            if quiet <= 1:
                print>>sys.stderr, 'key %s is not in config, skipping'%(key)
            continue
        if quiet == 0:
            print ' %s : weight %d'%(signers[key]['name'], signers[key]['weight'])
        total_weight += signers[key]['weight']
    if total_weight < config['minimum_weight']:
        print>>sys.stderr, "The total weight of signatures is %d, which is less than the minimum required %d"%(total_weight, config['minimum_weight'])
        return False
    return total_weight

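# For example (made-up numbers): two recognized signers with weights 40 and 30
# give total_weight 70, which passes a minimum_weight of 60; a single signer
# with weight 40 would fall short and check_assertions would return False.
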
class OrderedDictYAMLLoader(yaml.Loader):
    """
    A YAML loader that loads ordered yaml maps into a dictionary.
    """

    def __init__(self, *args, **kwargs):
        yaml.Loader.__init__(self, *args, **kwargs)
        self.add_constructor(u'!omap', type(self).construct_yaml_map)

    def construct_yaml_map(self, node):
        data = dict()
        yield data
        for mapping in node.value:
            for key, value in mapping.value:
                key = self.construct_object(key)
                value = self.construct_object(value)
                data[key] = value

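# A minimal sketch of the YAML configuration this script expects, inferred from
# the fields read below; URLs, fingerprints, names and weights are made up.
# Signers are keyed by their full PGP fingerprint (as reported by VALIDSIG),
# 'key' names gitian/<key>-key.pgp inside the package, and 'keyid' is optional.
#
#   ---
#   urls:
#     - http://example.com/package.zip
#   rss:
#     - url: http://example.com/releases.rss
#       xpath: //item/link/text()
#       pattern: package-.*\.zip
#   minimum_weight: 30
#   signers:
#     "0123456789ABCDEF0123456789ABCDEF01234567":
#       name: builder-one
#       key: builder-one
#       weight: 40
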
full_prog = sys.argv[0]
prog = os.path.basename(full_prog)

parser = argparse.ArgumentParser(description='Download and verify a gitian package')
parser.add_argument('-u', '--url', metavar='URL', type=str, nargs='+', required=False,
                    help='one or more URLs where the package can be found')
parser.add_argument('-c', '--config', metavar='CONF', type=str, required=not have_injected_config,
                    help='a configuration file')
parser.add_argument('-d', '--dest', metavar='DEST', type=str, required=False,
                    help='the destination directory for unpacking')
parser.add_argument('-q', '--quiet', action='append_const', const=1, default=[], help='be quiet')
parser.add_argument('-m', '--customize', metavar='OUTPUT', type=str,
                    help='generate a customized version of the script with the given config')

args = parser.parse_args()

quiet = len(args.quiet)

if args.config:
    f = file(args.config, 'r')
    if args.customize:
        s = file(full_prog, 'r')
        script = s.read()
        s.close()
        config = f.read()
        script = script.replace(inject_config_string, config)
        s = file(args.customize, 'w')
        s.write(script)
        s.close()
        os.chmod(args.customize, 0750)
        exit(0)
    config = yaml.safe_load(f)
    f.close()
else:
    config = yaml.safe_load(injected_config)

if not args.dest:
    parser.error('argument -d/--dest is required unless -m is specified')

rsses = []

if args.url:
    urls = args.url
else:
    urls = config['urls']
    if config.has_key('rss'):
        rsses = config['rss']

if not urls:
    parser.error('argument -u/--url is required since config does not specify it')

# TODO: rss, atom, etc.

if path.exists(args.dest):
    print>>sys.stderr, "destination already exists, please remove it first"
    exit(1)

temp_dir = tempfile.mkdtemp('', prog)

atexit.register(remove_temp, temp_dir)

package_file = path.join(temp_dir, 'package')

downloaded = False

for rss in rsses:
    try:
        feed = libxml2.parseDoc(urllib2.urlopen(rss['url']).read())
        url = None
        for node in feed.xpathEval(rss['xpath']):
            if re.search(rss['pattern'], str(node)):
                url = str(node)
                break
        try:
            download(url, package_file)
            downloaded = True
            break
        except:
            print>>sys.stderr, "could not download from %s, trying next rss"%(url)
            pass
    except:
        print>>sys.stderr, "could not read from rss %s"%(rss)
        pass

if not downloaded:
    for url in urls:
        try:
            download(url, package_file)
            downloaded = True
            break
        except:
            print>>sys.stderr, "could not download from %s, trying next url"%(url)
            pass

if not downloaded:
    print>>sys.stderr, "out of places to download from, try later"
    exit(1)

unpack_dir = path.join(temp_dir, 'unpack')

files = extract(unpack_dir, package_file)

import_keys(temp_dir, config)

(success, assertions, out_manifest) = get_assertions(temp_dir, unpack_dir, files)
if not success and quiet <= 1:
    print>>sys.stderr, "There were errors getting assertions"

total_weight = check_assertions(config, assertions)
if not total_weight:
    print>>sys.stderr, "There were errors checking assertions, build is untrusted, aborting"
    exit(1)

if quiet == 0:
    print>>sys.stderr, "Successful with signature weight %d"%(total_weight)

shutil.copytree(unpack_dir, args.dest)

f = file(path.join(args.dest, '.manifest'), 'w')
yaml.dump(out_manifest, f)
f.close()

#os.system("cd %s ; /bin/bash"%(temp_dir))
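
# Illustrative invocations (file and directory names are placeholders):
#   gitian-updater -c example.conf -d ./unpacked        # download per config, verify signatures, unpack
#   gitian-updater -c example.conf -m my-updater        # write a customized copy with the config injected
#   ./my-updater -d ./unpacked -u http://example.com/package.zip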