Add checksum support for downloaded files.

File downloads by http, ftp, and pw support checksums. The %hash
directive provides a means of setting a hash used to
checksum a file.

Files already on disk, as well as files just downloaded, are checked.
This commit is contained in:
Chris Johns
2014-07-29 16:35:43 +10:00
parent c49e500042
commit a083b52921
28 changed files with 159 additions and 10 deletions

View File

@@ -308,6 +308,8 @@ class build:
for l in _prep:
args = l.split()
if len(args):
def err(msg):
raise error.general('%s: %s' % (package, msg))
if args[0] == '%setup':
if len(args) == 1:
raise error.general('invalid %%setup directive: %s' % (' '.join(args)))
@@ -315,8 +317,11 @@ class build:
self.source_setup(package, args[1:])
elif args[1] == 'patch':
self.patch_setup(package, args[1:])
elif args[0].startswith('%patch'):
self.patch(package, args)
elif args[0] in ['%patch', '%source']:
sources.process(args[0][1:], args[1:], self.macros, err)
elif args[0] == '%hash':
sources.hash(args[1:], self.macros, err)
self.hash(package, args)
else:
self.script.append(' '.join(args))

View File

@@ -229,8 +229,9 @@ class file:
_ignore = [ re.compile('%setup'),
re.compile('%configure'),
re.compile('%source[0-9]*'),
re.compile('%patch[0-9]*'),
re.compile('%source'),
re.compile('%patch'),
re.compile('%hash'),
re.compile('%select'),
re.compile('%disable') ]
@@ -671,6 +672,9 @@ class file:
def _sources(self, ls):
    # Forward a %source/%patch configuration line to the sources
    # module; ls[0] carries the directive word, which is passed on
    # with its leading '%' stripped.
    directive = ls[0][1:]
    return sources.process(directive, ls[1:], self.macros, self._error)
def _hash(self, ls):
    # Forward a %hash configuration line to the sources module for
    # recording; the directive word itself (ls[0]) is dropped.
    hash_args = ls[1:]
    return sources.hash(hash_args, self.macros, self._error)
def _define(self, config, ls):
if len(ls) <= 1:
log.warning('invalid macro definition')
@@ -892,6 +896,11 @@ class file:
d = self._sources(ls)
if d is not None:
return ('data', d)
elif ls[0] == '%hash':
if isvalid:
d = self._hash(ls)
if d is not None:
return ('data', d)
elif ls[0] == '%patch':
if isvalid:
self._select(config, ls)

View File

@@ -22,6 +22,7 @@
# installed not to be package unless you run a packager around this.
#
import hashlib
import os
import stat
import sys
@@ -50,6 +51,47 @@ def _humanize_bytes(bytes, precision = 1):
break
return '%.*f%s' % (precision, float(bytes) / factor, suffix)
def _hash_check(file_, absfile, macros, remove = True):
    # Check the on-disk file against the hash recorded for it in the
    # 'hashes' macro map.
    #
    # file_   : file name used (lower-cased) as the key into the map
    # absfile : path of the actual file to read and hash
    # macros  : macro table holding the 'hashes' map; the map value
    #           is a two-field string '<algorithm> <hex-digest>'
    # remove  : when True, delete the file on a checksum failure so a
    #           later run fetches it again
    #
    # Returns True when the digest matches, or when no hash is
    # recorded for the file (only a warning is logged in that case);
    # returns False on a checksum mismatch or a read error.
    # Raises error.internal on a malformed map entry and
    # error.general on an unknown hash algorithm.
    failed = False
    if file_.lower() in macros.map_keys('hashes'):
        m1, m2, hash = macros.get(file_.lower(), globals = False, maps = 'hashes')
        # Split the '<algorithm> <hex-digest>' map value.
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        if hash[0] not in hashlib.algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            # NOTE(review): reads the whole file into memory in one
            # update() call; fine for source tarballs, could be
            # costly for very large files.
            _in = open(absfile, 'rb')
            hasher.update(_in.read())
        except IOError, err:
            # Read failures count as a checksum failure rather than
            # aborting the build.
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Anything unexpected: report it, close the file and
            # re-raise to the caller.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            # Drop the corrupt file so it does not poison later runs.
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                os.remove(path.host(absfile))
        if hasher is not None:
            del hasher
    else:
        # No hash recorded: warn but accept the file.
        log.warning('%s: no hash found' % (file_))
    return not failed
def _http_parser(source, config, opts):
#
# Is the file compressed ?
@@ -173,6 +215,7 @@ def parse_url(url, pathkey, config, opts):
if path.exists(local):
source['local_prefix'] = path.abspath(p)
source['local'] = local
_hash_check(source['file'], local, config.macros)
break
source['script'] = ''
for p in parsers:
@@ -257,6 +300,8 @@ def _http_downloader(url, local, config, opts):
if not failed:
if not path.isfile(local):
raise error.general('source is not a file: %s' % (path.host(local)))
if not _hash_check(path.basename(local), local, config.macros, False):
raise error.general('checksum failure file: %s' % (dst))
return not failed
def _git_downloader(url, local, config, opts):

View File

@@ -82,7 +82,7 @@ def trace(text = os.linesep, log = None):
def warning(text = os.linesep, log = None):
    # Emit each line of 'text' with a 'warning: ' prefix.
    # NOTE(review): this diff hunk shows both the removed _output()
    # call and the added notice() call with the +/- markers lost in
    # rendering; the committed code most likely keeps only the
    # notice() line — confirm against the repository.
    for l in text.replace(chr(13), '').splitlines():
        _output('warning: %s' % (l), log)
        notice('warning: %s' % (l), log)
def flush(log = None):
if log:

View File

@@ -236,6 +236,8 @@ class buildset:
self.bset_pkg = self.macros.expand(ls[1].strip())
self.macros['package'] = self.bset_pkg
elif ls[0][0] == '%':
def err(msg):
raise error.general('%s:%d: %s' % (self.bset, lc, msg))
if ls[0] == '%define':
if len(ls) > 2:
self.macros.define(ls[1].strip(),
@@ -249,10 +251,10 @@ class buildset:
self.macros.undefine(ls[1].strip())
elif ls[0] == '%include':
configs += self.parse(ls[1].strip())
elif ls[0] == '%patch' or ls[0] == '%source':
def err(msg):
raise error.general('%s:%d: %s' % (self.bset, lc, msg))
elif ls[0] in ['%patch', '%source']:
sources.process(ls[0][1:], ls[1:], self.macros, err)
elif ls[0] == '%hash':
sources.hash(ls[1:], self.macros, err)
else:
l = l.strip()
c = build.find_config(l, self.configs)

View File

@@ -72,3 +72,17 @@ def process(label, args, macros, error):
elif args[0] == 'setup':
return setup(label, args[1:], macros, error)
error('invalid %%%s command: %s' % (label, args[0]))
def hash(args, macros, error):
    # Handle a %hash directive of the form:
    #   %hash <algorithm> <file> <hex-digest>
    # The value '<algorithm> <hex-digest>' is recorded in the
    # 'hashes' macro map keyed by the expanded file name; a
    # duplicate entry is reported through the error() callback.
    # Always returns None.
    args = _args(args)
    if len(args) != 3:
        error('invalid number of hash args')
    hashes_map = 'hashes'
    name = macros.expand(args[1])
    if name in macros.map_keys(hashes_map):
        error('hash already set: %s' % (args[1]))
    macros.create_map(hashes_map)
    macros.set_write_map(hashes_map)
    macros.define(name, '%s %s' % (args[0], args[2]))
    macros.unset_write_map()
    return None