Mirror of https://git.rtems.org/rtems-source-builder (synced 2024-10-09 07:15:10 +08:00)
sb/track: Add a command to track build sets.
- Process a build set for a range of hosts and output a dependency tree,
  the build set files used, and the configuration files used.
- Output the configuration files that are not referenced.

Closes #4036
Commit: 12418190b9 (parent: cc107b53da)
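The commit adds a thin sb-track front end that runs source-builder/sb/track.py through the shared Python wrapper. A minimal usage sketch follows; the option names and the example build set name are assumptions based on the other sb-* front ends touched by this commit, not options confirmed for sb-track itself.

  # Hypothetical invocation from a source tree checkout; --log, --trace and
  # the build set name are assumed, following the other sb-* front ends.
  cd rtems-source-builder
  ./source-builder/sb-track --log=track.txt --trace 6/rtems-sparc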
@@ -15,6 +15,9 @@ Name: libusb-%{libusb_version}-%{_host}-%{release}

%description
LibUSB for host %{_host}.

%hash sha512 libusb-%{libusb_version}.tar.bz2 \
        u8PXXLkfTmoCRUnCusOO6FrtqFKaRVP9NryOilwiPNieuVLtpx/MAHHWxsgYoGMdTMoIT+1p1Jhu7l3PmofWYg==

#
# The LibUSB build instructions. We use 1.xx Release 1.
#
@@ -6,9 +6,9 @@

%ifn %{defined _internal_autotools}
%define _internal_autotools no
%ifn %{defined _internal_autotools_path}
%define _internal_autotools_path %{nil}
%endif
%endif
%ifn %{defined _internal_autotools_path}
%define _internal_autotools_path %{_prefix}
%endif

Name: autoconf-%{autoconf_version}-%{_host}-%{release}
@@ -6,9 +6,9 @@

%ifn %{defined _internal_autotools}
%define _internal_autotools no
%ifn %{defined _internal_autotools_path}
%define _internal_autotools_path %{nil}
%endif
%endif
%ifn %{defined _internal_autotools_path}
%define _internal_autotools_path %{_prefix}
%endif

Name: automake-%{automake_version}-%{_host}-%{release}
@@ -33,8 +33,10 @@ URL: http://libusb.org/
  cd libusb-%{libusb_version}

%if "%{_build}" != "%{_host}"
  CFLAGS_FOR_BUILD="-g -O2 -Wall" \
  LIBUSB_CFLAGS_FOR_BUILD="-g -O2 -Wall"
%endif

  CFLAGS_FOR_BUILD=${LIBUSB_CFLAGS_FOR_BUILD} \
  CFLAGS="$SB_CFLAGS" \
  ./configure \
    --build=%{_build} --host=%{_host} \
source-builder/sb-track (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#! /bin/sh
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2019 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
set -e
base=$(dirname $0)
PYTHON_CMD=${base}/sb/cmd-track.py
if test -f ${base}/sb/python-wrapper.sh; then
  . ${base}/sb/python-wrapper.sh
fi
echo "error: python wrapper not found"
@@ -635,6 +635,10 @@ class build:
            return 0
        return package.get_size('installed')

    def includes(self):
        if self.config:
            return self.config.includes()

def get_configs(opts):

    def _scan(_path, ext):

@@ -648,10 +652,17 @@ def get_configs(opts):
        return configs

    configs = { 'paths': [], 'files': [] }
    for cp in opts.defaults.expand('%{_configdir}').split(':'):
    paths = opts.defaults.expand('%{_configdir}').split(':')
    root = path.host(os.path.commonprefix(paths))
    configs['root'] = root
    configs['localpaths'] = [lp[len(root):] for lp in paths]
    for cp in paths:
        hcp = path.host(path.abspath(cp))
        configs['paths'] += [hcp]
        configs['files'] += _scan(hcp, ['.cfg', '.bset'])
        hpconfigs = sorted(set(_scan(hcp, ['.cfg', '.bset'])))
        hcplocal = hcp[len(root):]
        configs[hcplocal] = [path.join(hcplocal, c) for c in hpconfigs]
        configs['files'] += hpconfigs
    configs['files'] = sorted(set(configs['files']))
    return configs
source-builder/sb/cmd-track.py (new executable file, 29 lines)
@@ -0,0 +1,29 @@
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2019 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from __future__ import print_function

import sys, os

try:
    import track
    track.run()
except ImportError:
    print("Incorrect Source Builder installation", file = sys.stderr)
    sys.exit(1)
@ -283,6 +283,7 @@ class file:
|
||||
return s
|
||||
|
||||
def _reset(self, name):
|
||||
self.parent = 'root'
|
||||
self.name = name
|
||||
self.load_depth = 0
|
||||
self.configpath = []
|
||||
@ -430,7 +431,8 @@ class file:
|
||||
if len(shell_macro) > 3:
|
||||
e = execute.capture_execution()
|
||||
if options.host_windows:
|
||||
shell_cmd = ''.join([c if c != '"' else '\\' + c for c in shell_macro[2:-1]])
|
||||
shell_cmd = \
|
||||
''.join([c if c != '"' else '\\' + c for c in shell_macro[2:-1]])
|
||||
cmd = '%s -c "%s"' % (self.macros.expand('%{__sh}'), shell_cmd)
|
||||
else:
|
||||
cmd = shell_macro[2:-1]
|
||||
@ -458,7 +460,8 @@ class file:
|
||||
if braces > 0:
|
||||
braces -= 1
|
||||
else:
|
||||
shell_cmd = '%(' + self._shell(line[pos + 2:p], nesting + 1) + ')'
|
||||
shell_cmd = '%(' + \
|
||||
self._shell(line[pos + 2:p], nesting + 1) + ')'
|
||||
line = line[:pos] + _exec(shell_cmd) + line[p + 1:]
|
||||
updating = True
|
||||
break
|
||||
@ -472,9 +475,10 @@ class file:
|
||||
('with_download' in self.macros and self.macros['with_download'] == '1'):
|
||||
return '0'
|
||||
ok = False
|
||||
log.trace('pkgconfig: check: crossc=%d pkg_crossc=%d prefix=%s' % ( self._cross_compile(),
|
||||
self.pkgconfig_crosscompile,
|
||||
self.pkgconfig_prefix))
|
||||
log.trace('pkgconfig: check: crossc=%d pkg_crossc=%d prefix=%s'
|
||||
% ( self._cross_compile(),
|
||||
self.pkgconfig_crosscompile,
|
||||
self.pkgconfig_prefix))
|
||||
log.trace('pkgconfig: check: test=%s' % (test))
|
||||
if type(test) == str:
|
||||
test = test.split()
|
||||
@ -594,7 +598,8 @@ class file:
|
||||
elif m.startswith('%{expand'):
|
||||
colon = m.find(':')
|
||||
if colon < 8:
|
||||
log.warning(self._name_line_msg('malformed expand macro, no colon found'))
|
||||
log.warning(self._name_line_msg('malformed expand macro, ' \
|
||||
'no colon found'))
|
||||
else:
|
||||
e = self._expand(m[colon + 1:-1].strip())
|
||||
s = s.replace(m, self._label(e))
|
||||
@ -861,7 +866,8 @@ class file:
|
||||
dir, info, data = self._process_directive(r, dir, info, data)
|
||||
else:
|
||||
if in_dir != dir:
|
||||
self._error('directives cannot change scope across if statements')
|
||||
self._error('directives cannot change' \
|
||||
' scope across if statements')
|
||||
|
||||
return data
|
||||
if r[1] == '%else':
|
||||
@ -904,22 +910,25 @@ class file:
|
||||
elif cls[0] == '&&':
|
||||
join_op = 'and'
|
||||
cls = cls[1:]
|
||||
log.trace('config: %s: %3d: _if[%i]: joining: %s' % (self.name, self.lc,
|
||||
self.if_depth,
|
||||
join_op))
|
||||
log.trace('config: %s: %3d: _if[%i]: joining: %s' % \
|
||||
(self.name, self.lc,
|
||||
self.if_depth,
|
||||
join_op))
|
||||
ori = 0
|
||||
andi = 0
|
||||
i = len(cls)
|
||||
if '||' in cls:
|
||||
ori = cls.index('||')
|
||||
log.trace('config: %s: %3d: _if[%i}: OR found at %i' % (self.name, self.lc,
|
||||
self.if_depth,
|
||||
ori))
|
||||
log.trace('config: %s: %3d: _if[%i}: OR found at %i' % \
|
||||
(self.name, self.lc,
|
||||
self.if_depth,
|
||||
ori))
|
||||
if '&&' in cls:
|
||||
andi = cls.index('&&')
|
||||
log.trace('config: %s: %3d: _if[%i]: AND found at %i' % (self.name, self.lc,
|
||||
self.if_depth,
|
||||
andi))
|
||||
log.trace('config: %s: %3d: _if[%i]: AND found at %i' % \
|
||||
(self.name, self.lc,
|
||||
self.if_depth,
|
||||
andi))
|
||||
if ori > 0 or andi > 0:
|
||||
if ori == 0:
|
||||
i = andi
|
||||
@ -929,9 +938,10 @@ class file:
|
||||
i = andi
|
||||
else:
|
||||
i = andi
|
||||
log.trace('config: %s: %3d: _if[%i]: next OP found at %i' % (self.name, self.lc,
|
||||
self.if_depth,
|
||||
i))
|
||||
log.trace('config: %s: %3d: _if[%i]: next OP found at %i' % \
|
||||
(self.name, self.lc,
|
||||
self.if_depth,
|
||||
i))
|
||||
ls = cls[:i]
|
||||
if len(ls) == 0:
|
||||
self._error('invalid if expression: ' + reduce(add, sls, ''))
|
||||
@ -1226,7 +1236,8 @@ class file:
|
||||
log.trace('config: %s: %3d: _parse: directive: %s' % \
|
||||
(self.name, self.lc, ls[0].strip()))
|
||||
return ('directive', ls[0].strip(), ls[1:])
|
||||
log.warning(self._name_line_msg("unknown directive: '" + ls[0] + "'"))
|
||||
log.warning(self._name_line_msg("unknown directive: '" + \
|
||||
ls[0] + "'"))
|
||||
return ('data', [lo])
|
||||
else:
|
||||
return ('data', [lo])
|
||||
@ -1247,7 +1258,8 @@ class file:
|
||||
_package = results[2][0]
|
||||
else:
|
||||
if results[2][0].strip() != '-n':
|
||||
log.warning(self._name_line_msg("unknown directive option: '%s'" % (' '.join(results[2]))))
|
||||
log.warning(self._name_line_msg("unknown directive option: '%s'" % \
|
||||
(' '.join(results[2]))))
|
||||
_package = results[2][1].strip()
|
||||
self._set_package(_package)
|
||||
if directive and directive != results[1]:
|
||||
@ -1257,7 +1269,8 @@ class file:
|
||||
return (directive, info, data)
|
||||
|
||||
def _process_data(self, results, directive, info, data):
|
||||
log.trace('config: %s: %3d: _process_data: result=#%r# directive=#%s# info=#%r# data=#%r#' % \
|
||||
log.trace('config: %s: %3d: _process_data: result=#%r# ' \
|
||||
'directive=#%s# info=#%r# data=#%r#' % \
|
||||
(self.name, self.lc, results, directive, info, data))
|
||||
new_data = []
|
||||
for l in results[1]:
|
||||
@ -1284,10 +1297,12 @@ class file:
|
||||
if info is not None:
|
||||
self._info_append(info, info_data)
|
||||
else:
|
||||
log.warning(self._name_line_msg("invalid format: '%s'" % (info_data[:-1])))
|
||||
log.warning(self._name_line_msg("invalid format: '%s'" % \
|
||||
(info_data[:-1])))
|
||||
else:
|
||||
l = self._expand(l)
|
||||
log.trace('config: %s: %3d: _data: %s %s' % (self.name, self.lc, l, new_data))
|
||||
log.trace('config: %s: %3d: _data: %s %s' % \
|
||||
(self.name, self.lc, l, new_data))
|
||||
new_data.append(l)
|
||||
return (directive, info, data + new_data)
|
||||
|
||||
@ -1303,7 +1318,8 @@ class file:
|
||||
self.package = _package
|
||||
|
||||
def _directive_extend(self, dir, data):
|
||||
log.trace('config: %s: %3d: _directive_extend: %s: %r' % (self.name, self.lc, dir, data))
|
||||
log.trace('config: %s: %3d: _directive_extend: %s: %r' % \
|
||||
(self.name, self.lc, dir, data))
|
||||
self._packages[self.package].directive_extend(dir, data)
|
||||
|
||||
def _info_append(self, info, data):
|
||||
@ -1328,7 +1344,6 @@ class file:
|
||||
return end
|
||||
|
||||
if self.load_depth == 0:
|
||||
self._reset(name)
|
||||
self._packages[self.package] = package(self.package,
|
||||
self.define('%{_arch}'),
|
||||
self)
|
||||
@ -1336,6 +1351,7 @@ class file:
|
||||
self.load_depth += 1
|
||||
|
||||
save_name = self.name
|
||||
save_parent = self.parent
|
||||
save_lc = self.lc
|
||||
|
||||
#
|
||||
@ -1382,7 +1398,9 @@ class file:
|
||||
raise error.general('error opening config file: %s' % (path.host(configname)))
|
||||
|
||||
self.configpath += [configname]
|
||||
self._includes += [configname]
|
||||
|
||||
self._includes += [configname + ':' + self.parent]
|
||||
self.parent = configname
|
||||
|
||||
self.name = self._relative_path(configname)
|
||||
self.lc = 0
|
||||
@ -1413,13 +1431,12 @@ class file:
|
||||
except:
|
||||
config.close()
|
||||
raise
|
||||
|
||||
config.close()
|
||||
|
||||
self.name = save_name
|
||||
self.lc = save_lc
|
||||
|
||||
self.load_depth -= 1
|
||||
finally:
|
||||
config.close()
|
||||
self.name = save_name
|
||||
self.parent = save_parent
|
||||
self.lc = save_lc
|
||||
self.load_depth -= 1
|
||||
|
||||
def defined(self, name):
|
||||
return name in self.macros
|
||||
@ -1456,7 +1473,7 @@ class file:
|
||||
raise error.general('package "' + _package + '" not found')
|
||||
if name not in self._packages[_package].directives:
|
||||
raise error.general('directive "' + name + \
|
||||
'" not found in package "' + _package + '"')
|
||||
'" not found in package "' + _package + '"')
|
||||
return self._packages[_package].directives[name]
|
||||
|
||||
def abspath(self, rpath):
|
||||
|
@ -32,13 +32,9 @@ import sys
|
||||
|
||||
try:
|
||||
import build
|
||||
import check
|
||||
import error
|
||||
import git
|
||||
import log
|
||||
import macros
|
||||
import path
|
||||
import sources
|
||||
import simhost
|
||||
import version
|
||||
except KeyboardInterrupt:
|
||||
print('abort: user terminated', file = sys.stderr)
|
||||
@ -47,548 +43,6 @@ except:
|
||||
print('error: unknown application load error', file = sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
#
|
||||
# Define host profiles so it can be simulated on another host.
|
||||
#
|
||||
host_profiles = {
|
||||
'darwin': { '_os': ('none', 'none', 'darwin'),
|
||||
'_host': ('triplet', 'required', 'x86_64-apple-darwin18.5.0'),
|
||||
'_host_vendor': ('none', 'none', 'apple'),
|
||||
'_host_os': ('none', 'none', 'darwin'),
|
||||
'_host_os_version': ('none', 'none', '18.5.0'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'freebsd': { '_os': ('none', 'none', 'freebsd'),
|
||||
'_host': ('triplet', 'required', 'x86_64-freebsd12.0-RELEASE-p3'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'freebsd'),
|
||||
'_host_os_version': ('none', 'none', '12.0-RELEASE-p3'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'linux': { '_os': ('none', 'none', 'linux'),
|
||||
'_host': ('triplet', 'required', 'x86_64-linux-gnu'),
|
||||
'_host_vendor': ('none', 'none', 'gnu'),
|
||||
'_host_os': ('none', 'none', 'linux'),
|
||||
'_host_os_version': ('none', 'none', '4.18.0-16'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'netbsd': { '_os': ('none', 'none', 'netbsd'),
|
||||
'_host': ('triplet', 'required', 'x86_64-netbsd8.0'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'netbsd'),
|
||||
'_host_os_version': ('none', 'none', '8.0'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'solaris': { '_os': ('none', 'none', 'solaris'),
|
||||
'_host': ('triplet', 'required', 'x86_64-pc-solaris2'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'solaris'),
|
||||
'_host_os_version': ('none', 'none', '2'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'win32': { '_os': ('none', 'none', 'win32'),
|
||||
'_windows_os': ('none', 'none', 'mingw32'),
|
||||
'_host': ('triplet', 'required', 'x86_64-w64-mingw32'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'win32'),
|
||||
'_host_os_version': ('none', 'none', '10'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'cygwin': { '_os': ('none', 'none', 'win32'),
|
||||
'_windows_os': ('none', 'none', 'cygwin'),
|
||||
'_host': ('triplet', 'required', 'x86_64-w64-cygwin'),
|
||||
'_host_vendor': ('none', 'none', 'microsoft'),
|
||||
'_host_os': ('none', 'none', 'win32'),
|
||||
'_host_os_version': ('none', 'none', '10'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
}
|
||||
|
||||
class log_capture(object):
|
||||
def __init__(self):
|
||||
self.log = []
|
||||
log.capture = self.capture
|
||||
|
||||
def __str__(self):
|
||||
return os.linesep.join(self.log)
|
||||
|
||||
def capture(self, text):
|
||||
self.log += [l for l in text.replace(chr(13), '').splitlines()]
|
||||
|
||||
def get(self):
|
||||
return self.log
|
||||
|
||||
def clear(self):
|
||||
self.log = []
|
||||
|
||||
#
|
||||
# A skinny options command line class to get the configs to load.
|
||||
#
|
||||
class options(object):
|
||||
def __init__(self, argv, argopts, defaults):
|
||||
command_path = path.dirname(path.abspath(argv[1]))
|
||||
if len(command_path) == 0:
|
||||
command_path = '.'
|
||||
self.command_path = command_path
|
||||
self.command_name = path.basename(argv[0])
|
||||
extras = ['--dry-run',
|
||||
'--with-download',
|
||||
'--quiet',
|
||||
'--without-log',
|
||||
'--without-error-report',
|
||||
'--without-release-url']
|
||||
self.argv = argv
|
||||
self.args = argv[1:] + extras
|
||||
self.defaults = macros.macros(name = defaults,
|
||||
sbdir = command_path)
|
||||
self.load_overrides()
|
||||
self.opts = { 'params' : extras }
|
||||
self.sb_git()
|
||||
self.rtems_bsp()
|
||||
if argopts.download_dir is not None:
|
||||
self.defaults['_sourcedir'] = ('dir',
|
||||
'optional',
|
||||
path.abspath(argopts.download_dir))
|
||||
self.defaults['_patchdir'] = ('dir',
|
||||
'optional',
|
||||
path.abspath(argopts.download_dir))
|
||||
|
||||
def load_overrides(self):
|
||||
overrides = None
|
||||
if os.name == 'nt':
|
||||
try:
|
||||
import windows
|
||||
overrides = windows.load()
|
||||
host_windows = True
|
||||
host_posix = False
|
||||
except:
|
||||
raise error.general('failed to load Windows host support')
|
||||
elif os.name == 'posix':
|
||||
uname = os.uname()
|
||||
try:
|
||||
if uname[0].startswith('MINGW64_NT'):
|
||||
import windows
|
||||
overrides = windows.load()
|
||||
host_windows = True
|
||||
elif uname[0].startswith('CYGWIN_NT'):
|
||||
import windows
|
||||
overrides = windows.load()
|
||||
elif uname[0] == 'Darwin':
|
||||
import darwin
|
||||
overrides = darwin.load()
|
||||
elif uname[0] == 'FreeBSD':
|
||||
import freebsd
|
||||
overrides = freebsd.load()
|
||||
elif uname[0] == 'NetBSD':
|
||||
import netbsd
|
||||
overrides = netbsd.load()
|
||||
elif uname[0] == 'Linux':
|
||||
import linux
|
||||
overrides = linux.load()
|
||||
elif uname[0] == 'SunOS':
|
||||
import solaris
|
||||
overrides = solaris.load()
|
||||
except error.general as ge:
|
||||
raise error.general('failed to load %s host support: %s' % (uname[0], ge))
|
||||
except:
|
||||
raise error.general('failed to load %s host support' % (uname[0]))
|
||||
else:
|
||||
raise error.general('unsupported host type; please add')
|
||||
if overrides is None:
|
||||
raise error.general('no hosts defaults found; please add')
|
||||
for k in overrides:
|
||||
self.defaults[k] = overrides[k]
|
||||
|
||||
def parse_args(self, arg, error = True, extra = True):
|
||||
for a in range(0, len(self.args)):
|
||||
if self.args[a].startswith(arg):
|
||||
lhs = None
|
||||
rhs = None
|
||||
if '=' in self.args[a]:
|
||||
eqs = self.args[a].split('=')
|
||||
lhs = eqs[0]
|
||||
if len(eqs) > 2:
|
||||
rhs = '='.join(eqs[1:])
|
||||
else:
|
||||
rhs = eqs[1]
|
||||
elif extra:
|
||||
lhs = self.args[a]
|
||||
a += 1
|
||||
if a < len(self.args):
|
||||
rhs = self.args[a]
|
||||
return [lhs, rhs]
|
||||
a += 1
|
||||
return None
|
||||
|
||||
def rtems_bsp(self):
|
||||
self.defaults['rtems_version'] = str(version.version())
|
||||
self.defaults['_target'] = 'arch-rtems'
|
||||
self.defaults['rtems_host'] = 'rtems-arch'
|
||||
self.defaults['with_rtems_bsp'] = 'rtems-bsp'
|
||||
|
||||
def sb_git(self):
|
||||
repo = git.repo(self.defaults.expand('%{_sbdir}'), self)
|
||||
repo_mail = None
|
||||
if repo.valid():
|
||||
repo_valid = '1'
|
||||
repo_head = repo.head()
|
||||
repo_clean = not repo.dirty()
|
||||
repo_remotes = '%{nil}'
|
||||
remotes = repo.remotes()
|
||||
if 'origin' in remotes:
|
||||
repo_remotes = '%s/origin' % (remotes['origin']['url'])
|
||||
repo_id = repo_head
|
||||
if not repo_clean:
|
||||
repo_id += '-modified'
|
||||
repo_mail = repo.email()
|
||||
else:
|
||||
repo_valid = '0'
|
||||
repo_head = '%{nil}'
|
||||
repo_clean = '%{nil}'
|
||||
repo_remotes = '%{nil}'
|
||||
repo_id = 'no-repo'
|
||||
self.defaults['_sbgit_valid'] = repo_valid
|
||||
self.defaults['_sbgit_head'] = repo_head
|
||||
self.defaults['_sbgit_clean'] = str(repo_clean)
|
||||
self.defaults['_sbgit_remotes'] = str(repo_remotes)
|
||||
self.defaults['_sbgit_id'] = repo_id
|
||||
if repo_mail is not None:
|
||||
self.defaults['_sbgit_mail'] = repo_mail
|
||||
|
||||
def get_arg(self, arg):
|
||||
if self.optargs is None or arg not in self.optargs:
|
||||
return None
|
||||
return self.parse_args(arg)
|
||||
|
||||
def with_arg(self, label, default = 'not-found'):
|
||||
# the default if there is no option for without.
|
||||
result = default
|
||||
for pre in ['with', 'without']:
|
||||
arg_str = '--%s-%s' % (pre, label)
|
||||
arg_label = '%s_%s' % (pre, label)
|
||||
arg = self.parse_args(arg_str, error = False, extra = False)
|
||||
if arg is not None:
|
||||
if arg[1] is None:
|
||||
result = 'yes'
|
||||
else:
|
||||
result = arg[1]
|
||||
break
|
||||
return [arg_label, result]
|
||||
|
||||
def dry_run(self):
|
||||
return True
|
||||
|
||||
def keep_going(self):
|
||||
return False
|
||||
|
||||
def quiet(self):
|
||||
return True
|
||||
|
||||
def no_clean(self):
|
||||
return True
|
||||
|
||||
def always_clean(self):
|
||||
return False
|
||||
|
||||
def no_install(self):
|
||||
return True
|
||||
|
||||
def download_disabled(self):
|
||||
return False
|
||||
|
||||
def disable_install(self):
|
||||
return True
|
||||
|
||||
def urls(self):
|
||||
return None
|
||||
|
||||
def info(self):
|
||||
s = ' Command Line: %s%s' % (' '.join(self.argv), os.linesep)
|
||||
s += ' Python: %s' % (sys.version.replace('\n', ''))
|
||||
return s
|
||||
|
||||
class buildset:
|
||||
"""Build a set builds a set of packages."""
|
||||
|
||||
def __init__(self, bset, _configs, opts, macros = None):
|
||||
log.trace('_bset: %s: init' % (bset))
|
||||
self.configs = _configs
|
||||
self.opts = opts
|
||||
if macros is None:
|
||||
self.macros = copy.copy(opts.defaults)
|
||||
else:
|
||||
self.macros = copy.copy(macros)
|
||||
self.macros.define('_rsb_getting_source')
|
||||
log.trace('_bset: %s: macro defaults' % (bset))
|
||||
log.trace(str(self.macros))
|
||||
self.bset = bset
|
||||
_target = self.macros.expand('%{_target}')
|
||||
if len(_target):
|
||||
pkg_prefix = _target
|
||||
else:
|
||||
pkg_prefix = self.macros.expand('%{_host}')
|
||||
self.bset_pkg = '%s-%s-set' % (pkg_prefix, self.bset)
|
||||
self.build_failure = None
|
||||
|
||||
def build_package(self, _config, _build):
|
||||
if not _build.disabled():
|
||||
_build.make()
|
||||
|
||||
def parse(self, bset):
|
||||
|
||||
#
|
||||
# Ouch, this is a copy of the setbuilder.py code.
|
||||
#
|
||||
|
||||
def _clean(line):
|
||||
line = line[0:-1]
|
||||
b = line.find('#')
|
||||
if b >= 0:
|
||||
line = line[1:b]
|
||||
return line.strip()
|
||||
|
||||
bsetname = bset
|
||||
|
||||
if not path.exists(bsetname):
|
||||
for cp in self.macros.expand('%{_configdir}').split(':'):
|
||||
configdir = path.abspath(cp)
|
||||
bsetname = path.join(configdir, bset)
|
||||
if path.exists(bsetname):
|
||||
break
|
||||
bsetname = None
|
||||
if bsetname is None:
|
||||
raise error.general('no build set file found: %s' % (bset))
|
||||
try:
|
||||
log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
|
||||
bset = open(path.host(bsetname), 'r')
|
||||
except IOError as err:
|
||||
raise error.general('error opening bset file: %s' % (bsetname))
|
||||
|
||||
configs = []
|
||||
|
||||
try:
|
||||
lc = 0
|
||||
for l in bset:
|
||||
lc += 1
|
||||
l = _clean(l)
|
||||
if len(l) == 0:
|
||||
continue
|
||||
log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
|
||||
ls = l.split()
|
||||
if ls[0][-1] == ':' and ls[0][:-1] == 'package':
|
||||
self.bset_pkg = ls[1].strip()
|
||||
self.macros['package'] = self.bset_pkg
|
||||
elif ls[0][0] == '%':
|
||||
def err(msg):
|
||||
raise error.general('%s:%d: %s' % (self.bset, lc, msg))
|
||||
if ls[0] == '%define':
|
||||
if len(ls) > 2:
|
||||
self.macros.define(ls[1].strip(),
|
||||
' '.join([f.strip() for f in ls[2:]]))
|
||||
else:
|
||||
self.macros.define(ls[1].strip())
|
||||
elif ls[0] == '%undefine':
|
||||
if len(ls) > 2:
|
||||
raise error.general('%s:%d: %undefine requires just the name' % \
|
||||
(self.bset, lc))
|
||||
self.macros.undefine(ls[1].strip())
|
||||
elif ls[0] == '%include':
|
||||
configs += self.parse(ls[1].strip())
|
||||
elif ls[0] in ['%patch', '%source']:
|
||||
sources.process(ls[0][1:], ls[1:], self.macros, err)
|
||||
elif ls[0] == '%hash':
|
||||
sources.hash(ls[1:], self.macros, err)
|
||||
else:
|
||||
l = l.strip()
|
||||
c = build.find_config(l, self.configs)
|
||||
if c is None:
|
||||
raise error.general('%s:%d: cannot find file: %s' % (self.bset, lc, l))
|
||||
configs += [c]
|
||||
except:
|
||||
bset.close()
|
||||
raise
|
||||
|
||||
bset.close()
|
||||
|
||||
return configs
|
||||
|
||||
def load(self):
|
||||
#
|
||||
# If the build set file ends with .cfg the user has passed to the
|
||||
# buildset builder a configuration so we just return it.
|
||||
#
|
||||
if self.bset.endswith('.cfg'):
|
||||
configs = [self.bset]
|
||||
else:
|
||||
exbset = self.macros.expand(self.bset)
|
||||
self.macros['_bset'] = exbset
|
||||
self.macros['_bset_tmp'] = build.short_name(exbset)
|
||||
root, ext = path.splitext(exbset)
|
||||
if exbset.endswith('.bset'):
|
||||
bset = exbset
|
||||
else:
|
||||
bset = '%s.bset' % (exbset)
|
||||
configs = self.parse(bset)
|
||||
return configs
|
||||
|
||||
def set_host_details(self, host, opts, macros):
|
||||
if host not in host_profiles:
|
||||
raise error.general('invalid host: ' + host)
|
||||
for m in host_profiles[host]:
|
||||
opts.defaults[m] = host_profiles[host][m]
|
||||
macros[m] = host_profiles[host][m]
|
||||
macros_to_copy = [('%{_build}', '%{_host}'),
|
||||
('%{_build_alias}', '%{_host_alias}'),
|
||||
('%{_build_arch}', '%{_host_arch}'),
|
||||
('%{_build_cpu}', '%{_host_cpu}'),
|
||||
('%{_build_os}', '%{_host_os}'),
|
||||
('%{_build_vendor}', '%{_host_vendor}')]
|
||||
for m in macros_to_copy:
|
||||
opts.defaults[m[0]] = opts.defaults[m[1]]
|
||||
macros[m[0]] = macros[m[1]]
|
||||
#
|
||||
# Look for a valid cc and cxx.
|
||||
#
|
||||
for cc in ['/usr/bin/cc', '/usr/bin/clang', '/usr/bin/gcc']:
|
||||
if check.check_exe(cc, cc):
|
||||
opts.defaults['__cc'] = cc
|
||||
macros['__cc'] = cc
|
||||
break
|
||||
if not macros.defined('__cc'):
|
||||
raise error.general('no valid cc found')
|
||||
for cxx in ['/usr/bin/c++', '/usr/bin/clang++', '/usr/bin/g++']:
|
||||
if check.check_exe(cxx, cxx):
|
||||
opts.defaults['__cxx'] = cxx
|
||||
macros['__cxx'] = cxx
|
||||
if not macros.defined('__cxx'):
|
||||
raise error.general('no valid c++ found')
|
||||
|
||||
def build(self, host, nesting_count = 0):
|
||||
|
||||
build_error = False
|
||||
|
||||
nesting_count += 1
|
||||
|
||||
log.trace('_bset: %s for %s: make' % (self.bset, host))
|
||||
log.notice('Build Set: %s for %s' % (self.bset, host))
|
||||
|
||||
mail_subject = '%s on %s' % (self.bset,
|
||||
self.macros.expand('%{_host}'))
|
||||
|
||||
current_path = os.environ['PATH']
|
||||
|
||||
start = datetime.datetime.now()
|
||||
|
||||
have_errors = False
|
||||
|
||||
try:
|
||||
configs = self.load()
|
||||
|
||||
log.trace('_bset: %s: configs: %s' % (self.bset, ','.join(configs)))
|
||||
|
||||
sizes_valid = False
|
||||
builds = []
|
||||
for s in range(0, len(configs)):
|
||||
b = None
|
||||
try:
|
||||
#
|
||||
# Each section of the build set gets a separate set of
|
||||
# macros so we do not contaminate one configuration with
|
||||
# another.
|
||||
#
|
||||
opts = copy.copy(self.opts)
|
||||
macros = copy.copy(self.macros)
|
||||
self.set_host_details(host, opts, macros)
|
||||
if configs[s].endswith('.bset'):
|
||||
log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75))
|
||||
bs = buildset(configs[s], self.configs, opts, macros)
|
||||
bs.build(host, nesting_count)
|
||||
del bs
|
||||
elif configs[s].endswith('.cfg'):
|
||||
log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75))
|
||||
try:
|
||||
b = build.build(configs[s],
|
||||
False,
|
||||
opts,
|
||||
macros)
|
||||
except:
|
||||
build_error = True
|
||||
raise
|
||||
self.build_package(configs[s], b)
|
||||
builds += [b]
|
||||
#
|
||||
# Dump post build macros.
|
||||
#
|
||||
log.trace('_bset: macros post-build')
|
||||
log.trace(str(macros))
|
||||
else:
|
||||
raise error.general('invalid config type: %s' % (configs[s]))
|
||||
except error.general as gerr:
|
||||
have_errors = True
|
||||
if b is not None:
|
||||
if self.build_failure is None:
|
||||
self.build_failure = b.name()
|
||||
raise
|
||||
#
|
||||
# Clear out the builds ...
|
||||
#
|
||||
for b in builds:
|
||||
del b
|
||||
except error.general as gerr:
|
||||
if not build_error:
|
||||
log.stderr(str(gerr))
|
||||
raise
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
self.build_failure = 'RSB general failure'
|
||||
raise
|
||||
finally:
|
||||
end = datetime.datetime.now()
|
||||
os.environ['PATH'] = current_path
|
||||
build_time = str(end - start)
|
||||
log.notice('Build Set: Time %s' % (build_time))
|
||||
|
||||
def list_bset_files(opts, configs):
|
||||
ext = '.bset'
|
||||
for p in configs['paths']:
|
||||
print('Examining: %s' % (os.path.relpath(p)))
|
||||
for c in configs['files']:
|
||||
if c.endswith(ext):
|
||||
print(' %s' % (c[:c.rfind('.')]))
|
||||
|
||||
def load_log(logfile):
|
||||
log.default = log.log(streams = [logfile])
|
||||
|
||||
def log_default():
|
||||
return 'rsb-log-getsource-%s.txt' % (datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))
|
||||
|
||||
def load_options(argv, argopts, defaults = '%{_sbdir}/defaults.mc'):
|
||||
opts = options(argv, argopts, defaults)
|
||||
opts.defaults['rtems_version'] = str(argopts.rtems_version)
|
||||
return opts
|
||||
|
||||
def run(args = sys.argv):
|
||||
ec = 0
|
||||
get_sources_error = True
|
||||
@ -617,22 +71,24 @@ def run(args = sys.argv):
|
||||
action = 'store_true')
|
||||
argsp.add_argument('--log', help = 'Log file.',
|
||||
type = str,
|
||||
default = log_default())
|
||||
default = simhost.log_default('getsource'))
|
||||
argsp.add_argument('--trace', help = 'Enable trace logging for debugging.',
|
||||
action = 'store_true')
|
||||
argsp.add_argument('bsets', nargs='*', help = 'Build sets.')
|
||||
|
||||
argopts = argsp.parse_args(args[2:])
|
||||
|
||||
load_log(argopts.log)
|
||||
simhost.load_log(argopts.log)
|
||||
log.notice('RTEMS Source Builder - Get Sources, %s' % (version.string()))
|
||||
log.tracing = argopts.trace
|
||||
|
||||
opts = load_options(args, argopts)
|
||||
opts = simhost.load_options(args, argopts, extras = ['--with-download'])
|
||||
configs = build.get_configs(opts)
|
||||
|
||||
if argopts.list_bsets:
|
||||
list_bset_files(opts, configs)
|
||||
if argopts.list_hosts:
|
||||
simhost.list_hosts()
|
||||
elif argopts.list_bsets:
|
||||
simhost.list_bset_files(opts, configs)
|
||||
else:
|
||||
if argopts.clean:
|
||||
if argopts.download_dir is None:
|
||||
@ -640,14 +96,23 @@ def run(args = sys.argv):
|
||||
if path.exists(argopts.download_dir):
|
||||
log.notice('Cleaning source directory: %s' % (argopts.download_dir))
|
||||
path.removeall(argopts.download_dir)
|
||||
all_bsets = simhost.get_bset_files(configs)
|
||||
if len(argopts.bsets) == 0:
|
||||
raise error.general('no build sets provided on the command line')
|
||||
for bset in argopts.bsets:
|
||||
get_sources_error = True
|
||||
b = buildset(bset, configs, opts)
|
||||
get_sources_error = False
|
||||
for host in host_profiles:
|
||||
b.build(host)
|
||||
bsets = all_bsets
|
||||
else:
|
||||
bsets = argopts.bsets
|
||||
for bset in bsets:
|
||||
b = None
|
||||
try:
|
||||
for host in simhost.profiles:
|
||||
get_sources_error = True
|
||||
b = simhost.buildset(bset, configs, opts)
|
||||
get_sources_error = False
|
||||
b.build(host)
|
||||
del b
|
||||
except error.general as gerr:
|
||||
log.stderr(str(gerr))
|
||||
log.stderr('Build FAILED')
|
||||
b = None
|
||||
except error.general as gerr:
|
||||
if get_sources_error:
|
||||
|
source-builder/sb/simhost.py (new file, 657 lines)
@@ -0,0 +1,657 @@
|
||||
#
|
||||
# RTEMS Tools Project (http://www.rtems.org/)
|
||||
# Copyright 2010-2020 Chris Johns (chrisj@rtems.org)
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of the RTEMS Tools package in 'rtems-tools'.
|
||||
#
|
||||
# Permission to use, copy, modify, and/or distribute this software for any
|
||||
# purpose with or without fee is hereby granted, provided that the above
|
||||
# copyright notice and this permission notice appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
#
|
||||
# This code builds a package compiler tool suite given a tool set. A tool
|
||||
# set lists the various tools. These are specific tool configurations.
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import copy
|
||||
import datetime
|
||||
import os
|
||||
|
||||
try:
|
||||
import build
|
||||
import check
|
||||
import error
|
||||
import git
|
||||
import log
|
||||
import macros
|
||||
import path
|
||||
import sources
|
||||
import version
|
||||
except KeyboardInterrupt:
|
||||
print('abort: user terminated', file = sys.stderr)
|
||||
sys.exit(1)
|
||||
except:
|
||||
print('error: unknown application load error', file = sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
#
|
||||
# Define host profiles so it can be simulated on another host.
|
||||
#
|
||||
profiles = {
|
||||
'darwin': { '_os': ('none', 'none', 'darwin'),
|
||||
'_host': ('triplet', 'required', 'x86_64-apple-darwin18.5.0'),
|
||||
'_host_vendor': ('none', 'none', 'apple'),
|
||||
'_host_os': ('none', 'none', 'darwin'),
|
||||
'_host_os_version': ('none', 'none', '18.5.0'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'freebsd': { '_os': ('none', 'none', 'freebsd'),
|
||||
'_host': ('triplet', 'required', 'x86_64-freebsd12.0-RELEASE-p3'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'freebsd'),
|
||||
'_host_os_version': ('none', 'none', '12.0-RELEASE-p3'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'linux': { '_os': ('none', 'none', 'linux'),
|
||||
'_host': ('triplet', 'required', 'x86_64-linux-gnu'),
|
||||
'_host_vendor': ('none', 'none', 'gnu'),
|
||||
'_host_os': ('none', 'none', 'linux'),
|
||||
'_host_os_version': ('none', 'none', '4.18.0-16'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'netbsd': { '_os': ('none', 'none', 'netbsd'),
|
||||
'_host': ('triplet', 'required', 'x86_64-netbsd8.0'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'netbsd'),
|
||||
'_host_os_version': ('none', 'none', '8.0'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'solaris': { '_os': ('none', 'none', 'solaris'),
|
||||
'_host': ('triplet', 'required', 'x86_64-pc-solaris2'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'solaris'),
|
||||
'_host_os_version': ('none', 'none', '2'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'win32': { '_os': ('none', 'none', 'win32'),
|
||||
'_windows_os': ('none', 'none', 'mingw32'),
|
||||
'_host': ('triplet', 'required', 'x86_64-w64-mingw32'),
|
||||
'_host_vendor': ('none', 'none', 'pc'),
|
||||
'_host_os': ('none', 'none', 'win32'),
|
||||
'_host_os_version': ('none', 'none', '10'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
'cygwin': { '_os': ('none', 'none', 'win32'),
|
||||
'_windows_os': ('none', 'none', 'cygwin'),
|
||||
'_host': ('triplet', 'required', 'x86_64-w64-cygwin'),
|
||||
'_host_vendor': ('none', 'none', 'microsoft'),
|
||||
'_host_os': ('none', 'none', 'win32'),
|
||||
'_host_os_version': ('none', 'none', '10'),
|
||||
'_host_cpu': ('none', 'none', 'x86_64'),
|
||||
'_host_alias': ('none', 'none', '%{nil}'),
|
||||
'_host_arch': ('none', 'none', 'x86_64'),
|
||||
'_usr': ('dir', 'optional', '/usr/local'),
|
||||
'_var': ('dir', 'optional', '/usr/local/var') },
|
||||
}
|
||||
|
||||
|
||||
class log_capture(object):
|
||||
def __init__(self):
|
||||
self.log = []
|
||||
log.capture = self.capture
|
||||
|
||||
def __str__(self):
|
||||
return os.linesep.join(self.log)
|
||||
|
||||
def capture(self, text):
|
||||
self.log += [l for l in text.replace(chr(13), '').splitlines()]
|
||||
|
||||
def get(self):
|
||||
return self.log
|
||||
|
||||
def clear(self):
|
||||
self.log = []
|
||||
|
||||
def find_bset_config(bset_config, macros):
|
||||
'''Find the build set or config file using the macro config defined path.'''
|
||||
name = bset_config
|
||||
if not path.exists(name):
|
||||
for cp in macros.expand('%{_configdir}').split(':'):
|
||||
configdir = path.abspath(cp)
|
||||
name = path.join(configdir, bset_config)
|
||||
if path.exists(name):
|
||||
break
|
||||
name = None
|
||||
if name is None:
|
||||
raise error.general('no build set file found: %s' % (bset_config))
|
||||
return name
|
||||
|
||||
#
|
||||
# A skinny options command line class to get the configs to load.
|
||||
#
|
||||
class options(object):
|
||||
def __init__(self, argv, argopts, defaults, extras):
|
||||
command_path = path.dirname(path.abspath(argv[1]))
|
||||
if len(command_path) == 0:
|
||||
command_path = '.'
|
||||
self.command_path = command_path
|
||||
self.command_name = path.basename(argv[0])
|
||||
extras += ['--dry-run',
|
||||
'--quiet',
|
||||
'--without-log',
|
||||
'--without-error-report',
|
||||
'--without-release-url']
|
||||
self.argv = argv
|
||||
self.args = argv[1:] + extras
|
||||
self.defaults = macros.macros(name = defaults,
|
||||
sbdir = command_path)
|
||||
self.load_overrides()
|
||||
self.opts = { 'params' : extras }
|
||||
self.sb_git()
|
||||
self.rtems_bsp()
|
||||
if 'download_dir' in argopts and argopts.download_dir is not None:
|
||||
self.defaults['_sourcedir'] = ('dir',
|
||||
'optional',
|
||||
path.abspath(argopts.download_dir))
|
||||
self.defaults['_patchdir'] = ('dir',
|
||||
'optional',
|
||||
path.abspath(argopts.download_dir))
|
||||
|
||||
def load_overrides(self):
|
||||
overrides = None
|
||||
if os.name == 'nt':
|
||||
try:
|
||||
import windows
|
||||
overrides = windows.load()
|
||||
host_windows = True
|
||||
host_posix = False
|
||||
except:
|
||||
raise error.general('failed to load Windows host support')
|
||||
elif os.name == 'posix':
|
||||
uname = os.uname()
|
||||
try:
|
||||
if uname[0].startswith('MINGW64_NT'):
|
||||
import windows
|
||||
overrides = windows.load()
|
||||
host_windows = True
|
||||
elif uname[0].startswith('CYGWIN_NT'):
|
||||
import windows
|
||||
overrides = windows.load()
|
||||
elif uname[0] == 'Darwin':
|
||||
import darwin
|
||||
overrides = darwin.load()
|
||||
elif uname[0] == 'FreeBSD':
|
||||
import freebsd
|
||||
overrides = freebsd.load()
|
||||
elif uname[0] == 'NetBSD':
|
||||
import netbsd
|
||||
overrides = netbsd.load()
|
||||
elif uname[0] == 'Linux':
|
||||
import linux
|
||||
overrides = linux.load()
|
||||
elif uname[0] == 'SunOS':
|
||||
import solaris
|
||||
overrides = solaris.load()
|
||||
except error.general as ge:
|
||||
raise error.general('failed to load %s host support: %s' % (uname[0], ge))
|
||||
except:
|
||||
raise error.general('failed to load %s host support' % (uname[0]))
|
||||
else:
|
||||
raise error.general('unsupported host type; please add')
|
||||
if overrides is None:
|
||||
raise error.general('no hosts defaults found; please add')
|
||||
for k in overrides:
|
||||
self.defaults[k] = overrides[k]
|
||||
|
||||
def parse_args(self, arg, error = True, extra = True):
|
||||
for a in range(0, len(self.args)):
|
||||
if self.args[a].startswith(arg):
|
||||
lhs = None
|
||||
rhs = None
|
||||
if '=' in self.args[a]:
|
||||
eqs = self.args[a].split('=')
|
||||
lhs = eqs[0]
|
||||
if len(eqs) > 2:
|
||||
rhs = '='.join(eqs[1:])
|
||||
else:
|
||||
rhs = eqs[1]
|
||||
elif extra:
|
||||
lhs = self.args[a]
|
||||
a += 1
|
||||
if a < len(self.args):
|
||||
rhs = self.args[a]
|
||||
return [lhs, rhs]
|
||||
a += 1
|
||||
return None
|
||||
|
||||
def rtems_bsp(self):
|
||||
self.defaults['rtems_version'] = str(version.version())
|
||||
self.defaults['_target'] = 'arch-rtems'
|
||||
self.defaults['rtems_host'] = 'rtems-arch'
|
||||
self.defaults['with_rtems_bsp'] = 'rtems-bsp'
|
||||
|
||||
def sb_git(self):
|
||||
repo = git.repo(self.defaults.expand('%{_sbdir}'), self)
|
||||
repo_mail = None
|
||||
if repo.valid():
|
||||
repo_valid = '1'
|
||||
repo_head = repo.head()
|
||||
repo_clean = not repo.dirty()
|
||||
repo_remotes = '%{nil}'
|
||||
remotes = repo.remotes()
|
||||
if 'origin' in remotes:
|
||||
repo_remotes = '%s/origin' % (remotes['origin']['url'])
|
||||
repo_id = repo_head
|
||||
if not repo_clean:
|
||||
repo_id += '-modified'
|
||||
repo_mail = repo.email()
|
||||
else:
|
||||
repo_valid = '0'
|
||||
repo_head = '%{nil}'
|
||||
repo_clean = '%{nil}'
|
||||
repo_remotes = '%{nil}'
|
||||
repo_id = 'no-repo'
|
||||
self.defaults['_sbgit_valid'] = repo_valid
|
||||
self.defaults['_sbgit_head'] = repo_head
|
||||
self.defaults['_sbgit_clean'] = str(repo_clean)
|
||||
self.defaults['_sbgit_remotes'] = str(repo_remotes)
|
||||
self.defaults['_sbgit_id'] = repo_id
|
||||
if repo_mail is not None:
|
||||
self.defaults['_sbgit_mail'] = repo_mail
|
||||
|
||||
def get_arg(self, arg):
|
||||
if self.optargs is None or arg not in self.optargs:
|
||||
return None
|
||||
return self.parse_args(arg)
|
||||
|
||||
def with_arg(self, label, default = 'not-found'):
|
||||
# the default if there is no option for without.
|
||||
result = default
|
||||
for pre in ['with', 'without']:
|
||||
arg_str = '--%s-%s' % (pre, label)
|
||||
arg_label = '%s_%s' % (pre, label)
|
||||
arg = self.parse_args(arg_str, error = False, extra = False)
|
||||
if arg is not None:
|
||||
if arg[1] is None:
|
||||
result = 'yes'
|
||||
else:
|
||||
result = arg[1]
|
||||
break
|
||||
return [arg_label, result]
|
||||
|
||||
def dry_run(self):
|
||||
return True
|
||||
|
||||
def keep_going(self):
|
||||
return False
|
||||
|
||||
def quiet(self):
|
||||
return True
|
||||
|
||||
def no_clean(self):
|
||||
return True
|
||||
|
||||
def always_clean(self):
|
||||
return False
|
||||
|
||||
def no_install(self):
|
||||
return True
|
||||
|
||||
def download_disabled(self):
|
||||
return False
|
||||
|
||||
def disable_install(self):
|
||||
return True
|
||||
|
||||
def urls(self):
|
||||
return None
|
||||
|
||||
def info(self):
|
||||
s = ' Command Line: %s%s' % (' '.join(self.argv), os.linesep)
|
||||
s += ' Python: %s' % (sys.version.replace('\n', ''))
|
||||
return s
|
||||
|
||||
class buildset:
|
||||
"""Build a set builds a set of packages."""
|
||||
|
||||
def __init__(self, bset, _configs, opts, macros = None):
|
||||
log.trace('_bset: %s: init' % (bset))
|
||||
self.parent = 'root'
|
||||
self._includes = []
|
||||
self._errors = []
|
||||
self.configs = _configs
|
||||
self.opts = opts
|
||||
if macros is None:
|
||||
self.macros = copy.copy(opts.defaults)
|
||||
else:
|
||||
self.macros = copy.copy(macros)
|
||||
self.macros.define('_rsb_getting_source')
|
||||
log.trace('_bset: %s: macro defaults' % (bset))
|
||||
log.trace(str(self.macros))
|
||||
self.bset = bset
|
||||
_target = self.macros.expand('%{_target}')
|
||||
if len(_target):
|
||||
pkg_prefix = _target
|
||||
else:
|
||||
pkg_prefix = self.macros.expand('%{_host}')
|
||||
self.bset_pkg = '%s-%s-set' % (pkg_prefix, self.bset)
|
||||
self.build_failure = None
|
||||
|
||||
def _add_includes(self, includes, parent = None):
|
||||
if parent is None:
|
||||
parent = self.parent
|
||||
if not isinstance(includes, list):
|
||||
includes = [includes]
|
||||
self._includes += [i + ':' + parent for i in includes]
|
||||
|
||||
def _rebase_includes(self, includes, parent):
|
||||
if not isinstance(includes, list):
|
||||
includes = [includes]
|
||||
rebased = []
|
||||
for i in includes:
|
||||
if i.split(':', 2)[1] == 'root':
|
||||
rebased += [i.split(':', 2)[0] + ':' + parent]
|
||||
else:
|
||||
rebased += [i]
|
||||
return rebased
|
||||
|
||||
def includes(self):
|
||||
return sorted(list(set(self._includes)))
|
||||
|
||||
def errors(self):
|
||||
return sorted(list(set(self._errors)))
|
||||
|
||||
def build_package(self, _config, _build):
|
||||
if not _build.disabled():
|
||||
_build.make()
|
||||
|
||||
def parse(self, bset):
|
||||
|
||||
#
|
||||
# Ouch, this is a copy of the setbuilder.py code.
|
||||
#
|
||||
|
||||
def _clean(line):
|
||||
line = line[0:-1]
|
||||
b = line.find('#')
|
||||
if b >= 0:
|
||||
line = line[1:b]
|
||||
return line.strip()
|
||||
|
||||
bsetname = find_bset_config(bset, self.macros)
|
||||
|
||||
try:
|
||||
log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
|
||||
bsetf = open(path.host(bsetname), 'r')
|
||||
except IOError as err:
|
||||
raise error.general('error opening bset file: %s' % (bsetname))
|
||||
|
||||
self._add_includes(bsetname)
|
||||
parent = self.parent
|
||||
self.parent = bsetname
|
||||
|
||||
configs = []
|
||||
|
||||
try:
|
||||
lc = 0
|
||||
for l in bsetf:
|
||||
lc += 1
|
||||
l = _clean(l)
|
||||
if len(l) == 0:
|
||||
continue
|
||||
log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
|
||||
ls = l.split()
|
||||
if ls[0][-1] == ':' and ls[0][:-1] == 'package':
|
||||
self.bset_pkg = ls[1].strip()
|
||||
self.macros['package'] = self.bset_pkg
|
||||
elif ls[0][0] == '%':
|
||||
def err(msg):
|
||||
raise error.general('%s:%d: %s' % (self.bset, lc, msg))
|
||||
if ls[0] == '%define':
|
||||
if len(ls) > 2:
|
||||
self.macros.define(ls[1].strip(),
|
||||
' '.join([f.strip() for f in ls[2:]]))
|
||||
else:
|
||||
self.macros.define(ls[1].strip())
|
||||
elif ls[0] == '%undefine':
|
||||
if len(ls) > 2:
|
||||
raise error.general('%s:%d: %undefine requires just the name' \
|
||||
% (self.bset, lc))
|
||||
self.macros.undefine(ls[1].strip())
|
||||
elif ls[0] == '%include':
|
||||
configs += self.parse(ls[1].strip())
|
||||
elif ls[0] in ['%patch', '%source']:
|
||||
sources.process(ls[0][1:], ls[1:], self.macros, err)
|
||||
elif ls[0] == '%hash':
|
||||
sources.hash(ls[1:], self.macros, err)
|
||||
else:
|
||||
l = l.strip()
|
||||
c = build.find_config(l, self.configs)
|
||||
if c is None:
|
||||
raise error.general('%s:%d: cannot find file: %s'
|
||||
% (self.bset, lc, l))
|
||||
configs += [c + ':' + self.parent]
|
||||
finally:
|
||||
bsetf.close()
|
||||
self.parent = parent
|
||||
|
||||
return configs
|
||||
|
||||
def load(self):
|
||||
#
|
||||
# If the build set file ends with .cfg the user has passed to the
|
||||
# buildset builder a configuration so we just return it.
|
||||
#
|
||||
if self.bset.endswith('.cfg'):
|
||||
self._add_includes(self.bset)
|
||||
configs = [self.bset]
|
||||
else:
|
||||
exbset = self.macros.expand(self.bset)
|
||||
self.macros['_bset'] = exbset
|
||||
self.macros['_bset_tmp'] = build.short_name(exbset)
|
||||
root, ext = path.splitext(exbset)
|
||||
if exbset.endswith('.bset'):
|
||||
bset = exbset
|
||||
else:
|
||||
bset = '%s.bset' % (exbset)
|
||||
configs = self.parse(bset)
|
||||
return configs
|
||||
|
||||
def set_host_details(self, host, opts, macros):
|
||||
if host not in profiles:
|
||||
raise error.general('invalid host: ' + host)
|
||||
for m in profiles[host]:
|
||||
opts.defaults[m] = profiles[host][m]
|
||||
macros[m] = profiles[host][m]
|
||||
macros_to_copy = [('%{_build}', '%{_host}'),
|
||||
('%{_build_alias}', '%{_host_alias}'),
|
||||
('%{_build_arch}', '%{_host_arch}'),
|
||||
('%{_build_cpu}', '%{_host_cpu}'),
|
||||
('%{_build_os}', '%{_host_os}'),
|
||||
('%{_build_vendor}', '%{_host_vendor}')]
|
||||
for m in macros_to_copy:
|
||||
opts.defaults[m[0]] = opts.defaults[m[1]]
|
||||
macros[m[0]] = macros[m[1]]
|
||||
#
|
||||
# Look for a valid cc and cxx.
|
||||
#
|
||||
for cc in ['/usr/bin/cc', '/usr/bin/clang', '/usr/bin/gcc']:
|
||||
if check.check_exe(cc, cc):
|
||||
opts.defaults['__cc'] = cc
|
||||
macros['__cc'] = cc
|
||||
break
|
||||
if not macros.defined('__cc'):
|
||||
raise error.general('no valid cc found')
|
||||
for cxx in ['/usr/bin/c++', '/usr/bin/clang++', '/usr/bin/g++']:
|
||||
if check.check_exe(cxx, cxx):
|
||||
opts.defaults['__cxx'] = cxx
|
||||
macros['__cxx'] = cxx
|
||||
if not macros.defined('__cxx'):
|
||||
raise error.general('no valid c++ found')
|
||||
|
||||
def build(self, host, nesting_count = 0):
|
||||
|
||||
build_error = False
|
||||
|
||||
nesting_count += 1
|
||||
|
||||
log.trace('_bset: %s for %s: make' % (self.bset, host))
|
||||
log.notice('Build Set: %s for %s' % (self.bset, host))
|
||||
|
||||
mail_subject = '%s on %s' % (self.bset,
|
||||
self.macros.expand('%{_host}'))
|
||||
|
||||
current_path = os.environ['PATH']
|
||||
|
||||
start = datetime.datetime.now()
|
||||
|
||||
have_errors = False
|
||||
|
||||
try:
|
||||
configs = self.load()
|
||||
|
||||
log.trace('_bset: %s: configs: %s' % (self.bset, ','.join(configs)))
|
||||
|
||||
sizes_valid = False
|
||||
builds = []
|
||||
for s in range(0, len(configs)):
|
||||
bs = None
|
||||
b = None
|
||||
try:
|
||||
#
|
||||
# Each section of the build set gets a separate set of
|
||||
# macros so we do not contaminate one configuration with
|
||||
# another.
|
||||
#
|
||||
opts = copy.copy(self.opts)
|
||||
macros = copy.copy(self.macros)
|
||||
self.set_host_details(host, opts, macros)
|
||||
config, parent = configs[s].split(':', 2)
|
||||
if config.endswith('.bset'):
|
||||
log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75))
|
||||
bs = buildset(config, self.configs, opts, macros)
|
||||
bs.build(host, nesting_count)
|
||||
self._includes += \
|
||||
self._rebase_includes(bs.includes(), parent)
|
||||
del bs
|
||||
elif config.endswith('.cfg'):
|
||||
log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75))
|
||||
try:
|
||||
b = build.build(config,
|
||||
False,
|
||||
opts,
|
||||
macros)
|
||||
self._includes += \
|
||||
self._rebase_includes(b.includes(), parent)
|
||||
except:
|
||||
build_error = True
|
||||
raise
|
||||
self.build_package(config, b)
|
||||
builds += [b]
|
||||
#
|
||||
# Dump post build macros.
|
||||
#
|
||||
log.trace('_bset: macros post-build')
|
||||
log.trace(str(macros))
|
||||
else:
|
||||
raise error.general('invalid config type: %s' % (config))
|
||||
except error.general as gerr:
|
||||
have_errors = True
|
||||
if b is not None:
|
||||
if self.build_failure is None:
|
||||
self.build_failure = b.name()
|
||||
self._includes += b.includes()
|
||||
self._errors += [find_bset_config(config, opts.defaults) + ':' + parent] + self._includes
|
||||
raise
|
||||
#
|
||||
# Clear out the builds ...
|
||||
#
|
||||
for b in builds:
|
||||
del b
|
||||
except error.general as gerr:
|
||||
if not build_error:
|
||||
log.stderr(str(gerr))
|
||||
raise
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
self.build_failure = 'RSB general failure'
|
||||
raise
|
||||
finally:
|
||||
end = datetime.datetime.now()
|
||||
os.environ['PATH'] = current_path
|
||||
build_time = str(end - start)
|
||||
log.notice('Build Set: Time %s' % (build_time))
|
||||
|
||||
def list_hosts():
|
||||
hosts = sorted(profiles.keys())
|
||||
max_os_len = max(len(h) for h in hosts)
|
||||
max_host_len = max(len(profiles[h]['_host'][2]) for h in hosts)
|
||||
for h in hosts:
|
||||
print('%*s: %-*s %s' % (max_os_len, h, max_host_len,
|
||||
profiles[h]['_host'][2],
|
||||
profiles[h]['_host'][2]))
|
||||
|
||||
def get_files(configs, ext, localpath):
|
||||
files = []
|
||||
if localpath:
|
||||
for cp in configs['localpaths']:
|
||||
files += [c for c in configs[cp] if c.endswith(ext)]
|
||||
else:
|
||||
files = [c for c in configs['files'] if c.endswith(ext)]
|
||||
return files
|
||||
|
||||
def get_config_files(configs, localpath = False):
|
||||
return get_files(configs, '.cfg', localpath)
|
||||
|
||||
def get_bset_files(configs, localpath = False):
|
||||
return get_files(configs, '.bset', localpath)
|
||||
|
||||
def get_root(configs):
|
||||
return configs['root']

def list_bset_files(opts, configs):
    for p in configs['paths']:
        print('Examining: %s' % (os.path.relpath(p)))
    for b in get_bset_files(configs):
        print(' %s' % (b[:b.rfind('.')]))

def load_log(logfile):
    log.default = log.log(streams = [logfile])

def log_default(name):
    return 'rsb-log-%s-%s.txt' % (name, datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))

def load_options(argv, argopts, defaults = '%{_sbdir}/defaults.mc', extras = []):
    opts = options(argv, argopts, defaults, extras)
    opts.defaults['rtems_version'] = str(argopts.rtems_version)
    return opts
254 source-builder/sb/track.py Normal file
@@ -0,0 +1,254 @@
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2020 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

#
# This code processes a build set for each supported host and tracks the
# build set and configuration files it uses. It reports the dependency
# tree, the files used and the configuration files that are not referenced.
#

from __future__ import print_function

import argparse
import copy
import datetime
import os
import sys

try:
    import build
    import error
    import git
    import log
    import simhost
    import version
except KeyboardInterrupt:
    print('abort: user terminated', file = sys.stderr)
    sys.exit(1)
except:
    print('error: unknown application load error', file = sys.stderr)
    sys.exit(1)

def unique(l):
    return sorted(list(set(l)))

def filter_deps(deps, ext):
    rdeps = []
    for d in deps:
        ds = d.split(':', 2)
        if ds[0].endswith(ext):
            rdeps += [ds[0] + ':' + ds[1]]
    return sorted(rdeps)
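
For example, given hypothetical 'file:parent' records, filter_deps() keeps only the entries whose file part has the requested extension and returns them sorted:

deps = ['tools/rtems-gdb.cfg:sparc/rtems-sparc.bset',
        'sparc/rtems-sparc.bset:root',
        'tools/rtems-gcc.cfg:sparc/rtems-sparc.bset']
filter_deps(deps, '.cfg')
# ['tools/rtems-gcc.cfg:sparc/rtems-sparc.bset',
#  'tools/rtems-gdb.cfg:sparc/rtems-sparc.bset']
filter_deps(deps, '.bset')
# ['sparc/rtems-sparc.bset:root']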

def normalise_paths(includes, root):
    normalised = []
    for inc in unique(includes):
        config, parent = inc.split(':', 2)
        if config.startswith(root):
            config = config[len(root):]
        if parent.startswith(root):
            parent = parent[len(root):]
        normalised += [config + ':' + parent]
    return normalised
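
normalise_paths() strips the source-builder root from both halves of each record so the report is stable across checkouts; a sketch with a hypothetical root path:

root = '/opt/rsb/source-builder/'
incs = ['/opt/rsb/source-builder/config/tools/rtems-gdb-9.cfg:'
        '/opt/rsb/source-builder/config/sparc/rtems-sparc.bset']
normalise_paths(incs, root)
# ['config/tools/rtems-gdb-9.cfg:config/sparc/rtems-sparc.bset']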

def process_dependencies(includes):
    deps = {}
    incs = [i.split(':', 2) for i in includes]
    for config, parent in incs:
        if parent not in deps:
            deps[parent] = []
        for inc in incs:
            if inc[1] == parent:
                deps[parent] += [inc[0]]
    for d in deps:
        deps[d] = unique(deps[d])
    return deps
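
process_dependencies() turns the flat records into a parent-to-children map that the tree printer below walks; with the hypothetical records from the earlier sketch:

incs = ['tools/rtems-gcc.cfg:sparc/rtems-sparc.bset',
        'tools/rtems-gdb.cfg:sparc/rtems-sparc.bset',
        'sparc/rtems-sparc.bset:root']
process_dependencies(incs)
# {'sparc/rtems-sparc.bset': ['tools/rtems-gcc.cfg', 'tools/rtems-gdb.cfg'],
#  'root': ['sparc/rtems-sparc.bset']}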

def includes_str(includes):
    o = []
    deps = [i.split(':', 2) for i in includes]
    ll = max([len(d[1]) for d in deps])
    for d in deps:
        o += ['%*s %s' % (ll, d[1], d[0])]
    return o

def deps_str(deps):
    def print_node(deps, node, level = 0, prefix = '', indent = ''):
        o = []
        if node != 'root':
            level += 1
            if level == 1:
                o += ['']
            o += [prefix + '+-- ' + node]
        if node in deps:
            prefix += indent
            for c, child in enumerate(deps[node], start = 1):
                if c < len(deps[node]) and level > 1:
                    indent = '| '
                else:
                    indent = '  '
                o += print_node(deps, child, level, prefix, indent)
        return o
    return print_node(deps, 'root')
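
deps_str() renders that map as an ASCII tree rooted at the synthetic 'root' node. Roughly, for the hypothetical map above (the exact indentation follows the indent strings in print_node):

deps = {'root': ['sparc/rtems-sparc.bset'],
        'sparc/rtems-sparc.bset': ['tools/rtems-gcc.cfg', 'tools/rtems-gdb.cfg']}
print('\n'.join(deps_str(deps)))
#
# +-- sparc/rtems-sparc.bset
#   +-- tools/rtems-gcc.cfg
#   +-- tools/rtems-gdb.cfg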

def run(args = sys.argv):
    ec = 0
    output = []
    try:
        #
        # The RSB options support cannot be used because it loads the defaults
        # for the host which we cannot do here.
        #
        description = 'Track the dependencies a build set has for all hosts.'

        argsp = argparse.ArgumentParser(prog = 'sb-track',
                                        description = description)
        argsp.add_argument('--rtems-version', help = 'Set the RTEMS version.',
                           type = str,
                           default = version.version())
        argsp.add_argument('--list-hosts', help = 'List the hosts.',
                           action = 'store_true')
        argsp.add_argument('--list-bsets', help = 'List the build sets.',
                           action = 'store_true')
        argsp.add_argument('--output', help = 'Output file.',
                           type = str,
                           default = None)
        argsp.add_argument('--log', help = 'Log file.',
                           type = str,
                           default = simhost.log_default('trackdeps'))
        argsp.add_argument('--trace', help = 'Enable trace logging for debugging.',
                           action = 'store_true')
        argsp.add_argument('--not-referenced',
                           help = 'Write out the list of configuration files not referenced.',
                           action = 'store_true')
        argsp.add_argument('bsets', nargs='*', help = 'Build sets.')

        argopts = argsp.parse_args(args[2:])

        simhost.load_log(argopts.log)
        log.notice('RTEMS Source Builder - Track Dependencies, %s' % (version.string()))
        log.tracing = argopts.trace

        opts = simhost.load_options(args, argopts, extras = ['--keep-going'])
        configs = build.get_configs(opts)

        if argopts.list_hosts:
            simhost.list_hosts()
        elif argopts.list_bsets:
            simhost.list_bset_files(opts, configs)
        else:
            all_bsets = simhost.get_bset_files(configs)
            if len(argopts.bsets) == 0:
                bsets = all_bsets
            else:
                bsets = argopts.bsets
            includes = []
            errors = []
            for bset in bsets:
                b = None
                try:
                    for host in simhost.profiles:
                        b = simhost.buildset(bset, configs, opts)
                        b.build(host)
                        includes += b.includes()
                        errors += b.errors()
                        del b
                except error.general as gerr:
                    log.stderr(str(gerr))
                    log.stderr('Build FAILED')
                    if b:
                        includes += b.includes()
                        errors += b.errors()
                b = None
            root = simhost.get_root(configs)
            all_configs = simhost.get_config_files(configs, True)
            includes = normalise_paths(includes, root)
            bsets = filter_deps(includes, '.bset')
            configs = filter_deps(includes, '.cfg')
            deps_tree = deps_str(process_dependencies(bsets + configs))
            bsets = unique([b.split(':', 2)[0] for b in bsets])
            configs = unique([i.split(':', 2)[0] for i in configs])
            not_used_configs = [c for c in all_configs if c not in configs]
            if len(errors) > 0:
                errors = [e.split(':', 2)[0] for e in normalise_paths(errors, root)]
                errs = []
                for e in errors:
                    if e not in bsets + configs:
                        errs += [e]
                errors = errs
            if argopts.not_referenced:
                output = not_used_configs
            else:
                output = ['RSB Dependency Tracker',
                          '',
                          'Total buildsets: %d' % (len(all_bsets)),
                          'Total configs: %d' % (len(all_configs)),
                          '']
                if len(errors) > 0:
                    output += ['Errored File Set (%d):' % (len(errors)),
                               ''] + \
                              errors + \
                              ['']
                if len(configs) > 0:
                    output += ['Include Tree(s):',
                               ''] + \
                              deps_tree + \
                              ['']
                if len(bsets) > 0:
                    output += ['Buildsets (%d):' % (len(bsets)),
                               ''] + \
                              bsets + \
                              ['']
                if len(configs) > 0:
                    output += ['Configurations (%d):' % (len(configs)),
                               ''] + \
                              configs + \
                              ['']
                if len(not_used_configs) > 0:
                    output += ['Not referenced (%d):' % (len(not_used_configs)),
                               ''] + \
                              not_used_configs
        output = os.linesep.join(output)
        if argopts.output:
            o = open(argopts.output, "w")
            o.write(output)
            o.close()
        else:
            print()
            print(output)
    except error.general as gerr:
        log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        log.notice('abort: unknown error')
        ec = 1
    sys.exit(ec)

if __name__ == "__main__":
    run()
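
A typical invocation of the new command goes through the sb-track wrapper this commit adds; the build set name and report file names below are examples only:

# List the build sets the RSB can see.
./source-builder/sb-track --list-bsets

# Track a build set across all host profiles and write the report with the
# dependency tree and the configurations nothing references.
./source-builder/sb-track --output deps-report.txt 6/rtems-sparc

# Only list the configuration files that are not referenced.
./source-builder/sb-track --not-referenced --output not-referenced.txt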