Add download git support.

Add support to use a git cloned repo as the source. Move the download
code out of the build module and into a separate module. Add to this
module support for git.

Update the GCC common configuration to support using a symlinked
git repo.

Add checks for all languages.
This commit is contained in:
Chris Johns
2013-04-16 14:25:34 +10:00
parent b022691026
commit 649a64c0af
5 changed files with 397 additions and 155 deletions

View File

@@ -1,6 +1,6 @@
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2012 Chris Johns (chrisj@rtems.org)
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -34,6 +34,7 @@ import urlparse
try:
import check
import config
import download
import error
import execute
import log
@@ -125,115 +126,6 @@ class build:
if not self.opts.dry_run():
path.mkdir(mkpath)
def get_file(self, url, local):
    # Fetch the file named by 'url' into the local path 'local'.
    # The local source directory is checked first; a download only
    # happens when the file is missing and downloads are enabled.
    #
    # url   : canonical URL of the source or patch file.
    # local : absolute local path the file must end up at.
    #
    # Raises error.general when no local path is given, when downloads
    # are disabled and the file is absent, or when every URL fails.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not self.opts.download_disabled():
        _notice(self.opts,
                'Creating source directory: %s' % (os.path.relpath(path.host(path.dirname(local)))))
        self.mkdir(path.host(path.dirname(local)))
    if not path.exists(local):
        if self.opts.download_disabled():
            raise error.general('source not found: %s' % (path.host(local)))
        #
        # Not locally found so we need to download it. Check if a URL has
        # been provided on the command line.
        #
        url_bases = self.opts.urls()
        urls = []
        if url_bases is not None:
            # Each user supplied base URL is joined with the file part
            # of the original URL to form an alternative location that
            # is tried before the canonical URL.
            for base in url_bases:
                if base[-1:] != '/':
                    base += '/'
                url_path = urlparse.urlsplit(url)[2]
                slash = url_path.rfind('/')
                if slash < 0:
                    url_file = url_path
                else:
                    url_file = url_path[slash + 1:]
                urls.append(urlparse.urljoin(base, url_file))
        urls.append(url)
        if self.opts.trace():
            print '_url:', ','.join(urls), '->', local
        for url in urls:
            #
            # Hack for GitHub.
            #
            if url.startswith('https://api.github.com'):
                url = urlparse.urljoin(url, self.config.expand('tarball/%{version}'))
            _notice(self.opts, 'download: %s -> %s' % (url, os.path.relpath(path.host(local))))
            if not self.opts.dry_run():
                failed = False
                _in = None
                _out = None
                try:
                    _in = urllib2.urlopen(url)
                    _out = open(path.host(local), 'wb')
                    _out.write(_in.read())
                except IOError, err:
                    # Remove any partially written file so a later
                    # attempt does not see a truncated download.
                    msg = 'download: %s: error: %s' % (url, str(err))
                    _notice(self.opts, msg)
                    if path.exists(local):
                        os.remove(path.host(local))
                    failed = True
                except ValueError, err:
                    msg = 'download: %s: error: %s' % (url, str(err))
                    _notice(self.opts, msg)
                    if path.exists(local):
                        os.remove(path.host(local))
                    failed = True
                except:
                    # Unexpected error: close the output file then
                    # re-raise to the caller.
                    msg = 'download: %s: error' % (url)
                    print >> sys.stderr, msg
                    if _out is not None:
                        _out.close()
                    raise
                if _out is not None:
                    _out.close()
                if _in is not None:
                    del _in
                if not failed:
                    if not path.isfile(local):
                        raise error.general('source is not a file: %s' % (path.host(local)))
                    # Success: the file is in place.
                    return
        if not self.opts.dry_run():
            raise error.general('downloading %s: all paths have failed, giving up' % (url))
def parse_url(self, url, pathkey):
    # Split a source or patch URL into the parts the build needs and
    # locate (or choose) the local file it maps to.
    #
    # url     : the URL of the source/patch file.
    # pathkey : config macro naming the ':' separated local search paths.
    #
    # Returns a dict with 'url', 'path', 'file', 'name', 'ext',
    # 'local', 'script' and, for known archive extensions, the
    # 'compressed' decompression command.
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first. The first
    # search path is the default when the file exists nowhere.
    #
    source['local'] = None
    for p in self.config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local'] = local
        if path.exists(local):
            source['local'] = local
            break
    #
    # Is the file compressed ?
    #
    # Fix: the original tested 'bz2' twice which left the 'zip' branch
    # unreachable; test 'zip' for the zip unpack command.
    ext = source['ext'].split('.')[-1]
    if ext == 'gz':
        source['compressed'] = '%{__gzip} -dc'
    elif ext == 'bz2':
        source['compressed'] = '%{__bzip2} -dc'
    elif ext == 'zip':
        source['compressed'] = '%{__zip} -u'
    elif ext == 'xz':
        source['compressed'] = '%{__xz} -dc'
    source['script'] = ''
    return source
def source(self, package, source_tag):
#
# Scan the sources found in the config file for the one we are
@@ -250,9 +142,11 @@ class build:
break
if url is None:
raise error.general('source tag not found: source%d' % (source_tag))
source = self.parse_url(url, '_sourcedir')
self.get_file(source['url'], source['local'])
if 'compressed' in source:
source = download.parse_url(url, '_sourcedir', self.config, self.opts)
download.get_file(source['url'], source['local'], self.opts, self.config)
if 'symlink' in source:
source['script'] = '%%{__ln_s} %s ${source_dir_%d}' % (source['local'], source_tag)
elif 'compressed' in source:
source['script'] = source['compressed'] + ' ' + \
source['local'] + ' | %{__tar_extract} -'
else:
@@ -275,13 +169,13 @@ class build:
#
# Parse the URL first in the source builder's patch directory.
#
patch = self.parse_url(url, '_patchdir')
patch = download.parse_url(url, '_patchdir', self.config, self.opts)
#
# If not in the source builder package check the source directory.
#
if not path.exists(patch['local']):
patch = self.parse_url(url, '_patchdir')
self.get_file(patch['url'], patch['local'])
patch = download.parse_url(url, '_patchdir', self.config, self.opts)
download.get_file(patch['url'], patch['local'], self.opts, self.config)
if 'compressed' in patch:
patch['script'] = patch['compressed'] + ' ' + patch['local']
else:

View File

@@ -0,0 +1,237 @@
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# This code builds a package given a config file. It only builds to be
# installed not to be package unless you run a packager around this.
#
import os
import stat
import sys
import urllib2
import urlparse
import error
import git
import log
import path
def _notice(opts, text):
if not opts.quiet() and not log.default.has_stdout():
print text
log.output(text)
log.flush()
def _output(opts, text):
    # Record 'text' in the log unless quiet mode is on.
    if opts.quiet():
        return
    log.output(text)
def _http_parser(source, config, opts):
#
# Is the file compressed ?
#
esl = source['ext'].split('.')
if esl[-1:][0] == 'gz':
source['compressed'] = '%{__gzip} -dc'
elif esl[-1:][0] == 'bz2':
source['compressed'] = '%{__bzip2} -dc'
elif esl[-1:][0] == 'bz2':
source['compressed'] = '%{__zip} -u'
elif esl[-1:][0] == 'xz':
source['compressed'] = '%{__xz} -dc'
def _git_parser(source, config, opts):
    # Complete the source record for a git URL. The URL may carry '?'
    # separated arguments (branch, pull, fetch, reset) which are kept
    # under 'args'. A git source is used via a symlink to the local
    # clone rather than being unpacked.
    parts = source['url'].split('?')
    repo_url = parts[0]
    source['path'] = path.dirname(repo_url)
    source['file'] = path.basename(repo_url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(parts) > 1:
        source['args'] = parts[1:]
    clone = path.join(source['local_prefix'],
                      config.expand('git'),
                      source['file'])
    source['local'] = clone
    source['symlink'] = clone
def _file_parser(source, config, opts):
#
# Symlink.
#
source['symlink'] = source['local']
# Map a URL scheme prefix to the parser that completes the source record.
parsers = dict(http = _http_parser,
               ftp  = _http_parser,
               git  = _git_parser,
               file = _file_parser)
def parse_url(url, pathkey, config, opts):
    # Split a source or patch URL into the parts download needs,
    # locate (or choose) the local file it maps to, then hand the
    # record to the scheme specific parser.
    #
    # url     : the URL of the source/patch file.
    # pathkey : config macro naming the ':' separated local search paths.
    # config  : configuration, used to expand macros.
    # opts    : command line options.
    #
    # Returns the source dict; 'type' is set to the matched scheme.
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first. The first
    # search path is the default when the file exists nowhere.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            # Fix: the parsers return None so the original
            # 'if parsers[p](...): break' never broke out of the loop.
            # Dispatch once to the matching parser and stop scanning.
            parsers[p](source, config, opts)
            break
    return source
def _http_downloader(url, local, config, opts):
    # Download 'url' to the file 'local' over HTTP/FTP.
    #
    # Returns True when the file is present (already cached, or
    # downloaded without error); False when the download failed.
    # Under --dry-run nothing is fetched and True is returned.
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    _notice(opts, 'download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except IOError, err:
            # Remove any partially written file so a later attempt
            # does not see a truncated download.
            msg = 'download: %s: error: %s' % (url, str(err))
            _notice(opts, msg)
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError, err:
            msg = 'download: %s: error: %s' % (url, str(err))
            _notice(opts, msg)
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            # Unexpected error: close the output file then re-raise.
            msg = 'download: %s: error' % (url)
            print >> sys.stderr, msg
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
    return not failed
def _git_downloader(url, local, config, opts):
    # Clone or update the git repository named by 'url' at 'local'.
    #
    # The URL may carry '?' separated arguments after the repo URL:
    #   branch=<name>   checkout the named branch
    #   pull            pull from the default remote
    #   fetch           fetch from the default remote
    #   reset[=<mode>]  reset, e.g. reset=hard -> 'git reset --hard'
    #
    # Always returns True; failures inside the git operations raise
    # through the repo class. Under --dry-run the git commands are
    # reported but not run.
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        _notice(opts, 'git: clone: %s -> %s' % (us[0], rlp))
        if not opts.dry_run():
            repo.clone(us[0], local)
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch':
            # NOTE(review): 'branch' with no '=<name>' would raise
            # IndexError here -- confirm configs always supply a value.
            _notice(opts, 'git: checkout: %s => %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.checkout(_as[1])
        elif _as[0] == 'pull':
            _notice(opts, 'git: pull: %s' % (us[0]))
            if not opts.dry_run():
                repo.pull()
        elif _as[0] == 'fetch':
            _notice(opts, 'git: fetch: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.fetch()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            _notice(opts, 'git: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset(arg)
    return True
def _file_downloader(url, local, config, opts):
    # A 'file' URL needs no download: succeed when the local path is
    # already present, or when the URL names a directory to link to.
    return True if path.exists(local) else path.isdir(url)
# Map a URL scheme prefix to the downloader that fetches the source.
downloaders = dict(http = _http_downloader,
                   ftp  = _http_downloader,
                   git  = _git_downloader,
                   file = _file_downloader)
def get_file(url, local, opts, config):
    # Fetch the file named by 'url' into the local path 'local' using
    # the downloader matching the URL scheme. The local file is used
    # when it already exists.
    #
    # url    : canonical URL of the source or patch file.
    # local  : absolute local path the file must end up at.
    # opts   : command line options.
    # config : configuration, used to expand macros.
    #
    # Raises error.general when no local path is given, when downloads
    # are disabled and the file is absent, or when every URL fails.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        _notice(opts,
                'Creating source directory: %s' % (os.path.relpath(path.host(path.dirname(local)))))
        _output(opts, 'making dir: %s' % (path.host(path.dirname(local))))
        if not opts.dry_run():
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        # Each user supplied base URL is joined with the file part of
        # the original URL to form an alternative location that is
        # tried before the canonical URL.
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls.append(url)
    if opts.trace():
        print '_url:', ','.join(urls), '->', local
    for url in urls:
        # Dispatch on the URL scheme prefix; the first downloader that
        # reports success ends the search.
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general('downloading %s: all paths have failed, giving up' % (url))

View File

@@ -37,7 +37,11 @@ class repo:
def _run(self, args, check = False):
    # Run the git command 'args', from inside the repo directory when
    # it exists so git operates on this repo.
    #
    # args  : list of arguments passed to the git executable.
    # check : when True raise via _git_exit_code on a non-zero exit.
    #
    # Returns (exit_code, output).
    #
    # Fix: the merge residue left two e.spawn calls so the git command
    # ran twice, once without the repo working directory; spawn once
    # with the correct cwd.
    e = execute.capture_execution()
    if path.exists(self.path):
        cwd = self.path
    else:
        cwd = None
    exit_code, proc, output = e.spawn([self.git] + args, cwd = cwd)
    if check:
        self._git_exit_code(exit_code)
    return exit_code, output
@@ -61,28 +65,63 @@ class repo:
raise error.general('invalid version number from git: %s' % (gvs[2]))
return (int(vs[0]), int(vs[1]), int(vs[2]), int(vs[3]))
def clone(self, url, path):
    # Clone the repository at 'url' into the directory 'path'.
    # Raises error.general when git reports a failure.
    exit_code, output = self._run(['clone', url, path])
    if exit_code != 0:
        raise error.general('clone of %s failed: %s' % (url, output))
def fetch(self, url = None, path = None):
    # Fetch from the repo's remote. Raises error.general on failure.
    #
    # url  : optional remote to fetch from; the configured default
    #        remote is used when omitted.
    # path : unused, kept for backward compatibility with existing
    #        callers.
    #
    # Fix: download.py calls repo.fetch() with no arguments which
    # raised TypeError against the old mandatory (url, path)
    # signature; both parameters now default to None.
    if url is None:
        args = ['fetch']
    else:
        args = ['fetch', url]
    ec, output = self._run(args)
    if ec != 0:
        what = self.path if url is None else url
        raise error.general('fetch of %s failed: %s' % (what, output))
def pull(self):
    # Pull from the repo's default remote.
    # Raises error.general when git reports a failure.
    #
    # Fix: the failure message referenced an undefined 'url' which
    # raised NameError instead of the intended error; report the repo
    # path instead.
    ec, output = self._run(['pull'])
    if ec != 0:
        raise error.general('pull of %s failed: %s' % (self.path, output))
def reset(self, args):
    # Run 'git reset' with the given argument or list of arguments.
    # Raises error.general when git reports a failure.
    #
    # Fix: the failure message said 'pull of' and referenced an
    # undefined 'url' which raised NameError; report a reset failure
    # against the repo path.
    if isinstance(args, str):
        args = [args]
    ec, output = self._run(['reset'] + args)
    if ec != 0:
        raise error.general('reset of %s failed: %s' % (self.path, output))
def branch(self):
    # Return the currently checked out branch name, or None when it
    # cannot be determined.
    #
    # Fix: splitting the output on '\n' yields a trailing empty string
    # and indexing it with b[0] raised IndexError; use startswith which
    # is False for an empty line.
    ec, output = self._run(['branch'])
    if ec == 0:
        for b in output.split('\n'):
            # git marks the current branch with a leading '* '.
            if b.startswith('*'):
                return b[2:]
    return None
def checkout(self, branch = 'master'):
    # Check out 'branch'; return True on success, False otherwise.
    exit_code, _ = self._run(['checkout', branch])
    return exit_code == 0
def status(self):
    # Parse 'git status' into a dict: 'branch' maps to the current
    # branch name and 'staged'/'unstaged'/'untracked' map to lists of
    # the files in those states. Empty when the repo does not exist.
    #
    # Fixes: the merge residue duplicated the parsing loop, with the
    # first copy running even when the repo directory does not exist;
    # keep the single existence-guarded copy. Also guard the '#'
    # detail test with startswith so an empty line (from the trailing
    # newline) cannot raise IndexError.
    _status = {}
    if path.exists(self.path):
        ec, output = self._run(['status'])
        if ec == 0:
            state = 'none'
            for l in output.split('\n'):
                if l.startswith('# On branch '):
                    _status['branch'] = l[len('# On branch '):]
                elif l.startswith('# Changes to be committed:'):
                    state = 'staged'
                elif l.startswith('# Changes not staged for commit:'):
                    state = 'unstaged'
                elif l.startswith('# Untracked files:'):
                    state = 'untracked'
                elif state != 'none' and l.startswith('#'):
                    # Detail lines under a section; skip the bare '#'
                    # separators and the '#   (use "git ..." ...)' hints.
                    if l.strip() != '#' and not l.startswith('# ('):
                        if state not in _status:
                            _status[state] = []
                        l = l[1:]
                        if ':' in l:
                            # e.g. 'modified:   file' -> 'file'
                            l = l.split(':')[1]
                        _status[state] += [l.strip()]
    return _status
def clean(self):
@@ -90,8 +129,10 @@ class repo:
return len(_status) == 1 and 'branch' in _status
def valid(self):
    # Return True when self.path exists and holds a working git repo
    # ('git status' succeeds inside it).
    #
    # Fix: the merge residue left the old unconditional status run in
    # front of the new existence-guarded version, so git ran (and the
    # method returned) before the path check; keep only the guarded
    # version.
    if path.exists(self.path):
        ec, output = self._run(['status'])
        return ec == 0
    return False
def remotes(self):
_remotes = {}