mirror of
https://git.rtems.org/rtems-source-builder
synced 2024-10-09 07:15:10 +08:00
Add download git support.
Add support to use a git cloned repo as the source. Move the download code out of the build module and into a separate module. Add to this module support for git. Update the GCC common configuration to support using a symlinked git repo. Add checks for all languages.
This commit is contained in:
parent
b022691026
commit
649a64c0af
@ -17,10 +17,44 @@
|
||||
%define enable_cxx 1
|
||||
%endif
|
||||
|
||||
#
|
||||
# Default to Ada off.
|
||||
#
|
||||
%ifn %{defined enable_ada}
|
||||
%define enable_ada 0
|
||||
%endif
|
||||
|
||||
#
|
||||
# Default to Go off.
|
||||
#
|
||||
%ifn %{defined enable_go}
|
||||
%define enable_go 0
|
||||
%endif
|
||||
|
||||
#
|
||||
# Default to Fortran off.
|
||||
#
|
||||
%ifn %{defined enable_fortran}
|
||||
%define enable_fortran 0
|
||||
%endif
|
||||
|
||||
#
|
||||
# Default to Java off.
|
||||
#
|
||||
%ifn %{defined enable_java}
|
||||
%define enable_java 0
|
||||
%endif
|
||||
|
||||
#
|
||||
# Default to ObjC off.
|
||||
#
|
||||
%ifn %{defined enable_objc}
|
||||
%define enable_objc 0
|
||||
%endif
|
||||
|
||||
#
|
||||
# Hack to get around the ARM EABBI mess.
|
||||
#
|
||||
%ifn %{defined enable_obsolete}
|
||||
%define enable_obsolete 0
|
||||
%endif
|
||||
|
||||
|
@ -33,9 +33,10 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
#
|
||||
%prep
|
||||
# gcc and optional the g++ core if separate packages
|
||||
source_dir_0="gcc-%{gcc_version}"
|
||||
%setup -q -c -n %{name}-%{version}
|
||||
%{?source1:%setup -q -D -T -n %{name}-%{version} -a1}
|
||||
cd gcc-%{gcc_version}
|
||||
cd ${source_dir_0}
|
||||
%{?patch0:%patch0 %{?patch0_opts:%{patch0_opts}}%{!?patch0_opts:-p1}}
|
||||
%{?patch1:%patch1 %{?patch1_opts:%{patch1_opts}}%{!?patch1_opts:-p1}}
|
||||
%{?patch2:%patch2 %{?patch2_opts:%{patch2_opts}}%{!?patch2_opts:-p1}}
|
||||
@ -49,8 +50,9 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
cd ..
|
||||
|
||||
# newlib
|
||||
source_dir_10="newlib-%{newlib_version}"
|
||||
%setup -q -D -T -n %{name}-%{version} -a10
|
||||
cd newlib-%{newlib_version}
|
||||
cd ${source_dir_10}
|
||||
%{?patch10:%patch10 %{?patch10_opts:%{patch10_opts}}%{!?patch10_opts:-p1}}
|
||||
%{?patch11:%patch11 %{?patch11_opts:%{patch11_opts}}%{!?patch11_opts:-p1}}
|
||||
%{?patch12:%patch12 %{?patch12_opts:%{patch12_opts}}%{!?patch12_opts:-p1}}
|
||||
@ -64,40 +66,48 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
cd ..
|
||||
|
||||
# Link newlib into the gcc source tree
|
||||
ln -s ../newlib-%{newlib_version}/newlib gcc-%{gcc_version}
|
||||
%{__rmfile} ${source_dir_0}/newlib
|
||||
%{__ln_s} $PWD/${source_dir_10}/newlib ${source_dir_0}/newlib
|
||||
|
||||
# MPFR
|
||||
source_dir_20="mpfr-%{mpfr_version}"
|
||||
%setup -q -D -T -n %{name}-%{version} -a20
|
||||
cd mpfr-%{mpfr_version}
|
||||
cd ${source_dir_20}
|
||||
%{?patch20:%patch20 -p1}
|
||||
cd ..
|
||||
# Build MPFR one-tree style
|
||||
ln -s ../mpfr-%{mpfr_version} gcc-%{gcc_version}/mpfr
|
||||
%{__rmfile} ${source_dir_0}/mpfr
|
||||
%{__ln_s} $PWD/${source_dir_20} ${source_dir_0}/mpfr
|
||||
|
||||
# MPC
|
||||
source_dir_21="mpc-%{mpc_version}"
|
||||
%setup -q -D -T -n %{name}-%{version} -a21
|
||||
cd mpc-%{mpc_version}
|
||||
cd ${source_dir_21}
|
||||
%{?patch21:%patch21 -p1}
|
||||
cd ..
|
||||
# Build MPC one-tree style
|
||||
ln -s ../mpc-%{mpc_version} gcc-%{gcc_version}/mpc
|
||||
%{__rmfile} ${source_dir_0}/mpc
|
||||
%{__ln_s} $PWD/${source_dir_21} ${source_dir_0}/mpc
|
||||
|
||||
# GMP
|
||||
source_dir_22="gmp-%{gmp_version}"
|
||||
%setup -q -D -T -n %{name}-%{version} -a22
|
||||
cd gmp-%{gmp_version}
|
||||
cd ${source_dir_22}
|
||||
%{?patch22:%patch22 -p1}
|
||||
cd ..
|
||||
# Build GMP one-tree style
|
||||
ln -s ../gmp-%{gmp_version} gcc-%{gcc_version}/gmp
|
||||
%{__rmfile} ${source_dir_0}/gmp
|
||||
%{__ln_s} $PWD/${source_dir_22} ${source_dir_0}/gmp
|
||||
|
||||
echo "%{gcc_version_message}" > gcc-%{gcc_version}/gcc/DEV-PHASE
|
||||
echo "%{gcc_version_message}" > ${source_dir_0}/gcc/DEV-PHASE
|
||||
|
||||
# Fix timestamps
|
||||
cd gcc-%{gcc_version}
|
||||
cd ${source_dir_0}
|
||||
contrib/gcc_update --touch
|
||||
cd ..
|
||||
|
||||
%build
|
||||
# Build directory support.
|
||||
if test "%{_build}" != "%{_host}" ; then
|
||||
build_dir="build-cxc"
|
||||
else
|
||||
@ -105,10 +115,29 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
fi
|
||||
mkdir -p ${build_dir}
|
||||
cd ${build_dir}
|
||||
|
||||
# Languages.
|
||||
languages="c"
|
||||
%if %{enable_cxx}
|
||||
languages="$languages,c++"
|
||||
%endif
|
||||
%if %{enable_ada}
|
||||
languages="$languages,ada"
|
||||
%endif
|
||||
%if %{enable_go}
|
||||
languages="$languages,go"
|
||||
%endif
|
||||
%if %{enable_fortran}
|
||||
languages="$languages,fortran"
|
||||
%endif
|
||||
%if %{enable_java}
|
||||
languages="$languages,java"
|
||||
%endif
|
||||
%if %{enable_objc}
|
||||
languages="$languages,objc"
|
||||
%endif
|
||||
|
||||
# Host flags
|
||||
if test "%{_build}" != "%{_host}" ; then
|
||||
CFLAGS_FOR_BUILD="-g -O2 -Wall"
|
||||
CC="%{_host}-gcc ${SB_OPT_FLAGS}"
|
||||
@ -120,7 +149,8 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
CXX=$(echo "%{__cxx} ${SB_OPT_FLAGS}" | sed -e 's,-std=gnu99 ,,')
|
||||
fi
|
||||
export CFLAGS CFLAGS_FOR_BUILD CC CXXFLAGS_FOR_BUILD CXX
|
||||
../gcc-%{gcc_version}/configure \
|
||||
|
||||
../${source_dir_0}/configure \
|
||||
--prefix=%{_prefix} \
|
||||
--bindir=%{_bindir} \
|
||||
--exec_prefix=%{_exec_prefix} \
|
||||
@ -147,18 +177,18 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
%{?enable_obsolete:--enable-obsolete} \
|
||||
--enable-languages="$languages"
|
||||
|
||||
unset CFLAGS CFLAGS_FOR_BUILD CC CXXFLAGS_FOR_BUILD CXX
|
||||
|
||||
if test "%_host" != "%_build" ; then
|
||||
# Bug in gcc-3.2.1:
|
||||
# Somehow, gcc doesn't get syslimits.h right for Cdn-Xs
|
||||
mkdir -p gcc/include
|
||||
cp ../gcc-%{gcc_version}/gcc/gsyslimits.h gcc/include/syslimits.h
|
||||
cp ../${source_dir_0}/gcc/gsyslimits.h gcc/include/syslimits.h
|
||||
fi
|
||||
|
||||
%{__make} %{?_smp_mflags} all
|
||||
cd ..
|
||||
|
||||
unset CFLAGS CFLAGS_FOR_BUILD CC CXXFLAGS_FOR_BUILD CXX
|
||||
|
||||
%install
|
||||
%{__rmdir} $SB_BUILD_ROOT
|
||||
|
||||
@ -179,6 +209,12 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
|
||||
# Don't want libffi's man-pages
|
||||
%{__rmfile} $SB_BUILD_ROOT%{_mandir}/man3/*ffi*
|
||||
|
||||
# Clean the symlinks away incase the source is a repo
|
||||
%{__rmfile} ${source_dir_0}/newlib
|
||||
%{__rmfile} ${source_dir_0}/mpfr
|
||||
%{__rmfile} ${source_dir_0}/mpc
|
||||
%{__rmfile} ${source_dir_0}/gmp
|
||||
|
||||
%testing
|
||||
# Add testing here.
|
||||
export RUNTESTFLAGS=--target_board=%{_target}-run
|
||||
|
@ -1,6 +1,6 @@
|
||||
#
|
||||
# RTEMS Tools Project (http://www.rtems.org/)
|
||||
# Copyright 2010-2012 Chris Johns (chrisj@rtems.org)
|
||||
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of the RTEMS Tools package in 'rtems-tools'.
|
||||
@ -34,6 +34,7 @@ import urlparse
|
||||
try:
|
||||
import check
|
||||
import config
|
||||
import download
|
||||
import error
|
||||
import execute
|
||||
import log
|
||||
@ -125,115 +126,6 @@ class build:
|
||||
if not self.opts.dry_run():
|
||||
path.mkdir(mkpath)
|
||||
|
||||
def get_file(self, url, local):
|
||||
if local is None:
|
||||
raise error.general('source/patch path invalid')
|
||||
if not path.isdir(path.dirname(local)) and not self.opts.download_disabled():
|
||||
_notice(self.opts,
|
||||
'Creating source directory: %s' % (os.path.relpath(path.host(path.dirname(local)))))
|
||||
self.mkdir(path.host(path.dirname(local)))
|
||||
if not path.exists(local):
|
||||
if self.opts.download_disabled():
|
||||
raise error.general('source not found: %s' % (path.host(local)))
|
||||
#
|
||||
# Not localy found so we need to download it. Check if a URL has
|
||||
# been provided on the command line.
|
||||
#
|
||||
url_bases = self.opts.urls()
|
||||
urls = []
|
||||
if url_bases is not None:
|
||||
for base in url_bases:
|
||||
if base[-1:] != '/':
|
||||
base += '/'
|
||||
url_path = urlparse.urlsplit(url)[2]
|
||||
slash = url_path.rfind('/')
|
||||
if slash < 0:
|
||||
url_file = url_path
|
||||
else:
|
||||
url_file = url_path[slash + 1:]
|
||||
urls.append(urlparse.urljoin(base, url_file))
|
||||
urls.append(url)
|
||||
if self.opts.trace():
|
||||
print '_url:', ','.join(urls), '->', local
|
||||
for url in urls:
|
||||
#
|
||||
# Hack for GitHub.
|
||||
#
|
||||
if url.startswith('https://api.github.com'):
|
||||
url = urlparse.urljoin(url, self.config.expand('tarball/%{version}'))
|
||||
_notice(self.opts, 'download: %s -> %s' % (url, os.path.relpath(path.host(local))))
|
||||
if not self.opts.dry_run():
|
||||
failed = False
|
||||
_in = None
|
||||
_out = None
|
||||
try:
|
||||
_in = urllib2.urlopen(url)
|
||||
_out = open(path.host(local), 'wb')
|
||||
_out.write(_in.read())
|
||||
except IOError, err:
|
||||
msg = 'download: %s: error: %s' % (url, str(err))
|
||||
_notice(self.opts, msg)
|
||||
if path.exists(local):
|
||||
os.remove(path.host(local))
|
||||
failed = True
|
||||
except ValueError, err:
|
||||
msg = 'download: %s: error: %s' % (url, str(err))
|
||||
_notice(self.opts, msg)
|
||||
if path.exists(local):
|
||||
os.remove(path.host(local))
|
||||
failed = True
|
||||
except:
|
||||
msg = 'download: %s: error' % (url)
|
||||
print >> sys.stderr, msg
|
||||
if _out is not None:
|
||||
_out.close()
|
||||
raise
|
||||
if _out is not None:
|
||||
_out.close()
|
||||
if _in is not None:
|
||||
del _in
|
||||
if not failed:
|
||||
if not path.isfile(local):
|
||||
raise error.general('source is not a file: %s' % (path.host(local)))
|
||||
return
|
||||
if not self.opts.dry_run():
|
||||
raise error.general('downloading %s: all paths have failed, giving up' % (url))
|
||||
|
||||
def parse_url(self, url, pathkey):
|
||||
#
|
||||
# Split the source up into the parts we need.
|
||||
#
|
||||
source = {}
|
||||
source['url'] = url
|
||||
source['path'] = path.dirname(url)
|
||||
source['file'] = path.basename(url)
|
||||
source['name'], source['ext'] = path.splitext(source['file'])
|
||||
#
|
||||
# Get the file. Checks the local source directory first.
|
||||
#
|
||||
source['local'] = None
|
||||
for p in self.config.define(pathkey).split(':'):
|
||||
local = path.join(path.abspath(p), source['file'])
|
||||
if source['local'] is None:
|
||||
source['local'] = local
|
||||
if path.exists(local):
|
||||
source['local'] = local
|
||||
break
|
||||
#
|
||||
# Is the file compressed ?
|
||||
#
|
||||
esl = source['ext'].split('.')
|
||||
if esl[-1:][0] == 'gz':
|
||||
source['compressed'] = '%{__gzip} -dc'
|
||||
elif esl[-1:][0] == 'bz2':
|
||||
source['compressed'] = '%{__bzip2} -dc'
|
||||
elif esl[-1:][0] == 'bz2':
|
||||
source['compressed'] = '%{__zip} -u'
|
||||
elif esl[-1:][0] == 'xz':
|
||||
source['compressed'] = '%{__xz} -dc'
|
||||
source['script'] = ''
|
||||
return source
|
||||
|
||||
def source(self, package, source_tag):
|
||||
#
|
||||
# Scan the sources found in the config file for the one we are
|
||||
@ -250,9 +142,11 @@ class build:
|
||||
break
|
||||
if url is None:
|
||||
raise error.general('source tag not found: source%d' % (source_tag))
|
||||
source = self.parse_url(url, '_sourcedir')
|
||||
self.get_file(source['url'], source['local'])
|
||||
if 'compressed' in source:
|
||||
source = download.parse_url(url, '_sourcedir', self.config, self.opts)
|
||||
download.get_file(source['url'], source['local'], self.opts, self.config)
|
||||
if 'symlink' in source:
|
||||
source['script'] = '%%{__ln_s} %s ${source_dir_%d}' % (source['local'], source_tag)
|
||||
elif 'compressed' in source:
|
||||
source['script'] = source['compressed'] + ' ' + \
|
||||
source['local'] + ' | %{__tar_extract} -'
|
||||
else:
|
||||
@ -275,13 +169,13 @@ class build:
|
||||
#
|
||||
# Parse the URL first in the source builder's patch directory.
|
||||
#
|
||||
patch = self.parse_url(url, '_patchdir')
|
||||
patch = download.parse_url(url, '_patchdir', self.config, self.opts)
|
||||
#
|
||||
# If not in the source builder package check the source directory.
|
||||
#
|
||||
if not path.exists(patch['local']):
|
||||
patch = self.parse_url(url, '_patchdir')
|
||||
self.get_file(patch['url'], patch['local'])
|
||||
patch = download.parse_url(url, '_patchdir', self.config, self.opts)
|
||||
download.get_file(patch['url'], patch['local'], self.opts, self.config)
|
||||
if 'compressed' in patch:
|
||||
patch['script'] = patch['compressed'] + ' ' + patch['local']
|
||||
else:
|
||||
|
237
source-builder/sb/download.py
Normal file
237
source-builder/sb/download.py
Normal file
@ -0,0 +1,237 @@
|
||||
#
|
||||
# RTEMS Tools Project (http://www.rtems.org/)
|
||||
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of the RTEMS Tools package in 'rtems-tools'.
|
||||
#
|
||||
# Permission to use, copy, modify, and/or distribute this software for any
|
||||
# purpose with or without fee is hereby granted, provided that the above
|
||||
# copyright notice and this permission notice appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
#
|
||||
# This code builds a package given a config file. It only builds to be
|
||||
# installed not to be package unless you run a packager around this.
|
||||
#
|
||||
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
import urllib2
|
||||
import urlparse
|
||||
|
||||
import error
|
||||
import git
|
||||
import log
|
||||
import path
|
||||
|
||||
def _notice(opts, text):
|
||||
if not opts.quiet() and not log.default.has_stdout():
|
||||
print text
|
||||
log.output(text)
|
||||
log.flush()
|
||||
|
||||
def _output(opts, text):
    # Log-only output; suppressed entirely in quiet mode.
    if opts.quiet():
        return
    log.output(text)
|
||||
|
||||
def _http_parser(source, config, opts):
|
||||
#
|
||||
# Is the file compressed ?
|
||||
#
|
||||
esl = source['ext'].split('.')
|
||||
if esl[-1:][0] == 'gz':
|
||||
source['compressed'] = '%{__gzip} -dc'
|
||||
elif esl[-1:][0] == 'bz2':
|
||||
source['compressed'] = '%{__bzip2} -dc'
|
||||
elif esl[-1:][0] == 'bz2':
|
||||
source['compressed'] = '%{__zip} -u'
|
||||
elif esl[-1:][0] == 'xz':
|
||||
source['compressed'] = '%{__xz} -dc'
|
||||
|
||||
def _git_parser(source, config, opts):
    #
    # A git source is symlinked into the build tree rather than
    # unpacked, so derive the names from the URL (with any '?arg'
    # options stripped) and point the symlink at the local clone.
    #
    parts = source['url'].split('?')
    repo_url = parts[0]
    source['path'] = path.dirname(repo_url)
    source['file'] = path.basename(repo_url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(parts) > 1:
        source['args'] = parts[1:]
    local = path.join(source['local_prefix'],
                      config.expand('git'),
                      source['file'])
    source['local'] = local
    source['symlink'] = local
|
||||
|
||||
|
||||
def _file_parser(source, config, opts):
|
||||
#
|
||||
# Symlink.
|
||||
#
|
||||
source['symlink'] = source['local']
|
||||
|
||||
# Map a URL scheme prefix to its parser. Matching uses str.startswith,
# so 'http' also covers 'https' URLs.
parsers = { 'http': _http_parser,
            'ftp': _http_parser,
            'git': _git_parser,
            'file': _file_parser }

def parse_url(url, pathkey, config, opts):
    # Build the source dict for 'url': path/file/name/ext components,
    # the local cache location (first existing entry on the 'pathkey'
    # search path, else the first entry), and any scheme-specific
    # fields the matching parser adds (e.g. 'compressed', 'symlink').
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            # Default to the first search path entry.
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            # Prefer a path where the file already exists.
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            # NOTE(review): the parsers all return None, so this break
            # never fires and every matching prefix is tried — confirm
            # whether parsers were meant to return True.
            if parsers[p](source, config, opts):
                break
    return source
|
||||
|
||||
def _http_downloader(url, local, config, opts):
    # Download 'url' to 'local' over HTTP/FTP. Returns True when the
    # file is present (already cached, or fetched successfully), and
    # False on a failed attempt so the caller can try the next URL.
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    _notice(opts, 'download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            # Reads the whole download into memory before writing.
            _out.write(_in.read())
        except IOError, err:
            msg = 'download: %s: error: %s' % (url, str(err))
            _notice(opts, msg)
            # Remove any partial download so a retry starts clean.
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError, err:
            msg = 'download: %s: error: %s' % (url, str(err))
            _notice(opts, msg)
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            # Unexpected error: report, close the output and re-raise.
            msg = 'download: %s: error' % (url)
            print >> sys.stderr, msg
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
    return not failed
|
||||
|
||||
def _git_downloader(url, local, config, opts):
    # Obtain the source as a git clone under 'local'. The URL may
    # carry '?branch=<b>', '?pull', '?fetch' and '?reset[=<mode>]'
    # arguments which are applied in order after the clone check.
    # Always returns True so no further URL is tried.
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        _notice(opts, 'git: clone: %s -> %s' % (us[0], rlp))
        if not opts.dry_run():
            repo.clone(us[0], local)
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch':
            _notice(opts, 'git: checkout: %s => %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.checkout(_as[1])
        elif _as[0] == 'pull':
            _notice(opts, 'git: pull: %s' % (us[0]))
            if not opts.dry_run():
                repo.pull()
        elif _as[0] == 'fetch':
            _notice(opts, 'git: fetch: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                # NOTE(review): git.repo.fetch appears to be declared
                # with url/path parameters — confirm this no-argument
                # call matches its signature.
                repo.fetch()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            _notice(opts, 'git: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset(arg)
    return True
|
||||
|
||||
def _file_downloader(url, local, config, opts):
    # Nothing to fetch for a 'file' source: it is valid when the local
    # copy already exists or the URL names a directory to symlink.
    return path.exists(local) or path.isdir(url)
|
||||
|
||||
# Map a URL scheme prefix to its downloader. Matching uses
# str.startswith, so 'http' also covers 'https' URLs.
downloaders = { 'http': _http_downloader,
                'ftp': _http_downloader,
                'git': _git_downloader,
                'file': _file_downloader }

def get_file(url, local, opts, config):
    # Fetch 'url' into the local source/patch cache at 'local'.
    # Candidate URLs are any user-supplied base URLs first, then the
    # configured URL; each is handed to the downloader whose scheme
    # prefix matches. Raises error.general when every attempt fails.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        _notice(opts,
                'Creating source directory: %s' % (os.path.relpath(path.host(path.dirname(local)))))
        _output(opts, 'making dir: %s' % (path.host(path.dirname(local))))
        if not opts.dry_run():
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            # Re-root just the file name part of the URL onto the base.
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls.append(url)
    if opts.trace():
        print '_url:', ','.join(urls), '->', local
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
|
@ -37,7 +37,11 @@ class repo:
|
||||
|
||||
def _run(self, args, check = False):
    # Run git with 'args', capturing output. Uses the repo directory
    # as the cwd when it exists so commands apply to this repo, and
    # falls back to the process cwd otherwise (e.g. for the initial
    # clone). When 'check' is set the exit code is validated via
    # _git_exit_code. (The scraped diff showed two spawn calls fused;
    # only the cwd-aware one belongs here.)
    e = execute.capture_execution()
    if path.exists(self.path):
        cwd = self.path
    else:
        cwd = None
    exit_code, proc, output = e.spawn([self.git] + args, cwd = cwd)
    if check:
        self._git_exit_code(exit_code)
    return exit_code, output
|
||||
@ -61,28 +65,63 @@ class repo:
|
||||
raise error.general('invalid version number from git: %s' % (gvs[2]))
|
||||
return (int(vs[0]), int(vs[1]), int(vs[2]), int(vs[3]))
|
||||
|
||||
def clone(self, url, path):
    # Clone 'url' into the directory 'path'; raises error.general on
    # failure. NOTE: the 'path' parameter shadows the module-level
    # path helper within this method.
    ec, output = self._run(['clone', url, path])
    if ec != 0:
        raise error.general('clone of %s failed: %s' % (url, output))
|
||||
|
||||
def fetch(self, url = None, path = None):
    # Fetch from 'url' or, when no URL is given, from the repo's
    # default remote. Defaults were added because the download module
    # calls fetch() with no arguments; 'path' was accepted but never
    # used and is kept only for backward compatibility.
    args = ['fetch']
    if url is not None:
        args += [url]
    ec, output = self._run(args)
    if ec != 0:
        raise error.general('fetch of %s failed: %s' % (url, output))
|
||||
|
||||
def pull(self):
    # Pull the current branch from its default remote; raises
    # error.general on failure.
    ec, output = self._run(['pull'])
    if ec != 0:
        # The message referenced an undefined 'url' (NameError on the
        # failure path); report the repo path instead.
        raise error.general('pull of %s failed: %s' % (self.path, output))
|
||||
|
||||
def reset(self, args):
    # Run 'git reset' with 'args'; a plain string is treated as a
    # single argument. Raises error.general on failure.
    if isinstance(args, str):
        args = [args]
    ec, output = self._run(['reset'] + args)
    if ec != 0:
        # Was 'pull of %s' with an undefined 'url' (NameError on the
        # failure path); report a reset failure against the repo path.
        raise error.general('reset of %s failed: %s' % (self.path, output))
|
||||
|
||||
def branch(self):
    # Return the current branch name (the line 'git branch' marks
    # with '*') or None when it cannot be determined.
    ec, output = self._run(['branch'])
    if ec == 0:
        for b in output.split('\n'):
            # startswith avoids the IndexError 'b[0]' raised on the
            # empty string a trailing newline produces.
            if b.startswith('*'):
                return b[2:]
    return None
|
||||
|
||||
def checkout(self, branch = 'master'):
    # Switch the work tree to 'branch'; True when git succeeded.
    exit_code, _ = self._run(['checkout', branch])
    return exit_code == 0
|
||||
|
||||
def status(self):
    # Parse 'git status' into a dict: 'branch' plus lists of files
    # keyed by 'staged', 'unstaged' and 'untracked'. Returns an empty
    # dict when the repo directory does not exist. (The scraped diff
    # fused the unguarded and path.exists-guarded variants of this
    # body; only the guarded variant belongs here.)
    # NOTE(review): the parsing assumes the old '#'-prefixed status
    # output of pre-2.x git — confirm against the git version in use.
    _status = {}
    if path.exists(self.path):
        ec, output = self._run(['status'])
        if ec == 0:
            state = 'none'
            for l in output.split('\n'):
                if l.startswith('# On branch '):
                    _status['branch'] = l[len('# On branch '):]
                elif l.startswith('# Changes to be committed:'):
                    state = 'staged'
                elif l.startswith('# Changes not staged for commit:'):
                    state = 'unstaged'
                elif l.startswith('# Untracked files:'):
                    state = 'untracked'
                elif state != 'none' and l.startswith('#'):
                    # startswith avoids the IndexError 'l[0]' raised
                    # on the empty line a trailing newline produces.
                    if l.strip() != '#' and not l.startswith('# ('):
                        if state not in _status:
                            _status[state] = []
                        l = l[1:]
                        if ':' in l:
                            l = l.split(':')[1]
                        _status[state] += [l.strip()]
    return _status
|
||||
|
||||
def clean(self):
|
||||
@ -90,8 +129,10 @@ class repo:
|
||||
return len(_status) == 1 and 'branch' in _status
|
||||
|
||||
def valid(self):
    # A repo is valid only when its directory exists and 'git status'
    # succeeds there. (The scraped diff fused the unguarded and
    # path.exists-guarded variants; only the guarded one belongs
    # here — it lets the downloader probe a not-yet-cloned path
    # without spawning git in the wrong cwd.)
    if path.exists(self.path):
        ec, output = self._run(['status'])
        return ec == 0
    return False
|
||||
|
||||
def remotes(self):
|
||||
_remotes = {}
|
||||
|
Loading…
x
Reference in New Issue
Block a user