chore(eclass): bump to latest code from chromeos

Brandon Philips 2013-03-05 20:28:34 -08:00
parent 7b68e4b6fe
commit d67ee93760
12 changed files with 487 additions and 484 deletions

View File

@ -11,15 +11,80 @@
# Storage directory for Chrome OS Binaries
: ${CROS_BINARY_STORE_DIR:=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/cros-binary}
# @ECLASS-VARIABLE: CROS_BINARY_FETCH_REQUIRED
# @DESCRIPTION:
# Internal variable controlling whether cros-binary_fetch is actually run.
: ${CROS_BINARY_FETCH_REQUIRED:=true}
# @ECLASS-FUNCTION: cros-binary_add_uri
# @DESCRIPTION:
# Add a fetch uri to SRC_URI for the given uri. See
# CROS_BINARY_URI for what is accepted. Note you cannot
# intermix a non-rewritten ssh w/ (http|https|gs).
cros-binary_add_uri()
{
if [[ $# -ne 1 ]]; then
die "cros-binary_add_uri takes exactly one argument; $# given."
fi
local uri="$1"
if [[ "${uri}" =~ ^ssh://([^@]+)@git.chromium.org[^/]*/(.*)$ ]]; then
uri="gs://chromeos-binaries/HOME/${BASH_REMATCH[1]}/${BASH_REMATCH[2]}"
fi
case "${uri}" in
http://*|https://*|gs://*)
SRC_URI+=" ${uri}"
CROS_BINARY_FETCH_REQUIRED=false
CROS_BINARY_STORE_DIR="${DISTDIR}"
;;
*)
die "Unknown protocol: ${uri}"
;;
esac
RESTRICT+="mirror"
}
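# Illustrative usage sketch (hypothetical file name, not part of the eclass
# itself): an ebuild that used to point at BCS over ssh can pass the same URI
# here and have it land in the public GS bucket instead.
#
#   cros-binary_add_uri "ssh://bcs-user@git.chromium.org:/releases/blob-0.0.1.tbz2"
#   # SRC_URI now ends with:
#   #   gs://chromeos-binaries/HOME/bcs-user/releases/blob-0.0.1.tbz2
#   # and CROS_BINARY_FETCH_REQUIRED=false, so portage performs the fetch.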
# @ECLASS-FUNCTION: cros-binary_add_gs_uri
# @DESCRIPTION:
# Wrapper around cros-binary_add_uri. Invoked with 3 arguments;
# the bcs user, the overlay, and the filename (or bcs://<uri> for
# backwards compatibility).
cros-binary_add_gs_uri() {
if [[ $# -ne 3 ]]; then
die "cros-binary_add_bcs_uri needs 3 arguments; $# given."
fi
# Strip leading bcs://...
[[ "${3:0:6}" == "bcs://" ]] && set -- "${1}" "${2}" "${3#bcs://}"
cros-binary_add_uri "gs://chromeos-binaries/HOME/$1/$2/$3"
}
# @ECLASS-FUNCTION: cros-binary_add_overlay_uri
# @DESCRIPTION:
# Wrapper around cros-binary_add_gs_uri. Invoked with 2 arguments;
# the basic board target (x86-alex for example), and the filename; that filename
# is automatically prefixed with "${CATEGORY}/${PN}/" .
cros-binary_add_overlay_uri() {
if [[ $# -ne 2 ]]; then
die "cros-binary_add_bcs_uri_simple needs 2 arguments; $# given."
fi
cros-binary_add_gs_uri bcs-"$1" overlay-"$1" "${CATEGORY}/${PN}/$2"
}
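# Illustrative sketch (hypothetical package and file names): for a package
# chromeos-base/chromeos-firmware-foo, the call
#
#   cros-binary_add_overlay_uri x86-alex firmware-foo.tbz2
#
# runs through cros-binary_add_gs_uri and appends
#   gs://chromeos-binaries/HOME/bcs-x86-alex/overlay-x86-alex/chromeos-base/chromeos-firmware-foo/firmware-foo.tbz2
# to SRC_URI.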
# @ECLASS-VARIABLE: CROS_BINARY_URI
# @DESCRIPTION:
# URI for the binary may be one of:
# http://
# https://
# ssh://
# gs://
# file:// (file is relative to the files directory)
# TODO: Add "->" support if we get file collisions
# Additionally, all bcs ssh:// URLs are automatically rewritten to gs:// URIs
# in the appropriate GS bucket, although cros-binary_add_uri is the preferred
# way to do that.
# TODO: Deprecate this variable's support for ssh and http/https.
: ${CROS_BINARY_URI:=}
if [[ -n "${CROS_BINARY_URI}" ]]; then
cros-binary_add_uri "${CROS_BINARY_URI}"
fi
# @ECLASS-VARIABLE: CROS_BINARY_SUM
# @DESCRIPTION:
@ -57,6 +122,7 @@ cros-binary_check_file() {
}
cros-binary_fetch() {
${CROS_BINARY_FETCH_REQUIRED} || return 0
local uri=${CROS_BINARY_URI}
if [[ ! -z "${CROS_BINARY_LOCAL_URI_BASE}" ]]; then
uri="${CROS_BINARY_LOCAL_URI_BASE}/${CROS_BINARY_URI##*/}"
@ -85,7 +151,7 @@ cros-binary_fetch() {
if ! cros-binary_check_file; then
rm -f "${target}"
case "${scheme}" in
http|https)
http|https|ftp)
wget "${uri}" -O "${target}" -nv -nc ||
rm -f "${target}"
;;
@ -129,7 +195,12 @@ cros-binary_src_unpack() {
}
cros-binary_src_install() {
local target="${CROS_BINARY_STORE_DIR}/${CROS_BINARY_URI##*/}"
local target="${CROS_BINARY_URI##*/}"
if ${CROS_BINARY_FETCH_REQUIRED}; then
target="${CROS_BINARY_STORE_DIR}/${target}"
else
target="${DISTDIR}/${target}"
fi
local extension="${CROS_BINARY_URI##*.}"
local flags

View File

@ -42,13 +42,14 @@ ALL_BOARDS=(
daisy-drm
daisy_spring
daisy_snow
dalmore
emeraldlake2
eureka
fb1
haswell
haswell_baskingridge
haswell_wtm1
haswell_wtm2
fox
fox_baskingridge
fox_wtm1
fox_wtm2
ironhide
kiev
klang
@ -58,6 +59,7 @@ ALL_BOARDS=(
parrot
puppy
raspberrypi
ricochet
stout
stumpy
tegra2

View File

@ -76,6 +76,7 @@ IUSE="bootimage cros_ec"
# don't write RDEPEND=$DEPEND. RDEPEND should have an explicit list of what it
# needs to extract and execute the updater.
DEPEND="
>=coreos-base/vboot_reference-1.0-r230
coreos-base/vpd
dev-util/shflags
>=sys-apps/flashrom-0.9.3-r36
@ -102,6 +103,7 @@ RDEPEND="
app-arch/gzip
app-arch/sharutils
app-arch/tar
coreos-base/vboot_reference
sys-apps/util-linux"
# Check for EAPI 2+
@ -130,6 +132,7 @@ _is_in_files() {
# Parameters: URI of file "bcs://filename.tbz2", checksum of file.
# Returns: Nothing
_bcs_fetch() {
${CROS_BINARY_FETCH_REQUIRED} || return 0
local filename="${1##*://}"
local checksum="$2"
@ -174,7 +177,11 @@ _src_unpack() {
# Returns: Location of unpacked firmware as $RETURN_VALUE
_bcs_src_unpack() {
local filename="${1##*://}"
if ${CROS_BINARY_FETCH_REQUIRED}; then
_src_unpack "${CROS_BINARY_STORE_DIR}/${filename}"
else
_src_unpack "${DISTDIR}/${filename}"
fi
RETURN_VALUE="${RETURN_VALUE}"
}

View File

@ -200,6 +200,8 @@ CONFIG_HW_RANDOM=y
CONFIG_TCG_TPM=y
CONFIG_I2C=y
CONFIG_I2C_CHARDEV=m
CONFIG_I2C_MUX=y
CONFIG_I2C_ARBITRATOR_CROS_EC=y
CONFIG_I2C_S3C2410=y
CONFIG_I2C_STUB=m
CONFIG_SPI=y

View File

@ -10,7 +10,8 @@ LICENSE="GPL-2"
SLOT="0"
DEPEND="sys-apps/debianutils
initramfs? ( chromeos-base/chromeos-initramfs )
initramfs? ( coreos-base/chromeos-initramfs )
netboot_ramfs? ( coreos-base/chromeos-initramfs )
"
IUSE="-device_tree -kernel_sources"
@ -39,6 +40,7 @@ CONFIG_FRAGMENTS=(
i2cdev
initramfs
kvm
netboot_ramfs
nfs
pcserial
qmi
@ -47,6 +49,7 @@ CONFIG_FRAGMENTS=(
systemtap
tpm
vfat
x32
)
blkdevram_desc="ram block device"
@ -105,11 +108,17 @@ CONFIG_TCG_TPM=y
CONFIG_TCG_TIS=y
"
initramfs_desc="initramfs"
initramfs_config="
CONFIG_INITRAMFS_SOURCE=\"%ROOT%/var/lib/misc/initramfs.cpio.xz\"
initramfs_desc="Initramfs for factory install shim and recovery image"
initramfs_config='
CONFIG_INITRAMFS_SOURCE="%ROOT%/var/lib/misc/initramfs.cpio.xz"
CONFIG_INITRAMFS_COMPRESSION_XZ=y
"
'
netboot_ramfs_desc="Network boot install initramfs"
netboot_ramfs_config='
CONFIG_INITRAMFS_SOURCE="%ROOT%/var/lib/misc/netboot_ramfs.cpio.xz"
CONFIG_INITRAMFS_COMPRESSION_XZ=y
'
vfat_desc="vfat"
vfat_config="
@ -196,8 +205,20 @@ CONFIG_KPROBES=y
CONFIG_DEBUG_INFO=y
"
x32_desc="x32 ABI support"
x32_config="
CONFIG_X86_X32=y
"
# Add all config fragments as off by default
IUSE="${IUSE} ${CONFIG_FRAGMENTS[@]}"
REQUIRED_USE="
initramfs? ( !netboot_ramfs )
netboot_ramfs? ( !initramfs )
initramfs? ( i2cdev tpm )
netboot_ramfs? ( i2cdev tpm )
x32? ( amd64 )
"
# If an overlay has eclass overrides, but doesn't actually override this
# eclass, we'll have ECLASSDIR pointing to the active overlay's
@ -638,10 +659,14 @@ cros-kernel2_src_install() {
fi
# Check the size of kernel image and issue warning when image size is near
# the limit.
# the limit. For factory install initramfs, we don't care about kernel
# size limit as the image is downloaded over network.
local kernel_image_size=$(stat -c '%s' -L "${D}"/boot/vmlinuz)
einfo "Kernel image size is ${kernel_image_size} bytes."
if [[ ${kernel_image_size} -gt $((8 * 1024 * 1024)) ]]; then
if use netboot_ramfs; then
# No need to check kernel image size.
true
elif [[ ${kernel_image_size} -gt $((8 * 1024 * 1024)) ]]; then
die "Kernel image is larger than 8 MB."
elif [[ ${kernel_image_size} -gt $((7 * 1024 * 1024)) ]]; then
ewarn "Kernel image is larger than 7 MB. Limit is 8 MB."

View File

@ -119,7 +119,7 @@ ARRAY_VARIABLES=( CROS_WORKON_{SUBDIR,REPO,PROJECT,LOCALNAME,DESTDIR,COMMIT,TREE
# Join the tree commits to produce a unique identifier
CROS_WORKON_TREE_COMPOSITE=$(IFS="_"; echo "${CROS_WORKON_TREE[*]}")
IUSE="cros_workon_tree_$CROS_WORKON_TREE_COMPOSITE"
IUSE="cros_workon_tree_$CROS_WORKON_TREE_COMPOSITE profiling"
inherit git-2 flag-o-matic toolchain-funcs
@ -248,9 +248,26 @@ get_rev() {
GIT_DIR="$1" git rev-parse HEAD
}
using_common_mk() {
[[ -n $(find "${S}" -name common.mk -exec grep -l common-mk.git {} +) ]]
}
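# Illustrative sketch: the grep distinguishes the shared chromium common-mk
# build system from packages that merely ship their own, unrelated common.mk.
# A hypothetical caller could probe it the same way the functions below do:
#
#   if using_common_mk; then
#       einfo "building with the shared common.mk from common-mk.git"
#   fi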
cros-workon_src_unpack() {
local fetch_method # local|git
# Sanity check. We cannot have S set to WORKDIR because if/when we try
# to check out repos, git will die if it tries to check out into a dir
# that already exists. Some packages might try this when out-of-tree
# builds are enabled, and they'll work fine most of the time because
# they'll be using a full manifest and will just re-use the existing
# checkout in src/platform/*. But if the code detects that it has to
# make its own checkout, things fall apart. For out-of-tree builds,
# the initial $S doesn't even matter because it resets it below to the
# repo in src/platform/.
if [[ ${S} == "${WORKDIR}" ]]; then
die "Sorry, but \$S cannot be set to \$WORKDIR"
fi
# Set the default of CROS_WORKON_DESTDIR. This is done here because S is
# sometimes overridden in ebuilds and we cannot rely on the global state
# (and therefore ordering of eclass inherits and local ebuild overrides).
@ -392,7 +409,10 @@ cros-workon_src_unpack() {
EGIT_SOURCEDIR="${destdir[i]}"
EGIT_COMMIT="${CROS_WORKON_COMMIT[i]}"
# Clones to /var, copies src tree to the /build/<board>/tmp.
git-2_src_unpack
# Make sure git-2 does not run `unpack` for us automatically.
# The normal cros-workon flow above doesn't do it, so don't
# let git-2 do it either. http://crosbug.com/38342
EGIT_NOUNPACK=true git-2_src_unpack
# TODO(zbehan): Support multiple projects for vcsid?
done
set_vcsid "${CROS_WORKON_COMMIT[0]}"
@ -446,21 +466,21 @@ cros-workon_src_prepare() {
local out="$(cros-workon_get_build_dir)"
[[ ${CROS_WORKON_INCREMENTAL_BUILD} != "1" ]] && mkdir -p "${out}"
if [[ -e ${S}/common.mk ]] ; then
if using_common_mk ; then
: ${OUT=${out}}
export OUT
fi
}
cros-workon_src_configure() {
if [[ -e ${S}/common.mk ]] ; then
if using_common_mk ; then
# We somewhat overshoot here, but it isn't harmful,
# and catches all the packages we care about.
tc-export CC CXX AR RANLIB LD NM PKG_CONFIG
# Portage takes care of this for us.
export SPLITDEBUG=0
export MODE=$(usex profiling profiling opt)
if [[ $(type -t cros-debug-add-NDEBUG) == "function" ]] ; then
# Only run this if we've inherited cros-debug.eclass.
cros-debug-add-NDEBUG
@ -504,7 +524,7 @@ cw_emake() {
}
cros-workon_src_compile() {
if [[ -e ${S}/common.mk ]] ; then
if using_common_mk ; then
cw_emake
else
default
@ -512,7 +532,7 @@ cros-workon_src_compile() {
}
cros-workon_src_test() {
if [[ -e ${S}/common.mk ]] ; then
if using_common_mk ; then
emake \
VALGRIND=$(use_if_iuse valgrind && echo 1) \
tests
@ -521,6 +541,32 @@ cros-workon_src_test() {
fi
}
cros-workon_src_install() {
# common.mk supports coverage analysis, but only generates data when
# the tests have been run as part of the build process. Thus this code
# needs to test whether the analysis output is present before trying to
# install it.
if using_common_mk ; then
if use profiling; then
LCOV_DIR=$(find "${WORKDIR}" -name "lcov-html")
if [[ $(echo "${LCOV_DIR}" | wc -l) -gt 1 ]] ; then
die "More then one instance of lcov-html " \
"found! The instances are ${LCOV_DIR}. " \
"It is unclear which version to use, " \
"failing install."
fi
if [[ -d "${LCOV_DIR}" ]] ; then
local dir="${PN}"
[[ ${SLOT} != "0" ]] && dir+=":${SLOT}"
insinto "/usr/share/profiling/${dir}/lcov"
doins -r "${LCOV_DIR}"/*
fi
fi
else
default
fi
}
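# Illustrative sketch (hypothetical board and package names): building a
# common.mk package "foo" with the new profiling flag, e.g.
#
#   USE=profiling emerge-x86-alex foo
#
# sets MODE=profiling for the build, and any lcov-html report found under
# ${WORKDIR} is installed to /usr/share/profiling/foo/lcov/ (or
# /usr/share/profiling/foo:1/lcov/ for SLOT="1").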
cros-workon_pkg_info() {
print_quoted_array() { printf '"%s"\n' "$@"; }

View File

@ -1,10 +1,9 @@
# Copyright 1999-2013 Gentoo Foundation
# Copyright 1999-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/git-2.eclass,v 1.30 2013/01/09 17:26:55 axs Exp $
# $Header: /var/cvsroot/gentoo-x86/eclass/git-2.eclass,v 1.14 2011/08/22 04:46:31 vapier Exp $
# @ECLASS: git-2.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
# Donnie Berkholz <dberkholz@gentoo.org>
# @BLURB: Eclass for fetching and unpacking git repositories.
# @DESCRIPTION:
@ -69,9 +68,6 @@ DEPEND="dev-vcs/git"
# URI for the repository
# e.g. http://foo, git://bar
#
# It can be overriden via env using packagename_LIVE_REPO
# variable.
#
# Support multiple values:
# EGIT_REPO_URI="git://a/b.git http://c/d.git"
@ -124,7 +120,6 @@ DEPEND="dev-vcs/git"
# Default behaviour is to unpack ${A} content.
# @FUNCTION: git-2_init_variables
# @INTERNAL
# @DESCRIPTION:
# Internal function initializing all git variables.
# We define it in function scope so user can define
@ -132,8 +127,7 @@ DEPEND="dev-vcs/git"
git-2_init_variables() {
debug-print-function ${FUNCNAME} "$@"
local esc_pn liverepo livebranch livecommit
esc_pn=${PN//[-+]/_}
local x
: ${EGIT_SOURCEDIR="${S}"}
@ -145,19 +139,19 @@ git-2_init_variables() {
: ${EGIT_MASTER:=master}
liverepo=${esc_pn}_LIVE_REPO
EGIT_REPO_URI=${!liverepo:-${EGIT_REPO_URI}}
[[ ${EGIT_REPO_URI} ]] || die "EGIT_REPO_URI must have some value"
eval x="\$${PN//[-+]/_}_LIVE_REPO"
EGIT_REPO_URI=${x:-${EGIT_REPO_URI}}
[[ -z ${EGIT_REPO_URI} ]] && die "EGIT_REPO_URI must have some value"
: ${EVCS_OFFLINE:=}
livebranch=${esc_pn}_LIVE_BRANCH
[[ ${!livebranch} ]] && ewarn "QA: using \"${esc_pn}_LIVE_BRANCH\" variable, you won't get any support"
EGIT_BRANCH=${!livebranch:-${EGIT_BRANCH:-${EGIT_MASTER}}}
eval x="\$${PN//[-+]/_}_LIVE_BRANCH"
[[ -n ${x} ]] && ewarn "QA: using \"${PN//[-+]/_}_LIVE_BRANCH\" variable, you won't get any support"
EGIT_BRANCH=${x:-${EGIT_BRANCH:-${EGIT_MASTER}}}
livecommit=${esc_pn}_LIVE_COMMIT
[[ ${!livecommit} ]] && ewarn "QA: using \"${esc_pn}_LIVE_COMMIT\" variable, you won't get any support"
EGIT_COMMIT=${!livecommit:-${EGIT_COMMIT:-${EGIT_BRANCH}}}
eval x="\$${PN//[-+]/_}_LIVE_COMMIT"
[[ -n ${x} ]] && ewarn "QA: using \"${PN//[-+]/_}_LIVE_COMMIT\" variable, you won't get any support"
EGIT_COMMIT=${x:-${EGIT_COMMIT:-${EGIT_BRANCH}}}
: ${EGIT_REPACK:=}
@ -165,13 +159,12 @@ git-2_init_variables() {
}
# @FUNCTION: git-2_submodules
# @INTERNAL
# @DESCRIPTION:
# Internal function wrapping the submodule initialisation and update.
git-2_submodules() {
debug-print-function ${FUNCNAME} "$@"
if [[ ${EGIT_HAS_SUBMODULES} ]]; then
if [[ ${EVCS_OFFLINE} ]]; then
if [[ -n ${EGIT_HAS_SUBMODULES} ]]; then
if [[ -n ${EVCS_OFFLINE} ]]; then
# for submodules operations we need to be online
debug-print "${FUNCNAME}: not updating submodules in offline mode"
return 1
@ -192,7 +185,6 @@ git-2_submodules() {
}
# @FUNCTION: git-2_branch
# @INTERNAL
# @DESCRIPTION:
# Internal function that changes branch for the repo based on EGIT_COMMIT and
# EGIT_BRANCH variables.
@ -217,7 +209,6 @@ git-2_branch() {
}
# @FUNCTION: git-2_gc
# @INTERNAL
# @DESCRIPTION:
# Internal function running garbage collector on checked out tree.
git-2_gc() {
@ -225,19 +216,18 @@ git-2_gc() {
local args
if [[ ${EGIT_REPACK} || ${EGIT_PRUNE} ]]; then
pushd "${EGIT_DIR}" > /dev/null
if [[ -n ${EGIT_REPACK} || -n ${EGIT_PRUNE} ]]; then
ebegin "Garbage collecting the repository"
[[ ${EGIT_PRUNE} ]] && args='--prune'
[[ -n ${EGIT_PRUNE} ]] && args='--prune'
debug-print "${FUNCNAME}: git gc ${args}"
git gc ${args}
eend $?
popd > /dev/null
fi
popd > /dev/null
}
# @FUNCTION: git-2_prepare_storedir
# @INTERNAL
# @DESCRIPTION:
# Internal function preparing directory where we are going to store SCM
# repository.
@ -251,53 +241,29 @@ git-2_prepare_storedir() {
if [[ ! -d ${EGIT_STORE_DIR} ]]; then
debug-print "${FUNCNAME}: Creating git main storage directory"
addwrite /
mkdir -m 775 -p "${EGIT_STORE_DIR}" \
mkdir -p "${EGIT_STORE_DIR}" \
|| die "${FUNCNAME}: can't mkdir \"${EGIT_STORE_DIR}\""
fi
# allow writing into EGIT_STORE_DIR
addwrite "${EGIT_STORE_DIR}"
# calculate git.eclass store dir for data
# We will try to clone the old repository,
# and we will remove it if we don't need it anymore.
EGIT_OLD_CLONE=
if [[ ${EGIT_STORE_DIR} == */egit-src ]]; then
local old_store_dir=${EGIT_STORE_DIR/%egit-src/git-src}
local old_location=${old_store_dir}/${EGIT_PROJECT:-${PN}}
if [[ -d ${old_location} ]]; then
EGIT_OLD_CLONE=${old_location}
# required to remove the old clone
addwrite "${old_store_dir}"
fi
fi
# calculate the proper store dir for data
# If user didn't specify the EGIT_DIR, we check if he did specify
# the EGIT_PROJECT or get the folder name from EGIT_REPO_URI.
EGIT_REPO_URI=${EGIT_REPO_URI%/}
if [[ ! ${EGIT_DIR} ]]; then
if [[ ${EGIT_PROJECT} ]]; then
[[ -z ${EGIT_REPO_URI##*/} ]] && EGIT_REPO_URI="${EGIT_REPO_URI%/}"
if [[ -z ${EGIT_DIR} ]]; then
if [[ -n ${EGIT_PROJECT} ]]; then
clone_dir=${EGIT_PROJECT}
else
local strippeduri=${EGIT_REPO_URI%/.git}
clone_dir=${strippeduri##*/}
clone_dir=${EGIT_REPO_URI##*/}
fi
EGIT_DIR=${EGIT_STORE_DIR}/${clone_dir}
if [[ ${EGIT_OLD_CLONE} && ! -d ${EGIT_DIR} ]]; then
elog "${FUNCNAME}: ${CATEGORY}/${PF} will be cloned from old location."
elog "It will be necessary to rebuild the package to fetch updates."
EGIT_REPO_URI="${EGIT_OLD_CLONE} ${EGIT_REPO_URI}"
fi
fi
export EGIT_DIR=${EGIT_DIR}
debug-print "${FUNCNAME}: Storing the repo into \"${EGIT_DIR}\"."
}
# @FUNCTION: git-2_move_source
# @INTERNAL
# @DESCRIPTION:
# Internal function moving sources from the EGIT_DIR to EGIT_SOURCEDIR dir.
git-2_move_source() {
@ -313,7 +279,6 @@ git-2_move_source() {
}
# @FUNCTION: git-2_initial_clone
# @INTERNAL
# @DESCRIPTION:
# Internal function running initial clone on specified repo_uri.
git-2_initial_clone() {
@ -324,7 +289,8 @@ git-2_initial_clone() {
EGIT_REPO_URI_SELECTED=""
for repo_uri in ${EGIT_REPO_URI}; do
debug-print "${FUNCNAME}: git clone ${EGIT_LOCAL_OPTIONS} \"${repo_uri}\" \"${EGIT_DIR}\""
if git clone ${EGIT_LOCAL_OPTIONS} "${repo_uri}" "${EGIT_DIR}"; then
git clone ${EGIT_LOCAL_OPTIONS} "${repo_uri}" "${EGIT_DIR}"
if [[ $? -eq 0 ]]; then
# global variable containing the repo_name we will be using
debug-print "${FUNCNAME}: EGIT_REPO_URI_SELECTED=\"${repo_uri}\""
EGIT_REPO_URI_SELECTED="${repo_uri}"
@ -332,12 +298,12 @@ git-2_initial_clone() {
fi
done
[[ ${EGIT_REPO_URI_SELECTED} ]] \
|| die "${FUNCNAME}: can't fetch from ${EGIT_REPO_URI}"
if [[ -z ${EGIT_REPO_URI_SELECTED} ]]; then
die "${FUNCNAME}: can't fetch from ${EGIT_REPO_URI}"
fi
}
# @FUNCTION: git-2_update_repo
# @INTERNAL
# @DESCRIPTION:
# Internal function running update command on specified repo_uri.
git-2_update_repo() {
@ -345,7 +311,7 @@ git-2_update_repo() {
local repo_uri
if [[ ${EGIT_LOCAL_NONBARE} ]]; then
if [[ -n ${EGIT_LOCAL_NONBARE} ]]; then
# checkout master branch and drop all other local branches
git checkout ${EGIT_MASTER} || die "${FUNCNAME}: can't checkout master branch ${EGIT_MASTER}"
for x in $(git branch | grep -v "* ${EGIT_MASTER}" | tr '\n' ' '); do
@ -360,7 +326,8 @@ git-2_update_repo() {
git config remote.origin.url "${repo_uri}"
debug-print "${EGIT_UPDATE_CMD}"
if ${EGIT_UPDATE_CMD} > /dev/null; then
${EGIT_UPDATE_CMD} > /dev/null
if [[ $? -eq 0 ]]; then
# global variable containing the repo_name we will be using
debug-print "${FUNCNAME}: EGIT_REPO_URI_SELECTED=\"${repo_uri}\""
EGIT_REPO_URI_SELECTED="${repo_uri}"
@ -368,12 +335,12 @@ git-2_update_repo() {
fi
done
[[ ${EGIT_REPO_URI_SELECTED} ]] \
|| die "${FUNCNAME}: can't update from ${EGIT_REPO_URI}"
if [[ -z ${EGIT_REPO_URI_SELECTED} ]]; then
die "${FUNCNAME}: can't update from ${EGIT_REPO_URI}"
fi
}
# @FUNCTION: git-2_fetch
# @INTERNAL
# @DESCRIPTION:
# Internal function fetching repository from EGIT_REPO_URI and storing it in
# specified EGIT_STORE_DIR.
@ -382,7 +349,7 @@ git-2_fetch() {
local oldsha cursha repo_type
[[ ${EGIT_LOCAL_NONBARE} ]] && repo_type="non-bare repository" || repo_type="bare repository"
[[ -n ${EGIT_LOCAL_NONBARE} ]] && repo_type="non-bare repository" || repo_type="bare repository"
if [[ ! -d ${EGIT_DIR} ]]; then
git-2_initial_clone
@ -393,7 +360,7 @@ git-2_fetch() {
echo " at the commit: ${cursha}"
popd > /dev/null
elif [[ ${EVCS_OFFLINE} ]]; then
elif [[ -n ${EVCS_OFFLINE} ]]; then
pushd "${EGIT_DIR}" > /dev/null
cursha=$(git rev-parse ${UPSTREAM_BRANCH})
echo "GIT offline update -->"
@ -429,16 +396,9 @@ git-2_fetch() {
echo " branch: ${EGIT_BRANCH}"
echo " storage directory: \"${EGIT_DIR}\""
echo " checkout type: ${repo_type}"
# Cleanup after git.eclass
if [[ ${EGIT_OLD_CLONE} ]]; then
einfo "${FUNCNAME}: removing old clone in ${EGIT_OLD_CLONE}."
rm -rf "${EGIT_OLD_CLONE}"
fi
}
# @FUNCTION: git_bootstrap
# @INTERNAL
# @DESCRIPTION:
# Internal function that runs bootstrap command on unpacked source.
git-2_bootstrap() {
@ -451,7 +411,7 @@ git-2_bootstrap() {
# enviroment the package will fail if there is no update, thus in
# combination with --keep-going it would lead in not-updating
# pakcages that are up-to-date.
if [[ ${EGIT_BOOTSTRAP} ]]; then
if [[ -n ${EGIT_BOOTSTRAP} ]]; then
pushd "${EGIT_SOURCEDIR}" > /dev/null
einfo "Starting bootstrap"
@ -481,7 +441,6 @@ git-2_bootstrap() {
}
# @FUNCTION: git-2_migrate_repository
# @INTERNAL
# @DESCRIPTION:
# Internal function migrating between bare and normal checkout repository.
# This is based on usage of EGIT_SUBMODULES, at least until they
@ -491,18 +450,24 @@ git-2_bootstrap() {
git-2_migrate_repository() {
debug-print-function ${FUNCNAME} "$@"
local bare returnstate
local target returnstate
# first find out if we have submodules
# or user explicitly wants us to use non-bare clones
if ! [[ ${EGIT_HAS_SUBMODULES} || ${EGIT_NONBARE} ]]; then
bare=1
if [[ -z ${EGIT_HAS_SUBMODULES} ]]; then
target="bare"
else
target="full"
fi
# check if user didn't specify that we want non-bare repo
if [[ -n ${EGIT_NONBARE} ]]; then
target="full"
EGIT_LOCAL_NONBARE="true"
fi
# test if we already have some repo and if so find out if we have
# to migrate the data
if [[ -d ${EGIT_DIR} ]]; then
if [[ ${bare} && -d ${EGIT_DIR}/.git ]]; then
if [[ ${target} == bare && -d ${EGIT_DIR}/.git ]]; then
debug-print "${FUNCNAME}: converting \"${EGIT_DIR}\" to bare copy"
ebegin "Converting \"${EGIT_DIR}\" from non-bare to bare copy"
@ -514,7 +479,8 @@ git-2_migrate_repository() {
rm -rf "${EGIT_DIR}"
mv "${EGIT_DIR}.bare" "${EGIT_DIR}"
eend ${returnstate}
elif [[ ! ${bare} && ! -d ${EGIT_DIR}/.git ]]; then
fi
if [[ ${target} == full && ! -d ${EGIT_DIR}/.git ]]; then
debug-print "${FUNCNAME}: converting \"${EGIT_DIR}\" to non-bare copy"
ebegin "Converting \"${EGIT_DIR}\" from bare to non-bare copy"
@ -534,13 +500,12 @@ git-2_migrate_repository() {
fi
# set various options to work with both targets
if [[ ${bare} ]]; then
if [[ ${target} == bare ]]; then
debug-print "${FUNCNAME}: working in bare repository for \"${EGIT_DIR}\""
EGIT_LOCAL_OPTIONS+="${EGIT_OPTIONS} --bare"
MOVE_COMMAND="git clone -l -s -n ${EGIT_DIR// /\\ }"
EGIT_UPDATE_CMD="git fetch -t -f -u origin ${EGIT_BRANCH}:${EGIT_BRANCH}"
UPSTREAM_BRANCH="${EGIT_BRANCH}"
EGIT_LOCAL_NONBARE=
else
debug-print "${FUNCNAME}: working in bare repository for non-bare \"${EGIT_DIR}\""
MOVE_COMMAND="cp -pPR ."
@ -552,7 +517,6 @@ git-2_migrate_repository() {
}
# @FUNCTION: git-2_cleanup
# @INTERNAL
# @DESCRIPTION:
# Internal function cleaning up all the global variables
# that are not required after the unpack has been done.
@ -592,9 +556,9 @@ git-2_src_unpack() {
# Users can specify some SRC_URI and we should
# unpack the files too.
if [[ ! ${EGIT_NOUNPACK} ]]; then
if [[ -z ${EGIT_NOUNPACK} ]]; then
if has ${EAPI:-0} 0 1; then
[[ ${A} ]] && unpack ${A}
[[ -n ${A} ]] && unpack ${A}
else
default_src_unpack
fi

View File

@ -1,35 +1,31 @@
# Copyright 1999-2013 Gentoo Foundation
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/git.eclass,v 1.60 2013/01/15 11:23:43 jlec Exp $
# @DEPRECATED
# This eclass has been superseded by git-2 eclass.
# Please modify your ebuilds to use that one instead.
# $Header: /var/cvsroot/gentoo-x86/eclass/git.eclass,v 1.43 2010/02/24 01:16:35 abcd Exp $
# @ECLASS: git.eclass
# @MAINTAINER:
# Tomas Chvatal <scarabeus@gentoo.org>
# Donnie Berkholz <dberkholz@gentoo.org>
# @BLURB: Fetching and unpacking of git repositories
# @BLURB: This eclass provides functions for fetch and unpack git repositories
# @DESCRIPTION:
# The git eclass provides functions to fetch, patch and bootstrap
# software sources from git repositories and is based on the subversion eclass.
# It is necessary to define at least the EGIT_REPO_URI variable.
#
# THANKS TO:
# The eclass is based on subversion eclass.
# If you use this eclass, the ${S} is ${WORKDIR}/${P}.
# It is necessary to define the EGIT_REPO_URI variable at least.
# @THANKS TO:
# Fernando J. Pereda <ferdy@gentoo.org>
inherit eutils
EGIT="git.eclass"
# We DEPEND on a not too ancient git version
# We DEPEND on at least a bit recent git version
DEPEND=">=dev-vcs/git-1.6"
EXPORTED_FUNCTIONS="src_unpack"
case "${EAPI:-0}" in
2|3|4|5) EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS} src_prepare" ;;
3|2) EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS} src_prepare" ;;
1|0) ;;
*) die "EAPI=${EAPI} is not supported" ;;
:) DEPEND="EAPI-UNSUPPORTED" ;;
esac
EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS}
@ -38,23 +34,21 @@ EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS}
# @ECLASS-VARIABLE: EGIT_QUIET
# @DESCRIPTION:
# Set to non-empty value to supress some eclass messages.
: ${EGIT_QUIET:=${ESCM_QUIET}}
# Enables user specified verbosity for the eclass elog informations.
# The user just needs to add EGIT_QUIET="ON" to the /etc/make.conf.
: ${EGIT_QUIET:="OFF"}
# @ECLASS-VARIABLE: EGIT_STORE_DIR
# @DESCRIPTION:
# Storage directory for git sources.
# Can be redefined.
: ${EGIT_STORE_DIR:="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/git-src"}
# @ECLASS-VARIABLE: EGIT_UNPACK_DIR
# @DESCRIPTION:
# Directory to unpack git sources in.
[[ -z ${EGIT_STORE_DIR} ]] && EGIT_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/git-src"
# @ECLASS-VARIABLE: EGIT_HAS_SUBMODULES
# @DESCRIPTION:
# Set this to non-empty value to enable submodule support (slower).
: ${EGIT_HAS_SUBMODULES:=}
# Set this to "true" to enable the (slower) submodule support.
# This variable should be set before inheriting git.eclass
: ${EGIT_HAS_SUBMODULES:=false}
# @ECLASS-VARIABLE: EGIT_FETCH_CMD
# @DESCRIPTION:
@ -64,7 +58,7 @@ EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS}
# @ECLASS-VARIABLE: EGIT_UPDATE_CMD
# @DESCRIPTION:
# Git fetch command.
if [[ -n ${EGIT_HAS_SUBMODULES} ]]; then
if ${EGIT_HAS_SUBMODULES}; then
EGIT_UPDATE_CMD="git pull -f -u"
else
EGIT_UPDATE_CMD="git fetch -f -u"
@ -99,16 +93,20 @@ EGIT_DIFFSTAT_CMD="git --no-pager diff --stat"
# ssh://
eval X="\$${PN//[-+]/_}_LIVE_REPO"
if [[ ${X} = "" ]]; then
: ${EGIT_REPO_URI:=}
EGIT_REPO_URI=${EGIT_REPO_URI:=}
else
EGIT_REPO_URI="${X}"
fi
# @ECLASS-VARIABLE: EGIT_PROJECT
# @DESCRIPTION:
# Project name, it must be unique across EGIT_STORE_DIR.
# Git eclass will check out the git repository into ${EGIT_STORE_DIR}/${EGIT_PROJECT}/${EGIT_REPO_URI##*/}
# Default is ${PN}.
: ${EGIT_PROJECT:=${PN}}
# Project name of your ebuild.
# Git eclass will check out the git repository like:
# ${EGIT_STORE_DIR}/${EGIT_PROJECT}/${EGIT_REPO_URI##*/}
# so if you define EGIT_REPO_URI as http://git.collab.net/repo/git or
# http://git.collab.net/repo/git. and PN is subversion-git.
# it will check out like:
# ${EGIT_STORE_DIR}/subversion
: ${EGIT_PROJECT:=${PN/-git}}
# @ECLASS-VARIABLE: EGIT_BOOTSTRAP
# @DESCRIPTION:
@ -120,7 +118,7 @@ fi
# Set this variable to a non-empty value to disable the automatic updating of
# an GIT source tree. This is intended to be set outside the git source
# tree by users.
: ${EGIT_OFFLINE:=${ESCM_OFFLINE}}
EGIT_OFFLINE="${EGIT_OFFLINE:-${ESCM_OFFLINE}}"
# @ECLASS-VARIABLE: EGIT_PATCHES
# @DESCRIPTION:
@ -132,8 +130,8 @@ fi
# @DESCRIPTION:
# git eclass can fetch any branch in git_fetch().
eval X="\$${PN//[-+]/_}_LIVE_BRANCH"
if [[ "${X}" = "" ]]; then
: ${EGIT_BRANCH:=master}
if [[ ${X} = "" ]]; then
EGIT_BRANCH=${EGIT_BRANCH:=master}
else
EGIT_BRANCH="${X}"
fi
@ -142,7 +140,7 @@ fi
# @DESCRIPTION:
# git eclass can checkout any commit.
eval X="\$${PN//[-+]/_}_LIVE_COMMIT"
if [[ "${X}" = "" ]]; then
if [[ ${X} = "" ]]; then
: ${EGIT_COMMIT:=${EGIT_BRANCH}}
else
EGIT_COMMIT="${X}"
@ -150,25 +148,23 @@ fi
# @ECLASS-VARIABLE: EGIT_REPACK
# @DESCRIPTION:
# Set to non-empty value to repack objects to save disk space. However this can
# take a long time with VERY big repositories.
: ${EGIT_REPACK:=}
# git eclass will repack objects to save disk space. However this can take a
# long time with VERY big repositories.
: ${EGIT_REPACK:=false}
# @ECLASS-VARIABLE: EGIT_PRUNE
# @DESCRIPTION:
# Set to non-empty value to prune loose objects on each fetch. This is useful
# if upstream rewinds and rebases branches often.
: ${EGIT_PRUNE:=}
# git eclass can prune the local clone. This is useful if upstream rewinds and
# rebases branches too often.
: ${EGIT_PRUNE:=false}
# @FUNCTION: git_submodules
# @DESCRIPTION:
# Internal function wrapping the submodule initialisation and update
git_submodules() {
if [[ -n ${EGIT_HAS_SUBMODULES} ]]; then
if ${EGIT_HAS_SUBMODULES}; then
debug-print "git submodule init"
git submodule init
debug-print "git submodule sync"
git submodule sync
debug-print "git submodule update"
git submodule update
fi
@ -180,12 +176,13 @@ git_submodules() {
# EGIT_BRANCH variables.
git_branch() {
local branchname=branch-${EGIT_BRANCH} src=origin/${EGIT_BRANCH}
if [[ "${EGIT_COMMIT}" != "${EGIT_BRANCH}" ]]; then
if [[ ${EGIT_COMMIT} != ${EGIT_BRANCH} ]]; then
branchname=tree-${EGIT_COMMIT}
src=${EGIT_COMMIT}
fi
debug-print "git checkout -b ${branchname} ${src}"
git checkout -b ${branchname} ${src} &> /dev/null
git checkout -b ${branchname} ${src} || \
die "${EGIT}: Could not run git checkout -b ${branchname} ${src}"
unset branchname src
}
@ -196,15 +193,11 @@ git_branch() {
git_fetch() {
debug-print-function ${FUNCNAME} "$@"
eqawarn "git.eclass is deprecated."
eqawarn "Please update your ebuilds to use git-2 instead. For details, see"
eqawarn "http://archives.gentoo.org/gentoo-dev/msg_b7ba363cae580845819ae3501fb157e9.xml"
local GIT_DIR EGIT_CLONE_DIR oldsha1 cursha1 extra_clone_opts upstream_branch
[[ -z ${EGIT_HAS_SUBMODULES} ]] && export GIT_DIR
${EGIT_HAS_SUBMODULES} || export GIT_DIR
# choose if user wants elog or just einfo.
if [[ -n ${EGIT_QUIET} ]]; then
if [[ ${EGIT_QUIET} != OFF ]]; then
elogcmd="einfo"
else
elogcmd="elog"
@ -216,7 +209,7 @@ git_fetch() {
# folder.
#[[ ${EGIT_COMMIT} = ${EGIT_BRANCH} ]] && \
# EGIT_FETCH_CMD="${EGIT_FETCH_CMD} --depth 1"
if [[ -n ${EGIT_TREE} ]] ; then
if [[ ! -z ${EGIT_TREE} ]] ; then
EGIT_COMMIT=${EGIT_TREE}
ewarn "QA: Usage of deprecated EGIT_TREE variable detected."
ewarn "QA: Use EGIT_COMMIT variable instead."
@ -238,8 +231,13 @@ git_fetch() {
if [[ ! -d ${EGIT_STORE_DIR} ]] ; then
debug-print "${FUNCNAME}: initial clone. creating git directory"
addwrite /
mkdir -m 775 -p "${EGIT_STORE_DIR}" \
# TODO(ers): Remove this workaround once we figure out how to make
# sure the directories are owned by the user instead of by root.
local old_umask="`umask`"
umask 002
mkdir -p "${EGIT_STORE_DIR}" \
|| die "${EGIT}: can't mkdir ${EGIT_STORE_DIR}."
umask ${old_umask}
export SANDBOX_WRITE="${SANDBOX_WRITE%%:/}"
fi
@ -262,20 +260,23 @@ git_fetch() {
einfo "The ${EGIT_CLONE_DIR} was shallow copy. Refetching."
fi
# repack from bare copy to normal one
if [[ -n ${EGIT_HAS_SUBMODULES} ]] && [[ -d ${GIT_DIR} && ! -d ${GIT_DIR}/.git ]]; then
if ${EGIT_HAS_SUBMODULES} && [[ -d ${GIT_DIR} && ! -d "${GIT_DIR}/.git/" ]]; then
rm -rf "${GIT_DIR}"
einfo "The ${EGIT_CLONE_DIR} was bare copy. Refetching."
fi
if [[ -z ${EGIT_HAS_SUBMODULES} ]] && [[ -d ${GIT_DIR} && -d ${GIT_DIR}/.git ]]; then
if ! ${EGIT_HAS_SUBMODULES} && [[ -d ${GIT_DIR} && -d ${GIT_DIR}/.git ]]; then
rm -rf "${GIT_DIR}"
einfo "The ${EGIT_CLONE_DIR} was not a bare copy. Refetching."
fi
if [[ -n ${EGIT_HAS_SUBMODULES} ]]; then
if ${EGIT_HAS_SUBMODULES}; then
upstream_branch=origin/${EGIT_BRANCH}
else
upstream_branch=${EGIT_BRANCH}
extra_clone_opts=--bare
# Note: Normally clones are created using --bare, which does not fetch
# remote refs and only updates master. This is not okay. --mirror
# changes that.
extra_clone_opts=--mirror
fi
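# Hedged aside (illustration, not from the eclass): the practical difference
# is the fetch refspec the clone leaves behind, e.g. for a hypothetical repo:
#
#   git clone --bare   git://example.com/foo.git foo.git
#   # no remote.origin.fetch is configured; later fetches only update the
#   # refs named explicitly on the command line
#   git clone --mirror git://example.com/foo.git foo.git
#   # remote.origin.fetch is "+refs/*:refs/*", so every remote ref is mirrored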
if [[ ! -d ${GIT_DIR} ]] ; then
@ -284,9 +285,14 @@ git_fetch() {
${elogcmd} " repository: ${EGIT_REPO_URI}"
debug-print "${EGIT_FETCH_CMD} ${extra_clone_opts} ${EGIT_OPTIONS} \"${EGIT_REPO_URI}\" ${GIT_DIR}"
# TODO(ers): Remove this workaround once we figure out how to make
# sure the directories are owned by the user instead of by root.
local old_umask="`umask`"
umask 002
${EGIT_FETCH_CMD} ${extra_clone_opts} ${EGIT_OPTIONS} "${EGIT_REPO_URI}" ${GIT_DIR} \
|| die "${EGIT}: can't fetch from ${EGIT_REPO_URI}."
umask ${old_umask}
pushd "${GIT_DIR}" &> /dev/null
cursha1=$(git rev-parse ${upstream_branch})
${elogcmd} " at the commit: ${cursha1}"
@ -311,7 +317,7 @@ git_fetch() {
oldsha1=$(git rev-parse ${upstream_branch})
if [[ -n ${EGIT_HAS_SUBMODULES} ]]; then
if ${EGIT_HAS_SUBMODULES}; then
debug-print "${EGIT_UPDATE_CMD} ${EGIT_OPTIONS}"
# fix branching
git checkout ${EGIT_MASTER}
@ -320,22 +326,26 @@ git_fetch() {
done
${EGIT_UPDATE_CMD} ${EGIT_OPTIONS} \
|| die "${EGIT}: can't update from ${EGIT_REPO_URI}."
else
elif [[ "${EGIT_COMMIT}" = "${EGIT_BRANCH}" ]]; then
debug-print "${EGIT_UPDATE_CMD} ${EGIT_OPTIONS} origin ${EGIT_BRANCH}:${EGIT_BRANCH}"
${EGIT_UPDATE_CMD} ${EGIT_OPTIONS} origin ${EGIT_BRANCH}:${EGIT_BRANCH} \
|| die "${EGIT}: can't update from ${EGIT_REPO_URI}."
else
debug-print "${EGIT_UPDATE_CMD} ${EGIT_OPTIONS} origin"
${EGIT_UPDATE_CMD} ${EGIT_OPTIONS} origin \
|| die "${EGIT}: can't update from ${EGIT_REPO_URI}."
fi
git_submodules
cursha1=$(git rev-parse ${upstream_branch})
# write out message based on the revisions
if [[ "${oldsha1}" != "${cursha1}" ]]; then
if [[ ${oldsha1} != ${cursha1} ]]; then
${elogcmd} " updating from commit: ${oldsha1}"
${elogcmd} " to commit: ${cursha1}"
else
${elogcmd} " at the commit: ${cursha1}"
# @ECLASS-VARIABLE: LIVE_FAIL_FETCH_IF_REPO_NOT_UPDATED
# @ECLASS_VARIABLE: LIVE_FAIL_FETCH_IF_REPO_NOT_UPDATED
# @DESCRIPTION:
# If this variable is set to TRUE in make.conf or somewhere in
# enviroment the package will fail if there is no update, thus in
@ -351,11 +361,9 @@ git_fetch() {
fi
pushd "${GIT_DIR}" &> /dev/null
if [[ -n ${EGIT_REPACK} ]] || [[ -n ${EGIT_PRUNE} ]]; then
if ${EGIT_REPACK} || ${EGIT_PRUNE} ; then
ebegin "Garbage collecting the repository"
local args
[[ -n ${EGIT_PRUNE} ]] && args='--prune'
git gc ${args}
git gc $(${EGIT_PRUNE} && echo '--prune')
eend $?
fi
popd &> /dev/null
@ -364,28 +372,28 @@ git_fetch() {
export EGIT_VERSION="${cursha1}"
# log the repo state
[[ "${EGIT_COMMIT}" != "${EGIT_BRANCH}" ]] && ${elogcmd} " commit: ${EGIT_COMMIT}"
[[ ${EGIT_COMMIT} != ${EGIT_BRANCH} ]] && elog " commit: ${EGIT_COMMIT}"
${elogcmd} " branch: ${EGIT_BRANCH}"
${elogcmd} " storage directory: \"${GIT_DIR}\""
if [[ -n ${EGIT_HAS_SUBMODULES} ]]; then
if ${EGIT_HAS_SUBMODULES}; then
pushd "${GIT_DIR}" &> /dev/null
debug-print "rsync -rlpgo . \"${EGIT_UNPACK_DIR:-${S}}\""
time rsync -rlpgo . "${EGIT_UNPACK_DIR:-${S}}"
debug-print "rsync -rlpgo . \"${S}\""
time rsync -rlpgo . "${S}"
popd &> /dev/null
else
unset GIT_DIR
debug-print "git clone -l -s -n \"${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}\" \"${EGIT_UNPACK_DIR:-${S}}\""
git clone -l -s -n "${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}" "${EGIT_UNPACK_DIR:-${S}}"
debug-print "git clone -l -s -n \"${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}\" \"${S}\""
git clone -l -s -n "${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}" "${S}"
fi
pushd "${EGIT_UNPACK_DIR:-${S}}" &> /dev/null
pushd "${S}" &> /dev/null
git_branch
# submodules always reqire net (thanks to branches changing)
[[ -z ${EGIT_OFFLINE} ]] && git_submodules
[[ -n ${EGIT_OFFLINE} ]] || git_submodules
popd &> /dev/null
echo ">>> Unpacked to ${EGIT_UNPACK_DIR:-${S}}"
echo ">>> Unpacked to ${S}"
}
# @FUNCTION: git_bootstrap
@ -428,19 +436,19 @@ git_bootstrap() {
# @FUNCTION: git_apply_patches
# @DESCRIPTION:
# Apply patches from EGIT_PATCHES bash array.
# Preferred is using the variable as bash array but for now it allows to write
# Preffered is using the variable as bash array but for now it allows to write
# it also as normal space separated string list. (This part of code should be
# removed when all ebuilds get converted on bash array).
git_apply_patches() {
debug-print-function ${FUNCNAME} "$@"
pushd "${EGIT_UNPACK_DIR:-${S}}" > /dev/null
pushd "${S}" > /dev/null
if [[ ${#EGIT_PATCHES[@]} -gt 1 ]] ; then
for i in "${EGIT_PATCHES[@]}"; do
debug-print "$FUNCNAME: git_autopatch: patching from ${i}"
epatch "${i}"
done
elif [[ -n ${EGIT_PATCHES} ]]; then
elif [[ ${EGIT_PATCHES} != "" ]]; then
# no need for loop if space separated string is passed.
debug-print "$FUNCNAME: git_autopatch: patching from ${EGIT_PATCHES}"
epatch "${EGIT_PATCHES}"

View File

@ -1,6 +1,6 @@
# Copyright 1999-2013 Gentoo Foundation
# Copyright 1999-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/linux-info.eclass,v 1.96 2013/01/24 20:47:23 vapier Exp $
# $Header: /var/cvsroot/gentoo-x86/eclass/linux-info.eclass,v 1.90 2011/08/22 04:46:32 vapier Exp $
# @ECLASS: linux-info.eclass
# @MAINTAINER:
@ -18,14 +18,6 @@
# "kernel config" in this file means:
# The .config of the currently installed sources is used as the first
# preference, with a fall-back to bundled config (/proc/config.gz) if available.
#
# Before using any of the config-handling functions in this eclass, you must
# ensure that one of the following functions has been called (in order of
# preference), otherwise you will get bugs like #364041):
# linux-info_pkg_setup
# linux-info_get_any_version
# get_version
# get_running_version
# A Couple of env vars are available to effect usage of this eclass
# These are as follows:
@ -167,7 +159,7 @@ qeerror() { qout eerror "${@}" ; }
# done by including the configfile, and printing the variable with Make.
# It WILL break if your makefile has missing dependencies!
getfilevar() {
local ERROR basefname basedname myARCH="${ARCH}"
local ERROR basefname basedname myARCH="${ARCH}"
ERROR=0
[ -z "${1}" ] && ERROR=1
@ -291,7 +283,8 @@ require_configured_kernel() {
# MUST call linux_config_exists first.
linux_chkconfig_present() {
linux_config_qa_check linux_chkconfig_present
local RESULT config
local RESULT
local config
config="${KV_OUT_DIR}/.config"
[ ! -f "${config}" ] && config="/proc/config.gz"
RESULT="$(getfilevar_noexec CONFIG_${1} "${config}")"
@ -307,7 +300,8 @@ linux_chkconfig_present() {
# MUST call linux_config_exists first.
linux_chkconfig_module() {
linux_config_qa_check linux_chkconfig_module
local RESULT config
local RESULT
local config
config="${KV_OUT_DIR}/.config"
[ ! -f "${config}" ] && config="/proc/config.gz"
RESULT="$(getfilevar_noexec CONFIG_${1} "${config}")"
@ -323,7 +317,8 @@ linux_chkconfig_module() {
# MUST call linux_config_exists first.
linux_chkconfig_builtin() {
linux_config_qa_check linux_chkconfig_builtin
local RESULT config
local RESULT
local config
config="${KV_OUT_DIR}/.config"
[ ! -f "${config}" ] && config="/proc/config.gz"
RESULT="$(getfilevar_noexec CONFIG_${1} "${config}")"
@ -364,27 +359,40 @@ linux_chkconfig_string() {
# kernel_is 2 6 9 returns true
# @CODE
# Note: duplicated in kernel-2.eclass
# got the jist yet?
kernel_is() {
# if we haven't determined the version yet, we need to.
linux-info_get_any_version
# Now we can continue
local operator test value
local operator testagainst value x=0 y=0 z=0
case ${1#-} in
lt) operator="-lt"; shift;;
gt) operator="-gt"; shift;;
le) operator="-le"; shift;;
ge) operator="-ge"; shift;;
eq) operator="-eq"; shift;;
case ${1} in
-lt|lt) operator="-lt"; shift;;
-gt|gt) operator="-gt"; shift;;
-le|le) operator="-le"; shift;;
-ge|ge) operator="-ge"; shift;;
-eq|eq) operator="-eq"; shift;;
*) operator="-eq";;
esac
[[ $# -gt 3 ]] && die "Error in kernel-2_kernel_is(): too many parameters"
: $(( test = (KV_MAJOR << 16) + (KV_MINOR << 8) + KV_PATCH ))
: $(( value = (${1:-${KV_MAJOR}} << 16) + (${2:-${KV_MINOR}} << 8) + ${3:-${KV_PATCH}} ))
[ ${test} ${operator} ${value} ]
for x in ${@}; do
for((y=0; y<$((3 - ${#x})); y++)); do value="${value}0"; done
value="${value}${x}"
z=$((${z} + 1))
case ${z} in
1) for((y=0; y<$((3 - ${#KV_MAJOR})); y++)); do testagainst="${testagainst}0"; done;
testagainst="${testagainst}${KV_MAJOR}";;
2) for((y=0; y<$((3 - ${#KV_MINOR})); y++)); do testagainst="${testagainst}0"; done;
testagainst="${testagainst}${KV_MINOR}";;
3) for((y=0; y<$((3 - ${#KV_PATCH})); y++)); do testagainst="${testagainst}0"; done;
testagainst="${testagainst}${KV_PATCH}";;
*) die "Error in kernel-2_kernel_is(): Too many parameters.";;
esac
done
[ "${testagainst}" ${operator} "${value}" ] && return 0 || return 1
}
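# Worked example (illustration, not part of the eclass) of the two comparison
# styles above, assuming a 3.8.0 kernel (KV_MAJOR=3 KV_MINOR=8 KV_PATCH=0)
# and the query "kernel_is -ge 3 7":
#
#   # packed-integer form: one byte per component
#   test=$(( (3 << 16) + (8 << 8) + 0 ))    # 198656
#   value=$(( (3 << 16) + (7 << 8) + 0 ))   # 198400
#   [ ${test} -ge ${value} ]                # true
#
#   # zero-padded form: each requested component becomes a three-digit field
#   testagainst=003008 value=003007
#   [ "${testagainst}" -ge "${value}" ]     # also true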
get_localversion() {
@ -493,7 +501,9 @@ get_version() {
# And if we didn't pass it, we can take a nosey in the Makefile
kbuild_output="$(${mkfunc} KBUILD_OUTPUT ${KERNEL_MAKEFILE})"
if [ -e "${kbuild_output}" ]; then
OUTPUT_DIR="${OUTPUT_DIR:-${kbuild_output}}"
fi
# And contrary to existing functions I feel we shouldn't trust the
# directory name to find version information as this seems insane.
@ -517,7 +527,7 @@ get_version() {
# but before we do this, we need to find if we use a different object directory.
# This *WILL* break if the user is using localversions, but we assume it was
# caught before this if they are.
OUTPUT_DIR="${OUTPUT_DIR:-/lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build}"
OUTPUT_DIR="${OUTPUT_DIR:-${ROOT}/lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build}"
[ -h "${OUTPUT_DIR}" ] && KV_OUT_DIR="$(readlink -f ${OUTPUT_DIR})"
[ -d "${OUTPUT_DIR}" ] && KV_OUT_DIR="${OUTPUT_DIR}"
@ -586,14 +596,11 @@ get_running_version() {
get_version
return $?
else
# This handles a variety of weird kernel versions. Make sure to update
# tests/linux-info:get_running_version.sh if you want to change this.
local kv_full=${KV_FULL//[-+_]*}
KV_MAJOR=$(get_version_component_range 1 ${kv_full})
KV_MINOR=$(get_version_component_range 2 ${kv_full})
KV_PATCH=$(get_version_component_range 3 ${kv_full})
KV_EXTRA="${KV_FULL#${KV_MAJOR}.${KV_MINOR}${KV_PATCH:+.${KV_PATCH}}}"
: ${KV_PATCH:=0}
KV_MAJOR=$(get_version_component_range 1 ${KV_FULL})
KV_MINOR=$(get_version_component_range 2 ${KV_FULL})
KV_PATCH=$(get_version_component_range 3 ${KV_FULL})
KV_PATCH=${KV_PATCH//-*}
KV_EXTRA="${KV_FULL#${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}}"
fi
return 0
}
@ -625,14 +632,7 @@ check_kernel_built() {
require_configured_kernel
get_version
local versionh_path
if kernel_is -ge 3 7; then
versionh_path="include/generated/uapi/linux/version.h"
else
versionh_path="include/linux/version.h"
fi
if [ ! -f "${KV_OUT_DIR}/${versionh_path}" ]
if [ ! -f "${KV_OUT_DIR}/include/linux/version.h" ]
then
eerror "These sources have not yet been prepared."
eerror "We cannot build against an unprepared tree."
@ -655,7 +655,8 @@ check_modules_supported() {
require_configured_kernel
get_version
if ! linux_chkconfig_builtin "MODULES"; then
if ! linux_chkconfig_builtin "MODULES"
then
eerror "These sources do not support loading external modules."
eerror "to be able to use this module please enable \"Loadable modules support\""
eerror "in your kernel, recompile and then try merging this module again."
@ -680,7 +681,8 @@ check_extra_config() {
# Determine if we really need a .config. The only time when we don't need
# one is when all of the CONFIG_CHECK options are prefixed with "~".
for config in ${CONFIG_CHECK}; do
for config in ${CONFIG_CHECK}
do
if [[ "${config:0:1}" != "~" ]]; then
config_required=1
break

View File

@ -1,12 +1,15 @@
# Copyright 1999-2012 Gentoo Foundation
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/subversion.eclass,v 1.83 2012/07/29 05:54:17 hattya Exp $
# $Header: /var/cvsroot/gentoo-x86/eclass/subversion.eclass,v 1.67 2009/05/10 20:33:38 arfrever Exp $
# @ECLASS: subversion.eclass
# @MAINTAINER:
# Akinori Hattori <hattya@gentoo.org>
# @AUTHOR:
# Bo Ørsted Andresen <zlin@gentoo.org>
# Arfrever Frehtes Taifersar Arahesis <arfrever@gentoo.org>
#
# Original Author: Akinori Hattori <hattya@gentoo.org>
#
# @BLURB: The subversion eclass is written to fetch software sources from subversion repositories
# @DESCRIPTION:
# The subversion eclass provides functions to fetch, patch and bootstrap
@ -19,20 +22,37 @@ ESVN="${ECLASS}"
case "${EAPI:-0}" in
0|1)
EXPORT_FUNCTIONS src_unpack pkg_preinst
DEPEND="dev-vcs/subversion"
;;
*)
EXPORT_FUNCTIONS src_unpack src_prepare pkg_preinst
DEPEND="|| ( dev-vcs/subversion[webdav-neon] dev-vcs/subversion[webdav-serf] )"
;;
esac
DEPEND+=" net-misc/rsync"
DESCRIPTION="Based on the ${ECLASS} eclass"
SUBVERSION_DEPEND="dev-vcs/subversion[webdav-neon,webdav-serf]
net-misc/rsync"
if [[ -z "${ESVN_DISABLE_DEPENDENCIES}" ]]; then
DEPEND="${SUBVERSION_DEPEND}"
fi
# @ECLASS-VARIABLE: ESVN_STORE_DIR
# @DESCRIPTION:
# subversion sources store directory. Users may override this in /etc/make.conf
[[ -z ${ESVN_STORE_DIR} ]] && ESVN_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/svn-src"
if [[ -z ${ESVN_STORE_DIR} ]]; then
ESVN_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/svn-src"
# Pick a directory with the same permissions now and in the future. Note
# that we cannot just use USERNAME because the eventual effective user when
# doing the svn commands may change - PORTAGE_USERNAME has not taken effect
# yet. Further complicating things, if features userpriv is not set,
# PORTAGE_USERNAME is going to be ignored. We assume that if we enable
# userpriv in the future, we will also set PORTAGE_USERNAME to something
# other than "portage".
# TODO: remove this once we are using consistent users and userpriv settings
# for emerge and emerge-${BOARD}.
ESVN_STORE_DIR="${ESVN_STORE_DIR}/${PORTAGE_USERNAME:-portage}"
fi
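# Illustrative result with the default username:
#
#   ESVN_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/svn-src/portage"
#
# so checkouts made as different effective users no longer fight over the
# ownership of a shared svn-src directory.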
# @ECLASS-VARIABLE: ESVN_FETCH_CMD
# @DESCRIPTION:
@ -61,12 +81,11 @@ ESVN_OPTIONS="${ESVN_OPTIONS:-}"
#
# e.g. http://foo/trunk, svn://bar/trunk, svn://bar/branch/foo@1234
#
# supported URI schemes:
# supported protocols:
# http://
# https://
# svn://
# svn+ssh://
# file://
#
# to peg to a specific revision, append @REV to the repo's uri
ESVN_REPO_URI="${ESVN_REPO_URI:-}"
@ -137,22 +156,18 @@ ESVN_PATCHES="${ESVN_PATCHES:-}"
# don't export the working copy to S.
ESVN_RESTRICT="${ESVN_RESTRICT:-}"
# @ECLASS-VARIABLE: ESVN_DISABLE_DEPENDENCIES
# @DESCRIPTION:
# Set this variable to a non-empty value to disable the automatic inclusion of
# Subversion in dependencies.
ESVN_DISABLE_DEPENDENCIES="${ESVN_DISABLE_DEPENDENCIES:-}"
# @ECLASS-VARIABLE: ESVN_OFFLINE
# @DESCRIPTION:
# Set this variable to a non-empty value to disable the automatic updating of
# an svn source tree. This is intended to be set outside the subversion source
# tree by users.
ESVN_OFFLINE="${ESVN_OFFLINE:-${EVCS_OFFLINE}}"
# @ECLASS-VARIABLE: ESVN_UMASK
# @DESCRIPTION:
# Set this variable to a custom umask. This is intended to be set by users.
# By setting this to something like 002, it can make life easier for people
# who do development as non-root (but are in the portage group), and then
# switch over to building with FEATURES=userpriv. Or vice-versa. Shouldn't
# be a security issue here as anyone who has portage group write access
# already can screw the system over in more creative ways.
ESVN_UMASK="${ESVN_UMASK:-${EVCS_UMASK}}"
ESVN_OFFLINE="${ESVN_OFFLINE:-${ESCM_OFFLINE}}"
# @ECLASS-VARIABLE: ESVN_UP_FREQ
# @DESCRIPTION:
@ -189,33 +204,31 @@ subversion_fetch() {
[[ -n "${ESVN_REVISION}" ]] && revision="${ESVN_REVISION}"
# check for the scheme
local scheme="${repo_uri%%:*}"
case "${scheme}" in
# check for the protocol
local protocol="${repo_uri%%:*}"
case "${protocol}" in
http|https)
;;
svn|svn+ssh)
;;
file)
;;
*)
die "${ESVN}: fetch from '${scheme}' is not yet implemented."
die "${ESVN}: fetch from '${protocol}' is not yet implemented."
;;
esac
addread "/etc/subversion"
addwrite "${ESVN_STORE_DIR}"
if [[ -n "${ESVN_UMASK}" ]]; then
eumask_push "${ESVN_UMASK}"
fi
# Also make the /var/lib/portage/distfiles/svn-src directory writeable in sandbox
# so we can create it if necessary.
addwrite "$(dirname ${ESVN_STORE_DIR})"
if [[ ! -d ${ESVN_STORE_DIR} ]]; then
debug-print "${FUNCNAME}: initial checkout. creating subversion directory"
mkdir -m 775 -p "${ESVN_STORE_DIR}" || die "${ESVN}: can't mkdir ${ESVN_STORE_DIR}."
mkdir -p "${ESVN_STORE_DIR}" || die "${ESVN}: can't mkdir ${ESVN_STORE_DIR}."
fi
pushd "${ESVN_STORE_DIR}" >/dev/null || die "${ESVN}: can't chdir to ${ESVN_STORE_DIR}"
cd "${ESVN_STORE_DIR}" || die "${ESVN}: can't chdir to ${ESVN_STORE_DIR}"
local wc_path="$(subversion__get_wc_path "${repo_uri}")"
local options="${ESVN_OPTIONS} --config-dir ${ESVN_STORE_DIR}/.subversion"
@ -245,7 +258,7 @@ subversion_fetch() {
debug-print "${FUNCNAME}: ${ESVN_FETCH_CMD} ${options} ${repo_uri}"
mkdir -m 775 -p "${ESVN_PROJECT}" || die "${ESVN}: can't mkdir ${ESVN_PROJECT}."
mkdir -p "${ESVN_PROJECT}" || die "${ESVN}: can't mkdir ${ESVN_PROJECT}."
cd "${ESVN_PROJECT}" || die "${ESVN}: can't chdir to ${ESVN_PROJECT}"
if [[ -n "${ESVN_USER}" ]]; then
${ESVN_FETCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
@ -257,7 +270,6 @@ subversion_fetch() {
svn upgrade "${wc_path}" &>/dev/null
svn cleanup "${wc_path}" &>/dev/null
subversion_wc_info "${repo_uri}" || die "${ESVN}: unknown problem occurred while accessing working copy."
if [[ -n ${ESVN_REVISION} && ${ESVN_REVISION} != ${ESVN_WC_REVISION} ]]; then
die "${ESVN}: You requested off-line updating and revision ${ESVN_REVISION} but only revision ${ESVN_WC_REVISION} is available locally."
fi
@ -279,25 +291,7 @@ subversion_fetch() {
fi
if [[ -z ${esvn_up_freq} ]]; then
if [[ ${ESVN_WC_UUID} != $(subversion__svn_info "${repo_uri}" "Repository UUID") ]]; then
# UUID mismatch. Delete working copy and check out it again.
einfo "subversion recheck out start -->"
einfo " old UUID: ${ESVN_WC_UUID}"
einfo " new UUID: $(subversion__svn_info "${repo_uri}" "Repository UUID")"
einfo " repository: ${repo_uri}${revision:+@}${revision}"
rm -fr "${ESVN_PROJECT}" || die
debug-print "${FUNCNAME}: ${ESVN_FETCH_CMD} ${options} ${repo_uri}"
mkdir -m 775 -p "${ESVN_PROJECT}" || die "${ESVN}: can't mkdir ${ESVN_PROJECT}."
cd "${ESVN_PROJECT}" || die "${ESVN}: can't chdir to ${ESVN_PROJECT}"
if [[ -n "${ESVN_USER}" ]]; then
${ESVN_FETCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
else
${ESVN_FETCH_CMD} ${options} "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
fi
elif [[ ${ESVN_WC_URL} != $(subversion__get_repository_uri "${repo_uri}") ]]; then
if [[ ${ESVN_WC_URL} != $(subversion__get_repository_uri "${repo_uri}") ]]; then
einfo "subversion switch start -->"
einfo " old repository: ${ESVN_WC_URL}@${ESVN_WC_REVISION}"
einfo " new repository: ${repo_uri}${revision:+@}${revision}"
@ -327,10 +321,6 @@ subversion_fetch() {
fi
fi
if [[ -n "${ESVN_UMASK}" ]]; then
eumask_pop
fi
einfo " working copy: ${wc_path}"
if ! has "export" ${ESVN_RESTRICT}; then
@ -345,7 +335,6 @@ subversion_fetch() {
rsync -rlpgo --exclude=".svn/" . "${S}" || die "${ESVN}: can't export to ${S}."
fi
popd >/dev/null
echo
}
@ -360,36 +349,63 @@ subversion_bootstrap() {
cd "${S}"
if [[ -n ${ESVN_PATCHES} ]]; then
local patch fpatch
einfo "apply patches -->"
local patch fpatch
for patch in ${ESVN_PATCHES}; do
if [[ -f ${patch} ]]; then
epatch "${patch}"
else
for fpatch in ${FILESDIR}/${patch}; do
if [[ -f ${fpatch} ]]; then
epatch "${fpatch}"
else
die "${ESVN}: ${patch} not found"
fi
done
fi
done
echo
fi
if [[ -n ${ESVN_BOOTSTRAP} ]]; then
einfo "begin bootstrap -->"
if [[ -f ${ESVN_BOOTSTRAP} && -x ${ESVN_BOOTSTRAP} ]]; then
einfo " bootstrap with a file: ${ESVN_BOOTSTRAP}"
eval "./${ESVN_BOOTSTRAP}" || die "${ESVN}: can't execute ESVN_BOOTSTRAP."
else
einfo " bootstrap with command: ${ESVN_BOOTSTRAP}"
eval "${ESVN_BOOTSTRAP}" || die "${ESVN}: can't eval ESVN_BOOTSTRAP."
fi
fi
}
# @FUNCTION: subversion_src_unpack
# @DESCRIPTION:
# Default src_unpack. Fetch and, in older EAPIs, bootstrap.
subversion_src_unpack() {
subversion_fetch || die "${ESVN}: unknown problem occurred in subversion_fetch."
if has "${EAPI:-0}" 0 1; then
subversion_bootstrap || die "${ESVN}: unknown problem occurred in subversion_bootstrap."
fi
}
# @FUNCTION: subversion_src_prepare
# @DESCRIPTION:
# Default src_prepare. Bootstrap.
subversion_src_prepare() {
subversion_bootstrap || die "${ESVN}: unknown problem occurred in subversion_bootstrap."
}
# @FUNCTION: subversion_wc_info
# @USAGE: [repo_uri]
# @RETURN: ESVN_WC_URL, ESVN_WC_ROOT, ESVN_WC_UUID, ESVN_WC_REVISION and ESVN_WC_PATH
@ -416,46 +432,6 @@ subversion_wc_info() {
export ESVN_WC_PATH="${wc_path}"
}
# @FUNCTION: subversion_src_unpack
# @DESCRIPTION:
# Default src_unpack. Fetch and, in older EAPIs, bootstrap.
subversion_src_unpack() {
subversion_fetch || die "${ESVN}: unknown problem occurred in subversion_fetch."
if has "${EAPI:-0}" 0 1; then
subversion_bootstrap || die "${ESVN}: unknown problem occurred in subversion_bootstrap."
fi
}
# @FUNCTION: subversion_src_prepare
# @DESCRIPTION:
# Default src_prepare. Bootstrap.
subversion_src_prepare() {
subversion_bootstrap || die "${ESVN}: unknown problem occurred in subversion_bootstrap."
}
# @FUNCTION: subversion_pkg_preinst
# @USAGE: [repo_uri]
# @DESCRIPTION:
# Log the svn revision of source code. Doing this in pkg_preinst because we
# want the logs to stick around if packages are uninstalled without messing with
# config protection.
subversion_pkg_preinst() {
local pkgdate=$(date "+%Y%m%d %H:%M:%S")
subversion_wc_info "${1}"
if [[ -n ${ESCM_LOGDIR} ]]; then
local dir="${ROOT}/${ESCM_LOGDIR}/${CATEGORY}"
if [[ ! -d ${dir} ]]; then
mkdir -p "${dir}" || eerror "Failed to create '${dir}' for logging svn revision"
fi
local logmessage="svn: ${pkgdate} - ${PF}:${SLOT} was merged at revision ${ESVN_WC_REVISION}"
if [[ -d ${dir} ]]; then
echo "${logmessage}" >>"${dir}/${PN}.log"
else
eerror "Could not log the message '${logmessage}' to '${dir}/${PN}.log'"
fi
fi
}
## -- Private Functions
## -- subversion__svn_info() ------------------------------------------------- #
@ -467,9 +443,7 @@ subversion__svn_info() {
local target="${1}"
local key="${2}"
env LC_ALL=C svn info ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${target}" \
| grep -i "^${key}" \
| cut -d" " -f2-
env LC_ALL=C svn info "${target}" | grep -i "^${key}" | cut -d" " -f2-
}
## -- subversion__get_repository_uri() --------------------------------------- #
@ -479,13 +453,16 @@ subversion__get_repository_uri() {
local repo_uri="${1}"
debug-print "${FUNCNAME}: repo_uri = ${repo_uri}"
if [[ -z ${repo_uri} ]]; then
die "${ESVN}: ESVN_REPO_URI (or specified URI) is empty."
fi
# delete trailing slash
if [[ -z ${repo_uri##*/} ]]; then
repo_uri="${repo_uri%/}"
fi
repo_uri="${repo_uri%@*}"
echo "${repo_uri}"
@ -507,16 +484,42 @@ subversion__get_wc_path() {
# param $1 - a repository URI.
subversion__get_peg_revision() {
local repo_uri="${1}"
local peg_rev=
debug-print "${FUNCNAME}: repo_uri = ${repo_uri}"
# repo_uri has peg revision?
if [[ ${repo_uri} = *@* ]]; then
peg_rev="${repo_uri##*@}"
debug-print "${FUNCNAME}: peg_rev = ${peg_rev}"
else
# repo_uri has peg revision ?
if [[ ${repo_uri} != *@* ]]; then
debug-print "${FUNCNAME}: repo_uri does not have a peg revision."
fi
local peg_rev=
[[ ${repo_uri} = *@* ]] && peg_rev="${repo_uri##*@}"
debug-print "${FUNCNAME}: peg_rev = ${peg_rev}"
echo "${peg_rev}"
}
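# Illustrative results (hypothetical URIs): the peg revision is whatever
# follows the last "@" in the repository URI.
#
#   subversion__get_peg_revision "svn://bar/branch/foo@1234"   # echoes 1234
#   subversion__get_peg_revision "http://foo/trunk"            # echoes nothing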
# @FUNCTION: subversion_pkg_preinst
# @USAGE: [repo_uri]
# @DESCRIPTION:
# Log the svn revision of source code. Doing this in pkg_preinst because we
# want the logs to stick around if packages are uninstalled without messing with
# config protection.
subversion_pkg_preinst() {
local pkgdate=$(date "+%Y%m%d %H:%M:%S")
subversion_wc_info "${1:-${ESVN_REPO_URI}}"
if [[ -n ${ESCM_LOGDIR} ]]; then
local dir="${ROOT}/${ESCM_LOGDIR}/${CATEGORY}"
if [[ ! -d ${dir} ]]; then
mkdir -p "${dir}" || \
eerror "Failed to create '${dir}' for logging svn revision to '${PORTDIR_SCM}'"
fi
local logmessage="svn: ${pkgdate} - ${PF}:${SLOT} was merged at revision ${ESVN_WC_REVISION}"
if [[ -d ${dir} ]]; then
echo "${logmessage}" >> "${dir}/${PN}.log"
else
eerror "Could not log the message '${logmessage}' to '${dir}/${PN}.log'"
fi
fi
}

View File

@ -1,133 +1 @@
#!/bin/bash
if ! source /etc/init.d/functions.sh ; then
echo "Missing functions.sh. Please to install!" 1>&2
exit 1
fi
inherit() {
local e
for e in "$@" ; do
source ../${e}.eclass
done
}
EXPORT_FUNCTIONS() { :; }
debug-print() {
[[ ${#} -eq 0 ]] && return
if [[ ${ECLASS_DEBUG_OUTPUT} == on ]]; then
printf 'debug: %s\n' "${@}" >&2
elif [[ -n ${ECLASS_DEBUG_OUTPUT} ]]; then
printf 'debug: %s\n' "${@}" >> "${ECLASS_DEBUG_OUTPUT}"
fi
}
debug-print-function() {
debug-print "${1}, parameters: ${*:2}"
}
debug-print-section() {
debug-print "now in section ${*}"
}
has() {
local needle=$1
shift
local x
for x in "$@"; do
[ "${x}" = "${needle}" ] && return 0
done
return 1
}
use() { has "$1" ${IUSE} ; }
die() {
echo "die: $*" 1>&2
exit 1
}
has_version() {
portageq has_version / "$@"
}
KV_major() {
[[ -z $1 ]] && return 1
local KV=$@
echo "${KV%%.*}"
}
KV_minor() {
[[ -z $1 ]] && return 1
local KV=$@
KV=${KV#*.}
echo "${KV%%.*}"
}
KV_micro() {
[[ -z $1 ]] && return 1
local KV=$@
KV=${KV#*.*.}
echo "${KV%%[^[:digit:]]*}"
}
KV_to_int() {
[[ -z $1 ]] && return 1
local KV_MAJOR=$(KV_major "$1")
local KV_MINOR=$(KV_minor "$1")
local KV_MICRO=$(KV_micro "$1")
local KV_int=$(( KV_MAJOR * 65536 + KV_MINOR * 256 + KV_MICRO ))
# We make version 2.2.0 the minimum version we will handle as
# a sanity check ... if its less, we fail ...
if [[ ${KV_int} -ge 131584 ]] ; then
echo "${KV_int}"
return 0
fi
return 1
}
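# Worked example (illustration only) of the packing used above:
#
#   KV_to_int "3.8.0"   # echoes 198656  (3*65536 + 8*256 + 0)
#   KV_to_int "2.1.9"   # echoes nothing, returns 1 (131337 is below the 2.2.0 floor of 131584)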
tret=0
tbegin() {
ebegin "Testing $*"
}
texit() {
rm -rf "${tmpdir}"
exit ${tret}
}
tend() {
t eend "$@"
}
t() {
"$@"
local ret=$?
: $(( tret |= ${ret} ))
return ${ret}
}
tmpdir="${PWD}/tmp"
pkg_root="${tmpdir}/$0/${RANDOM}"
T="${pkg_root}/temp"
D="${pkg_root}/image"
WORKDIR="${pkg_root}/work"
ED=${D}
mkdir -p "${D}" "${T}" "${WORKDIR}"
dodir() {
mkdir -p "${@/#/${ED}/}"
}
elog() { einfo "$@" ; }
IUSE=""
CATEGORY="dev-eclass"
PN="tests"
PV="0"
P="${PN}-${PV}"
PF=${P}
source ../../../portage-stable/eclass/tests/tests-common.sh

View File

@ -764,8 +764,13 @@ clang-setup-env() {
case ${ARCH} in
amd64|x86)
export CC="clang" CXX="clang++"
append-flags --sysroot="${SYSROOT}"
append-flags -B$(get_binutils_path_gold)
local clang_flags=(
--sysroot="${SYSROOT}"
-B$(get_binutils_path_gold)
$(usex x86 -m32 '')
)
append-flags "${clang_flags[@]}"
# Some boards use optimizations (e.g. -mfpmath=sse) that
# clang does not support.