flatcar-scripts/pkg_auto/impl/pkg_auto_lib.sh
Krzesimir Nowak 7fdaa4e6b1 pkg-auto: Code style changes
Mostly to avoid repeating variable names when declaring them and
initializing them.

Signed-off-by: Krzesimir Nowak <knowak@microsoft.com>
2025-08-27 16:02:06 +02:00

3645 lines
125 KiB
Bash

#!/bin/bash
#
# TODO:
#
# - Generate a report about missing build deps of board packages in
# sdk. These reports can be generated by processing sdk-pkgs-kv and
# board-bdeps reports, I think.
#
# - Mount Gentoo repo into the SDK container and set up emerge to use
# Gentoo as a primary repo, and portage-stable and coreos-overlay as
# overlays. That way if an updated package pulls in a new package we
# can notice it when it comes from Gentoo (emerge reports also
# source repo like sys-libs/glibc-2.35-r5::gentoo or something like
# this). This would make this script more robust.
#
# - Instead of having a list of packages to update, rather update them
# all in a single commit and have a list of exclusions. The reason
# is that, at this point, almost all of the packages in
# portage-stable are automatically updated, exclusions being usually
# temporary, so it would be better to have a short file with
# temporary exclusions rather than a long file with some commented
# out entries. This probably would render the sort_packages_list.py
# script unnecessary. On the other hand, the current mode of
# operation may be still useful for the coreos-overlay packages,
# because none of them are under automation (some are, though, under
# an ad-hoc automation via github actions).
#
# - Handle package appearance or disappearance. Currently, when a
# package ends up being unused (so it exists, but is not picked up,
# because some other package stopped depending on it) or removed,
# the package ends up in the manual-work-needed file. This probably
# could be handled as an entry in the summary stubs about being
# dropped.
#
# - Find unused packages and eclasses.
#
# - The rename handling should probably also change all instances of
# the old name everywhere outside portage-stable, otherwise emerge
# may fail when one of our ebuilds still uses the old name, maybe.
#
# Needed to be enabled here to parse some globs inside the functions.
shopt -s extglob
# Saner defaults.
shopt -s nullglob
shopt -s dotglob
if [[ -z ${__PKG_AUTO_LIB_SH_INCLUDED__:-} ]]; then
__PKG_AUTO_LIB_SH_INCLUDED__=x
source "$(dirname "${BASH_SOURCE[0]}")/util.sh"
source "${PKG_AUTO_IMPL_DIR}/cleanups.sh"
source "${PKG_AUTO_IMPL_DIR}/debug.sh"
source "${PKG_AUTO_IMPL_DIR}/gentoo_ver.sh"
source "${PKG_AUTO_IMPL_DIR}/jobs_lib.sh"
source "${PKG_AUTO_IMPL_DIR}/md5_cache_diff_lib.sh"
# Sets up the workdir using the passed config. The config can be
# created basing on the config_template file or using the
# generate_config script.
#
# The path to the workdir can be empty - the function will then create
# a temporary directory.
#
# This also sets the WORKDIR global variable, allowing other function
# to be invoked.
#
# Params:
#
# 1 - path to the workdir
# 2 - path to the config file
function setup_workdir_with_config() {
    # Fix: config_file was assigned without being declared local (the
    # declaration said "file"), so it leaked into the caller's scope.
    local workdir config_file
    workdir=${1}; shift
    config_file=${1}; shift
    # Per-key config values; scalar cfg_* for single-valued keys,
    # arrays for comma-separated ones.
    local cfg_scripts cfg_aux cfg_reports cfg_old_base cfg_new_base cfg_sdk_image_override
    local -a cfg_cleanups cfg_debug_packages
    # some defaults
    cfg_old_base='origin/main'
    cfg_new_base=''
    cfg_cleanups=('ignore')
    cfg_sdk_image_override=''
    cfg_debug_packages=()
    local line key value swwc_stripped var_name
    # Each meaningful config line is "key: value"; unknown keys are
    # silently ignored.
    while read -r line; do
        strip_out "${line%%:*}" swwc_stripped
        key=${swwc_stripped}
        strip_out "${line#*:}" swwc_stripped
        value=${swwc_stripped}
        if [[ -z ${value} ]]; then
            fail "empty value for ${key} in config"
        fi
        case ${key} in
            scripts|aux|reports)
                # Path-valued keys are normalized to absolute paths.
                var_name="cfg_${key//-/_}"
                local -n var_ref=${var_name}
                var_ref=$(realpath "${value}")
                unset -n var_ref
                ;;
            old-base|new-base|sdk-image-override)
                var_name="cfg_${key//-/_}"
                local -n var_ref=${var_name}
                var_ref=${value}
                unset -n var_ref
                ;;
            cleanups|debug-packages)
                # Comma-separated list keys.
                var_name="cfg_${key//-/_}"
                mapfile -t "${var_name}" <<<"${value//,/$'\n'}"
                ;;
        esac
    done < <(cat_meaningful "${config_file}")
    # new-base defaults to old-base when unspecified.
    if [[ -z "${cfg_new_base}" ]]; then
        cfg_new_base=${cfg_old_base}
    fi
    # The path-valued keys are mandatory.
    for key in scripts aux reports; do
        var_name="cfg_${key//-/_}"
        if [[ -z "${!var_name}" ]]; then
            fail "${key} was not specified in config"
        fi
    done
    setup_cleanups "${cfg_cleanups[@]}"
    setup_workdir "${workdir}"
    add_cleanup "rm -f ${WORKDIR@Q}/config"
    cp -a "${config_file}" "${WORKDIR}/config"
    setup_worktrees_in_workdir "${cfg_scripts}" "${cfg_old_base}" "${cfg_new_base}" "${cfg_reports}" "${cfg_aux}"
    if [[ -n ${cfg_sdk_image_override} ]]; then
        override_sdk_image_name "${cfg_sdk_image_override}"
    fi
    pkg_debug_add "${cfg_debug_packages[@]}"
}
# Goes over the list of automatically updated packages and syncs them
# with packages from Gentoo repo. Cleans up missing packages.
#
# The function can only be called after the workdir has been set up
# with setup_workdir_with_config.
#
# Params:
#
# 1 - a path to the Gentoo repo
function perform_sync_with_gentoo() {
    # Normalize the repo path once, then run the sync and the two
    # fix-up passes for packages that could not be synced.
    local gentoo_repo
    gentoo_repo=$(realpath "${1}"); shift
    run_sync "${gentoo_repo}"
    handle_missing_in_scripts
    handle_missing_in_gentoo "${gentoo_repo}"
}
# Generates package update reports. Duh.
#
# The function can only be called after the workdir has been set up
# with setup_workdir_with_config.
#
# The location of the reports is specified in the config that was
# passed to setup_workdir_with_config.
function generate_package_update_reports() {
    # Produce the per-arch package reports from inside SDK containers.
    generate_sdk_reports
    # Post-process the results of the earlier Gentoo sync.
    # NOTE(review): handle_gentoo_sync is defined elsewhere in this file.
    handle_gentoo_sync
}
# Saves the new state to a git branch in scripts.
#
# The function can only be called after the workdir has been set up
# with setup_workdir_with_config.
#
# Params:
#
# 1 - name of the new branch
function save_new_state() {
    local target_branch=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    info "saving new state to branch ${target_branch}"
    # Force-move (or create) the branch to point at the new-state tip.
    git -C "${SCRIPTS}" branch --force "${target_branch}" "${NEW_STATE_BRANCH}"
}
#
#
# Implementation details, do not use directly in scripts sourcing this file.
#
#
# Creates a workdir, the path to which is stored in WORKDIR global
# variable.
#
# 1 - predefined work directory path (optional)
function setup_workdir() {
    local dir=${1:-}
    if [[ -n ${dir} ]]; then
        # A predefined path was given - it must either not exist yet or
        # be an empty directory.
        if [[ -e ${dir} && ! -d ${dir} ]]; then
            fail "Expected ${dir@Q} to be a directory"
        fi
        if [[ -d ${dir} ]] && ! dir_is_empty "${dir}"; then
            fail "Expected ${dir@Q} to be an empty directory"
        fi
    else
        dir=$(mktemp --tmpdir --directory 'pkg-auto-workdir.XXXXXXXX')
    fi
    declare -g WORKDIR
    WORKDIR=$(realpath "${dir}")
    # Register removal before creating, so a failed setup still cleans up.
    add_cleanup "rmdir ${WORKDIR@Q}"
    mkdir -p "${WORKDIR}"
    setup_initial_globals_file
}
# Sets up worktrees for the old and new state inside WORKDIR. Creates
# the globals file inside WORKDIR.
#
# 1 - path to scripts repo
# 2 - base for the old state worktree (e.g. origin/main)
# 3 - base for the new state worktree (e.g. origin/main)
# 4 - path to reports directory
# 5 - path to aux directory
function setup_worktrees_in_workdir() {
    local scripts_repo old_base new_base reports_dir aux_dir
    scripts_repo=${1}; shift
    old_base=${1}; shift
    new_base=${1}; shift
    reports_dir=${1}; shift
    aux_dir=${1}; shift
    local old_tree new_tree
    old_tree="${WORKDIR}/old_state"
    new_tree="${WORKDIR}/new_state"
    # create reports directory now - there may be some developer
    # warnings afoot
    mkdir -p "${reports_dir}"
    # RANDOM keeps the branch names unique across concurrent runs.
    setup_worktree "${scripts_repo}" "${old_base}" "old-state-${RANDOM}" "${old_tree}"
    setup_worktree "${scripts_repo}" "${new_base}" "new-state-${RANDOM}" "${new_tree}"
    extend_globals_file "${scripts_repo}" "${old_tree}" "${new_tree}" "${reports_dir}" "${aux_dir}"
}
# Adds an overridden SDK image name to the globals file.
#
# Params:
#
# 1 - image name
function override_sdk_image_name() {
    # Overwrites the SDK_IMAGE definition appended earlier by
    # extend_globals_file (last assignment wins when sourced).
    local image=${1}; shift
    append_to_globals "SDK_IMAGE=${image@Q}"
}
# Appends passed lines to the globals file.
#
# Params:
#
# @ - lines to append
function append_to_globals() {
    local globals_path="${WORKDIR}/globals"
    # The initial globals file must have been written already.
    [[ -e ${globals_path} ]] || fail "globals not set yet in workdir"
    lines_to_file "${globals_path}" "${@}"
}
# Processes the update files in portage-stable and coreos-overlay to
# figure out potential package renames. The results are stored in the
# passed map.
#
# Params:
#
# 1 - name of a map variable; will be a mapping of old package name to
# new package name
function process_profile_updates_directory() {
    # Name of the caller's associative array (old name -> new name).
    local from_to_map_var_name=${1}; shift
    local -a ppud_ordered_names
    # Update files (Q1-2018, ...) are processed oldest-first so rename
    # chains are resolved in order.
    get_ordered_update_filenames ppud_ordered_names
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    local bf ps_f co_f pkg f line old new
    local -a fields
    # Renames gathered from the update files; portage-stable is read
    # first, then coreos-overlay, so the overlay entry wins for the same
    # old name within one basename.
    local -A from_to_f=()
    # Reverse mapping (new name -> set of old names) used by
    # update_rename_maps to collapse rename chains.
    mvm_declare ppud_to_from_set_mvm mvm_mvc_set
    for bf in "${ppud_ordered_names[@]}"; do
        # coreos-overlay updates may overwrite updates from
        # portage-stable, but only from the file of the same name
        ps_f=${NEW_PORTAGE_STABLE}/profiles/updates/${bf}
        co_f=${NEW_COREOS_OVERLAY}/profiles/updates/${bf}
        for f in "${ps_f}" "${co_f}"; do
            if [[ ! -f ${f} ]]; then
                continue
            fi
            while read -r line; do
                if [[ ${line} != 'move '* ]]; then
                    # other possibility is "slotmove" - we don't care
                    # about those.
                    continue
                fi
                # A move line is "move <old> <new>" - split on spaces.
                mapfile -t fields <<<"${line// /$'\n'}"
                if [[ ${#fields[@]} -ne 3 ]]; then
                    fail_lines \
                        "Malformed line ${line@Q} in updates file ${f@Q}." \
                        "The line should have 3 fields, has ${#fields[*]}."
                fi
                from_to_f["${fields[1]}"]=${fields[2]}
            done <"${f}"
        done
        # NOTE(review): from_to_f is not reset per update file, so
        # earlier entries get re-submitted on every iteration;
        # update_rename_maps tolerates exact duplicates.
        for old in "${!from_to_f[@]}"; do
            new=${from_to_f["${old}"]}
            update_rename_maps "${from_to_map_var_name}" ppud_to_from_set_mvm "${old}" "${new}"
        done
    done
    mvm_unset ppud_to_from_set_mvm
}
# Gets a merged and ordered list of update files from portage-stable
# and coreos-overlay and stores them in the passed array. The files
# have names like Q1-2018, Q1-2023, Q2-2019 and so on. We need to sort
# them by year, then by quarter.
#
# Params:
#
# 1 - name of a array variable where the ordered names will be stored
function get_ordered_update_filenames() {
    local ordered_names_var_name=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # Collect the unique basenames from both update directories.
    local -A names_set=()
    local f
    for f in "${NEW_PORTAGE_STABLE}/profiles/updates/"* "${NEW_COREOS_OVERLAY}/profiles/updates/"*; do
        names_set["${f##*/}"]=x
    done
    # Names look like Q1-2018: sort by year (field 2) first, then by
    # quarter number (field 1, second character, skipping the 'Q').
    # Fix: the previous '--key=1n' parsed 'Q1' numerically as 0 for
    # every entry, so the quarter ordering only worked by accident via
    # sort's last-resort whole-line comparison.
    mapfile -t "${ordered_names_var_name}" < <(printf '%s\n' "${!names_set[@]}" | sort --field-separator=- --key=2,2n --key=1.2,1n)
}
# Updates the rename map with the new "old to new package rename". It
# tries to be smart about the rename sequences (though not sure if
# this is necessary, really). If in older update file package foo was
# renamed to bar and in current update file the bar package is renamed
# to quux, then this function adds an entry about the "bar to quux"
# rename, but also updates the older entry about "foo to bar" rename
# to "foo to quux" rename.
#
# Params:
#
# 1 - name of the renames map variable; should be a mapping of old to
# new names
# 2 - name of the set mvm variable; should be a mapping of new name to
# a set of old names (a reverse mapping to the renames map)
# 3 - old name
# 4 - new name
function update_rename_maps() {
    # Caller's associative array: old name -> final new name.
    local -n ft_map_ref=${1}; shift
    # mvm set: new name -> set of all old names that ended up there.
    local tf_set_mvm_var_name=${1}; shift
    local old_name=${1}; shift
    local new_name=${1}; shift
    # A single old name must not map to two different new names.
    local prev_new_name=${ft_map_ref["${old_name}"]:-}
    if [[ -n ${prev_new_name} ]] && [[ ${prev_new_name} != "${new_name}" ]]; then
        fail_lines \
            "Invalid package rename from ${old_name@Q} to ${new_name@Q}." \
            "There was already a rename from ${old_name@Q} to ${prev_new_name@Q}."
    fi
    # Collect every name that previously ended up at old_name - those
    # now transitively end up at new_name too (rename-chain collapse).
    local -a new_set=()
    local urm_set_var_name
    mvm_get "${tf_set_mvm_var_name}" "${old_name}" urm_set_var_name
    if [[ -n ${urm_set_var_name} ]]; then
        local -n old_set_ref=${urm_set_var_name}
        new_set+=( "${!old_set_ref[@]}" )
        unset -n old_set_ref
    fi
    new_set+=( "${old_name}" )
    mvm_add "${tf_set_mvm_var_name}" "${new_name}" "${new_set[@]}"
    # Repoint every collected old name at the newest name.
    local old
    for old in "${new_set[@]}"; do
        ft_map_ref["${old}"]=${new_name}
    done
    unset -n ft_map_ref
}
# Sets up a worktree and necessary cleanups.
#
# Params:
#
# 1 - path to the git repo
# 2 - name of a branch to be used as a base of a new worktree branch
# 3 - name of the new worktree branch
# 4 - path where the new worktree will be created
function setup_worktree() {
    local repo_path base_ref new_branch tree_dir
    repo_path=${1}; shift
    base_ref=${1}; shift
    new_branch=${1}; shift
    tree_dir=${1}; shift
    # Register teardown up front so a partially created worktree is
    # still removed: scrub local changes, drop the worktree, then the
    # branch backing it.
    add_cleanup \
        "git -C ${tree_dir@Q} reset --hard HEAD" \
        "git -C ${tree_dir@Q} clean -ffdx" \
        "git -C ${repo_path@Q} worktree remove ${tree_dir@Q}" \
        "git -C ${repo_path@Q} branch -D ${new_branch@Q}"
    git -C "${repo_path}" worktree add -b "${new_branch}" "${tree_dir}" "${base_ref}"
}
# Creates an initial globals file. It's initial because it contains
# data known up-front, so mostly things that are defined in one place
# to avoid repeating them everywhere.
#
# More stuff will be added later to the globals file based on config
# or else.
function setup_initial_globals_file() {
    local globals_path="${WORKDIR}/globals"
    local sync_helper="${PKG_AUTO_IMPL_DIR}/sync_with_gentoo.sh"
    local sort_helper="${PKG_AUTO_IMPL_DIR}/sort_packages_list.py"
    # Architectures come from a project helper.
    local -a sigf_valid_arches
    get_valid_arches sigf_valid_arches
    add_cleanup "rm -f ${globals_path@Q}"
    # The globals file is meant to be sourced from inside functions,
    # hence the 'local' declarations in it.
    cat <<EOF >"${globals_path}"
local -a GIT_ENV_VARS ARCHES WHICH REPORTS
local SDK_PKGS BOARD_PKGS
local SYNC_SCRIPT PKG_LIST_SORT_SCRIPT
GIT_ENV_VARS=(
GIT_{AUTHOR,COMMITTER}_{NAME,EMAIL}
)
SYNC_SCRIPT=${sync_helper@Q}
PKG_LIST_SORT_SCRIPT=${sort_helper@Q}
ARCHES=( ${sigf_valid_arches[*]@Q} )
WHICH=('old' 'new')
SDK_PKGS='sdk-pkgs'
BOARD_PKGS='board-pkgs'
REPORTS=( "\${SDK_PKGS}" "\${BOARD_PKGS}" )
EOF
}
# Extend the globals file with information from config and other
# information derived from it.
#
# Params:
#
# 1 - path to scripts repository
# 2 - path to scripts worktree with old state
# 3 - path to scripts worktree with new state
# 4 - path to reports directory
# 5 - path to aux directory
function extend_globals_file() {
    local scripts old_state new_state reports_dir aux_dir
    scripts=${1}; shift
    old_state=${1}; shift
    new_state=${1}; shift
    reports_dir=${1}; shift
    aux_dir=${1}; shift
    local globals_file
    globals_file="${WORKDIR}/globals"
    if [[ ! -e "${globals_file}" ]]; then
        fail 'an initial version of globals file should already exist'
    fi
    # Branch names of the worktrees created by setup_worktree.
    local old_state_branch new_state_branch
    old_state_branch=$(git -C "${old_state}" rev-parse --abbrev-ref HEAD)
    new_state_branch=$(git -C "${new_state}" rev-parse --abbrev-ref HEAD)
    local portage_stable_suffix old_portage_stable new_portage_stable
    portage_stable_suffix='sdk_container/src/third_party/portage-stable'
    old_portage_stable="${old_state}/${portage_stable_suffix}"
    new_portage_stable="${new_state}/${portage_stable_suffix}"
    local coreos_overlay_suffix old_coreos_overlay new_coreos_overlay
    coreos_overlay_suffix='sdk_container/src/third_party/coreos-overlay'
    old_coreos_overlay="${old_state}/${coreos_overlay_suffix}"
    new_coreos_overlay="${new_state}/${coreos_overlay_suffix}"
    cat <<EOF >>"${globals_file}"
local SCRIPTS OLD_STATE NEW_STATE OLD_STATE_BRANCH NEW_STATE_BRANCH
local PORTAGE_STABLE_SUFFIX OLD_PORTAGE_STABLE NEW_PORTAGE_STABLE REPORTS_DIR
local NEW_STATE_PACKAGES_LIST AUX_DIR
local COREOS_OVERLAY_SUFFIX OLD_COREOS_OVERLAY NEW_COREOS_OVERLAY
SCRIPTS=${scripts@Q}
OLD_STATE=${old_state@Q}
NEW_STATE=${new_state@Q}
OLD_STATE_BRANCH=${old_state_branch@Q}
NEW_STATE_BRANCH=${new_state_branch@Q}
PORTAGE_STABLE_SUFFIX=${portage_stable_suffix@Q}
OLD_PORTAGE_STABLE=${old_portage_stable@Q}
NEW_PORTAGE_STABLE=${new_portage_stable@Q}
REPORTS_DIR=${reports_dir@Q}
COREOS_OVERLAY_SUFFIX=${coreos_overlay_suffix@Q}
OLD_COREOS_OVERLAY=${old_coreos_overlay@Q}
NEW_COREOS_OVERLAY=${new_coreos_overlay@Q}
NEW_STATE_PACKAGES_LIST="\${NEW_STATE}/.github/workflows/portage-stable-packages-list"
AUX_DIR=${aux_dir@Q}
EOF
    # shellcheck source=for-shellcheck/globals
    source "${globals_file}"
    # Derive the default SDK image from the manifest's version file.
    local last_nightly_version_id last_nightly_build_id
    # shellcheck source=for-shellcheck/version.txt
    last_nightly_version_id=$(source "${NEW_STATE}/sdk_container/.repo/manifests/version.txt"; printf '%s' "${FLATCAR_VERSION_ID}")
    # shellcheck source=for-shellcheck/version.txt
    last_nightly_build_id=$(source "${NEW_STATE}/sdk_container/.repo/manifests/version.txt"; printf '%s' "${FLATCAR_BUILD_ID}")
    local -a locals=() definitions=()
    local sdk_image_name sdk_image_var_name=SDK_IMAGE
    sdk_image_name="ghcr.io/flatcar/flatcar-sdk-all:${last_nightly_version_id}-${last_nightly_build_id}"
    locals+=( "${sdk_image_var_name@Q}" )
    definitions+=( "${sdk_image_var_name}=${sdk_image_name@Q}" )
    append_to_globals \
        '' \
        "local ${locals[*]}" \
        '' \
        "${definitions[@]}"
    # Map image tag (prod, dev, sysext-..., oem name) to the package
    # listing filename producing it.
    local -A listing_kinds
    # Fix: arch was not declared local here and leaked to the caller.
    local arch packages_file tag filename stripped old
    for arch in "${ARCHES[@]}"; do
        for packages_file in "${AUX_DIR}/${arch}/"*_packages.txt; do
            filename=${packages_file##*/}
            stripped=${filename%_packages.txt}
            case ${stripped} in
                'flatcar_developer_container')
                    tag='dev'
                    ;;
                'flatcar_production_image')
                    tag='prod'
                    ;;
                'flatcar-'*)
                    tag="sysext-${stripped#flatcar-}"
                    ;;
                'oem-'*)
                    tag=${stripped#oem-}
                    ;;
                *'-flatcar')
                    tag="sysext-${stripped%-flatcar}"
                    ;;
                *)
                    devel_warn "Unknown listing file ${packages_file@Q}"
                    continue
                    ;;
            esac
            old=${listing_kinds["${tag}"]:-}
            if [[ -n ${old} ]]; then
                # Fix: these two spots compared against / stored the
                # command substitution $(unknown) instead of the
                # filename, which would run a nonexistent command.
                if [[ ${old} != "${filename}" ]]; then
                    devel_warn "Two different packages files (${old@Q} and ${filename@Q}) for a single tag ${tag@Q}"
                fi
            else
                listing_kinds["${tag}"]=${filename}
            fi
        done
    done
    # Store the mapping in the globals file in a stable (sorted) order.
    local -a sorted_tags sorted_lines=()
    mapfile -t sorted_tags < <(printf '%s\n' "${!listing_kinds[@]}" | sort)
    for tag in "${sorted_tags[@]}"; do
        filename=${listing_kinds["${tag}"]}
        sorted_lines+=(" [${tag@Q}]=${filename@Q}")
    done
    append_to_globals \
        '' \
        'local -A LISTING_KINDS' \
        '' \
        'LISTING_KINDS=(' \
        "${sorted_lines[@]}" \
        ')'
}
# Sets up environment variables for some git commands.
#
# Make sure to call the following beforehand:
#
# local -x "${GIT_ENV_VARS[@]}"
#
# The GIT_ENV_VARS array comes from the globals file.
function setup_git_env() {
    # Point both the author and the committer identity at the Flatcar
    # buildbot. The GIT_* variables are expected to be declared by the
    # caller beforehand (local -x "${GIT_ENV_VARS[@]}").
    local role
    for role in AUTHOR COMMITTER; do
        printf -v "GIT_${role}_NAME" '%s' 'Flatcar Buildbot'
        printf -v "GIT_${role}_EMAIL" '%s' 'buildbot@flatcar-linux.org'
    done
}
# Goes over the packages list and syncs them with the passed Gentoo
# repo.
#
# Params:
#
# 1 - path to the Gentoo repo
function run_sync() {
    # Path to the local Gentoo repo checkout.
    local gentoo
    gentoo=${1}; shift
    # Listed packages that no longer exist on our side / on the Gentoo
    # side; persisted at the end for the handle_missing_* follow-ups.
    local -a missing_in_scripts missing_in_gentoo
    missing_in_scripts=()
    missing_in_gentoo=()
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    local -x "${GIT_ENV_VARS[@]}"
    setup_git_env
    local -a packages_to_update
    packages_to_update=()
    local package
    # Triage every package on the automation list.
    while read -r package; do
        if [[ ! -e "${NEW_PORTAGE_STABLE}/${package}" ]]; then
            # If this happens, it means that the package was moved to overlay
            # or dropped, the list ought to be updated.
            missing_in_scripts+=("${package}")
            continue
        fi
        if [[ ! -e "${gentoo}/${package}" ]]; then
            # If this happens, it means that the package was obsoleted or moved
            # in Gentoo. The obsoletion needs to be handled in the case-by-case
            # manner, while move should be handled by doing the same move
            # in portage-stable. The build should not break because of the move,
            # because most likely it's already reflected in the profiles/updates
            # directory.
            missing_in_gentoo+=("${package}")
            continue
        fi
        packages_to_update+=( "${package}" )
    done < <(cat_meaningful "${NEW_STATE_PACKAGES_LIST}")
    # Sync all surviving packages in one sync_with_gentoo.sh run, with
    # portage-stable as the working directory.
    env --chdir="${NEW_PORTAGE_STABLE}" "${SYNC_SCRIPT}" -b -- "${gentoo}" "${packages_to_update[@]}"
    save_missing_in_scripts "${missing_in_scripts[@]}"
    save_missing_in_gentoo "${missing_in_gentoo[@]}"
}
# A helper function that prints the contents of a file skipping empty
# lines and lines starting with a hash.
#
# Params:
#
# 1 - path to a file to print
function cat_meaningful() {
    local path=${1}; shift
    # A line qualifies when its first character exists and is not '#',
    # which drops blank lines and whole-line comments in one pattern.
    xgrep '^[^#]' "${path}"
}
# Saves a list of package names to a file and adds a cleanup for
# it. The names can be loaded again with load_simple_package_list.
#
# Params:
#
# 1 - path to a file where package names will be stored
# @ - the package names
function save_simple_package_list() {
    local list_file=${1}; shift
    # rest of the arguments are package names
    add_cleanup "rm -f ${list_file@Q}"
    if [[ ${#} -gt 0 ]]; then
        printf '%s\n' "${@}" >"${list_file}"
    else
        # printf with no arguments would still emit a single empty
        # line, so make the file empty instead.
        truncate --size=0 "${list_file}"
    fi
}
# Loads a list of packages saved previously with
# save_simple_package_list.
#
# Params:
#
# 1 - path to a file where packages were stored
# 2 - name of an array variable; will contain package names
function load_simple_package_list() {
    local list_file=${1}; shift
    local out_array_var_name=${1}; shift
    # One package name per line, newlines stripped.
    mapfile -t "${out_array_var_name}" <"${list_file}"
}
# General function for saving missing packages. Takes care of creating
# a directory for the listing.
#
# Params:
#
# 1 - path to a directory which will contain the listing
# 2 - name of the listing file
function save_missing_packages() {
    local listing_dir listing_file
    listing_dir=${1}; shift
    listing_file=${1}; shift
    # rest of the arguments are package names
    create_cleanup_dir "${listing_dir}"
    save_simple_package_list "${listing_dir}/${listing_file}" "${@}"
}
# Creates a directory and adds a cleanup if the directory was missing.
#
# Params:
#
# 1 - path to the directory
function create_cleanup_dir() {
    local dir=${1}; shift
    # Schedule removal only when we are the ones creating the
    # directory; a preexisting one is left alone.
    if [[ -d ${dir} ]]; then
        return 0
    fi
    add_cleanup "rmdir ${dir@Q}"
    mkdir "${dir}"
}
# Saves a list of package names that were missing in the scripts repo (which
# means that we were asked to sync a package that isn't in scripts to
# begin with).
#
# Params:
#
# @ - package names
function save_missing_in_scripts() {
    # Thin wrapper fixing the listing location; loaded back by
    # load_missing_in_scripts.
    save_missing_packages "${WORKDIR}/missing_in_scripts" "saved_list" "${@}"
}
# Saves a list of package names that were missing in the Gentoo repo (which
# means that we were asked to sync a possibly obsolete or renamed
# package).
#
# Params:
#
# @ - package names
function save_missing_in_gentoo() {
    # Thin wrapper fixing the listing location; loaded back by
    # load_missing_in_gentoo.
    save_missing_packages "${WORKDIR}/missing_in_gentoo" "saved_list" "${@}"
}
# Loads a list of package names that were missing in scripts repo.
#
# Params:
#
# 1 - name of an array variable; will contain package names
function load_missing_in_scripts() {
    local out_var_name=${1}; shift
    # Counterpart of save_missing_in_scripts.
    load_simple_package_list "${WORKDIR}/missing_in_scripts/saved_list" "${out_var_name}"
}
# Loads a list of package names that were missing in Gentoo repo.
#
# Params:
#
# 1 - name of an array variable; will contain package names
function load_missing_in_gentoo() {
    local out_var_name=${1}; shift
    # Counterpart of save_missing_in_gentoo.
    load_simple_package_list "${WORKDIR}/missing_in_gentoo/saved_list" "${out_var_name}"
}
# Handles package names that were missing in scripts by dropping them
# from the listing of packages that should be updated automatically.
function handle_missing_in_scripts() {
    local -a hmis_missing_in_scripts
    hmis_missing_in_scripts=()
    load_missing_in_scripts hmis_missing_in_scripts
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    if [[ ${#hmis_missing_in_scripts[@]} -eq 0 ]]; then
        return 0;
    fi
    # Remove missing in scripts entries from package automation
    local dir
    dir="${WORKDIR}/missing_in_scripts"
    create_cleanup_dir "${dir}"
    local missing_re
    # Fix: this referenced the undefined "missing_in_scripts" array
    # (a local of run_sync) instead of hmis_missing_in_scripts.
    join_by missing_re '\|' "${hmis_missing_in_scripts[@]}"
    add_cleanup "rm -f ${dir@Q}/pkg_list"
    # Fix: dropped --fixed-strings - it made grep treat the '\|'
    # alternation produced by join_by literally, so no lines matched
    # when more than one package was missing. This now mirrors the
    # basic-regex filtering done in handle_missing_in_gentoo.
    xgrep --invert-match --line-regexp --regexp="${missing_re}" "${NEW_STATE_PACKAGES_LIST}" >"${dir}/pkg_list"
    "${PKG_LIST_SORT_SCRIPT}" "${dir}/pkg_list" >"${NEW_STATE_PACKAGES_LIST}"
    local -x "${GIT_ENV_VARS[@]}"
    setup_git_env
    git -C "${NEW_STATE}" add "${NEW_STATE_PACKAGES_LIST}"
    git -C "${NEW_STATE}" commit --quiet --message '.github: Drop missing packages from automation'
    info_lines 'dropped missing packages from automation' "${hmis_missing_in_scripts[@]/#/- }"
}
# Helper function to print lines to a file clobbering the old
# contents.
#
# Params:
#
# 1 - path to the file
# @ - lines to print
function lines_to_file_truncate() {
    local target=${1}
    # Empty the file first, then reuse the appending helper.
    truncate --size=0 "${target}"
    lines_to_file "${@}"
}
# Helper function to append lines to a file.
#
# Params:
#
# 1 - path to the file
# @ - lines to print
function lines_to_file() {
    local target_file=${1}; shift
    # rest of the arguments are the lines, each newline-terminated
    printf '%s\n' "${@}" >>"${target_file}"
}
# Adds lines to "manual work needed" file in given directory.
#
# Params:
#
# 1 - directory where the file is
# @ - lines to add
function manual_d() {
    local report_dir=${1}; shift
    # rest of the arguments are lines to append
    pkg_debug_lines 'manual work needed:' "${@}"
    lines_to_file "${report_dir}/manual-work-needed" "${@}"
}
# Adds lines to "manual work needed" file in reports.
#
# Params:
#
# @ - lines to add
function manual() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # REPORTS_DIR comes from the globals file sourced above.
    manual_d "${REPORTS_DIR}" "${@}"
}
# Adds lines to "warnings" file in given directory. Should be used to
# report some issues with the processed packages.
#
# Params:
#
# 1 - directory where the file is
# @ - lines to add
function pkg_warn_d() {
    local report_dir=${1}; shift
    # rest of the arguments are lines to append
    pkg_debug_lines 'pkg warn:' "${@}"
    lines_to_file "${report_dir}/warnings" "${@}"
}
# Adds lines to "warnings" file in reports. Should be used to report
# some issues with the processed packages.
#
# Params:
#
# @ - lines to add
function pkg_warn() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # REPORTS_DIR comes from the globals file sourced above.
    pkg_warn_d "${REPORTS_DIR}" "${@}"
}
# Adds lines to "developer warnings" file in given directory. Should
# be used to report some failed assumption in the automation, or bugs.
#
# Params:
#
# 1 - directory where the file is
# @ - lines to add
function devel_warn_d() {
    local report_dir=${1}; shift
    # rest of the arguments are lines to append
    pkg_debug_lines 'developer warn:' "${@}"
    lines_to_file "${report_dir}/developer-warnings" "${@}"
}
# Adds lines to "developer warnings" file in reports. Should be used
# to report some failed assumption in the automation, or bugs.
#
# Params:
#
# @ - lines to add
function devel_warn() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # REPORTS_DIR comes from the globals file sourced above.
    devel_warn_d "${REPORTS_DIR}" "${@}"
}
# Handles package names that were missing from Gentoo by either
# renaming and syncing them if a rename exists or by adding the
# package to the "manual work needed" file.
function handle_missing_in_gentoo() {
    local gentoo
    gentoo=${1}; shift
    local -a hmig_missing_in_gentoo
    hmig_missing_in_gentoo=()
    load_missing_in_gentoo hmig_missing_in_gentoo
    if [[ ${#hmig_missing_in_gentoo[@]} -eq 0 ]]; then
        return 0;
    fi
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # Mapping of old package name -> new package name, built from the
    # profiles/updates directories.
    local -A hmig_rename_map=()
    process_profile_updates_directory hmig_rename_map
    local -a renamed_from renamed_to
    renamed_from=()
    renamed_to=()
    local -x "${GIT_ENV_VARS[@]}"
    setup_git_env
    local missing new_name hmig_old_basename hmig_new_basename ebuild ebuild_version_ext new_ebuild_filename
    for missing in "${hmig_missing_in_gentoo[@]}"; do
        new_name=${hmig_rename_map["${missing}"]:-}
        if [[ -z "${new_name}" ]]; then
            # No rename on record - a human needs to figure out what
            # happened to the package.
            manual "- package ${missing} is gone from Gentoo and no rename found"
            continue
        fi
        # Move the package directory to its new category/name and, if
        # the package name itself changed, rename the ebuild files
        # (<name>-<version>.ebuild) as well.
        mkdir -p "${NEW_PORTAGE_STABLE}/${new_name%/*}"
        git -C "${NEW_STATE}" mv "${NEW_PORTAGE_STABLE}/${missing}" "${NEW_PORTAGE_STABLE}/${new_name}"
        basename_out "${missing}" hmig_old_basename
        basename_out "${new_name}" hmig_new_basename
        if [[ "${hmig_old_basename}" != "${hmig_new_basename}" ]]; then
            for ebuild in "${NEW_PORTAGE_STABLE}/${new_name}/${hmig_old_basename}-"*'.ebuild'; do
                # 1.2.3-r4.ebuild
                ebuild_version_ext=${ebuild##*/"${hmig_old_basename}-"}
                new_ebuild_filename="${hmig_new_basename}-${ebuild_version_ext}"
                git -C "${NEW_STATE}" mv "${ebuild}" "${NEW_PORTAGE_STABLE}/${new_name}/${new_ebuild_filename}"
            done
        fi
        git -C "${NEW_STATE}" commit --quiet --message "${new_name}: Renamed from ${missing}"
        info "renamed ${missing} to ${new_name}"
        renamed_from+=("${missing}")
        renamed_to+=("${new_name}")
    done
    if [[ ${#renamed_from[@]} -eq 0 ]]; then
        return 0
    fi
    # Sync the renamed packages from Gentoo under their new names.
    env --chdir="${NEW_PORTAGE_STABLE}" "${SYNC_SCRIPT}" -b -- "${gentoo}" "${renamed_to[@]}"
    # Replace the old names with the new ones on the automation list
    # (filter out old names, append new ones, re-sort).
    local dir renamed_re
    dir="${WORKDIR}/missing_in_gentoo"
    create_cleanup_dir "${dir}"
    join_by renamed_re '\|' "${renamed_from[@]}"
    add_cleanup "rm -f ${dir@Q}/pkg_list"
    {
        xgrep --invert-match --line-regexp --regexp="${renamed_re}" "${NEW_STATE_PACKAGES_LIST}"
        printf '%s\n' "${renamed_to[@]}"
    } >"${dir}/pkg_list"
    "${PKG_LIST_SORT_SCRIPT}" "${dir}/pkg_list" >"${NEW_STATE_PACKAGES_LIST}"
    git -C "${NEW_STATE}" add "${NEW_STATE_PACKAGES_LIST}"
    git -C "${NEW_STATE}" commit --quiet --message '.github: Update package names in automation'
    info 'updated packages names in automation'
}
# Process the package listings stored in the aux directory to find out
# the package tags that describe the kind of image the package is used
# in (base image, developer container, sysext image, etc.)
#
# Params:
#
# 1 - a name to an array mvm variable; will be a mapping of a package
# name to an array of tags
function process_listings() {
    local pkg_to_tags_mvm_var_name
    pkg_to_tags_mvm_var_name=${1}
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    #mvm_debug_enable pl_pkg_to_tags_set_mvm
    # Intermediate set mvm - collects unique tags per package before
    # they get ordered into the caller's array mvm.
    mvm_declare pl_pkg_to_tags_set_mvm mvm_mvc_set
    local arch kind file listing pkg
    for arch in "${ARCHES[@]}"; do
        for kind in "${!LISTING_KINDS[@]}"; do
            file=${LISTING_KINDS["${kind}"]}
            listing="${AUX_DIR}/${arch}/${file}"
            if [[ ! -e "${listing}" ]]; then
                # some listings are arch-specific, so they will be
                # missing for other arches
                continue
            fi
            # lines are like as follows:
            #
            # acct-group/adm-0-r2::portage-stable
            #
            # the sed below strips the version and repo parts, leaving
            # just the category/name.
            while read -r pkg; do
                pkg_debug_enable "${pkg}"
                pkg_debug "processing listing ${arch}/${file}: adding tag ${kind^^}"
                pkg_debug_disable
                mvm_add pl_pkg_to_tags_set_mvm "${pkg}" "${kind^^}"
                # VER_ERE_UNBOUNDED and PKG_ERE_UNBOUNDED come from gentoo_ver.sh
            done < <(sed -E -e 's#^('"${PKG_ERE_UNBOUNDED}"')-'"${VER_ERE_UNBOUNDED}"'::.*#\1#' "${listing}")
        done
    done
    # Convert the sets to ordered arrays (PROD first) in the caller's mvm.
    mvm_iterate pl_pkg_to_tags_set_mvm set_mvm_to_array_mvm_cb "${pkg_to_tags_mvm_var_name}"
    mvm_unset pl_pkg_to_tags_set_mvm
    #mvm_debug_disable pl_pkg_to_tags_set_mvm
    # Debug dump for packages registered via pkg_debug_add.
    # NOTE(review): pl_tags_array_name is declared with -a but used as a
    # scalar below - confirm this is intentional.
    local -a pl_debug_pkgs pl_tags_array_name
    pkg_debug_packages pl_debug_pkgs
    for pkg in "${pl_debug_pkgs[@]}"; do
        mvm_get "${pkg_to_tags_mvm_var_name}" "${pkg}" pl_tags_array_name
        local -n tags_ref=${pl_tags_array_name:-EMPTY_ARRAY}
        pkg_debug_print_c "${pkg}" "tags stored in ${pkg_to_tags_mvm_var_name}: ${tags_ref[*]}"
        unset -n tags_ref
    done
}
# A callback to mvm_iterate that turns a set mvm to an array mvm. It
# makes sure that the tag for the production image (or base image) is
# always first in the array.
#
# Params:
#
# 1 - name of the array mvm variable that will be filled (extra arg of
# the callback)
# 2 - name of the package
# 3 - name of the set variable holding tags
# @ - tags
function set_mvm_to_array_mvm_cb() {
    local target_mvm_var_name pkg_name
    target_mvm_var_name=${1}; shift
    pkg_name=${1}; shift
    local -n tags_set_ref=${1}; shift
    # rest of the arguments are the set items; the set variable is used
    # directly instead
    local -a leading_prod=()
    local had_prod=''
    if [[ -n ${tags_set_ref['PROD']:-} ]]; then
        # Pull PROD out of the set so it does not take part in the
        # sorting below - it always goes first.
        leading_prod+=('PROD')
        unset "tags_set_ref['PROD']"
        had_prod=x
    fi
    local -a rest_sorted
    mapfile -t rest_sorted < <(printf '%s\n' "${!tags_set_ref[@]}" | sort)
    if [[ -n ${had_prod} ]]; then
        # Put PROD back - the caller's set must stay unmodified.
        tags_set_ref['PROD']=x
    fi
    mvm_add "${target_mvm_var_name}" "${pkg_name}" "${leading_prod[@]}" "${rest_sorted[@]}"
}
# Fields of the sdk job state struct.
#
# SJS_COMMAND_IDX - an array containing a command to run
#
# SJS_STATE_DIR_IDX - run's state directory
#
# SJS_KIND_IDX - run kind (either old or new, meaning reports for
# packages before updates or after updates)
#
# SJS_JOB_NAME_IDX - job variable name
#
# Note: the command field stores the NAME of an array variable
# ('EMPTY_ARRAY' when unset), not the command words themselves - see
# sdk_job_state_declare and sdk_job_state_unset.
declare -gri SJS_COMMAND_IDX=0 SJS_STATE_DIR_IDX=1 SJS_KIND_IDX=2 SJS_JOB_NAME_IDX=3
# Declare SDK job state variables.
#
# Parameters:
#
# @ - names of variables to be used for states.
function sdk_job_state_declare() {
    # Four fields as indexed by the SJS_*_IDX constants; the command
    # field starts out pointing at the shared EMPTY_ARRAY placeholder.
    struct_declare -ga "${@}" "( 'EMPTY_ARRAY' '' '' '' )"
}
# Unset SDK job state variables. Along with each state variable, its
# command array (unless it is the shared EMPTY_ARRAY placeholder) and
# its job variable (if one was assigned) are unset too.
#
# Parameters:
#
# @ - names of state variables
function sdk_job_state_unset() {
    local name
    for name; do
        local -n sdk_job_state_ref=${name}
        local array_name=${sdk_job_state_ref[SJS_COMMAND_IDX]}
        # EMPTY_ARRAY is a shared placeholder, never unset it.
        if [[ ${array_name} != 'EMPTY_ARRAY' ]]; then
            unset "${array_name}"
        fi
        unset array_name
        local job_name=${sdk_job_state_ref[SJS_JOB_NAME_IDX]}
        if [[ -n ${job_name} ]]; then
            job_unset "${job_name}"
        fi
        unset job_name
        # Fixed: this used to drop a nonexistent "sdk_run_state_ref"
        # nameref, leaving sdk_job_state_ref dangling.
        unset -n sdk_job_state_ref
    done
    unset "${@}"
}
# Generate package reports inside SDKs for all arches and states. In
# case of failure, whatever reports where generated so far will be
# stored in salvaged-reports subdirectory of the reports directory.
# Otherwise they will end up in reports-from-sdk subdirectory.
#
# Reads globals sourced from "${WORKDIR}/globals" (WHICH, ARCHES,
# SDK_IMAGE, SCRIPTS, PKG_AUTO_DIR, REPORTS_DIR, …) and appends
# started job names to the all_sdk_jobs array.
function generate_sdk_reports() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    add_cleanup "rmdir ${WORKDIR@Q}/pkg-reports"
    mkdir "${WORKDIR}/pkg-reports"
    if ! docker images --format '{{.Repository}}:{{.Tag}}' | grep --quiet --line-regexp --fixed-strings "${SDK_IMAGE}"; then
        fail "No SDK image named ${SDK_IMAGE@Q} available locally, pull it before running this script"
    fi
    local sdk_run_kind state_var_name sdk_run_state state_branch_var_name sdk_run_state_branch
    local pkg_auto_copy rv
    local sdk_reports_dir top_dir dir entry full_path
    local -a dir_queue all_dirs all_files
    local job_args_var_name sdk_job_state_name
    local -a sdk_job_state_names=()
    # First create and set up SDK job states for the "before updates"
    # (referred as old) and "after updates" (referred as new)
    # jobs. This means creating a separate worktrees, state
    # directories, and preparing commands to be run as a job.
    for sdk_run_kind in "${WHICH[@]}"; do
        state_var_name="${sdk_run_kind^^}_STATE"
        sdk_run_state="${!state_var_name}_sdk_run"
        state_branch_var_name="${sdk_run_kind^^}_STATE_BRANCH"
        sdk_run_state_branch="${!state_branch_var_name}-sdk-run"
        add_cleanup \
            "git -C ${sdk_run_state@Q} reset --hard HEAD" \
            "git -C ${sdk_run_state@Q} clean -ffdx" \
            "git -C ${SCRIPTS@Q} worktree remove ${sdk_run_state@Q}" \
            "git -C ${SCRIPTS@Q} branch -D ${sdk_run_state_branch@Q}"
        git -C "${SCRIPTS}" \
            worktree add -b "${sdk_run_state_branch}" "${sdk_run_state}" "${!state_branch_var_name}"
        # The pkg_auto scripts are copied, so the run uses a stable
        # snapshot mounted into the SDK container.
        pkg_auto_copy=$(mktemp --tmpdir="${WORKDIR}" --directory "pkg-auto-copy.XXXXXXXX")
        add_cleanup "rm -rf ${pkg_auto_copy@Q}"
        cp -a "${PKG_AUTO_DIR}"/* "${pkg_auto_copy}"
        gen_varname job_args_var_name
        declare -ga "${job_args_var_name}=()"
        local -n job_args_ref=${job_args_var_name}
        job_args_ref=(
            env
            --chdir "${sdk_run_state}"
            ./run_sdk_container
            -C "${SDK_IMAGE}"
            -n "pkg-${sdk_run_kind}"
            -U
            -m "${pkg_auto_copy}:/mnt/host/source/src/scripts/pkg_auto"
            --rm
            ./pkg_auto/inside_sdk_container.sh
            pkg-reports
            "${ARCHES[@]}"
        )
        unset -n job_args_ref
        gen_varname sdk_job_state_name
        sdk_job_state_declare "${sdk_job_state_name}"
        local -n sdk_job_state_ref="${sdk_job_state_name}"
        sdk_job_state_ref[SJS_COMMAND_IDX]=${job_args_var_name}
        sdk_job_state_ref[SJS_STATE_DIR_IDX]=${sdk_run_state}
        sdk_job_state_ref[SJS_KIND_IDX]=${sdk_run_kind}
        unset -n sdk_job_state_ref
        sdk_job_state_names+=( "${sdk_job_state_name}" )
    done
    # Create the jobs and kick them off.
    local sdk_job_name
    for sdk_job_state_name in "${sdk_job_state_names[@]}"; do
        local -n sdk_job_state_ref=${sdk_job_state_name}
        job_args_var_name=${sdk_job_state_ref[SJS_COMMAND_IDX]}
        gen_varname sdk_job_name
        job_declare "${sdk_job_name}"
        # NOTE(review): all_sdk_jobs is presumably a global array
        # declared elsewhere in this file — confirm.
        all_sdk_jobs+=( "${sdk_job_name}" )
        local -n job_args_ref=${job_args_var_name}
        job_run -m "${sdk_job_name}" "${job_args_ref[@]}"
        unset -n job_args_ref
        sdk_job_state_ref[SJS_JOB_NAME_IDX]=${sdk_job_name}
        unset -n sdk_job_state_ref
    done
    # Loop over the current running job states array to check if jobs
    # in the array are still alive. The alive jobs will be added to
    # the next running job states array that will be looped over (thus
    # becoming the "current" array, whereas old "current" array
    # becomes "next"). In the meantime gather the output from the jobs
    # and print it to the terminal.
    local -i current_idx=0 next_idx=1 idx state_count=${#sdk_job_state_names[@]}
    local -a sdk_job_state_names_0=( "${sdk_job_state_names[@]}" ) sdk_job_state_names_1=() sdk_job_output_lines=()
    while [[ state_count -gt 0 ]]; do
        local -n sdk_jobs_state_names_ref=sdk_job_state_names_${current_idx}
        local -n next_sdk_jobs_state_names_ref=sdk_job_state_names_${next_idx}
        next_sdk_jobs_state_names_ref=()
        for sdk_job_state_name in "${sdk_jobs_state_names_ref[@]}"; do
            local -n sdk_job_state_ref=${sdk_job_state_name}
            sdk_job_name=${sdk_job_state_ref[SJS_JOB_NAME_IDX]}
            sdk_run_kind=${sdk_job_state_ref[SJS_KIND_IDX]}
            unset -n sdk_job_state_ref
            if job_is_alive "${sdk_job_name}"; then
                next_sdk_jobs_state_names_ref+=( "${sdk_job_state_name}" )
            fi
            job_get_output "${sdk_job_name}" sdk_job_output_lines
            if [[ ${#sdk_job_output_lines[@]} -gt 0 ]]; then
                # Prefix each output line with the run kind (old/new).
                info_lines "${sdk_job_output_lines[@]/#/${sdk_run_kind}: }"
            fi
        done
        state_count=${#next_sdk_jobs_state_names_ref[@]}
        if [[ state_count -gt 0 ]]; then
            sleep 0.2
        fi
        # Swap the "current" and "next" arrays for the next round.
        idx=${current_idx}
        current_idx=${next_idx}
        next_idx=${idx}
        unset -n sdk_jobs_state_names_ref next_sdk_jobs_state_names_ref
    done
    # All jobs are done now, so reap them and check if the
    # succeeded. If they failed, print the contents of the warning
    # files and save the reports they have generated so far in a
    # separate place.
    local sr_dir_created='' gsr_sdk_run_state_basename file
    for sdk_job_state_name in "${sdk_job_state_names[@]}"; do
        local -n sdk_job_state_ref=${sdk_job_state_name}
        sdk_job_name=${sdk_job_state_ref[SJS_JOB_NAME_IDX]}
        sdk_run_state=${sdk_job_state_ref[SJS_STATE_DIR_IDX]}
        sdk_run_kind=${sdk_job_state_ref[SJS_KIND_IDX]}
        unset -n sdk_job_state_ref
        job_reap "${sdk_job_name}" rv
        if [[ ${rv} -ne 0 ]]; then
            # Report generation failed, save the generated reports in
            # REPORTS_DIR for further examination by the developer.
            local salvaged_dir salvaged_dir_sdk
            salvaged_dir="${REPORTS_DIR}/salvaged-reports"
            basename_out "${sdk_run_state}" gsr_sdk_run_state_basename
            salvaged_dir_sdk="${salvaged_dir}/${gsr_sdk_run_state_basename}"
            if [[ -z ${sr_dir_created} ]]; then
                rm -rf "${salvaged_dir}"
                mkdir -p "${salvaged_dir}"
                sr_dir_created=x
            fi
            {
                info "run_sdk_container for ${sdk_run_kind@Q} finished with exit status ${rv}, printing the warnings below for a clue"
                info
                for file in "${sdk_run_state}/pkg-reports/"*'-warnings'; do
                    info "from ${file}:"
                    echo
                    cat "${file}"
                    echo
                done
                info
                info 'whatever reports generated by the failed run are saved in'
                info "${salvaged_dir_sdk@Q} directory"
                info
            } >&2
            cp -a "${sdk_run_state}/pkg-reports" "${salvaged_dir_sdk}"
            unset salvaged_dir salvaged_dir_sdk
        else
            # We succeeded, so move the reports from the state dir to
            # workdir and generate cleanups for the moved reports.
            sdk_reports_dir="${WORKDIR}/pkg-reports/${sdk_run_kind}"
            top_dir="${sdk_run_state}/pkg-reports"
            # Breadth-first walk of the reports tree, collecting all
            # the files and directories so cleanups can be registered
            # for the destination paths.
            dir_queue=( "${top_dir}" )
            all_dirs=()
            all_files=()
            while [[ ${#dir_queue[@]} -gt 0 ]]; do
                dir=${dir_queue[0]}
                dir_queue=( "${dir_queue[@]:1}" )
                entry=${dir#"${top_dir}"}
                if [[ -z ${entry} ]]; then
                    all_dirs=( "${sdk_reports_dir}" "${all_dirs[@]}" )
                else
                    entry=${entry#/}
                    all_dirs=( "${sdk_reports_dir}/${entry}" "${all_dirs[@]}" )
                fi
                for full_path in "${dir}/"*; do
                    if [[ -d ${full_path} ]]; then
                        dir_queue+=( "${full_path}" )
                    else
                        entry=${full_path##"${top_dir}/"}
                        all_files+=( "${sdk_reports_dir}/${entry}" )
                    fi
                done
            done
            add_cleanup \
                "rm -f ${all_files[*]@Q}" \
                "rmdir ${all_dirs[*]@Q}"
            mv "${sdk_run_state}/pkg-reports" "${sdk_reports_dir}"
        fi
    done
    sdk_job_state_unset "${sdk_job_state_names[@]}"
    # salvaged reports directory was created, means that report
    # generation failed
    if [[ -n ${sr_dir_created} ]]; then
        fail "copying done, stopping now"
    fi
    cp -a "${WORKDIR}/pkg-reports" "${REPORTS_DIR}/reports-from-sdk"
}
source "${PKG_AUTO_IMPL_DIR}/mvm.sh"
# pkginfo mvm is a map mvm that has the following Go-like type:
#
# map[pkg]map[slot]version
#
# pkg, slot and version are strings
# Generate a name for pkginfo mvm based on passed information.
#
# Params:
#
# 1 - which state it refers to (old or new)
# 2 - architecture
# 3 - which report (board packages or SDK packages)
# 4 - name of a variable that will contain the name
function pkginfo_name() {
    local pn_which pn_arch pn_report pn_out_var_name
    pn_which=${1}; shift
    pn_arch=${1}; shift
    pn_report=${1}; shift
    pn_out_var_name=${1}; shift
    # Dashes are invalid in variable names, so turn them into
    # underscores.
    printf -v "${pn_out_var_name}" 'pkginfo_%s_%s_%s_pimap_mvm' "${pn_which}" "${pn_arch}" "${pn_report//-/_}"
}
# Constructor callback used by mvm_declare for pkginfo mvms.
#
# Delegates straight to the generic map constructor - each pkginfo
# value is a slot-to-version map.
function pkginfo_constructor() {
    mvm_mvc_map_constructor "${@}"
}
# Destructor callback used by mvm_declare for pkginfo mvms.
#
# Delegates straight to the generic map destructor.
function pkginfo_destructor() {
    mvm_mvc_map_destructor "${@}"
}
# Adder callback used by mvm_declare for pkginfo mvms.
#
# Takes the name of a slot-to-version map followed by slot and
# version pairs, and stores each pair in the map. A slot may only be
# added once - a duplicate means that a single report listed two
# versions for the same slot, which is a fatal inconsistency.
function pkginfo_adder() {
    local -n pa_map_ref=${1}; shift
    local pa_slot pa_version
    while (( ${#} > 1 )); do
        pa_slot=${1}
        pa_version=${2}
        shift 2
        if [[ -n ${pa_map_ref["${pa_slot}"]:-} ]]; then
            fail "multiple versions for a single slot for a package in a single report"
        fi
        pa_map_ref["${pa_slot}"]=${pa_version}
    done
}
# Creates a pkginfo mvm.
#
# The which/arch/report triplet is stored as mvm extra data, so it
# can later be retrieved through the mvm_c_get_extra accessors.
#
# Params:
#
# 1 - which state it refers to (old or new)
# 2 - architecture
# 3 - which report (board packages or SDK packages)
# 4 - name of a variable that will contain the name of the created
#     pkginfo mvm
function pkginfo_declare() {
    local pd_which pd_arch pd_report pd_name_var_name
    pd_which=${1}; shift
    pd_arch=${1}; shift
    pd_report=${1}; shift
    pd_name_var_name=${1}; shift
    pkginfo_name "${pd_which}" "${pd_arch}" "${pd_report}" "${pd_name_var_name}"
    mvm_declare "${!pd_name_var_name}" pkginfo -- \
        'which' "${pd_which}" \
        'arch' "${pd_arch}" \
        'report' "${pd_report}"
}
# Destroys a pkginfo mvm.
#
# Params:
#
# 1 - which state it refers to (old or new)
# 2 - architecture
# 3 - which report (board packages or SDK packages)
function pkginfo_unset() {
    local piu_pi_name
    # Recompute the mvm name from the triplet and drop the mvm.
    pkginfo_name "${1}" "${2}" "${3}" piu_pi_name
    mvm_unset "${piu_pi_name}"
}
# Processes the report file associated to the passed pkginfo mvm. The
# pkginfo mvm is filled with info about packages, slots and
# versions. Additional information is put into passed package set and
# package to slots set mvm.
#
# Params:
#
# 1 - name of the pkginfo mvm variable
# 2 - name of the set variable, will contain packages in the report
# 3 - name of the set mvm variable, will contain a map of package to
#     slots
function pkginfo_process_file() {
    # mvm_call sets up the mvm context for the helper, which then has
    # access to the mvm_c_* accessors.
    mvm_call "${1}" pkginfo_c_process_file "${@:2}"
}
# Helper function for pkginfo_process_file, used by mvm_call.
#
# Figures out the report file path from the mvm's which/arch/report
# extra data and parses it line by line. Each line starts with a
# package name followed by a version:slot field; any further fields
# are ignored.
function pkginfo_c_process_file() {
    local pkg_slots_set_mvm_var_name
    local -n pkg_set_ref=${1}; shift
    pkg_slots_set_mvm_var_name=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    local which arch report
    mvm_c_get_extra 'which' which
    mvm_c_get_extra 'arch' arch
    mvm_c_get_extra 'report' report
    local report_file
    case ${report}:${arch} in
        "${SDK_PKGS}:arm64")
            # short-circuit it, there's no arm64 sdk
            return 0
            ;;
        "${SDK_PKGS}:amd64")
            report_file="${WORKDIR}/pkg-reports/${which}/${report}"
            ;;
        "${BOARD_PKGS}:"*)
            report_file="${WORKDIR}/pkg-reports/${which}/${arch}-${report}"
            ;;
        *)
            local c=${report}:${arch}
            devel_warn "unknown report-architecture combination (${c@Q})"
            return 0
    esac
    local pkg version_slot throw_away v s
    while read -r pkg version_slot throw_away; do
        pkg_debug_enable "${pkg}"
        pkg_debug "${which} ${arch} ${report}: ${version_slot}"
        # version_slot has the "version:slot" form.
        v=${version_slot%%:*}
        s=${version_slot##*:}
        mvm_c_add "${pkg}" "${s}" "${v}"
        # Sets in this file store 'x' as the value for present keys.
        pkg_set_ref["${pkg}"]='x'
        mvm_add "${pkg_slots_set_mvm_var_name}" "${pkg}" "${s}"
        pkg_debug_disable
    done <"${report_file}"
}
# Gets a profile of the pkginfo mvm. The "profile" is a confusing
# misnomer as it has nothing to do with Gentoo profiles, but rather a
# description of the pkginfo (which is a which-arch-report triplet)
# that is used for reporting.
#
# Params:
#
# 1 - name of the pkginfo mvm variable
# 2 - name of a variable that will contain the profile string
function pkginfo_profile() {
    mvm_call "${1}" pkginfo_c_profile "${@:2}"
}
# Helper function for pkginfo_profile, used by mvm_call.
#
# Builds a "which-arch-report" string from the mvm's extra data and
# stores it in the variable whose name was passed as the first
# parameter.
function pkginfo_c_profile() {
    local pcp_out_var_name=${1}; shift
    local pcp_which pcp_arch pcp_report
    mvm_c_get_extra 'which' pcp_which
    mvm_c_get_extra 'arch' pcp_arch
    mvm_c_get_extra 'report' pcp_report
    printf -v "${pcp_out_var_name}" '%s-%s-%s' "${pcp_which}" "${pcp_arch}" "${pcp_report}"
}
# Creates pkginfo maps for all the reports and processes the
# reports. Additional information is stored in passed packages array
# and packages to slots set mvm variables.
#
# Params:
#
# 1 - name of the array variable, will contain sorted packages from
#     all the reports
# 2 - name of the set mvm variable, will contain a map of package to
#     slots
function read_reports() {
    local all_pkgs_var_name pkg_slots_set_mvm_var_name
    all_pkgs_var_name=${1}; shift
    pkg_slots_set_mvm_var_name=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # A set used to deduplicate package names across all reports.
    local -A rr_all_packages_set
    rr_all_packages_set=()
    local arch which report rr_pimap_mvm_var_name
    # One pkginfo mvm per which/arch/report combination.
    for arch in "${ARCHES[@]}"; do
        for which in "${WHICH[@]}"; do
            for report in "${REPORTS[@]}"; do
                pkginfo_declare "${which}" "${arch}" "${report}" rr_pimap_mvm_var_name
                pkginfo_process_file "${rr_pimap_mvm_var_name}" rr_all_packages_set "${pkg_slots_set_mvm_var_name}"
            done
        done
    done
    # Turn the set into a sorted array stored under the caller's name.
    mapfile -t "${all_pkgs_var_name}" < <(printf '%s\n' "${!rr_all_packages_set[@]}" | sort)
}
# Destroys the pkginfo maps for all the reports.
#
# Iterates over every which/arch/report combination that read_reports
# created a pkginfo mvm for and drops it.
function unset_report_mvms() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    local urm_arch urm_which urm_report
    for urm_arch in "${ARCHES[@]}"; do
        for urm_which in "${WHICH[@]}"; do
            for urm_report in "${REPORTS[@]}"; do
                pkginfo_unset "${urm_which}" "${urm_arch}" "${urm_report}"
            done
        done
    done
}
# Finds out the highest and the lowest version from the passed versions.
#
# If no versions are passed, both output variables are set to the
# empty string.
#
# Params:
#
# 1 - name of a variable where the min version will be stored
# 2 - name of a variable where the max version will be stored
# @ - the versions
function ver_min_max() {
    local -n vmm_min_ref=${1}; shift
    local -n vmm_max_ref=${1}; shift
    # Empty means "no version seen yet".
    local vmm_min vmm_max vmm_v
    vmm_min=''
    vmm_max=''
    for vmm_v; do
        if [[ -z ${vmm_min} ]] || ver_test "${vmm_v}" -lt "${vmm_min}"; then
            vmm_min=${vmm_v}
        fi
        if [[ -z ${vmm_max} ]] || ver_test "${vmm_v}" -gt "${vmm_max}"; then
            vmm_max=${vmm_v}
        fi
    done
    vmm_min_ref=${vmm_min}
    vmm_max_ref=${vmm_max}
}
# Does consistency checks on two profiles for a package using an
# additional map for slots information for a package. Checks if common
# slots for the package in both profiles are using the same
# versions. This is to catch version discrepancies that sometimes
# happen when e.g. a package gets stabilized for one arch, but not for
# the other.
#
# While at it, store package, slot and version range information into
# the passed map.
#
# Params:
#
# 1 - package
# 2 - name of the pkginfo mvm for profile 1
# 3 - name of the pkginfo mvm for profile 2
# 4 - name of the pkg to slots to version range map mvm # TODO: This should be the last parameter
# 5 - name of the pkg to all slots set mvm
function consistency_check_for_package() {
    local pkg pi1_pimap_mvm_var_name pi2_pimap_mvm_var_name pkg_slot_verminmax_map_mvm_var_name pkg_slots_set_mvm_var_name
    pkg=${1}; shift
    pi1_pimap_mvm_var_name=${1}; shift
    pi2_pimap_mvm_var_name=${1}; shift
    pkg_slot_verminmax_map_mvm_var_name=${1}; shift
    pkg_slots_set_mvm_var_name=${1}; shift
    # Slot-to-version maps of the package for both profiles; either
    # may be missing, in which case an empty map is used instead.
    local ccfp_slot_version1_map_var_name ccfp_slot_version2_map_var_name
    mvm_get "${pi1_pimap_mvm_var_name}" "${pkg}" ccfp_slot_version1_map_var_name
    mvm_get "${pi2_pimap_mvm_var_name}" "${pkg}" ccfp_slot_version2_map_var_name
    local -A empty_map
    empty_map=()
    local -n slot_version1_map_ref=${ccfp_slot_version1_map_var_name:-empty_map}
    local -n slot_version2_map_ref=${ccfp_slot_version2_map_var_name:-empty_map}
    local ccfp_slots_set_var_name
    mvm_get "${pkg_slots_set_mvm_var_name}" "${pkg}" ccfp_slots_set_var_name
    local -n slots_set_ref=${ccfp_slots_set_var_name}
    local -a profile_1_slots profile_2_slots common_slots
    profile_1_slots=()
    profile_2_slots=()
    common_slots=()
    local ccfp_profile_1 ccfp_profile_2
    pkginfo_profile "${pi1_pimap_mvm_var_name}" ccfp_profile_1
    pkginfo_profile "${pi2_pimap_mvm_var_name}" ccfp_profile_2
    local s v1 v2 ccfp_min ccfp_max mm
    pkg_debug "all slots iterated over: ${!slots_set_ref[*]}"
    for s in "${!slots_set_ref[@]}"; do
        v1=${slot_version1_map_ref["${s}"]:-}
        v2=${slot_version2_map_ref["${s}"]:-}
        pkg_debug "v1: ${v1}, v2: ${v2}"
        if [[ -n ${v1} ]] && [[ -n ${v2} ]]; then
            pkg_debug "${s} is a common slot for ${ccfp_profile_1} and ${ccfp_profile_2}"
            common_slots+=( "${s}" )
            if [[ ${v1} != "${v2}" ]]; then
                pkg_warn \
                    "- version mismatch:" \
                    "  - package: ${pkg}" \
                    "  - slot: ${s}" \
                    "  - profile 1: ${ccfp_profile_1}" \
                    "    - version: ${v1}" \
                    "  - profile 2: ${ccfp_profile_2}" \
                    "    - version: ${v2}"
            fi
            ver_min_max ccfp_min ccfp_max "${v1}" "${v2}"
            mm="${ccfp_min}:${ccfp_max}"
        elif [[ -n ${v1} ]]; then
            # only side1 has the slot
            pkg_debug "${s} is a slot only in ${ccfp_profile_1}"
            profile_1_slots+=( "${s}" )
            mm="${v1}:${v1}"
        elif [[ -n ${v2} ]]; then
            # only side 2 has the slot
            pkg_debug "${s} is a slot only in ${ccfp_profile_2}"
            profile_2_slots+=( "${s}" )
            mm="${v2}:${v2}"
        else
            pkg_debug "${s} is a slot absent from both ${ccfp_profile_1} and ${ccfp_profile_2}"
            continue
        fi
        mvm_add "${pkg_slot_verminmax_map_mvm_var_name}" "${pkg}" "${s}" "${mm}"
    done
    pkg_debug "common slots: ${common_slots[*]}"
    pkg_debug "profile 1 slots: ${profile_1_slots[*]}"
    pkg_debug "profile 2 slots: ${profile_2_slots[*]}"
    local s1 s2
    if [[ ${#common_slots[@]} -gt 0 ]]; then
        if [[ ${#profile_1_slots[@]} -gt 0 ]] || [[ ${#profile_2_slots[@]} -gt 0 ]]; then
            pkg_warn \
                "- suspicious:" \
                "  - package: ${pkg}" \
                "  - profile 1: ${ccfp_profile_1}" \
                "  - profile 2: ${ccfp_profile_2}" \
                "  - common slots: ${common_slots[*]}" \
                "  - slots only in profile 1: ${profile_1_slots[*]}" \
                "  - slots only in profile 2: ${profile_2_slots[*]}" \
                "  - what: there are slots that exist only on one profile while both profiles also have some common slots"
        fi
    elif [[ ${#profile_1_slots[@]} -eq 1 ]] && [[ ${#profile_2_slots[@]} -eq 1 ]]; then
        # No common slots, but exactly one slot on each side - treat
        # that as a probable slot move and compare the versions.
        s1=${profile_1_slots[0]}
        s2=${profile_2_slots[0]}
        v1=${slot_version1_map_ref["${s1}"]:-}
        v2=${slot_version2_map_ref["${s2}"]:-}
        if [[ ${v1} != "${v2}" ]]; then
            pkg_warn \
                "- version mismatch:" \
                "  - package: ${pkg}" \
                "  - profile 1: ${ccfp_profile_1}" \
                "    - slot: ${profile_1_slots[0]}" \
                "    - version: ${v1}" \
                "  - profile 2: ${ccfp_profile_2}" \
                "    - slot: ${profile_2_slots[0]}" \
                "    - version: ${v2}"
        fi
    fi
}
# Do consistency checks for the following pairs of profiles for a passed state:
#
# ${arch} sdk <-> ${arch} board
# ${arch1} board <-> ${arch2} board
# ${arch1} sdk <-> ${arch2} sdk
#
# While at it, store package, slot and version range information into
# the passed map.
#
# Params:
#
# 1 - which state should be checked (old or new)
# 2 - name of an array variable that contains all the package names
# 3 - name of the pkg to all slots set mvm variable
# 4 - name of the pkg to slot to version range map mvm variable
function consistency_checks() {
    local which pkg_slots_set_mvm_var_name pkg_slot_verminmax_mvm_var_name
    which=${1}; shift
    local -n all_pkgs_ref=${1}; shift
    pkg_slots_set_mvm_var_name=${1}; shift
    pkg_slot_verminmax_mvm_var_name=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    local cc_pimap_mvm_1_var_name cc_pimap_mvm_2_var_name pkg
    # Names of the per-pair verminmax mvms, merged at the end.
    local -a all_mvm_names=()
    local arch name
    # ${arch} SDK <-> ${arch} board
    for arch in "${ARCHES[@]}"; do
        # currently we only have amd64 SDK, so skip others
        if [[ ${arch} != 'amd64' ]]; then
            continue
        fi
        pkginfo_name "${which}" "${arch}" "${SDK_PKGS}" cc_pimap_mvm_1_var_name
        pkginfo_name "${which}" "${arch}" "${BOARD_PKGS}" cc_pimap_mvm_2_var_name
        name="cc_${arch}_sdk_board_pkg_slot_verminmax_map_mvm"
        all_mvm_names+=( "${name}" )
        mvm_declare "${name}" mvm_mvc_map
        for pkg in "${all_pkgs_ref[@]}"; do
            pkg_debug_enable "${pkg}"
            pkg_debug "${which} ${arch} sdk <-> ${arch} board"
            consistency_check_for_package "${pkg}" "${cc_pimap_mvm_1_var_name}" "${cc_pimap_mvm_2_var_name}" "${name}" "${pkg_slots_set_mvm_var_name}"
            pkg_debug_disable
        done
    done
    # We want to check consistency between each pair of arches.
    local -a cc_all_arch_pairs
    all_pairs cc_all_arch_pairs ':' "${ARCHES[@]}"
    local pair arch1 arch2
    # ${arch1} board <-> ${arch2} board
    for pair in "${cc_all_arch_pairs[@]}"; do
        arch1=${pair%:*}
        arch2=${pair#*:}
        pkginfo_name "${which}" "${arch1}" "${BOARD_PKGS}" cc_pimap_mvm_1_var_name
        pkginfo_name "${which}" "${arch2}" "${BOARD_PKGS}" cc_pimap_mvm_2_var_name
        name="cc_${arch1}_${arch2}_board_pkg_slot_verminmax_map_mvm"
        all_mvm_names+=( "${name}" )
        mvm_declare "${name}" mvm_mvc_map
        for pkg in "${all_pkgs_ref[@]}"; do
            pkg_debug_enable "${pkg}"
            pkg_debug "${which} ${arch1} board <-> ${arch2} board"
            consistency_check_for_package "${pkg}" "${cc_pimap_mvm_1_var_name}" "${cc_pimap_mvm_2_var_name}" "${name}" "${pkg_slots_set_mvm_var_name}"
            pkg_debug_disable
        done
    done
    # ${arch1} sdk <-> ${arch2} sdk
    for pair in "${cc_all_arch_pairs[@]}"; do
        arch1=${pair%:*}
        arch2=${pair#*:}
        # We currently only have amd64 SDK, so this loop will
        # effectively iterate zero times. When we get the arm64 SDK
        # too, this if could be dropped. Getting the listing of arm64
        # packages inside amd64 SDK is going to be problem to solve,
        # though.
        if [[ ${arch1} != 'amd64' ]] || [[ ${arch2} != 'amd64' ]]; then
            continue
        fi
        pkginfo_name "${which}" "${arch1}" "${SDK_PKGS}" cc_pimap_mvm_1_var_name
        pkginfo_name "${which}" "${arch2}" "${SDK_PKGS}" cc_pimap_mvm_2_var_name
        name="cc_${arch1}_${arch2}_sdk_pkg_slot_verminmax_map_mvm"
        all_mvm_names+=( "${name}" )
        mvm_declare "${name}" mvm_mvc_map
        for pkg in "${all_pkgs_ref[@]}"; do
            pkg_debug_enable "${pkg}"
            pkg_debug "${which} ${arch1} sdk <-> ${arch2} sdk"
            consistency_check_for_package "${pkg}" "${cc_pimap_mvm_1_var_name}" "${cc_pimap_mvm_2_var_name}" "${name}" "${pkg_slots_set_mvm_var_name}"
            pkg_debug_disable
        done
    done
    # Merge the per-pair version ranges into a single min:max range
    # per package and slot, stored in the caller's mvm.
    local cc_slots_set_var_name s cc_min cc_max verminmax
    local -A empty_map=()
    local -a verminmax_map_var_names verminmaxes
    local cc_slot_verminmax_map_var_name
    for pkg in "${all_pkgs_ref[@]}"; do
        pkg_debug_enable "${pkg}"
        pkg_debug "${which} verminmax stuff"
        verminmax_map_var_names=()
        for name in "${all_mvm_names[@]}"; do
            # mvm_get may yield an empty name if the package has no
            # entry in this pair's mvm - handled via empty_map below.
            mvm_get "${name}" "${pkg}" cc_slot_verminmax_map_var_name
            verminmax_map_var_names+=("${cc_slot_verminmax_map_var_name}")
        done
        if pkg_debug_enabled; then
            for name in "${verminmax_map_var_names[@]}"; do
                local -n slot_verminmax_map_ref=${name:-empty_map}
                pkg_debug_print "all slots in ${name}: ${!slot_verminmax_map_ref[*]}"
                pkg_debug_print "all vmms in ${name}: ${slot_verminmax_map_ref[*]}"
                unset -n slot_verminmax_map_ref
            done
        fi
        mvm_get "${pkg_slots_set_mvm_var_name}" "${pkg}" cc_slots_set_var_name
        local -n slots_set_ref=${cc_slots_set_var_name}
        pkg_debug "all slots iterated over: ${!slots_set_ref[*]}"
        for s in "${!slots_set_ref[@]}"; do
            verminmaxes=()
            for name in "${verminmax_map_var_names[@]}"; do
                local -n slot_verminmax_map_ref=${name:-empty_map}
                verminmax=${slot_verminmax_map_ref["${s}"]:-}
                if [[ -n ${verminmax} ]]; then
                    verminmaxes+=( "${verminmax}" )
                fi
                unset -n slot_verminmax_map_ref
            done
            if [[ ${#verminmaxes[@]} -gt 1 ]]; then
                # Fold all the collected "min:max" pairs into one.
                ver_min_max cc_min cc_max "${verminmaxes[@]%%:*}" "${verminmaxes[@]##*:}"
                verminmax="${cc_min}:${cc_max}"
            elif [[ ${#verminmaxes[@]} -eq 1 ]]; then
                verminmax=${verminmaxes[0]}
            else
                continue
            fi
            pkg_debug "adding vmm ${verminmax} for slot ${s}"
            mvm_add "${pkg_slot_verminmax_mvm_var_name}" "${pkg}" "${s}" "${verminmax}"
        done
        unset -n slots_set_ref
        pkg_debug_disable
    done
    for name in "${all_mvm_names[@]}"; do
        mvm_unset "${name}"
    done
}
# Read a report describing from which repo the package came and store
# in the passed map.
#
# Report lines are "<package> <repo>" pairs. If a package shows up
# with different repos across the reports, a warning is printed and
# the first seen repo wins.
#
# Params:
#
# 1 - name of a map variable, will contain a mapping of package name
#     to repository name
function read_package_sources() {
    local -n package_sources_map_ref=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # Collect the package-repos report files for both states and all
    # arches.
    local -a files=()
    local which arch
    for which in "${WHICH[@]}"; do
        files+=( "${WORKDIR}/pkg-reports/${which}/sdk-package-repos" )
        for arch in "${ARCHES[@]}"; do
            files+=( "${WORKDIR}/pkg-reports/${which}/${arch}-board-package-repos" )
        done
    done
    local file pkg repo saved_repo
    for file in "${files[@]}"; do
        while read -r pkg repo; do
            saved_repo=${package_sources_map_ref["${pkg}"]:-}
            if [[ -n ${saved_repo} ]]; then
                if [[ ${saved_repo} != "${repo}" ]]; then
                    pkg_warn \
                        '- different repos used for the package:' \
                        "  - package: ${pkg}" \
                        '  - repos:' \
                        "    - ${saved_repo}" \
                        "    - ${repo}"
                fi
            else
                package_sources_map_ref["${pkg}"]=${repo}
            fi
        done <"${file}"
    done
}
# Fields of the bunch of maps struct.
#
# BOM_PKG_TO_TAGS_MVM_IDX - mapping of a package name to an array of
#                           tags (mostly describing where the package
#                           is used, like SDK or azure OEM or python
#                           sysext)
#
# BOM_PKG_SLOTS_SET_MVM_IDX - mapping of a package name to a set of
#                             slots available for the package (slots
#                             before the update and after the update
#                             are mixed here)
#
# BOM_OLD_PKG_SLOT_VERMINMAX_MAP_MVM_IDX - mapping of a package name
#                                          to used slots, each slot is
#                                          associated with a pair of
#                                          versions - lowest and
#                                          greatest used version (for
#                                          consistent packages, these
#                                          versions are the same); the
#                                          mapping is for packages
#                                          before the update
#
# BOM_NEW_PKG_SLOT_VERMINMAX_MAP_MVM_IDX - same as above, but for
#                                          packages after the update
#
# BOM_PKG_SOURCES_MAP_IDX - mapping of package name to the repository
#                           name
declare -gri BOM_PKG_TO_TAGS_MVM_IDX=0 BOM_PKG_SLOTS_SET_MVM_IDX=1 BOM_OLD_PKG_SLOT_VERMINMAX_MAP_MVM_IDX=2 BOM_NEW_PKG_SLOT_VERMINMAX_MAP_MVM_IDX=3 BOM_PKG_SOURCES_MAP_IDX=4
# Declare bunch of maps variables.
#
# Each variable becomes a five-field array (indexed by the BOM_*_IDX
# constants above), with all fields initially empty.
#
# Parameters:
#
# @ - names of variables to be used for bunch of maps
function bunch_of_maps_declare() {
    struct_declare -ga "${@}" "( '' '' '' '' '' )"
}
# Unset bunch of maps variables.
#
# Parameters:
#
# @ - names of bunch of maps variables
function bunch_of_maps_unset() {
    local bom_name
    for bom_name; do
        unset "${bom_name}"
    done
}
# Fields of the package output paths struct.
#
# POP_OUT_DIR_IDX - toplevel output directory.
#
# POP_PKG_OUT_DIR_IDX - package-specific output directory under the top-level
#                       output directory.
#
# POP_PKG_SLOT_OUT_DIR_IDX - slot-specific output directory under the
#                            package specific output directory.
declare -gri POP_OUT_DIR_IDX=0 POP_PKG_OUT_DIR_IDX=1 POP_PKG_SLOT_OUT_DIR_IDX=2
# Declare package output paths variables.
#
# Each variable becomes a three-field array (indexed by the POP_*_IDX
# constants above), with all fields initially empty.
#
# Parameters:
#
# @ - names of variables to be used for package output paths
function package_output_paths_declare() {
    # Spacing of the struct literal made consistent with the other
    # struct_declare call sites (same three empty fields as before).
    struct_declare -ga "${@}" "( '' '' '' )"
}
# Unset package output paths variables.
#
# Parameters:
#
# @ - names of package output paths variables
function package_output_paths_unset() {
    local pop_name
    for pop_name; do
        unset "${pop_name}"
    done
}
# Fields of package job state struct.
#
# PJS_JOB_IDX - name of a job variable
# PJS_DIR_IDX - path to job's state directory
declare -gri PJS_JOB_IDX=0 PJS_DIR_IDX=1
# Declare package job state variables.
#
# Each variable becomes a two-field array (indexed by the PJS_*_IDX
# constants above), with both fields initially empty.
#
# Parameters:
#
# @ - names of variables to be used for package job states
function pkg_job_state_declare() {
    struct_declare -ga "${@}" "( '' '' )"
}
# Unset package job state variables. If a state has a job variable
# assigned, the job is unset too.
#
# Parameters:
#
# @ - names of package job state variables
function pkg_job_state_unset() {
    local name job_name
    for name; do
        local -n pkg_job_state_ref=${name}
        job_name=${pkg_job_state_ref[PJS_JOB_IDX]}
        if [[ -n ${job_name} ]]; then
            job_unset "${job_name}"
        fi
        # Fixed: this used to drop a nonexistent "ref" nameref,
        # leaving pkg_job_state_ref dangling.
        unset -n pkg_job_state_ref
    done
    unset "${@}"
}
# Messages used in communication between package jobs and the main
# process.
#
# READYFORMORE is a message that a package job sends to the main
# process when it is done with processing the current batch of
# packages and asks for more.
#
# WEAREDONE is a message that the main process sends to a package job
# when there are no more packages to process, so the job should
# terminate.
#
# See handle_package_changes_job below for the protocol itself.
declare -gr ready_for_more_msg='READYFORMORE' we_are_done_msg='WEAREDONE'
# A job function for handling package updates. Receives a batch of
# packages to process, processes them, writes results to a given
# directory and asks for more packages when done.
#
# Protocol (over stdin/stdout): the job prints READYFORMORE, then
# reads a reply. The reply is either WEAREDONE (terminate), or a
# count followed by that many lines, each an "old new" package name
# pair separated by a space.
#
# Parameters:
#
# 1 - output directory for the package handling results
# 2 - name of a bunch of maps variable
function handle_package_changes_job() {
    local output_dir=${1}; shift
    local bunch_of_maps_var_name=${1}; shift
    local we_are_done='' line
    local -a reply_lines pair
    local -i i pkg_count
    local REPLY
    while [[ -z ${we_are_done} ]]; do
        # Ask the main process for the next batch.
        echo "${ready_for_more_msg}"
        read -r
        if [[ ${REPLY} = "${we_are_done_msg}" ]]; then
            we_are_done=x
        elif [[ ${REPLY} =~ ^[0-9]+$ ]]; then
            # A batch: the number of package-pair lines to read next.
            reply_lines=()
            pkg_count=${REPLY}
            for ((i = 0; i < pkg_count; ++i)); do
                read -r
                reply_lines+=( "${REPLY}" )
            done
            for line in "${reply_lines[@]}"; do
                # Split the "old new" pair on the space.
                mapfile -t pair <<<"${line// /$'\n'}"
                if [[ ${#pair[@]} -eq 2 ]]; then
                    handle_one_package_change "${output_dir}" "${bunch_of_maps_var_name}" "${pair[@]}"
                else
                    echo "invalid message received: ${line@Q}, expected a pair of package names"
                fi
            done
        else
            echo "invalid message received: ${REPLY@Q}, expected a number or ${we_are_done_msg@Q}"
        fi
    done
    return 0
}
# Generate a report about one, possibly renamed, package. This
# includes generating diffs between old and new ebuilds for each of
# used slot of a package, diffs of other files the package has, some
# automatic reports based on md5-cache entries along with summary and
# changelog stubs.
#
# Parameters:
#
# 1 - the main output directory, the generated output will end up in
# some subdirectories
# 2 - bunch of maps (generally, package information); the BOM_* index
# constants used to access it are declared elsewhere in this file
# 3 - old package name
# 4 - new package name
function handle_one_package_change() {
    local output_dir=${1}; shift
    local -n bunch_of_maps_ref=${1}; shift
    local old_name=${1}; shift
    local new_name=${1}; shift
    local warnings_dir="${output_dir}/warnings"
    local updates_dir="${output_dir}/updates"
    # Unpack the names of the nested mvm/map variables out of the
    # bunch of maps.
    local pkg_to_tags_mvm_var_name=${bunch_of_maps_ref[BOM_PKG_TO_TAGS_MVM_IDX]}
    local pkg_slots_set_mvm_var_name=${bunch_of_maps_ref[BOM_PKG_SLOTS_SET_MVM_IDX]}
    local old_pkg_slot_verminmax_map_mvm_var_name=${bunch_of_maps_ref[BOM_OLD_PKG_SLOT_VERMINMAX_MAP_MVM_IDX]}
    local new_pkg_slot_verminmax_map_mvm_var_name=${bunch_of_maps_ref[BOM_NEW_PKG_SLOT_VERMINMAX_MAP_MVM_IDX]}
    local -n pkg_sources_map_ref=${bunch_of_maps_ref[BOM_PKG_SOURCES_MAP_IDX]}
    # The function goes over a pair of old and new package names. For
    # each name there will be some checks done (like does this package
    # even exist). Each name in the pair has a set of used slots
    # associated with it (the most common situation is that each have
    # just one slot, but there are some packages that we have multiple
    # slots installed, like app-text/docbook-xml-dtd). Some of the
    # slots will appear in both old and new package name, sometimes
    # there will be slots available only in the old state or only in
    # the new state. Each slot for each package name has an associated
    # min version and max version. So for common slots we usually
    # compare min version for old package with max version for new
    # package. Any inconsistencies with the versions should be
    # reported by now. There are some edge cases with the slots that
    # are not handled by the automation - in such cases there will be
    # a "manual action needed" report.
    if [[ ${old_name} = "${new_name}" ]]; then
        info "handling update of ${new_name}"
    else
        info "handling update of ${new_name} (renamed from ${old_name})"
    fi
    pkg_debug_enable "${old_name}" "${new_name}"
    pkg_debug 'handling updates'
    # Which repo (portage-stable or coreos-overlay) each name comes
    # from; empty means the name does not exist in that state.
    local old_repo=${pkg_sources_map_ref["${old_name}"]:-}
    local new_repo=${pkg_sources_map_ref["${new_name}"]:-}
    if [[ -z ${old_repo} ]]; then
        pkg_warn_d "${warnings_dir}" \
                   '- package not in old state' \
                   "  - old package: ${old_name}" \
                   "  - new package: ${new_name}"
        pkg_debug_disable
        return 0
    fi
    if [[ -z ${new_repo} ]]; then
        pkg_warn_d "${warnings_dir}" \
                   '- package not in new state' \
                   "  - old package: ${old_name}" \
                   "  - new package: ${new_name}"
        pkg_debug_disable
        return 0
    fi
    if [[ ${old_repo} != "${new_repo}" ]]; then
        # This is pretty much an arbitrary limitation and I don't
        # remember any more why we have it.
        pkg_warn_d "${warnings_dir}" \
                   '- package has moved between repos? unsupported for now' \
                   "  - old package and repo: ${old_name} ${old_repo}" \
                   "  - new package and repo: ${new_name} ${new_repo}"
        pkg_debug_disable
        return 0
    fi
    if [[ ${new_repo} != 'portage-stable' ]]; then
        # coreos-overlay packages will need a separate handling
        pkg_debug 'not a portage-stable package'
        pkg_debug_disable
        return 0
    fi
    # Fetch the slot sets and slot->"min:max" version maps for both
    # names; fall back to the EMPTY_MAP variable when mvm_get yields
    # nothing for a name.
    local hopc_old_slots_set_var_name hopc_new_slots_set_var_name
    mvm_get "${pkg_slots_set_mvm_var_name}" "${old_name}" hopc_old_slots_set_var_name
    mvm_get "${pkg_slots_set_mvm_var_name}" "${new_name}" hopc_new_slots_set_var_name
    : "${hopc_old_slots_set_var_name:=EMPTY_MAP}"
    : "${hopc_new_slots_set_var_name:=EMPTY_MAP}"
    local hopc_old_slot_verminmax_map_var_name hopc_new_slot_verminmax_map_var_name
    mvm_get "${old_pkg_slot_verminmax_map_mvm_var_name}" "${old_name}" hopc_old_slot_verminmax_map_var_name
    mvm_get "${new_pkg_slot_verminmax_map_mvm_var_name}" "${new_name}" hopc_new_slot_verminmax_map_var_name
    : "${hopc_old_slot_verminmax_map_var_name:=EMPTY_MAP}"
    : "${hopc_new_slot_verminmax_map_var_name:=EMPTY_MAP}"
    local -n old_slot_verminmax_map_ref=${hopc_old_slot_verminmax_map_var_name}
    local -n new_slot_verminmax_map_ref=${hopc_new_slot_verminmax_map_var_name}
    # Filter out slots for old and new package name that comes out
    # without versions. This may happen, because we collect all slot
    # names for the package name, without differentiating whether such
    # a slot existed in the old state or still exists in the new
    # state. If slot didn't exist in either one then it will come
    # without version information. Such a slot is dropped. An example
    # would be an update of sys-devel/binutils from 2.42 to 2.43. Each
    # binutils version has a separate slot which is named after the
    # version. So the slots set would be (2.42 2.43). Slot "2.42" does
    # not exist in the new state any more, "2.43" does not yet exist
    # in the old state. So those slots for those states will be
    # dropped. Thus filtered slots set for the old state will only
    # contain 2.42, while for the new state - only 2.43.
    #
    # NOTE(review): WHICH is a global array declared elsewhere in this
    # file - presumably ('old' 'new'); the variable-name-of-a-
    # variable-name indirection below depends on that naming scheme.
    local which slots_set_var_name_var_name slot_verminmax_map_var_name_var_name filtered_slots_set_var_name s verminmax
    local -A hopc_old_filtered_slots_set hopc_new_filtered_slots_set
    for which in "${WHICH[@]}"; do
        slots_set_var_name_var_name="hopc_${which}_slots_set_var_name"
        slot_verminmax_map_var_name_var_name="hopc_${which}_slot_verminmax_map_var_name"
        filtered_slots_set_var_name="hopc_${which}_filtered_slots_set"
        local -n which_slots_set_ref=${!slots_set_var_name_var_name}
        local -n which_slot_verminmax_map_ref=${!slot_verminmax_map_var_name_var_name}
        local -n which_filtered_slots_set_ref=${filtered_slots_set_var_name}
        pkg_debug "all unfiltered slots for ${which} name: ${!which_slots_set_ref[*]}"
        which_filtered_slots_set_ref=()
        for s in "${!which_slots_set_ref[@]}"; do
            # Keep the slot only if it has version info in this state.
            verminmax=${which_slot_verminmax_map_ref["${s}"]:-}
            if [[ -n ${verminmax} ]]; then
                which_filtered_slots_set_ref["${s}"]=x
            fi
        done
        pkg_debug "all filtered slots for ${which} name: ${!which_filtered_slots_set_ref[*]}"
        unset -n which_filtered_slots_set_ref
        unset -n which_slot_verminmax_map_ref
        unset -n which_slots_set_ref
    done
    # Partition the slots into old-only, new-only and common ones.
    local -A hopc_only_old_slots_set=() hopc_only_new_slots_set=() hopc_common_slots_set=()
    sets_split \
        hopc_old_filtered_slots_set hopc_new_filtered_slots_set \
        hopc_only_old_slots_set hopc_only_new_slots_set hopc_common_slots_set
    pkg_debug "all common slots: ${!hopc_common_slots_set[*]}"
    pkg_debug "slots only for old name: ${!hopc_only_old_slots_set[*]}"
    pkg_debug "slots only for new name: ${!hopc_only_new_slots_set[*]}"
    local update_dir_non_slot="${updates_dir}/${new_name}"
    mkdir -p "${update_dir_non_slot}"
    # hopc_package_output_paths is a project-declared indexed
    # structure; the POP_* index constants come from elsewhere in this
    # file.
    package_output_paths_declare hopc_package_output_paths
    hopc_package_output_paths[POP_OUT_DIR_IDX]=${updates_dir}
    hopc_package_output_paths[POP_PKG_OUT_DIR_IDX]=${update_dir_non_slot}
    # POP_PKG_SLOT_OUT_DIR_IDX will be set in loops below
    generate_non_ebuild_diffs "${update_dir_non_slot}" "${OLD_PORTAGE_STABLE}" "${NEW_PORTAGE_STABLE}" "${old_name}" "${new_name}"
    generate_full_diffs "${update_dir_non_slot}" "${OLD_PORTAGE_STABLE}" "${NEW_PORTAGE_STABLE}" "${old_name}" "${new_name}"
    generate_package_mention_reports "${update_dir_non_slot}" "${NEW_STATE}" "${old_name}" "${new_name}"
    local hopc_changed=''
    local old_verminmax new_verminmax
    local hopc_slot_dirname
    local update_dir
    local old_version new_version
    local hopc_cmp_result hopc_slot_changed
    pkg_debug 'going over common slots'
    for s in "${!hopc_common_slots_set[@]}"; do
        old_verminmax=${old_slot_verminmax_map_ref["${s}"]:-}
        new_verminmax=${new_slot_verminmax_map_ref["${s}"]:-}
        pkg_debug "slot: ${s}, vmm old: ${old_verminmax}, vmm new: ${new_verminmax}"
        if [[ -z "${old_verminmax}" ]] || [[ -z "${new_verminmax}" ]]; then
            devel_warn_d "${warnings_dir}" \
                         "- no minmax info available for old and/or new:" \
                         "  - old package: ${old_name}" \
                         "    - slot: ${s}" \
                         "    - minmax: ${old_verminmax}" \
                         "  - new package: ${new_name}" \
                         "    - slot: ${s}" \
                         "    - minmax: ${new_verminmax}"
            continue
        fi
        slot_dirname "${s}" "${s}" hopc_slot_dirname
        update_dir="${update_dir_non_slot}/${hopc_slot_dirname}"
        mkdir -p "${update_dir}"
        hopc_package_output_paths[POP_PKG_SLOT_OUT_DIR_IDX]=${update_dir}
        # verminmax is "min version:max version" - compare the old
        # minimum against the new maximum.
        old_version=${old_verminmax%%:*}
        new_version=${new_verminmax##*:}
        gentoo_ver_cmp_out "${new_version}" "${old_version}" hopc_cmp_result
        case ${hopc_cmp_result} in
            "${GV_GT}")
                # new version is greater - an update
                handle_pkg_update hopc_package_output_paths "${pkg_to_tags_mvm_var_name}" "${old_name}" "${new_name}" "${old_version}" "${new_version}"
                hopc_changed=x
                ;;
            "${GV_EQ}")
                # same version - keep the reports only if ebuild or
                # other files changed
                hopc_slot_changed=
                handle_pkg_as_is hopc_package_output_paths "${pkg_to_tags_mvm_var_name}" "${old_name}" "${new_name}" "${old_version}" hopc_slot_changed
                if [[ -z ${hopc_slot_changed} ]]; then
                    rm -rf "${update_dir}"
                else
                    hopc_changed=x
                fi
                ;;
            "${GV_LT}")
                # new version is lower - a downgrade
                handle_pkg_downgrade hopc_package_output_paths "${pkg_to_tags_mvm_var_name}" "${old_name}" "${new_name}" "${s}" "${s}" "${old_version}" "${new_version}"
                hopc_changed=x
                ;;
        esac
    done
    # A "sys-devel/binutils update" case - one old slot and one new
    # slot, but different from each other.
    local hopc_old_s hopc_new_s
    if [[ ${#hopc_only_old_slots_set[@]} -eq 1 ]] && [[ ${#hopc_only_new_slots_set[@]} -eq 1 ]]; then
        get_first_from_set hopc_only_old_slots_set hopc_old_s
        old_verminmax=${old_slot_verminmax_map_ref["${hopc_old_s}"]:-}
        get_first_from_set hopc_only_new_slots_set hopc_new_s
        new_verminmax=${new_slot_verminmax_map_ref["${hopc_new_s}"]:-}
        pkg_debug "jumping from slot ${hopc_old_s} (vmm: ${old_verminmax}) to slot ${hopc_new_s} (vmm: ${new_verminmax})"
        if [[ -z "${old_verminmax}" ]] || [[ -z "${new_verminmax}" ]]; then
            devel_warn_d "${warnings_dir}" \
                         "- no verminmax info available for old and/or new:" \
                         "  - old package: ${old_name}" \
                         "    - slot: ${hopc_old_s}" \
                         "    - minmax: ${old_verminmax}" \
                         "  - new package: ${new_name}" \
                         "    - slot: ${hopc_new_s}" \
                         "    - minmax: ${new_verminmax}"
        else
            slot_dirname "${hopc_old_s}" "${hopc_new_s}" hopc_slot_dirname
            update_dir="${update_dir_non_slot}/${hopc_slot_dirname}"
            mkdir -p "${update_dir}"
            hopc_package_output_paths[POP_PKG_SLOT_OUT_DIR_IDX]=${update_dir}
            old_version=${old_verminmax%%:*}
            new_version=${new_verminmax##*:}
            gentoo_ver_cmp_out "${new_version}" "${old_version}" hopc_cmp_result
            # Same version comparison handling as for common slots
            # above, just with differing old and new slot names.
            case ${hopc_cmp_result} in
                "${GV_GT}")
                    handle_pkg_update hopc_package_output_paths "${pkg_to_tags_mvm_var_name}" "${old_name}" "${new_name}" "${old_version}" "${new_version}"
                    hopc_changed=x
                    ;;
                "${GV_EQ}")
                    hopc_slot_changed=
                    handle_pkg_as_is hopc_package_output_paths "${pkg_to_tags_mvm_var_name}" "${old_name}" "${new_name}" "${old_version}" hopc_slot_changed
                    if [[ -z ${hopc_slot_changed} ]]; then
                        rm -rf "${update_dir}"
                    else
                        hopc_changed=x
                    fi
                    ;;
                "${GV_LT}")
                    handle_pkg_downgrade hopc_package_output_paths "${pkg_to_tags_mvm_var_name}" "${old_name}" "${new_name}" "${hopc_old_s}" "${hopc_new_s}" "${old_version}" "${new_version}"
                    hopc_changed=x
                    ;;
            esac
        fi
    elif [[ ${#hopc_only_old_slots_set[@]} -gt 0 ]] || [[ ${#hopc_only_new_slots_set[@]} -gt 0 ]]; then
        pkg_debug 'complicated slots situation, needs manual intervention'
        local -a lines=(
            '- handle package update:'
            '  - old package name:'
            "    - name: ${old_name}"
            '    - slots:'
        )
        for s in "${!hopc_old_filtered_slots_set[@]}"; do
            old_verminmax=${old_slot_verminmax_map_ref["${s}"]:-}
            lines+=("      - ${s}, minmax: ${old_verminmax}")
        done
        lines+=(
            '  - new package name:'
            "    - name: ${new_name}"
            '    - slots:'
        )
        for s in "${!hopc_new_filtered_slots_set[@]}"; do
            new_verminmax=${new_slot_verminmax_map_ref["${s}"]:-}
            lines+=("      - ${s}, minmax: ${new_verminmax}")
        done
        manual_d "${warnings_dir}" "${lines[@]}"
        unset lines
    fi
    package_output_paths_unset hopc_package_output_paths
    unset -n new_slot_verminmax_map_ref old_slot_verminmax_map_ref
    # if nothing changed, drop the entire update directory for the
    # package, and possibly the parent directory if it became
    # empty (parent directory being a category directory, like
    # sys-apps)
    local hopc_category_dir
    if [[ -z ${hopc_changed} ]]; then
        pkg_debug 'no changes, dropping reports'
        rm -rf "${update_dir_non_slot}"
        dirname_out "${update_dir_non_slot}" hopc_category_dir
        if dir_is_empty "${hopc_category_dir}"; then
            rmdir "${hopc_category_dir}"
        fi
    fi
    pkg_debug_disable
}
# Reads the reports, does consistency checks, runs jobs to process all
# the packages, and writes out reports into the reports directory.
#
# Params:
#
# 1 - name of the renames map variable
# 2 - name of the package tags map mvm variable
function handle_package_changes() {
    local pkg_to_tags_mvm_var_name
    local -n renamed_old_to_new_map_ref=${1}; shift
    pkg_to_tags_mvm_var_name=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    local -a hpc_all_pkgs
    hpc_all_pkgs=()
    # map[package]map[slot]interface{}
    mvm_declare hpc_pkg_slots_set_mvm mvm_mvc_set
    read_reports hpc_all_pkgs hpc_pkg_slots_set_mvm
    info "doing package consistency checks"
    # map[package]map[slot]string (string being "min version:max version")
    mvm_declare hpc_old_pkg_slot_verminmax_map_mvm mvm_mvc_map
    mvm_declare hpc_new_pkg_slot_verminmax_map_mvm mvm_mvc_map
    consistency_checks old hpc_all_pkgs hpc_pkg_slots_set_mvm hpc_old_pkg_slot_verminmax_map_mvm
    consistency_checks new hpc_all_pkgs hpc_pkg_slots_set_mvm hpc_new_pkg_slot_verminmax_map_mvm
    unset_report_mvms
    info "preparing for handling package changes"
    # TODO: when we handle moving packages between repos, then there
    # should be two maps, for old and new state
    local -A hpc_package_sources_map
    hpc_package_sources_map=()
    read_package_sources hpc_package_sources_map
    local -a old_pkgs new_pkgs
    old_pkgs=()
    new_pkgs=()
    # The following loop fills the old_pkgs and new_pkgs arrays sorted
    # package names, where old package name at index I has it's new
    # counterpart at the same index. For the most part, both old and
    # new names will be the same, since the renames are rather rare.
    # map[package]index
    local -A added_pkg_to_index_map=()
    local pkg other
    for pkg in "${hpc_all_pkgs[@]}"; do
        other=${renamed_old_to_new_map_ref["${pkg}"]:-}
        if [[ -n "${other}" ]]; then
            # There seems to be a rename from ${pkg} to ${other}
            pkg_debug_enable "${pkg}" "${other}"
            pkg_debug "${pkg} renamed to ${other}"
            pkg_debug_disable
            local other_idx
            other_idx=${added_pkg_to_index_map["${other}"]:-}
            if [[ -n ${other_idx} ]]; then
                # Looks like we have already processed the ${other}
                # name. In this case, both old_pkgs[${other_idx}] and
                # new_pkgs[${other_idx}] should just be
                # ${other}. Since ${pkg} is the old name (${other} is
                # new), we update old_pkgs to hold the old name. Just
                # make sure that old_pkgs indeed had the new name
                # first.
                local other_old
                other_old=${old_pkgs["${other_idx}"]}
                if [[ ${other_old} = "${other}" ]]; then
                    old_pkgs["${other_idx}"]=${pkg}
                else
                    manual \
                        '- there seem to be two old packages in our repos that are supposed to be renamed to the same name:' \
                        "  - old package 1: ${pkg}" \
                        "  - old package 2: ${other_old}" \
                        "  - new package: ${other}"
                fi
                unset other_idx other_old
                continue
            else
                unset other_idx
            fi
            # Looks like we haven't processed the ${other} name yet,
            # it probably will come up later, which will be taken care
            # of by the "pkg_debug 'handled already through some
            # rename'" part below, after else.
            local pkg_idx
            # doesn't matter if it's length of new_pkgs or old_pkgs,
            # both are assumed to have the same length
            pkg_idx=${#old_pkgs[@]}
            old_pkgs+=("${pkg}")
            new_pkgs+=("${other}")
            added_pkg_to_index_map["${pkg}"]=${pkg_idx}
            added_pkg_to_index_map["${other}"]=${pkg_idx}
            unset pkg_idx
        else
            pkg_debug_enable "${pkg}"
            if [[ -n ${added_pkg_to_index_map["${pkg}"]:-} ]]; then
                pkg_debug 'handled already through some rename'
            else
                pkg_debug "${pkg} is not renamed"
                local pkg_idx
                # doesn't matter if it's length of new_pkgs or old_pkgs,
                # both are assumed to have the same length
                pkg_idx=${#old_pkgs[@]}
                old_pkgs+=("${pkg}")
                new_pkgs+=("${pkg}")
                added_pkg_to_index_map["${pkg}"]=${pkg_idx}
            fi
            pkg_debug_disable
        fi
    done
    unset added_pkg_to_index_map
    # Pack all the package information into a single structure passed
    # to the jobs; BOM_* index constants come from elsewhere in this
    # file.
    bunch_of_maps_declare hpc_bunch_of_maps
    hpc_bunch_of_maps[BOM_PKG_TO_TAGS_MVM_IDX]=${pkg_to_tags_mvm_var_name}
    hpc_bunch_of_maps[BOM_PKG_SLOTS_SET_MVM_IDX]=hpc_pkg_slots_set_mvm
    hpc_bunch_of_maps[BOM_OLD_PKG_SLOT_VERMINMAX_MAP_MVM_IDX]=hpc_old_pkg_slot_verminmax_map_mvm
    hpc_bunch_of_maps[BOM_NEW_PKG_SLOT_VERMINMAX_MAP_MVM_IDX]=hpc_new_pkg_slot_verminmax_map_mvm
    hpc_bunch_of_maps[BOM_PKG_SOURCES_MAP_IDX]=hpc_package_sources_map
    # We will be spawning as many jobs below as there are available
    # processors/cores. Each job has its own work directory and will
    # be receiving packages to process in batches of five. Once all
    # the packages are processed, their reports are aggregated into a
    # single one.
    local -i pkg_batch_size=5 this_batch_size pkg_idx=0 pkg_count=${#old_pkgs[@]}
    local -a pkg_batch old_pkgs_batch new_pkgs_batch
    local pkg_job_top_dir="${WORKDIR}/pkgjobdirs"
    create_cleanup_dir "${pkg_job_top_dir}"
    local -a pkg_job_state_names=()
    local pkg_job_state_name pkg_job_name pkg_job_dir pkg_job_warnings_dir pkg_job_updates_dir
    local -i job_count i
    get_num_proc job_count
    local file
    local -a paths
    # Set up environment for each job, create a job state and kick off
    # the job.
    for ((i = 0; i < job_count; ++i)); do
        gen_varname pkg_job_state_name
        pkg_job_state_declare "${pkg_job_state_name}"
        gen_varname pkg_job_name
        job_declare "${pkg_job_name}"
        pkg_job_dir="${pkg_job_top_dir}/j${i}"
        pkg_job_warnings_dir="${pkg_job_dir}/warnings"
        pkg_job_updates_dir="${pkg_job_dir}/updates"
        create_cleanup_dir "${pkg_job_dir}"
        create_cleanup_dir "${pkg_job_warnings_dir}"
        create_cleanup_dir "${pkg_job_updates_dir}"
        paths=()
        for file in developer-warnings warnings manual-work-needed; do
            paths+=( "${pkg_job_dir}/warnings/${file}" )
        done
        for file in summary_stubs changelog_stubs; do
            paths+=( "${pkg_job_dir}/updates/${file}" )
        done
        # TODO: That's a bit messy
        add_cleanup "find -P ${pkg_job_updates_dir@Q} -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} +"
        add_cleanup "rm -f ${paths[*]@Q}"
        job_run -m "${pkg_job_name}" handle_package_changes_job "${pkg_job_dir}" hpc_bunch_of_maps
        local -n pkg_job_state_ref="${pkg_job_state_name}"
        pkg_job_state_ref[PJS_JOB_IDX]=${pkg_job_name}
        pkg_job_state_ref[PJS_DIR_IDX]=${pkg_job_dir}
        unset -n pkg_job_state_ref
        pkg_job_state_names+=( "${pkg_job_state_name}" )
    done
    # We have two job arrays, "current" and "next". When iterating the
    # "current" array, we will be putting all still alive jobs into
    # the "next" array, which will become "current" in the next
    # iteration. In every iteration we collect the output and send
    # another batch of packages to be processed by a job if it's
    # ready. We terminate the jobs when we have run out of
    # packages. The looping finishes when all the jobs are terminated.
    local -i current_idx=0 next_idx=1 idx state_count=${#pkg_job_state_names[@]}
    local -a pkg_job_state_names_0=( "${pkg_job_state_names[@]}" ) pkg_job_state_names_1=() pkg_job_output_lines
    # old_pkg and new_pkg are declared local here too - they are only
    # used inside the batch-building loop below, and without the
    # declaration they would leak into the caller's scope.
    local pkg_job_output_line pkg_job_input_sent old_pkg new_pkg
    while [[ state_count -gt 0 ]]; do
        local -n pkg_job_state_names_ref=pkg_job_state_names_${current_idx}
        local -n next_pkg_job_state_names_ref=pkg_job_state_names_${next_idx}
        next_pkg_job_state_names_ref=()
        for pkg_job_state_name in "${pkg_job_state_names_ref[@]}"; do
            local -n pkg_job_state_ref=${pkg_job_state_name}
            pkg_job_name=${pkg_job_state_ref[PJS_JOB_IDX]}
            unset -n pkg_job_state_ref
            if job_is_alive "${pkg_job_name}"; then
                next_pkg_job_state_names_ref+=( "${pkg_job_state_name}" )
            fi
            job_get_output "${pkg_job_name}" pkg_job_output_lines
            pkg_job_input_sent=
            for pkg_job_output_line in "${pkg_job_output_lines[@]}"; do
                if [[ ${pkg_job_output_line} = "${ready_for_more_msg}" ]]; then
                    # Send at most one batch per iteration, even if
                    # multiple READYFORMORE messages were buffered.
                    if [[ -z ${pkg_job_input_sent} ]]; then
                        if [[ pkg_idx -ge pkg_count ]]; then
                            job_send_input "${pkg_job_name}" "${we_are_done_msg}"
                        else
                            old_pkgs_batch=( "${old_pkgs[@]:pkg_idx:pkg_batch_size}" )
                            new_pkgs_batch=( "${new_pkgs[@]:pkg_idx:pkg_batch_size}" )
                            this_batch_size=${#old_pkgs_batch[@]}
                            pkg_batch=( "${this_batch_size}" )
                            for ((i = 0; i < this_batch_size; ++i)); do
                                old_pkg=${old_pkgs_batch[i]}
                                new_pkg=${new_pkgs_batch[i]}
                                pkg_batch+=( "${old_pkg} ${new_pkg}" )
                            done
                            pkg_idx=$((pkg_idx + pkg_batch_size))
                            job_send_input "${pkg_job_name}" "${pkg_batch[@]}"
                        fi
                        pkg_job_input_sent=x
                    fi
                else
                    # The job already used info to print this line, so
                    # we should just echo it, otherwise we will get
                    # repeated prefixes ("script_name: script_name:
                    # something happened")
                    echo "${pkg_job_output_line}"
                fi
            done
        done
        state_count=${#next_pkg_job_state_names_ref[@]}
        if [[ state_count -gt 0 ]]; then
            sleep 0.2
        fi
        unset -n pkg_job_state_names_ref next_pkg_job_state_names_ref
        idx=${current_idx}
        current_idx=${next_idx}
        next_idx=${idx}
    done
    # All the jobs are done, so here we collect all their reports and
    # merge them into the main ones in reports directory.
    local some_job_failed='' hpc_filename
    local -i hpc_rv
    truncate --size=0 "${REPORTS_DIR}/updates/summary_stubs" "${REPORTS_DIR}/updates/changelog_stubs"
    for pkg_job_state_name in "${pkg_job_state_names[@]}"; do
        local -n pkg_job_state_ref=${pkg_job_state_name}
        pkg_job_name=${pkg_job_state_ref[PJS_JOB_IDX]}
        pkg_job_dir=${pkg_job_state_ref[PJS_DIR_IDX]}
        unset -n pkg_job_state_ref
        job_reap "${pkg_job_name}" hpc_rv
        if [[ hpc_rv -ne 0 ]]; then
            some_job_failed=x
        fi
        for file in "${pkg_job_dir}/warnings/"*; do
            basename_out "${file}" hpc_filename
            cat "${file}" >>"${REPORTS_DIR}/${hpc_filename}"
        done
        for file in "${pkg_job_dir}/updates/"*; do
            basename_out "${file}" hpc_filename
            if [[ -f ${file} ]]; then
                cat "${file}" >>"${REPORTS_DIR}/updates/${hpc_filename}"
            elif [[ -d ${file} ]]; then
                if [[ ! -d "${REPORTS_DIR}/updates/${hpc_filename}" ]]; then
                    mkdir -p "${REPORTS_DIR}/updates/${hpc_filename}"
                fi
                mv "${file}/"* "${REPORTS_DIR}/updates/${hpc_filename}"
            fi
        done
    done
    pkg_job_state_unset "${pkg_job_state_names[@]}"
    bunch_of_maps_unset hpc_bunch_of_maps
    mvm_unset hpc_new_pkg_slot_verminmax_map_mvm
    mvm_unset hpc_old_pkg_slot_verminmax_map_mvm
    mvm_unset hpc_pkg_slots_set_mvm
    if [[ -n ${some_job_failed} ]]; then
        fail "some job failed"
    fi
}
# Gets the first item from the passed set.
#
# Mostly intended to "unwrap" a single-element set.
#
# Params:
#
# 1 - name of the set variable
# 2 - name of the variable where the element will be stored
function get_first_from_set() {
    local -n gffs_set_ref=${1}; shift
    local -n gffs_result_ref=${1}; shift

    # Default to an empty string for the empty-set case; the loop
    # below overwrites it with the first key and stops immediately.
    gffs_result_ref=''
    local gffs_key
    for gffs_key in "${!gffs_set_ref[@]}"; do
        gffs_result_ref=${gffs_key}
        break
    done
}
# Write information to reports directory about the package update
# (meaning specifically that the new version is greater than the old
# one).
#
# Params:
#
# 1 - package output paths variable name
# 2 - name of the package tags set mvm variable
# 3 - old package name
# 4 - new package name
# 5 - old version
# 6 - new version
function handle_pkg_update() {
    local -n package_output_paths_ref=${1}; shift
    local pkg_to_tags_mvm_var_name=${1}; shift
    local old_pkg=${1}; shift
    local new_pkg=${1}; shift
    local old=${1}; shift
    local new=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # Strip the Gentoo revision suffix (like -r3) from the versions.
    # NOTE(review): the +([0-9]) pattern needs the extglob shell
    # option - assumed to be enabled elsewhere in this file; confirm.
    local old_no_r=${old%-r+([0-9])}
    local new_no_r=${new%-r+([0-9])}
    # Package name without the category part.
    local pkg_name=${new_pkg#*/}
    # Summary lines in "<indent>:<text>" form, consumed by
    # generate_summary_stub at the end.
    local -a lines=( "0:from ${old} to ${new}" )
    if [[ ${old_pkg} != "${new_pkg}" ]]; then
        lines+=( "0:renamed from ${old_pkg}" )
    fi
    local out_dir=${package_output_paths_ref[POP_PKG_SLOT_OUT_DIR_IDX]}
    generate_ebuild_diff "${out_dir}" "${OLD_PORTAGE_STABLE}" "${NEW_PORTAGE_STABLE}" "${old_pkg}" "${new_pkg}" "${old}" "${new}"
    # Generate a report from the md5-cache entries diff and append its
    # lines to the summary lines.
    local diff_report_name
    gen_varname diff_report_name
    diff_report_declare "${diff_report_name}"
    generate_cache_diff_report "${diff_report_name}" "${WORKDIR}/pkg-reports/old/portage-stable-cache" "${WORKDIR}/pkg-reports/new/portage-stable-cache" "${old_pkg}" "${new_pkg}" "${old}" "${new}"
    local -n diff_report_ref=${diff_report_name}
    local -n diff_lines_ref=${diff_report_ref[${DR_LINES_IDX}]}
    lines+=( "${diff_lines_ref[@]}" )
    unset -n diff_lines_ref
    unset -n diff_report_ref
    diff_report_unset "${diff_report_name}"
    # Only mention the diff files in the summary if they are non-empty.
    if [[ -s "${out_dir}/ebuild.diff" ]]; then
        lines+=( '0:TODO: review ebuild.diff' )
    fi
    local out_dir_non_slot=${package_output_paths_ref[POP_PKG_OUT_DIR_IDX]}
    if [[ -s "${out_dir_non_slot}/other.diff" ]]; then
        lines+=( '0:TODO: review other.diff' )
    fi
    lines+=( '0:TODO: review occurences' )
    if [[ ${old_pkg} != "${new_pkg}" ]]; then
        lines+=( '0:TODO: review occurences-for-old-name' )
    fi
    local -a hpu_tags
    tags_for_pkg "${pkg_to_tags_mvm_var_name}" "${new_pkg}" hpu_tags
    local top_out_dir=${package_output_paths_ref[POP_OUT_DIR_IDX]}
    # A changelog entry is only wanted for actual version bumps, not
    # for revision-only bumps (where the -r-stripped versions are
    # equal).
    if ver_test "${new_no_r}" -gt "${old_no_r}"; then
        # version bump
        generate_changelog_entry_stub "${top_out_dir}" "${pkg_name}" "${new_no_r}" "${hpu_tags[@]}"
        lines+=( '0:release notes: TODO' )
    fi
    generate_summary_stub "${top_out_dir}" "${new_pkg}" "${hpu_tags[@]}" -- "${lines[@]}"
}
# Write information to reports directory about the modified package
# (meaning specifically that the new version is equal to the old
# one).
#
# Params:
#
# 1 - package output paths variable name
# 2 - name of the package tags set mvm variable
# 3 - old package name
# 4 - new package name
# 5 - version
# 6 - name of a "bool" variable where info is stored if relevant files
#     has changed (empty means nothing changed, non-empty means
#     something has changed)
function handle_pkg_as_is() {
    local -n package_output_paths_ref=${1}; shift
    local pkg_to_tags_mvm_var_name=${1}; shift
    local old_pkg=${1}; shift
    local new_pkg=${1}; shift
    local v=${1}; shift
    # Out-parameter, set to non-empty only if something relevant has
    # changed.
    local -n changed_ref=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # Note: unlike handle_pkg_update and handle_pkg_downgrade, no
    # changelog stub is generated here, so the shortened package name
    # (without the category) is not needed.
    #
    # Summary lines in "<indent>:<text>" form, consumed by
    # generate_summary_stub at the end.
    local -a lines=( "0:still at ${v}" )
    local renamed=''
    if [[ ${old_pkg} != "${new_pkg}" ]]; then
        lines+=( "0:renamed from ${old_pkg}" )
        renamed=x
    fi
    local out_dir=${package_output_paths_ref[POP_PKG_SLOT_OUT_DIR_IDX]}
    generate_ebuild_diff "${out_dir}" "${OLD_PORTAGE_STABLE}" "${NEW_PORTAGE_STABLE}" "${old_pkg}" "${new_pkg}" "${v}" "${v}"
    local modified=''
    # Generate a report from the md5-cache entries diff; any lines in
    # it mean that the package effectively changed.
    local diff_report_name
    gen_varname diff_report_name
    diff_report_declare "${diff_report_name}"
    generate_cache_diff_report "${diff_report_name}" "${WORKDIR}/pkg-reports/old/portage-stable-cache" "${WORKDIR}/pkg-reports/new/portage-stable-cache" "${old_pkg}" "${new_pkg}" "${v}" "${v}"
    local -n diff_report_ref=${diff_report_name}
    local -n diff_lines_ref=${diff_report_ref[${DR_LINES_IDX}]}
    if [[ ${#diff_lines_ref[@]} -gt 0 ]]; then
        lines+=( "${diff_lines_ref[@]}" )
        modified=x
    fi
    unset -n diff_lines_ref
    unset -n diff_report_ref
    diff_report_unset "${diff_report_name}"
    # Non-empty diff files also count as modifications.
    if [[ -s "${out_dir}/ebuild.diff" ]]; then
        lines+=( '0:TODO: review ebuild.diff' )
        modified=x
    fi
    local out_dir_non_slot=${package_output_paths_ref[POP_PKG_OUT_DIR_IDX]}
    if [[ -s "${out_dir_non_slot}/other.diff" ]]; then
        lines+=( '0:TODO: review other.diff' )
        modified=x
    fi
    if [[ -z ${renamed} ]] && [[ -z ${modified} ]]; then
        # Nothing relevant has changed, return early.
        return 0
    fi
    changed_ref=x
    lines+=( '0:TODO: review occurences' )
    if [[ ${old_pkg} != "${new_pkg}" ]]; then
        lines+=( '0:TODO: review occurences-for-old-name' )
    fi
    local top_out_dir=${package_output_paths_ref[POP_OUT_DIR_IDX]}
    local -a hpai_tags
    tags_for_pkg "${pkg_to_tags_mvm_var_name}" "${new_pkg}" hpai_tags
    generate_summary_stub "${top_out_dir}" "${new_pkg}" "${hpai_tags[@]}" -- "${lines[@]}"
}
# Write information to reports directory about the package downgrade
# (meaning specifically that the new version is lower than the old
# one).
#
# Params:
#
# 1 - package output paths variable name
# 2 - name of the package tags set mvm variable
# 3 - old package name
# 4 - new package name
# 5 - old version
# 6 - new version
function handle_pkg_downgrade() {
    local -n package_output_paths_ref=${1}; shift
    local pkg_to_tags_mvm_var_name=${1}; shift
    local old_pkg=${1}; shift
    local new_pkg=${1}; shift
    local old=${1}; shift
    local new=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # Strip the Gentoo revision suffix (like -r3) from the versions.
    # NOTE(review): the +([0-9]) pattern needs the extglob shell
    # option - assumed to be enabled elsewhere in this file; confirm.
    local old_no_r=${old%-r+([0-9])}
    local new_no_r=${new%-r+([0-9])}
    # Package name without the category part.
    local pkg_name=${new_pkg#*/}
    # Summary lines in "<indent>:<text>" form, consumed by
    # generate_summary_stub at the end.
    local -a lines=( "0:downgraded from ${old} to ${new}" )
    if [[ ${old_pkg} != "${new_pkg}" ]]; then
        lines+=( "0:renamed from ${old_pkg}" )
    fi
    local out_dir=${package_output_paths_ref[POP_PKG_SLOT_OUT_DIR_IDX]}
    generate_ebuild_diff "${out_dir}" "${OLD_PORTAGE_STABLE}" "${NEW_PORTAGE_STABLE}" "${old_pkg}" "${new_pkg}" "${old}" "${new}"
    # Generate a report from the md5-cache entries diff and append its
    # lines to the summary lines.
    local diff_report_name
    gen_varname diff_report_name
    diff_report_declare "${diff_report_name}"
    generate_cache_diff_report "${diff_report_name}" "${WORKDIR}/pkg-reports/old/portage-stable-cache" "${WORKDIR}/pkg-reports/new/portage-stable-cache" "${old_pkg}" "${new_pkg}" "${old}" "${new}"
    local -n diff_report_ref=${diff_report_name}
    local -n diff_lines_ref=${diff_report_ref[${DR_LINES_IDX}]}
    lines+=( "${diff_lines_ref[@]}" )
    unset -n diff_lines_ref
    unset -n diff_report_ref
    diff_report_unset "${diff_report_name}"
    # Only mention the diff files in the summary if they are non-empty.
    if [[ -s "${out_dir}/ebuild.diff" ]]; then
        lines+=( '0:TODO: review ebuild.diff' )
    fi
    local out_dir_non_slot=${package_output_paths_ref[POP_PKG_OUT_DIR_IDX]}
    if [[ -s "${out_dir_non_slot}/other.diff" ]]; then
        lines+=( '0:TODO: review other.diff' )
    fi
    lines+=( '0:TODO: review occurences' )
    if [[ ${old_pkg} != "${new_pkg}" ]]; then
        lines+=( '0:TODO: review occurences-for-old-name' )
    fi
    local -a hpd_tags
    tags_for_pkg "${pkg_to_tags_mvm_var_name}" "${new_pkg}" hpd_tags
    local top_out_dir=${package_output_paths_ref[POP_OUT_DIR_IDX]}
    # A changelog entry is only wanted for an actual version
    # downgrade, not for a revision-only change (where the -r-stripped
    # versions are equal).
    if ver_test "${new_no_r}" -lt "${old_no_r}"; then
        # version downgrade - it still gets a changelog stub
        generate_changelog_entry_stub "${top_out_dir}" "${pkg_name}" "${new_no_r}" "${hpd_tags[@]}"
        lines+=( "0:release notes: TODO" )
    fi
    generate_summary_stub "${top_out_dir}" "${new_pkg}" "${hpd_tags[@]}" -- "${lines[@]}"
}
# Retrieves tags for a package.
#
# Params:
#
# 1 - name of the package tags set mvm variable
# 2 - package name
# 3 - name of the array variable, where the tags will be stored
function tags_for_pkg() {
    local tfp_mvm_name=${1}; shift
    local tfp_pkg=${1}; shift
    local -n tfp_out_tags_ref=${1}; shift

    # Look up the name of the variable holding the tags for the
    # package; an empty result means the package has no tags.
    local tfp_tags_var_name
    mvm_get "${tfp_mvm_name}" "${tfp_pkg}" tfp_tags_var_name
    pkg_debug_enable "${tfp_pkg}"
    pkg_debug "checking for tags in ${tfp_mvm_name}"
    tfp_out_tags_ref=()
    if [[ -n ${tfp_tags_var_name} ]]; then
        local -n tfp_stored_tags_ref=${tfp_tags_var_name}
        tfp_out_tags_ref=( "${tfp_stored_tags_ref[@]}" )
        pkg_debug "tags available: ${tfp_stored_tags_ref[*]}"
        unset -n tfp_stored_tags_ref
    else
        pkg_debug "no tags available"
    fi
    pkg_debug_disable
}
# Adds a changelog stub to changelog file in reports directory.
#
# Params:
# 1 - output directory
# 2 - package name (shortened, without the category)
# 3 - version
# @ - package tags
function generate_changelog_entry_stub() {
    local gces_out_dir=${1}; shift
    local gces_pkg_name=${1}; shift
    local gces_version=${1}; shift
    # The rest of the parameters are package tags.

    # Map the tags to their changelog form - "PROD" is presented as
    # "base", everything else is just lower-cased.
    local -a gces_mapped_tags=()
    local gces_tag
    for gces_tag in "${@}"; do
        if [[ ${gces_tag} = 'PROD' ]]; then
            gces_mapped_tags+=( 'base' )
        else
            gces_mapped_tags+=( "${gces_tag,,}" )
        fi
    done
    local gces_tags_str
    if [[ ${#gces_mapped_tags[@]} -eq 0 ]]; then
        # no tags, it means it's an SDK package
        gces_tags_str='SDK'
    else
        join_by gces_tags_str ', ' "${gces_mapped_tags[@]}"
    fi
    printf '%s %s: %s ([%s](TODO))\n' '-' "${gces_tags_str}" "${gces_pkg_name}" "${gces_version}" >>"${gces_out_dir}/changelog_stubs"
}
# Adds a stub to the summary file in reports directory.
#
# Params:
# 1 - output directory
# 2 - package
# @ - tags followed by double dash followed by lines to append to the
#     file; each line is of the "<indent level>:<text>" form
function generate_summary_stub() {
    local gss_out_dir=${1}; shift
    local gss_pkg=${1}; shift
    # Collect the tags up to the '--' separator; whatever follows the
    # separator are the summary lines.
    local -a gss_tags=()
    while [[ ${#} -gt 0 ]] && [[ ${1} != '--' ]]; do
        gss_tags+=( "${1}" )
        shift
    done
    if [[ ${#} -gt 0 ]]; then
        # Drop the '--' separator itself.
        shift
    fi
    {
        # Header: "- <package>:" optionally followed by "[tag]" items.
        printf '%s %s:' '-' "${gss_pkg}"
        if [[ ${#gss_tags[@]} -gt 0 ]]; then
            printf ' [%s]' "${gss_tags[@]}"
        fi
        printf '\n'
        local gss_spec gss_depth gss_text
        for gss_spec in "${@}"; do
            gss_depth=${gss_spec%%:*}
            gss_text=${gss_spec#*:}
            if [[ ${gss_depth} -gt 0 ]]; then
                # Emit two spaces per indent level; %.0s consumes one
                # word from seq's output without printing it.
                printf -- '  %.0s' $(seq 1 "${gss_depth}")
            fi
            printf ' - %s\n' "${gss_text}"
        done
        # Trailing empty line separates the stubs from each other.
        printf '\n'
    } >>"${gss_out_dir}/summary_stubs"
}
# Generate diffs between directories in old state and new state for a
# package.
#
# Produces two files in the output directory: "full.diff" with the
# complete recursive unified diff, and "brief-summary" that only
# lists which files differ.
#
# Params:
#
# 1 - output directory
# 2 - path to portage-stable in old state
# 3 - path to portage-stable in new state
# 4 - old package name
# 5 - new package name
function generate_full_diffs() {
    local gfd_out_dir=${1}; shift
    local gfd_old_ps=${1}; shift
    local gfd_new_ps=${1}; shift
    local gfd_old_pkg=${1}; shift
    local gfd_new_pkg=${1}; shift

    local gfd_old_path="${gfd_old_ps}/${gfd_old_pkg}"
    local gfd_new_path="${gfd_new_ps}/${gfd_new_pkg}"
    # --new-file treats missing files as empty, so added and removed
    # files show up with their full contents.
    xdiff --recursive --unified=3 --new-file "${gfd_old_path}" "${gfd_new_path}" >"${gfd_out_dir}/full.diff"
    xdiff --recursive --unified=3 --brief "${gfd_old_path}" "${gfd_new_path}" >"${gfd_out_dir}/brief-summary"
}
# Generate a diff between non-ebuild, non-Manifest files for old and
# new package.
#
# Params:
#
# 1 - output directory
# 2 - path to portage-stable in old state
# 3 - path to portage-stable in new state
# 4 - old package name
# 5 - new package name
function generate_non_ebuild_diffs() {
local out_dir=${1}; shift
local old_ps=${1}; shift
local new_ps=${1}; shift
local old_pkg=${1}; shift
local new_pkg=${1}; shift
local old_path="${old_ps}/${old_pkg}"
local new_path="${new_ps}/${new_pkg}"
local -a diff_opts=(
--recursive
--unified=3
# Show contents of deleted or added files too.
--new-file
# Ignore ebuilds and the Manifest file.
--exclude='*.ebuild'
--exclude='Manifest'
)
xdiff "${diff_opts[@]}" "${old_path}" "${new_path}" >"${out_dir}/other.diff"
}
# Generate a diff between specific ebuilds for old and new package.
#
# Params:
#
# 1 - output directory
# 2 - path to portage-stable in old state
# 3 - path to portage-stable in new state
# 4 - old package name
# 5 - new package name
# 6 - old package version
# 7 - new package version
function generate_ebuild_diff() {
    local out_dir=${1}; shift
    local old_ps=${1}; shift
    local new_ps=${1}; shift
    local old_pkg=${1}; shift
    local new_pkg=${1}; shift
    local old=${1}; shift
    local new=${1}; shift
    # ebuild filenames use only the package name without the category
    # (<category>/<name> -> <name>-<version>.ebuild)
    local old_ebuild="${old_ps}/${old_pkg}/${old_pkg#*/}-${old}.ebuild"
    local new_ebuild="${new_ps}/${new_pkg}/${new_pkg#*/}-${new}.ebuild"
    xdiff --unified=3 "${old_ebuild}" "${new_ebuild}" >"${out_dir}/ebuild.diff"
}
# Generates a report of differences between the cached metadata of the
# old and new version of a package. The report is stored in the
# variable whose name was passed; its exact shape is defined by
# diff_cache_data (declared elsewhere in this library).
#
# Params:
#
# 1 - name of a variable where the diff report will be stored
# 2 - cache directory for the old state
# 3 - cache directory for the new state
# 4 - old package name
# 5 - new package name
# 6 - old package version
# 7 - new package version
function generate_cache_diff_report() {
    local diff_report_var_name=${1}; shift
    local old_cache_dir=${1}; shift
    local new_cache_dir=${1}; shift
    local old_pkg=${1}; shift
    local new_pkg=${1}; shift
    local old=${1}; shift
    local new=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # cache entries are laid out as <cache dir>/<category>/<name>-<version>
    local old_entry=${old_cache_dir}/${old_pkg}-${old}
    local new_entry=${new_cache_dir}/${new_pkg}-${new}
    # gen_varname presumably generates unique variable names used as
    # handles for the parsed cache data — confirm against its definition
    local old_cache_name new_cache_name
    gen_varname old_cache_name
    gen_varname new_cache_name
    cache_file_declare "${old_cache_name}" "${new_cache_name}"
    # parse both entries for every supported architecture
    parse_cache_file "${old_cache_name}" "${old_entry}" "${ARCHES[@]}"
    parse_cache_file "${new_cache_name}" "${new_entry}" "${ARCHES[@]}"
    diff_cache_data "${old_cache_name}" "${new_cache_name}" "${diff_report_var_name}"
    # drop the variables declared by cache_file_declare above
    cache_file_unset "${old_cache_name}" "${new_cache_name}"
}
# Generate a report with information where the old and new packages
# are mentioned in entire scripts repository. May result in two
# separate reports if the package got renamed.
#
# 1 - output directory
# 2 - path to scripts repo
# 3 - old package name
# 4 - new package name
function generate_package_mention_reports() {
    local out_dir=${1}; shift
    local scripts=${1}; shift
    local old_pkg=${1}; shift
    local new_pkg=${1}; shift
    generate_mention_report_for_package "${scripts}" "${new_pkg}" >"${out_dir}/occurences"
    if [[ ${old_pkg} = "${new_pkg}" ]]; then
        # not a rename, a single report is enough
        return 0
    fi
    generate_mention_report_for_package "${scripts}" "${old_pkg}" >"${out_dir}/occurences-for-old-name"
}
# Generate a report with information where the package is mentioned in
# entire scripts repository. Prints the report to stdout, so the
# invocation needs to be captured or redirected.
#
# 1 - path to scripts repo
# 2 - package name
function generate_mention_report_for_package() {
    local scripts pkg
    scripts=${1}; shift
    pkg=${1}; shift
    local ps co
    ps='sdk_container/src/third_party/portage-stable'
    co='sdk_container/src/third_party/coreos-overlay'
    yell "${pkg} in overlay profiles"
    grep_pkg "${scripts}" "${pkg}" "${co}/profiles"
    yell "${pkg} in Gentoo profiles"
    grep_pkg "${scripts}" "${pkg}" "${ps}/profiles"
    # shellcheck disable=SC2164 # we use set -e, so the script will exit if it fails
    pushd "${scripts}/${co}" >/dev/null
    # The extended glob @(|-+([0-9])*) matches either the bare package
    # name or name-<version>; requires extglob, presumably enabled by
    # whatever sources this library — confirm.
    yell "${pkg} in env overrides"
    cat_entries "coreos/config/env/${pkg}"@(|-+([0-9])*)
    yell "${pkg} in user patches"
    local dir
    for dir in "coreos/user-patches/${pkg}"@(|-+([0-9])*); do
        echo "BEGIN DIRECTORY: ${dir}"
        cat_entries "${dir}"/*
        echo "END DIRECTORY: ${dir}"
    done
    # shellcheck disable=SC2164 # we use set -e, so the script will exit if it fails
    popd >/dev/null
    # the :(exclude) pathspecs keep the broader searches from
    # repeating matches already reported above
    yell "${pkg} in overlay (outside profiles)"
    grep_pkg "${scripts}" "${pkg}" "${co}" ":(exclude)${co}/profiles"
    yell "${pkg} in Gentoo (outside profiles)"
    grep_pkg "${scripts}" "${pkg}" "${ps}" ":(exclude)${ps}/profiles"
    yell "${pkg} in scripts (outside overlay and Gentoo)"
    grep_pkg "${scripts}" "${pkg}" ":(exclude)${ps}" ":(exclude)${co}"
}
# Gets a slot-specific directory name for ebuild diffs.
#
# Params:
#
# 1 - old slot
# 2 - new slot
# 3 - name of a variable where the path will be stored
function slot_dirname() {
    local sd_old_slot=${1}; shift
    local sd_new_slot=${1}; shift
    local -n sd_dirname_ref=${1}; shift
    # slots may have slashes in them - replace them with "-slash-"
    local sd_from=${sd_old_slot//\//-slash-}
    if [[ ${sd_old_slot} != "${sd_new_slot}" ]]; then
        # slot changed, encode both ends of the change in the name
        sd_dirname_ref="${sd_from}-to-${sd_new_slot//\//-slash-}"
    else
        sd_dirname_ref=${sd_from}
    fi
}
# Greps for a package name in selected directories of the passed
# repo. It prints, so the invocation needs to be captured.
#
# Params:
#
# 1 - path to scripts repo
# 2 - package name
# @ - directories in the repo to limit the search for
function grep_pkg() {
    local scripts=${1}; shift
    local pkg=${1}; shift
    # rest are directories
    #
    # match the name followed by a version, a non-identifier character
    # or end of line, to avoid hitting packages sharing a name prefix
    local pattern="${pkg}"'\(-[0-9$]\|[^a-zA-Z0-9_-]\|$\)'
    if ! GIT_PAGER='' git -C "${scripts}" grep "${pattern}" -- "${@}"; then
        : # no matches found is fine
    fi
}
# Prints the passed files preceding and following with BEGIN ENTRY and
# END ENTRY markers.
#
# Params:
#
# @ - the files to print
function cat_entries() {
    # declare the loop variable as local, so it does not leak into
    # (or clobber a variable in) the caller's scope
    local entry
    for entry; do
        echo "BEGIN ENTRY: ${entry}"
        cat "${entry}"
        echo "END ENTRY: ${entry}"
    done
}
# Reads the listings and renames, handles updates of both packages and
# non-packages (eclasses, licenses, profiles, etc.)
function handle_gentoo_sync() {
    #mvm_debug_enable hgs_pkg_to_tags_mvm
    # multi-value map from package name to its tags, filled in from
    # the listing reports by process_listings
    mvm_declare hgs_pkg_to_tags_mvm
    process_listings hgs_pkg_to_tags_mvm
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    # old package name -> new package name, filled in from the profile
    # updates directory
    local -A hgs_renames_old_to_new_map=()
    process_profile_updates_directory hgs_renames_old_to_new_map
    mkdir -p "${REPORTS_DIR}/updates"
    handle_package_changes hgs_renames_old_to_new_map hgs_pkg_to_tags_mvm
    mvm_unset hgs_pkg_to_tags_mvm
    #mvm_debug_disable hgs_pkg_to_tags_mvm
    local old_head new_head
    old_head=$(git -C "${OLD_STATE}" rev-parse HEAD)
    new_head=$(git -C "${NEW_STATE}" rev-parse HEAD)
    # set of non-package items that changed between the two states;
    # keys are either 'eclass/<name>.eclass' paths or whole top-level
    # category names (licenses, metadata, profiles, scripts)
    local -A non_package_updates_set
    non_package_updates_set=()
    local path in_ps category
    if [[ "${old_head}" != "${new_head}" ]]; then
        # walk every file changed between the two heads and classify
        # the ones living under portage-stable
        while read -r path; do
            if [[ ${path} != "${PORTAGE_STABLE_SUFFIX}/"* ]]; then
                continue
            fi
            in_ps=${path#"${PORTAGE_STABLE_SUFFIX}/"}
            category=${in_ps%%/*}
            case "${category}" in
                eclass)
                    # only flat eclass/<name>.eclass files are expected here
                    if [[ ${in_ps} != 'eclass/'+([^/])'.eclass' ]]; then
                        fail "unexpected updated file inside eclass directory: '${path}'"
                    fi
                    non_package_updates_set["${in_ps}"]=x
                    ;;
                licenses|metadata|profiles|scripts)
                    # these are handled per directory, not per file
                    non_package_updates_set["${category}"]=x
                    ;;
                virtual|*-*)
                    # Package update, already handled
                    :
                    ;;
                *)
                    fail "unexpected updated file '${path}'"
                    ;;
            esac
        done < <(git -C "${NEW_STATE}" diff-tree --no-commit-id --name-only -r "${old_head}" "${new_head}")
    fi
    # dispatch each collected non-package update to its handler
    local entry
    for entry in "${!non_package_updates_set[@]}"; do
        case "${entry}" in
            eclass/*)
                handle_eclass "${entry}"
                ;;
            licenses)
                handle_licenses
                ;;
            metadata)
                info "not handling metadata updates, skipping"
                ;;
            profiles)
                handle_profiles
                ;;
            scripts)
                handle_scripts
                ;;
            *)
                fail "unknown non-package update for ${entry}"
                ;;
        esac
    done
    # the handlers above appended stubs in arbitrary order
    sort_summary_stubs
    sort_changelog_stubs
}
# Sorts entries in the summary file if it exists.
function sort_summary_stubs() {
    local stubs_file="${REPORTS_DIR}/updates/summary_stubs"
    if [[ ! -f ${stubs_file} ]]; then
        return 0
    fi
    sort_like_summary_stubs "${stubs_file}"
}
# Sorts entries in the summary file.
#
# The file consists of groups separated by empty lines; groups are
# sorted by the package name taken from their first line. Duplicated
# package names keep all their groups, in the order of appearance.
#
# Lines look like as follows:
#
# -BEGIN-
# - dev-lang/python: [DEV]
# - from 3.11.4 to 3.11.5
# - no changes in ebuild
# - release notes: TODO
#
# - app-emulation/qemu:
# - from 8.0.3 to 8.0.4
# - no changes in ebuild
# - release notes: TODO
#
# -END-
#
# Params:
#
# 1 - path to the file to sort
function sort_like_summary_stubs() {
    local f
    f=${1}; shift
    # multi-value map from entry name (possibly suffixed with @<n> for
    # duplicates) to the lines of its group
    mvm_declare groups_mvm
    local -a lines entries
    lines=()
    entries=()
    # entry name -> number of duplicated groups seen so far
    local -A dups
    dups=()
    local REPLY line entry sss_lines_name dup_count
    # first pass: collect empty-line-separated groups into the map
    while read -r; do
        if [[ -z ${REPLY} ]]; then
            if [[ ${#lines[@]} -gt 0 ]]; then
                # entry name is the part of the first line between the
                # leading dash (plus whitespace, needs extglob) and
                # the first colon
                line=${lines[0]}
                entry=${line#-+([[:space:]])}
                entry=${entry%%:*}
                dup_count=${dups["${entry}"]:-0}
                if [[ ${dup_count} -gt 0 ]]; then
                    # already known duplicate, store under the next
                    # @<n> suffix
                    dup_count=$((dup_count + 1))
                    mvm_add groups_mvm "${entry}@${dup_count}" "${lines[@]}"
                    dups["${entry}"]=${dup_count}
                else
                    mvm_get groups_mvm "${entry}" sss_lines_name
                    if [[ -n ${sss_lines_name} ]]; then
                        # first duplicate found, migrate the already
                        # stored group to the @1 suffix and store the
                        # new one under @2
                        local -n lines_ref=${sss_lines_name}
                        mvm_add groups_mvm "${entry}@1" "${lines_ref[@]}"
                        unset -n lines_ref
                        mvm_remove groups_mvm "${entry}"
                        mvm_add groups_mvm "${entry}@2" "${lines[@]}"
                        dups["${entry}"]=2
                    else
                        # first time this entry is seen
                        mvm_add groups_mvm "${entry}" "${lines[@]}"
                        entries+=( "${entry}" )
                    fi
                fi
                lines=()
            fi
        else
            lines+=( "${REPLY}" )
        fi
    done < <(cat "${f}"; echo) # echo for final empty line, just in case
    if [[ ${#entries[@]} -eq 0 ]]; then
        return 0
    fi
    # second pass: print the groups back into the file in C-locale
    # order of the entry names, duplicates in order of appearance
    local idx
    {
        while read -r line; do
            dup_count=${dups["${line}"]:-0}
            if [[ ${dup_count} -gt 0 ]]; then
                idx=0
                while [[ ${idx} -lt ${dup_count} ]]; do
                    idx=$((idx + 1))
                    mvm_get groups_mvm "${line}@${idx}" sss_lines_name
                    local -n lines_ref=${sss_lines_name}
                    printf '%s\n' "${lines_ref[@]}" ''
                    unset -n lines_ref
                done
            else
                mvm_get groups_mvm "${line}" sss_lines_name
                local -n lines_ref=${sss_lines_name}
                printf '%s\n' "${lines_ref[@]}" ''
                unset -n lines_ref
            fi
        done < <(printf '%s\n' "${entries[@]}" | csort)
    } >"${f}"
    mvm_unset groups_mvm
}
# Sorts entries in changelog stub if it exists.
function sort_changelog_stubs() {
    local stubs_file="${REPORTS_DIR}/updates/changelog_stubs"
    if [[ ! -f ${stubs_file} ]]; then
        return 0
    fi
    sort_like_changelog_stubs "${stubs_file}"
}
# Sorts entries in changelog stub.
#
# Params:
#
# 1 - path to the file to sort
function sort_like_changelog_stubs() {
    local stubs_file=${1}; shift
    local tmp_file="${stubs_file}.tmp"
    # sort into a temporary file first, then replace the original
    csort --output="${tmp_file}" "${stubs_file}"
    mv -f "${tmp_file}" "${stubs_file}"
}
# Invokes sort with C locale, giving a stable, byte-wise ordering
# independent of the user's locale. Meant to be used in bash
# pipelines.
#
# Params:
#
# @ - additional parameters to passed to sort
function csort() {
    # exported only for the duration of this function
    local -x LC_ALL=C
    sort "${@}"
}
# Handle an eclass update. Basically generate a diff when the eclass
# exists on both sides, otherwise just note its addition or removal in
# the summary stubs.
#
# Params:
#
# 1 - path to eclass file within an ebuild repo
function handle_eclass() {
    local eclass=${1}; shift
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    info "handling update of ${eclass}"
    local old_file="${OLD_PORTAGE_STABLE}/${eclass}"
    local new_file="${NEW_PORTAGE_STABLE}/${eclass}"
    local -a summary_lines=()
    if [[ -e ${old_file} && -e ${new_file} ]]; then
        # modified eclass - generate a diff for review
        mkdir -p "${REPORTS_DIR}/updates/${eclass}"
        xdiff --unified=3 "${old_file}" "${new_file}" >"${REPORTS_DIR}/updates/${eclass}/eclass.diff"
        summary_lines+=( '0:TODO: review the diff' )
    elif [[ -e ${old_file} ]]; then
        summary_lines+=( '0:unused, dropped' )
    else
        summary_lines+=( '0:added from Gentoo' )
    fi
    generate_summary_stub "${REPORTS_DIR}/updates" "${eclass}" -- "${summary_lines[@]}"
}
# Handle profile changes. Generates three different diffs - changes in
# relevant profiles (ancestors of the profiles used by board packages
# and SDK), a full diff between all the profiles, and a list of
# possibly irrelevant files that has changed too.
function handle_profiles() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    info "handling update of profiles"
    # collect the profile listing reports for both states (WHICH) and
    # for the SDK plus every board architecture
    local -a files=()
    local which arch
    for which in "${WHICH[@]}"; do
        files+=("${WORKDIR}/pkg-reports/${which}/sdk-profiles")
        for arch in "${ARCHES[@]}"; do
            files+=("${WORKDIR}/pkg-reports/${which}/${arch}-board-profiles")
        done
    done
    # set of profile directories actually used, taken from the
    # 'portage-stable:<dir>' lines of the reports above
    local -A profile_dirs_set
    profile_dirs_set=()
    local line
    while read -r line; do
        profile_dirs_set["${line}"]=x
    done < <(xgrep --no-filename '^portage-stable:' "${files[@]}" | cut -d: -f2-)
    local -a diff_opts
    diff_opts=(
        --recursive
        --unified=3
        --new-file # treat absent files as empty
    )
    local out_dir
    out_dir="${REPORTS_DIR}/updates/profiles"
    mkdir -p "${out_dir}"
    xdiff "${diff_opts[@]}" \
          "${OLD_PORTAGE_STABLE}/profiles" "${NEW_PORTAGE_STABLE}/profiles" >"${out_dir}/full.diff"
    # split the full diff into lines belonging to relevant files and a
    # list of possibly irrelevant changed files; the 'relevant' flag is
    # recomputed at each 'diff ' header line and applies to all
    # following lines until the next header
    local relevant
    relevant=''
    local -a relevant_lines possibly_irrelevant_files
    relevant_lines=()
    possibly_irrelevant_files=()
    local REPLY path dir mark
    while read -r; do
        if [[ ${REPLY} = "diff "* ]]; then
            path=${REPLY##*"${NEW_PORTAGE_STABLE}/profiles/"}
            dirname_out "${path}" dir
            relevant=''
            mark=${profile_dirs_set["${dir}"]:-}
            if [[ -n "${mark}" ]]; then
                relevant=x
            else
                # top-level files, descriptions and package moves are
                # always considered relevant
                case ${dir} in
                    .|desc|desc/*|updates|updates/*)
                        relevant=x
                        ;;
                esac
            fi
            if [[ -z ${relevant} ]]; then
                possibly_irrelevant_files+=( "profiles/${path}" )
            fi
        fi
        if [[ -n ${relevant} ]]; then
            relevant_lines+=( "${REPLY}" )
        fi
    done <"${out_dir}/full.diff"
    lines_to_file_truncate "${out_dir}/relevant.diff" "${relevant_lines[@]}"
    lines_to_file_truncate "${out_dir}/possibly-irrelevant-files" "${possibly_irrelevant_files[@]}"
    generate_summary_stub "${REPORTS_DIR}/updates" profiles -- '0:TODO: review the diffs'
}
# Handles changes in license directory. Generates brief reports and
# diffs about dropped, added or modified licenses. The classification
# is done by parsing the output of a 'diff --brief' run over the two
# license directories.
function handle_licenses() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    info "handling update of licenses"
    local -a dropped=() added=() changed=()
    local line hl_stripped
    # Lines are:
    #
    # Only in <PORTAGE_STABLE_X>/licenses: BSL-1.1
    #
    # or
    #
    # Files <PORTAGE_STABLE_1>/licenses/BSL-1.1 and <PORTAGE_STABLE_2>/licenses/BSL-1.1 differ
    while read -r line; do
        if [[ ${line} = 'Only in '* ]]; then
            # license exists on one side only; which state path shows
            # up in the line tells whether it was dropped or added
            strip_out "${line##*:}" hl_stripped
            if [[ ${line} = *"${OLD_STATE}"* ]]; then
                dropped+=( "${hl_stripped}" )
            elif [[ ${line} = *"${NEW_STATE}"* ]]; then
                added+=( "${hl_stripped}" )
            else
                devel_warn "- unhandled license change: ${line}"
            fi
        elif [[ ${line} = 'Files '*' differ' ]]; then
            # extract the license name from the old-state path
            line=${line##"Files ${OLD_PORTAGE_STABLE}/licenses/"}
            line=${line%% *}
            strip_out "${line}" hl_stripped
            changed+=( "${hl_stripped}" )
        else
            devel_warn \
                '- unhandled diff --brief line:' \
                " - ${line}"
        fi
    done < <(xdiff --brief --recursive "${OLD_PORTAGE_STABLE}/licenses" "${NEW_PORTAGE_STABLE}/licenses")
    local out_dir
    out_dir="${REPORTS_DIR}/updates/licenses"
    mkdir -p "${out_dir}"
    lines_to_file_truncate \
        "${out_dir}/brief-summary" \
        '- removed:' \
        "${dropped[@]/#/ - }" \
        '- added:' \
        "${added[@]/#/ - }" \
        '- modified:' \
        "${changed[@]/#/ - }"
    # concatenate the diffs of all modified licenses into a single file
    truncate --size=0 "${out_dir}/modified.diff"
    local c
    for c in "${changed[@]}"; do
        xdiff --unified=3 "${OLD_PORTAGE_STABLE}/licenses/${c}" "${NEW_PORTAGE_STABLE}/licenses/${c}" >>"${out_dir}/modified.diff"
    done
    # build the summary stub lines, one per non-empty category
    local -a lines
    lines=()
    local joined
    if [[ ${#dropped[@]} -gt 0 ]]; then
        join_by joined ', ' "${dropped[@]}"
        lines+=( "0:dropped ${joined}" )
    fi
    if [[ ${#added[@]} -gt 0 ]]; then
        join_by joined ', ' "${added[@]}"
        lines+=( "0:added ${joined}" )
    fi
    if [[ ${#changed[@]} -gt 0 ]]; then
        join_by joined ', ' "${changed[@]}"
        lines+=( "0:updated ${joined}" )
    fi
    generate_summary_stub "${REPORTS_DIR}/updates" licenses -- "${lines[@]}"
}
# Generates reports about changes inside the scripts directory.
function handle_scripts() {
    # shellcheck source=for-shellcheck/globals
    source "${WORKDIR}/globals"
    info "handling update of scripts"
    local scripts_report_dir="${REPORTS_DIR}/updates/scripts"
    mkdir -p "${scripts_report_dir}"
    xdiff --recursive --unified=3 "${OLD_PORTAGE_STABLE}/scripts" "${NEW_PORTAGE_STABLE}/scripts" >"${scripts_report_dir}/scripts.diff"
    generate_summary_stub "${REPORTS_DIR}/updates" scripts -- '0:TODO: review the diffs'
}
fi