Mirror of https://github.com/flatcar/scripts.git (synced 2025-09-23 06:31:18 +02:00)

Merge pull request #696 from flatcar/t-lo/speed-up-ci-build
GitHub Actions CI: streamline build, export all artifacts required by testing, improve testing
Commit: a8101a6d6c
.github/workflows/ci.yaml (vendored, 231 changed lines)
@@ -1,13 +1,11 @@
 name: "Run build"
 on:
   pull_request:
+    # Run when the PR is ready and each time a review is re-requested
+    # (i.e. after feedback has been addressed).
+    types: [review_requested, ready_for_review]
   workflow_dispatch:
     inputs:
-      bincache_server:
-        description: |
-          Bincache server.
-        default: "bincache.flatcar-linux.net"
-        required: true
       image_formats:
         description: |
           Space-separated vendor formats to build.
@@ -36,10 +34,13 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.ref_name }}
   cancel-in-progress: true
 
-permissions: {}
+permissions:
+  pull-requests: write
 
 jobs:
   packages:
+    # Do not run when still in draft mode but a review was requested anyway
+    if: github.event.pull_request.draft == false
     name: "Build Flatcar packages"
     runs-on:
       - self-hosted
@@ -76,10 +77,24 @@ jobs:
           fetch-depth: 0
           submodules: true
 
+      # Hack alert: actions/checkout will check out the (disjunct) merge commit of a PR
+      # instead of its head commit. That commit is not connected to any branch.
+      # This causes breakage downstream e.g. when the devcontainer test wants to check out
+      # the ref in the scripts repo that corresponds to this build.
+      - name: If this is a PR build, use head commit instead of the merge commit
+        if: ${{ github.event.pull_request.head.sha }}
+        shell: bash
+        run: |
+          exec 2>&1
+          set -x
+          set -euo pipefail
+
+          git checkout ${{ github.event.pull_request.head.sha }}
+          git submodule update
+
       - name: Set environment
         shell: bash
         run: |
-          BUILDCACHE_SERVER="bincache.flatcar-linux.net"
           arch="${{ matrix.arch }}"
           COREOS_REMOTE=""
           COREOS_REF=""
@@ -87,14 +102,12 @@ jobs:
           PORTAGE_REF=""
           IMAGE_FORMATS="qemu_uefi"
 
-          [ -z "${{ github.event.inputs.bincache_server }}" ] || BUILDCACHE_SERVER="${{ github.event.inputs.bincache_server }}"
           [ -z "${{ github.event.inputs.coreos_remote }}" ] || COREOS_REMOTE="${{ github.event.inputs.coreos_remote }}"
           [ -z "${{ github.event.inputs.coreos_ref }}" ] || COREOS_REF="${{ github.event.inputs.coreos_ref }}"
           [ -z "${{ github.event.inputs.portage_remote }}" ] || PORTAGE_REMOTE="${{ github.event.inputs.portage_remote }}"
           [ -z "${{ github.event.inputs.portage_ref }}" ] || PORTAGE_REF="${{ github.event.inputs.portage_ref }}"
           [ -z "${{ github.event.inputs.image_formats }}" ] || IMAGE_FORMATS="${{ github.event.inputs.image_formats }}"
 
-          echo "BUILDCACHE_SERVER=${BUILDCACHE_SERVER}" >> $GITHUB_ENV
           echo "arch=${arch}" >> $GITHUB_ENV
           echo "COREOS_REMOTE=${COREOS_REMOTE}" >> $GITHUB_ENV
           echo "COREOS_REF=${COREOS_REF}" >> $GITHUB_ENV
@@ -102,6 +115,15 @@ jobs:
           echo "PORTAGE_REF=${PORTAGE_REF}" >> $GITHUB_ENV
           echo "IMAGE_FORMATS=${IMAGE_FORMATS}" >> $GITHUB_ENV
 
+          # Artifact root for images and torcx tarball as seen from within the container
+          echo "CI_CONTAINER_ARTIFACT_ROOT=/home/sdk/trunk/src/scripts/artifacts" >> $GITHUB_ENV
+          echo "CI_CONTAINER_TORCX_ROOT=/home/sdk/trunk/src/scripts/artifacts/torcx" >> $GITHUB_ENV
+          mkdir -p artifacts/torcx
+
+          # Placeholder URL for run-kola-tests.yaml, "Extract artifacts" step which will replace
+          # this with its IP address.
+          echo "TORCX_TESTS_PACKAGE_URL=http://localhost:12345" >> $GITHUB_ENV
+
       - name: Checkout submodules
         shell: bash
        run: |
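A minimal sketch of how that placeholder round-trips (the address 192.0.2.10 is illustrative only): ci.yaml above bakes http://localhost:12345 into the torcx package URL, and the "Extract artifacts" step of run-kola-tests.yaml (further down in this diff) rewrites it to the runner's real address before serving the files:

    # Sketch only -- not part of the diff; 192.0.2.10 stands in for the runner's IP.
    echo "TORCX_TESTS_PACKAGE_URL=http://localhost:12345" >> "$GITHUB_ENV"          # ci.yaml
    sed -i "s,http://localhost:12345,http://192.0.2.10:12345," torcx_manifest.json   # run-kola-tests.yaml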
@@ -129,7 +151,7 @@ jobs:
         shell: bash
         run: |
           exec 2>&1
-          set +x
+          set -x
           set -euo pipefail
 
           source ci-automation/ci_automation_common.sh
@@ -144,92 +166,76 @@ jobs:
           docker_image_from_registry_or_buildcache "${sdk_name}" "${docker_sdk_vernum}"
           sdk_image="$(docker_image_fullname "${sdk_name}" "${docker_sdk_vernum}")"
 
-          vernum="${version#*-}" # remove main-,alpha-,beta-,stable-,lts- version tag
-          docker_vernum="$(vernum_to_docker_image_version "${vernum}")"
-          packages_container="flatcar-packages-${arch}-${docker_vernum}"
+          container_name="flatcar-ci-build"
+          echo "container_name=${container_name}" >> "$GITHUB_ENV"
 
           # Create version file
           (
             source sdk_lib/sdk_container_common.sh
             create_versionfile "$sdk_version" "$version"
           )
-          ./run_sdk_container -n "${packages_container}" -v "${version}" \
+          # Run the packages build. This will create the ci build container
+          # which will be re-used by subsequent build steps.
+          ./run_sdk_container -n "${container_name}" -v "${version}" \
              -C "${sdk_image}" \
              ./build_packages --board="${arch}-usr" \
-                --torcx_output_root="${CONTAINER_TORCX_ROOT}"
+                --torcx_output_root="${CI_CONTAINER_TORCX_ROOT}" \
+                --torcx_extra_pkg_url="${TORCX_TESTS_PACKAGE_URL}"
 
-          # copy torcx manifest and docker tarball for publishing
-          torcx_tmp="__build__/torcx_tmp"
-          rm -rf "${torcx_tmp}"
-          mkdir "${torcx_tmp}"
-          ./run_sdk_container -n "${packages_container}" -v "${version}" \
-              -C "${sdk_image}" \
-              cp -r "${CONTAINER_TORCX_ROOT}/" \
-                 "${torcx_tmp}"
+          # Create binpkgs tarball for archiving as artifact later
+          ./run_sdk_container -n "${container_name}" \
+              tar -C "/build/${arch}-usr/var/lib/portage/pkgs/" \
+                  -cvf binpkgs.tar .
 
-          source sdk_container/.repo/manifests/version.txt
-          vernum="${FLATCAR_VERSION}"
-          docker_vernum="$(vernum_to_docker_image_version "${vernum}")"
-          packages_image="flatcar-packages-${arch}"
-
-          echo "vernum=${vernum}" >> $GITHUB_ENV
-          echo "docker_vernum=${docker_vernum}" >> $GITHUB_ENV
-          echo "packages_image=${packages_image}" >> $GITHUB_ENV
-          echo "arch=${arch}" >> $GITHUB_ENV
-          echo "sdk_image=${sdk_image}" >> $GITHUB_ENV
-          echo "packages_container=${packages_container}" >> $GITHUB_ENV
-          docker commit "${packages_container}" "${packages_image}:${docker_vernum}"
-          docker rm -f "${packages_container}"
+      - name: Extract build logs
+        if: always()
+        shell: bash
+        run: |
+          set -euo pipefail
+          set -x
+          # Copy logs
+          ./run_sdk_container -n "${container_name}" \
+              tar -cJf ebuild_logs.tar.xz /build/${arch}-usr/var/log/portage \
+                  /build/${arch}-usr/var/tmp/portage
+
+      - name: Upload build logs
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          retention-days: 7
+          name: ${{ matrix.arch }}-build-logs
+          path: |
+            scripts/ebuild_logs.tar.xz
 
       - name: Build image
         shell: bash
         run: |
           set -euo pipefail
-          set +x
+          set -x
 
           echo 'channel="developer"' >> $GITHUB_ENV
           channel="developer"
 
           source ci-automation/ci_automation_common.sh
 
-          packages="flatcar-packages-${arch}"
-          packages_image="${packages}:${docker_vernum}"
-          image="flatcar-images-${arch}"
-          image_container="${image}-${docker_vernum}"
           official_arg="--noofficial"
 
-          echo "image=flatcar-images-${arch}" >> $GITHUB_ENV
-          echo "image_image=${image}:${docker_vernum}" >> $GITHUB_ENV
-
-          ./run_sdk_container -x ./ci-cleanup.sh -n "${image_container}" -C "${packages_image}" \
-              -v "${vernum}" \
-              mkdir -p "${CONTAINER_IMAGE_ROOT}"
-          ./run_sdk_container -n "${image_container}" -C "${packages_image}" \
-              -v "${vernum}" \
+          ./run_sdk_container -n "${container_name}" \
              ./set_official --board="${arch}-usr" "${official_arg}"
-          ./run_sdk_container -n "${image_container}" -C "${packages_image}" \
-              -v "${vernum}" \
+          ./run_sdk_container -n "${container_name}" \
              ./build_image --board="${arch}-usr" --group="${channel}" \
-                --output_root="${CONTAINER_IMAGE_ROOT}" \
-                --torcx_root="${CONTAINER_TORCX_ROOT}" prodtar container
+                --output_root="${CI_CONTAINER_ARTIFACT_ROOT}" \
+                --torcx_root="${CI_CONTAINER_TORCX_ROOT}" prodtar container
 
-          # Copy logs
-          ./run_sdk_container -n "${image_container}" -C "${packages_image}" -v "${vernum}" \
-              tar -cJf ebuild_logs.tar.xz /build/${arch}-usr/var/log/portage \
-                  /build/${arch}-usr/var/tmp/portage
-
-          docker commit "${image_container}" "${image}:${docker_vernum}"
-          docker rm -f "${image_container}"
-
-      - name: Build VM image
+      - name: Build VM image(s)
         shell: bash
         run: |
           set -euo pipefail
-          set +x
+          set -x
 
           source ci-automation/ci_automation_common.sh
 
-          vms_container="flatcar-vms-${docker_vernum}"
           images_out="images"
 
           has_packet=0
@@ -254,35 +260,96 @@
 
           for format in ${formats}; do
             echo " ################### VENDOR '${format}' ################### "
-            ./run_sdk_container -n "${vms_container}" -C "${image_image}" \
-                -v "${vernum}" \
+            ./run_sdk_container -n "${container_name}" \
                 ./image_to_vm.sh --format "${format}" --board="${arch}-usr" \
-                    --from "${CONTAINER_IMAGE_ROOT}/${arch}-usr/latest" \
+                    --from "${CI_CONTAINER_ARTIFACT_ROOT}/${arch}-usr/latest" \
                     --image_compression_formats=bz2
           done
 
-          # copy resulting images
-          ./run_sdk_container -n "${vms_container}" \
-              -v "${vernum}" \
-              mv "${CONTAINER_IMAGE_ROOT}/${arch}-usr" "./${images_out}"
+          # upload-artifacts cannot handle artifact uploads from sym-linked directories (no, really)
+          # so we move things around.
+          mkdir -p artifacts/images
+          (
+            cd artifacts/${arch}-usr/latest/
+            mv * ../../images/
+          )
 
-          # remove symlinks before upload
-          find "./${images_out}" -type l -delete
+          # create a tarball for torcx package + JSON file because upload-artifacts cannot handle filenames containing colons
+          # (such as "docker:20.10.torcx.tgz")
+          mv artifacts/torcx/${arch}-usr/latest/torcx_manifest.json artifacts/torcx/pkgs/
+          tar -C artifacts/torcx/pkgs/ -cvf torcx.tar .
 
-          docker rm -f "${vms_container}"
-
-      - name: Upload artifacts
+      - name: Upload binpkgs
         uses: actions/upload-artifact@v3
         with:
-          name: images-${{ matrix.arch }}
+          retention-days: 7
+          name: ${{ matrix.arch }}-binpkgs
           path: |
-            scripts/images/**/*.img.bz2
-            scripts/images/**/*.bin.bz2
-            scripts/images/**/flatcar_production_*_efi_*.fd
-            scripts/images/**/*.txt
-            scripts/images/**/flatcar_production_*.sh
-            scripts/images/**/flatcar_test_update.gz
-            scripts/ebuild_logs.tar.xz
+            scripts/binpkgs.tar
+
+      - name: Upload update image (used with kola tests later)
+        uses: actions/upload-artifact@v3
+        with:
+          retention-days: 7
+          name: ${{ matrix.arch }}-test-update
+          path: |
+            scripts/artifacts/images/flatcar_test_update.gz
+
+      - name: Upload generic image
+        uses: actions/upload-artifact@v3
+        with:
+          retention-days: 7
+          name: ${{ matrix.arch }}-generic-image
+          path: |
+            scripts/artifacts/images/flatcar_production_image.bin.bz2
+            scripts/artifacts/images/flatcar_production_image.grub
+            scripts/artifacts/images/flatcar_production_image.shim
+            scripts/artifacts/images/flatcar_production_image.vmlinuz
+            scripts/artifacts/images/flatcar_production_image*.txt
+            scripts/artifacts/images/flatcar_production_image*.json
+            scripts/artifacts/images/flatcar_production_image_pcr_policy.zip
+            scripts/artifacts/images/flatcar_production_*_efi_*.fd
+
+      - name: Upload developer container
+        uses: actions/upload-artifact@v3
+        with:
+          retention-days: 7
+          name: ${{ matrix.arch }}-devcontainer
+          path: |
+            scripts/artifacts/images/flatcar_developer_container*
+
+      - name: Upload torcx tarball
+        uses: actions/upload-artifact@v3
+        with:
+          retention-days: 7
+          name: ${{ matrix.arch }}-torcx
+          path: |
+            scripts/torcx.tar
+
+      # Clean up what we uploaded already so the "vendor images" wildcard
+      # works when uploading artifacts in the next step.
+      - name: Remove update, generic and devcontainer images
+        shell: bash
+        run: |
+          set -euo pipefail
+          set -x
+          rm -f artifacts/images/flatcar_test_update.gz \
+                artifacts/images/flatcar_production_image* \
+                artifacts/images/flatcar_developer_container* \
+                artifacts/images/flatcar_production_update*
+
+      - name: Upload vendor images
+        uses: actions/upload-artifact@v3
+        with:
+          retention-days: 7
+          name: ${{ matrix.arch }}-vm-images
+          path: |
+            scripts/artifacts/images/*.img.bz2
+            scripts/artifacts/images/*.bin.bz2
+            scripts/artifacts/images/flatcar_production_*_efi_*.fd
+            scripts/artifacts/images/*.txt
+            scripts/artifacts/images/flatcar_production_*.sh
 
   test:
     needs: packages
.github/workflows/dispatch-kola-tests.yaml (vendored, 10 changed lines)
@@ -8,14 +8,18 @@ on:
       required: true
       default: ci.yaml
       description: |
-        The workflow ID from where we'll download the artifacts to be tested.
+        The workflow name or ID from where we'll download the artifacts to be tested.
+        E.g. the name of the YAML file (w/o path) of the respective workflow.
     workflow_run_id:
       type: string
       required: true
       description: |
-        The run ID of the workflow specified in workflow_name_or_id
+        The run ID of the workflow specified in workflow_name_or_id.
+        You can e.g. get this from a run's URL -
+        https://github.com/flatcar/scripts/actions/runs/<ID> .
 
-permissions: {}
+permissions:
+  pull-requests: write
 
 jobs:
   test:
.github/workflows/run-kola-tests.yaml (vendored, 274 changed lines)
@@ -34,7 +34,7 @@ jobs:
        run: |
          sudo rm /bin/sh
          sudo ln -s /bin/bash /bin/sh
-         sudo apt-get install -y ca-certificates curl gnupg lsb-release qemu-system git bzip2 jq dnsmasq
+         sudo apt-get install -y ca-certificates curl gnupg lsb-release qemu-system git bzip2 jq dnsmasq python3
          sudo systemctl stop dnsmasq
          sudo systemctl mask dnsmasq
 
@@ -60,45 +60,171 @@
           fetch-depth: 0
           submodules: true
 
-      - name: Download artifact
+      # Hack alert: actions/checkout will check out the (disjunct) merge commit of a PR
+      # instead of its head commit. That commit is not connected to any branch.
+      # This is not technically necessary for the tests run but it is done to remain aligned
+      # with the ref.
+      - name: If this is a PR build, use head commit instead of the merge commit
+        if: ${{ github.event.pull_request.head.sha }}
+        shell: bash
+        run: |
+          exec 2>&1
+          set -x
+          set -euo pipefail
+
+          cd scripts
+          git checkout ${{ github.event.pull_request.head.sha }}
+          git submodule update
+
+      - name: Download binpkgs
         if: ${{ !inputs.workflow_run_id }}
         uses: actions/download-artifact@v3
         with:
-          name: images-${{ matrix.arch }}
+          name: ${{ matrix.arch }}-binpkgs
 
-      - name: Download artifacts from other workflow
+      - name: Download test update image
+        if: ${{ !inputs.workflow_run_id }}
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ matrix.arch }}-test-update
+
+      - name: Download generic image
+        if: ${{ !inputs.workflow_run_id }}
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ matrix.arch }}-generic-image
+
+      - name: Download developer container
+        if: ${{ !inputs.workflow_run_id }}
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ matrix.arch }}-devcontainer
+
+      - name: Download torcx tarball
+        if: ${{ !inputs.workflow_run_id }}
+        uses: actions/download-artifact@v3
+        with:
+          name: ${{ matrix.arch }}-torcx
+
+      - name: Download binpkgs from other workflow
         uses: gabriel-samfira/action-download-artifact@v5
         if: ${{ inputs.workflow_run_id }}
         with:
           workflow: ${{ inputs.workflow_name_or_id }}
           workflow_conclusion: success
           run_id: ${{ inputs.workflow_run_id }}
-          name: images-${{ matrix.arch }}
+          name: ${{ matrix.arch }}-binpkgs
+
+      - name: Download test update image from other workflow
+        uses: gabriel-samfira/action-download-artifact@v5
+        if: ${{ inputs.workflow_run_id }}
+        with:
+          workflow: ${{ inputs.workflow_name_or_id }}
+          workflow_conclusion: success
+          run_id: ${{ inputs.workflow_run_id }}
+          name: ${{ matrix.arch }}-test-update
+
+      - name: Download generic image from other workflow
+        uses: gabriel-samfira/action-download-artifact@v5
+        if: ${{ inputs.workflow_run_id }}
+        with:
+          workflow: ${{ inputs.workflow_name_or_id }}
+          workflow_conclusion: success
+          run_id: ${{ inputs.workflow_run_id }}
+          name: ${{ matrix.arch }}-generic-image
+
+      - name: Download developer container from other workflow
+        uses: gabriel-samfira/action-download-artifact@v5
+        if: ${{ inputs.workflow_run_id }}
+        with:
+          workflow: ${{ inputs.workflow_name_or_id }}
+          workflow_conclusion: success
+          run_id: ${{ inputs.workflow_run_id }}
+          name: ${{ matrix.arch }}-devcontainer
+
+      - name: Download torcx tarball from other workflow
+        uses: gabriel-samfira/action-download-artifact@v5
+        if: ${{ inputs.workflow_run_id }}
+        with:
+          workflow: ${{ inputs.workflow_name_or_id }}
+          workflow_conclusion: success
+          run_id: ${{ inputs.workflow_run_id }}
+          name: ${{ matrix.arch }}-torcx
+
+      - name: Extract artifacts
+        shell: bash
+        run: |
+          exec 2>&1
+          set -x
+          set -euo pipefail
+
+          # Set up a webserver for devcontainer and torcx tests.
+          # The respective tests will download devcontainer and torcx tarball via http.
+          # The devcontainer test will then run a build
+          # which will download and install binpkgs into the dev container.
+          # For the sake of that test we will serve both via a temporary local web server.
+          TESTS_WEBSERVER_WEBROOT="scripts/devcontainer-webroot"
+          default_rout_device="$(sudo ip -j route sh default |jq -r .[0].dev)"
+          TESTS_WEBSERVER_IP="$(sudo ip -j address show dev "${default_rout_device}" | jq -r .[0].addr_info[0].local)"
+          TESTS_WEBSERVER_PORT=12345
+          echo "TESTS_WEBSERVER_WEBROOT=${TESTS_WEBSERVER_WEBROOT}" >> "$GITHUB_ENV"
+          echo "TESTS_WEBSERVER_IP=${TESTS_WEBSERVER_IP}" >> "$GITHUB_ENV"
+          echo "TESTS_WEBSERVER_PORT=${TESTS_WEBSERVER_PORT}" >> "$GITHUB_ENV"
+
+          mkdir ${TESTS_WEBSERVER_WEBROOT}
+          mv flatcar_developer_container* ${TESTS_WEBSERVER_WEBROOT}
+          tar -C ${TESTS_WEBSERVER_WEBROOT} -xvf binpkgs.tar
+
+          tar -C ${TESTS_WEBSERVER_WEBROOT} -xvf torcx.tar
+
+          # Move torcx package into plain webroot
+          # (path consists of <arch>/<packagename>/<checksum>/<packagename>:<version>.torcx.tar.gz)
+          mv "${TESTS_WEBSERVER_WEBROOT}/${{ matrix.arch }}-usr"/*/*/*.torcx.tgz \
+              "${TESTS_WEBSERVER_WEBROOT}"
+
+          # Update torcx.json's http URL to point to the webserver IP.
+          # ci.yaml defines the "localhost" placeholder in its "Set Environment" step.
+          sed -i "s,http://localhost:12345,http://${TESTS_WEBSERVER_IP}:${TESTS_WEBSERVER_PORT}," \
+              "${TESTS_WEBSERVER_WEBROOT}/torcx_manifest.json"
+          cat "${TESTS_WEBSERVER_WEBROOT}/torcx_manifest.json"
+
+          # Extract the generic image we'll use for qemu tests.
+          # Note that the qemu[_uefi] tests use the generic image instead of the
+          # qemu vendor VM image ("Astronaut: [...] Always have been.").
+          bzip2 --decompress flatcar_production_image.bin.bz2
+          mv flatcar_production_image.bin flatcar_production_qemu_uefi_efi_code.fd scripts/
+
+          mv flatcar_test_update.gz scripts/
 
       - name: Run tests
         shell: bash
         run: |
           exec 2>&1
-          set +x
+          set -x
           set -euo pipefail
 
-          # extract the image.
-          IMG_ARCHIVE=$(readlink -f images/**/flatcar_production_image.bin.bz2)
-          QEMU_UEFI_BIOS_FILE=$(readlink -f images/**/flatcar_production_qemu_uefi_efi_code.fd)
-          bzip2 --decompress ${IMG_ARCHIVE}
-
-          cp ${IMG_ARCHIVE%%.bz2} ./scripts/
-          cp ${QEMU_UEFI_BIOS_FILE} ./scripts/
+          python3 -m http.server -d "${TESTS_WEBSERVER_WEBROOT}" -b "${TESTS_WEBSERVER_IP}" "${TESTS_WEBSERVER_PORT}" &
 
           pushd scripts
           source ci-automation/test.sh
 
+          # Provide our own torcx prepare function so we use our local manifest json.
+          # This is called by test_run below.
+          function __prepare_torcx() {
+            shift; shift # no need for arch or vernum
+            local destdir="$1"
+            cp "../${TESTS_WEBSERVER_WEBROOT}/torcx_manifest.json" "${destdir}"
+          }
+
           PARALLEL_ARCH=10
 
           cat > sdk_container/.env <<EOF
           # export the QEMU_IMAGE_NAME to avoid to download it.
           export QEMU_IMAGE_NAME="/work/flatcar_production_image.bin"
           export QEMU_UEFI_BIOS="/work/flatcar_production_qemu_uefi_efi_code.fd"
+          export QEMU_UPDATE_PAYLOAD="/work/flatcar_test_update.gz"
+          export QEMU_DEVCONTAINER_URL="http://${TESTS_WEBSERVER_IP}:${TESTS_WEBSERVER_PORT}"
+          export QEMU_DEVCONTAINER_BINHOST_URL="http://${TESTS_WEBSERVER_IP}:${TESTS_WEBSERVER_PORT}"
           export PARALLEL_TESTS=${PARALLEL_ARCH}
           # The runner uses lxc containers for kola, and can't use loopback devices to
           # prepare the serial console setting - this means that kola may miss some errors
@@ -108,14 +234,126 @@ jobs:
           export MAX_RETRIES=5
           export SKIP_COPY_TO_BINCACHE=1
 
-          # run the test.
+          # run the tests.
           test_run ${{ matrix.arch }} qemu_uefi
+          test_run ${{ matrix.arch }} qemu_update
+
+          # Stop the background webserver
+          set +e
+          kill %1
+          set -e
 
-      - name: Upload artifacts
+      - name: Upload detailed test logs
         if: always()
         uses: actions/upload-artifact@v3
         with:
-          name: test-results-${{ matrix.arch }}
+          name: ${{ matrix.arch }}-test-logs-and-results
           path: |
-            scripts/__TESTS__
-            scripts/results-.*.tap
+            scripts/__TESTS__/*/_kola_temp/
+            scripts/__TESTS__/*/*.tap
+            scripts/__TESTS__/*/*.txt
+            scripts/results-*.tap
+            scripts/results-*.md
+
+      - name: Upload raw TAP files of all runs for later merging
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.arch }}-raw-tapfiles
+          path: |
+            scripts/__TESTS__/*/*.tap
+
+
+  merge_and_publish_results:
+    name: "Merge TAP reports and post results"
+    needs: tests
+    if: always()
+    runs-on:
+      - self-hosted
+      - debian
+      - kola
+    permissions:
+      pull-requests: write
+
+    steps:
+      - name: Prepare machine
+        shell: bash
+        working-directory: ${{ github.workspace }}
+        run: |
+          sudo rm /bin/sh
+          sudo ln -s /bin/bash /bin/sh
+          sudo apt-get install -y ca-certificates curl gnupg lsb-release git bzip2 jq sqlite3
+
+      - uses: actions/checkout@v3
+        with:
+          path: scripts
+          fetch-depth: 0
+          submodules: true
+
+      # Hack alert: actions/checkout will check out the (disjunct) merge commit of a PR
+      # instead of its head commit. That commit is not connected to any branch.
+      # This is not technically necessary for the tests run but it is done to remain aligned
+      # with the ref.
+      - name: If this is a PR build, use head commit instead of the merge commit
+        if: ${{ github.event.pull_request.head.sha }}
+        shell: bash
+        run: |
+          exec 2>&1
+          set -x
+          set -euo pipefail
+
+          cd scripts
+          git checkout ${{ github.event.pull_request.head.sha }}
+          git submodule update
+
+      # This is clunky. Haven't figured out how to re-use matrix.arch here for downloads,
+      # so we download each arch individually.
+      - name: Download amd64 tapfiles
+        uses: actions/download-artifact@v3
+        with:
+          name: amd64-raw-tapfiles
+          path: scripts/__TAP__/amd64
+
+      - name: Download arm64 tapfiles
+        uses: actions/download-artifact@v3
+        with:
+          name: arm64-raw-tapfiles
+          path: scripts/__TAP__/arm64
+
+      - name: Create Test Summary
+        shell: bash
+        run: |
+          exec 2>&1
+          set -x
+          set -euo pipefail
+
+          cd scripts
+
+          ls -laR __TAP__
+
+          source ci-automation/tapfile_helper_lib.sh
+
+          all_archs=""
+          for arch in __TAP__/*; do
+            arch_name="$(basename "${arch}")"
+            all_archs="${all_archs} ${arch_name}"
+            for vendor in "${arch}"/*; do
+              vendor_name="$(basename "${vendor}")"
+              run=1
+              for tap in "${vendor}"/*.tap; do
+                tap_ingest_tapfile "${tap}" "${vendor_name}-${arch_name}" "${run}"
+                ((run++))
+              done
+            done
+          done
+
+          source sdk_container/.repo/manifests/version.txt
+          tap_generate_report "${all_archs}" "${FLATCAR_VERSION}" "md" "true" > test-results.md
+
+          cat test-results.md >> "$GITHUB_STEP_SUMMARY"
+
+      - name: If started from a PR, post test summary to PR
+        if: ${{ github.event_name == 'pull_request' }}
+        uses: mshick/add-pr-comment@v2
+        with:
+          message-path: "test-results.md"
@@ -47,6 +47,11 @@ CONTAINER_IMAGE_ROOT="/home/sdk/build/images"
 # echo "export PARALLEL_TESTS=\"5\"" > sdk_container/.env
 # to override the number of test cases to be run in parallel.
 
+# -- General --
+
+# "tap" for TAP reports, "md" for markdown are currently supported
+TEST_REPORT_FORMATS=("tap" "md")
+
 # -- QEMU --
 
 QEMU_IMAGE_NAME=${QEMU_IMAGE_NAME:-flatcar_production_image.bin}
@@ -62,6 +67,17 @@ QEMU_BIOS="/usr/share/qemu/bios-256k.bin"
 # Published by vms.sh as part of the qemu vendor build.
 QEMU_UEFI_BIOS="${QEMU_UEFI_BIOS:-flatcar_production_qemu_uefi_efi_code.fd}"
 
+# Update payload for the qemu_update.sh test.
+# The default path set below is relative to TEST_WORK_DIR
+QEMU_UPDATE_PAYLOAD="tmp/flatcar_test_update.gz"
+
+# Devcontainer settings for isolated / local testing w/o a remote
+# devcontainer server and/or binhost.
+# Can optionally be set to http / https URLs to pull the dev container
+# and the binpackages from.
+QEMU_DEVCONTAINER_URL="${QEMU_DEVCONTAINER_URL:-}"
+QEMU_DEVCONTAINER_BINHOST_URL="${QEMU_DEVCONTAINER_BINHOST_URL:-}"
+
 
 # -- Equinix Metal --
 EQUINIXMETAL_PARALLEL="${PARALLEL_TESTS:-4}"
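These knobs can be overridden the same way the workflow above does it, by writing them into sdk_container/.env before calling test_run. A minimal sketch, assuming the webserver addresses are illustrative:

    # sdk_container/.env -- picked up by the CI automation before the kola run (values are examples)
    export QEMU_UPDATE_PAYLOAD="/work/flatcar_test_update.gz"
    export QEMU_DEVCONTAINER_URL="http://192.0.2.10:12345"
    export QEMU_DEVCONTAINER_BINHOST_URL="http://192.0.2.10:12345"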
@@ -197,27 +197,167 @@ function tap_failed_tests_for_vendor() {
 }
 # --
 
+# TAP output format primitives for tap_generate_report()
+
+__tap_print_header() {
+    local arch="$1"
+    local version="$2"
+    local vendors="$3"
+    local count="$4"
+
+    # We use count + 1 here because the very first "test result" will just print
+    # the list of platforms tested, not an actual test's result.
+    echo "1..$((count+1))"
+    echo "ok - Version: ${version}, Architecture: ${arch}"
+    echo " ---"
+    echo " Platforms tested: ${vendors}"
+    echo " ..."
+}
+# --
+
+__tap_print_test_verdict() {
+    local verdict="$1"
+    local name="$2"
+    local succeded_vendors="$3"
+    local failed_vendors="$4"
+
+    echo "${verdict} - ${test_name}"
+    echo " ---"
+
+    if [ -n "${succeded_vendors}" ] ; then
+        echo " Succeeded: ${succeded_vendors}"
+    fi
+    if [ -n "${failed_vendors}" ] ; then
+        echo " Failed: ${failed_vendors}"
+    fi
+}
+# --
+
+__tap_print_test_run_diag_output() {
+    local vendor="$1"
+    local run="$2"
+    echo " Error messages for ${vendor}, run ${run}:"
+    cat -
+}
+# --
+
+__tap_finish_test_verdict() {
+    local verdict="$1"
+    local name="$2"
+    local succeded_vendors="$3"
+    local failed_vendors="$4"
+    echo " ..."
+}
+# --
+
+__tap_finish_test_report() {
+    true
+}
+# --
+
+# markdown output format primitives for tap_generate_report()
+
+__md_print_header() {
+    local arch="$1"
+    local version="$2"
+    local vendors="$3"
+    local count="$4"
+
+    echo "### Test report for ${version} / ${arch}"
+    echo
+    echo "**Platforms tested** : ${vendors}"
+}
+# --
+
+__md_print_test_verdict() {
+    local verdict="$1"
+    local name="$2"
+    local succeded_vendors="$3"
+    local failed_vendors="$4"
+
+    v=""
+    if [ "${verdict}" = "not ok" ] ; then
+        v=""
+    fi
+
+    echo
+    echo -n "${v} **${name}**"
+    if [ -n "${succeded_vendors}" ] ; then
+        echo -n " 🟢 Succeeded: ${succeded_vendors}"
+    fi
+    if [ -n "${failed_vendors}" ] ; then
+        echo -n " ❌ Failed: ${failed_vendors}"
+    fi
+    echo
+    if [ "${verdict}" = "not ok" ] ; then
+        echo
+        echo "<details>"
+        echo
+    fi
+}
+# --
+
+__md_print_test_run_diag_output() {
+    local vendor="$1"
+    local run="$2"
+
+    echo "* Diagnostic output for ${vendor}, run ${run}"
+    echo
+    echo " \`\`\`"
+    cat -
+    echo " \`\`\`"
+    echo
+}
+# --
+#
+__md_finish_test_verdict() {
+    local verdict="$1"
+    local name="$2"
+    local succeded_vendors="$3"
+    local failed_vendors="$4"
+    if [ "${verdict}" = "not ok" ] ; then
+        echo
+        echo "</details>"
+        echo
+    fi
+}
+# --
+
+__md_finish_test_report() {
+    true
+}
+# --
+
+
 # Print the tap file from contents of the database.
 # INPUT:
 # 1: <arch> - Architecture to be included in the first line of the report
 # 2: <version> - OS version tested, to be included in the first line of the report
-# 3: <include_transient_errors> - If set to "true" then debug output of transient test failures
+# 3: <format> - Output format of the report. "tap" and "markdown" are supported.
+# 4: <include_transient_errors> - If set to "true" then debug output of transient test failures
 #    is included in the result report.
 function tap_generate_report() {
     local arch="$1"
     local version="$2"
-    local full_error_report="${3:-false}"
+    local format="$3"
+    local full_error_report="${4:-false}"
+
+    case "${format}" in
+        tap) ;;
+        md) ;;
+        *) echo "ERROR: tap_generate_report() unknown format '${format}'" >&2
+           return 1
+           ;;
+    esac
+
     local count
     count="$(__sqlite3_wrapper 'SELECT count(name) FROM test_case;')"
     local vendors
     vendors="$(__sqlite3_wrapper 'SELECT name FROM vendor;' | tr '\n' ' ')"
 
-    echo "1..$((count+1))"
-    echo "ok - Version: ${version}, Architecture: ${arch}"
-    echo " ---"
-    echo " Platforms tested: ${vendors}"
-    echo " ..."
+    __"${format}"_print_header "${arch}" "${version}" "${vendors}" "${count}"
 
     # Print result line for every test, including platforms it succeeded on
     # and transient failed runs.
|
|||||||
r=r ", " $2
|
r=r ", " $2
|
||||||
else
|
else
|
||||||
r="(" $2 ; }
|
r="(" $2 ; }
|
||||||
END { if (t) print t r ")"; }'
|
END { if (t) print t " " r ")"; }'
|
||||||
}
|
}
|
||||||
|
|
||||||
local succeded
|
local succeeded
|
||||||
succeded="$(list_runs 1)"
|
succeeded="$(list_runs 1)"
|
||||||
local failed
|
local failed
|
||||||
failed="$(list_runs 0)"
|
failed="$(list_runs 0)"
|
||||||
|
|
||||||
echo "${verdict} - ${test_name}"
|
__"${format}"_print_test_verdict "${verdict}" "${test_name}" \
|
||||||
echo " ---"
|
"${succeeded}" "${failed}"
|
||||||
if [ -n "${succeded}" ] ; then
|
|
||||||
echo " Succeeded: ${succeded}"
|
|
||||||
fi
|
|
||||||
if [ -n "${failed}" ] ; then
|
if [ -n "${failed}" ] ; then
|
||||||
echo " Failed: ${failed}"
|
|
||||||
if [ "${verdict}" = "not ok" -o "${full_error_report}" = "true" ] ; then
|
if [ "${verdict}" = "not ok" -o "${full_error_report}" = "true" ] ; then
|
||||||
# generate diagnostic output, per failed run.
|
# generate diagnostic output, per failed run.
|
||||||
__sqlite3_wrapper -csv "
|
__sqlite3_wrapper -csv "
|
||||||
@ -291,7 +427,7 @@ function tap_generate_report() {
|
|||||||
ORDER BY t.run DESC;" | \
|
ORDER BY t.run DESC;" | \
|
||||||
sed 's/,/ /' | \
|
sed 's/,/ /' | \
|
||||||
while read -r vendor run; do
|
while read -r vendor run; do
|
||||||
echo " Error messages for ${vendor}, run ${run}:"
|
{
|
||||||
__sqlite3_wrapper -csv "
|
__sqlite3_wrapper -csv "
|
||||||
SELECT t.output FROM test_run AS t, test_case AS c
|
SELECT t.output FROM test_run AS t, test_case AS c
|
||||||
WHERE t.case_id=c.id
|
WHERE t.case_id=c.id
|
||||||
@ -299,10 +435,14 @@ function tap_generate_report() {
|
|||||||
AND t.run='${run}';" | \
|
AND t.run='${run}';" | \
|
||||||
sed 's/"/ /g' | \
|
sed 's/"/ /g' | \
|
||||||
awk '{print " L" NR ": \"" $0 "\""}'
|
awk '{print " L" NR ": \"" $0 "\""}'
|
||||||
|
} | __"${format}"_print_test_run_diag_output "${vendor}" "${run}"
|
||||||
done
|
done
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
echo " ..."
|
__"${format}"_finish_test_verdict "${verdict}" "${test_name}" \
|
||||||
|
"${succeeded}" "${failed}"
|
||||||
done
|
done
|
||||||
|
|
||||||
|
__"${format}"_finish_test_report
|
||||||
}
|
}
|
||||||
# --
|
# --
|
||||||
|
@@ -165,8 +165,8 @@ function _test_run_impl() {
     # Make the torcx artifacts available to test implementation
     __prepare_torcx "${arch}" "${vernum}" "${work_dir}"
 
-    local tap_merged_summary="results-${image}.tap"
-    local tap_merged_detailed="results-${image}-detailed.tap"
+    local tap_merged_summary="results-${image}"
+    local tap_merged_detailed="results-${image}-detailed"
     local retry=""
     local success=false
     local print_give_up=true
@@ -242,9 +242,9 @@
         copy_to_buildcache "testing/${vernum}/${arch}/${image}" \
            "${tests_dir}/"*.tap
         copy_to_buildcache "testing/${vernum}/${arch}/${image}" \
-           "${tap_merged_summary}"
+           "${tap_merged_summary}"*
         copy_to_buildcache "testing/${vernum}/${arch}/${image}" \
-           "${tap_merged_detailed}"
+           "${tap_merged_detailed}"*
     fi
     if ! $success; then
         return 1
@@ -19,8 +19,12 @@ failfile="$6"
 merged_summary="$7"
 merged_detailed="$8"
 
+source ci-automation/ci-config.env
 source ci-automation/tapfile_helper_lib.sh
 tap_ingest_tapfile "${tapfile}" "${image}" "${retry}"
 tap_failed_tests_for_vendor "${image}" > "${failfile}"
-tap_generate_report "${arch}" "${vernum}" > "${merged_summary}"
-tap_generate_report "${arch}" "${vernum}" "true" > "${merged_detailed}"
+for format in "${TEST_REPORT_FORMATS[@]}"; do
+    tap_generate_report "${arch}" "${vernum}" "${format}" > "${merged_summary}.${format}"
+    tap_generate_report "${arch}" "${vernum}" "${format}" "true" > "${merged_detailed}.${format}"
+done
@@ -42,6 +42,16 @@ if [ "${CIA_TESTSCRIPT}" = "qemu_uefi.sh" ] ; then
     fi
 fi
 
+declare -a devcontainer_opts
+if [ -n "${QEMU_DEVCONTAINER_URL}" ] ; then
+    echo "++++ Using custom devcontainer URL '${QEMU_DEVCONTAINER_URL}'"
+    devcontainer_opts+=( "--devcontainer-url" "${QEMU_DEVCONTAINER_URL}" )
+fi
+if [ -n "${QEMU_DEVCONTAINER_BINHOST_URL}" ] ; then
+    echo "++++ Using custom devcontainer binhost '${QEMU_DEVCONTAINER_BINHOST_URL}'"
+    devcontainer_opts+=( "--devcontainer-binhost-url" "${QEMU_DEVCONTAINER_BINHOST_URL}" )
+fi
+
 set -x
 
 kola run \
@@ -53,6 +63,7 @@ kola run \
     --tapfile="${CIA_TAPFILE}" \
     --torcx-manifest="${CIA_TORCX_MANIFEST}" \
     ${QEMU_KOLA_SKIP_MANGLE:+--qemu-skip-mangle} \
+    "${devcontainer_opts[@]}" \
     "${@}"
 
 set +x
@@ -23,9 +23,9 @@ if [ "$*" != "" ] && [ "$*" != "*" ] && [[ "$*" != *"cl.update.payload" ]]; then
     exit 1
 fi
 
-mkdir -p tmp/
-if [ -f tmp/flatcar_test_update.gz ] ; then
-    echo "++++ ${CIA_TESTSCRIPT}: Using existing ./tmp/flatcar_test_update.gz for testing ${CIA_VERNUM} (${CIA_ARCH}) ++++"
+mkdir -p "$(dirname ${QEMU_UPDATE_PAYLOAD})"
+if [ -f "${QEMU_UPDATE_PAYLOAD}" ] ; then
+    echo "++++ ${CIA_TESTSCRIPT}: Using existing ${QEMU_UPDATE_PAYLOAD} for testing ${CIA_VERNUM} (${CIA_ARCH}) ++++"
 else
     echo "++++ ${CIA_TESTSCRIPT}: downloading flatcar_test_update.gz for ${CIA_VERNUM} (${CIA_ARCH}) ++++"
     copy_from_buildcache "images/${CIA_ARCH}/${CIA_VERNUM}/flatcar_test_update.gz" tmp/
@@ -105,7 +105,7 @@ run_kola_tests() {
     --qemu-image="${image}" \
     --tapfile="${instance_tapfile}" \
     --torcx-manifest="${CIA_TORCX_MANIFEST}" \
-    --update-payload=tmp/flatcar_test_update.gz \
+    --update-payload="${QEMU_UPDATE_PAYLOAD}" \
     ${QEMU_KOLA_SKIP_MANGLE:+--qemu-skip-mangle} \
     cl.update.payload
 }