- If we encounter a deadlock or a long-running test, it is better to have go test time out. As we've noticed, if we hit the GitHub step timeout we lose all information about what was running at the time of the timeout, which makes things harder to diagnose.
- When the timeout comes through go test itself on a long-running test, the logs show which test was running along with a full panic output, which is quite useful for diagnosis.
on:
  workflow_call:
    inputs:
      go-arch:
        description: The execution architecture (arm, amd64, etc.)
        required: true
        type: string
      total-runners:
        description: Number of runners to use for executing non-binary tests.
        required: true
        type: string
      binary-tests:
        description: Whether to run the binary tests.
        required: false
        default: false
        type: boolean
      env-vars:
        description: A map of environment variables as JSON.
        required: false
        type: string
        default: '{}'
      extra-flags:
        description: A space-separated list of additional build flags.
        required: false
        type: string
        default: ''
      runs-on:
        description: An expression indicating which kind of runners to use for Go testing jobs.
        required: false
        type: string
        default: '"ubuntu-latest"'
      runs-on-small:
        description: An expression indicating which kind of runners to use for small computing jobs.
        required: false
        type: string
        default: '"ubuntu-latest"'
      go-tags:
        description: A comma-separated list of additional build tags to consider satisfied during the build.
        required: false
        type: string
      name:
        description: |
          A unique identifier to use for labeling artifacts and workflows. It is commonly used to
          specify context, e.g.: fips, race, testonly, standard.
        required: true
        type: string
      go-test-parallelism:
        description: The parallelism parameter for Go tests
        required: false
        default: 20
        type: number
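      # Note: go-test-timeout (default 50m) is deliberately kept below timeout-minutes (default 60)
      # so that go test itself times out first; when it does, the logs show which test was running
      # along with a full panic output, whereas hitting the GitHub step timeout loses that information.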
      go-test-timeout:
        description: The timeout parameter for Go tests
        required: false
        default: 50m
        type: string
      timeout-minutes:
        description: The maximum number of minutes that this workflow should run
        required: false
        default: 60
        type: number
      testonly:
        description: Whether to run the tests tagged with testonly.
        required: false
        default: false
        type: boolean
      test-timing-cache-enabled:
        description: Cache the gotestsum test timing data.
        required: false
        default: true
        type: boolean
      test-timing-cache-key:
        description: The cache key to use for gotestsum test timing data.
        required: false
        default: go-test-reports
        type: string
      checkout-ref:
        description: The ref to use for checkout.
        required: false
        default: ${{ github.ref }}
        type: string
    outputs:
      data-race-output:
        description: A textual output of any data race detector failures
        value: ${{ jobs.status.outputs.data-race-output }}
      data-race-result:
        description: Whether or not there were any data races detected
        value: ${{ jobs.status.outputs.data-race-result }}

env: ${{ fromJSON(inputs.env-vars) }}
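# Note: env-vars is expected to hold a JSON object, e.g. '{"SOME_VAR": "some-value"}' (a made-up
# example); fromJSON() turns it into workflow-level environment variables, and the default '{}' sets none.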

jobs:
  test-matrix:
    permissions:
      id-token: write # Note: this permission is explicitly required for Vault auth
      contents: read
    runs-on: ${{ fromJSON(inputs.runs-on-small) }}
    outputs:
      go-test-dir: ${{ steps.metadata.outputs.go-test-dir }}
      matrix: ${{ steps.build.outputs.matrix }}
      matrix_ids: ${{ steps.build.outputs.matrix_ids }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
        with:
          ref: ${{ inputs.checkout-ref }}
      - name: Authenticate to Vault
        id: vault-auth
        if: github.repository == 'hashicorp/vault-enterprise'
        run: vault-auth
      - name: Fetch Secrets
        id: secrets
        if: github.repository == 'hashicorp/vault-enterprise'
        uses: hashicorp/vault-action@v3
        with:
          url: ${{ steps.vault-auth.outputs.addr }}
          caCertificate: ${{ steps.vault-auth.outputs.ca_certificate }}
          token: ${{ steps.vault-auth.outputs.token }}
          secrets: |
            kv/data/github/${{ github.repository }}/datadog-ci DATADOG_API_KEY;
            kv/data/github/${{ github.repository }}/github-token username-and-token | github-token;
            kv/data/github/${{ github.repository }}/license license_1 | VAULT_LICENSE_CI;
            kv/data/github/${{ github.repository }}/license license_2 | VAULT_LICENSE_2;
      - id: setup-git-private
        name: Setup Git configuration (private)
        if: github.repository == 'hashicorp/vault-enterprise'
        run: |
          git config --global url."https://${{ steps.secrets.outputs.github-token }}@github.com".insteadOf https://github.com
      - id: setup-git-public
        name: Setup Git configuration (public)
        if: github.repository != 'hashicorp/vault-enterprise'
        run: |
          git config --global url."https://${{ secrets.ELEVATED_GITHUB_TOKEN }}@github.com".insteadOf https://github.com
      - uses: ./.github/actions/set-up-go
        with:
          github-token: ${{ github.repository == 'hashicorp/vault-enterprise' && steps.secrets.outputs.github-token || secrets.ELEVATED_GITHUB_TOKEN }}
      - id: metadata
        name: Set up metadata
        run: echo "go-test-dir=test-results/go-test" | tee -a "$GITHUB_OUTPUT"
      - uses: ./.github/actions/set-up-gotestsum
      - run: mkdir -p ${{ steps.metadata.outputs.go-test-dir }}
      - uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        if: inputs.test-timing-cache-enabled
        with:
          path: ${{ steps.metadata.outputs.go-test-dir }}
          key: ${{ inputs.test-timing-cache-key }}-${{ github.run_number }}
          restore-keys: |
            ${{ inputs.test-timing-cache-key }}-
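          # Note: because the key includes github.run_number it will normally miss on a new run;
          # restore-keys then falls back to prefix matching, restoring the most recently saved
          # timing data from an earlier run.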
      - name: Sanitize timing files
        id: sanitize-timing-files
        run: |
          # Prune invalid timing files
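          # (`jq .` is only used as a JSON validity check here: any timing file that fails to
          # parse is deleted so it cannot break the gotestsum ci-matrix steps below.)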
          find '${{ steps.metadata.outputs.go-test-dir }}' -mindepth 1 -type f -name "*.json" -exec sh -c '
            file="$1";
            jq . "$file" || rm "$file"
          ' shell {} \; > /dev/null 2>&1
      - name: Build matrix excluding binary, integration, and testonly tests
        id: build-non-binary
        if: ${{ !inputs.testonly }}
        env:
          GOPRIVATE: github.com/hashicorp/*
        run: |
          # testonly tests need an additional build tag, but let's exclude them anyway for clarity
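          # gotestsum's ci-matrix tool splits the package list into --partitions buckets, using the
          # cached --timing-files data to balance the estimated runtime of each partition.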
          (
            make all-packages | grep -v "_binary" | grep -v "vault/integ" | grep -v "testonly" | gotestsum tool ci-matrix --debug \
              --partitions "${{ inputs.total-runners }}" \
              --timing-files '${{ steps.metadata.outputs.go-test-dir }}/*.json' > matrix.json
          )
      - name: Build matrix for tests tagged with testonly
        if: ${{ inputs.testonly }}
        env:
          GOPRIVATE: github.com/hashicorp/*
        run: |
          set -exo pipefail
          # enable glob expansion
          shopt -s nullglob
          # testonly tagged tests need an additional tag to be included
          # we also run some extra tests for sanity checking with the testonly build tag
          (
            go list -tags=testonly ./vault/external_tests/{kv,token,*replication-perf*,*testonly*} ./command/*testonly* ./vault/ | gotestsum tool ci-matrix --debug \
              --partitions "${{ inputs.total-runners }}" \
              --timing-files '${{ steps.metadata.outputs.go-test-dir }}/*.json' > matrix.json
          )
          # disable glob expansion
          shopt -u nullglob
      - name: Capture list of binary tests
        if: inputs.binary-tests
        id: list-binary-tests
        run: |
          LIST="$(make all-packages | grep "_binary" | xargs)"
          echo "list=$LIST" >> "$GITHUB_OUTPUT"
      - name: Build complete matrix
        id: build
        run: |
          set -exo pipefail
          matrix_file="matrix.json"
          if [ "${{ inputs.binary-tests }}" == "true" ] && [ -n "${{ steps.list-binary-tests.outputs.list }}" ]; then
            export BINARY_TESTS="${{ steps.list-binary-tests.outputs.list }}"
            jq --arg BINARY "${BINARY_TESTS}" --arg BINARY_INDEX "${{ inputs.total-runners }}" \
              '.include += [{
                "id": $BINARY_INDEX,
                "estimatedRuntime": "N/A",
                "packages": $BINARY,
                "description": "partition $BINARY_INDEX - binary test packages"
              }]' matrix.json > new-matrix.json
            matrix_file="new-matrix.json"
          fi
          # convert the json to a map keyed by id
          (
            echo -n "matrix="
            jq -c \
              '.include | map( { (.id|tostring): . } ) | add' "$matrix_file"
          ) | tee -a "$GITHUB_OUTPUT"
          # extract an array of ids from the json
          (
            echo -n "matrix_ids="
            jq -c \
              '[ .include[].id | tostring ]' "$matrix_file"
          ) | tee -a "$GITHUB_OUTPUT"
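        # Illustrative (hypothetical) shape of the two outputs produced above, for a two-partition run:
        #   matrix:     {"0": {"id": 0, "estimatedRuntime": "...", "packages": "..."}, "1": {...}}
        #   matrix_ids: ["0", "1"]
        # test-go fans out over matrix_ids and looks its packages up in matrix by id.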

  test-go:
    needs: test-matrix
    permissions:
      actions: read
      contents: read
      id-token: write # Note: this permission is explicitly required for Vault auth
    runs-on: ${{ fromJSON(inputs.runs-on) }}
    strategy:
      fail-fast: false
      matrix:
        id: ${{ fromJSON(needs.test-matrix.outputs.matrix_ids) }}
    env:
      GOPRIVATE: github.com/hashicorp/*
      TIMEOUT_IN_MINUTES: ${{ inputs.timeout-minutes }}
    outputs:
      go-test-results-download-pattern: ${{ steps.metadata.outputs.go-test-results-download-pattern }}
      data-race-log-download-pattern: ${{ steps.metadata.outputs.data-race-log-download-pattern }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
        with:
          ref: ${{ inputs.checkout-ref }}
      - uses: ./.github/actions/set-up-go
        with:
          github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      - id: metadata
        name: Set up metadata
        run: |
          # Metadata variables that are used throughout the workflow
          # Example comments assume:
          # - needs.test-matrix.outputs.go-test-dir == test-results/go-test
          # - inputs.name == testonly
          # - inputs.checkout-ref == main
          # - matrix.id == 1
          ref="$(tr / - <<< "${{ inputs.checkout-ref }}")" # main, but removes special characters from refs with /
          name="${{ inputs.name }}-${ref}-${{ matrix.id }}" # testonly-main-1
          go_test_dir='${{ needs.test-matrix.outputs.go-test-dir }}' # test-results/go-test
          test_results_dir="$(dirname "$go_test_dir")" # test-results
          go_test_dir_absolute="$(pwd)/${go_test_dir}" # /home/runner/work/vault/vault/test-results/go-test
          go_test_log_dir="${go_test_dir}/logs" # test-results/go-test/logs
          go_test_log_dir_absolute="${go_test_dir_absolute}/logs" # /home/runner/work/vault/vault/test-results/go-test/logs
          go_test_log_archive_name="test-logs-${name}.tar" # test-logs-testonly-main-1.tar
          go_test_results_upload_key="${test_results_dir}-${name}" # test-results-testonly-main-1
          go_test_results_download_pattern="${test_results_dir}-${{ inputs.name }}-*" # test-results-testonly-*
          gotestsum_results_prefix="results" # results
          gotestsum_junitfile=${go_test_dir}/${gotestsum_results_prefix}-${name}.xml # test-results/go-test/results-testonly-main-1.xml
          gotestsum_jsonfile=${go_test_dir}/${gotestsum_results_prefix}-${name}.json # test-results/go-test/results-testonly-main-1.json
          gotestsum_timing_events=failure-summary-${name}.json # failure-summary-testonly-main-1.json
          failure_summary_file_name="failure-summary-${name}.md" # failure-summary-testonly-main-1.md
          data_race_log_file="data-race.log" # data-race.log
          data_race_log_download_pattern="data-race-${{ inputs.name }}*.log" # data-race-testonly*.log
          data_race_log_upload_key="data-race-${name}.log" # data-race-testonly-main-1.log
          {
            echo "name=${name}"
            echo "failure-summary-file-name=${failure_summary_file_name}"
            echo "data-race-log-file=${data_race_log_file}"
            echo "data-race-log-download-pattern=${data_race_log_download_pattern}"
            echo "data-race-log-upload-key=${data_race_log_upload_key}"
            echo "go-test-dir=${go_test_dir}"
            echo "go-test-log-archive-name=${go_test_log_archive_name}"
            echo "go-test-log-dir=${go_test_log_dir}"
            echo "go-test-log-dir-absolute=${go_test_log_dir_absolute}"
            echo "go-test-results-download-pattern=${go_test_results_download_pattern}"
            echo "go-test-results-upload-key=${go_test_results_upload_key}"
            echo "gotestsum-jsonfile=${gotestsum_jsonfile}"
            echo "gotestsum-junitfile=${gotestsum_junitfile}"
            echo "gotestsum-results-prefix=${gotestsum_results_prefix}"
            echo "gotestsum-timing-events=${gotestsum_timing_events}"
          } | tee -a "$GITHUB_OUTPUT"
      - name: Authenticate to Vault
        id: vault-auth
        if: github.repository == 'hashicorp/vault-enterprise'
        run: vault-auth
      - name: Fetch Secrets
        id: secrets
        if: github.repository == 'hashicorp/vault-enterprise'
        uses: hashicorp/vault-action@v3
        with:
          url: ${{ steps.vault-auth.outputs.addr }}
          caCertificate: ${{ steps.vault-auth.outputs.ca_certificate }}
          token: ${{ steps.vault-auth.outputs.token }}
          secrets: |
            kv/data/github/${{ github.repository }}/datadog-ci DATADOG_API_KEY;
            kv/data/github/${{ github.repository }}/github-token username-and-token | github-token;
            kv/data/github/${{ github.repository }}/license license_1 | VAULT_LICENSE_CI;
            kv/data/github/${{ github.repository }}/license license_2 | VAULT_LICENSE_2;
      - id: setup-git-private
        name: Setup Git configuration (private)
        if: github.repository == 'hashicorp/vault-enterprise'
        run: |
          git config --global url."https://${{ steps.secrets.outputs.github-token }}@github.com".insteadOf https://github.com
      - id: setup-git-public
        name: Setup Git configuration (public)
        if: github.repository != 'hashicorp/vault-enterprise'
        run: |
          git config --global url."https://${{ secrets.ELEVATED_GITHUB_TOKEN }}@github.com".insteadOf https://github.com
      - uses: ./.github/actions/install-external-tools
      - name: Build Vault HSM binary for tests
        if: inputs.binary-tests && matrix.id == inputs.total-runners && github.repository == 'hashicorp/vault-enterprise'
        env:
          GOPRIVATE: github.com/hashicorp/*
        run: |
          set -exo pipefail
          time make prep enthsmdev
          # The subsequent build of vault will blow away the bin folder
          mv bin/vault vault-hsm-binary
      - if: inputs.binary-tests && matrix.id == inputs.total-runners
        name: Build dev binary for binary tests
        # The dev mode binary has to exist for binary tests that are dispatched on the last runner.
        env:
          GOPRIVATE: github.com/hashicorp/*
        run: time make prep dev
      - name: Install gVisor
        # Enterprise repo runners do not allow sudo, so we can't install gVisor there yet.
        if: github.repository != 'hashicorp/vault-enterprise'
        run: |
          (
            set -e
            ARCH="$(uname -m)"
            URL="https://storage.googleapis.com/gvisor/releases/release/latest/${ARCH}"
            wget --quiet "${URL}/runsc" "${URL}/runsc.sha512" \
              "${URL}/containerd-shim-runsc-v1" "${URL}/containerd-shim-runsc-v1.sha512"
            sha512sum -c runsc.sha512 \
              -c containerd-shim-runsc-v1.sha512
            rm -f -- *.sha512
            chmod a+rx runsc containerd-shim-runsc-v1
            sudo mv runsc containerd-shim-runsc-v1 /usr/local/bin
          )
          sudo tee /etc/docker/daemon.json <<EOF
          {
            "runtimes": {
              "runsc": {
                "path": "/usr/local/bin/runsc",
                "runtimeArgs": [
                  "--host-uds=create"
                ]
              }
            }
          }
          EOF
          sudo systemctl reload docker
      - name: Install rootless Docker
        # Enterprise repo runners do not allow sudo, so we can't install system packages there yet.
        if: github.repository != 'hashicorp/vault-enterprise'
        run: |
          sudo apt-get install -y uidmap dbus-user-session
          export FORCE_ROOTLESS_INSTALL=1
          curl -fsSL https://get.docker.com/rootless | sh
          mkdir -p ~/.config/docker/
          tee ~/.config/docker/daemon.json <<EOF
          {
            "runtimes": {
              "runsc": {
                "path": "/usr/local/bin/runsc",
                "runtimeArgs": [
                  "--host-uds=create",
                  "--ignore-cgroups"
                ]
              }
            }
          }
          EOF
          systemctl --user restart docker
          # Ensure the original rootful Docker install is still the default.
          docker context use default
      - id: run-go-tests
        name: Run Go tests
        timeout-minutes: ${{ fromJSON(env.TIMEOUT_IN_MINUTES) }}
        env:
          COMMIT_SHA: ${{ github.sha }}
        run: |
          set -exo pipefail

          # Build the dynamically generated source files.
          make prep

          packages=$(echo "${{ toJSON(needs.test-matrix.outputs.matrix) }}" | jq -c -r --arg id "${{ matrix.id }}" '.[$id] | .packages')

          if [ -z "$packages" ]; then
            echo "no test packages to run"
            exit 1
          fi
          # We don't want VAULT_LICENSE set when running Go tests, because that's
          # not what developers have in their environments and it could break some
          # tests; it would be like setting VAULT_TOKEN. However, some non-Go
          # CI commands, like the UI tests, shouldn't have to worry about licensing.
          # So for tests that want an externally supplied license we provide licenses
          # via the VAULT_LICENSE_CI and VAULT_LICENSE_2 environment variables, and we unset VAULT_LICENSE here.
          unset VAULT_LICENSE

          # Assign test licenses to relevant variables if they aren't already
          if [[ ${{ github.repository }} == 'hashicorp/vault' ]]; then
            export VAULT_LICENSE_CI=${{ secrets.ci_license }}
            export VAULT_LICENSE_2=${{ secrets.ci_license_2 }}
          fi

          # The docker/binary tests are more expensive, and we've had problems with timeouts when running at full
          # parallelism. The default if -p isn't specified is to use NumCPUs, which seems fine for regular tests.
          package_parallelism=""

          if [ -f vault-hsm-binary ]; then
            VAULT_HSM_BINARY="$(pwd)/vault-hsm-binary"
            export VAULT_HSM_BINARY
          fi

          if [ -f bin/vault ]; then
            VAULT_BINARY="$(pwd)/bin/vault"
            export VAULT_BINARY

            package_parallelism="-p 2"
          fi

          # If running Go tests on the enterprise repo, add a flag to rerun failed tests.
          # This is to address the issues with flaky tests affecting the reliability of CI.
          if [[ "${{ github.repository }}" == 'hashicorp/vault-enterprise' ]]; then
            RERUN_FAILS="--rerun-fails"
          fi

          VAULT_TEST_LOG_DIR='${{ steps.metadata.outputs.go-test-log-dir-absolute }}'
          export VAULT_TEST_LOG_DIR
          mkdir -p "$VAULT_TEST_LOG_DIR"
          # shellcheck disable=SC2086 # can't quote RERUN_FAILS
          GOARCH=${{ inputs.go-arch }} VAULT_ADDR='' \
            gotestsum --format=short-verbose \
              --junitfile '${{ steps.metadata.outputs.gotestsum-junitfile }}' \
              --jsonfile '${{ steps.metadata.outputs.gotestsum-jsonfile }}' \
              --jsonfile-timing-events '${{ steps.metadata.outputs.gotestsum-timing-events }}' \
              $RERUN_FAILS \
              --packages "$packages" \
              -- \
              $package_parallelism \
              -tags "${{ inputs.go-tags }}" \
              -timeout=${{ inputs.go-test-timeout }} \
              -parallel=${{ inputs.go-test-parallelism }} \
              ${{ inputs.extra-flags }} \
      - name: Prepare datadog-ci
        if: (github.repository == 'hashicorp/vault' || github.repository == 'hashicorp/vault-enterprise') && (success() || failure())
        continue-on-error: true
        run: |
          if type datadog-ci > /dev/null 2>&1; then
            exit 0
          fi
          # Curl does not always exit 1 if things go wrong. To determine whether the download succeeded,
          # we silence all non-error output and check the results to determine success.
          if ! out="$(curl -sSL --fail https://github.com/DataDog/datadog-ci/releases/latest/download/datadog-ci_linux-x64 --output /usr/local/bin/datadog-ci 2>&1)"; then
            printf "failed to download datadog-ci: %s" "$out"
          fi
          if [[ -n "$out" ]]; then
            printf "failed to download datadog-ci: %s" "$out"
          fi
          chmod +x /usr/local/bin/datadog-ci
      - name: Upload test results to DataDog
        continue-on-error: true
        env:
          DD_ENV: ci
        run: |
          if [[ ${{ github.repository }} == 'hashicorp/vault' ]]; then
            export DATADOG_API_KEY=${{ secrets.DATADOG_API_KEY }}
          fi
          datadog-ci junit upload --service "$GITHUB_REPOSITORY" '${{ steps.metadata.outputs.gotestsum-junitfile }}'
        if: success() || failure()
      - name: Archive test logs
        if: always()
        id: archive-test-logs
        # actions/upload-artifact will compress the artifact for us. We create a tarball to preserve
        # permissions and to support file names with special characters.
        run: |
          tar -cvf '${{ steps.metadata.outputs.go-test-log-archive-name }}' -C "${{ steps.metadata.outputs.go-test-log-dir }}" .
      - name: Upload test logs archives
        uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6
        with:
          name: ${{ steps.metadata.outputs.go-test-log-archive-name }}
          path: ${{ steps.metadata.outputs.go-test-log-archive-name }}
          retention-days: 7
        if: success() || failure()
      - name: Upload test results
        if: success() || failure()
        uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6
        with:
          name: ${{ steps.metadata.outputs.go-test-results-upload-key }}
          path: |
            ${{ steps.metadata.outputs.go-test-dir }}/${{ steps.metadata.outputs.gotestsum-results-prefix }}*.json
            ${{ steps.metadata.outputs.go-test-dir }}/${{ steps.metadata.outputs.gotestsum-results-prefix }}*.xml
          # We cache relevant timing data with actions/cache later so we can let the file expire quickly
          retention-days: 1
      - name: Check for data race failures
        if: success() || failure()
        id: data-race-check
        working-directory: ${{ needs.test-matrix.outputs.go-test-dir }}
        run: |
          # Scan gotestsum output files for data race errors.
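          # A race failure shows up in the gotestsum JSON output as test output containing
          # "WARNING: DATA RACE"; when we find one we extract the race report (everything up to the
          # closing "==================" banner) and append it to a shared log so the status job can
          # aggregate the reports from every partition.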
          data_race_tests=()
          data_race_log='${{ steps.metadata.outputs.data-race-log-file }}'
          for file in *.json; do
            # Check if the test results contain the offending phrase
            if grep -q "WARNING: DATA RACE" "$file"; then
              data_race_tests+=("test-go (${{ matrix.id }})")
              touch "$data_race_log"

              # Write output to our log file so we can aggregate it in the final workflow
              {
                echo "=============== test-go (${{ matrix.id }}) ==========================="
                sed -n '/WARNING: DATA RACE/,/==================/p' "$file" | jq -r -j '.Output'
              } | tee -a "$data_race_log"
            fi
          done

          result="success"
          # Fail the action if there were any failed race tests
          if (("${#data_race_tests[@]}" > 0)); then
            result="failure"
          fi
          echo "data-race-result=${result}" | tee -a "$GITHUB_OUTPUT"
      - name: Upload data race detector failure log
        if: |
          (success() || failure()) &&
          steps.data-race-check.outputs.data-race-result == 'failure'
        uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6
        with:
          name: ${{ steps.metadata.outputs.data-race-log-upload-key }}
          path: ${{ steps.metadata.outputs.go-test-dir }}/${{ steps.metadata.outputs.data-race-log-file }}
          # Set the minimum retention possible. We only upload this because it's the only way to
          # aggregate results from matrix workflows.
          retention-days: 1
          if-no-files-found: error # Make sure we always upload the data race logs if the check failed
      # GitHub Actions doesn't expose the job ID or the URL to the job execution,
      # so we have to fetch it from the API
      - name: Fetch job logs URL
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        if: success() || failure()
        continue-on-error: true
        with:
          retries: 3
          script: |
            // We surround the whole script with a try-catch block, to avoid each of the matrix jobs
            // displaying an error in the GHA workflow run annotations, which gets very noisy.
            // If an error occurs, it will be logged so that we don't lose any information about the reason for failure.
            try {
              const fs = require("fs");
              const result = await github.rest.actions.listJobsForWorkflowRun({
                owner: context.repo.owner,
                per_page: 100,
                repo: context.repo.repo,
                run_id: context.runId,
              });

              // Determine what job name to use for the query. These values are hardcoded, because GHA doesn't
              // expose them in any of the contexts available within a workflow run.
              let prefixToSearchFor;
              switch ("${{ inputs.name }}") {
                case "race":
                  prefixToSearchFor = 'Run Go tests with data race detection / test-go (${{ matrix.id }})'
                  break
                case "fips":
                  prefixToSearchFor = 'Run Go tests with FIPS configuration / test-go (${{ matrix.id }})'
                  break
                default:
                  prefixToSearchFor = 'Run Go tests / test-go (${{ matrix.id }})'
              }

              const jobData = result.data.jobs.filter(
                (job) => job.name.startsWith(prefixToSearchFor)
              );
              const url = jobData[0].html_url;
              const envVarName = "GH_JOB_URL";
              const envVar = envVarName + "=" + url;
              const envFile = process.env.GITHUB_ENV;

              fs.appendFile(envFile, envVar, (err) => {
                if (err) throw err;
                console.log("Successfully set " + envVarName + " to: " + url);
              });
            } catch (error) {
              console.log("Error: " + error);
              return
            }
      - name: Prepare failure summary
        if: success() || failure()
        continue-on-error: true
        run: |
          # This jq query filters out successful tests, leaving only the failures.
          # Then, it formats the results into rows of a Markdown table.
          # An example row will resemble this:
          # | fips | github.com/hashicorp/vault/package | TestName | 0 | 2 | [view test results :scroll:](github.com/link-to-logs) |
          jq -r -n 'inputs
          | select(.Action == "fail")
          | "| ${{ inputs.name }} | \(.Package) | \(.Test // "-") | \(.Elapsed) | ${{ matrix.id }} | [view test results :scroll:](${{ env.GH_JOB_URL }}) |"' \
            '${{ steps.metadata.outputs.gotestsum-timing-events }}' \
            >> '${{ steps.metadata.outputs.failure-summary-file-name }}'
      - name: Upload failure summary
        uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6
        if: success() || failure()
        with:
          name: ${{ steps.metadata.outputs.failure-summary-file-name }}
          path: ${{ steps.metadata.outputs.failure-summary-file-name }}

  status:
    # Perform final data aggregation and determine overall status
    if: always()
    needs:
      - test-matrix
      - test-go
    runs-on: ${{ fromJSON(inputs.runs-on-small) }}
    outputs:
      data-race-output: ${{ steps.status.outputs.data-race-output }}
      data-race-result: ${{ steps.status.outputs.data-race-result }}
    steps:
      - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          pattern: ${{ needs.test-go.outputs.data-race-log-download-pattern }}
          path: data-race-logs
          merge-multiple: true
      # Determine our success/failure status by checking the result status and data race status.
      - id: status
        name: Determine status result
        run: |
          # Determine status result
          result="success"

          # Aggregate all of our test workflows and determine our Go test result from them.
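          # needs.*.result serializes to a JSON array of job results, e.g. ["success", "success"]
          # (illustrative). grep -v only prints the line when it contains neither "failure" nor
          # "cancelled", so the negated check below marks the run failed as soon as any needed job
          # failed or was cancelled.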
          test_go_results=$(tr -d '\n' <<< '${{ toJSON(needs.*.result) }}' | jq -Mrc)
          if ! grep -q -v -E '(failure|cancelled)' <<< "$test_go_results"; then
            test_go_result="failed"
            result="failed"
          else
            test_go_result="success"
          fi

          # If we have downloaded data race detector logs then at least one Go test job detected
          # a data race during execution. We'll fail on that.
          if [ -z "$(ls -A data-race-logs)" ]; then
            data_race_output=""
            data_race_result="success"
          else
            data_race_output="$(cat data-race-logs/*)"
            data_race_result="failed"
            result="failed"
          fi

          # Write Go and data race results to outputs.
          {
            echo "data-race-output<<EOFDATARACEOUTPUT"$'\n'"${data_race_output}"$'\n'EOFDATARACEOUTPUT
            echo "data-race-result=${data_race_result}"
            echo "result=${result}"
            echo "test-go-result=${test_go_result}"
            echo "test-go-results=${test_go_results}"
          } | tee -a "$GITHUB_OUTPUT"
      # Aggregate, prune, and cache our timing data
      - if: ${{ ! cancelled() && needs.test-go.result == 'success' && inputs.test-timing-cache-enabled }}
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: ${{ needs.test-matrix.outputs.go-test-dir }}
          key: ${{ inputs.test-timing-cache-key }}-${{ github.run_number }}
          restore-keys: |
            ${{ inputs.test-timing-cache-key }}-
      - if: ${{ ! cancelled() && needs.test-go.result == 'success' && inputs.test-timing-cache-enabled }}
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          path: ${{ needs.test-matrix.outputs.go-test-dir }}
          pattern: ${{ needs.test-go.outputs.go-test-results-download-pattern }}
          merge-multiple: true
      - if: ${{ ! cancelled() && needs.test-go.result == 'success' && inputs.test-timing-cache-enabled }}
        name: Prune any invalid timing files
        run: |
          ls -lhR '${{ needs.test-matrix.outputs.go-test-dir }}'
          find '${{ needs.test-matrix.outputs.go-test-dir }}' -mindepth 1 -type f -mtime +3 -delete

          # Prune invalid timing files
          find '${{ needs.test-matrix.outputs.go-test-dir }}' -mindepth 1 -type f -name "*.json" -exec sh -c '
            file="$1";
            jq . "$file" || rm "$file"
          ' shell {} \; > /dev/null 2>&1

          ls -lhR '${{ needs.test-matrix.outputs.go-test-dir }}'
      # Determine our overall pass/fail with our Go test results
      - if: always() && steps.status.outputs.result != 'success'
        name: Check for failed status
        run: |
          printf "One or more required go-test workflows failed. Required workflow statuses: ${{ steps.status.outputs.test-go-results }}\n ${{ steps.status.outputs.data-race-output }}"
          exit 1