VAULT-31402: Add verification for all container images (#28605)
Add verification for all container images that are generated as part of the build. Before this change we only ever tested a limited subset of "default" containers based on Alpine Linux that we publish via Docker Hub and AWS ECR. Now we support testing all Alpine and UBI based container images. We also verify the repository and tag information embedded in each by deploying them and verifying that the repo and tag metadata match our expectations.

This does change the k8s scenario interface quite a bit. We now take in an archive image and set image/repo/tag information based on the scenario variants. To enable this I also needed to add `tar` to the UBI base image. It was already available in the Alpine image and is used to copy utilities to the image when deploying and configuring the cluster via Enos.

Since some images contain multiple tags, we also add samples for each image and randomly select which variant to test on a given PR.

Signed-off-by: Ryan Cragun <me@ryan.ec>
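A minimal sketch of the per-PR variant selection described above, assuming the enos CLI (with scenario sample support) and jq are installed and the command is run from the repository root. The sample name is one of those added in enos/k8s/enos-samples-ce.hcl, and the flags mirror the metadata step of the new containers workflow; treat it as illustrative rather than the exact CI invocation:

    # Observe one randomly-seeded scenario element from a named sample; the workflow
    # runs this same shape of command and feeds the resulting JSON into its test matrix.
    sample_seed=$(date +%s)
    sample=$(enos scenario sample observe "ce_ubi_linux_amd64_ce_redhat" \
      --chdir ./enos/k8s --min 1 --max 1 --seed "${sample_seed}" --format json \
      | jq -c '.observation.elements')
    echo "seed=${sample_seed} sample=${sample}"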
This commit is contained in: parent 08e8776dfb, commit c8e6169d5d
.github/actions/containerize/action.yml  (74 changed lines)

@@ -10,31 +10,24 @@ description: |

inputs:
  docker:
    type: boolean
    description: |
      Package the binary into a Docker container suitable for the Docker and AWS registries. We'll
      automatically determine the correct tags and target depending on the vault edition.
    default: true
    default: 'true'
  goarch:
    type: string
    description: The Go GOARCH value environment variable to set during the build.
  goos:
    type: string
    description: The Go GOOS value environment variable to set during the build.
  redhat:
    type: boolean
    description: Package the binary into a UBI container suitable for the Redhat Quay registry.
    default: false
    default: 'false'
  vault-binary-path:
    type: string
    description: The path to the vault binary.
    default: dist/vault
  vault-edition:
    type: string
    description: The edition of vault to build.
    default: ce
  vault-version:
    type: string
    description: The vault version.

outputs:

@@ -48,31 +41,52 @@ runs:
    - id: vars
      shell: bash
      run: |
        if [[ '${{ inputs.vault-edition }}' =~ 'ce' ]]; then
          # CE containers
          container_version='${{ inputs.vault-version }}'
          docker_container_tags='docker.io/hashicorp/vault:${{ inputs.vault-version }} public.ecr.aws/hashicorp/vault:${{ inputs.vault-version }}'
          docker_container_target='default'
          redhat_container_tags='quay.io/redhat-isv-containers/5f89bb5e0b94cf64cfeb500a:${{ inputs.vault-version }}-ubi'
          redhat_container_target='ubi'
        else
          # Ent containers
          container_version='${{ inputs.vault-version }}+${{ inputs.vault-edition }}'

          if [[ '${{ inputs.vault-edition }}' =~ 'fips' ]]; then
            # Ent FIPS 140-2 containers
            docker_container_tags='docker.io/hashicorp/vault-enterprise-fips:${{ inputs.vault-version }}-${{ inputs.vault-edition }} public.ecr.aws/hashicorp/vault-enterprise-fips:${{ inputs.vault-version }}-${{ inputs.vault-edition }}'
            docker_container_target='ubi-fips'
            redhat_container_tags='quay.io/redhat-isv-containers/6283f645d02c6b16d9caeb8e:${{ inputs.vault-version }}-${{ inputs.vault-edition }}-ubi'
            redhat_container_target='ubi-fips'
          else
            # All other Ent containers
        case '${{ inputs.vault-edition }}' in
          "ce")
            container_version='${{ inputs.vault-version }}'
            docker_container_tags='docker.io/hashicorp/vault:${{ inputs.vault-version }} public.ecr.aws/hashicorp/vault:${{ inputs.vault-version }}'
            docker_container_target='default'
            redhat_container_tags='quay.io/redhat-isv-containers/5f89bb5e0b94cf64cfeb500a:${{ inputs.vault-version }}-ubi'
            redhat_container_target='ubi'
            ;;
          "ent")
            container_version='${{ inputs.vault-version }}+${{ inputs.vault-edition }}'
            docker_container_tags='docker.io/hashicorp/vault-enterprise:${{ inputs.vault-version }}-${{ inputs.vault-edition}} public.ecr.aws/hashicorp/vault-enterprise:${{ inputs.vault-version }}-${{ inputs.vault-edition }}'
            docker_container_target='default'
            redhat_container_tags='quay.io/redhat-isv-containers/5f89bb9242e382c85087dce2:${{ inputs.vault-version }}-${{ inputs.vault-edition }}-ubi'
            redhat_container_target='ubi'
          fi
        fi
            ;;
          "ent.hsm")
            container_version='${{ inputs.vault-version }}+${{ inputs.vault-edition }}'
            docker_container_tags='docker.io/hashicorp/vault-enterprise:${{ inputs.vault-version }}-${{ inputs.vault-edition}} public.ecr.aws/hashicorp/vault-enterprise:${{ inputs.vault-version }}-${{ inputs.vault-edition }}'
            docker_container_target='ubi-hsm'
            redhat_container_tags='quay.io/redhat-isv-containers/5f89bb9242e382c85087dce2:${{ inputs.vault-version }}-${{ inputs.vault-edition }}-ubi'
            redhat_container_target='ubi-hsm'
            ;;
          "ent.hsm.fips1402")
            container_version='${{ inputs.vault-version }}+${{ inputs.vault-edition }}'
            docker_container_tags='docker.io/hashicorp/vault-enterprise:${{ inputs.vault-version }}-${{ inputs.vault-edition}} public.ecr.aws/hashicorp/vault-enterprise:${{ inputs.vault-version }}-${{ inputs.vault-edition }}'
            docker_container_target='ubi-hsm-fips'
            redhat_container_tags='quay.io/redhat-isv-containers/5f89bb9242e382c85087dce2:${{ inputs.vault-version }}-${{ inputs.vault-edition }}-ubi'
            redhat_container_target='ubi-hsm-fips'
            ;;
          "ent.fips1402")
            # NOTE: For compatibility we still publish the ent.fips1402 containers to different
            # namespaces. All ent, ent.hsm, and ent.hsm.fips1402 containers are released in the
            # enterprise namespaces. After we've updated the upstream docker action to support
            # multiple tags we can start to tag images with both namespaces, publish to both, and
            # eventually sunset the fips1402 specific namespaces.
            container_version='${{ inputs.vault-version }}+${{ inputs.vault-edition }}'
            docker_container_tags='docker.io/hashicorp/vault-enterprise-fips:${{ inputs.vault-version }}-${{ inputs.vault-edition }} public.ecr.aws/hashicorp/vault-enterprise-fips:${{ inputs.vault-version }}-${{ inputs.vault-edition }}'
            docker_container_target='ubi-fips'
            redhat_container_tags='quay.io/redhat-isv-containers/6283f645d02c6b16d9caeb8e:${{ inputs.vault-version }}-${{ inputs.vault-edition }}-ubi'
            redhat_container_target='ubi-fips'
            ;;
          *)
            echo "Cannot generate container tags for unknown vault edition: ${{ inputs.vault-edition }}" 2>&1
            exit 1
            ;;
        esac
        {
          echo "container-version=${container_version}"
          echo "docker-container-tags=${docker_container_tags}"
.github/workflows/build-artifacts-ce.yml  (28 changed lines)

@@ -9,12 +9,15 @@ on:
    inputs:
      build-all:
        type: boolean
        description: Build all extended artifacts
        default: false
      build-date:
        type: string
        description: The date associated with the revision SHA
        required: true
      checkout-ref:
        type: string
        description: The repo Git SHA to checkout
        default: ""
      compute-build:
        type: string # JSON encoded to support passing arrays

@@ -30,15 +33,19 @@ on:
        required: true
      vault-revision:
        type: string
        description: The revision SHA of vault
        required: true
      vault-version:
        type: string
        description: The version of vault
        required: true
      vault-version-package:
        type: string
        description: Whether or not to package the binary as Debian and RPM packages
        required: true
      web-ui-cache-key:
        type: string
        description: The UI asset cache key
        required: true
  workflow_call:
    inputs:

@@ -119,7 +126,26 @@ jobs:
      # Outputs are strings so we need to encode our collection outputs as JSON.
      testable-containers: |
        [
          { "artifact": "${{ github.event.repository.name }}_default_linux_amd64_${{ inputs.vault-version }}_${{ inputs.vault-revision }}.docker.tar" }
          {
            "sample": "ce_default_linux_amd64_ent_docker",
            "artifact": "${{ github.event.repository.name }}_default_linux_amd64_${{ inputs.vault-version }}_${{ inputs.vault-revision }}.docker.tar",
            "edition": "ce"
          },
          {
            "sample": "ce_default_linux_arm64_ce_docker",
            "artifact": "${{ github.event.repository.name }}_default_linux_arm64_${{ inputs.vault-version }}_${{ inputs.vault-revision }}.docker.tar",
            "edition": "ce"
          },
          {
            "sample": "ce_ubi_linux_amd64_ce_redhat",
            "artifact": "${{ github.event.repository.name}}_ubi_linux_amd64_${{ inputs.vault-version}}_${{ inputs.vault-revision }}.docker.redhat.tar",
            "edition": "ce"
          },
          {
            "sample": "ce_ubi_linux_arm64_ce_redhat",
            "artifact": "${{ github.event.repository.name}}_ubi_linux_arm64_${{ inputs.vault-version}}_${{ inputs.vault-revision }}.docker.redhat.tar",
            "edition": "ce"
          }
        ]
      testable-packages: |
        [
.github/workflows/build.yml  (12 changed lines)

@@ -255,16 +255,18 @@ jobs:
      - setup
      - ui
      - artifacts
    uses: ./.github/workflows/enos-run-k8s.yml
    uses: ./.github/workflows/test-run-enos-scenario-containers.yml
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJSON(needs.artifacts.outputs.testable-containers) }}
    with:
      artifact-build-date: ${{ needs.setup.outputs.build-date }}
      artifact-name: ${{ matrix.artifact }}
      artifact-revision: ${{ needs.setup.outputs.vault-revision }}
      artifact-version: ${{ needs.setup.outputs.vault-version-metadata }}
      build-artifact-name: ${{ matrix.artifact }}
      sample-max: 1
      sample-name: ${{ matrix.sample }}
      vault-edition: ${{ matrix.edition }}
      vault-revision: ${{ needs.setup.outputs.vault-revision }}
      vault-version: ${{ needs.setup.outputs.vault-version-metadata }}
    secrets: inherit

  completed-successfully:
.github/workflows/enos-run-k8s.yml  (deleted, 113 lines)

@@ -1,113 +0,0 @@
---
name: enos-k8s

on:
  workflow_call:
    inputs:
      artifact-build-date:
        required: false
        type: string
      artifact-name:
        required: true
        type: string
      artifact-revision:
        required: true
        type: string
      artifact-version:
        required: true
        type: string

env:
  ARTIFACT_BUILD_DATE: ${{ inputs.artifact-build-date }}
  ARTIFACT_NAME: ${{ inputs.artifact-name }}
  ARTIFACT_REVISION: ${{ inputs.artifact-revision }}
  ARTIFACT_VERSION: ${{ inputs.artifact-version }}

jobs:
  enos:
    name: Integration
    runs-on: ${{ fromJSON(contains(inputs.artifact-name, 'vault-enterprise') && (contains(inputs.artifact-name, 'arm64') && '["self-hosted","ondemand","os=ubuntu-arm","type=c6g.xlarge"]' || '["self-hosted","linux","small"]') || '"ubuntu-latest"') }}
    env:
      GITHUB_TOKEN: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
    steps:
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Set up Terraform
        uses: hashicorp/setup-terraform@v3
        with:
          # the Terraform wrapper will break Terraform execution in Enos because
          # it changes the output to text when we expect it to be JSON.
          terraform_wrapper: false
      - name: Set up Enos
        uses: hashicorp/action-setup-enos@v1
        with:
          github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      - name: Download Docker Image
        id: download
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: ${{ inputs.artifact-name }}
          path: ./enos/support/downloads
      - name: Prepare for scenario execution
        env:
          IS_ENT: ${{ contains(env.ARTIFACT_NAME, 'vault-enterprise' ) }}
        run: |
          mkdir -p ./enos/support/terraform-plugin-cache
          if [ "$IS_ENT" == true ]; then
            echo "${{ secrets.VAULT_LICENSE }}" > ./enos/support/vault.hclic || true
            echo "edition=ent" >> "$GITHUB_ENV"
            echo "edition set to 'ent'"
            echo "image_repo=hashicorp/vault-enterprise" >> "$GITHUB_ENV"
            echo "image repo set to 'hashicorp/vault-enterprise'"
          else
            echo "edition=ce" >> "$GITHUB_ENV"
            echo "edition set to 'ce'"
            echo "image_repo=hashicorp/vault" >> "$GITHUB_ENV"
            echo "image repo set to 'hashicorp/vault'"
          fi
      - name: Run Enos scenario
        id: run
        # Continue once and retry to handle occasional blips when creating
        # infrastructure.
        continue-on-error: true
        env:
          ENOS_VAR_tfc_api_token: ${{ secrets.TF_API_TOKEN }}
          ENOS_VAR_terraform_plugin_cache_dir: ../support/terraform-plugin-cache
          ENOS_VAR_vault_build_date: ${{ env.ARTIFACT_BUILD_DATE }}
          ENOS_VAR_vault_product_version: ${{ env.ARTIFACT_VERSION }}
          ENOS_VAR_vault_product_revision: ${{ env.ARTIFACT_REVISION }}
          ENOS_VAR_vault_docker_image_archive: ${{steps.download.outputs.download-path}}/${{ env.ARTIFACT_NAME }}
          ENOS_VAR_vault_image_repository: ${{ env.image_repo }}
        run: |
          enos scenario run --timeout 10m0s --chdir ./enos/k8s edition:${{ env.edition }}
      - name: Retry Enos scenario
        id: run_retry
        if: steps.run.outcome == 'failure'
        env:
          ENOS_VAR_tfc_api_token: ${{ secrets.TF_API_TOKEN }}
          ENOS_VAR_terraform_plugin_cache_dir: ../support/terraform-plugin-cache
          ENOS_VAR_vault_build_date: ${{ env.ARTIFACT_BUILD_DATE }}
          ENOS_VAR_vault_product_version: ${{ env.ARTIFACT_VERSION }}
          ENOS_VAR_vault_product_revision: ${{ env.ARTIFACT_REVISION }}
          ENOS_VAR_vault_docker_image_archive: ${{steps.download.outputs.download-path}}/${{ env.ARTIFACT_NAME }}
          ENOS_VAR_vault_image_repository: ${{ env.image_repo }}
        run: |
          enos scenario run --timeout 10m0s --chdir ./enos/k8s edition:${{ env.edition }}
      - name: Destroy Enos scenario
        if: ${{ always() }}
        env:
          ENOS_VAR_tfc_api_token: ${{ secrets.TF_API_TOKEN }}
          ENOS_VAR_terraform_plugin_cache_dir: ./support/terraform-plugin-cache
          ENOS_VAR_vault_build_date: ${{ env.ARTIFACT_BUILD_DATE }}
          ENOS_VAR_vault_product_version: ${{ env.ARTIFACT_VERSION }}
          ENOS_VAR_vault_product_revision: ${{ env.ARTIFACT_REVISION }}
          ENOS_VAR_vault_docker_image_archive: ${{steps.download.outputs.download-path}}
          ENOS_VAR_vault_image_repository: ${{ env.image_repo }}
        run: |
          enos scenario destroy --timeout 10m0s --chdir ./enos/k8s edition:${{ env.edition }}
      - name: Cleanup Enos runtime directories
        if: ${{ always() }}
        run: |
          rm -rf /tmp/enos*
          rm -rf ./enos/support
          rm -rf ./enos/k8s/.enos
.github/workflows/test-run-enos-scenario-containers.yml  (new file, 140 lines)

@@ -0,0 +1,140 @@
---
name: enos-containers

on:
  # Only trigger this working using workflow_call. This workflow requires many
  # secrets that must be inherited from the caller workflow.
  workflow_call:
    inputs:
      # The name of the artifact that we're going to use for testing. This should
      # match exactly to build artifacts uploaded to Github and Artifactory.
      build-artifact-name:
        required: true
        type: string
      # The maximum number of scenarios to include in the test sample.
      sample-max:
        default: 1
        type: number
      # The name of the enos scenario sample that defines compatible scenarios we can
      # can test with.
      sample-name:
        required: true
        type: string
      vault-edition:
        required: false
        type: string
        default: ce
      # The Git commit SHA used as the revision when building vault
      vault-revision:
        required: true
        type: string
      vault-version:
        required: true
        type: string

jobs:
  metadata:
    runs-on: ubuntu-latest
    outputs:
      build-date: ${{ steps.metadata.outputs.build-date }}
      sample: ${{ steps.metadata.outputs.sample }}
      vault-version: ${{ steps.metadata.outputs.vault-version }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
        with:
          ref: ${{ inputs.vault-revision }}
      - uses: hashicorp/action-setup-enos@v1
        with:
          github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      - id: metadata
        run: |
          build_date=$(make ci-get-date)
          sample_seed=$(date +%s)
          if ! sample=$(enos scenario sample observe "${{ inputs.sample-name }}" --chdir ./enos/k8s --min 1 --max "${{ inputs.sample-max }}" --seed "${sample_seed}" --format json | jq -c ".observation.elements"); then
            echo "failed to do sample observation: $sample" 2>&1
            exit 1
          fi
          if [[ "${{ inputs.vault-edition }}" == "ce" ]]; then
            vault_version="${{ inputs.vault-version }}"
          else
            # shellcheck disable=2001
            vault_version="$(sed 's/+ent/+${{ inputs.vault-edition }}/g' <<< '${{ inputs.vault-version }}')"
          fi
          {
            echo "build-date=${build_date}"
            echo "vault-version=${vault_version}"
            echo "sample=${sample}"
            echo "sample-seed=${sample_seed}" # This isn't used outside of here but is nice to know for duplicating observations
          } | tee -a "$GITHUB_OUTPUT"

  run:
    needs: metadata
    name: run ${{ matrix.scenario.id.filter }}
    runs-on: ${{ fromJSON(contains(inputs.build-artifact-name, 'vault-enterprise') && (contains(inputs.build-artifact-name, 'arm64') && '["self-hosted","ondemand","os=ubuntu-arm","type=c6g.xlarge"]' || '["self-hosted","linux","small"]') || (contains(inputs.build-artifact-name, 'arm64') && '"ubuntu-22.04-arm"' || '"ubuntu-latest"')) }}
    strategy:
      fail-fast: false # don't fail as that can skip required cleanup steps for jobs
      matrix:
        include: ${{ fromJSON(needs.metadata.outputs.sample) }}
    env:
      GITHUB_TOKEN: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - uses: hashicorp/setup-terraform@v3
        with:
          # the Terraform wrapper will break Terraform execution in Enos because
          # it changes the output to text when we expect it to be JSON.
          terraform_wrapper: false
      - uses: hashicorp/action-setup-enos@v1
        with:
          github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      - name: Download Docker Image
        id: download
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: ${{ inputs.build-artifact-name }}
          path: ./enos/support/downloads
      - if: inputs.vault-edition != 'ce'
        name: Configure license
        run: |
          echo "${{ secrets.VAULT_LICENSE }}" > ./enos/support/vault.hclic || true
      - name: Run Enos scenario
        id: run
        # Continue once and retry to handle occasional blips when creating
        # infrastructure.
        continue-on-error: true
        env:
          ENOS_VAR_terraform_plugin_cache_dir: ../support/terraform-plugin-cache
          ENOS_VAR_vault_build_date: ${{ needs.metadata.outputs.build-date }}
          ENOS_VAR_vault_version: ${{ needs.metadata.outputs.vault-version }}
          ENOS_VAR_vault_revision: ${{ inputs.vault-revision }}
          ENOS_VAR_container_image_archive: ${{steps.download.outputs.download-path}}/${{ inputs.build-artifact-name }}
        run: |
          mkdir -p ./enos/support/terraform-plugin-cache
          enos scenario run --timeout 10m0s --chdir ./enos/k8s ${{ matrix.scenario.id.filter }}
      - name: Retry Enos scenario
        id: run_retry
        if: steps.run.outcome == 'failure'
        env:
          ENOS_VAR_terraform_plugin_cache_dir: ../support/terraform-plugin-cache
          ENOS_VAR_vault_build_date: ${{ needs.metadata.outputs.build-date }}
          ENOS_VAR_vault_version: ${{ needs.metadata.outputs.vault-version }}
          ENOS_VAR_vault_revision: ${{ inputs.vault-revision }}
          ENOS_VAR_container_image_archive: ${{steps.download.outputs.download-path}}/${{ inputs.build-artifact-name }}
        run: |
          enos scenario run --timeout 10m0s --chdir ./enos/k8s ${{ matrix.scenario.id.filter }}
      - name: Destroy Enos scenario
        if: ${{ always() }}
        env:
          ENOS_VAR_terraform_plugin_cache_dir: ../support/terraform-plugin-cache
          ENOS_VAR_vault_build_date: ${{ needs.metadata.outputs.build-date }}
          ENOS_VAR_vault_version: ${{ needs.metadata.outputs.vault-version }}
          ENOS_VAR_vault_revision: ${{ inputs.vault-revision }}
          ENOS_VAR_container_image_archive: ${{steps.download.outputs.download-path}}/${{ inputs.build-artifact-name }}
        run: |
          enos scenario destroy --timeout 10m0s --grpc-listen http://localhost --chdir ./enos/k8s ${{ matrix.scenario.id.filter }}
      - name: Cleanup Enos runtime directories
        if: ${{ always() }}
        run: |
          rm -rf /tmp/enos*
          rm -rf ./enos/support
          rm -rf ./enos/k8s/.enos

(hunk from a second workflow file; its header was not captured in this mirror)

@@ -59,7 +59,10 @@ jobs:
      run: |
        build_date=$(make ci-get-date)
        sample_seed=$(date +%s)
        sample=$(enos scenario sample observe "${{ inputs.sample-name }}" --chdir ./enos --min 1 --max "${{ inputs.sample-max }}" --seed "${sample_seed}" --format json | jq -c ".observation.elements")
        if ! sample=$(enos scenario sample observe "${{ inputs.sample-name }}" --chdir ./enos --min 1 --max "${{ inputs.sample-max }}" --seed "${sample_seed}" --format json | jq -c ".observation.elements"); then
          echo "failed to do sample observation: $sample" 2>&1
          exit 1
        fi
        if [[ "${{ inputs.vault-edition }}" == "ce" ]]; then
          vault_version="${{ inputs.vault-version }}"
        else
.github/workflows/test-run-enos-scenario.yml  (new file, 129 lines)

@@ -0,0 +1,129 @@
# Reusable workflow called by interactive scenario tests in GHA
name: Test run Vault Enos scenario

on:
  workflow_call:
    inputs:
      artifact-source:
        type: string
        description: "The artifact source to test artifactory or local (use local for current branch)"
        required: true
      artifact-type:
        type: string
        description: "The Vault artifact type to test"
        required: true
      distro:
        type: string
        description: "Linux distribution that Vault replication will be tested on"
        required: true
      product-version:
        type: string
        description: "Vault version to test (vault_product_version)"
        required: false
      scenario:
        type: string
        description: "Enos test scenario to run"
        required: true
      ssh-key-name:
        type: string
        default: ${{ github.event.repository.name }}-ci-ssh-key
      vault-revision:
        type: string
        description: "The git SHA of the Vault release (vault_revision)"
        required: false

jobs:
  enos-run-vault-interactive-test:
    name: Enos run Vault interactive test
    runs-on: ubuntu-latest
    timeout-minutes: 120
    env:
      GITHUB_TOKEN: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      # Pass in enos variables
      ENOS_VAR_aws_ssh_keypair_name: ${{ inputs.ssh-key-name }}
      ENOS_VAR_vault_log_level: trace
      ENOS_VAR_aws_ssh_private_key_path: ./support/private_key.pem
      ENOS_VAR_tfc_api_token: ${{ secrets.TF_API_TOKEN }}
      ENOS_VAR_artifactory_username: ${{ secrets.ARTIFACTORY_USER }}
      ENOS_VAR_artifactory_token: ${{ secrets.ARTIFACTORY_TOKEN }}
      ENOS_VAR_terraform_plugin_cache_dir: ./support/terraform-plugin-cache
      ENOS_VAR_vault_license_path: ./support/vault.hclic
      ENOS_DEBUG_DATA_ROOT_DIR: /tmp/enos-debug-data
      VAULT_METADATA: ent
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Set product version and revision
        # If the Vault version and revision are not provided as workflow inputs, incase of
        # testing local artifact, the environment variables ENOS_VAR_vault_product_version
        # and ENOS_VAR_vault_revision are set using the current branch
        id: set-version-sha
        run: |
          [[ -n "${{ inputs.product-version }}" ]] && echo "ENOS_VAR_vault_product_version=${{ inputs.product-version }}" >> "$GITHUB_ENV" || echo "ENOS_VAR_vault_product_version=$(make ci-get-version)" >> "$GITHUB_ENV"
          [[ -n "${{ inputs.vault-revision }}" ]] && echo "ENOS_VAR_vault_revision=${{ inputs.vault-revision }}" >> "$GITHUB_ENV" || echo "ENOS_VAR_vault_revision=$(make ci-get-revision)" >> "$GITHUB_ENV"
      - uses: ./.github/actions/set-up-go
        with:
          github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      - name: Configure Git
        run: git config --global url."https://${{ secrets.ELEVATED_GITHUB_TOKEN }}:@github.com".insteadOf "https://github.com"
      - name: Set up node
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        with:
          node-version: 14
          cache-dependency-path: ui/yarn.lock
      - uses: hashicorp/setup-terraform@v2
        with:
          # the Terraform wrapper will break Terraform execution in Enos because
          # it changes the output to text when we expect it to be JSON.
          terraform_wrapper: false
      - uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_CI }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_CI }}
          aws-region: 'us-west-1'
          role-to-assume: ${{ secrets.AWS_ROLE_ARN_CI }}
          role-skip-session-tagging: true
          role-duration-seconds: 3600
      - uses: hashicorp/action-setup-enos@v1
        with:
          github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
      - name: Prepare scenario dependencies
        id: scenario-deps
        run: |
          mkdir -p ./enos/support/terraform-plugin-cache
          mkdir -p /tmp/enos-scenario-logs
          echo logsdir="/tmp/enos-scenario-logs" >> "$GITHUB_OUTPUT"
          echo "${{ secrets.SSH_KEY_PRIVATE_CI }}" > ./enos/support/private_key.pem
          chmod 600 ./enos/support/private_key.pem
      - name: Setup Vault Enterprise License
        id: license
        run: echo "${{ secrets.VAULT_LICENSE }}" > ./enos/support/vault.hclic
      - name: Run Enos scenario
        id: run
        run: enos scenario run --timeout 60m0s --chdir ./enos ${{ inputs.scenario }}
      - name: Collect logs when scenario fails
        id: collect_logs
        if: ${{ always() }}
        run: |
          bash -x ./scripts/gha_enos_logs.sh "${{ steps.scenario-deps.outputs.logsdir }}" "${{ inputs.scenario }}" "${{ inputs.distro }}" "${{ inputs.artifact-type }}" 2>/dev/null
          find "${{ steps.scenario-deps.outputs.logsdir }}" -maxdepth 0 -empty -exec rmdir {} \;
      - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        if: ${{ always() }}
        with:
          name: enos-scenario-logs
          path: ${{ steps.scenario-deps.outputs.logsdir }}
          retention-days: 1
      - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        if: ${{ always() }}
        with:
          name: enos-debug-data-logs
          path: ${{ env.ENOS_DEBUG_DATA_ROOT_DIR }}
          retention-days: 1
      - name: Ensure scenario has been destroyed
        if: ${{ always() }}
        run: enos scenario destroy --timeout 60m0s --grpc-listen http://localhost --chdir ./enos ${{ inputs.scenario }}
      - name: Clean up Enos runtime directories
        if: ${{ always() }}
        run: |
          rm -rf /tmp/enos*
          rm -rf ./enos/support
          rm -rf ./enos/.enos
Dockerfile  (25 changed lines)

@@ -2,7 +2,7 @@
# SPDX-License-Identifier: BUSL-1.1

## DOCKERHUB DOCKERFILE ##
FROM alpine:3 as default
FROM alpine:3 AS default

ARG BIN_NAME
# NAME and PRODUCT_VERSION are the name of the software in releases.hashicorp.com

@@ -34,7 +34,11 @@ ENV VERSION=$VERSION
# Create a non-root user to run the software.
RUN addgroup ${NAME} && adduser -S -G ${NAME} ${NAME}

RUN apk add --no-cache libcap su-exec dumb-init tzdata
RUN apk add --no-cache libcap su-exec dumb-init tzdata curl && \
    mkdir -p /usr/share/doc/vault && \
    curl -o /usr/share/doc/vault/EULA.txt https://eula.hashicorp.com/EULA.txt && \
    curl -o /usr/share/doc/vault/TermsOfEvaluation.txt https://eula.hashicorp.com/TermsOfEvaluation.txt && \
    apk del curl

COPY dist/$TARGETOS/$TARGETARCH/$BIN_NAME /bin/

@@ -75,7 +79,7 @@ CMD ["server", "-dev"]

## UBI DOCKERFILE ##
FROM registry.access.redhat.com/ubi8/ubi-minimal as ubi
FROM registry.access.redhat.com/ubi8/ubi-minimal AS ubi

ARG BIN_NAME
# NAME and PRODUCT_VERSION are the name of the software in releases.hashicorp.com

@@ -111,7 +115,7 @@ COPY LICENSE /licenses/LICENSE.txt
# this (https://github.com/hashicorp/docker-vault/blob/master/ubi/Dockerfile),
# we copy in the Vault binary from CRT.
RUN set -eux; \
    microdnf install -y ca-certificates gnupg openssl libcap tzdata procps shadow-utils util-linux
    microdnf install -y ca-certificates gnupg openssl libcap tzdata procps shadow-utils util-linux tar

# Create a non-root user to run the software.
RUN groupadd --gid 1000 vault && \

@@ -127,7 +131,7 @@ COPY dist/$TARGETOS/$TARGETARCH/$BIN_NAME /bin/
# storage backend, if desired; the server will be started with /vault/config as
# the configuration directory so you can add additional config files in that
# location.
ENV HOME /home/vault
ENV HOME=/home/vault
RUN mkdir -p /vault/logs && \
    mkdir -p /vault/file && \
    mkdir -p /vault/config && \

@@ -136,6 +140,11 @@ RUN mkdir -p /vault/logs && \
    chgrp -R 0 $HOME && chmod -R g+rwX $HOME && \
    chgrp -R 0 /vault && chmod -R g+rwX /vault

# Include EULA and Terms of Eval
RUN mkdir -p /usr/share/doc/vault && \
    curl -o /usr/share/doc/vault/EULA.txt https://eula.hashicorp.com/EULA.txt && \
    curl -o /usr/share/doc/vault/TermsOfEvaluation.txt https://eula.hashicorp.com/TermsOfEvaluation.txt

# Expose the logs directory as a volume since there's potentially long-running
# state in there
VOLUME /vault/logs

@@ -162,3 +171,9 @@ USER vault
# # By default you'll get a single-node development server that stores everything
# # in RAM and bootstraps itself. Don't use this configuration for production.
CMD ["server", "-dev"]

FROM ubi AS ubi-fips

FROM ubi AS ubi-hsm

FROM ubi AS ubi-hsm-fips
(hunk from another file; its header was not captured in this mirror)

@@ -12,39 +12,39 @@ module "load_docker_image" {
module "k8s_deploy_vault" {
  source = "../modules/k8s_deploy_vault"

  vault_instance_count = var.vault_instance_count
  vault_instance_count = var.instance_count
}

module "k8s_verify_build_date" {
  source = "../modules/k8s_vault_verify_build_date"

  vault_instance_count = var.vault_instance_count
  vault_instance_count = var.instance_count
}

module "k8s_verify_replication" {
  source = "../modules/k8s_vault_verify_replication"

  vault_instance_count = var.vault_instance_count
  vault_instance_count = var.instance_count
}

module "k8s_verify_ui" {
  source = "../modules/k8s_vault_verify_ui"

  vault_instance_count = var.vault_instance_count
  vault_instance_count = var.instance_count
}

module "k8s_verify_version" {
  source = "../modules/k8s_vault_verify_version"

  vault_instance_count   = var.vault_instance_count
  vault_product_version  = var.vault_product_version
  vault_product_revision = var.vault_product_revision
  vault_instance_count   = var.instance_count
  vault_product_version  = var.vault_version
  vault_product_revision = var.vault_revision
}

module "k8s_verify_write_data" {
  source = "../modules/k8s_vault_verify_write_data"

  vault_instance_count = var.vault_instance_count
  vault_instance_count = var.instance_count
}

module "read_license" {
enos/k8s/enos-qualities.hcl  (new file, 14 lines)

@@ -0,0 +1,14 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

quality "vault_artifact_container_alpine" {
  description = "The candidate binary packaged as an Alpine package is used for testing"
}

quality "vault_artifact_container_ubi" {
  description = "The candidate binary packaged as an UBI package is used for testing"
}

quality "vault_artifact_container_tags" {
  description = "The candidate binary has the expected tags"
}
enos/k8s/enos-samples-ce.hcl  (new file, 38 lines)

@@ -0,0 +1,38 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1

sample "ce_default_linux_amd64_ent_docker" {
  subset "k8s" {
    matrix {
      repo    = ["docker", "ecr"]
      edition = ["ce"]
    }
  }
}

sample "ce_default_linux_arm64_ce_docker" {
  subset "k8s" {
    matrix {
      repo    = ["docker", "ecr"]
      edition = ["ce"]
    }
  }
}

sample "ce_ubi_linux_amd64_ce_redhat" {
  subset "k8s" {
    matrix {
      repo    = ["quay"]
      edition = ["ce"]
    }
  }
}

sample "ce_ubi_linux_arm64_ce_redhat" {
  subset "k8s" {
    matrix {
      repo    = ["quay"]
      edition = ["ce"]
    }
  }
}
(hunks from another file; its header was not captured in this mirror)

@@ -2,8 +2,17 @@
# SPDX-License-Identifier: BUSL-1.1

scenario "k8s" {
  description = <<-EOF
    The k8s scenario verifies Vault when running in Kubernetes mode. The build can be a container
    in a remote repository or a local container archive tarball.

    The scenario creates a new kind kubernetes cluster in Docker and creates a Vault Cluster using
    the candidate artifact and verifies behavior against the Vault cluster.
  EOF

  matrix {
    edition = ["ce", "ent"]
    edition = ["ce", "ent", "ent.fips1402", "ent.hsm", "ent.hsm.fips1402"]
    repo    = ["docker", "ecr", "quay"]
  }

  terraform_cli = terraform_cli.default

@@ -15,15 +24,106 @@ scenario "k8s" {
  ]

  locals {
    image_path = abspath(var.vault_docker_image_archive)

    image_repo = var.vault_image_repository != null ? var.vault_image_repository : matrix.edition == "ce" ? "hashicorp/vault" : "hashicorp/vault-enterprise"
    image_tag  = replace(var.vault_product_version, "+ent", "-ent")

    // For now this works as the vault_version includes metadata. If we ever get to the point that
    // vault_version excludes metadata we'll have to include the matrix.edition here as well.
    tag_version     = replace(var.vault_version, "+ent", "-ent")
    tag_version_ubi = "${local.tag_version}-ubi"
    // When we load candidate images into our k8s cluster we verify that the archives embedded
    // repository and tag match our expectations. This is the source of truth for what we _expect_
    // various artifacts to have. The source of truth for what we use when building is defined in
    // .github/actions/containerize. If you are modifying these expectations you likely need to
    // modify the source of truth there.
    repo_metadata = {
      "ce" = {
        docker = {
          // https://hub.docker.com/r/hashicorp/vault
          repo = "hashicorp/vault"
          tag  = local.tag_version
        }
        ecr = {
          // https://gallery.ecr.aws/hashicorp/vault
          repo = "public.ecr.aws/hashicorp/vault"
          tag  = local.tag_version
        }
        quay = {
          // https://catalog.redhat.com/software/containers/hashicorp/vault/5fda55bd2937386820429e0c
          repo = "quay.io/redhat-isv-containers/5f89bb5e0b94cf64cfeb500a"
          tag  = local.tag_version_ubi
        }
      },
      "ent" = {
        docker = {
          // https://hub.docker.com/r/hashicorp/vault-enterprise
          repo = "hashicorp/vault-enterprise"
          tag  = local.tag_version
        }
        ecr = {
          // https://gallery.ecr.aws/hashicorp/vault-enterprise
          repo = "public.ecr.aws/hashicorp/vault-enterprise"
          tag  = local.tag_version
        }
        quay = {
          // https://catalog.redhat.com/software/containers/hashicorp/vault-enterprise/5fda5633ac3db90370a26443
          repo = "quay.io/redhat-isv-containers/5f89bb9242e382c85087dce2"
          tag  = local.tag_version_ubi
        }
      },
      "ent.fips1402" = {
        docker = {
          // https://hub.docker.com/r/hashicorp/vault-enterprise-fips
          repo = "hashicorp/vault-enterprise-fips"
          tag  = local.tag_version
        }
        ecr = {
          // https://gallery.ecr.aws/hashicorp/vault-enterprise-fips
          repo = "public.ecr.aws/hashicorp/vault-enterprise-fips"
          tag  = local.tag_version
        }
        quay = {
          // https://catalog.redhat.com/software/containers/hashicorp/vault-enterprise-fips/628d50e37ff70c66a88517ea
          repo = "quay.io/redhat-isv-containers/6283f645d02c6b16d9caeb8e"
          tag  = local.tag_version_ubi
        }
      },
      "ent.hsm" = {
        docker = {
          // https://hub.docker.com/r/hashicorp/vault-enterprise
          repo = "hashicorp/vault-enterprise"
          tag  = local.tag_version
        }
        ecr = {
          // https://gallery.ecr.aws/hashicorp/vault-enterprise
          repo = "public.ecr.aws/hashicorp/vault-enterprise"
          tag  = local.tag_version
        }
        quay = {
          // https://catalog.redhat.com/software/containers/hashicorp/vault-enterprise/5fda5633ac3db90370a26443
          repo = "quay.io/redhat-isv-containers/5f89bb9242e382c85087dce2"
          tag  = local.tag_version_ubi
        }
      },
      "ent.hsm.fips1402" = {
        docker = {
          // https://hub.docker.com/r/hashicorp/vault-enterprise
          repo = "hashicorp/vault-enterprise"
          tag  = local.tag_version
        }
        ecr = {
          // https://gallery.ecr.aws/hashicorp/vault-enterprise
          repo = "public.ecr.aws/hashicorp/vault-enterprise"
          tag  = local.tag_version
        }
        quay = {
          // https://catalog.redhat.com/software/containers/hashicorp/vault-enterprise/5fda5633ac3db90370a26443
          repo = "quay.io/redhat-isv-containers/5f89bb9242e382c85087dce2"
          tag  = local.tag_version_ubi
        }
      },
    }
    // The additional '-0' is required in the constraint since without it, the semver function will
    // only compare the non-pre-release parts (Major.Minor.Patch) of the version and the constraint,
    // which can lead to unexpected results.
    version_includes_build_date = semverconstraint(var.vault_product_version, ">=1.11.0-0")
    version_includes_build_date = semverconstraint(var.vault_version, ">=1.11.0-0")
  }

  step "read_license" {

@@ -44,20 +144,34 @@ scenario "k8s" {
  }

  step "load_docker_image" {
    module = module.load_docker_image
    description = <<-EOF
      Load an verify the tags of a Vault container image into the kind k8s cluster. If no
      var.container_image_archive has been set it will attempt to load an image matching the
      var.vault_version from the matrix.repo.
    EOF
    module     = module.load_docker_image
    depends_on = [step.create_kind_cluster]

    verifies = [
      quality.vault_artifact_container_alpine,
      quality.vault_artifact_container_ubi,
      quality.vault_artifact_container_tags,
    ]

    variables {
      cluster_name = step.create_kind_cluster.cluster_name
      image        = local.image_repo
      tag          = local.image_tag
      archive      = var.vault_docker_image_archive
      image        = local.repo_metadata[matrix.edition][matrix.repo].repo
      tag          = local.repo_metadata[matrix.edition][matrix.repo].tag
      archive      = var.container_image_archive
    }

    depends_on = [step.create_kind_cluster]
  }

  step "deploy_vault" {
    module = module.k8s_deploy_vault
    depends_on = [
      step.load_docker_image,
      step.create_kind_cluster,
    ]

    variables {
      image_tag         = step.load_docker_image.tag

@@ -65,29 +179,14 @@ scenario "k8s" {
      image_repository  = step.load_docker_image.repository
      kubeconfig_base64 = step.create_kind_cluster.kubeconfig_base64
      vault_edition     = matrix.edition
      vault_log_level   = var.vault_log_level
      vault_log_level   = var.log_level
      ent_license       = matrix.edition != "ce" ? step.read_license.license : null
    }

    depends_on = [step.load_docker_image, step.create_kind_cluster]
  }

  step "verify_build_date" {
    skip_step = !local.version_includes_build_date
    module    = module.k8s_verify_build_date

    variables {
      vault_pods        = step.deploy_vault.vault_pods
      vault_root_token  = step.deploy_vault.vault_root_token
      kubeconfig_base64 = step.create_kind_cluster.kubeconfig_base64
      context_name      = step.create_kind_cluster.context_name
    }

    depends_on = [step.deploy_vault]
  }

  step "verify_replication" {
    module = module.k8s_verify_replication
    module     = module.k8s_verify_replication
    depends_on = [step.deploy_vault]

    variables {
      vault_pods        = step.deploy_vault.vault_pods

@@ -95,12 +194,11 @@ scenario "k8s" {
      kubeconfig_base64 = step.create_kind_cluster.kubeconfig_base64
      context_name      = step.create_kind_cluster.context_name
    }

    depends_on = [step.deploy_vault]
  }

  step "verify_version" {
    module = module.k8s_verify_version
    module     = module.k8s_verify_version
    depends_on = [step.deploy_vault]

    variables {
      vault_pods        = step.deploy_vault.vault_pods

@@ -111,12 +209,11 @@ scenario "k8s" {
      check_build_date  = local.version_includes_build_date
      vault_build_date  = var.vault_build_date
    }

    depends_on = [step.deploy_vault]
  }

  step "verify_write_data" {
    module = module.k8s_verify_write_data
    module     = module.k8s_verify_write_data
    depends_on = [step.deploy_vault]

    variables {
      vault_pods        = step.deploy_vault.vault_pods

@@ -124,7 +221,5 @@ scenario "k8s" {
      kubeconfig_base64 = step.create_kind_cluster.kubeconfig_base64
      context_name      = step.create_kind_cluster.context_name
    }

    depends_on = [step.deploy_vault]
  }
}
(hunk from another file; its header was not captured in this mirror)

@@ -6,7 +6,7 @@ terraform "k8s" {

  required_providers {
    enos = {
      source = "registry.terraform.io/hashicorp-forge/enos"
      source = "registry.terraform.io/hashicorp-forge/enos"
    }

    helm = {
(hunks from another file; its header was not captured in this mirror)

@@ -1,37 +1,19 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

variable "vault_image_repository" {
  description = "The repository for the docker image to load, i.e. hashicorp/vault"
variable "container_image_archive" {
  description = "The path to the location of the container image archive to test"
  type        = string
  default     = null
  default     = null # If none is given we'll simply load a container from a repo
}

variable "vault_log_level" {
variable "log_level" {
  description = "The server log level for Vault logs. Supported values (in order of detail) are trace, debug, info, warn, and err."
  type        = string
  default     = "info"
  default     = "trace"
}

variable "vault_product_version" {
  description = "The vault product version to test"
  type        = string
  default     = null
}

variable "vault_product_revision" {
  type        = string
  description = "The vault product revision to test"
  default     = null
}

variable "vault_docker_image_archive" {
  description = "The path to the location of the docker image archive to test"
  type        = string
  default     = null
}

variable "vault_instance_count" {
variable "instance_count" {
  description = "How many instances to create for the Vault cluster"
  type        = number
  default     = 3

@@ -44,7 +26,17 @@ variable "terraform_plugin_cache_dir" {
}

variable "vault_build_date" {
  description = "The build date for the vault docker image"
  description = "The expected vault build date"
  type        = string
  default     = ""
}

variable "vault_revision" {
  type        = string
  description = "The expected vault revision"
}

variable "vault_version" {
  description = "The expected vault version"
  type        = string
}
(hunk from a deleted file; its header was not captured in this mirror)

@@ -1,61 +0,0 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1


terraform {
  required_providers {
    enos = {
      source = "registry.terraform.io/hashicorp-forge/enos"
    }
  }
}

locals {
  vault_instances = toset([for idx in range(var.vault_instance_count) : tostring(idx)])
}

# Get the date from the vault status command - status_date
# Format the original status output with ISO-8601 - formatted_date
# Format the original status output with awk - awk_date
# Compare the formatted outputs - date_comparison
resource "enos_remote_exec" "status_date" {
  for_each = local.vault_instances

  transport = {
    kubernetes = {
      kubeconfig_base64 = var.kubeconfig_base64
      context_name      = var.context_name
      pod               = var.vault_pods[each.key].name
      namespace         = var.vault_pods[each.key].namespace
    }
  }

  inline = ["${var.vault_bin_path} status -format=json | grep build_date | cut -d \\\" -f 4"]
}

resource "enos_remote_exec" "formatted_date" {
  for_each = local.vault_instances

  transport = {
    kubernetes = {
      kubeconfig_base64 = var.kubeconfig_base64
      context_name      = var.context_name
      pod               = var.vault_pods[each.key].name
      namespace         = var.vault_pods[each.key].namespace
    }
  }

  inline = ["date -d \"${enos_remote_exec.status_date[each.key].stdout}\" -D '%Y-%m-%dT%H:%M:%SZ' -I"]
}

resource "enos_local_exec" "awk_date" {
  for_each = local.vault_instances

  inline = ["echo ${enos_remote_exec.status_date[each.key].stdout} | awk -F\"T\" '{printf $1}'"]
}

resource "enos_local_exec" "date_comparison" {
  for_each = local.vault_instances

  inline = ["[[ ${enos_local_exec.awk_date[each.key].stdout} == ${enos_remote_exec.formatted_date[each.key].stdout} ]] && echo \"Verification for build date format ${enos_remote_exec.status_date[each.key].stdout} succeeded\" || \"invalid build_date, must be formatted as RFC 3339: ${enos_remote_exec.status_date[each.key].stdout}\""]
}
(hunk from a deleted file; its header was not captured in this mirror)

@@ -1,36 +0,0 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

variable "vault_instance_count" {
  type        = number
  description = "How many vault instances are in the cluster"
}

variable "vault_pods" {
  type = list(object({
    name      = string
    namespace = string
  }))
  description = "The vault instances for the cluster to verify"
}

variable "vault_bin_path" {
  type        = string
  description = "The path to the vault binary"
  default     = "/bin/vault"
}

variable "vault_root_token" {
  type        = string
  description = "The vault root token"
}

variable "kubeconfig_base64" {
  type        = string
  description = "The base64 encoded version of the Kubernetes configuration file"
}

variable "context_name" {
  type        = string
  description = "The name of the k8s context for Vault"
}