chore: migrate unit tests to nextest and llvm-cov

* Add nextest and llvm-cov for running unit tests.
* Publish test results and coverage output via store_test_results and store_artifacts.
* Upgrade the CI build image and Docker Rust version 1.78.0 → 1.81.0.

Closes SYNC-4611
Author: Nick Shirley, 2025-03-10 10:34:54 -06:00 (committed by GitHub)
Commit: 8c56cae890 (parent: 028d9a7e3e)
9 changed files with 483 additions and 326 deletions
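
Taken together, the changes below install `cargo-nextest` and `cargo-llvm-cov` in CI, run the workspace unit tests twice (once with quota enforcement), and publish JUnit and coverage output from `workflow/test-results` via `store_test_results` and `store_artifacts`. A rough sketch of the equivalent local flow, assuming the MySQL databases from the Makefile defaults are already running:

    # Tooling added by this change
    cargo install cargo-nextest cargo-llvm-cov

    # Same directory the CI make-test-dir command creates
    mkdir -p workflow/test-results

    # Run the workspace unit tests under nextest with llvm-cov instrumentation,
    # then merge the collected coverage into a single JSON summary
    make test_with_coverage
    make merge_coverage_results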

.circleci/config.yml

@@ -60,11 +60,6 @@ commands:
           name: Rust Clippy Spanner
           command: |
             cargo clippy --workspace --all-targets --no-default-features --features=syncstorage-db/spanner --features=py_verifier -- -D warnings
-  cargo-build:
-    steps:
-      - run:
-          name: cargo build
-          command: cargo build
   setup-mysql:
     steps:
       - run:

@@ -91,17 +86,43 @@ commands:
             "$CIRCLE_PROJECT_REPONAME" \
             "$CIRCLE_BUILD_URL" > syncserver/version.json
-  run-tests:
+  install-test-deps:
     steps:
       - run:
-          name: cargo test
-          command: cargo test --workspace --verbose
+          name: Install test dependencies
+          command: cargo install cargo-nextest cargo-llvm-cov
+  make-test-dir:
+    steps:
       - run:
-          name: quota test
-          command: cargo test --workspace --verbose
+          name: Create test-results directory
+          command: mkdir -p workflow/test-results
+  run-unit-tests:
+    steps:
+      - run:
+          name: nextest with code coverage
+          command: make test_with_coverage
+      - run:
+          name: nextest with code coverage (quota enforced)
+          command: make test_with_coverage
           environment:
             SYNC_SYNCSTORAGE__ENFORCE_QUOTA: 1
+  merge-unit-test-coverage:
+    steps:
+      - run:
+          when: always
+          name: Merge llvm-cov results
+          command: make merge_coverage_results
+  store-unit-test-results:
+    steps:
+      - store_test_results:
+          path: workflow/test-results
+      - store_artifacts:
+          path: workflow/test-results
   run-e2e-mysql-tests:
     steps:
       - run:

@@ -168,7 +189,7 @@ commands:
 jobs:
   checks:
     docker:
-      - image: cimg/rust:1.78.0 # RUST_VER
+      - image: cimg/rust:1.81.0 # RUST_VER
         auth:
           username: $DOCKER_USER
           password: $DOCKER_PASS

@@ -185,7 +206,7 @@ jobs:
   build-and-test:
     docker:
-      - image: cimg/rust:1.78.0 # RUST_VER
+      - image: cimg/rust:1.81.0 # RUST_VER
         auth:
           username: $DOCKER_USER
           password: $DOCKER_PASS

@@ -215,13 +236,19 @@ jobs:
       #- setup-sccache
       #- restore-sccache-cache
       - write-version
-      - cargo-build
-      - run-tests
+      - install-test-deps
+      - make-test-dir
+      - run-unit-tests
+      - merge-unit-test-coverage
+      - store-unit-test-results
+      # if the above tests don't run tokenserver-db tests (i.e. using --workspace)
+      # then run-tokenserver-scripts-tests will fail. These tests expect the db to be
+      # configured already, and it appears unit-tests modify the db to the expected state
       - run-tokenserver-scripts-tests
       #- save-sccache-cache
   build-mysql-image:
     docker:
-      - image: cimg/rust:1.78.0 # RUST_VER
+      - image: cimg/rust:1.81.0 # RUST_VER
         auth:
           username: $DOCKER_USER
           password: $DOCKER_PASS

@@ -253,7 +280,7 @@ jobs:
   build-spanner-image:
     docker:
-      - image: cimg/rust:1.78.0 # RUST_VER
+      - image: cimg/rust:1.81.0 # RUST_VER
         auth:
           username: $DOCKER_USER
           password: $DOCKER_PASS
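
The second pass in `run-unit-tests` differs only by an environment variable; outside CI the same thing would look roughly like this:

    # Re-run the coverage target with quota enforcement turned on, as the CI command does
    SYNC_SYNCSTORAGE__ENFORCE_QUOTA=1 make test_with_coverage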

.config/nextest.toml (new file, 23 lines)

@@ -0,0 +1,23 @@
[store]
dir = "target/nextest"
[profile.default]
retries = 0
test-threads = 1
threads-required = 1
status-level = "pass"
final-status-level = "flaky"
failure-output = "immediate"
success-output = "never"
fail-fast = false
slow-timeout = { period = "300s" }
[profile.ci]
fail-fast = false
[profile.ci.junit]
path = "junit.xml"
report-name = "syncstorage-unit-tests"
store-success-output = false
store-failure-output = true
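
The `ci` profile above is what produces the JUnit file that the Makefile later moves into `workflow/test-results`; the Makefile selects it automatically when `CIRCLECI` is set, but it can also be exercised by hand, roughly:

    # Run under the ci profile so nextest writes JUnit output to
    # target/nextest/ci/junit.xml (per the [profile.ci.junit] path above)
    cargo nextest run --workspace --profile ci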

.gitignore (3 lines added)

@@ -37,3 +37,6 @@ tools/tokenserver/loadtests/*.pem
 tools/tokenserver/loadtests/*.pub
 venv
 .vscode/settings.json
+
+# circleci
+workspace

Cargo.lock (generated, 649 lines changed)

File diff suppressed because it is too large.

Dockerfile

@@ -1,5 +1,5 @@
 # NOTE: Ensure builder's Rust version matches CI's in .circleci/config.yml
-FROM docker.io/lukemathwalker/cargo-chef:0.1.67-rust-1.78-bullseye as chef
+FROM docker.io/lukemathwalker/cargo-chef:0.1.67-rust-1.81-bullseye as chef
 WORKDIR /app
 
 FROM chef AS planner

Makefile

@@ -10,6 +10,20 @@ PATH_TO_SYNC_SPANNER_KEYS = `pwd`/service-account.json
 # https://github.com/mozilla-services/server-syncstorage
 PATH_TO_GRPC_CERT = ../server-syncstorage/local/lib/python2.7/site-packages/grpc/_cython/_credentials/roots.pem
 
+# In order to be consumed by the ETE Test Metric Pipeline, files need to follow a strict naming
+# convention: {job_number}__{utc_epoch_datetime}__{workflow}__{test_suite}__results{-index}.xml
+# TODO: update workflow name appropriately
+WORKFLOW := build-deploy
+EPOCH_TIME := $(shell date +"%s")
+TEST_RESULTS_DIR ?= workflow/test-results
+TEST_PROFILE := $(if $(CIRCLECI),ci,default)
+TEST_FILE_PREFIX := $(if $(CIRCLECI),$(CIRCLE_BUILD_NUM)__$(EPOCH_TIME)__$(CIRCLE_PROJECT_REPONAME)__$(WORKFLOW)__)
+UNIT_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)unit__results.xml
+UNIT_COVERAGE_JSON := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)unit__coverage.json
+
+SYNC_SYNCSTORAGE__DATABASE_URL ?= mysql://sample_user:sample_password@localhost/syncstorage_rs
+SYNC_TOKENSERVER__DATABASE_URL ?= mysql://sample_user:sample_password@localhost/tokenserver_rs
+
 SRC_ROOT = $(shell pwd)
 PYTHON_SITE_PACKGES = $(shell $(SRC_ROOT)/venv/bin/python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")

@@ -68,8 +82,22 @@ run_spanner: python
 	RUST_BACKTRACE=full \
 	cargo run --no-default-features --features=syncstorage-db/spanner --features=py_verifier -- --config config/local.toml
 
+.ONESHELL:
 test:
-	SYNC_SYNCSTORAGE__DATABASE_URL=mysql://sample_user:sample_password@localhost/syncstorage_rs \
-	SYNC_TOKENSERVER__DATABASE_URL=mysql://sample_user:sample_password@localhost/tokenserver_rs \
+	SYNC_SYNCSTORAGE__DATABASE_URL=${SYNC_SYNCSTORAGE__DATABASE_URL} \
+	SYNC_TOKENSERVER__DATABASE_URL=${SYNC_TOKENSERVER__DATABASE_URL} \
 	RUST_TEST_THREADS=1 \
-	cargo test --workspace
+	cargo nextest run --workspace --profile ${TEST_PROFILE} $(ARGS)
+
+.ONESHELL:
+test_with_coverage:
+	SYNC_SYNCSTORAGE__DATABASE_URL=${SYNC_SYNCSTORAGE__DATABASE_URL} \
+	SYNC_TOKENSERVER__DATABASE_URL=${SYNC_TOKENSERVER__DATABASE_URL} \
+	RUST_TEST_THREADS=1 \
+	cargo llvm-cov --no-report --summary-only \
+	nextest --workspace --profile ${TEST_PROFILE}; exit_code=$$?
+	mv target/nextest/${TEST_PROFILE}/junit.xml ${UNIT_JUNIT_XML}
+	exit $$exit_code
+
+merge_coverage_results:
+	cargo llvm-cov report --summary-only --json --output-path ${UNIT_COVERAGE_JSON}
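
As a worked example of the naming convention described above, with purely hypothetical CI values, `TEST_FILE_PREFIX` expands along these lines (outside CircleCI the prefix is empty, so the files are simply `unit__results.xml` and `unit__coverage.json`):

    # Worked example of the ETE naming convention with hypothetical CI values
    CIRCLE_BUILD_NUM=1234; EPOCH_TIME=1741620894
    CIRCLE_PROJECT_REPONAME=syncstorage-rs; WORKFLOW=build-deploy
    echo "workflow/test-results/${CIRCLE_BUILD_NUM}__${EPOCH_TIME}__${CIRCLE_PROJECT_REPONAME}__${WORKFLOW}__unit__results.xml"
    # -> workflow/test-results/1234__1741620894__syncstorage-rs__build-deploy__unit__results.xml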

README.md

@@ -241,7 +241,18 @@ We use [env_logger](https://crates.io/crates/env_logger): set the `RUST_LOG` env
 ### Unit tests
 
-`make test` - open the Makefile to adjust your `SYNC_SYNCSTORAGE__DATABASE_URL` as needed.
+You'll need [`nextest`](https://nexte.st/docs/installation/from-source/) and [`llvm-cov`](https://github.com/taiki-e/cargo-llvm-cov?tab=readme-ov-file#installation) installed for full unit test and coverage runs.
+
+    $ cargo install cargo-nextest --locked
+    $ cargo install cargo-llvm-cov --locked
+
+- `make test` - Runs all tests
+- `make test_with_coverage` - Uses `llvm-cov` to run tests and generate [source-based code coverage](https://clang.llvm.org/docs/SourceBasedCodeCoverage.html)
+
+If you need to override the `SYNC_SYNCSTORAGE__DATABASE_URL` or `SYNC_TOKENSERVER__DATABASE_URL` variables, you can modify them in the `Makefile` or set them in your shell:
+
+    $ echo 'export SYNC_SYNCSTORAGE__DATABASE_URL="mysql://sample_user:sample_password@localhost/syncstorage_rs"' >> ~/.zshrc
+    $ echo 'export SYNC_TOKENSERVER__DATABASE_URL="mysql://sample_user:sample_password@localhost/tokenserver_rs"' >> ~/.zshrc
 
 #### Debugging unit test state

@@ -250,7 +261,7 @@ default, we use the diesel test_transaction functionality to ensure test data
 is not committed to the database. Therefore, there is an environment variable
 which can be used to turn off test_transaction.
 
-    SYNC_SYNCSTORAGE__DATABASE_USE_TEST_TRANSACTIONS=false cargo test [testname]
+    SYNC_SYNCSTORAGE__DATABASE_USE_TEST_TRANSACTIONS=false make test ARGS="[testname]"
 
 Note that you will almost certainly want to pass a single test name. When running
 the entire test suite, data from previous tests will cause future tests to fail.
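
Because the Makefile now assigns both database URLs with `?=`, a one-off override can be combined with a single-test run without editing anything; a sketch, where the database name and test filter are placeholders:

    # Run a single test against a non-default database
    SYNC_SYNCSTORAGE__DATABASE_URL="mysql://sample_user:sample_password@localhost/other_db" \
        make test ARGS="[testname]"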

Tokenserver Python token verifier (Rust)

@@ -131,11 +131,7 @@ impl VerifyToken for Verifier {
         let result: Bound<PyAny> = client
             .getattr("verify_token")?
             .call((token,), None)
-            .map_err(|e| {
-                e.print_and_set_sys_last_vars(py);
-                e
-            })?;
+            .inspect_err(|e| e.print_and_set_sys_last_vars(py))?;
         if result.is_none() {
             Ok(None)
         } else {

Tokenserver tokenlib wrapper (Rust)

@@ -32,29 +32,21 @@ impl PyTokenlib {
     ) -> Result<(String, String), TokenserverError> {
         Python::with_gil(|py| {
             // `import tokenlib`
-            let module = PyModule::import_bound(py, "tokenlib").map_err(|e| {
-                e.print_and_set_sys_last_vars(py);
-                e
-            })?;
+            let module = PyModule::import_bound(py, "tokenlib")
+                .inspect_err(|e| e.print_and_set_sys_last_vars(py))?;
             // `kwargs = { 'secret': shared_secret }`
             let kwargs = [("secret", shared_secret)].into_py_dict_bound(py);
             // `token = tokenlib.make_token(plaintext, **kwargs)`
             let token = module
                 .getattr("make_token")?
                 .call((plaintext,), Some(&kwargs))
-                .map_err(|e| {
-                    e.print_and_set_sys_last_vars(py);
-                    e
-                })
+                .inspect_err(|e| e.print_and_set_sys_last_vars(py))
                 .and_then(|x| x.extract())?;
             // `derived_secret = tokenlib.get_derived_secret(token, **kwargs)`
             let derived_secret = module
                 .getattr("get_derived_secret")?
                 .call((&token,), Some(&kwargs))
-                .map_err(|e| {
-                    e.print_and_set_sys_last_vars(py);
-                    e
-                })
+                .inspect_err(|e| e.print_and_set_sys_last_vars(py))
                 .and_then(|x| x.extract())?;
             // `return (token, derived_secret)`
             Ok((token, derived_secret))