From 9e89b6025ee55edcb1830bb9dd47e32deecb0de6 Mon Sep 17 00:00:00 2001 From: Barry Chen Date: Fri, 10 Apr 2026 12:32:57 -0500 Subject: [PATCH] refactor: use configurable running service for Python integration tests (#2186) * refactor: use configurable running service for Python integration tests Instead of configuring and starting server instances within Python as test fixtures, simply test against a running server. A mix of make target and docker-compose changes is used to achieve the same level of test coverage that previously relied on the (re-)configure and (re-)start of the services in conftest.py. A new `make` target, 'run_local_e2e_tests', can be used to run the integration tests locally. However, the stage FxA JWT validation tests in test_e2e.py are excluded. Those tests rely on the JWK configuration of the Token Server and the stage FxA API, making them less "local". Anyone working on that specific integration can certainly invoke those tests themselves. This patch also: - deletes some duplicate docs - moves the docker-compose yamls into a dir named 'docker' * Apply suggestion from @pjenvey Co-authored-by: Philip Jenvey * Apply suggestion from @pjenvey Co-authored-by: Philip Jenvey * Clean-up based on feedback. 
* Remove integration_tests/conftest.py * Prune yamls --------- Co-authored-by: Philip Jenvey --- Makefile | 128 +++++++++++++---- docker-compose.e2e.mysql.yaml | 47 ------ docker-compose.e2e.postgres.yaml | 47 ------ docker-compose.e2e.spanner.yaml | 47 ------ docker/docker-compose.e2e.jwk-cache.yaml | 19 +++ docker/docker-compose.e2e.mysql.yaml | 36 +++++ docker/docker-compose.e2e.no-jwk-cache.yaml | 9 ++ docker/docker-compose.e2e.postgres.yaml | 36 +++++ docker/docker-compose.e2e.spanner.yaml | 35 +++++ .../docker-compose.mysql.yaml | 0 .../docker-compose.postgres.yaml | 0 .../docker-compose.spanner.yaml | 0 docs/src/introduction.md | 74 +--------- docs/src/testing.md | 23 +-- tools/integration_tests/conftest.py | 136 ------------------ tools/integration_tests/test_storage.py | 3 +- tools/integration_tests/test_support.py | 2 +- .../tokenserver/test_authorization.py | 2 - .../integration_tests/tokenserver/test_e2e.py | 2 - .../tokenserver/test_misc.py | 2 - .../tokenserver/test_node_assignment.py | 2 - 21 files changed, 251 insertions(+), 399 deletions(-) delete mode 100644 docker-compose.e2e.mysql.yaml delete mode 100644 docker-compose.e2e.postgres.yaml delete mode 100644 docker-compose.e2e.spanner.yaml create mode 100644 docker/docker-compose.e2e.jwk-cache.yaml create mode 100644 docker/docker-compose.e2e.mysql.yaml create mode 100644 docker/docker-compose.e2e.no-jwk-cache.yaml create mode 100644 docker/docker-compose.e2e.postgres.yaml create mode 100644 docker/docker-compose.e2e.spanner.yaml rename docker-compose.mysql.yaml => docker/docker-compose.mysql.yaml (100%) rename docker-compose.postgres.yaml => docker/docker-compose.postgres.yaml (100%) rename docker-compose.spanner.yaml => docker/docker-compose.spanner.yaml (100%) delete mode 100644 tools/integration_tests/conftest.py diff --git a/Makefile b/Makefile index bd1da5c0..3bdfda40 100644 --- a/Makefile +++ b/Makefile @@ -69,61 +69,121 @@ clean: cargo clean docker_start_mysql: - docker compose -f 
docker-compose.mysql.yaml up -d + docker compose -f docker/docker-compose.mysql.yaml up -d docker_start_mysql_rebuild: - docker compose -f docker-compose.mysql.yaml up --build -d + docker compose -f docker/docker-compose.mysql.yaml up --build -d docker_stop_mysql: - docker compose -f docker-compose.mysql.yaml down + docker compose -f docker/docker-compose.mysql.yaml down docker_start_spanner: - docker compose -f docker-compose.spanner.yaml up -d + docker compose -f docker/docker-compose.spanner.yaml up -d docker_start_spanner_rebuild: - docker compose -f docker-compose.spanner.yaml up --build -d + docker compose -f docker/docker-compose.spanner.yaml up --build -d docker_stop_spanner: - docker compose -f docker-compose.spanner.yaml down + docker compose -f docker/docker-compose.spanner.yaml down .ONESHELL: docker_run_mysql_e2e_tests: + exit_code=0 + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E \ + RESULTS_FILENAME=mysql_no_jwk_integration_results.xml \ docker compose \ - -f docker-compose.mysql.yaml \ - -f docker-compose.e2e.mysql.yaml \ + -f docker/docker-compose.mysql.yaml \ + -f docker/docker-compose.e2e.mysql.yaml \ + -f docker/docker-compose.e2e.no-jwk-cache.yaml \ up \ - --exit-code-from mysql-e2e-tests \ - --abort-on-container-exit; - exit_code=$$?; - docker cp mysql-e2e-tests:/mysql_integration_results.xml ${MYSQL_INT_JUNIT_XML}; - docker cp mysql-e2e-tests:/mysql_no_jwk_integration_results.xml ${MYSQL_NO_JWK_INT_JUNIT_XML}; - exit $$exit_code; + --exit-code-from e2e-tests \ + --abort-on-container-exit || exit_code=$$? 
+ docker cp mysql-e2e-tests:/mysql_no_jwk_integration_results.xml ${MYSQL_NO_JWK_INT_JUNIT_XML} + RESULTS_FILENAME=mysql_integration_results.xml docker compose \ + -f docker/docker-compose.mysql.yaml \ + -f docker/docker-compose.e2e.mysql.yaml \ + -f docker/docker-compose.e2e.jwk-cache.yaml \ + up \ + --exit-code-from e2e-tests \ + --abort-on-container-exit || exit_code=$$? + docker cp mysql-e2e-tests:/mysql_integration_results.xml ${MYSQL_INT_JUNIT_XML} + docker compose \ + -f docker/docker-compose.mysql.yaml \ + -f docker/docker-compose.e2e.mysql.yaml \ + down -v --remove-orphans + exit $$exit_code .ONESHELL: docker_run_postgres_e2e_tests: + exit_code=0 + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E \ + RESULTS_FILENAME=postgres_no_jwk_integration_results.xml \ docker compose \ - -f docker-compose.postgres.yaml \ - -f docker-compose.e2e.postgres.yaml \ + -f docker/docker-compose.postgres.yaml \ + -f docker/docker-compose.e2e.postgres.yaml \ + -f docker/docker-compose.e2e.no-jwk-cache.yaml \ up \ - --exit-code-from postgres-e2e-tests \ - --abort-on-container-exit; - exit_code=$$?; - docker cp postgres-e2e-tests:/postgres_integration_results.xml ${POSTGRES_INT_JUNIT_XML}; - docker cp postgres-e2e-tests:/postgres_no_jwk_integration_results.xml ${POSTGRES_NO_JWK_INT_JUNIT_XML}; - exit $$exit_code; + --exit-code-from e2e-tests \ + --abort-on-container-exit || exit_code=$$? 
+ docker cp postgres-e2e-tests:/postgres_no_jwk_integration_results.xml ${POSTGRES_NO_JWK_INT_JUNIT_XML} + RESULTS_FILENAME=postgres_integration_results.xml docker compose \ + -f docker/docker-compose.postgres.yaml \ + -f docker/docker-compose.e2e.postgres.yaml \ + -f docker/docker-compose.e2e.jwk-cache.yaml \ + up \ + --exit-code-from e2e-tests \ + --abort-on-container-exit || exit_code=$$? + docker cp postgres-e2e-tests:/postgres_integration_results.xml ${POSTGRES_INT_JUNIT_XML} + docker compose \ + -f docker/docker-compose.postgres.yaml \ + -f docker/docker-compose.e2e.postgres.yaml \ + down -v --remove-orphans + exit $$exit_code .ONESHELL: docker_run_spanner_e2e_tests: + exit_code=0 + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N \ + env -u SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E \ + RESULTS_FILENAME=spanner_no_jwk_integration_results.xml \ docker compose \ - -f docker-compose.spanner.yaml \ - -f docker-compose.e2e.spanner.yaml \ + -f docker/docker-compose.spanner.yaml \ + -f docker/docker-compose.e2e.spanner.yaml \ + -f docker/docker-compose.e2e.no-jwk-cache.yaml \ up \ - --exit-code-from spanner-e2e-tests \ - --abort-on-container-exit; - exit_code=$$?; - docker cp spanner-e2e-tests:/spanner_integration_results.xml ${SPANNER_INT_JUNIT_XML}; - docker cp spanner-e2e-tests:/spanner_no_jwk_integration_results.xml ${SPANNER_NO_JWK_INT_JUNIT_XML}; - exit $$exit_code; + --exit-code-from e2e-tests \ + --abort-on-container-exit || exit_code=$$? 
+ docker cp spanner-e2e-tests:/spanner_no_jwk_integration_results.xml ${SPANNER_NO_JWK_INT_JUNIT_XML} + RESULTS_FILENAME=spanner_integration_results.xml docker compose \ + -f docker/docker-compose.spanner.yaml \ + -f docker/docker-compose.e2e.spanner.yaml \ + -f docker/docker-compose.e2e.jwk-cache.yaml \ + up \ + --exit-code-from e2e-tests \ + --abort-on-container-exit || exit_code=$$? + docker cp spanner-e2e-tests:/spanner_integration_results.xml ${SPANNER_INT_JUNIT_XML} + docker compose \ + -f docker/docker-compose.spanner.yaml \ + -f docker/docker-compose.e2e.spanner.yaml \ + down -v --remove-orphans + exit $$exit_code run_mysql: $(INSTALL_STAMP) # See https://github.com/PyO3/pyo3/issues/1741 for discussion re: why we need to set the @@ -180,6 +240,14 @@ run_token_server_integration_tests: poetry install --no-root --without dev poetry run pytest tools/tokenserver --junit-xml=${INTEGRATION_JUNIT_XML} +run_local_e2e_tests: + PYTHONPATH=$(PWD)/tools \ + SYNC_MASTER_SECRET=$${SYNC_MASTER_SECRET:-secret0} \ + SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL=$${SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL:-http://localhost:6000} \ + TOKENSERVER_HOST=$${TOKENSERVER_HOST:-http://localhost:8000} \ + poetry -C tools/integration_tests \ + run pytest . --ignore=tokenserver/test_e2e.py + .PHONY: install install: $(INSTALL_STAMP) ## Install dependencies with poetry $(INSTALL_STAMP): pyproject.toml poetry.lock diff --git a/docker-compose.e2e.mysql.yaml b/docker-compose.e2e.mysql.yaml deleted file mode 100644 index 38b87958..00000000 --- a/docker-compose.e2e.mysql.yaml +++ /dev/null @@ -1,47 +0,0 @@ -services: - mysql-e2e-tests: - container_name: mysql-e2e-tests - depends_on: - sync-db: - condition: service_healthy - mock-fxa-server: - condition: service_started - tokenserver-db: - condition: service_healthy - # this depend is to avoid migration collisions. - # the syncserver isn't actually used for the tests, - # but collisions can happen particularly in CI. 
- syncserver: - condition: service_healthy - image: app:build - privileged: true - user: root - environment: - JWK_CACHE_DISABLED: false - MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 - SYNC_HOST: 0.0.0.0 - SYNC_MASTER_SECRET: secret0 - SYNC_SYNCSTORAGE__DATABASE_URL: mysql://test:test@sync-db:3306/syncstorage - SYNC_TOKENSERVER__DATABASE_URL: mysql://test:test@tokenserver-db:3306/tokenserver - SYNC_TOKENSERVER__ENABLED: "true" - SYNC_TOKENSERVER__NODE_TYPE: mysql - SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net - SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 - SYNC_TOKENSERVER__RUN_MIGRATIONS: "true" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY: "RSA" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG: "RS256" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID: "20190730-15e473fd" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT: "1564502400" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE: "sig" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N: "15OpVGC7ws_SlU0gRbRh1Iwo8_gR8ElX2CDnbN5blKyXLg-ll0ogktoDXc-tDvTabRTxi7AXU0wWQ247odhHT47y5uz0GASYXdfPponynQ_xR9CpNn1eEL1gvDhQN9rfPIzfncl8FUi9V4WMd5f600QC81yDw9dX-Z8gdkru0aDaoEKF9-wU2TqrCNcQdiJCX9BISotjz_9cmGwKXFEekQNJWBeRQxH2bUmgwUK0HaqwW9WbYOs-zstNXXWFsgK9fbDQqQeGehXLZM4Cy5Mgl_iuSvnT3rLzPo2BmlxMLUvRqBx3_v8BTtwmNGA0v9O0FJS_mnDq0Iue0Dz8BssQCQ" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E: "AQAB" - TOKENSERVER_HOST: http://localhost:8000 - SQLALCHEMY_SILENCE_UBER_WARNING: 1 - entrypoint: > - /bin/sh -c " - exit_code=0; - PYTHONPATH=/app pytest /app/tools/integration_tests/ --junit-xml=/mysql_integration_results.xml || exit_code=$$?; - export JWK_CACHE_DISABLED=true; - PYTHONPATH=/app pytest /app/tools/integration_tests/ --junit-xml=/mysql_no_jwk_integration_results.xml || exit_code=$$?; - exit $$exit_code; - " diff --git a/docker-compose.e2e.postgres.yaml b/docker-compose.e2e.postgres.yaml deleted file mode 100644 index ab89778a..00000000 --- a/docker-compose.e2e.postgres.yaml +++ /dev/null @@ -1,47 
+0,0 @@ -services: - postgres-e2e-tests: - container_name: postgres-e2e-tests - depends_on: - mock-fxa-server: - condition: service_started - syncserver: - condition: service_healthy - sync-db: - condition: service_healthy - tokenserver-db: - condition: service_healthy - image: app:build - privileged: true - user: root - environment: - # Setting this to false will delete any of those keys before starting - # the syncserver and starting the test. This can be set/passed - # in from CircleCI when calling `docker-compose -f docker-compose.e2e.postgres.yaml` - JWK_CACHE_DISABLED: false - MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 - SYNC_HOST: 0.0.0.0 - SYNC_MASTER_SECRET: secret0 - SYNC_SYNCSTORAGE__DATABASE_URL: postgres://test:test@sync-db:5432/syncstorage - SYNC_TOKENSERVER__DATABASE_URL: postgres://test:test@tokenserver-db:5432/tokenserver - SYNC_TOKENSERVER__ENABLED: "true" - SYNC_TOKENSERVER__NODE_TYPE: postgres - SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net - SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 - SYNC_TOKENSERVER__RUN_MIGRATIONS: "true" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY: "RSA" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG: "RS256" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID: "20190730-15e473fd" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT: "1564502400" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE: "sig" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N: "15OpVGC7ws_SlU0gRbRh1Iwo8_gR8ElX2CDnbN5blKyXLg-ll0ogktoDXc-tDvTabRTxi7AXU0wWQ247odhHT47y5uz0GASYXdfPponynQ_xR9CpNn1eEL1gvDhQN9rfPIzfncl8FUi9V4WMd5f600QC81yDw9dX-Z8gdkru0aDaoEKF9-wU2TqrCNcQdiJCX9BISotjz_9cmGwKXFEekQNJWBeRQxH2bUmgwUK0HaqwW9WbYOs-zstNXXWFsgK9fbDQqQeGehXLZM4Cy5Mgl_iuSvnT3rLzPo2BmlxMLUvRqBx3_v8BTtwmNGA0v9O0FJS_mnDq0Iue0Dz8BssQCQ" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E: "AQAB" - TOKENSERVER_HOST: http://localhost:8000 - SQLALCHEMY_SILENCE_UBER_WARNING: 1 - entrypoint: > - /bin/sh -c " - exit_code=0; - PYTHONPATH=/app pytest 
/app/tools/integration_tests/ --junit-xml=/postgres_integration_results.xml || exit_code=$$?; - export JWK_CACHE_DISABLED=true; - PYTHONPATH=/app pytest /app/tools/integration_tests/ --junit-xml=/postgres_no_jwk_integration_results.xml || exit_code=$$?; - exit $$exit_code; - " diff --git a/docker-compose.e2e.spanner.yaml b/docker-compose.e2e.spanner.yaml deleted file mode 100644 index 262e0fb8..00000000 --- a/docker-compose.e2e.spanner.yaml +++ /dev/null @@ -1,47 +0,0 @@ -services: - spanner-e2e-tests: - container_name: spanner-e2e-tests - depends_on: - mock-fxa-server: - condition: service_started - syncserver: - condition: service_healthy - tokenserver-db: - condition: service_healthy - image: app:build - privileged: true - user: root - environment: - # Some tests can run without the `FXA_OAUTH...` vars. - # Setting this to false will delete any of those keys before starting - # the syncserver and startging the test. This can be set/passed - # in from CircleCI when calling `docker-compose -f docker-compose.e2e.spanner.yaml` - JWK_CACHE_DISABLED: false - MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 - SYNC_HOST: 0.0.0.0 - SYNC_MASTER_SECRET: secret0 - SYNC_SYNCSTORAGE__DATABASE_URL: spanner://projects/test-project/instances/test-instance/databases/test-database - SYNC_SYNCSTORAGE__SPANNER_EMULATOR_HOST: sync-db:9010 - SYNC_TOKENSERVER__DATABASE_URL: mysql://test:test@tokenserver-db:3306/tokenserver - SYNC_TOKENSERVER__ENABLED: "true" - SYNC_TOKENSERVER__NODE_TYPE: spanner - SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net - SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 - SYNC_TOKENSERVER__RUN_MIGRATIONS: "true" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY: "RSA" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG: "RS256" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID: "20190730-15e473fd" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT: "1564502400" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE: "sig" - 
SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N: "15OpVGC7ws_SlU0gRbRh1Iwo8_gR8ElX2CDnbN5blKyXLg-ll0ogktoDXc-tDvTabRTxi7AXU0wWQ247odhHT47y5uz0GASYXdfPponynQ_xR9CpNn1eEL1gvDhQN9rfPIzfncl8FUi9V4WMd5f600QC81yDw9dX-Z8gdkru0aDaoEKF9-wU2TqrCNcQdiJCX9BISotjz_9cmGwKXFEekQNJWBeRQxH2bUmgwUK0HaqwW9WbYOs-zstNXXWFsgK9fbDQqQeGehXLZM4Cy5Mgl_iuSvnT3rLzPo2BmlxMLUvRqBx3_v8BTtwmNGA0v9O0FJS_mnDq0Iue0Dz8BssQCQ" - SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E: "AQAB" - TOKENSERVER_HOST: http://localhost:8000 - SQLALCHEMY_SILENCE_UBER_WARNING: 1 - entrypoint: > - /bin/sh -c " - exit_code=0; - PYTHONPATH=/app pytest /app/tools/integration_tests/ --junit-xml=/spanner_integration_results.xml || exit_code=$$?; - export JWK_CACHE_DISABLED=true; - PYTHONPATH=/app pytest /app/tools/integration_tests/ --junit-xml=/spanner_no_jwk_integration_results.xml || exit_code=$$?; - exit $$exit_code; - " diff --git a/docker/docker-compose.e2e.jwk-cache.yaml b/docker/docker-compose.e2e.jwk-cache.yaml new file mode 100644 index 00000000..7b8232ee --- /dev/null +++ b/docker/docker-compose.e2e.jwk-cache.yaml @@ -0,0 +1,19 @@ +services: + syncserver: + environment: + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY: "RSA" + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG: "RS256" + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID: "20190730-15e473fd" + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT: "1564502400" + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE: "sig" + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N: "15OpVGC7ws_SlU0gRbRh1Iwo8_gR8ElX2CDnbN5blKyXLg-ll0ogktoDXc-tDvTabRTxi7AXU0wWQ247odhHT47y5uz0GASYXdfPponynQ_xR9CpNn1eEL1gvDhQN9rfPIzfncl8FUi9V4WMd5f600QC81yDw9dX-Z8gdkru0aDaoEKF9-wU2TqrCNcQdiJCX9BISotjz_9cmGwKXFEekQNJWBeRQxH2bUmgwUK0HaqwW9WbYOs-zstNXXWFsgK9fbDQqQeGehXLZM4Cy5Mgl_iuSvnT3rLzPo2BmlxMLUvRqBx3_v8BTtwmNGA0v9O0FJS_mnDq0Iue0Dz8BssQCQ" + SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E: "AQAB" + SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL: http://mock-fxa-server:6000 + e2e-tests: + entrypoint: + - /bin/sh + - -c + - >- 
+ PYTHONPATH=/app + pytest /app/tools/integration_tests/ + --junit-xml=/${RESULTS_FILENAME} diff --git a/docker/docker-compose.e2e.mysql.yaml b/docker/docker-compose.e2e.mysql.yaml new file mode 100644 index 00000000..4aa6b4fa --- /dev/null +++ b/docker/docker-compose.e2e.mysql.yaml @@ -0,0 +1,36 @@ +services: + syncserver: + environment: + SYNC_TOKENSERVER__ENABLED: "true" + SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net + SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 + SYNC_TOKENSERVER__RUN_MIGRATIONS: "true" + e2e-tests: + container_name: mysql-e2e-tests + depends_on: + mock-fxa-server: + condition: service_started + syncserver: + condition: service_healthy + sync-db: + condition: service_healthy + tokenserver-db: + condition: service_healthy + image: app:build + privileged: true + user: root + environment: + SYNC_SERVER_URL: http://syncserver:8000 + TOKENSERVER_HOST: http://syncserver:8000 + SYNC_MASTER_SECRET: secret0 + SYNC_SYNCSTORAGE__DATABASE_URL: mysql://test:test@sync-db:3306/syncstorage + SYNC_TOKENSERVER__DATABASE_URL: mysql://test:test@tokenserver-db:3306/tokenserver + SQLALCHEMY_SILENCE_UBER_WARNING: 1 + RESULTS_FILENAME: ${RESULTS_FILENAME:-mysql_integration_results.xml} + entrypoint: + - /bin/sh + - -c + - >- + PYTHONPATH=/app + pytest /app/tools/integration_tests/ + --junit-xml=/${RESULTS_FILENAME} diff --git a/docker/docker-compose.e2e.no-jwk-cache.yaml b/docker/docker-compose.e2e.no-jwk-cache.yaml new file mode 100644 index 00000000..16003483 --- /dev/null +++ b/docker/docker-compose.e2e.no-jwk-cache.yaml @@ -0,0 +1,9 @@ +services: + e2e-tests: + entrypoint: + - /bin/sh + - -c + - >- + PYTHONPATH=/app + pytest /app/tools/integration_tests/tokenserver/test_e2e.py + --junit-xml=/${RESULTS_FILENAME} diff --git a/docker/docker-compose.e2e.postgres.yaml b/docker/docker-compose.e2e.postgres.yaml new file mode 100644 index 00000000..a862fa91 --- /dev/null +++ b/docker/docker-compose.e2e.postgres.yaml @@ -0,0 +1,36 @@ +services: 
+ syncserver: + environment: + SYNC_TOKENSERVER__ENABLED: "true" + SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net + SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 + SYNC_TOKENSERVER__RUN_MIGRATIONS: "true" + e2e-tests: + container_name: postgres-e2e-tests + depends_on: + mock-fxa-server: + condition: service_started + syncserver: + condition: service_healthy + sync-db: + condition: service_healthy + tokenserver-db: + condition: service_healthy + image: app:build + privileged: true + user: root + environment: + SYNC_SERVER_URL: http://syncserver:8000 + TOKENSERVER_HOST: http://syncserver:8000 + SYNC_MASTER_SECRET: secret0 + SYNC_SYNCSTORAGE__DATABASE_URL: postgres://test:test@sync-db:5432/syncstorage + SYNC_TOKENSERVER__DATABASE_URL: postgres://test:test@tokenserver-db:5432/tokenserver + SQLALCHEMY_SILENCE_UBER_WARNING: 1 + RESULTS_FILENAME: ${RESULTS_FILENAME:-postgres_integration_results.xml} + entrypoint: + - /bin/sh + - -c + - >- + PYTHONPATH=/app + pytest /app/tools/integration_tests/ + --junit-xml=/${RESULTS_FILENAME} diff --git a/docker/docker-compose.e2e.spanner.yaml b/docker/docker-compose.e2e.spanner.yaml new file mode 100644 index 00000000..235637f1 --- /dev/null +++ b/docker/docker-compose.e2e.spanner.yaml @@ -0,0 +1,35 @@ +services: + syncserver: + environment: + SYNC_TOKENSERVER__ENABLED: "true" + SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net + SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 + SYNC_TOKENSERVER__RUN_MIGRATIONS: "true" + e2e-tests: + container_name: spanner-e2e-tests + depends_on: + mock-fxa-server: + condition: service_started + syncserver: + condition: service_healthy + tokenserver-db: + condition: service_healthy + image: app:build + privileged: true + user: root + environment: + SYNC_SERVER_URL: http://syncserver:8000 + TOKENSERVER_HOST: http://syncserver:8000 + SYNC_MASTER_SECRET: secret0 + SYNC_SYNCSTORAGE__DATABASE_URL: 
spanner://projects/test-project/instances/test-instance/databases/test-database + SYNC_SYNCSTORAGE__SPANNER_EMULATOR_HOST: sync-db:9010 + SYNC_TOKENSERVER__DATABASE_URL: mysql://test:test@tokenserver-db:3306/tokenserver + SQLALCHEMY_SILENCE_UBER_WARNING: 1 + RESULTS_FILENAME: ${RESULTS_FILENAME:-spanner_integration_results.xml} + entrypoint: + - /bin/sh + - -c + - >- + PYTHONPATH=/app + pytest /app/tools/integration_tests/ + --junit-xml=/${RESULTS_FILENAME} diff --git a/docker-compose.mysql.yaml b/docker/docker-compose.mysql.yaml similarity index 100% rename from docker-compose.mysql.yaml rename to docker/docker-compose.mysql.yaml diff --git a/docker-compose.postgres.yaml b/docker/docker-compose.postgres.yaml similarity index 100% rename from docker-compose.postgres.yaml rename to docker/docker-compose.postgres.yaml diff --git a/docker-compose.spanner.yaml b/docker/docker-compose.spanner.yaml similarity index 100% rename from docker-compose.spanner.yaml rename to docker/docker-compose.spanner.yaml diff --git a/docs/src/introduction.md b/docs/src/introduction.md index 42affe0e..2fa2836a 100644 --- a/docs/src/introduction.md +++ b/docs/src/introduction.md @@ -433,14 +433,14 @@ This requires access to [Google Cloud Rust (raw)](https://crates.io/crates/googl 1. Make sure you have [Docker installed](https://docs.docker.com/install/) locally. 2. Copy the contents of mozilla-rust-sdk into top level root dir here. -3. Comment out the `image` value under `syncserver` in either docker-compose.mysql.yml or docker-compose.spanner.yml (depending on which database backend you want to run), and add this instead: +3. Comment out the `image` value under `syncserver` in either docker/docker-compose.mysql.yaml or docker/docker-compose.spanner.yaml (depending on which database backend you want to run), and add this instead: ```yml build: context: . ``` -4. If you are using MySQL, adjust the MySQL db credentials in docker-compose.mysql.yml to match your local setup. +4. 
If you are using MySQL, adjust the MySQL db credentials in docker/docker-compose.mysql.yaml to match your local setup. 5. `make docker_start_mysql` or `make docker_start_spanner` - You can verify it's working by visiting [localhost:8000/\_\_heartbeat\_\_](http://localhost:8000/__heartbeat__) ### Connecting to Firefox @@ -477,76 +477,6 @@ If you see a problem related to `libssl` you may need to specify the `cargo` opt - If you're having trouble working with Sentry to create releases, try authenticating using their self hosted server option that's outlined [here](https://docs.sentry.io/product/cli/configuration/) Ie, `sentry-cli --url https://selfhosted.url.com/ login`. It's also recommended to create a `.sentryclirc` config file. See [this example](https://github.com/mozilla-services/syncstorage-rs/blob/master/.sentryclirc.example) for the config values you'll need. -## Tests - -### Unit tests - -Run unit tests for a specific database backend using one of the following make targets: - -- MySQL: `make test` or `make test_with_coverage` -- Postgres: `make postgres_test_with_coverage` -- Spanner: `make spanner_test_with_coverage` - -These commands will run the Rust test suite using cargo-nextest and generate coverage reports using cargo-llvm-cov. - -### End-to-End tests - -End-to-end (E2E) tests validate the complete integration of syncstorage-rs with a real database backend and mock Firefox Accounts server. These tests run the full Python integration test suite located in [tools/integration_tests/](../../tools/integration_tests/). - -#### Running E2E Tests Locally - -To run E2E tests, you'll need to: - -1. Build a Docker image for your target backend using the appropriate Makefile target -2. 
Run the E2E test suite using docker-compose - -The E2E tests are available for three database backends: - -**MySQL:** -```bash -make docker_run_mysql_e2e_tests -``` - -**Postgres:** -```bash -make docker_run_postgres_e2e_tests -``` - -**Spanner:** -```bash -make docker_run_spanner_e2e_tests -``` - -Each E2E test run: -1. Starts the required services (database, mock FxA server, syncserver) using docker-compose -2. Runs the Python integration tests with JWK caching enabled -3. Runs the tests again with JWK caching disabled -4. Outputs JUnit XML test results - -The E2E test configurations are defined in: -- [docker-compose.e2e.mysql.yaml](../../docker-compose.e2e.mysql.yaml) -- [docker-compose.e2e.postgres.yaml](../../docker-compose.e2e.postgres.yaml) -- [docker-compose.e2e.spanner.yaml](../../docker-compose.e2e.spanner.yaml) - -These compose files extend the base service definitions from their corresponding `docker-compose..yaml` files. - -#### How E2E Tests Work - -The E2E tests: -- Run in a containerized environment with all dependencies (database, syncserver, mock FxA) -- Execute integration tests from [tools/integration_tests/](../../tools/integration_tests/) using pytest -- Test OAuth token validation with both cached and non-cached JWKs -- Validate tokenserver functionality, including user allocation and token generation -- Test syncstorage operations like BSO creation, retrieval, and deletion - -#### CI/CD - -In GitHub Actions, E2E tests run as part of the CI/CD pipeline for each backend: -- [.github/workflows/mysql.yml](../../.github/workflows/mysql.yml) - `mysql-e2e-tests` job -- [.github/workflows/postgres.yml](../../.github/workflows/postgres.yml) - `postgres-e2e-tests` job -- [.github/workflows/spanner.yml](../../.github/workflows/spanner.yml) - `spanner-e2e-tests` job - -Each workflow builds a Docker image, runs unit tests, then executes E2E tests using the same make targets described above. 
- [System Requirements](#system-requirements) - [Local Setup](#local-setup) diff --git a/docs/src/testing.md b/docs/src/testing.md index 8dc2ba70..5dead5cf 100644 --- a/docs/src/testing.md +++ b/docs/src/testing.md @@ -73,18 +73,19 @@ make docker_run_postgres_e2e_tests make docker_run_spanner_e2e_tests ``` -Each E2E test run: -1. Starts the required services (database, mock FxA server, syncserver) using docker-compose -2. Runs the Python integration tests with JWK caching enabled -3. Runs the tests again with JWK caching disabled -4. Outputs JUnit XML test results +Each E2E test run performs two separate docker-compose invocations: +1. **No Local JWK run**: starts services with no local JWK configured, runs only `test_e2e.py` against FxA stage +2. **Local JWK & Mocked FxA run**: runs all integration tests using a mocked local FxA server and local JWK; the local JWK affects only the tests in `test_e2e.py` +3. Outputs JUnit XML test results for each run The E2E test configurations are defined in: -- [docker-compose.e2e.mysql.yaml](../../docker-compose.e2e.mysql.yaml) -- [docker-compose.e2e.postgres.yaml](../../docker-compose.e2e.postgres.yaml) -- [docker-compose.e2e.spanner.yaml](../../docker-compose.e2e.spanner.yaml) +- [docker/docker-compose.e2e.mysql.yaml](../../docker/docker-compose.e2e.mysql.yaml) - base +- [docker/docker-compose.e2e.postgres.yaml](../../docker/docker-compose.e2e.postgres.yaml) - base +- [docker/docker-compose.e2e.spanner.yaml](../../docker/docker-compose.e2e.spanner.yaml) - base +- [docker/docker-compose.e2e.jwk-cache.yaml](../../docker/docker-compose.e2e.jwk-cache.yaml) - JWK + mock FxA overlay (shared by all backends) +- [docker/docker-compose.e2e.no-jwk-cache.yaml](../../docker/docker-compose.e2e.no-jwk-cache.yaml) - FxA stage overlay (shared by all backends) -These compose files extend the base service definitions from their corresponding `docker-compose..yaml` files. +These compose files extend the base service definitions from their corresponding `docker/docker-compose..yaml` files. Syncserver configuration (JWK, FxA OAuth URL, CORS) is defined in the `syncserver` block of the e2e overlays. #### How E2E Tests Work diff --git a/tools/integration_tests/conftest.py b/tools/integration_tests/conftest.py deleted file mode 100644 index 15566440..00000000 --- a/tools/integration_tests/conftest.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Pytest configuration and fixtures for integration tests.""" - -import os -import psutil -import signal -import subprocess -import time -import pytest -import requests # type: ignore[import-untyped] -import logging - -DEBUG_BUILD = "target/debug/syncserver" -RELEASE_BUILD = "/app/bin/syncserver" -# max number of attempts to check server heartbeat -SYNC_SERVER_STARTUP_MAX_ATTEMPTS = 35 -JWK_CACHE_DISABLED = os.environ.get("JWK_CACHE_DISABLED") - -logger = logging.getLogger("tokenserver.scripts.conftest") - -# Local setup for fixtures - - -def _terminate_process(process): - """Gracefully terminate the process and its children.""" - proc = psutil.Process(pid=process.pid) - child_proc = proc.children(recursive=True) - for p in [proc] + child_proc: - os.kill(p.pid, signal.SIGTERM) - process.wait() - - -def _wait_for_server_startup(max_attempts=SYNC_SERVER_STARTUP_MAX_ATTEMPTS): - """Wait for the __heartbeat__ endpoint to return a 200. - - Pause for 1 second between attempts. Raise a RuntimeError if the server - does not start after the specific number of attempts. 
- """ - itter = 0 - while True: - if itter >= max_attempts: - raise RuntimeError("Server failed to start within the timeout period.") - try: - req = requests.get("http://localhost:8000/__heartbeat__", timeout=2) - if req.status_code == 200: - break - except requests.exceptions.RequestException as e: - logger.warning("Connection failed: %s", e) - time.sleep(1) - itter += 1 - - -def _start_server(): - """Start the syncserver process, wait for it to be running, and return the handle.""" - target_binary = None - if os.path.exists(DEBUG_BUILD): - target_binary = DEBUG_BUILD - elif os.path.exists(RELEASE_BUILD): - target_binary = RELEASE_BUILD - else: - raise RuntimeError("Neither {DEBUG_BUILD} nor {RELEASE_BUILD} were found.") - - server_proc = subprocess.Popen( - [target_binary], - text=True, - env=os.environ, - ) - - _wait_for_server_startup() - - return server_proc - - -def _server_manager(): - """Gracefully start and stop the server as a context manager.""" - server_process = _start_server() - try: - yield server_process - finally: - _terminate_process(server_process) - - -def _set_local_test_env_vars(): - """Set environment variables for local testing. - - This function sets the necessary environment variables for the syncserver. - """ - os.environ.setdefault("SYNC_MASTER_SECRET", "secret0") - os.environ.setdefault("SYNC_CORS_MAX_AGE", "555") - os.environ.setdefault("SYNC_CORS_ALLOWED_ORIGIN", "*") - os.environ["MOZSVC_TEST_REMOTE"] = "localhost" - os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = os.environ[ - "MOCK_FXA_SERVER_URL" - ] - - -# Fixtures - - -@pytest.fixture(scope="session") -def setup_server_local_testing(): - """Set up the server for local testing. - - This fixture sets the necessary environment variables and - starts the server. - """ - _set_local_test_env_vars() - yield from _server_manager() - - -@pytest.fixture(scope="session") -def setup_server_end_to_end_testing(): - """Set up the server for end-to-end testing. 
- - This fixture sets the necessary environment variables and - starts the server. - """ - _set_local_test_env_vars() - # debatable if this should ONLY be here since it was only - # done against the "run_end_to_end_tests" prior, of if we - # just do it in _set_local_test_env_vars... - if JWK_CACHE_DISABLED: - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E"] - - # Set OAuth-specific environment variables - os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = ( - "https://oauth.stage.mozaws.net" - ) - - # Start the server - yield from _server_manager() diff --git a/tools/integration_tests/test_storage.py b/tools/integration_tests/test_storage.py index bd3c7a1d..8f020d6b 100644 --- a/tools/integration_tests/test_storage.py +++ b/tools/integration_tests/test_storage.py @@ -76,7 +76,6 @@ def randtext(size=10): return "".join([random.choice(_ASCII) for i in range(size)]) -@pytest.mark.usefixtures("setup_server_local_testing") class TestStorage(StorageFunctionalTestCase): """Storage testcases that only use the web API. 
@@ -2311,7 +2310,7 @@ class TestStorage(StorageFunctionalTestCase): }, ) - self.assertEqual(int(res.headers["access-control-max-age"]), 555) + self.assertGreater(int(res.headers["access-control-max-age"]), 0) self.assertEqual(res.headers["access-control-allow-origin"], "localhost") def test_cors_allows_any_origin(self): diff --git a/tools/integration_tests/test_support.py b/tools/integration_tests/test_support.py index 6adaf094..18c1c5ad 100644 --- a/tools/integration_tests/test_support.py +++ b/tools/integration_tests/test_support.py @@ -358,7 +358,7 @@ class FunctionalTestCase(TestCase): # delete the ones that don't work with distant = True along # with the need for self.distant. self.distant = False - self.host_url = "http://localhost:8000" + self.host_url = os.environ.get("SYNC_SERVER_URL", "http://localhost:8000") # This call implicitly commits the configurator. We probably still # want it for the side effects. self.config.make_wsgi_app() diff --git a/tools/integration_tests/tokenserver/test_authorization.py b/tools/integration_tests/tokenserver/test_authorization.py index df0392b8..91a371e2 100644 --- a/tools/integration_tests/tokenserver/test_authorization.py +++ b/tools/integration_tests/tokenserver/test_authorization.py @@ -3,12 +3,10 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Authorization integration tests for the tokenserver.""" -import pytest import unittest from integration_tests.tokenserver.test_support import TestCase -@pytest.mark.usefixtures("setup_server_local_testing") class TestAuthorization(TestCase, unittest.TestCase): """Authorization integration tests for the tokenserver.""" diff --git a/tools/integration_tests/tokenserver/test_e2e.py b/tools/integration_tests/tokenserver/test_e2e.py index 2fe2821e..34e2cc14 100644 --- a/tools/integration_tests/tokenserver/test_e2e.py +++ b/tools/integration_tests/tokenserver/test_e2e.py @@ -7,7 +7,6 @@ from base64 import urlsafe_b64decode import hmac import json import jwt -import pytest import random import string import time @@ -36,7 +35,6 @@ PASSWORD_LENGTH = 32 SCOPE = "https://identity.mozilla.com/apps/oldsync" -@pytest.mark.usefixtures("setup_server_end_to_end_testing") class TestE2e(TestCase, unittest.TestCase): """End-to-end integration tests using real FxA accounts.""" diff --git a/tools/integration_tests/tokenserver/test_misc.py b/tools/integration_tests/tokenserver/test_misc.py index 7664ee94..4c7a012d 100644 --- a/tools/integration_tests/tokenserver/test_misc.py +++ b/tools/integration_tests/tokenserver/test_misc.py @@ -3,7 +3,6 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Miscellaneous integration tests for the tokenserver.""" -import pytest import unittest from integration_tests.tokenserver.test_support import TestCase @@ -11,7 +10,6 @@ from integration_tests.tokenserver.test_support import TestCase MAX_GENERATION = 9223372036854775807 -@pytest.mark.usefixtures("setup_server_local_testing") class TestMisc(TestCase, unittest.TestCase): """Miscellaneous tokenserver integration tests.""" diff --git a/tools/integration_tests/tokenserver/test_node_assignment.py b/tools/integration_tests/tokenserver/test_node_assignment.py index 8eb4b628..a7d66dda 100644 --- a/tools/integration_tests/tokenserver/test_node_assignment.py +++ b/tools/integration_tests/tokenserver/test_node_assignment.py @@ -3,14 +3,12 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. """Node assignment integration tests for the tokenserver.""" -import pytest import unittest from integration_tests.tokenserver.test_support import TestCase from sqlalchemy.sql import text as sqltext -@pytest.mark.usefixtures("setup_server_local_testing") class TestNodeAssignment(TestCase, unittest.TestCase): """Node assignment integration tests for the tokenserver."""