diff --git a/.circleci/config.yml b/.circleci/config.yml index 1c10173a..82ea71d8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -101,7 +101,7 @@ commands: steps: - run: name: Install test dependencies - command: cargo install cargo-nextest cargo-llvm-cov + command: cargo install --locked cargo-nextest cargo-llvm-cov make-test-dir: steps: @@ -145,34 +145,18 @@ commands: make run_token_server_integration_tests environment: SYNCSTORAGE_RS_IMAGE: app:build - run-e2e-mysql-tests: + run-e2e-tests: + parameters: + db: + type: enum + enum: ["mysql", "spanner"] steps: - run: - name: e2e tests (syncstorage mysql) + name: e2e tests (syncstorage << parameters.db >>) command: > - /usr/local/bin/docker-compose - -f docker-compose.mysql.yaml - -f docker-compose.e2e.mysql.yaml - up - --exit-code-from mysql-e2e-tests - --abort-on-container-exit + make docker_run_<< parameters.db >>_e2e_tests environment: - SYNCSTORAGE_RS_IMAGE: app:build - - - run-e2e-spanner-tests: - steps: - - run: - name: e2e tests (syncstorage spanner) - command: > - /usr/local/bin/docker-compose - -f docker-compose.spanner.yaml - -f docker-compose.e2e.spanner.yaml - up - --exit-code-from spanner-e2e-tests - --abort-on-container-exit - environment: - SYNCSTORAGE_RS_IMAGE: app:build + SYNCSTORAGE_RS_IMAGE: app:build upload-to-gcs: parameters: @@ -283,7 +267,6 @@ jobs: # if the above tests don't run tokenserver-db tests (i.e. using --workspace) # then run-tokenserver-scripts-tests will fail. 
These tests expect the db to be # configured already, and it appears unit-tests modify the db to the expected state - - run-tokenserver-integration-tests - store-test-results - upload-to-gcs: source: workflow/test-results @@ -324,11 +307,13 @@ jobs: - run: name: Save docker-compose config command: cp docker-compose*mysql.yaml /home/circleci/cache + - run: + name: Save Makefile to cache + command: cp Makefile /home/circleci/cache - save_cache: key: mysql-{{ .Branch }}-{{ .Environment.CIRCLE_SHA1 }}-{{ epoch }} paths: - /home/circleci/cache - build-spanner-image: docker: - image: cimg/rust:1.86 # RUST_VER @@ -361,6 +346,9 @@ jobs: - run: name: Save docker-compose config command: cp docker-compose*spanner.yaml /home/circleci/cache + - run: + name: Save Makefile to cache + command: cp Makefile /home/circleci/cache - save_cache: key: spanner-{{ .Branch }}-{{ .Environment.CIRCLE_SHA1 }}-{{ epoch }} paths: @@ -422,7 +410,7 @@ jobs: mysql-e2e-tests: docker: - - image: docker/compose:1.24.0 + - image: cimg/base:2025.04 auth: username: $DOCKER_USER password: $DOCKER_PASS @@ -434,14 +422,24 @@ jobs: - run: name: Restore Docker image cache command: docker load -i /home/circleci/cache/docker.tar + - run: + name: Restore Makefile from save_cache + command: cp /home/circleci/cache/Makefile . - run: name: Restore docker-compose config command: cp /home/circleci/cache/docker-compose*.yaml . - - run-e2e-mysql-tests + - make-test-dir + - run-e2e-tests: + db: mysql + - store-test-results + - upload-to-gcs: + source: workflow/test-results + destination: gs://ecosystem-test-eng-metrics/syncstorage-rs/junit + extension: xml spanner-e2e-tests: docker: - - image: docker/compose:1.24.0 + - image: cimg/base:2025.04 auth: username: $DOCKER_USER password: $DOCKER_PASS @@ -453,10 +451,20 @@ jobs: - run: name: Restore Docker image cache command: docker load -i /home/circleci/cache/docker.tar + - run: + name: Restore Makefile from save_cache + command: cp /home/circleci/cache/Makefile . 
- run: name: Restore docker-compose config command: cp /home/circleci/cache/docker-compose*.yaml . - - run-e2e-spanner-tests + - make-test-dir + - run-e2e-tests: + db: spanner + - store-test-results + - upload-to-gcs: + source: workflow/test-results + destination: gs://ecosystem-test-eng-metrics/syncstorage-rs/junit + extension: xml deploy: docker: diff --git a/Makefile b/Makefile index 037e9ee0..01c6a660 100644 --- a/Makefile +++ b/Makefile @@ -20,7 +20,13 @@ TEST_PROFILE := $(if $(CIRCLECI),ci,default) TEST_FILE_PREFIX := $(if $(CIRCLECI),$(CIRCLE_BUILD_NUM)__$(EPOCH_TIME)__$(CIRCLE_PROJECT_REPONAME)__$(WORKFLOW)__) UNIT_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)unit__results.xml UNIT_COVERAGE_JSON := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)unit__coverage.json -INTEGRATION_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)integration__results.xml + +SPANNER_INT_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)spanner_integration__results.xml +SPANNER_NO_JWK_INT_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)spanner_no_oauth_integration__results.xml +MYSQL_INT_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)mysql_integration__results.xml +MYSQL_NO_JWK_INT_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)mysql_no_oauth_integration__results.xml + +LOCAL_INTEGRATION_JUNIT_XML := $(TEST_RESULTS_DIR)/$(TEST_FILE_PREFIX)local_integration__results.xml SYNC_SYNCSTORAGE__DATABASE_URL ?= mysql://sample_user:sample_password@localhost/syncstorage_rs SYNC_TOKENSERVER__DATABASE_URL ?= mysql://sample_user:sample_password@localhost/tokenserver_rs @@ -40,22 +46,48 @@ clean: rm -r venv docker_start_mysql: - docker-compose -f docker-compose.mysql.yaml up -d + docker compose -f docker-compose.mysql.yaml up -d docker_start_mysql_rebuild: - docker-compose -f docker-compose.mysql.yaml up --build -d + docker compose -f docker-compose.mysql.yaml up --build -d docker_stop_mysql: - docker-compose -f docker-compose.mysql.yaml down + docker compose -f 
docker-compose.mysql.yaml down docker_start_spanner: - docker-compose -f docker-compose.spanner.yaml up -d + docker compose -f docker-compose.spanner.yaml up -d docker_start_spanner_rebuild: - docker-compose -f docker-compose.spanner.yaml up --build -d + docker compose -f docker-compose.spanner.yaml up --build -d docker_stop_spanner: - docker-compose -f docker-compose.spanner.yaml down + docker compose -f docker-compose.spanner.yaml down + +.ONESHELL: +docker_run_mysql_e2e_tests: + docker compose \ + -f docker-compose.mysql.yaml \ + -f docker-compose.e2e.mysql.yaml \ + up \ + --exit-code-from mysql-e2e-tests \ + --abort-on-container-exit; + exit_code=$$?; + docker cp mysql-e2e-tests:/mysql_integration_results.xml ${MYSQL_INT_JUNIT_XML}; + docker cp mysql-e2e-tests:/mysql_no_jwk_integration_results.xml ${MYSQL_NO_JWK_INT_JUNIT_XML}; + exit $$exit_code; + +.ONESHELL: +docker_run_spanner_e2e_tests: + docker compose \ + -f docker-compose.spanner.yaml \ + -f docker-compose.e2e.spanner.yaml \ + up \ + --exit-code-from spanner-e2e-tests \ + --abort-on-container-exit; + exit_code=$$?; + docker cp spanner-e2e-tests:/spanner_integration_results.xml ${SPANNER_INT_JUNIT_XML}; + docker cp spanner-e2e-tests:/spanner_no_jwk_integration_results.xml ${SPANNER_NO_JWK_INT_JUNIT_XML}; + exit $$exit_code; python: python3 -m venv venv @@ -104,4 +136,4 @@ merge_coverage_results: .ONESHELL: run_token_server_integration_tests: pip3 install -r tools/tokenserver/requirements.txt - pytest tools/tokenserver --junit-xml=${INTEGRATION_JUNIT_XML} \ No newline at end of file + pytest tools/tokenserver --junit-xml=${LOCAL_INTEGRATION_JUNIT_XML} diff --git a/docker-compose.e2e.mysql.yaml b/docker-compose.e2e.mysql.yaml index 59ad5152..a655eeb9 100644 --- a/docker-compose.e2e.mysql.yaml +++ b/docker-compose.e2e.mysql.yaml @@ -1,27 +1,24 @@ version: "3" services: - sync-db: - tokenserver-db: - syncserver: - depends_on: - - sync-db - - tokenserver-db - # TODO: either syncserver should retry the db connection 
- # itself a few times or should include a wait-for-it.sh script - # inside its docker that would do this for us. - entrypoint: > - /bin/sh -c " - sleep 15; - /app/bin/syncserver; - " mysql-e2e-tests: + container_name: mysql-e2e-tests depends_on: - - mock-fxa-server - - syncserver + sync-db: + condition: service_healthy + mock-fxa-server: + condition: service_started + tokenserver-db: + condition: service_healthy + # this depend is to avoid migration collisions. + # the syncserver isn't actually used for the tests, + # but collisions can happen particularly in CI. + syncserver: + condition: service_started image: app:build privileged: true user: root environment: + JWK_CACHE_DISABLED: false MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 SYNC_HOST: 0.0.0.0 SYNC_MASTER_SECRET: secret0 @@ -43,5 +40,9 @@ services: TOKENSERVER_HOST: http://localhost:8000 entrypoint: > /bin/sh -c " - sleep 28; python3 /app/tools/integration_tests/run.py 'http://localhost:8000#secret0' + exit_code=0; + pytest /app/tools/integration_tests/ --junit-xml=/mysql_integration_results.xml || exit_code=$$?; + export JWK_CACHE_DISABLED=true; + pytest /app/tools/integration_tests/ --junit-xml=/mysql_no_jwk_integration_results.xml || exit_code=$$?; + exit $$exit_code; " diff --git a/docker-compose.e2e.spanner.yaml b/docker-compose.e2e.spanner.yaml index cd3a5084..90e2ea57 100644 --- a/docker-compose.e2e.spanner.yaml +++ b/docker-compose.e2e.spanner.yaml @@ -1,27 +1,23 @@ version: "3" services: - sync-db: - sync-db-setup: - tokenserver-db: - syncserver: - depends_on: - - sync-db-setup - # TODO: either syncserver should retry the db connection - # itself a few times or should include a wait-for-it.sh script - # inside its docker that would do this for us. 
- entrypoint: > - /bin/sh -c " - sleep 15; - /app/bin/syncserver; - " spanner-e2e-tests: + container_name: spanner-e2e-tests depends_on: - - mock-fxa-server - - syncserver + mock-fxa-server: + condition: service_started + syncserver: + condition: service_started + tokenserver-db: + condition: service_healthy image: app:build privileged: true user: root environment: + # Some tests can run without the `FXA_OAUTH...` vars. + # Setting this to false will delete any of those keys before starting + # the syncserver and startging the test. This can be set/passed + # in from CircleCI when calling `docker-compose -f docker-compose.e2e.spanner.yaml` + JWK_CACHE_DISABLED: false MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 SYNC_HOST: 0.0.0.0 SYNC_MASTER_SECRET: secret0 @@ -44,5 +40,9 @@ services: TOKENSERVER_HOST: http://localhost:8000 entrypoint: > /bin/sh -c " - sleep 28; python3 /app/tools/integration_tests/run.py 'http://localhost:8000#secret0' + exit_code=0; + pytest /app/tools/integration_tests/ --junit-xml=/spanner_integration_results.xml || exit_code=$$?; + export JWK_CACHE_DISABLED=true; + pytest /app/tools/integration_tests/ --junit-xml=/spanner_no_jwk_integration_results.xml || exit_code=$$?; + exit $$exit_code; " diff --git a/docker-compose.mysql.yaml b/docker-compose.mysql.yaml index 1a469720..a6c9c9f8 100644 --- a/docker-compose.mysql.yaml +++ b/docker-compose.mysql.yaml @@ -24,6 +24,12 @@ services: MYSQL_DATABASE: syncstorage MYSQL_USER: test MYSQL_PASSWORD: test + healthcheck: + test: ["CMD-SHELL", "mysqladmin -uroot -p$${MYSQL_ROOT_PASSWORD} version"] + interval: 2s + retries: 10 + start_period: 20s + timeout: 2s tokenserver-db: image: docker.io/library/mysql:5.7 @@ -39,6 +45,12 @@ services: MYSQL_DATABASE: tokenserver MYSQL_USER: test MYSQL_PASSWORD: test + healthcheck: + test: ["CMD-SHELL", "mysqladmin -uroot -p$${MYSQL_ROOT_PASSWORD} version"] + interval: 2s + retries: 10 + start_period: 20s + timeout: 2s mock-fxa-server: image: app:build @@ -58,8 
+70,10 @@ services: ports: - "8000:8000" depends_on: - - sync-db - - tokenserver-db + sync-db: + condition: service_healthy + tokenserver-db: + condition: service_healthy environment: SYNC_HOST: 0.0.0.0 SYNC_MASTER_SECRET: secret0 diff --git a/docker-compose.spanner.yaml b/docker-compose.spanner.yaml index de1dc806..2f623056 100644 --- a/docker-compose.spanner.yaml +++ b/docker-compose.spanner.yaml @@ -35,6 +35,12 @@ services: MYSQL_DATABASE: tokenserver MYSQL_USER: test MYSQL_PASSWORD: test + healthcheck: + test: ["CMD-SHELL", "mysqladmin -uroot -p$${MYSQL_ROOT_PASSWORD} version"] + interval: 2s + retries: 10 + start_period: 20s + timeout: 2s mock-fxa-server: image: app:build restart: "no" diff --git a/tools/integration_tests/conftest.py new file mode 100644 index 00000000..68dc309f --- /dev/null +++ b/tools/integration_tests/conftest.py @@ -0,0 +1,162 @@ +import os +import psutil +import signal +import subprocess +import time +import pytest +import requests +import logging + +DEBUG_BUILD = "target/debug/syncserver" +RELEASE_BUILD = "/app/bin/syncserver" +# max number of attempts to check server heartbeat +SYNC_SERVER_STARTUP_MAX_ATTEMPTS = 30 +# env vars are strings: "false" is truthy, so parse to a real boolean +JWK_CACHE_DISABLED = os.environ.get("JWK_CACHE_DISABLED", "").lower() in ("1", "true", "yes") + +logger = logging.getLogger("tokenserver.scripts.conftest") + +# Local setup for fixtures + + +def _terminate_process(process): + """ + Gracefully terminate the process and its children. + """ + proc = psutil.Process(pid=process.pid) + child_proc = proc.children(recursive=True) + for p in [proc] + child_proc: + os.kill(p.pid, signal.SIGTERM) + process.wait() + + +def _wait_for_server_startup(max_attempts=SYNC_SERVER_STARTUP_MAX_ATTEMPTS): + """ + Waits for the __heartbeat__ endpoint to return a 200, pausing for 1 second + between attempts. Raises a RuntimeError if the server does not start after + the specific number of attempts. 
+ """ + itter = 0 + while True: + if itter >= max_attempts: + raise RuntimeError( + "Server failed to start within the timeout period." + ) + try: + req = requests.get("http://localhost:8000/__heartbeat__", + timeout=2) + if req.status_code == 200: + break + except requests.exceptions.RequestException as e: + logger.warning("Connection failed: %s", e) + time.sleep(1) + itter += 1 + + +def _start_server(): + """ + Starts the syncserver process, waits for it to be running, + and return the process handle. + """ + target_binary = None + if os.path.exists(DEBUG_BUILD): + target_binary = DEBUG_BUILD + elif os.path.exists(RELEASE_BUILD): + target_binary = RELEASE_BUILD + else: + raise RuntimeError( + "Neither {DEBUG_BUILD} nor {RELEASE_BUILD} were found." + ) + + server_proc = subprocess.Popen( + target_binary, + shell=True, + text=True, + env=os.environ, + ) + + _wait_for_server_startup() + + return server_proc + + +def _server_manager(): + """ + Context manager to gracefully start and stop the server. + """ + server_process = _start_server() + try: + yield server_process + finally: + _terminate_process(server_process) + + +def _set_local_test_env_vars(): + """ + Set environment variables for local testing. + This function sets the necessary environment variables for the syncserver. + """ + os.environ.setdefault("SYNC_MASTER_SECRET", "secret0") + os.environ.setdefault("SYNC_CORS_MAX_AGE", "555") + os.environ.setdefault("SYNC_CORS_ALLOWED_ORIGIN", "*") + os.environ["MOZSVC_TEST_REMOTE"] = "localhost" + os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = \ + os.environ["MOCK_FXA_SERVER_URL"] + +# Fixtures + + +@pytest.fixture(scope="session") +def setup_server_local_testing(): + """ + Fixture to set up the server for local testing. + This fixture sets the necessary environment variables and + starts the server. 
+ """ + _set_local_test_env_vars() + yield from _server_manager() + + +@pytest.fixture(scope="session") +def setup_server_local_testing_with_oauth(): + """ + Fixture to set up the server for local testing with OAuth. + This fixture sets the necessary environment variables and + starts the server. + """ + _set_local_test_env_vars() + + # Set OAuth-specific environment variables + os.environ["TOKENSERVER_AUTH_METHOD"] = "oauth" + + # Start the server + yield from _server_manager() + + +@pytest.fixture(scope="session") +def setup_server_end_to_end_testing(): + """ + Fixture to set up the server for end-to-end testing. + This fixture sets the necessary environment variables and + starts the server. + """ + _set_local_test_env_vars() + # debatable if this should ONLY be here since it was only + # done against the "run_end_to_end_tests" prior, of if we + # just do it in _set_local_test_env_vars... + if JWK_CACHE_DISABLED: + del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY"] + del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG"] + del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID"] + del os.environ[ + "SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT" + ] + del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE"] + del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N"] + del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E"] + + # Set OAuth-specific environment variables + os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = \ + "https://oauth.stage.mozaws.net" + + # Start the server + yield from _server_manager() diff --git a/tools/integration_tests/run.py b/tools/integration_tests/run.py deleted file mode 100644 index 923faf70..00000000 --- a/tools/integration_tests/run.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python3 - -import os.path -import psutil -import signal -import subprocess -import sys -from test_storage import TestStorage -from test_support import run_live_functional_tests -import time -from 
tokenserver.run import (run_end_to_end_tests, run_local_tests) - -DEBUG_BUILD = "target/debug/syncserver" -RELEASE_BUILD = "/app/bin/syncserver" - - -def terminate_process(process): - proc = psutil.Process(pid=process.pid) - child_proc = proc.children(recursive=True) - for p in [proc] + child_proc: - os.kill(p.pid, signal.SIGTERM) - process.wait() - - -if __name__ == "__main__": - # When run as a script, this file will execute the - # functional tests against a live webserver. - target_binary = None - if os.path.exists(DEBUG_BUILD): - target_binary = DEBUG_BUILD - elif os.path.exists(RELEASE_BUILD): - target_binary = RELEASE_BUILD - else: - raise RuntimeError( - "Neither target/debug/syncserver \ - nor /app/bin/syncserver were found." - ) - - def start_server(): - the_server_subprocess = subprocess.Popen( - target_binary, shell=True, env=os.environ - ) - - # TODO we should change this to watch for a log message on startup - # to know when to continue instead of sleeping for a fixed amount - time.sleep(20) - - return the_server_subprocess - - os.environ.setdefault("SYNC_MASTER_SECRET", "secret0") - os.environ.setdefault("SYNC_CORS_MAX_AGE", "555") - os.environ.setdefault("SYNC_CORS_ALLOWED_ORIGIN", "*") - mock_fxa_server_url = os.environ["MOCK_FXA_SERVER_URL"] - url = "%s/v2" % mock_fxa_server_url - os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = mock_fxa_server_url - the_server_subprocess = start_server() - try: - res = 0 - res |= run_live_functional_tests(TestStorage, sys.argv) - os.environ["TOKENSERVER_AUTH_METHOD"] = "oauth" - res |= run_local_tests() - finally: - terminate_process(the_server_subprocess) - - os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = \ - "https://oauth.stage.mozaws.net" - the_server_subprocess = start_server() - try: - res |= run_end_to_end_tests() - finally: - terminate_process(the_server_subprocess) - - # Run the Tokenserver end-to-end tests without the JWK cached - del 
os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KTY"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__ALG"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__KID"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__FXA_CREATED_AT"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__USE"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__N"] - del os.environ["SYNC_TOKENSERVER__FXA_OAUTH_PRIMARY_JWK__E"] - - the_server_subprocess = start_server() - try: - verbosity = int(os.environ.get("VERBOSITY", "1")) - res |= run_end_to_end_tests(verbosity=verbosity) - finally: - terminate_process(the_server_subprocess) - - sys.exit(res) diff --git a/tools/integration_tests/test_storage.py b/tools/integration_tests/test_storage.py index 7e9714aa..c74b93bb 100644 --- a/tools/integration_tests/test_storage.py +++ b/tools/integration_tests/test_storage.py @@ -14,9 +14,7 @@ consider it a bug. """ -# unittest imported by pytest requirement -import unittest - +import pytest import re import json @@ -74,6 +72,7 @@ def randtext(size=10): return "".join([random.choice(_ASCII) for i in range(size)]) +@pytest.mark.usefixtures('setup_server_local_testing') class TestStorage(StorageFunctionalTestCase): """Storage testcases that only use the web API. @@ -84,7 +83,7 @@ class TestStorage(StorageFunctionalTestCase): def setUp(self): super(TestStorage, self).setUp() self.root = "/1.5/%d" % (self.user_id,) - # Reset the storage to a known state, aka "empty". 
+ self.retry_delete(self.root) @contextlib.contextmanager @@ -127,9 +126,9 @@ class TestStorage(StorageFunctionalTestCase): resp = self.app.get(self.root + "/info/collections") res = resp.json keys = sorted(list(res.keys())) - self.assertEquals(keys, ["xxx_col1", "xxx_col2"]) - self.assertEquals(res["xxx_col1"], ts1) - self.assertEquals(res["xxx_col2"], ts2) + self.assertEqual(keys, ["xxx_col1", "xxx_col2"]) + self.assertEqual(res["xxx_col1"], ts1) + self.assertEqual(res["xxx_col2"], ts2) # Updating items in xxx_col2, check timestamps. bsos = [{"id": str(i).zfill(2), "payload": "yyy"} for i in range(2)] resp = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) @@ -138,9 +137,9 @@ class TestStorage(StorageFunctionalTestCase): resp = self.app.get(self.root + "/info/collections") res = resp.json keys = sorted(list(res.keys())) - self.assertEquals(keys, ["xxx_col1", "xxx_col2"]) - self.assertEquals(res["xxx_col1"], ts1) - self.assertEquals(res["xxx_col2"], ts2) + self.assertEqual(keys, ["xxx_col1", "xxx_col2"]) + self.assertEqual(res["xxx_col1"], ts1) + self.assertEqual(res["xxx_col2"], ts2) def test_get_collection_count(self): # xxx_col1 gets 3 items, xxx_col2 gets 5 items. @@ -151,9 +150,9 @@ class TestStorage(StorageFunctionalTestCase): # those counts should be reflected back in query. resp = self.app.get(self.root + "/info/collection_counts") res = resp.json - self.assertEquals(len(res), 2) - self.assertEquals(res["xxx_col1"], 3) - self.assertEquals(res["xxx_col2"], 5) + self.assertEqual(len(res), 2) + self.assertEqual(res["xxx_col1"], 3) + self.assertEqual(res["xxx_col2"], 5) def test_bad_cache(self): # fixes #637332 @@ -171,7 +170,7 @@ class TestStorage(StorageFunctionalTestCase): # 3. 
get collection info again, should find the new ones resp = self.app.get(self.root + "/info/collections") - self.assertEquals(len(resp.json), numcols + 1) + self.assertEqual(len(resp.json), numcols + 1) def test_get_collection_only(self): bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(5)] @@ -180,14 +179,14 @@ class TestStorage(StorageFunctionalTestCase): # non-existent collections appear as empty resp = self.app.get(self.root + "/storage/nonexistent") res = resp.json - self.assertEquals(res, []) + self.assertEqual(res, []) # try just getting all items at once. resp = self.app.get(self.root + "/storage/xxx_col2") res = resp.json res.sort() - self.assertEquals(res, ["00", "01", "02", "03", "04"]) - self.assertEquals(int(resp.headers["X-Weave-Records"]), 5) + self.assertEqual(res, ["00", "01", "02", "03", "04"]) + self.assertEqual(int(resp.headers["X-Weave-Records"]), 5) # trying various filters @@ -197,7 +196,7 @@ class TestStorage(StorageFunctionalTestCase): res = self.app.get(self.root + "/storage/xxx_col2?ids=01,03,17") res = res.json res.sort() - self.assertEquals(res, ["01", "03"]) + self.assertEqual(res, ["01", "03"]) # "newer" # Returns only ids for objects in the collection that have been last @@ -216,15 +215,15 @@ class TestStorage(StorageFunctionalTestCase): self.assertTrue(ts1 < ts2) res = self.app.get(self.root + "/storage/xxx_col2?newer=%s" % ts1) - self.assertEquals(res.json, ["129"]) + self.assertEqual(res.json, ["129"]) res = self.app.get(self.root + "/storage/xxx_col2?newer=%s" % ts2) - self.assertEquals(res.json, []) + self.assertEqual(res.json, []) res = self.app.get( self.root + "/storage/xxx_col2?newer=%s" % (ts1 - 1) ) - self.assertEquals(sorted(res.json), ["128", "129"]) + self.assertEqual(sorted(res.json), ["128", "129"]) # "older" # Returns only ids for objects in the collection that have been last @@ -243,19 +242,19 @@ class TestStorage(StorageFunctionalTestCase): self.assertTrue(ts1 < ts2) res = self.app.get(self.root + 
"/storage/xxx_col2?older=%s" % ts1) - self.assertEquals(res.json, []) + self.assertEqual(res.json, []) res = self.app.get(self.root + "/storage/xxx_col2?older=%s" % ts2) - self.assertEquals(res.json, ["128"]) + self.assertEqual(res.json, ["128"]) res = self.app.get( self.root + "/storage/xxx_col2?older=%s" % (ts2 + 1) ) - self.assertEquals(sorted(res.json), ["128", "129"]) + self.assertEqual(sorted(res.json), ["128", "129"]) qs = "?older=%s&newer=%s" % (ts2 + 1, ts1) res = self.app.get(self.root + "/storage/xxx_col2" + qs) - self.assertEquals(sorted(res.json), ["129"]) + self.assertEqual(sorted(res.json), ["129"]) # "full" # If defined, returns the full BSO, rather than just the id. @@ -263,7 +262,7 @@ class TestStorage(StorageFunctionalTestCase): keys = list(res.json[0].keys()) keys.sort() wanted = ["id", "modified", "payload"] - self.assertEquals(keys, wanted) + self.assertEqual(keys, wanted) res = self.app.get(self.root + "/storage/xxx_col2") self.assertTrue(isinstance(res.json, list)) @@ -281,84 +280,84 @@ class TestStorage(StorageFunctionalTestCase): query_url = self.root + "/storage/xxx_col2?sort=index" res = self.app.get(query_url) all_items = res.json - self.assertEquals(len(all_items), 10) + self.assertEqual(len(all_items), 10) res = self.app.get(query_url + "&limit=2") - self.assertEquals(res.json, all_items[:2]) + self.assertEqual(res.json, all_items[:2]) # "offset" # Skips over items that have already been returned. 
next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&limit=3&offset=" + next_offset) - self.assertEquals(res.json, all_items[2:5]) + self.assertEqual(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) # "offset" again, this time ordering by descending timestamp. query_url = self.root + "/storage/xxx_col2?sort=newest" res = self.app.get(query_url) all_items = res.json - self.assertEquals(len(all_items), 10) + self.assertEqual(len(all_items), 10) res = self.app.get(query_url + "&limit=2") - self.assertEquals(res.json, all_items[:2]) + self.assertEqual(res.json, all_items[:2]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&limit=3&offset=" + next_offset) - self.assertEquals(res.json, all_items[2:5]) + self.assertEqual(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) # "offset" again, this time ordering by ascending timestamp. 
query_url = self.root + "/storage/xxx_col2?sort=oldest" res = self.app.get(query_url) all_items = res.json - self.assertEquals(len(all_items), 10) + self.assertEqual(len(all_items), 10) res = self.app.get(query_url + "&limit=2") - self.assertEquals(res.json, all_items[:2]) + self.assertEqual(res.json, all_items[:2]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&limit=3&offset=" + next_offset) - self.assertEquals(res.json, all_items[2:5]) + self.assertEqual(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) # "offset" once more, this time with no explicit ordering query_url = self.root + "/storage/xxx_col2?" 
res = self.app.get(query_url) all_items = res.json - self.assertEquals(len(all_items), 10) + self.assertEqual(len(all_items), 10) res = self.app.get(query_url + "&limit=2") - self.assertEquals(res.json, all_items[:2]) + self.assertEqual(res.json, all_items[:2]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&limit=3&offset=" + next_offset) - self.assertEquals(res.json, all_items[2:5]) + self.assertEqual(res.json, all_items[2:5]) next_offset = res.headers["X-Weave-Next-Offset"] res = self.app.get(query_url + "&offset=" + next_offset) - self.assertEquals(res.json, all_items[5:]) + self.assertEqual(res.json, all_items[5:]) self.assertTrue("X-Weave-Next-Offset" not in res.headers) res = self.app.get(query_url + "&limit=10000&offset=" + next_offset) @@ -375,15 +374,15 @@ class TestStorage(StorageFunctionalTestCase): res = self.app.get(self.root + "/storage/xxx_col2?sort=newest") res = res.json - self.assertEquals(res, ["02", "01", "00"]) + self.assertEqual(res, ["02", "01", "00"]) res = self.app.get(self.root + "/storage/xxx_col2?sort=oldest") res = res.json - self.assertEquals(res, ["00", "01", "02"]) + self.assertEqual(res, ["00", "01", "02"]) res = self.app.get(self.root + "/storage/xxx_col2?sort=index") res = res.json - self.assertEquals(res, ["01", "02", "00"]) + self.assertEqual(res, ["01", "02", "00"]) def test_alternative_formats(self): bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(5)] @@ -394,18 +393,18 @@ class TestStorage(StorageFunctionalTestCase): self.root + "/storage/xxx_col2", headers=[("Accept", "application/json")], ) - self.assertEquals(res.content_type.split(";")[0], "application/json") + self.assertEqual(res.content_type.split(";")[0], "application/json") res = res.json res.sort() - self.assertEquals(res, ["00", "01", "02", "03", "04"]) + self.assertEqual(res, ["00", "01", "02", "03", "04"]) # application/newlines res = self.app.get( self.root + "/storage/xxx_col2", headers=[("Accept", 
"application/newlines")], ) - self.assertEquals(res.content_type, "application/newlines") + self.assertEqual(res.content_type, "application/newlines") self.assertTrue(res.body.endswith(b"\n")) res = [ @@ -413,11 +412,11 @@ class TestStorage(StorageFunctionalTestCase): for line in res.body.decode("utf-8").strip().split("\n") ] res.sort() - self.assertEquals(res, ["00", "01", "02", "03", "04"]) + self.assertEqual(res, ["00", "01", "02", "03", "04"]) # unspecified format defaults to json res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(res.content_type.split(";")[0], "application/json") + self.assertEqual(res.content_type.split(";")[0], "application/json") # unkown format gets a 406 self.app.get( @@ -433,7 +432,7 @@ class TestStorage(StorageFunctionalTestCase): self.retry_post_json(self.root + "/storage/xxx_col2", bsos) # Get them all, along with their timestamps. res = self.app.get(self.root + "/storage/xxx_col2?full=true").json - self.assertEquals(len(res), 5) + self.assertEqual(len(res), 5) timestamps = sorted([r["modified"] for r in res]) # The timestamp of the collection should be the max of all those. 
self.app.get( @@ -456,8 +455,8 @@ class TestStorage(StorageFunctionalTestCase): res = res.json keys = list(res.keys()) keys.sort() - self.assertEquals(keys, ["id", "modified", "payload"]) - self.assertEquals(res["id"], "01") + self.assertEqual(keys, ["id", "modified", "payload"]) + self.assertEqual(res["id"], "01") # unexisting object self.app.get(self.root + "/storage/xxx_col2/99", status=404) @@ -477,7 +476,7 @@ class TestStorage(StorageFunctionalTestCase): self.root + "/storage/xxx_col2/01", headers={"X-If-Modified-Since": str(res["modified"] - 1)}, ) - self.assertEquals(res.json["id"], "01") + self.assertEqual(res.json["id"], "01") def test_set_item(self): # let's create an object @@ -485,14 +484,14 @@ class TestStorage(StorageFunctionalTestCase): self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) res = self.app.get(self.root + "/storage/xxx_col2/12345") res = res.json - self.assertEquals(res["payload"], _PLD) + self.assertEqual(res["payload"], _PLD) # now let's update it bso = {"payload": "YYY"} self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) res = self.app.get(self.root + "/storage/xxx_col2/12345") res = res.json - self.assertEquals(res["payload"], "YYY") + self.assertEqual(res["payload"], "YYY") def test_set_collection(self): # sending two bsos @@ -504,10 +503,10 @@ class TestStorage(StorageFunctionalTestCase): # checking what we did res = self.app.get(self.root + "/storage/xxx_col2/12") res = res.json - self.assertEquals(res["payload"], _PLD) + self.assertEqual(res["payload"], _PLD) res = self.app.get(self.root + "/storage/xxx_col2/13") res = res.json - self.assertEquals(res["payload"], _PLD) + self.assertEqual(res["payload"], _PLD) # one more time, with changes bso1 = {"id": "13", "payload": "XyX"} @@ -518,10 +517,10 @@ class TestStorage(StorageFunctionalTestCase): # checking what we did res = self.app.get(self.root + "/storage/xxx_col2/14") res = res.json - self.assertEquals(res["payload"], _PLD) + 
self.assertEqual(res["payload"], _PLD) res = self.app.get(self.root + "/storage/xxx_col2/13") res = res.json - self.assertEquals(res["payload"], "XyX") + self.assertEqual(res["payload"], "XyX") # sending two bsos with one bad sortindex bso1 = {"id": "one", "payload": _PLD} @@ -542,7 +541,7 @@ class TestStorage(StorageFunctionalTestCase): headers={"Content-Type": "application/newlines"}, ) items = self.app.get(self.root + "/storage/xxx_col2").json - self.assertEquals(len(items), 2) + self.assertEqual(len(items), 2) # If we send an unknown content type, we get an error. self.retry_delete(self.root + "/storage/xxx_col2") body = json_dumps(bsos) @@ -553,7 +552,7 @@ class TestStorage(StorageFunctionalTestCase): status=415, ) items = self.app.get(self.root + "/storage/xxx_col2").json - self.assertEquals(len(items), 0) + self.assertEqual(len(items), 0) def test_set_item_input_formats(self): # If we send with application/json it should work. @@ -564,7 +563,7 @@ class TestStorage(StorageFunctionalTestCase): headers={"Content-Type": "application/json"}, ) item = self.app.get(self.root + "/storage/xxx_col2/TEST").json - self.assertEquals(item["payload"], _PLD) + self.assertEqual(item["payload"], _PLD) # If we send json with some other content type, it should fail self.retry_delete(self.root + "/storage/xxx_col2") self.app.put( @@ -581,7 +580,7 @@ class TestStorage(StorageFunctionalTestCase): headers={"Content-Type": "text/plain"}, ) item = self.app.get(self.root + "/storage/xxx_col2/TEST").json - self.assertEquals(item["payload"], _PLD) + self.assertEqual(item["payload"], _PLD) def test_app_newlines_when_payloads_contain_newlines(self): # Send some application/newlines with embedded newline chars. 
@@ -590,7 +589,7 @@ class TestStorage(StorageFunctionalTestCase): {"id": "02", "payload": "\nmarco\npolo\n"}, ] body = "\n".join(json_dumps(bso) for bso in bsos) - self.assertEquals(len(body.split("\n")), 2) + self.assertEqual(len(body.split("\n")), 2) self.app.post( self.root + "/storage/xxx_col2", body, @@ -598,10 +597,10 @@ class TestStorage(StorageFunctionalTestCase): ) # Read them back as JSON list, check payloads. items = self.app.get(self.root + "/storage/xxx_col2?full=1").json - self.assertEquals(len(items), 2) + self.assertEqual(len(items), 2) items.sort(key=lambda bso: bso["id"]) - self.assertEquals(items[0]["payload"], bsos[0]["payload"]) - self.assertEquals(items[1]["payload"], bsos[1]["payload"]) + self.assertEqual(items[0]["payload"], bsos[0]["payload"]) + self.assertEqual(items[1]["payload"], bsos[1]["payload"]) # Read them back as application/newlines, check payloads. res = self.app.get( self.root + "/storage/xxx_col2?full=1", @@ -613,10 +612,10 @@ class TestStorage(StorageFunctionalTestCase): json_loads(line) for line in res.body.decode("utf-8").strip().split("\n") ] - self.assertEquals(len(items), 2) + self.assertEqual(len(items), 2) items.sort(key=lambda bso: bso["id"]) - self.assertEquals(items[0]["payload"], bsos[0]["payload"]) - self.assertEquals(items[1]["payload"], bsos[1]["payload"]) + self.assertEqual(items[0]["payload"], bsos[0]["payload"]) + self.assertEqual(items[1]["payload"], bsos[1]["payload"]) def test_collection_usage(self): self.retry_delete(self.root + "/storage") @@ -640,24 +639,24 @@ class TestStorage(StorageFunctionalTestCase): bsos = [bso1, bso2, bso3] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 3) + self.assertEqual(len(res.json), 3) # deleting all items self.retry_delete(self.root + "/storage/xxx_col2") items = self.app.get(self.root + "/storage/xxx_col2").json - self.assertEquals(len(items), 0) + 
self.assertEqual(len(items), 0) # Deletes the ids for objects in the collection that are in the # provided comma-separated list. self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 3) + self.assertEqual(len(res.json), 3) self.retry_delete(self.root + "/storage/xxx_col2?ids=12,14") res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 1) + self.assertEqual(len(res.json), 1) self.retry_delete(self.root + "/storage/xxx_col2?ids=13") res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 0) + self.assertEqual(len(res.json), 0) def test_delete_item(self): # creating a collection of three @@ -667,13 +666,13 @@ class TestStorage(StorageFunctionalTestCase): bsos = [bso1, bso2, bso3] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 3) + self.assertEqual(len(res.json), 3) ts = float(res.headers["X-Last-Modified"]) # deleting item 13 self.retry_delete(self.root + "/storage/xxx_col2/13") res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 2) + self.assertEqual(len(res.json), 2) # unexisting item should return a 404 self.retry_delete(self.root + "/storage/xxx_col2/12982", status=404) @@ -690,19 +689,18 @@ class TestStorage(StorageFunctionalTestCase): bsos = [bso1, bso2, bso3] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 3) + self.assertEqual(len(res.json), 3) # deleting all self.retry_delete(self.root + "/storage") items = self.app.get(self.root + "/storage/xxx_col2").json - self.assertEquals(len(items), 0) + self.assertEqual(len(items), 0) self.retry_delete(self.root + "/storage/xxx_col2", status=200) - self.assertEquals(len(items), 0) + self.assertEqual(len(items), 0) def 
test_x_timestamp_header(self): - # This can't be run against a live server. if self.distant: - raise unittest.SkipTest + pytest.skip("Test cannot be run against a live server.") bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(5)] self.retry_post_json(self.root + "/storage/xxx_col2", bsos) @@ -788,8 +786,8 @@ class TestStorage(StorageFunctionalTestCase): ts = res2.headers["X-Last-Modified"] # All of those should have left the BSO unchanged res2 = self.app.get(self.root + "/storage/xxx_col2/12345") - self.assertEquals(res2.json["payload"], _PLD) - self.assertEquals( + self.assertEqual(res2.json["payload"], _PLD) + self.assertEqual( res2.headers["X-Last-Modified"], res.headers["X-Last-Modified"] ) # Using an X-If-Unmodified-Since equal to @@ -836,57 +834,30 @@ class TestStorage(StorageFunctionalTestCase): self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) res = self.app.get(self.root + "/info/quota") used = res.json[0] - self.assertEquals(used - old_used, len(_PLD) / 1024.0) - - def test_overquota(self): - # This can't be run against a live server. - raise unittest.SkipTest - if self.distant: - raise unittest.SkipTest - - # Clear out any data that's already in the store. - self.retry_delete(self.root + "/storage") - - # Set a low quota for the storage. - self.config.registry.settings["storage.quota_size"] = 700 - - # Check the the remaining quota is correctly reported. - bso = {"payload": _PLD} - res = self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) - wanted = str(round(200 / 1024.0, 2)) - self.assertEquals(res.headers["X-Weave-Quota-Remaining"], wanted) - - # Set the quota so that they're over their limit. 
- self.config.registry.settings["storage.quota_size"] = 10 - bso = {"payload": _PLD} - res = self.retry_put_json( - self.root + "/storage/xxx_col2/12345", bso, status=403 - ) - self.assertEquals(res.content_type.split(";")[0], "application/json") - self.assertEquals(res.json["status"], "quota-exceeded") + self.assertEqual(used - old_used, len(_PLD) / 1024.0) def test_get_collection_ttl(self): bso = {"payload": _PLD, "ttl": 0} res = self.retry_put_json(self.root + "/storage/xxx_col2/12345", bso) time.sleep(1.1) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(res.json, []) + self.assertEqual(res.json, []) bso = {"payload": _PLD, "ttl": 2} res = self.retry_put_json(self.root + "/storage/xxx_col2/123456", bso) # it should exists now res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 1) + self.assertEqual(len(res.json), 1) # trying a second put again self.retry_put_json(self.root + "/storage/xxx_col2/123456", bso) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 1) + self.assertEqual(len(res.json), 1) time.sleep(2.1) res = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(len(res.json), 0) + self.assertEqual(len(res.json), 0) def test_multi_item_post_limits(self): res = self.app.get(self.root + "/info/configuration") @@ -898,7 +869,7 @@ class TestStorage(StorageFunctionalTestCase): # Can't run against live server if it doesn't # report the right config options. 
if self.distant: - raise unittest.SkipTest + pytest.skip("Live server does not report the required limit config options.") max_bytes = get_limit_config(self.config, "max_post_bytes") max_count = get_limit_config(self.config, "max_post_records") max_req_bytes = get_limit_config(self.config, "max_request_bytes") @@ -910,8 +881,8 @@ class TestStorage(StorageFunctionalTestCase): ] res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res["success"]), max_count - 5) - self.assertEquals(len(res["failed"]), 0) + self.assertEqual(len(res["success"]), max_count - 5) + self.assertEqual(len(res["failed"]), 0) # Uploading max_count+5 items should produce five failures. bsos = [ @@ -920,8 +891,8 @@ class TestStorage(StorageFunctionalTestCase): ] res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res["success"]), max_count) - self.assertEquals(len(res["failed"]), 5) + self.assertEqual(len(res["success"]), max_count) + self.assertEqual(len(res["failed"]), 5) # Uploading items such that the last item puts us over the # cumulative limit on payload size, should produce 1 failure. @@ -940,8 +911,8 @@ class TestStorage(StorageFunctionalTestCase): res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res["success"]), max_items) - self.assertEquals(len(res["failed"]), 1) + self.assertEqual(len(res["success"]), max_items) + self.assertEqual(len(res["failed"]), 1) def test_weird_args(self): # pushing some data in xxx_col2
ids = ",".join([randtext(10) for i in range(100)]) res = self.app.get(self.root + "/storage/xxx_col2?ids=%s" % ids) - self.assertEquals(res.json, []) + self.assertEqual(res.json, []) # trying unexpected args - they should not break self.app.get(self.root + "/storage/xxx_col2?blabla=1", status=200) @@ -979,7 +950,7 @@ class TestStorage(StorageFunctionalTestCase): ] res = self.retry_post_json(self.root + "/storage/passwords", bsos) res = res.json - self.assertEquals(len(res["success"]), 5) + self.assertEqual(len(res["success"]), 5) # now deleting some of them ids = ",".join( @@ -998,7 +969,7 @@ class TestStorage(StorageFunctionalTestCase): bsos = [{"id": "test-%d" % i, "payload": _PLD} for i in range(5)] res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res["success"]), 5) + self.assertEqual(len(res["success"]), 5) # now delete some of them ids = ",".join(["test-%d" % i for i in range(2)]) ids = urllib.request.quote(ids) @@ -1037,7 +1008,7 @@ class TestStorage(StorageFunctionalTestCase): res = self.app.get(self.root + "/storage/xxx_col2?newer=%s" % ts) res = res.json try: - self.assertEquals(sorted(res), ["03", "04"]) + self.assertEqual(sorted(res), ["03", "04"]) except AssertionError: # need to display the whole collection to understand the issue msg = "Timestamp used: %s" % ts @@ -1066,7 +1037,7 @@ class TestStorage(StorageFunctionalTestCase): # of bso 1 and 2, should not return them res = self.app.get(self.root + "/storage/xxx_meh?newer=%s" % ts) res = res.json - self.assertEquals(sorted(res), ["03", "04"]) + self.assertEqual(sorted(res), ["03", "04"]) def test_strict_older(self): # send two bsos in the 'xxx_meh' collection @@ -1086,7 +1057,7 @@ class TestStorage(StorageFunctionalTestCase): # of bso 3 and 4, should not return them res = self.app.get(self.root + "/storage/xxx_meh?older=%s" % ts) res = res.json - self.assertEquals(sorted(res), ["01", "02"]) + self.assertEqual(sorted(res), ["01", "02"]) def 
test_handling_of_invalid_json_in_bso_uploads(self): # Single upload with JSON that's not a BSO. @@ -1094,32 +1065,32 @@ class TestStorage(StorageFunctionalTestCase): res = self.retry_put_json( self.root + "/storage/xxx_col2/invalid", bso, status=400 ) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) bso = 42 res = self.retry_put_json( self.root + "/storage/xxx_col2/invalid", bso, status=400 ) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) bso = {"id": ["01", "02"], "payload": {"3": "4"}} res = self.retry_put_json( self.root + "/storage/xxx_col2/invalid", bso, status=400 ) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Batch upload with JSON that's not a list of BSOs bsos = "notalist" res = self.retry_post_json( self.root + "/storage/xxx_col2", bsos, status=400 ) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) bsos = 42 res = self.retry_post_json( self.root + "/storage/xxx_col2", bsos, status=400 ) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Batch upload a list with something that's not a valid data dict. # It should fail out entirely, as the input is seriously broken. 
@@ -1133,8 +1104,8 @@ class TestStorage(StorageFunctionalTestCase): bsos = [{"id": "01", "payload": "GOOD"}, {"id": "02", "invalid": "ya"}] res = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) res = res.json - self.assertEquals(len(res["success"]), 1) - self.assertEquals(len(res["failed"]), 1) + self.assertEqual(len(res["success"]), 1) + self.assertEqual(len(res["failed"]), 1) def test_handling_of_invalid_bso_fields(self): coll_url = self.root + "/storage/xxx_col2" @@ -1171,43 +1142,43 @@ class TestStorage(StorageFunctionalTestCase): res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Invalid sortindex - not an integer bso = {"id": "TEST", "payload": "testing", "sortindex": "2.6"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Invalid sortindex - larger than max value bso = {"id": "TEST", "payload": "testing", "sortindex": "1" + "0" * 9} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Invalid payload - not a string bso = {"id": "TEST", "payload": 42} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Invalid ttl - not an integer bso = {"id": "TEST", 
"payload": "testing", "ttl": "eh?"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Invalid ttl - not an integer bso = {"id": "TEST", "payload": "testing", "ttl": "4.2"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) # Invalid BSO - unknown field bso = {"id": "TEST", "unexpected": "spanish-inquisition"} res = self.retry_post_json(coll_url, [bso]) self.assertTrue(res.json["failed"] and not res.json["success"]) res = self.retry_put_json(coll_url + "/" + bso["id"], bso, status=400) - self.assertEquals(res.json, WEAVE_INVALID_WBO) + self.assertEqual(res.json, WEAVE_INVALID_WBO) def test_that_batch_gets_are_limited_to_max_number_of_ids(self): bso = {"id": "01", "payload": "testing"} @@ -1216,12 +1187,12 @@ class TestStorage(StorageFunctionalTestCase): # Getting with less than the limit works OK. ids = ",".join(str(i).zfill(2) for i in range(BATCH_MAX_IDS - 1)) res = self.app.get(self.root + "/storage/xxx_col2?ids=" + ids) - self.assertEquals(res.json, ["01"]) + self.assertEqual(res.json, ["01"]) # Getting with equal to the limit works OK. ids = ",".join(str(i).zfill(2) for i in range(BATCH_MAX_IDS)) res = self.app.get(self.root + "/storage/xxx_col2?ids=" + ids) - self.assertEquals(res.json, ["01"]) + self.assertEqual(res.json, ["01"]) # Getting with more than the limit fails. 
ids = ",".join(str(i).zfill(2) for i in range(BATCH_MAX_IDS + 1)) @@ -1335,11 +1306,11 @@ class TestStorage(StorageFunctionalTestCase): time.sleep(0.8) items = self.app.get(self.root + "/storage/xxx_col2?full=1").json items = dict((item["id"], item) for item in items) - self.assertEquals(sorted(list(items.keys())), ["TEST2", "TEST3"]) + self.assertEqual(sorted(list(items.keys())), ["TEST2", "TEST3"]) # The existing item should have retained its payload. # The new item should have got a default payload of empty string. - self.assertEquals(items["TEST2"]["payload"], "x") - self.assertEquals(items["TEST3"]["payload"], "") + self.assertEqual(items["TEST2"]["payload"], "x") + self.assertEqual(items["TEST3"]["payload"], "") ts2 = items["TEST2"]["modified"] ts3 = items["TEST3"]["modified"] self.assertTrue(ts2 < ts3) @@ -1359,26 +1330,26 @@ class TestStorage(StorageFunctionalTestCase): bsos = [{"id": str(i).zfill(2), "ttl": 10} for i in range(3, 7)] bsos[0]["payload"] = "xx" r = self.retry_post_json(self.root + "/storage/xxx_col2", bsos) - self.assertEquals(len(r.json["success"]), 4) + self.assertEqual(len(r.json["success"]), 4) ts2 = float(r.headers["X-Last-Modified"]) # If we wait then items 0, 1, 2 should have expired. # Items 3, 4, 5, 6 should still exist. time.sleep(0.8) items = self.app.get(self.root + "/storage/xxx_col2?full=1").json items = dict((item["id"], item) for item in items) - self.assertEquals(sorted(list(items.keys())), ["03", "04", "05", "06"]) + self.assertEqual(sorted(list(items.keys())), ["03", "04", "05", "06"]) # Items 3 and 4 should have the specified payloads. # Items 5 and 6 should have payload defaulted to empty string. 
- self.assertEquals(items["03"]["payload"], "xx") - self.assertEquals(items["04"]["payload"], "x") - self.assertEquals(items["05"]["payload"], "") - self.assertEquals(items["06"]["payload"], "") + self.assertEqual(items["03"]["payload"], "xx") + self.assertEqual(items["04"]["payload"], "x") + self.assertEqual(items["05"]["payload"], "") + self.assertEqual(items["06"]["payload"], "") # All items created or modified by the request should get their # timestamps update. Just bumping the ttl should not bump timestamp. - self.assertEquals(items["03"]["modified"], ts2) - self.assertEquals(items["04"]["modified"], ts1) - self.assertEquals(items["05"]["modified"], ts2) - self.assertEquals(items["06"]["modified"], ts2) + self.assertEqual(items["03"]["modified"], ts2) + self.assertEqual(items["04"]["modified"], ts1) + self.assertEqual(items["05"]["modified"], ts2) + self.assertEqual(items["06"]["modified"], ts2) def test_that_negative_integer_fields_are_not_accepted(self): # ttls cannot be negative @@ -1430,18 +1401,18 @@ class TestStorage(StorageFunctionalTestCase): # in the base tests because there's nothing memcached-specific here. self.app.get(self.root + "/storage/meta/global", status=404) res = self.app.get(self.root + "/storage/meta") - self.assertEquals(res.json, []) + self.assertEqual(res.json, []) self.retry_put_json( self.root + "/storage/meta/global", {"payload": "blob"} ) res = self.app.get(self.root + "/storage/meta") - self.assertEquals(res.json, ["global"]) + self.assertEqual(res.json, ["global"]) res = self.app.get(self.root + "/storage/meta/global") - self.assertEquals(res.json["payload"], "blob") + self.assertEqual(res.json["payload"], "blob") # It should not have extra keys. keys = list(res.json.keys()) keys.sort() - self.assertEquals(keys, ["id", "modified", "payload"]) + self.assertEqual(keys, ["id", "modified", "payload"]) # It should have a properly-formatted "modified" field. 
modified_re = r"['\"]modified['\"]:\s*[0-9]+\.[0-9][0-9]\s*[,}]" self.assertTrue(re.search(modified_re, res.body.decode("utf-8"))) @@ -1452,12 +1423,12 @@ class TestStorage(StorageFunctionalTestCase): ) ts = float(res.headers["X-Weave-Timestamp"]) res = self.app.get(self.root + "/storage/meta/global") - self.assertEquals(res.json["modified"], ts) + self.assertEqual(res.json["modified"], ts) def test_that_404_responses_have_a_json_body(self): res = self.app.get(self.root + "/nonexistent/url", status=404) - self.assertEquals(res.content_type, "application/json") - self.assertEquals(res.json, 0) + self.assertEqual(res.content_type, "application/json") + self.assertEqual(res.json, 0) def test_that_internal_server_fields_are_not_echoed(self): self.retry_post_json( @@ -1467,7 +1438,7 @@ class TestStorage(StorageFunctionalTestCase): self.root + "/storage/xxx_col1/two", {"payload": "blub"} ) res = self.app.get(self.root + "/storage/xxx_col1?full=1") - self.assertEquals(len(res.json), 2) + self.assertEqual(len(res.json), 2) for item in res.json: self.assertTrue("id" in item) self.assertTrue("payload" in item) @@ -1484,7 +1455,7 @@ class TestStorage(StorageFunctionalTestCase): # This can't be run against a live server because we # have to forge an auth token to test things properly. if self.distant: - raise unittest.SkipTest + pytest.skip("Test cannot be run against a live server.") # Write some items while we've got a good token. bsos = [{"id": str(i).zfill(2), "payload": "xxx"} for i in range(3)] @@ -1493,8 +1464,8 @@ class TestStorage(StorageFunctionalTestCase): # Check that we can read the info correctly. resp = self.app.get(self.root + "/info/collections") - self.assertEquals(list(resp.json.keys()), ["xxx_col1"]) - self.assertEquals(resp.json["xxx_col1"], ts) + self.assertEqual(list(resp.json.keys()), ["xxx_col1"]) + self.assertEqual(resp.json["xxx_col1"], ts) # Forge an expired token to use for the test. 
auth_policy = self.config.registry.getUtility(IAuthenticationPolicy) @@ -1519,8 +1490,8 @@ class TestStorage(StorageFunctionalTestCase): # But it still allows access to /info/collections. resp = self.app.get(self.root + "/info/collections") - self.assertEquals(list(resp.json.keys()), ["xxx_col1"]) - self.assertEquals(resp.json["xxx_col1"], ts) + self.assertEqual(list(resp.json.keys()), ["xxx_col1"]) + self.assertEqual(resp.json["xxx_col1"], ts) def test_pagination_with_newer_and_sort_by_oldest(self): # Twelve bsos with three different modification times. @@ -1564,7 +1535,7 @@ class TestStorage(StorageFunctionalTestCase): # They should all be in order, starting from the item # *after* the one that was used for the newer= timestamp. - self.assertEquals( + self.assertEqual( sorted(int(item["id"]) for item in items), list(range(start + 1, NUM_ITEMS)), ) @@ -1611,7 +1582,7 @@ class TestStorage(StorageFunctionalTestCase): # They should all be in order, up to the item *before* # the one that was used for the older= timestamp. - self.assertEquals( + self.assertEqual( sorted(int(item["id"]) for item in items), list(range(0, start)), ) @@ -1642,15 +1613,15 @@ class TestStorage(StorageFunctionalTestCase): batch = resp.json["batch"] # The collection should not be reported as modified. - self.assertEquals(orig_modified, resp.headers["X-Last-Modified"]) + self.assertEqual(orig_modified, resp.headers["X-Last-Modified"]) # And reading from it shouldn't show the new records yet. 
resp = self.app.get(endpoint) res = resp.json res.sort() - self.assertEquals(res, ["12", "13"]) - self.assertEquals(int(resp.headers["X-Weave-Records"]), 2) - self.assertEquals(orig_modified, resp.headers["X-Last-Modified"]) + self.assertEqual(res, ["12", "13"]) + self.assertEqual(int(resp.headers["X-Weave-Records"]), 2) + self.assertEqual(orig_modified, resp.headers["X-Last-Modified"]) bso5 = {"id": "c", "payload": "tinsel"} bso6 = {"id": "13", "payload": "portnoy"} @@ -1658,31 +1629,31 @@ class TestStorage(StorageFunctionalTestCase): commit = "?batch={0}&commit=true".format(batch) resp = self.retry_post_json(endpoint + commit, [bso5, bso6, bso0]) committed = resp.json["modified"] - self.assertEquals( + self.assertEqual( resp.json["modified"], float(resp.headers["X-Last-Modified"]) ) # make sure /info/collections got updated resp = self.app.get(self.root + "/info/collections") - self.assertEquals(float(resp.headers["X-Last-Modified"]), committed) - self.assertEquals(resp.json["xxx_col2"], committed) + self.assertEqual(float(resp.headers["X-Last-Modified"]), committed) + self.assertEqual(resp.json["xxx_col2"], committed) # make sure the changes applied resp = self.app.get(endpoint) res = resp.json res.sort() - self.assertEquals(res, ["12", "13", "14", "a", "b", "c"]) - self.assertEquals(int(resp.headers["X-Weave-Records"]), 6) + self.assertEqual(res, ["12", "13", "14", "a", "b", "c"]) + self.assertEqual(int(resp.headers["X-Weave-Records"]), 6) resp = self.app.get(endpoint + "/13") - self.assertEquals(resp.json["payload"], "portnoy") - self.assertEquals(committed, float(resp.headers["X-Last-Modified"])) - self.assertEquals(committed, resp.json["modified"]) + self.assertEqual(resp.json["payload"], "portnoy") + self.assertEqual(committed, float(resp.headers["X-Last-Modified"])) + self.assertEqual(committed, resp.json["modified"]) resp = self.app.get(endpoint + "/c") - self.assertEquals(resp.json["payload"], "tinsel") - self.assertEquals(committed, 
resp.json["modified"]) + self.assertEqual(resp.json["payload"], "tinsel") + self.assertEqual(committed, resp.json["modified"]) resp = self.app.get(endpoint + "/14") - self.assertEquals(resp.json["payload"], "itsybitsy") - self.assertEquals(committed, resp.json["modified"]) + self.assertEqual(resp.json["payload"], "itsybitsy") + self.assertEqual(committed, resp.json["modified"]) # empty commit POST bso7 = {"id": "a", "payload": "burrito"} @@ -1694,15 +1665,15 @@ class TestStorage(StorageFunctionalTestCase): resp1 = self.retry_post_json(endpoint + commit, []) committed = resp1.json["modified"] - self.assertEquals(committed, float(resp1.headers["X-Last-Modified"])) + self.assertEqual(committed, float(resp1.headers["X-Last-Modified"])) resp2 = self.app.get(endpoint + "/a") - self.assertEquals(committed, float(resp2.headers["X-Last-Modified"])) - self.assertEquals(committed, resp2.json["modified"]) - self.assertEquals(resp2.json["payload"], "burrito") + self.assertEqual(committed, float(resp2.headers["X-Last-Modified"])) + self.assertEqual(committed, resp2.json["modified"]) + self.assertEqual(resp2.json["payload"], "burrito") resp3 = self.app.get(endpoint + "/e") - self.assertEquals(committed, resp3.json["modified"]) + self.assertEqual(committed, resp3.json["modified"]) def test_aaa_batch_commit_collision(self): # It's possible that a batch contain a BSO inside a batch as well @@ -1775,7 +1746,7 @@ class TestStorage(StorageFunctionalTestCase): # res = self.retry_post_json(endpoint, [], headers={ # 'X-Weave-Records': str(limits['max_post_records'] + 1) # }, status=400) -# self.assertEquals(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) +# self.assertEqual(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) # # bsos = [{'id': str(x), 'payload': ''} # for x in range(limits['max_post_records'])] @@ -1783,7 +1754,7 @@ class TestStorage(StorageFunctionalTestCase): # self.assertFalse(res.json['failed']) # bsos.append({'id': 'toomany', 'payload': ''}) # res = self.retry_post_json(endpoint, bsos) -# 
self.assertEquals(res.json['failed']['toomany'], 'retry bso') +# self.assertEqual(res.json['failed']['toomany'], 'retry bso') # # # `max_total_records` is an (inclusive) limit on the # # total number of items in a batch. We can only enforce @@ -1795,7 +1766,7 @@ class TestStorage(StorageFunctionalTestCase): # res = self.retry_post_json(endpoint, [], headers={ # 'X-Weave-Total-Records': str(limits['max_total_records'] + 1) # }, status=400) -# self.assertEquals(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) +# self.assertEqual(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) # # # `max_post_bytes` is an (inclusive) limit on the # # total size of payloads in a single post. @@ -1806,7 +1777,7 @@ class TestStorage(StorageFunctionalTestCase): # res = self.retry_post_json(endpoint, [], headers={ # 'X-Weave-Bytes': str(limits['max_post_bytes'] + 1) # }, status=400) -# self.assertEquals(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) +# self.assertEqual(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) bsos = [ {"id": "little", "payload": "XXX"}, {"id": "big", "payload": "X" * (limits["max_post_bytes"] - 3)}, @@ -1835,7 +1806,7 @@ class TestStorage(StorageFunctionalTestCase): }, status=400, ) - self.assertEquals(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) + self.assertEqual(res.json, WEAVE_SIZE_LIMIT_EXCEEDED) def test_batch_partial_update(self): collection = self.root + "/storage/xxx_col2" @@ -1853,18 +1824,18 @@ class TestStorage(StorageFunctionalTestCase): ] resp = self.retry_post_json(collection + "?batch=true", bsos) batch = resp.json["batch"] - self.assertEquals(orig_ts, float(resp.headers["X-Last-Modified"])) + self.assertEqual(orig_ts, float(resp.headers["X-Last-Modified"])) # The updated item hasn't been written yet. 
resp = self.app.get(collection + "?full=1") res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 2) - self.assertEquals(res[0]["payload"], "aai") - self.assertEquals(res[1]["payload"], "bee") - self.assertEquals(res[0]["modified"], orig_ts) - self.assertEquals(res[1]["modified"], orig_ts) - self.assertEquals(res[1]["sortindex"], 17) + self.assertEqual(len(res), 2) + self.assertEqual(res[0]["payload"], "aai") + self.assertEqual(res[1]["payload"], "bee") + self.assertEqual(res[0]["modified"], orig_ts) + self.assertEqual(res[1]["modified"], orig_ts) + self.assertEqual(res[1]["sortindex"], 17) endpoint = collection + "?batch={0}&commit=true".format(batch) resp = self.retry_post_json(endpoint, []) @@ -1874,16 +1845,16 @@ class TestStorage(StorageFunctionalTestCase): resp = self.app.get(collection + "?full=1") res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 3) - self.assertEquals(res[0]["payload"], "aai") - self.assertEquals(res[1]["payload"], "bii") - self.assertEquals(res[2]["payload"], "sea") - self.assertEquals(res[0]["modified"], orig_ts) - self.assertEquals(res[1]["modified"], commit_ts) - self.assertEquals(res[2]["modified"], commit_ts) + self.assertEqual(len(res), 3) + self.assertEqual(res[0]["payload"], "aai") + self.assertEqual(res[1]["payload"], "bii") + self.assertEqual(res[2]["payload"], "sea") + self.assertEqual(res[0]["modified"], orig_ts) + self.assertEqual(res[1]["modified"], commit_ts) + self.assertEqual(res[2]["modified"], commit_ts) # Fields not touched by the batch, should have been preserved. 
- self.assertEquals(res[1]["sortindex"], 17) + self.assertEqual(res[1]["sortindex"], 17) def test_batch_ttl_update(self): collection = self.root + "/storage/xxx_col2" @@ -1903,28 +1874,28 @@ class TestStorage(StorageFunctionalTestCase): resp = self.retry_post_json( endpoint, [{"id": "a", "ttl": 2}], status=202 ) - self.assertEquals(orig_ts, float(resp.headers["X-Last-Modified"])) + self.assertEqual(orig_ts, float(resp.headers["X-Last-Modified"])) resp = self.retry_post_json( endpoint, [{"id": "b", "ttl": 2}], status=202 ) - self.assertEquals(orig_ts, float(resp.headers["X-Last-Modified"])) + self.assertEqual(orig_ts, float(resp.headers["X-Last-Modified"])) resp = self.retry_post_json(endpoint + "&commit=true", [], status=200) # The payloads should be unchanged resp = self.app.get(collection + "?full=1") res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 3) - self.assertEquals(res[0]["payload"], "ayy") - self.assertEquals(res[1]["payload"], "bea") - self.assertEquals(res[2]["payload"], "see") + self.assertEqual(len(res), 3) + self.assertEqual(res[0]["payload"], "ayy") + self.assertEqual(res[1]["payload"], "bea") + self.assertEqual(res[2]["payload"], "see") # If we wait, the ttls should kick in time.sleep(2.1) resp = self.app.get(collection + "?full=1") res = resp.json - self.assertEquals(len(res), 1) - self.assertEquals(res[0]["payload"], "see") + self.assertEqual(len(res), 1) + self.assertEqual(res[0]["payload"], "see") def test_batch_ttl_is_based_on_commit_timestamp(self): collection = self.root + "/storage/xxx_col2" @@ -1946,14 +1917,14 @@ class TestStorage(StorageFunctionalTestCase): time.sleep(1.6) resp = self.app.get(collection) res = resp.json - self.assertEquals(len(res), 1) - self.assertEquals(res[0], "a") + self.assertEqual(len(res), 1) + self.assertEqual(res[0], "a") # Wait some more, and the ttl should kick in. 
time.sleep(1.6) resp = self.app.get(collection) res = resp.json - self.assertEquals(len(res), 0) + self.assertEqual(len(res), 0) def test_batch_with_immediate_commit(self): collection = self.root + "/storage/xxx_col2" @@ -1971,17 +1942,17 @@ class TestStorage(StorageFunctionalTestCase): committed = resp.json["modified"] resp = self.app.get(self.root + "/info/collections") - self.assertEquals(float(resp.headers["X-Last-Modified"]), committed) - self.assertEquals(resp.json["xxx_col2"], committed) + self.assertEqual(float(resp.headers["X-Last-Modified"]), committed) + self.assertEqual(resp.json["xxx_col2"], committed) resp = self.app.get(collection + "?full=1") - self.assertEquals(float(resp.headers["X-Last-Modified"]), committed) + self.assertEqual(float(resp.headers["X-Last-Modified"]), committed) res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 3) - self.assertEquals(res[0]["payload"], "aih") - self.assertEquals(res[1]["payload"], "bie") - self.assertEquals(res[2]["payload"], "cee") + self.assertEqual(len(res), 3) + self.assertEqual(res[0]["payload"], "aih") + self.assertEqual(res[1]["payload"], "bie") + self.assertEqual(res[2]["payload"], "cee") def test_batch_uploads_properly_update_info_collections(self): collection1 = self.root + "/storage/xxx_col1" @@ -1999,9 +1970,9 @@ class TestStorage(StorageFunctionalTestCase): ts2 = resp.json["modified"] resp = self.app.get(self.root + "/info/collections") - self.assertEquals(float(resp.headers["X-Last-Modified"]), ts2) - self.assertEquals(resp.json["xxx_col1"], ts1) - self.assertEquals(resp.json["xxx_col2"], ts2) + self.assertEqual(float(resp.headers["X-Last-Modified"]), ts2) + self.assertEqual(resp.json["xxx_col1"], ts1) + self.assertEqual(resp.json["xxx_col2"], ts2) # Overwrite in place, timestamp should change. 
resp = self.retry_post_json( @@ -2011,9 +1982,9 @@ class TestStorage(StorageFunctionalTestCase): ts2 = resp.json["modified"] resp = self.app.get(self.root + "/info/collections") - self.assertEquals(float(resp.headers["X-Last-Modified"]), ts2) - self.assertEquals(resp.json["xxx_col1"], ts1) - self.assertEquals(resp.json["xxx_col2"], ts2) + self.assertEqual(float(resp.headers["X-Last-Modified"]), ts2) + self.assertEqual(resp.json["xxx_col1"], ts1) + self.assertEqual(resp.json["xxx_col2"], ts2) # Add new items, timestamp should change resp = self.retry_post_json( @@ -2025,9 +1996,9 @@ class TestStorage(StorageFunctionalTestCase): ts1 = resp.json["modified"] resp = self.app.get(self.root + "/info/collections") - self.assertEquals(float(resp.headers["X-Last-Modified"]), ts1) - self.assertEquals(resp.json["xxx_col1"], ts1) - self.assertEquals(resp.json["xxx_col2"], ts2) + self.assertEqual(float(resp.headers["X-Last-Modified"]), ts1) + self.assertEqual(resp.json["xxx_col1"], ts1) + self.assertEqual(resp.json["xxx_col2"], ts2) def test_batch_with_failing_bsos(self): collection = self.root + "/storage/xxx_col2" @@ -2056,9 +2027,9 @@ class TestStorage(StorageFunctionalTestCase): resp = self.app.get(collection + "?full=1") res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 2) - self.assertEquals(res[0]["payload"], "aai") - self.assertEquals(res[1]["payload"], "sea") + self.assertEqual(len(res), 2) + self.assertEqual(res[0]["payload"], "aai") + self.assertEqual(res[1]["payload"], "sea") def test_batch_id_is_correctly_scoped_to_a_collection(self): collection1 = self.root + "/storage/xxx_col1" @@ -2087,11 +2058,11 @@ class TestStorage(StorageFunctionalTestCase): resp = self.app.get(collection1 + "?full=1") res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 4) - self.assertEquals(res[0]["payload"], "aih") - self.assertEquals(res[1]["payload"], "bie") - self.assertEquals(res[2]["payload"], "cee") - 
self.assertEquals(res[3]["payload"], "dii") + self.assertEqual(len(res), 4) + self.assertEqual(res[0]["payload"], "aih") + self.assertEqual(res[1]["payload"], "bie") + self.assertEqual(res[2]["payload"], "cee") + self.assertEqual(res[3]["payload"], "dii") def test_users_with_the_same_batch_id_get_separate_data(self): # Try to generate two users with the same batch-id. @@ -2111,19 +2082,19 @@ class TestStorage(StorageFunctionalTestCase): self.retry_post_json(self.root + req, []) # It should only have a single item. resp = self.app.get(self.root + "/storage/xxx_col1") - self.assertEquals(resp.json, ["b"]) + self.assertEqual(resp.json, ["b"]) # The first user's collection should still be empty. # Now have the first user commit their batch. req = "/storage/xxx_col1?batch={0}&commit=true".format(batch1) self.retry_post_json(self.root + req, []) # It should only have a single item. resp = self.app.get(self.root + "/storage/xxx_col1") - self.assertEquals(resp.json, ["a"]) + self.assertEqual(resp.json, ["a"]) # If we didn't make a conflict, try again. if batch1 == batch2: break else: - raise unittest.SkipTest("failed to generate conflicting batchid") + pytest.skip("failed to generate conflicting batchid") def test_that_we_dont_resurrect_committed_batches(self): # This retry loop tries to trigger a situation where we: @@ -2147,11 +2118,12 @@ class TestStorage(StorageFunctionalTestCase): if batch1 == batch2: break else: - raise unittest.SkipTest("failed to trigger re-use of batchid") + pytest.skip("failed to trigger re-use of batchid") + # Despite having the same batchid, the second batch should # be completely independent of the first. 
resp = self.app.get(self.root + "/storage/xxx_col2") - self.assertEquals(resp.json, ["j"]) + self.assertEqual(resp.json, ["j"]) def test_batch_id_is_correctly_scoped_to_a_user(self): collection = self.root + "/storage/xxx_col1" @@ -2183,11 +2155,11 @@ class TestStorage(StorageFunctionalTestCase): resp = self.app.get(collection + "?full=1") res = resp.json res.sort(key=lambda bso: bso["id"]) - self.assertEquals(len(res), 4) - self.assertEquals(res[0]["payload"], "aih") - self.assertEquals(res[1]["payload"], "bie") - self.assertEquals(res[2]["payload"], "cee") - self.assertEquals(res[3]["payload"], "di") + self.assertEqual(len(res), 4) + self.assertEqual(res[0]["payload"], "aih") + self.assertEqual(res[1]["payload"], "bie") + self.assertEqual(res[2]["payload"], "cee") + self.assertEqual(res[3]["payload"], "di") # bug 1332552 make sure ttl:null use the default ttl def test_create_bso_with_null_ttl(self): @@ -2195,7 +2167,7 @@ class TestStorage(StorageFunctionalTestCase): self.retry_put_json(self.root + "/storage/xxx_col2/TEST1", bso) time.sleep(0.1) res = self.app.get(self.root + "/storage/xxx_col2/TEST1?full=1") - self.assertEquals(res.json["payload"], "x") + self.assertEqual(res.json["payload"], "x") def test_rejection_of_known_bad_payloads(self): bso = { @@ -2230,8 +2202,8 @@ class TestStorage(StorageFunctionalTestCase): res = self.retry_post_json( self.root + "/storage/xxx_col?batch=true", bsos ) - self.assertEquals(len(res.json["success"]), 5) - self.assertEquals(len(res.json["failed"]), 0) + self.assertEqual(len(res.json["success"]), 5) + self.assertEqual(len(res.json["failed"]), 0) batch = res.json["batch"] self.app.post( self.root + "/storage/xxx_col?commit=true&batch=" + batch, diff --git a/tools/integration_tests/test_support.py b/tools/integration_tests/test_support.py index d8f45cf3..0fb851ac 100644 --- a/tools/integration_tests/test_support.py +++ b/tools/integration_tests/test_support.py @@ -9,7 +9,6 @@ import functools from konfig import Config, 
SettingsDict import hawkauthlib import os -import optparse from pyramid.authorization import ACLAuthorizationPolicy from pyramid.config import Configurator from pyramid.interfaces import IAuthenticationPolicy @@ -32,7 +31,6 @@ from webtest import TestApp from zope.interface import implementer -global_secret = None VALID_FXA_ID_REGEX = re.compile("^[A-Za-z0-9=\\-_]{1,64}$") @@ -357,7 +355,6 @@ class FunctionalTestCase(TestCase): # This call implicitly commits the configurator. We probably still # want it for the side effects. self.config.make_wsgi_app() - host_url = urlparse.urlparse(self.host_url) self.app = TestApp( self.host_url, @@ -397,6 +394,7 @@ class StorageFunctionalTestCase(FunctionalTestCase, StorageTestCase): def _authenticate(self): policy = self.config.registry.getUtility(IAuthenticationPolicy) + global_secret = os.environ.get("SYNC_MASTER_SECRET") if global_secret is not None: policy.secrets._secrets = [global_secret] self.user_id = random.randint(1, 100000) @@ -801,90 +799,3 @@ class SyncStorageAuthenticationPolicy(TokenServerAuthenticationPolicy): raise ValueError("invalid device_id in token data") """ return user - - -def run_live_functional_tests(TestCaseClass, argv=None): - """Execute the given suite of testcases against a live server.""" - if argv is None: - argv = sys.argv - - # This will only work using a StorageFunctionalTestCase subclass, - # since we override the _authenticate() method. 
- assert issubclass(TestCaseClass, StorageFunctionalTestCase) - - usage = "Usage: %prog [options] " - parser = optparse.OptionParser(usage=usage) - parser.add_option( - "-x", - "--failfast", - action="store_true", - help="stop after the first failed test", - ) - parser.add_option( - "", - "--config-file", - help="name of the config file in use by the server", - ) - parser.add_option( - "", - "--use-token-server", - action="store_true", - help="the given URL is a tokenserver, not an endpoint", - ) - parser.add_option( - "", "--email", help="email address to use for tokenserver tests" - ) - parser.add_option( - "", - "--audience", - help="assertion audience to use for tokenserver tests", - ) - - try: - opts, args = parser.parse_args(argv) - except SystemExit as e: - return e.args[0] - if len(args) != 2: - parser.print_usage() - return 2 - - url = args[1] - if opts.config_file is not None: - os.environ["MOZSVC_TEST_INI_FILE"] = opts.config_file - - # If we're not using the tokenserver, the default implementation of - # _authenticate will do just fine. We optionally accept the token - # signing secret in the url hash fragement. - if opts.email is not None: - msg = "cant specify email address unless using live tokenserver" - raise ValueError(msg) - if opts.audience is not None: - msg = "cant specify audience unless using live tokenserver" - raise ValueError(msg) - host_url = urlparse.urlparse(url) - if host_url.fragment: - global global_secret - global_secret = host_url.fragment - host_url = host_url._replace(fragment="") - os.environ["MOZSVC_TEST_REMOTE"] = "localhost" - - # Now use the unittest2 runner to execute them. 
- suite = unittest.TestSuite() - import test_storage - - test_prefix = os.environ.get("SYNC_TEST_PREFIX", "test") - suite.addTest(unittest.findTestCases(test_storage, test_prefix)) - # suite.addTest(unittest.makeSuite(LiveTestCases, prefix=test_prefix)) - runner = unittest.TextTestRunner( - stream=sys.stderr, - failfast=opts.failfast, - verbosity=2, - ) - res = runner.run(suite) - if not res.wasSuccessful(): - return 1 - return 0 - - -# Tell over-zealous test discovery frameworks that this isn't a real test. -run_live_functional_tests.__test__ = False diff --git a/tools/integration_tests/tokenserver/run.py b/tools/integration_tests/tokenserver/run.py deleted file mode 100644 index 1799c976..00000000 --- a/tools/integration_tests/tokenserver/run.py +++ /dev/null @@ -1,35 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this file, -# You can obtain one at http://mozilla.org/MPL/2.0/. -import unittest - -from tokenserver.test_authorization import TestAuthorization -from tokenserver.test_e2e import TestE2e -from tokenserver.test_misc import TestMisc -from tokenserver.test_node_assignment import TestNodeAssignment - - -def run_local_tests(): - test_classes = [TestAuthorization, TestMisc, TestNodeAssignment] - - return run_tests(test_classes) - - -def run_end_to_end_tests(verbosity=1): - return run_tests([TestE2e], verbosity=verbosity) - - -def run_tests(test_cases, verbosity=1): - loader = unittest.TestLoader() - success = True - - for test_case in test_cases: - suite = loader.loadTestsFromTestCase(test_case) - runner = unittest.TextTestRunner(verbosity=verbosity) - res = runner.run(suite) - success = success and res.wasSuccessful() - - if success: - return 0 - else: - return 1 diff --git a/tools/integration_tests/tokenserver/test_authorization.py b/tools/integration_tests/tokenserver/test_authorization.py index 4cf1370f..ecd8fc5c 100644 --- 
a/tools/integration_tests/tokenserver/test_authorization.py +++ b/tools/integration_tests/tokenserver/test_authorization.py @@ -1,10 +1,12 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. +import pytest import unittest from tokenserver.test_support import TestCase +@pytest.mark.usefixtures('setup_server_local_testing_with_oauth') class TestAuthorization(TestCase, unittest.TestCase): def setUp(self): super(TestAuthorization, self).setUp() @@ -370,15 +372,15 @@ class TestAuthorization(TestCase, unittest.TestCase): client_state='aaaa') # It's ok to request a shorter-duration token. res = self.app.get('/1.0/sync/1.5?duration=12', headers=headers) - self.assertEquals(res.json['duration'], 12) + self.assertEqual(res.json['duration'], 12) # But you can't exceed the server's default value. res = self.app.get('/1.0/sync/1.5?duration=4000', headers=headers) - self.assertEquals(res.json['duration'], 3600) + self.assertEqual(res.json['duration'], 3600) # And nonsense values are ignored. res = self.app.get('/1.0/sync/1.5?duration=lolwut', headers=headers) - self.assertEquals(res.json['duration'], 3600) + self.assertEqual(res.json['duration'], 3600) res = self.app.get('/1.0/sync/1.5?duration=-1', headers=headers) - self.assertEquals(res.json['duration'], 3600) + self.assertEqual(res.json['duration'], 3600) # Although all servers are now writing keys_changed_at, we still need this # case to be handled. 
See this PR for more information: diff --git a/tools/integration_tests/tokenserver/test_e2e.py b/tools/integration_tests/tokenserver/test_e2e.py index 3c6b1638..85d68b4d 100644 --- a/tools/integration_tests/tokenserver/test_e2e.py +++ b/tools/integration_tests/tokenserver/test_e2e.py @@ -5,6 +5,7 @@ from base64 import urlsafe_b64decode import hmac import json import jwt +import pytest import random import string import time @@ -33,6 +34,7 @@ PASSWORD_LENGTH = 32 SCOPE = 'https://identity.mozilla.com/apps/oldsync' +@pytest.mark.usefixtures('setup_server_end_to_end_testing') class TestE2e(TestCase, unittest.TestCase): def setUp(self): diff --git a/tools/integration_tests/tokenserver/test_misc.py b/tools/integration_tests/tokenserver/test_misc.py index ff046c58..96eb641e 100644 --- a/tools/integration_tests/tokenserver/test_misc.py +++ b/tools/integration_tests/tokenserver/test_misc.py @@ -1,6 +1,7 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. 
+import pytest import unittest from tokenserver.test_support import TestCase @@ -8,6 +9,7 @@ from tokenserver.test_support import TestCase MAX_GENERATION = 9223372036854775807 +@pytest.mark.usefixtures('setup_server_local_testing_with_oauth') class TestMisc(TestCase, unittest.TestCase): def setUp(self): super(TestMisc, self).setUp() @@ -57,7 +59,7 @@ class TestMisc(TestCase, unittest.TestCase): res = self.app.get('/1.0/sync/1.5', headers=headers) self.assertIn('https://example.com/1.5', res.json['api_endpoint']) self.assertIn('duration', res.json) - self.assertEquals(res.json['duration'], 3600) + self.assertEqual(res.json['duration'], 3600) def test_current_user_is_the_most_up_to_date(self): # Add some users diff --git a/tools/integration_tests/tokenserver/test_node_assignment.py b/tools/integration_tests/tokenserver/test_node_assignment.py index c8b41326..9ab621aa 100644 --- a/tools/integration_tests/tokenserver/test_node_assignment.py +++ b/tools/integration_tests/tokenserver/test_node_assignment.py @@ -1,11 +1,13 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. +import pytest import unittest from tokenserver.test_support import TestCase +@pytest.mark.usefixtures('setup_server_local_testing_with_oauth') class TestNodeAssignment(TestCase, unittest.TestCase): def setUp(self): super(TestNodeAssignment, self).setUp()