chore: upload test artifacts to gcs

* chore: upload test results to gcs

* chore: use specific ete variables for gcp upload

* chore: override google_project_number to blank

* chore: use actual env var

* style: fixes flake8 E302 error

* docs: add documentation for new circleci vars

Closes SYNC-4584
This commit is contained in:
Nick Shirley 2025-03-25 16:33:49 -06:00 committed by GitHub
parent d5452ad51c
commit aeedcf1e19
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 56 additions and 1 deletions

View File

@@ -4,7 +4,17 @@
# DOCKER_USER - login info for docker hub
# DOCKER_PASS
#
# To avoid collision with other GCP connections we create specific vars for
# the ETE Test Pipeline.
# ETE_GOOGLE_PROJECT_ID - GCP Project ID for ecosystem-test-eng
# ETE_GCLOUD_SERVICE_KEY - syncstorage-specific GCP service account JSON key
# ETE_GOOGLE_PROJECT_NUMBER - GCP Project Number for ecosystem-test-eng
#
version: 2.1
orbs:
gcp-cli: circleci/gcp-cli@3.3.1
commands:
display-versions:
steps:
@@ -163,6 +173,31 @@ commands:
environment:
SYNCSTORAGE_RS_IMAGE: app:build
upload-to-gcs:
parameters:
source:
type: string
destination:
type: string
extension:
type: enum
enum: ["xml", "json"]
steps:
- run:
name: Upload << parameters.source >> << parameters.extension >> Files to GCS
when: always # Ensure the step runs even if previous steps, like test runs, fail
command: |
if [ "$CIRCLE_BRANCH" = "master" ]; then
FILES=$(ls -1 << parameters.source>>/*.<< parameters.extension>> )
if [ -z "$FILES" ]; then
echo "No << parameters.extension >> files found in << parameters.source >>/"
exit 1
fi
gsutil cp $FILES << parameters.destination >>
else
echo "Skipping artifact upload, not on 'master' branch."
fi
setup-sccache:
steps:
- run:
@@ -227,6 +262,10 @@ jobs:
MYSQL_DATABASE: syncstorage
resource_class: large
steps:
- gcp-cli/setup:
google_project_id: ETE_GOOGLE_PROJECT_ID
gcloud_service_key: ETE_GCLOUD_SERVICE_KEY
google_project_number: ETE_GOOGLE_PROJECT_NUMBER
- checkout
- display-versions
- setup-python
@@ -245,6 +284,14 @@
# configured already, and it appears unit-tests modify the db to the expected state
- run-tokenserver-integration-tests
- store-test-results
- upload-to-gcs:
source: workflow/test-results
destination: gs://ecosystem-test-eng-metrics/syncstorage-rs/junit
extension: xml
- upload-to-gcs:
source: workflow/test-results
destination: gs://ecosystem-test-eng-metrics/syncstorage-rs/coverage
extension: json
#- save-sccache-cache
build-mysql-image:
docker:

View File

@@ -0,0 +1,6 @@
# Pytest configuration for this test suite.
[pytest]
# Register the custom marker so pytest does not warn about (or, under
# --strict-markers, reject) tests tagged with it.
markers =
    migration_records: mark a test as a migration records test
# Deselect migration-records tests by default; pass an explicit `-m`
# expression on the command line to run them (CLI args come after
# addopts, so they should take precedence — confirm with the runner).
addopts =
    -m 'not migration_records'

View File

@@ -43,7 +43,7 @@ class TestDatabase(unittest.TestCase):
def test_node_allocation(self):
user = self.database.get_user('test1@example.com')
self.assertEquals(user, None)
self.assertEqual(user, None)
user = self.database.allocate_user('test1@example.com')
wanted = 'https://phx12'

View File

@@ -1,6 +1,7 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
import hawkauthlib
import re
@@ -189,6 +190,7 @@ class TestPurgeOldRecords(PurgeOldRecordsTestCase):
self.assertEqual(len(self.service_requests), 0)
@pytest.mark.migration_records
class TestMigrationRecords(PurgeOldRecordsTestCase):
"""Test user records that were migrated from the old MySQL cluster of
syncstorage nodes to a single Spanner node