test: move db-tests back into the main crate (#465)

* test: move db-tests back into the main crate
* test: fixup db tests imports and remove db-test crate remnants
* test: kill the no longer needed db_test feature

Closes #410
This commit is contained in:
Philip Jenvey 2020-03-02 21:49:43 -08:00 committed by GitHub
parent ecfca9fdf5
commit f699085363
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 134 additions and 3731 deletions

View File

@ -35,7 +35,7 @@ commands:
- run:
name: Rust Clippy
command: |
cargo clippy --all --all-targets -- -D warnings
cargo clippy --all --all-targets --all-features -- -D warnings
cargo-build:
steps:
- run:
@ -75,23 +75,6 @@ commands:
- run:
name: cargo test
command: cargo test --all --verbose
- run:
name: Setup db-tests
command: rustup show # Should download the nightly in ./rust-toolchain
working_directory: db-tests
- run:
name: cargo build db-tests
command: cargo build
working_directory: db-tests
- run:
name: db-tests (mysql)
command: cargo test --all --verbose
working_directory: db-tests
- run:
# Clear the unneeded target/ so it's not sent to docker in the next step
name: db-tests cleanup
command: cargo clean
working_directory: db-tests
run-e2e-tests:
steps:

44
Cargo.lock generated
View File

@ -535,6 +535,14 @@ dependencies = [
"cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "codegen"
version = "0.1.0"
dependencies = [
"quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "config"
version = "0.9.3"
@ -1857,6 +1865,14 @@ name = "proc-macro-nested"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "proc-macro2"
version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro2"
version = "1.0.6"
@ -1887,6 +1903,14 @@ name = "quick-error"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "quote"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "1.0.2"
@ -2530,6 +2554,16 @@ name = "subtle"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "syn"
version = "0.15.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "1.0.11"
@ -2552,6 +2586,7 @@ dependencies = [
"bytes 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"cadence 0.19.1 (registry+https://github.com/rust-lang/crates.io-index)",
"chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
"codegen 0.1.0",
"config 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"diesel 1.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"diesel_logger 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2926,6 +2961,11 @@ name = "unicode-segmentation"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.2.0"
@ -3340,10 +3380,12 @@ dependencies = [
"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
"checksum proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5"
"checksum proc-macro-nested 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "369a6ed065f249a159e06c45752c780bda2fb53c995718f9e484d08daa9eb42e"
"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
"checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27"
"checksum protobuf 2.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f00e4a3cb64ecfeac2c0a73c74c68ae3439d7a6bead3870be56ad5dd2620a6f"
"checksum publicsuffix 1.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3bbaa49075179162b49acac1c6aa45fb4dafb5f13cf6794276d77bc7fd95757b"
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
"checksum r2d2 0.8.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1497e40855348e4a8a40767d8e55174bce1e445a3ac9254ad44ad468ee0485af"
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
@ -3414,6 +3456,7 @@ dependencies = [
"checksum string 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d24114bfcceb867ca7f71a0d3fe45d45619ec47a6fbfa98cb14e14250bfa5d6d"
"checksum strsim 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c"
"checksum subtle 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2d67a5a62ba6e01cb2192ff309324cb4875d0c451d55fe2319433abe7a05a8ee"
"checksum syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)" = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
"checksum syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "dff0acdb207ae2fe6d5976617f887eb1e35a2ba52c13c7234c790960cdad9238"
"checksum synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545"
"checksum take_mut 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
@ -3446,6 +3489,7 @@ dependencies = [
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
"checksum unicode-normalization 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b561e267b2326bb4cebfc0ef9e68355c7abe6c6f522aeac2f5bf95d56c59bdcf"
"checksum unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
"checksum untrusted 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "55cd1f4b4e96b46aeb8d4855db4a7a9bd96eeeb5c6a1ab54593328761642ce2f"
"checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a"

View File

@ -67,6 +67,8 @@ validator = "0.10"
validator_derive = "0.10"
woothee = "0.10"
[dev-dependencies]
codegen = { version = "0.1.0", path = "codegen" }
[features]
db_test = []
no_auth = []

View File

@ -32,4 +32,4 @@ run_spanner:
GOOGLE_APPLICATION_CREDENTIALS=$(PATH_TO_SYNC_SPANNER_KEYS) GRPC_DEFAULT_SSL_ROOTS_FILE_PATH=$(PATH_TO_GRPC_CERT) make run
test:
cd db-tests && SYNC_DATABASE_URL=$(SYNC_DATABASE_URL) RUST_TEST_THREADS=1 cargo test
SYNC_DATABASE_URL=$(SYNC_DATABASE_URL) RUST_TEST_THREADS=1 cargo test

3609
db-tests/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,15 +0,0 @@
[package]
name = "db-tests"
version = "0.1.0"
authors = ["Philip Jenvey <pjenvey@underboss.org>"]
edition = "2018"
[dependencies]
codegen = { version = "0.1.0", path = "../codegen" }
env_logger = "0.7.0"
futures-preview = { git = "https://github.com/rust-lang-nursery/futures-rs", rev = "744ece9", features = ["compat"] }
lazy_static = "1.4.0"
log="0.4"
rand = "0.7.2"
syncstorage = { path = "../", features = ["db_test"] }

View File

@ -1 +0,0 @@
stable

View File

@ -89,25 +89,25 @@ impl Db for MockDb {
Ok(())
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
mock_db_method!(get_collection_id, GetCollectionId);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
mock_db_method!(create_collection, CreateCollection);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
mock_db_method!(touch_collection, TouchCollection);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn timestamp(&self) -> SyncTimestamp {
Default::default()
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn set_timestamp(&self, _: SyncTimestamp) {}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
mock_db_method!(delete_batch, DeleteBatch);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn clear_coll_cache(&self) {}
}

View File

@ -6,6 +6,8 @@ pub mod mysql;
pub mod params;
pub mod results;
pub mod spanner;
#[cfg(test)]
mod tests;
pub mod util;
use std::fmt::Debug;
@ -200,25 +202,25 @@ pub trait Db: Send + Debug {
/// Internal methods used by the db tests
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn get_collection_id(&self, name: String) -> DbFuture<i32>;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn create_collection(&self, name: String) -> DbFuture<i32>;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn touch_collection(&self, params: params::TouchCollection) -> DbFuture<SyncTimestamp>;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn timestamp(&self) -> SyncTimestamp;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn set_timestamp(&self, timestamp: SyncTimestamp);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn delete_batch(&self, params: params::DeleteBatch) -> DbFuture<()>;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn clear_coll_cache(&self);
}

View File

@ -4,7 +4,7 @@ mod diesel_ext;
pub mod models;
pub mod pool;
mod schema;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
mod test;
pub use self::pool::MysqlDbPool;

View File

@ -15,7 +15,7 @@ use diesel::{
sql_types::{BigInt, Integer, Nullable, Text},
Connection, ExpressionMethods, GroupByDsl, OptionalExtension, QueryDsl, RunQueryDsl,
};
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
use diesel_logger::LoggingConnection;
use super::{
@ -92,9 +92,9 @@ pub struct MysqlDb {
unsafe impl Send for MysqlDb {}
pub struct MysqlDbInner {
#[cfg(not(any(test, feature = "db_test")))]
#[cfg(not(test))]
pub(super) conn: Conn,
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub(super) conn: LoggingConnection<Conn>,
session: RefCell<MysqlDbSession>,
@ -117,9 +117,9 @@ impl Deref for MysqlDb {
impl MysqlDb {
pub fn new(conn: Conn, coll_cache: Arc<CollectionCache>, metrics: &Metrics) -> Self {
let inner = MysqlDbInner {
#[cfg(not(any(test, feature = "db_test")))]
#[cfg(not(test))]
conn,
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
conn: LoggingConnection::new(conn),
session: RefCell::new(Default::default()),
};
@ -861,7 +861,7 @@ impl MysqlDb {
pub fn validate_batch_id(&self, id: String) -> Result<()> {
batch::validate_batch_id(&id)
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
batch_db_method!(delete_batch_sync, delete, DeleteBatch);
pub fn get_batch_sync(&self, params: params::GetBatch) -> Result<Option<results::GetBatch>> {
@ -965,19 +965,19 @@ impl Db for MysqlDb {
self.validate_batch_id(params)
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn get_collection_id(&self, name: String) -> DbFuture<i32> {
let db = self.clone();
Box::pin(block(move || db.get_collection_id(&name).map_err(Into::into)).map_err(Into::into))
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn create_collection(&self, name: String) -> DbFuture<i32> {
let db = self.clone();
Box::pin(block(move || db.create_collection(&name).map_err(Into::into)).map_err(Into::into))
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn touch_collection(&self, param: params::TouchCollection) -> DbFuture<SyncTimestamp> {
let db = self.clone();
Box::pin(
@ -989,20 +989,20 @@ impl Db for MysqlDb {
)
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn timestamp(&self) -> SyncTimestamp {
self.timestamp()
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn set_timestamp(&self, timestamp: SyncTimestamp) {
self.session.borrow_mut().timestamp = timestamp;
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
sync_db_method!(delete_batch, delete_batch_sync, DeleteBatch);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn clear_coll_cache(&self) {
self.coll_cache.clear();
}

View File

@ -15,7 +15,7 @@ use diesel::{
};
use super::models::{MysqlDb, Result};
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
use super::test::TestTransactionCustomizer;
use crate::db::{error::DbError, Db, DbFuture, DbPool, STD_COLLS};
use crate::server::metrics::Metrics;
@ -56,7 +56,7 @@ impl MysqlDbPool {
let manager = ConnectionManager::<MysqlConnection>::new(settings.database_url.clone());
let builder = Pool::builder().max_size(settings.database_pool_max_size.unwrap_or(10));
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
let builder = if settings.database_use_test_transactions {
builder.connection_customizer(Box::new(TestTransactionCustomizer))
} else {
@ -143,7 +143,7 @@ impl CollectionCache {
.cloned())
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub fn clear(&self) {
self.by_name.write().expect("by_name write").clear();
self.by_id.write().expect("by_id write").clear();

View File

@ -1,5 +1,3 @@
#![cfg_attr(feature = "db_test", allow(dead_code, unused_imports))]
use std::{collections::HashMap, result::Result as StdResult};
use diesel::{

View File

@ -136,13 +136,13 @@ impl From<BatchBsoBody> for PostCollectionBso {
}
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub type GetCollectionId = String;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub type CreateCollection = String;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
data! {
TouchCollection {
user_id: HawkIdentifier,

View File

@ -43,7 +43,7 @@ pub struct GetBso {
pub sortindex: Option<i32>,
// NOTE: expiry (ttl) is never rendered to clients and only loaded for
// tests: this and its associated queries/loading could be wrapped in
// #[cfg(any(test, feature = "db_test"))]
// #[cfg(test)]
#[serde(skip_serializing)]
#[serde(skip_deserializing)]
#[sql_type = "BigInt"]
@ -69,11 +69,11 @@ pub struct PostBsos {
pub failed: HashMap<String, String>,
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub type GetCollectionId = i32;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub type CreateCollection = i32;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub type TouchCollection = SyncTimestamp;

View File

@ -95,7 +95,7 @@ pub async fn append_async(db: &SpannerDb, params: params::AppendToBatch) -> Resu
)
.await?;
if !exists {
// NOTE: db_tests expects this but it doesn't seem necessary w/ the
// NOTE: db tests expect this but it doesn't seem necessary w/ the
// handler validating the batch before appends
Err(DbErrorKind::BatchNotFound)?
}

View File

@ -6,7 +6,7 @@ pub mod manager;
pub mod models;
pub mod pool;
mod support;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
mod test_util;
pub use self::pool::SpannerDbPool;

View File

@ -1,4 +1,4 @@
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
use actix_web::web::block;
use futures::compat::Future01CompatExt;
use futures::future::TryFutureExt;
@ -185,7 +185,7 @@ impl SpannerDb {
// This should always run within a r/w transaction, so that: "If a
// transaction successfully commits, then no other writer modified the
// data that was read in the transaction after it was read."
if !cfg!(any(test, feature = "db_test")) && !self.in_write_transaction() {
if !cfg!(test) && !self.in_write_transaction() {
Err(DbError::internal("Can't escalate read-lock to write-lock"))?
}
let result = self
@ -220,7 +220,7 @@ impl SpannerDb {
// This should always run within a r/w transaction, so that: "If a
// transaction successfully commits, then no other writer modified the
// data that was read in the transaction after it was read."
if !cfg!(any(test, feature = "db_test")) && !self.in_write_transaction() {
if !cfg!(test) && !self.in_write_transaction() {
Err(DbError::internal("Can't escalate read-lock to write-lock"))?
}
let result = self
@ -516,7 +516,7 @@ impl SpannerDb {
let spanner = &self.conn;
if cfg!(any(test, feature = "db_test")) && spanner.use_test_transactions {
if cfg!(test) && spanner.use_test_transactions {
// don't commit test transactions
return Ok(());
}
@ -543,7 +543,7 @@ impl SpannerDb {
let spanner = &self.conn;
if cfg!(any(test, feature = "db_test")) && spanner.use_test_transactions {
if cfg!(test) && spanner.use_test_transactions {
// don't commit test transactions
return Ok(());
}
@ -970,7 +970,7 @@ impl SpannerDb {
// buffered on the client side and only issued to Spanner in the final
// transaction Commit.
let timestamp = self.timestamp()?;
if !cfg!(any(test, feature = "db_test")) && self.session.borrow().touched_collection {
if !cfg!(test) && self.session.borrow().touched_collection {
// No need to touch it again (except during tests where we
// currently reuse Dbs for multiple requests)
return Ok(timestamp);
@ -1037,7 +1037,7 @@ impl SpannerDb {
// buffered on the client side and only issued to Spanner in the final
// transaction Commit.
let timestamp = self.timestamp()?;
if !cfg!(any(test, feature = "db_test")) && self.session.borrow().touched_collection {
if !cfg!(test) && self.session.borrow().touched_collection {
// No need to touch it again (except during tests where we
// currently reuse Dbs for multiple requests)
return Ok(timestamp);
@ -1524,9 +1524,9 @@ impl SpannerDb {
Ok(result)
}
// NOTE: Currently this put_bso_sync impl. is only used during db_tests,
// NOTE: Currently this put_bso_sync impl. is only used during db tests,
// see above for the non-tests version
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub fn put_bso_sync(&self, bso: params::PutBso) -> Result<results::PutBso> {
use crate::db::util::to_rfc3339;
let collection_id = self.get_or_create_collection_id(&bso.collection)?;
@ -1679,9 +1679,9 @@ impl SpannerDb {
Ok(touch)
}
// NOTE: Currently this post_bso_sync impl. is only used during db_tests,
// NOTE: Currently this post_bso_sync impl. is only used during db tests,
// see above for the non-tests version
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub fn post_bsos_sync(&self, input: params::PostBsos) -> Result<results::PostBsos> {
let collection_id = self.get_or_create_collection_id(&input.collection)?;
let mut result = results::PostBsos {
@ -1858,25 +1858,25 @@ impl Db for SpannerDb {
Box::pin(async move { db.get_bso_timestamp_async(param).map_err(Into::into).await })
}
#[cfg(not(any(test, feature = "db_test")))]
#[cfg(not(test))]
fn put_bso(&self, param: params::PutBso) -> DbFuture<results::PutBso> {
let db = self.clone();
Box::pin(async move { db.put_bso_async(param).map_err(Into::into).await })
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn put_bso(&self, param: params::PutBso) -> DbFuture<results::PutBso> {
let db = self.clone();
Box::pin(block(move || db.put_bso_sync(param).map_err(Into::into)).map_err(Into::into))
}
#[cfg(not(any(test, feature = "db_test")))]
#[cfg(not(test))]
fn post_bsos(&self, param: params::PostBsos) -> DbFuture<results::PostBsos> {
let db = self.clone();
Box::pin(async move { db.post_bsos_async(param).map_err(Into::into).await })
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn post_bsos(&self, param: params::PostBsos) -> DbFuture<results::PostBsos> {
let db = self.clone();
Box::pin(block(move || db.post_bsos_sync(param).map_err(Into::into)).map_err(Into::into))
@ -1911,19 +1911,19 @@ impl Db for SpannerDb {
Box::pin(async move { batch::commit_async(&db, param).map_err(Into::into).await })
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn get_collection_id(&self, name: String) -> DbFuture<i32> {
let db = self.clone();
Box::pin(block(move || db.get_collection_id(&name).map_err(Into::into)).map_err(Into::into))
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn create_collection(&self, name: String) -> DbFuture<i32> {
let db = self.clone();
Box::pin(block(move || db.create_collection(&name).map_err(Into::into)).map_err(Into::into))
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn touch_collection(&self, param: params::TouchCollection) -> DbFuture<SyncTimestamp> {
let db = self.clone();
Box::pin(
@ -1935,24 +1935,24 @@ impl Db for SpannerDb {
)
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn timestamp(&self) -> SyncTimestamp {
self.timestamp()
.expect("set_timestamp() not called yet for SpannerDb")
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn set_timestamp(&self, timestamp: SyncTimestamp) {
SpannerDb::set_timestamp(self, timestamp)
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn delete_batch(&self, param: params::DeleteBatch) -> DbFuture<results::DeleteBatch> {
let db = self.clone();
Box::pin(async move { batch::delete_async(&db, param).map_err(Into::into).await })
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
fn clear_coll_cache(&self) {
self.coll_cache.clear();
}

View File

@ -13,7 +13,7 @@ use diesel::r2d2::Pool;
use scheduled_thread_pool::ScheduledThreadPool;
use super::models::Result;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
use super::test_util::SpannerTestTransactionCustomizer;
use crate::db::{error::DbError, Db, DbFuture, DbPool, STD_COLLS};
use crate::server::metrics::Metrics;
@ -66,7 +66,7 @@ impl SpannerDbPool {
let mut metrics = metrics.clone();
metrics.start_timer("storage.spanner.pool.get", None);
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
let builder = if settings.database_use_test_transactions {
builder.connection_customizer(Box::new(SpannerTestTransactionCustomizer))
} else {
@ -153,7 +153,7 @@ impl CollectionCache {
.cloned())
}
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub fn clear(&self) {
self.by_name.write().expect("by_name write").clear();
self.by_id.write().expect("by_id write").clear();

View File

@ -1,15 +1,12 @@
use futures::compat::Future01CompatExt;
use codegen::async_test;
use log::debug;
use syncstorage::{
use super::support::{db, gbso, hid, postbso, Result};
use crate::{
db::{error::DbErrorKind, params, util::SyncTimestamp, BATCH_LIFETIME},
error::ApiErrorKind,
};
use crate::support::{db, gbso, hid, postbso, Result};
fn cb(user_id: u32, coll: &str, bsos: Vec<params::PostCollectionBso>) -> params::CreateBatch {
params::CreateBatch {
user_id: hid(user_id),

View File

@ -1,13 +1,13 @@
#![allow(clippy::cognitive_complexity)]
use std::collections::HashMap;
use futures::compat::Future01CompatExt;
use lazy_static::lazy_static;
use rand::{distributions::Alphanumeric, thread_rng, Rng};
use codegen::async_test;
use syncstorage::db::{mysql::models::DEFAULT_BSO_TTL, params, util::SyncTimestamp, Sorting};
use crate::support::{db, dbso, dbsos, gbso, gbsos, hid, pbso, postbso, Result};
use super::support::{db, dbso, dbsos, gbso, gbsos, hid, pbso, postbso, Result};
use crate::db::{mysql::models::DEFAULT_BSO_TTL, params, util::SyncTimestamp, Sorting};
// distant future (year 2099) timestamp for tests
const MAX_TIMESTAMP: u64 = 4_070_937_600_000;

View File

@ -5,4 +5,4 @@ mod support;
#[cfg(test)]
mod batch;
#[cfg(test)]
mod db_tests;
mod db;

View File

@ -1,8 +1,9 @@
use env_logger;
use futures::compat::Future01CompatExt;
use std::str::FromStr;
use syncstorage::{
db::{params, util::SyncTimestamp, Db, Sorting},
use env_logger;
use crate::{
db::{params, pool_from_settings, util::SyncTimestamp, Db, Sorting},
error::ApiError,
server::metrics,
settings::{Secrets, ServerLimits, Settings},
@ -28,7 +29,7 @@ pub async fn db() -> Result<Box<dyn Db>> {
};
let metrics = metrics::Metrics::noop();
let pool = syncstorage::db::pool_from_settings(&settings, &metrics)?;
let pool = pool_from_settings(&settings, &metrics)?;
let db = pool.get().await?;
// Spanner won't have a timestamp until lock_for_xxx are called: fill one
// in for it

View File

@ -63,8 +63,8 @@ impl SyncTimestamp {
Ok(SyncTimestamp::from_milliseconds(val as u64))
}
/// Exposed separately for db-tests
#[cfg(any(test, feature = "db_test"))]
/// Exposed separately for db tests
#[cfg(test)]
pub fn _from_i64(val: i64) -> Result<Self, DbError> {
SyncTimestamp::from_i64(val)
}

View File

@ -28,7 +28,7 @@ pub struct Settings {
pub host: String,
pub database_url: String,
pub database_pool_max_size: Option<u32>,
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
pub database_use_test_transactions: bool,
/// Server-enforced limits for request payloads.
@ -53,7 +53,7 @@ impl Default for Settings {
host: "127.0.0.1".to_string(),
database_url: "mysql://root@127.0.0.1/syncstorage".to_string(),
database_pool_max_size: None,
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
database_use_test_transactions: false,
limits: ServerLimits::default(),
master_secret: Secrets::default(),
@ -75,7 +75,7 @@ impl Settings {
s.set_default("port", i64::from(DEFAULT_PORT))?;
s.set_default("host", "127.0.0.1")?;
s.set_default("human_logs", false)?;
#[cfg(any(test, feature = "db_test"))]
#[cfg(test)]
s.set_default("database_use_test_transactions", false)?;
s.set_default("master_secret", "")?;
s.set_default("limits.max_post_bytes", i64::from(DEFAULT_MAX_POST_BYTES))?;

View File

@ -1,6 +1,7 @@
//! Types for parsing and authenticating HAWK headers.
//! Matches the [Python logic](https://github.com/mozilla-services/tokenlib).
//! We may want to extract this to its own repo/crate in due course.
#![cfg_attr(feature = "no_auth", allow(dead_code, unused_imports, unused_variables))]
use base64;
use chrono::offset::Utc;