chore: updates for Rust 1.66 (#1451)

Ethan Donowitz 2023-01-09 15:06:52 -05:00 committed by GitHub
parent 973e90fae8
commit d11787965c
7 changed files with 11 additions and 11 deletions
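
The hunks below are small mechanical cleanups rather than behavior changes. Reading the before/after pairs, they appear to address two warning families in the newer clippy that ships alongside Rust 1.66: casts of a value to the type it already has (clippy::unnecessary_cast) and references passed where an owned value already satisfies the bound (clippy::needless_borrow). The lint names are an inference from the diff, not something the commit states. A minimal standalone sketch of both patterns:

// Standalone sketch (assumed lints: clippy::unnecessary_cast, clippy::needless_borrow).
fn takes_bytes(input: impl AsRef<[u8]>) -> usize {
    // Stand-in for an API generic over AsRef<[u8]>, such as base64::encode.
    input.as_ref().len()
}

fn main() {
    let user_id: i64 = 42;
    let _with_cast = user_id as i64; // flagged: i64 -> i64 is a no-op cast
    let _without = user_id;          // preferred form used in the diff

    let payload = String::from("abc");
    let _borrowed = takes_bytes(&payload); // flagged: the extra `&` is needless
    let _moved = takes_bytes(payload);     // preferred when the value can be moved
}

Running cargo clippy on the "before" forms with the 1.66 toolchain is assumed to be what surfaced these warnings.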

View File

@@ -1,4 +1,4 @@
-FROM rust:1.65-buster as builder
+FROM rust:1.66-buster as builder
 WORKDIR /app
 ADD . /app
 ENV PATH=$PATH:/root/.cargo/bin

View File

@@ -139,13 +139,13 @@ pub fn commit(db: &MysqlDb, params: params::CommitBatch) -> Result<results::Comm
     let collection_id = db.get_collection_id(&params.collection)?;
     let timestamp = db.timestamp();
     sql_query(include_str!("batch_commit.sql"))
-        .bind::<BigInt, _>(user_id as i64)
+        .bind::<BigInt, _>(user_id)
         .bind::<Integer, _>(&collection_id)
         .bind::<BigInt, _>(&db.timestamp().as_i64())
         .bind::<BigInt, _>(&db.timestamp().as_i64())
         .bind::<BigInt, _>((MAXTTL as i64) * 1000) // XXX:
         .bind::<BigInt, _>(&batch_id)
-        .bind::<BigInt, _>(user_id as i64)
+        .bind::<BigInt, _>(user_id)
         .bind::<BigInt, _>(&db.timestamp().as_i64())
         .bind::<BigInt, _>(&db.timestamp().as_i64())
         .execute(&db.conn)?;
@@ -258,7 +258,7 @@ pub fn validate_batch_id(id: &str) -> Result<()> {
 }

 fn encode_id(id: i64) -> String {
-    base64::encode(&id.to_string())
+    base64::encode(id.to_string())
 }

 fn decode_id(id: &str) -> Result<i64> {
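
For the encode_id change above: in the 0.13-era base64 crate (the crate version is an assumption, it is not pinned in this diff), encode is generic over AsRef<[u8]>, so passing the freshly built String by value or by reference both compile; the extra `&` only adds a layer the lint flags. A small sketch:

// Sketch assuming base64 0.13's `encode<T: AsRef<[u8]>>(input: T)` signature.
fn encode_id(id: i64) -> String {
    // The owned String from to_string() already satisfies AsRef<[u8]>,
    // so `&id.to_string()` compiles too, but the borrow is redundant.
    base64::encode(id.to_string())
}

fn main() {
    assert_eq!(encode_id(258), base64::encode("258"));
    println!("{}", encode_id(258));
}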

View File

@@ -403,7 +403,7 @@ impl MysqlDb {
                 collection: bso.collection.clone(),
                 collection_id,
             })?;
-            if usage.total_bytes >= self.quota.size as usize {
+            if usage.total_bytes >= self.quota.size {
                 let mut tags = HashMap::default();
                 tags.insert("collection".to_owned(), bso.collection.clone());
                 self.metrics.incr_with_tags("storage.quota.at_limit", tags);
@@ -489,7 +489,7 @@ impl MysqlDb {
                 bso::expiry,
             ))
             .filter(bso::user_id.eq(user_id))
-            .filter(bso::collection_id.eq(collection_id as i32)) // XXX:
+            .filter(bso::collection_id.eq(collection_id))
             .filter(bso::expiry.gt(now))
             .into_boxed();

@@ -572,7 +572,7 @@ impl MysqlDb {
         let mut query = bso::table
             .select(bso::id)
             .filter(bso::user_id.eq(user_id))
-            .filter(bso::collection_id.eq(collection_id as i32)) // XXX:
+            .filter(bso::collection_id.eq(collection_id))
             .filter(bso::expiry.gt(self.timestamp().as_i64()))
             .into_boxed();

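
Both collection_id casts in this file are dropped with no other change, which suggests collection_id already has the column's Rust type (i32) at these call sites; casting a value to its own type is exactly what clippy::unnecessary_cast reports. A Diesel-free sketch of the same pattern:

// Minimal reproduction of the cast cleanup, independent of Diesel.
fn matching_rows(ids: &[i32], collection_id: i32) -> Vec<i32> {
    ids.iter()
        .copied()
        // Before: `*id == collection_id as i32` -- a same-type cast, flagged by clippy.
        .filter(|id| *id == collection_id)
        .collect()
}

fn main() {
    println!("{:?}", matching_rows(&[1, 2, 2, 3], 2)); // [2, 2]
}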

View File

@@ -1662,7 +1662,7 @@ impl SpannerDb {
                 warn!("Quota at limit for user's collection: ({} bytes)", usage.total_bytes; "collection"=>collection);
             }
         }
-        Ok(Some(usage.total_bytes as usize))
+        Ok(Some(usage.total_bytes))
     }

     // NOTE: Currently this put_bso_async_test impl. is only used during db tests,

View File

@@ -625,7 +625,7 @@ async fn get_collection_usage() -> Result<()> {
         db.put_bso(pbso(
             uid,
             coll,
-            &format!("b{}", i as i32),
+            &format!("b{}", i),
             Some(&String::from_utf8_lossy(&payload)),
             None,
             None,
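
The test change is the same cast cleanup inside a format! argument: {} uses Display, which renders the loop index identically with or without the cast, so `i as i32` (with i presumably already an i32) adds nothing. A throwaway check:

// Assumes the loop index is already i32, matching the cast that was removed.
fn main() {
    for i in 0..3i32 {
        assert_eq!(format!("b{}", i as i32), format!("b{}", i)); // identical output
        println!("b{}", i);
    }
}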

View File

@@ -83,7 +83,7 @@ fn get_token_plaintext(
             context: format!("Failed to decode the client state hex: {}", e),
             ..TokenserverError::internal_error()
         })?;
-    let client_state_b64 = base64::encode_config(&client_state, base64::URL_SAFE_NO_PAD);
+    let client_state_b64 = base64::encode_config(client_state, base64::URL_SAFE_NO_PAD);

     format!(
         "{:013}-{:}",

View File

@@ -313,7 +313,7 @@ pub async fn post_collection_batch(
             CreateBatch {
                 id: id.clone(),
                 size: if coll.quota_enabled {
-                    Some(usage.total_bytes as usize)
+                    Some(usage.total_bytes)
                 } else {
                     None
                 },
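
Here and in the Spanner hunk earlier, usage.total_bytes loses its `as usize`, which implies the quota-usage struct already stores total_bytes as usize (an assumption; the struct definition is outside this diff). With stand-in types, the shape of the change is:

// Stand-in types only; the real CreateBatch and quota structs live elsewhere in the crate.
struct QuotaUsage {
    total_bytes: usize, // assumed to already be usize, making the removed cast a no-op
}

struct CreateBatch {
    id: String,
    size: Option<usize>,
}

fn build_batch(id: String, quota_enabled: bool, usage: &QuotaUsage) -> CreateBatch {
    CreateBatch {
        id,
        size: if quota_enabled {
            Some(usage.total_bytes) // previously `usage.total_bytes as usize`
        } else {
            None
        },
    }
}

fn main() {
    let batch = build_batch("batch-1".into(), true, &QuotaUsage { total_bytes: 1024 });
    println!("{} -> {:?}", batch.id, batch.size);
}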