refactor storage get and set methods.

These previously accepted a hash and scopes. The hash was required to be
the hash of the provided scopes, but that wasn't enforced by the compiler.
We now have the compiler enforce this by introducing a HashedScopes type
that ties the scopes and the hash together and passing that into the
storage methods.
Glenn Griffin
2019-11-15 09:39:07 -08:00
parent f76dea5dbd
commit 4b4b2fe3f4
3 changed files with 85 additions and 54 deletions
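
In short, the change bundles the computed hash together with the scope slice it
was derived from, so storage can no longer be handed a hash that belongs to a
different scope list. A condensed sketch of the shape of the change (illustrative
only; names such as get_token below are not part of the crate, and the real code
is in the diff that follows):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Ties a scope list to the hash derived from it, so the two cannot drift apart.
pub struct HashedScopes<'a, T> {
    hash: u64,
    scopes: &'a [T],
}

impl<'a, T: AsRef<str>> HashedScopes<'a, T> {
    // Constructing the value is the only way to obtain a hash, so any function
    // that accepts HashedScopes knows the hash matches `scopes`.
    pub fn from(scopes: &'a [T]) -> Self {
        let mut hash = DefaultHasher::new().finish();
        for scope in scopes {
            let mut hasher = DefaultHasher::new();
            scope.as_ref().hash(&mut hasher);
            hash ^= hasher.finish(); // XOR keeps the hash order-independent
        }
        HashedScopes { hash, scopes }
    }
}

// Storage methods now take the bundled value instead of a (hash, scopes) pair.
fn get_token<T: AsRef<str>>(scopes: HashedScopes<'_, T>) -> Option<String> {
    let _ = (scopes.hash, scopes.scopes);
    None
}

fn main() {
    let hs = HashedScopes::from(&["https://www.googleapis.com/auth/pubsub"]);
    assert!(get_token(hs).is_none());
}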

View File

@@ -35,8 +35,8 @@ where
where
T: AsRef<str>,
{
let scope_key = storage::ScopeHash::new(scopes);
match self.storage.get(scope_key, scopes) {
let hashed_scopes = storage::HashedScopes::from(scopes);
match self.storage.get(hashed_scopes) {
Some(t) if !t.expired() => {
// unexpired token found
Ok(t)
@@ -59,9 +59,7 @@ where
}
Ok(token) => token,
};
self.storage
.set(scope_key, scopes, Some(token.clone()))
.await;
self.storage.set(hashed_scopes, Some(token.clone())).await;
Ok(token)
}
None
@@ -74,7 +72,7 @@ where
.auth_flow
.token(&self.hyper_client, &self.app_secret, scopes)
.await?;
self.storage.set(scope_key, scopes, Some(t.clone())).await;
self.storage.set(hashed_scopes, Some(t.clone())).await;
Ok(t)
}
}

View File

@@ -263,9 +263,9 @@ where
where
T: AsRef<str>,
{
let hash = storage::ScopeHash::new(scopes);
let hashed_scopes = storage::HashedScopes::from(scopes);
let cache = &self.cache;
match cache.get(hash, scopes) {
match cache.get(hashed_scopes) {
Some(token) if !token.expired() => return Ok(token),
_ => {}
}
@@ -277,7 +277,7 @@ where
scopes,
)
.await?;
cache.set(hash, scopes, Some(token.clone())).await;
cache.set(hashed_scopes, Some(token.clone())).await;
Ok(token)
}
/// Send a request for a new Bearer token to the OAuth provider.
@@ -399,12 +399,9 @@ mod tests {
assert!(acc
.cache
.get(
dbg!(storage::ScopeHash::new(&[
"https://www.googleapis.com/auth/pubsub"
])),
&["https://www.googleapis.com/auth/pubsub"],
)
.get(storage::HashedScopes::from(&[
"https://www.googleapis.com/auth/pubsub"
]))
.is_some());
// Test that token is in cache (otherwise mock will tell us)
let tok = acc

View File

@@ -13,24 +13,53 @@ use std::sync::Mutex;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct ScopeHash(u64);
#[derive(Debug)]
pub struct HashedScopes<'a, T> {
hash: u64,
scopes: &'a [T],
}
impl ScopeHash {
/// Calculate a hash value describing the scopes. The order of the scopes in the
/// list does not change the hash value, i.e. two lists that contain the exact
/// same scopes, but in a different order, will return the same hash value.
pub fn new<T>(scopes: &[T]) -> Self
where
T: AsRef<str>,
{
let mut hash_sum = DefaultHasher::new().finish();
// Implement Clone manually. Auto derive fails to work correctly because we want
// Clone to be implemented regardless of whether T is Clone or not.
impl<'a, T> Clone for HashedScopes<'a, T> {
fn clone(&self) -> Self {
HashedScopes {
hash: self.hash,
scopes: self.scopes,
}
}
}
impl<'a, T> Copy for HashedScopes<'a, T> {}
impl<'a, T> From<&'a [T]> for HashedScopes<'a, T>
where
T: AsRef<str>,
{
fn from(scopes: &'a [T]) -> Self {
// Calculate a hash value describing the scopes. The order of the scopes in the
// list does not change the hash value, i.e. two lists that contain the exact
// same scopes, but in a different order, will return the same hash value.
let mut hash = DefaultHasher::new().finish();
for scope in scopes {
let mut hasher = DefaultHasher::new();
scope.as_ref().hash(&mut hasher);
hash_sum ^= hasher.finish();
hash ^= hasher.finish();
}
ScopeHash(hash_sum)
HashedScopes { hash, scopes }
}
}
impl<'a, T> HashedScopes<'a, T>
where
T: AsRef<str>,
{
// implement an inherent from method even though From is implemented. This
// is because passing an array ref like &[&str; 1] (&["foo"]) will be auto
// deref'd to a slice on function boundaries, but it will not implement the
// From trait. This inherent method just serves to auto deref from array
// refs to slices and proxy to the From impl.
pub fn from(scopes: &'a [T]) -> Self {
<Self as From<&'a [T]>>::from(scopes)
}
}
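
As the comment above notes, the inherent from only exists so that call sites can
pass array references directly: at the method boundary &["foo"] (a &[&str; 1])
coerces to a slice and is forwarded to the From impl. A rough usage sketch (not
taken from the crate's code):

// An array reference coerces to &[&str] at the inherent method's boundary.
let hs = HashedScopes::from(&["https://www.googleapis.com/auth/pubsub"]);
// An explicit slice works the same way and ends up in the same From impl.
let scopes: &[&str] = &["https://www.googleapis.com/auth/pubsub"];
let hs2 = HashedScopes::from(scopes);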
@@ -40,23 +69,23 @@ pub(crate) enum Storage {
}
impl Storage {
pub(crate) async fn set<T>(&self, h: ScopeHash, scopes: &[T], token: Option<Token>)
pub(crate) async fn set<T>(&self, scopes: HashedScopes<'_, T>, token: Option<Token>)
where
T: AsRef<str>,
{
match self {
Storage::Memory { tokens } => tokens.lock().unwrap().set(h, scopes, token),
Storage::Disk(disk_storage) => disk_storage.set(h, scopes, token).await,
Storage::Memory { tokens } => tokens.lock().unwrap().set(scopes, token),
Storage::Disk(disk_storage) => disk_storage.set(scopes, token).await,
}
}
pub(crate) fn get<T>(&self, h: ScopeHash, scopes: &[T]) -> Option<Token>
pub(crate) fn get<T>(&self, scopes: HashedScopes<T>) -> Option<Token>
where
T: AsRef<str>,
{
match self {
Storage::Memory { tokens } => tokens.lock().unwrap().get(h, scopes),
Storage::Disk(disk_storage) => disk_storage.get(h, scopes),
Storage::Memory { tokens } => tokens.lock().unwrap().get(scopes),
Storage::Disk(disk_storage) => disk_storage.get(scopes),
}
}
}
@@ -64,7 +93,7 @@ impl Storage {
/// A single stored token.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct JSONToken {
pub hash: ScopeHash,
pub hash: u64,
pub scopes: Option<Vec<String>>,
pub token: Token,
}
@@ -107,38 +136,45 @@ impl JSONTokens {
Ok(container)
}
fn get<T>(&self, h: ScopeHash, scopes: &[T]) -> Option<Token>
fn get<T>(&self, scopes: HashedScopes<T>) -> Option<Token>
where
T: AsRef<str>,
{
for t in self.tokens.iter() {
if let Some(token_scopes) = &t.scopes {
if scopes
.scopes
.iter()
.all(|s| token_scopes.iter().any(|t| t == s.as_ref()))
{
return Some(t.token.clone());
}
} else if h == t.hash {
} else if scopes.hash == t.hash {
return Some(t.token.clone());
}
}
None
}
fn set<T>(&mut self, h: ScopeHash, scopes: &[T], token: Option<Token>)
fn set<T>(&mut self, scopes: HashedScopes<T>, token: Option<Token>)
where
T: AsRef<str>,
{
eprintln!("setting: {:?}, {:?}", h, token);
self.tokens.retain(|x| x.hash != h);
eprintln!("setting: {:?}, {:?}", scopes.hash, token);
self.tokens.retain(|x| x.hash != scopes.hash);
match token {
None => (),
Some(t) => {
self.tokens.push(JSONToken {
hash: h,
scopes: Some(scopes.iter().map(|x| x.as_ref().to_string()).collect()),
hash: scopes.hash,
scopes: Some(
scopes
.scopes
.iter()
.map(|x| x.as_ref().to_string())
.collect(),
),
token: t,
});
}
@@ -183,13 +219,13 @@ impl DiskStorage {
})
}
async fn set<T>(&self, h: ScopeHash, scopes: &[T], token: Option<Token>)
async fn set<T>(&self, scopes: HashedScopes<'_, T>, token: Option<Token>)
where
T: AsRef<str>,
{
let cloned_tokens = {
let mut tokens = self.tokens.lock().unwrap();
tokens.set(h, scopes, token);
tokens.set(scopes, token);
tokens.clone()
};
self.write_tx
@@ -199,11 +235,11 @@ impl DiskStorage {
.expect("disk storage task not running");
}
pub(crate) fn get<T>(&self, h: ScopeHash, scopes: &[T]) -> Option<Token>
pub(crate) fn get<T>(&self, scopes: HashedScopes<T>) -> Option<Token>
where
T: AsRef<str>,
{
self.tokens.lock().unwrap().get(h, scopes)
self.tokens.lock().unwrap().get(scopes)
}
}
@@ -215,24 +251,24 @@ mod tests {
fn test_hash_scopes() {
// Identical lists should hash equal.
assert_eq!(
ScopeHash::new(&["foo", "bar"]),
ScopeHash::new(&["foo", "bar"])
HashedScopes::from(&["foo", "bar"]).hash,
HashedScopes::from(&["foo", "bar"]).hash,
);
// The hash should be order independent.
assert_eq!(
ScopeHash::new(&["bar", "foo"]),
ScopeHash::new(&["foo", "bar"])
HashedScopes::from(&["bar", "foo"]).hash,
HashedScopes::from(&["foo", "bar"]).hash,
);
assert_eq!(
ScopeHash::new(&["bar", "baz", "bat"]),
ScopeHash::new(&["baz", "bar", "bat"])
HashedScopes::from(&["bar", "baz", "bat"]).hash,
HashedScopes::from(&["baz", "bar", "bat"]).hash,
);
// Ensure hashes differ when the contents are different by more than
// just order.
assert_ne!(
ScopeHash::new(&["foo", "bar", "baz"]),
ScopeHash::new(&["foo", "bar"])
HashedScopes::from(&["foo", "bar", "baz"]).hash,
HashedScopes::from(&["foo", "bar"]).hash,
);
}
}
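
These assertions hold because the per-scope hashes are combined with XOR, which
is commutative and associative; a single running hasher fed the scopes one after
another would not have that property. A small standalone sketch of the difference
(same DefaultHasher scheme as the From impl above; not part of the crate):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Combine per-scope hashes with XOR, as the From impl does: order-independent.
fn xor_hash(scopes: &[&str]) -> u64 {
    let mut hash = DefaultHasher::new().finish();
    for scope in scopes {
        let mut hasher = DefaultHasher::new();
        scope.hash(&mut hasher);
        hash ^= hasher.finish();
    }
    hash
}

// For contrast: feeding scopes into one running hasher is order-sensitive.
fn sequential_hash(scopes: &[&str]) -> u64 {
    let mut hasher = DefaultHasher::new();
    for scope in scopes {
        scope.hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    assert_eq!(xor_hash(&["foo", "bar"]), xor_hash(&["bar", "foo"]));
    // A different order produces a different value here (with overwhelming likelihood).
    assert_ne!(sequential_hash(&["foo", "bar"]), sequential_hash(&["bar", "foo"]));
}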