add: redis cache support
This commit is contained in:
parent
1d9a96ae69
commit
38dbf10130
13 changed files with 541 additions and 17 deletions
77
crates/core/src/cache/mod.rs
vendored
Normal file
77
crates/core/src/cache/mod.rs
vendored
Normal file
|
@ -0,0 +1,77 @@
|
|||
//! Cache abstraction with two interchangeable backends: a Redis-backed
//! implementation (compiled with the `redis` feature) and a no-op
//! fallback (`no_cache`) used when the feature is disabled.
#![allow(async_fn_in_trait)]

use serde::{Serialize, de::DeserializeOwned};

// Expiry window for timed cache entries; 3_600_000 presumably means
// 1 hour in milliseconds — depends on the unit returned by
// `tetratto_shared::epoch_timestamp`, TODO confirm.
pub const EXPIRE_AT: i64 = 3_600_000;

// A cached value paired with the epoch timestamp at which it was stored
// (see `Cache::set_timed` / `Cache::get_timed`).
#[allow(type_alias_bounds)]
pub type TimedObject<T: Serialize + DeserializeOwned> = (i64, T);

#[cfg(feature = "redis")]
pub mod redis;

#[cfg(not(feature = "redis"))]
pub mod no_cache;
|
||||
|
||||
/// A simple cache "database".
///
/// Implemented by the Redis backend (feature `redis`) and by the no-op
/// `no_cache` fallback. Write-style operations report success/failure as
/// a `bool` rather than a `Result`.
pub trait Cache {
    /// The type used for both identifiers and stored contents
    /// (`String` in both provided implementations).
    type Item;
    /// The connection handle type handed out by [`Cache::get_con`].
    type Client;

    /// Create a new [`Cache`].
    async fn new() -> Self;
    /// Get a connection to the cache.
    async fn get_con(&self) -> Self::Client;

    /// Get a cache object by its identifier
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    async fn get(&self, id: Self::Item) -> Option<String>;
    /// Set a cache object by its identifier and content
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    /// * `content` - `String` of the object's content
    async fn set(&self, id: Self::Item, content: Self::Item) -> bool;
    /// Update a cache object by its identifier and content
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    /// * `content` - `String` of the object's content
    async fn update(&self, id: Self::Item, content: Self::Item) -> bool;
    /// Remove a cache object by its identifier
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    async fn remove(&self, id: Self::Item) -> bool;
    /// Remove a cache object by its identifier('s start)
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id('s start)
    async fn remove_starting_with(&self, id: Self::Item) -> bool;
    /// Increment a cache object by its identifier
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    async fn incr(&self, id: Self::Item) -> bool;
    /// Decrement a cache object by its identifier
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    async fn decr(&self, id: Self::Item) -> bool;

    /// Get a timed cache object ([`TimedObject`]) by its identifier.
    ///
    /// Entries older than [`EXPIRE_AT`] are treated as misses by the
    /// Redis implementation.
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    async fn get_timed<T: Serialize + DeserializeOwned>(
        &self,
        id: Self::Item,
    ) -> Option<TimedObject<T>>;
    /// Set a timed cache object by its identifier and content, stamping
    /// it with the current time.
    ///
    /// # Arguments
    /// * `id` - `String` of the object's id
    /// * `content` - `String` of the object's content
    async fn set_timed<T: Serialize + DeserializeOwned>(&self, id: Self::Item, content: T) -> bool;
}
|
62
crates/core/src/cache/no_cache.rs
vendored
Normal file
62
crates/core/src/cache/no_cache.rs
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
use serde::{Serialize, de::DeserializeOwned};

// NOTE(review): `EXPIRE_AT` is imported for parity with the redis
// backend but is never referenced in this file — expect an
// unused-import warning; consider dropping it.
use super::{Cache, EXPIRE_AT, TimedObject};

// Mirrors the redis backend's epoch constant; unused by this no-op
// implementation.
pub const EPOCH_YEAR: u32 = 2025;
|
||||
|
||||
/// Cache backend that stores nothing; compiled in when the `redis`
/// feature is disabled.
#[derive(Clone)]
pub struct NoCache {
    /// Placeholder client handle; always `None` (see `Cache::new`).
    pub client: Option<u32>,
}
|
||||
|
||||
impl Cache for NoCache {
|
||||
type Item = String;
|
||||
type Client = Option<u32>;
|
||||
|
||||
async fn new() -> Self {
|
||||
Self { client: None }
|
||||
}
|
||||
|
||||
async fn get_con(&self) -> Self::Client {
|
||||
None
|
||||
}
|
||||
|
||||
async fn get(&self, id: Self::Item) -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
async fn set(&self, id: Self::Item, content: Self::Item) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn update(&self, id: Self::Item, content: Self::Item) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn remove(&self, id: Self::Item) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn remove_starting_with(&self, id: Self::Item) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn incr(&self, id: Self::Item) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn decr(&self, id: Self::Item) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn get_timed<T: Serialize + DeserializeOwned>(
|
||||
&self,
|
||||
id: Self::Item,
|
||||
) -> Option<TimedObject<T>> {
|
||||
None
|
||||
}
|
||||
|
||||
async fn set_timed<T: Serialize + DeserializeOwned>(&self, id: Self::Item, content: T) -> bool {
|
||||
None
|
||||
}
|
||||
}
|
123
crates/core/src/cache/redis.rs
vendored
Normal file
123
crates/core/src/cache/redis.rs
vendored
Normal file
|
@ -0,0 +1,123 @@
|
|||
use redis::Commands;
use serde::{Serialize, de::DeserializeOwned};

use super::{Cache, EXPIRE_AT, TimedObject};

// Base year passed to `tetratto_shared::epoch_timestamp` when stamping
// timed cache entries.
pub const EPOCH_YEAR: u32 = 2025;
|
||||
|
||||
/// Redis-backed cache implementation (feature `redis`).
#[derive(Clone)]
pub struct RedisCache {
    /// Redis client handle; a fresh connection is opened per operation
    /// via `Cache::get_con`.
    pub client: redis::Client,
}
|
||||
|
||||
impl Cache for RedisCache {
|
||||
type Item = String;
|
||||
type Client = redis::Connection;
|
||||
|
||||
async fn new() -> Self {
|
||||
Self {
|
||||
client: redis::Client::open("redis://127.0.0.1:6379").unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_con(&self) -> Self::Client {
|
||||
self.client.get_connection().unwrap()
|
||||
}
|
||||
|
||||
async fn get(&self, id: Self::Item) -> Option<String> {
|
||||
self.get_con().await.get(id).ok()
|
||||
}
|
||||
|
||||
async fn set(&self, id: Self::Item, content: Self::Item) -> bool {
|
||||
let mut c = self.get_con().await;
|
||||
let res: Result<String, redis::RedisError> = c.set(id, content);
|
||||
|
||||
res.is_ok()
|
||||
}
|
||||
|
||||
async fn update(&self, id: Self::Item, content: Self::Item) -> bool {
|
||||
self.set(id, content).await
|
||||
}
|
||||
|
||||
async fn remove(&self, id: Self::Item) -> bool {
|
||||
let mut c = self.get_con().await;
|
||||
let res: Result<String, redis::RedisError> = c.del(id);
|
||||
|
||||
res.is_ok()
|
||||
}
|
||||
|
||||
async fn remove_starting_with(&self, id: Self::Item) -> bool {
|
||||
let mut c = self.get_con().await;
|
||||
|
||||
// get keys
|
||||
let mut cmd = redis::cmd("DEL");
|
||||
let keys: Result<Vec<String>, redis::RedisError> = c.keys(id);
|
||||
|
||||
for key in keys.unwrap() {
|
||||
cmd.arg(key);
|
||||
}
|
||||
|
||||
// remove
|
||||
let res: Result<String, redis::RedisError> = cmd.query(&mut c);
|
||||
|
||||
res.is_ok()
|
||||
}
|
||||
|
||||
async fn incr(&self, id: Self::Item) -> bool {
|
||||
let mut c = self.get_con().await;
|
||||
let res: Result<String, redis::RedisError> = c.incr(id, 1);
|
||||
|
||||
res.is_ok()
|
||||
}
|
||||
|
||||
async fn decr(&self, id: Self::Item) -> bool {
|
||||
let mut c = self.get_con().await;
|
||||
let res: Result<String, redis::RedisError> = c.decr(id, 1);
|
||||
|
||||
res.is_ok()
|
||||
}
|
||||
|
||||
async fn get_timed<T: Serialize + DeserializeOwned>(
|
||||
&self,
|
||||
id: Self::Item,
|
||||
) -> Option<TimedObject<T>> {
|
||||
let mut c = self.get_con().await;
|
||||
let res: Result<String, redis::RedisError> = c.get(&id);
|
||||
|
||||
match res {
|
||||
Ok(d) => match serde_json::from_str::<TimedObject<T>>(&d) {
|
||||
Ok(d) => {
|
||||
// check time
|
||||
let now = tetratto_shared::epoch_timestamp(EPOCH_YEAR);
|
||||
|
||||
if now - d.0 >= EXPIRE_AT {
|
||||
// expired key, remove and return None
|
||||
self.remove(id).await;
|
||||
return None;
|
||||
}
|
||||
|
||||
// return
|
||||
Some(d)
|
||||
}
|
||||
Err(_) => None,
|
||||
},
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
async fn set_timed<T: Serialize + DeserializeOwned>(&self, id: Self::Item, content: T) -> bool {
|
||||
let mut c = self.get_con().await;
|
||||
let res: Result<String, redis::RedisError> = c.set(
|
||||
id,
|
||||
match serde_json::to_string::<TimedObject<T>>(&(
|
||||
tetratto_shared::epoch_timestamp(EPOCH_YEAR),
|
||||
content,
|
||||
)) {
|
||||
Ok(s) => s,
|
||||
Err(_) => return false,
|
||||
},
|
||||
);
|
||||
|
||||
res.is_ok()
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue