author      omagdy <omar.professional8777@gmail.com>   2025-07-22 06:59:07 +0300
committer   omagdy <omar.professional8777@gmail.com>   2025-07-22 06:59:07 +0300
commit      7e7ad6f35e32810f2083c0a78afe1e38c1c9f5f0 (patch)
tree        dd9476fddb39c58cd1d76fa0362c1e48f03148e9 /src
parent      8e071250e87cf92c9b11360cf61848f42ea45645 (diff)
download    redis-rust-7e7ad6f35e32810f2083c0a78afe1e38c1c9f5f0.tar.xz
            redis-rust-7e7ad6f35e32810f2083c0a78afe1e38c1c9f5f0.zip
feat: Implemented reading a key from rdb file
Diffstat (limited to 'src')
-rw-r--r--   src/main.rs            30
-rw-r--r--   src/rdb.rs             35
-rw-r--r--   src/resp_commands.rs   39
3 files changed, 63 insertions(+), 41 deletions(-)
diff --git a/src/main.rs b/src/main.rs
index effc1e7..e82527e 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,4 +1,5 @@
 #![allow(unused_imports)]
+use core::time;
 use std::{
     collections::HashMap,
     env,
@@ -9,7 +10,10 @@ use std::{
     time::{Duration, SystemTime, UNIX_EPOCH},
 };

-use codecrafters_redis::shared_cache::*;
+use codecrafters_redis::{
+    rdb::{KeyExpiry, ParseError, RDBFile, RedisValue},
+    shared_cache::*,
+};
 use codecrafters_redis::{resp_commands::RedisCommands, Config};
 use codecrafters_redis::{
     resp_parser::{parse, RespType},
@@ -62,6 +66,30 @@ fn main() -> std::io::Result<()> {
     match Config::new() {
         Ok(conf) => {
             if let Some(conf) = conf {
+                let mut cache = cache.lock().unwrap();
+
+                let dir = conf.dir.clone().unwrap();
+                let dbfilename = conf.dbfilename.clone().unwrap();
+                if let Some(rdb_file) = RDBFile::read(dir, dbfilename).unwrap() {
+                    let hash_table = &rdb_file.databases.get(&0).unwrap().hash_table;
+
+                    for (key, db_entry) in hash_table.iter() {
+                        let value = match &db_entry.value {
+                            RedisValue::String(data) => String::from_utf8(data.clone()).unwrap(),
+                            RedisValue::Integer(data) => data.to_string(),
+                            _ => {
+                                unreachable!()
+                            }
+                        };
+                        let expires_at = if let Some(key_expiry) = &db_entry.expiry {
+                            Some(key_expiry.timestamp)
+                        } else {
+                            None
+                        };
+                        let cache_entry = CacheEntry { value, expires_at };
+                        cache.insert(String::from_utf8(key.clone()).unwrap(), cache_entry);
+                    }
+                }
                 config = Arc::new(Some(conf));
             }
         }
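
The block above seeds the shared cache from database 0 of the RDB file before the server starts serving requests. The expiry handling can also be written with Option::map; a minimal standalone sketch of that equivalent shape (the struct here is reduced to just the timestamp field, and the values are hypothetical):

struct KeyExpiry {
    timestamp: u64,
}

fn expires_at(expiry: Option<&KeyExpiry>) -> Option<u64> {
    // Same result as the if-let/else in the hunk above: carry the timestamp through when present.
    expiry.map(|key_expiry| key_expiry.timestamp)
}

fn main() {
    let expiry = KeyExpiry { timestamp: 1_753_000_000_000 };
    assert_eq!(expires_at(Some(&expiry)), Some(1_753_000_000_000));
    assert_eq!(expires_at(None), None);
}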
diff --git a/src/rdb.rs b/src/rdb.rs
index 1a4de11..0195cb9 100644
--- a/src/rdb.rs
+++ b/src/rdb.rs
@@ -4,7 +4,8 @@
 use std::{
     collections::{HashMap, HashSet},
-    fs, io, isize,
+    fs::{self, File},
+    io, isize,
     path::Path,
 };
@@ -187,16 +188,19 @@ impl TryFrom<u8> for ValueType {
     }
 }

+#[derive(Debug)]
 pub struct KeyExpiry {
     pub timestamp: u64,
     pub unit: ExpiryUnit,
 }

+#[derive(Debug)]
 pub enum ExpiryUnit {
     Seconds,
     Milliseconds,
 }

+#[derive(Debug)]
 pub struct DatabaseEntry {
     pub expiry: Option<KeyExpiry>,
     pub value_type: ValueType,
@@ -394,6 +398,7 @@ pub struct HashTableSizeInfo {

 pub type DatabaseIndex = usize;

+#[derive(Debug)]
 pub struct RDBDatabase {
     pub database_index: DatabaseIndex,
     pub size_hints: HashTableSizeInfo,
@@ -423,11 +428,12 @@ fn parse_db_key_value(
         value: value,
     };

-    let key_data = if let RedisValue::String(data) = key {
-        data
-    } else {
-        return Err(ParseError::UnexpectedEof);
+    let key_data = match key {
+        RedisValue::String(data) => data,
+        RedisValue::Integer(data) => data.to_string().as_bytes().to_vec(),
+        _ => return Err(ParseError::InvalidMetadata),
     };
+
     hash_table.insert(key_data, database_entry);

     Ok(())
@@ -580,6 +586,7 @@ impl FromBytes for RDBDatabase {
     }
 }

+#[derive(Debug)]
 pub struct RDBFile {
     pub header: RDBHeader,
     pub metadata: Option<RDBMetaData>,
@@ -649,17 +656,23 @@ impl FromBytes for RDBFile {
 }

 impl RDBFile {
-    pub fn read(dir: String, dbfilename: String) -> Result<Self, anyhow::Error> {
+    pub fn read(dir: String, dbfilename: String) -> Result<Option<Self>, anyhow::Error> {
         let dir = Path::new(&dir);
         let file_path = dir.join(dbfilename);

         // Read file to bytes
-        let bytes = fs::read(&file_path)?;
-        let (rdb_file, consumed) = RDBFile::from_bytes(&bytes)?;
+        if file_path.exists() {
+            let bytes = fs::read(&file_path)?;
+            let (rdb_file, consumed) = RDBFile::from_bytes(&bytes)?;
+
+            // sanity check
+            assert!(bytes.len() == consumed);
-        // sanity check
-        assert!(bytes.len() == consumed);
-        Ok(rdb_file)
+            Ok(Some(rdb_file))
+        } else {
+            File::create(file_path)?;
+            Ok(None)
+        }
     }
 }
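
RDBFile::read now returns Result<Option<Self>, anyhow::Error>: a missing file yields Ok(None) (and an empty file is created in its place) instead of an I/O error, while read and parse failures still propagate through Err. A rough usage sketch of how a caller can branch on that result, reusing only names that appear in this diff:

match RDBFile::read(dir, dbfilename) {
    Ok(Some(rdb_file)) => {
        // File existed and parsed: seed the in-memory cache from rdb_file.databases.
    }
    Ok(None) => {
        // No RDB file on disk yet: start with an empty dataset.
    }
    Err(err) => eprintln!("failed to load RDB file: {err}"),
}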
diff --git a/src/resp_commands.rs b/src/resp_commands.rs
index 9d35b1b..d61a4be 100644
--- a/src/resp_commands.rs
+++ b/src/resp_commands.rs
@@ -128,6 +128,7 @@ impl RedisCommands {
             RC::Echo(echo_string) => resp!(echo_string),
             RC::Get(key) => {
                 let mut cache = cache.lock().unwrap();
+
                 match cache.get(&key).cloned() {
                     Some(entry) => {
                         if entry.is_expired() {
@@ -223,35 +224,15 @@ impl RedisCommands {
                 let cache = cache.lock().unwrap();
                 let regex = Regex::new(&query).unwrap();

-                let config = config.clone();
-
-                if let Some(conf) = config.as_ref() {
-                    let dir = conf.dir.clone().unwrap();
-                    let dbfilename = conf.dbfilename.clone().unwrap();
-                    let rdb_file = RDBFile::read(dir, dbfilename).unwrap();
-
-                    let hash_table = &rdb_file.databases.get(&0).unwrap().hash_table;
-                    let matching_keys: Vec<RT> = hash_table
-                        .keys()
-                        .map(|key| str::from_utf8(key).unwrap())
-                        .filter_map(|key| {
-                            regex
-                                .is_match(key)
-                                .then(|| RT::BulkString(key.as_bytes().to_vec()))
-                        })
-                        .collect();
-                    RT::Array(matching_keys).to_resp_bytes()
-                } else {
-                    let matching_keys: Vec<RT> = cache
-                        .keys()
-                        .filter_map(|key| {
-                            regex
-                                .is_match(key)
-                                .then(|| RT::BulkString(key.as_bytes().to_vec()))
-                        })
-                        .collect();
-                    RT::Array(matching_keys).to_resp_bytes()
-                }
+                let matching_keys: Vec<RT> = cache
+                    .keys()
+                    .filter_map(|key| {
+                        regex
+                            .is_match(key)
+                            .then(|| RT::BulkString(key.as_bytes().to_vec()))
+                    })
+                    .collect();
+                RT::Array(matching_keys).to_resp_bytes()
             }
             RC::Invalid => todo!(),
         }
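
With the cache seeded at startup, the KEYS handler now filters only the in-memory keys instead of re-reading the RDB file on every request. A self-contained sketch of the same filtering pattern with hypothetical keys (assumes the regex crate, as used above):

use regex::Regex;
use std::collections::HashMap;

fn main() {
    let cache: HashMap<String, u32> =
        HashMap::from([("foo".into(), 1), ("foobar".into(), 2), ("baz".into(), 3)]);

    // Build the pattern once, then keep only the keys it matches.
    let regex = Regex::new("foo.*").unwrap();
    let mut matching: Vec<&String> = cache.keys().filter(|key| regex.is_match(key)).collect();
    matching.sort();

    assert_eq!(matching, vec!["foo", "foobar"]);
}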