 src/lib.rs  | 15
 src/main.rs | 52
 2 files changed, 43 insertions(+), 24 deletions(-)
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,6 +11,7 @@ pub mod shared_cache;
 pub struct Config {
     pub dir: Option<String>,
     pub dbfilename: Option<String>,
+    pub port: Option<String>,
 }
 
 pub type SharedConfig = Arc<Option<Config>>;
@@ -25,6 +26,7 @@ impl Config {
 
         let mut dir = None;
         let mut dbfilename = None;
+        let mut port = None;
 
         let mut i = 1; // Skip program name
         while i < args.len() {
@@ -43,12 +45,23 @@ impl Config {
                     dbfilename = Some(args[i + 1].clone());
                     i += 2;
                 }
+                "--port" => {
+                    if i + 1 >= args.len() {
+                        return Err("--port requires a value".to_string());
+                    }
+                    port = Some(args[i + 1].clone());
+                    i += 2;
+                }
                 _ => {
                     return Err(format!("Unknown argument: {}", args[i]));
                 }
             }
         }
 
-        Ok(Some(Config { dir, dbfilename }))
+        Ok(Some(Config {
+            dir,
+            dbfilename,
+            port,
+        }))
     }
 }
diff --git a/src/main.rs b/src/main.rs
index e82527e..5fdd6c3 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -57,37 +57,39 @@ fn handle_client(mut stream: TcpStream, cache: SharedCache, config: SharedConfig
 }
 
 fn main() -> std::io::Result<()> {
-    let listener = TcpListener::bind("127.0.0.1:6379").unwrap();
     let cache: SharedCache = Arc::new(Mutex::new(HashMap::new()));
     let mut config: SharedConfig = Arc::new(None);
-
-    spawn_cleanup_thread(cache.clone());
+    let mut port = "6379".to_string();
 
     match Config::new() {
         Ok(conf) => {
             if let Some(conf) = conf {
                 let mut cache = cache.lock().unwrap();
+                let dir = conf.dir.clone().unwrap_or("".to_string());
+                let dbfilename = conf.dbfilename.clone().unwrap_or("".to_string());
+                port = conf.port.clone().unwrap_or("6379".to_string());
+                if let Ok(rdb_file) = RDBFile::read(dir, dbfilename) {
+                    if let Some(rdb) = rdb_file {
+                        let hash_table = &rdb.databases.get(&0).unwrap().hash_table;
 
-                let dir = conf.dir.clone().unwrap();
-                let dbfilename = conf.dbfilename.clone().unwrap();
-                if let Some(rdb_file) = RDBFile::read(dir, dbfilename).unwrap() {
-                    let hash_table = &rdb_file.databases.get(&0).unwrap().hash_table;
-
-                    for (key, db_entry) in hash_table.iter() {
-                        let value = match &db_entry.value {
-                            RedisValue::String(data) => String::from_utf8(data.clone()).unwrap(),
-                            RedisValue::Integer(data) => data.to_string(),
-                            _ => {
-                                unreachable!()
-                            }
-                        };
-                        let expires_at = if let Some(key_expiry) = &db_entry.expiry {
-                            Some(key_expiry.timestamp)
-                        } else {
-                            None
-                        };
-                        let cache_entry = CacheEntry { value, expires_at };
-                        cache.insert(String::from_utf8(key.clone()).unwrap(), cache_entry);
+                        for (key, db_entry) in hash_table.iter() {
+                            let value = match &db_entry.value {
+                                RedisValue::String(data) => {
+                                    String::from_utf8(data.clone()).unwrap()
+                                }
+                                RedisValue::Integer(data) => data.to_string(),
+                                _ => {
+                                    unreachable!()
+                                }
+                            };
+                            let expires_at = if let Some(key_expiry) = &db_entry.expiry {
+                                Some(key_expiry.timestamp)
+                            } else {
+                                None
+                            };
+                            let cache_entry = CacheEntry { value, expires_at };
+                            cache.insert(String::from_utf8(key.clone()).unwrap(), cache_entry);
+                        }
                     }
                 }
                 config = Arc::new(Some(conf));
@@ -99,6 +101,10 @@ fn main() -> std::io::Result<()> {
         }
     }
 
+    let listener = TcpListener::bind(format!("127.0.0.1:{}", port)).unwrap();
+
+    spawn_cleanup_thread(cache.clone());
+
     for stream in listener.incoming() {
         match stream {
             Ok(stream) => {
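The key reordering in main.rs is that TcpListener::bind now runs only after Config::new() has been parsed, so a --port value can replace the default 6379. A minimal standalone sketch of that bind-after-parse pattern (not the project's code; the argument scan below is a simplified stand-in for Config::new):

    use std::net::TcpListener;

    fn main() -> std::io::Result<()> {
        // Default port, overridden when a --port argument is supplied.
        let args: Vec<String> = std::env::args().skip(1).collect();
        let port = args
            .windows(2)
            .find(|pair| pair[0] == "--port")
            .map(|pair| pair[1].clone())
            .unwrap_or_else(|| "6379".to_string());

        // Bind only after the port has been resolved, mirroring the reordering in the diff.
        let listener = TcpListener::bind(format!("127.0.0.1:{}", port))?;
        println!("listening on {}", listener.local_addr()?);
        Ok(())
    }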
