From e12cadaf85d523643d64eee2d08cefe256e4cdda Mon Sep 17 00:00:00 2001
From: honhimW
Date: Wed, 25 Dec 2024 10:30:31 +0800
Subject: [PATCH] feat: [Explorer] ReJSON-RL support

- read value & highlighting;
- value length: object/array/string;
---
 Cargo.toml           |  1 +
 src/cli.rs           |  2 +-
 src/redis_opt.rs     | 69 +++++++++++++++++++++++++++++++++++++++++++-
 src/tabs/explorer.rs | 17 ++++++++++-
 4 files changed, 86 insertions(+), 3 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 1747562..073744d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -35,6 +35,7 @@ redis = { workspace = true, features = [
     "tokio-comp",
     "tokio-rustls-comp",
     "cluster-async",
+    "json"
 ] }
 deadpool-redis = { git = "https://github.com/honhimW/deadpool.git", branch = "logging", package = "deadpool-redis", features = [
     "serde",
diff --git a/src/cli.rs b/src/cli.rs
index 3259ac7..03bdc1a 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -4,7 +4,7 @@ use std::collections::BTreeMap;
 
 pub fn cli() -> Result {
     let command = Command::new("ratisui")
-        .about("Redis TUI build on Ratatui")
+        .about("Redis TUI built with Ratatui.")
         .args([
             arg!(-t --target <TARGET> "named redis target, default read from config file if exist"),
             arg!(-T --theme <THEME> "theme configuration file name, under ~/.config/ratisui/theme/<THEME>.ron"),
diff --git a/src/redis_opt.rs b/src/redis_opt.rs
index 06aadd7..a7e7eb3 100644
--- a/src/redis_opt.rs
+++ b/src/redis_opt.rs
@@ -11,7 +11,7 @@ use futures::StreamExt;
 use log::{info};
 use once_cell::sync::Lazy;
 use redis::ConnectionAddr::{Tcp, TcpTls};
-use redis::{AsyncCommands, AsyncIter, Client, Cmd, ConnectionAddr, ConnectionInfo, ConnectionLike, FromRedisValue, RedisConnectionInfo, ScanOptions, ToRedisArgs, Value, VerbatimFormat};
+use redis::{AsyncCommands, AsyncIter, Client, Cmd, ConnectionAddr, ConnectionInfo, ConnectionLike, FromRedisValue, JsonAsyncCommands, RedisConnectionInfo, ScanOptions, ToRedisArgs, Value, VerbatimFormat};
 use std::collections::HashMap;
 use std::future::Future;
 use std::ops::DerefMut;
@@ -984,6 +984,73 @@
         }
     }
 
+    pub async fn json_type<K: ToRedisArgs + Send + Sync>(&self, key: K) -> Result<String> {
+        if self.is_cluster() {
+            let pool = &self.cluster_pool.clone().context("should be cluster")?;
+            let mut connection = pool.get().await?;
+            let v: Vec<String> = connection.json_type(key, ".").await?;
+            let s = v.get(0).cloned().unwrap_or_default();
+            Ok(s)
+        } else {
+            let mut connection = self.pool.get().await?;
+            let v: Vec<String> = connection.json_type(key, ".").await?;
+            let s = v.get(0).cloned().unwrap_or_default();
+            Ok(s)
+        }
+    }
+
+    pub async fn json_strlen<K: ToRedisArgs + Send + Sync>(&self, key: K) -> Result<usize> {
+        if self.is_cluster() {
+            let pool = &self.cluster_pool.clone().context("should be cluster")?;
+            let mut connection = pool.get().await?;
+            let v: usize = connection.json_str_len(key, ".").await?;
+            Ok(v)
+        } else {
+            let mut connection = self.pool.get().await?;
+            let v: usize = connection.json_str_len(key, ".").await?;
+            Ok(v)
+        }
+    }
+
+    pub async fn json_arrlen<K: ToRedisArgs + Send + Sync>(&self, key: K) -> Result<usize> {
+        if self.is_cluster() {
+            let pool = &self.cluster_pool.clone().context("should be cluster")?;
+            let mut connection = pool.get().await?;
+            let v: usize = connection.json_arr_len(key, ".").await?;
+            Ok(v)
+        } else {
+            let mut connection = self.pool.get().await?;
+            let v: usize = connection.json_arr_len(key, ".").await?;
+            Ok(v)
+        }
+    }
+
+    pub async fn json_objlen<K: ToRedisArgs + Send + Sync>(&self, key: K) -> Result<usize> {
+        if self.is_cluster() {
+            let pool = &self.cluster_pool.clone().context("should be cluster")?;
+            let mut connection = pool.get().await?;
+            let v: usize = connection.json_obj_len(key, ".").await?;
+            Ok(v)
+        } else {
+            let mut connection = self.pool.get().await?;
+            let v: usize = connection.json_obj_len(key, ".").await?;
+            Ok(v)
+        }
+    }
+
+    pub async fn json_get<K: ToRedisArgs + Send + Sync, V: FromRedisValue>(&self, key: K) -> Result<V> {
+        if self.is_cluster() {
+            let pool = &self.cluster_pool.clone().context("should be cluster")?;
+            let mut connection = pool.get().await?;
+            let v: V = connection.json_get(key, ".").await?;
+            Ok(v)
+        } else {
+            let mut connection = self.pool.get().await?;
+            let v: V = connection.json_get(key, ".").await?;
+            Ok(v)
+        }
+    }
+
     // pub async fn sscan<K: ToRedisArgs + Send + Sync>(&self, key: K) -> Result<Vec<String>> {
     //     if self.is_cluster() {
     //         let pool = &self.cluster_pool.clone().context("should be cluster")?;
diff --git a/src/tabs/explorer.rs b/src/tabs/explorer.rs
index 973210f..301357e 100644
--- a/src/tabs/explorer.rs
+++ b/src/tabs/explorer.rs
@@ -166,7 +166,7 @@ fn get_type_color(key_type: &str) -> Color {
         "Set" | "set" => { get_color(|t| &t.tab.explorer.key_type.set) }
         "ZSet" | "zset" => { get_color(|t| &t.tab.explorer.key_type.zset) }
         "String" | "string" => { get_color(|t| &t.tab.explorer.key_type.string) }
-        "JSON" | "json" => { get_color(|t| &t.tab.explorer.key_type.json) }
+        "JSON" | "json" | "ReJSON-RL" | "ReJSON" => { get_color(|t| &t.tab.explorer.key_type.json) }
         "Stream" | "stream" => { get_color(|t| &t.tab.explorer.key_type.stream) }
         "unknown" => { get_color(|t| &t.tab.explorer.key_type.unknown) }
         _ => { Color::default() }
@@ -889,6 +889,16 @@ impl ExplorerTab {
                 "set" => Ok(op.scard(key_name_clone).await?),
                 "zset" => Ok(op.zcard(key_name_clone).await?),
                 "stream" => Ok(op.xlen(key_name_clone).await?),
+                "rejson-rl" => {
+                    let json_type = op.json_type(&key_name_clone).await?;
+                    let len = match json_type.to_ascii_lowercase().as_str() {
+                        "object" => op.json_objlen(key_name_clone).await?,
+                        "array" => op.json_arrlen(key_name_clone).await?,
+                        "string" => op.json_strlen(key_name_clone).await?,
+                        _ => 0,
+                    };
+                    Ok(len)
+                },
                 _ => Ok(0)
             }
         }).await;
@@ -969,6 +979,11 @@
                     }).collect();
                     data.selected_stream_value = (true, Some(hash_value));
                 }
+                "rejson-rl" => {
+                    let bytes: Vec<u8> = op.json_get(key_name_clone).await?;
+                    let result = deserialize_bytes(bytes).context("Failed to deserialize string")?;
+                    data.selected_string_value = (true, Some((result.0, result.1)));
+                }
                 _ => {}
             }
             Ok(data)