starting refactor to shadcn and redb
src-tauri/.gitignore (vendored, new file) | 7
@@ -0,0 +1,7 @@
# Generated by Cargo
# will have compiled files and executables
/target/

# Generated by Tauri
# will have schema files for capabilities auto-completion
/gen/schemas
src-tauri/Cargo.lock (generated) | 3152
File diff suppressed because it is too large
@@ -3,21 +3,23 @@ name = "resona"
version = "0.1.0"
description = "A Tauri App"
authors = ["you"]
license = ""
repository = ""
default-run = "resona"
edition = "2021"
rust-version = "1.60"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
# The `_lib` suffix may seem redundant but it is necessary
# to make the lib name unique and wouldn't conflict with the bin name.
# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519
name = "resona_lib"
crate-type = ["staticlib", "cdylib", "rlib"]

[build-dependencies]
tauri-build = { version = "2.0.2", features = [] }
tauri-build = { version = "2", features = [] }

[dependencies]
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
tauri = { version = "2.0", features = [] }
reqwest = { version = "0.12", features = ["json"] }
tokio = { version = "1.36", features = ["full"] }
uuid = { version = "1.7", features = ["v4", "serde"] }
rusqlite = "0.32"
tauri = { version = "2", features = [] }
tauri-plugin-opener = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
@@ -4,6 +4,7 @@
  "description": "Capability for the main window",
  "windows": ["main"],
  "permissions": [
    "core:default"
    "core:default",
    "opener:default"
  ]
}
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default"]}}
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default","opener:default"]}}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,75 +0,0 @@
use crate::models::{RequestBody, ResponseBody};
use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
use std::collections::HashMap;
use std::str::FromStr;
use std::time::Instant;

pub async fn send_request(request: RequestBody) -> Result<ResponseBody, String> {
    let client = reqwest::Client::new();

    // Create headers
    let mut headers = HeaderMap::new();
    for header in request.headers.iter().filter(|h| h.enabled) {
        headers.insert(
            HeaderName::from_str(&header.key).map_err(|e| e.to_string())?,
            HeaderValue::from_str(&header.value).map_err(|e| e.to_string())?,
        );
    }

    let start_time = Instant::now();

    // Build and send request
    let response = match request.method.as_str() {
        "GET" => client.get(&request.url),
        "POST" => client.post(&request.url),
        "PUT" => client.put(&request.url),
        "DELETE" => client.delete(&request.url),
        "PATCH" => client.patch(&request.url),
        _ => return Err("Unsupported HTTP method".to_string()),
    }
    .headers(headers);

    // Add body for methods that support it
    let response = if let Some(body) = request.body {
        response.body(body)
    } else {
        response
    };

    // Send the request
    let response = response
        .send()
        .await
        .map_err(|e| format!("Failed to send request: {}", e))?;

    // Process response
    let status = response.status().as_u16();
    let status_text = response.status().to_string();

    // Convert response headers
    let headers: HashMap<String, String> = response
        .headers()
        .iter()
        .map(|(k, v)| {
            (
                k.to_string(),
                v.to_str().unwrap_or("").to_string(),
            )
        })
        .collect();

    let body = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response body: {}", e))?;

    let elapsed = start_time.elapsed().as_millis();

    Ok(ResponseBody {
        status,
        status_text,
        headers,
        body,
        time: elapsed,
    })
}
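Not part of this commit, but worth noting next to the deleted dispatch above: reqwest can parse the HTTP verb directly instead of matching on method strings. A minimal sketch under that assumption (send_raw is a hypothetical helper, not a function from this repo):

use std::str::FromStr;

use reqwest::{Client, Method};

// Hypothetical helper: parse the method string rather than matching on
// "GET" / "POST" / ... explicitly, then build and send the request.
async fn send_raw(method: &str, url: &str, body: Option<String>) -> Result<String, String> {
    let method = Method::from_str(method).map_err(|e| e.to_string())?;
    let client = Client::new();

    let mut req = client.request(method, url);
    if let Some(body) = body {
        req = req.body(body);
    }

    let resp = req
        .send()
        .await
        .map_err(|e| format!("Failed to send request: {}", e))?;

    resp.text()
        .await
        .map_err(|e| format!("Failed to read response body: {}", e))
}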
@@ -1,49 +0,0 @@
use crate::storage::{Storage, Collection};
use std::sync::Mutex;
use tauri::State;

#[tauri::command]
pub async fn create_collection(
    state: State<'_, Mutex<Storage>>,
    workspace_id: String,
    name: String,
    description: Option<String>,
) -> Result<Collection, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage
        .create_collection(&workspace_id, &name, description.as_deref())
        .map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn get_workspace_collections(
    state: State<'_, Mutex<Storage>>,
    workspace_id: String,
) -> Result<Vec<Collection>, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage
        .get_workspace_collections(&workspace_id)
        .map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn update_collection(
    state: State<'_, Mutex<Storage>>,
    id: String,
    name: String,
    description: Option<String>,
) -> Result<Collection, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage
        .update_collection(&id, &name, description.as_deref())
        .map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn delete_collection(
    state: State<'_, Mutex<Storage>>,
    id: String,
) -> Result<(), String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage.delete_collection(&id).map_err(|e| e.to_string())
}
@@ -1,19 +0,0 @@
mod workspace;
mod collection;

pub use workspace::*;
pub use collection::*;

pub fn handlers() -> impl Fn(tauri::ipc::Invoke) -> bool {
    tauri::generate_handler![
        create_workspace,
        get_workspaces,
        get_workspace,
        update_workspace,
        delete_workspace,
        create_collection,
        get_workspace_collections,
        update_collection,
        delete_collection
    ]
}
@@ -1,54 +0,0 @@
use crate::storage::{Storage, Workspace};
use std::sync::Mutex;
use tauri::State;

#[tauri::command]
pub async fn create_workspace(
    state: State<'_, Mutex<Storage>>,
    name: String,
    description: Option<String>,
) -> Result<Workspace, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage
        .create_workspace(&name, description.as_deref())
        .map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn get_workspaces(
    state: State<'_, Mutex<Storage>>,
) -> Result<Vec<Workspace>, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage.get_workspaces().map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn get_workspace(
    state: State<'_, Mutex<Storage>>,
    id: String,
) -> Result<Option<Workspace>, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage.get_workspace(&id).map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn update_workspace(
    state: State<'_, Mutex<Storage>>,
    id: String,
    name: String,
    description: Option<String>,
) -> Result<Workspace, String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage
        .update_workspace(&id, &name, description.as_deref())
        .map_err(|e| e.to_string())
}

#[tauri::command]
pub async fn delete_workspace(
    state: State<'_, Mutex<Storage>>,
    id: String,
) -> Result<(), String> {
    let mut storage = state.lock().map_err(|e| e.to_string())?;
    storage.delete_workspace(&id).map_err(|e| e.to_string())
}
@@ -7,6 +7,7 @@ fn greet(name: &str) -> String {
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        .plugin(tauri_plugin_opener::init())
        .invoke_handler(tauri::generate_handler![greet])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
@@ -1,27 +1,6 @@
mod models;
mod storage;
mod commands;
mod client;

use commands::handlers as h;
use storage::Storage;
use tauri::Manager;
use std::sync::Mutex;
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]

fn main() {
    let handlers = h();
    tauri::Builder::default()
        .setup(|app| {
            let app_dir = app.path().app_data_dir()
                .expect("Failed to get app data dir");

            let storage = Storage::new(app_dir)
                .expect("Failed to initialize storage");

            app.manage(Mutex::new(storage));
            Ok(())
        })
        .invoke_handler(handlers)
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
    resona_lib::run()
}
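After this hunk the binary's main() only calls resona_lib::run(), so any managed state has to be wired up inside the library's run() instead. A hedged sketch of what that wiring could look like once a replacement storage layer exists; the setup body is an assumption modelled on the deleted main(), not code from this commit:

use tauri::Manager;

#[tauri::command]
fn greet(name: &str) -> String {
    format!("Hello, {}! You've been greeted from Rust!", name)
}

#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        .plugin(tauri_plugin_opener::init())
        .setup(|app| {
            // Same pattern as the deleted main(): resolve the app data dir here,
            // construct the future store, and register it with app.manage(...).
            let _app_dir = app.path().app_data_dir()?;
            Ok(())
        })
        .invoke_handler(tauri::generate_handler![greet])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}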
@@ -1,35 +0,0 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Variable {
    pub id: String,
    pub name: String,
    pub value: String,
    pub description: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Header {
    pub id: String,
    pub key: String,
    pub value: String,
    pub enabled: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct RequestBody {
    pub url: String,
    pub method: String,
    pub headers: Vec<Header>,
    pub body: Option<String>,
}

#[derive(Debug, Serialize)]
pub struct ResponseBody {
    pub status: u16,
    pub status_text: String,
    pub headers: HashMap<String, String>,
    pub body: String,
    pub time: u128,
}
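These structs defined the payloads that crossed the Tauri IPC boundary; command arguments and return values are (de)serialized with serde. A standalone round-trip sketch using the same field layout (the structs are re-declared here only so the snippet compiles on its own):

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, Clone)]
struct Header {
    id: String,
    key: String,
    value: String,
    enabled: bool,
}

#[derive(Debug, Serialize, Deserialize)]
struct RequestBody {
    url: String,
    method: String,
    headers: Vec<Header>,
    body: Option<String>,
}

fn main() -> serde_json::Result<()> {
    // Roughly the JSON a frontend would send as a command argument.
    let raw = r#"{
        "url": "https://example.com",
        "method": "GET",
        "headers": [{"id": "1", "key": "Accept", "value": "application/json", "enabled": true}],
        "body": null
    }"#;

    let request: RequestBody = serde_json::from_str(raw)?;
    println!("{} {}", request.method, request.url);

    // And back out again, as a command's return value would be serialized.
    println!("{}", serde_json::to_string_pretty(&request)?);
    Ok(())
}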
@@ -1,103 +0,0 @@
use super::*;
use rusqlite::{params, Result};

impl Storage {
    pub fn create_collection(
        &mut self,
        workspace_id: &str,
        name: &str,
        description: Option<&str>,
    ) -> Result<Collection> {
        let now = Self::now();
        let id = uuid::Uuid::new_v4().to_string();

        self.conn.get_mut().unwrap().execute(
            "INSERT INTO collections (id, workspace_id, name, description, created_at, updated_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
            params![id, workspace_id, name, description, now, now],
        )?;

        Ok(Collection {
            id,
            workspace_id: workspace_id.to_string(),
            name: name.to_string(),
            description: description.map(String::from),
            created_at: now,
            updated_at: now,
        })
    }

    pub fn get_workspace_collections(&mut self, workspace_id: &str) -> Result<Vec<Collection>> {
        let mut stmt = self.conn.get_mut().unwrap().prepare(
            "SELECT id, workspace_id, name, description, created_at, updated_at
             FROM collections
             WHERE workspace_id = ?
             ORDER BY created_at DESC"
        )?;

        let collections = stmt.query_map([workspace_id], |row| {
            Ok(Collection {
                id: row.get(0)?,
                workspace_id: row.get(1)?,
                name: row.get(2)?,
                description: row.get(3)?,
                created_at: row.get(4)?,
                updated_at: row.get(5)?,
            })
        })?;

        collections.collect()
    }

    pub fn update_collection(
        &mut self,
        id: &str,
        name: &str,
        description: Option<&str>,
    ) -> Result<Collection> {
        let now = Self::now();

        self.conn.get_mut().unwrap().execute(
            "UPDATE collections
             SET name = ?1, description = ?2, updated_at = ?3
             WHERE id = ?4",
            params![name, description, now, id],
        )?;

        let mut stmt = self.conn.get_mut().unwrap().prepare(
            "SELECT workspace_id, created_at FROM collections WHERE id = ?"
        )?;

        let (workspace_id, created_at) = stmt.query_row([id], |row| {
            Ok((row.get::<_, String>(0)?, row.get::<_, i64>(1)?))
        })?;

        Ok(Collection {
            id: id.to_string(),
            workspace_id,
            name: name.to_string(),
            description: description.map(String::from),
            created_at,
            updated_at: now,
        })
    }

    pub fn delete_collection(&mut self, id: &str) -> Result<()> {
        let tx = self.conn.get_mut().unwrap().transaction()?;

        // Delete all requests in this collection
        tx.execute(
            "DELETE FROM requests WHERE collection_id = ?",
            params![id],
        )?;

        // Delete the collection
        tx.execute(
            "DELETE FROM collections WHERE id = ?",
            params![id],
        )?;

        tx.commit()?;
        Ok(())
    }
}
@@ -1,158 +0,0 @@
mod workspace;
mod collection;

pub use workspace::*;
pub use collection::*;

use rusqlite::Connection;
use serde::{Deserialize, Serialize};
use std::fs;
use std::sync::RwLock;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};

#[derive(Debug, Serialize, Deserialize)]
pub struct Workspace {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub created_at: i64,
    pub updated_at: i64,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Collection {
    pub id: String,
    pub workspace_id: String,
    pub name: String,
    pub description: Option<String>,
    pub created_at: i64,
    pub updated_at: i64,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Variable {
    pub id: String,
    pub workspace_id: String,
    pub name: String,
    pub value: String,
    pub description: Option<String>,
    pub created_at: i64,
    pub updated_at: i64,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub id: String,
    pub collection_id: String,
    pub name: String,
    pub method: String,
    pub url: String,
    pub created_at: i64,
    pub updated_at: i64,
}

pub struct Storage {
    conn: RwLock<Connection>,
    data_dir: PathBuf,
}

impl Storage {
    pub fn new(app_dir: PathBuf) -> rusqlite::Result<Self> {
        let db_path = app_dir.join("resona.db");
        let data_dir = app_dir.join("data");

        fs::create_dir_all(&data_dir).map_err(|e| rusqlite::Error::InvalidPath(data_dir.clone()))?;

        let conn = Connection::open(db_path)?;
        Self::init_database(&conn)?;

        Ok(Storage { conn: RwLock::new(conn), data_dir })
    }

    fn init_database(conn: &Connection) -> rusqlite::Result<()> {
        conn.execute(
            "CREATE TABLE IF NOT EXISTS workspaces (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                description TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL
            )",
            [],
        )?;

        conn.execute(
            "CREATE TABLE IF NOT EXISTS collections (
                id TEXT PRIMARY KEY,
                workspace_id TEXT NOT NULL,
                name TEXT NOT NULL,
                description TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                FOREIGN KEY(workspace_id) REFERENCES workspaces(id)
            )",
            [],
        )?;

        conn.execute(
            "CREATE TABLE IF NOT EXISTS variables (
                id TEXT PRIMARY KEY,
                workspace_id TEXT NOT NULL,
                name TEXT NOT NULL,
                value TEXT NOT NULL,
                description TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                FOREIGN KEY(workspace_id) REFERENCES workspaces(id)
            )",
            [],
        )?;

        conn.execute(
            "CREATE TABLE IF NOT EXISTS requests (
                id TEXT PRIMARY KEY,
                collection_id TEXT NOT NULL,
                name TEXT NOT NULL,
                method TEXT NOT NULL,
                url TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                FOREIGN KEY(collection_id) REFERENCES collections(id)
            )",
            [],
        )?;

        // Create indexes
        conn.execute(
            "CREATE INDEX IF NOT EXISTS idx_collections_workspace
             ON collections(workspace_id)",
            [],
        )?;

        conn.execute(
            "CREATE INDEX IF NOT EXISTS idx_variables_workspace
             ON variables(workspace_id)",
            [],
        )?;

        conn.execute(
            "CREATE INDEX IF NOT EXISTS idx_requests_collection
             ON requests(collection_id)",
            [],
        )?;

        Ok(())
    }

    pub(crate) fn now() -> i64 {
        SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs() as i64
    }

    pub(crate) fn get_request_data_path(&self, request_id: &str) -> PathBuf {
        self.data_dir.join(format!("request_{}.json", request_id))
    }
}
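The module above is the rusqlite schema that the commit title says is being refactored toward redb. Not part of this commit, but a hedged sketch of how the same Workspace rows could be held in a redb table; the table name, the resona.redb file name, and the JSON-encoded values are illustrative assumptions:

use redb::{Database, ReadableTable, TableDefinition};
use serde::{Deserialize, Serialize};

// Keyed by workspace id; values stored as JSON strings for simplicity.
const WORKSPACES: TableDefinition<&str, &str> = TableDefinition::new("workspaces");

#[derive(Debug, Serialize, Deserialize)]
struct Workspace {
    id: String,
    name: String,
    description: Option<String>,
    created_at: i64,
    updated_at: i64,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let db = Database::create("resona.redb")?;

    let workspace = Workspace {
        id: "w1".into(),
        name: "Default".into(),
        description: None,
        created_at: 0,
        updated_at: 0,
    };

    // Writes happen inside a write transaction, roughly where the SQL INSERT lived.
    let write_txn = db.begin_write()?;
    {
        let mut table = write_txn.open_table(WORKSPACES)?;
        table.insert(workspace.id.as_str(), serde_json::to_string(&workspace)?.as_str())?;
    }
    write_txn.commit()?;

    // Reads use a separate read transaction, replacing the SELECT queries.
    let read_txn = db.begin_read()?;
    let table = read_txn.open_table(WORKSPACES)?;
    if let Some(value) = table.get("w1")? {
        let loaded: Workspace = serde_json::from_str(value.value())?;
        println!("{:?}", loaded);
    }
    Ok(())
}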
@@ -1,115 +0,0 @@
use super::*;
use rusqlite::{params, Result, OptionalExtension};

impl Storage {
    pub fn create_workspace(&mut self, name: &str, description: Option<&str>) -> Result<Workspace> {
        let now = Self::now();
        let id = uuid::Uuid::new_v4().to_string();

        self.conn.get_mut().unwrap().execute(
            "INSERT INTO workspaces (id, name, description, created_at, updated_at)
             VALUES (?1, ?2, ?3, ?4, ?5)",
            params![id, name, description, now, now],
        )?;

        Ok(Workspace {
            id,
            name: name.to_string(),
            description: description.map(String::from),
            created_at: now,
            updated_at: now,
        })
    }

    pub fn get_workspaces(&mut self) -> Result<Vec<Workspace>> {
        let mut stmt = self.conn.get_mut().unwrap().prepare(
            "SELECT id, name, description, created_at, updated_at
             FROM workspaces
             ORDER BY created_at DESC"
        )?;

        let workspace_iter = stmt.query_map([], |row| {
            Ok(Workspace {
                id: row.get(0)?,
                name: row.get(1)?,
                description: row.get(2)?,
                created_at: row.get(3)?,
                updated_at: row.get(4)?,
            })
        })?;

        workspace_iter.collect()
    }

    pub fn get_workspace(&mut self, id: &str) -> Result<Option<Workspace>> {
        let mut stmt = self.conn.get_mut().unwrap().prepare(
            "SELECT id, name, description, created_at, updated_at
             FROM workspaces
             WHERE id = ?"
        )?;

        stmt.query_row(params![id], |row| {
            Ok(Workspace {
                id: row.get(0)?,
                name: row.get(1)?,
                description: row.get(2)?,
                created_at: row.get(3)?,
                updated_at: row.get(4)?,
            })
        })
        .optional()
    }

    pub fn update_workspace(&mut self, id: &str, name: &str, description: Option<&str>) -> Result<Workspace> {
        let now = Self::now();

        self.conn.get_mut().unwrap().execute(
            "UPDATE workspaces
             SET name = ?1, description = ?2, updated_at = ?3
             WHERE id = ?4",
            params![name, description, now, id],
        )?;

        Ok(Workspace {
            id: id.to_string(),
            name: name.to_string(),
            description: description.map(String::from),
            created_at: self.get_workspace(id)?.map(|w| w.created_at).unwrap_or(now),
            updated_at: now,
        })
    }

    pub fn delete_workspace(&mut self, id: &str) -> Result<()> {
        // Start a transaction to ensure data consistency
        let tx = self.conn.get_mut().unwrap().transaction()?;

        // Delete variables
        tx.execute(
            "DELETE FROM variables WHERE workspace_id = ?",
            params![id],
        )?;

        // Delete requests from collections in this workspace
        tx.execute(
            "DELETE FROM requests
             WHERE collection_id IN (
                SELECT id FROM collections WHERE workspace_id = ?
             )",
            params![id],
        )?;

        // Delete collections
        tx.execute(
            "DELETE FROM collections WHERE workspace_id = ?",
            params![id],
        )?;

        // Finally delete the workspace
        tx.execute("DELETE FROM workspaces WHERE id = ?", params![id])?;

        // Commit the transaction
        tx.commit()?;

        Ok(())
    }
}
@@ -12,7 +12,7 @@
  "app": {
    "windows": [
      {
        "title": "Resona",
        "title": "resona",
        "width": 800,
        "height": 600
      }