Added redb and moved workspaces to use the database; workspaces now use tags instead of environments.

This commit is contained in:
xyroscar
2025-11-26 16:38:11 -08:00
parent ce75694ffb
commit 0d23ffcaec
16 changed files with 1430 additions and 228 deletions

118
src-tauri/Cargo.lock generated
View File

@@ -451,8 +451,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
"serde",
"wasm-bindgen",
"windows-link 0.2.1",
]
@@ -679,13 +681,34 @@ dependencies = [
"crypto-common",
]
[[package]]
name = "directories"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
dependencies = [
"dirs-sys 0.4.1",
]
[[package]]
name = "dirs"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
dependencies = [
"dirs-sys",
"dirs-sys 0.5.0",
]
[[package]]
name = "dirs-sys"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
dependencies = [
"libc",
"option-ext",
"redox_users 0.4.6",
"windows-sys 0.48.0",
]
[[package]]
@@ -696,7 +719,7 @@ checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
dependencies = [
"libc",
"option-ext",
"redox_users",
"redox_users 0.5.2",
"windows-sys 0.61.2",
]
@@ -2858,6 +2881,15 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539"
[[package]]
name = "redb"
version = "2.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eca1e9d98d5a7e9002d0013e18d5a9b000aee942eb134883a82f06ebffb6c01"
dependencies = [
"libc",
]
[[package]]
name = "redox_syscall"
version = "0.5.18"
@@ -2867,6 +2899,17 @@ dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_users"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
dependencies = [
"getrandom 0.2.16",
"libredox",
"thiserror 1.0.69",
]
[[package]]
name = "redox_users"
version = "0.5.2"
@@ -2966,11 +3009,16 @@ dependencies = [
name = "resona"
version = "0.1.0"
dependencies = [
"chrono",
"directories",
"redb",
"serde",
"serde_json",
"tauri",
"tauri-build",
"tauri-plugin-opener",
"thiserror 2.0.17",
"uuid",
]
[[package]]
@@ -4688,6 +4736,15 @@ dependencies = [
"windows-targets 0.42.2",
]
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
@@ -4730,6 +4787,21 @@ dependencies = [
"windows_x86_64_msvc 0.42.2",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm 0.48.5",
"windows_aarch64_msvc 0.48.5",
"windows_i686_gnu 0.48.5",
"windows_i686_msvc 0.48.5",
"windows_x86_64_gnu 0.48.5",
"windows_x86_64_gnullvm 0.48.5",
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
@@ -4787,6 +4859,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
@@ -4805,6 +4883,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
@@ -4823,6 +4907,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
@@ -4853,6 +4943,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
@@ -4871,6 +4967,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
@@ -4889,6 +4991,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
@@ -4907,6 +5015,12 @@ version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"

View File

@@ -23,3 +23,12 @@ tauri-plugin-opener = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
# Database
redb = "2"
# Utilities
uuid = { version = "1", features = ["v4", "serde"] }
thiserror = "2"
chrono = { version = "0.4", features = ["serde"] }
directories = "5"

View File

@@ -0,0 +1,130 @@
//! Database initialization and management
use std::path::PathBuf;
use std::sync::Arc;
use directories::ProjectDirs;
use redb::Database as RedbDatabase;
use super::error::{DbError, DbResult};
use super::tables::*;
/// Main database wrapper.
///
/// Holds the redb handle behind an `Arc` so the wrapper can be cloned
/// cheaply and shared across Tauri command handlers.
pub struct Database {
    db: Arc<RedbDatabase>,
}
impl Database {
    /// Create or open the database at the default application data directory.
    ///
    /// Resolves the platform-specific data dir via `ProjectDirs`, creates
    /// missing parent directories, and initializes all tables up front.
    ///
    /// # Errors
    /// Returns `DbError::Io` when the data directory cannot be determined,
    /// or any redb error from opening/creating the file.
    pub fn open() -> DbResult<Self> {
        // Delegate to `open_at` so the create/init logic lives in one place
        // (previously duplicated between `open` and `open_at`).
        Self::open_at(Self::get_db_path()?)
    }

    /// Open (or create) a database file at a specific path.
    ///
    /// Shares all setup logic with `open`; also used directly by tests.
    pub fn open_at(path: PathBuf) -> DbResult<Self> {
        // Ensure the parent directory exists before redb creates the file.
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        // `create` opens an existing database or makes a new one.
        let db = RedbDatabase::create(&path)?;
        let database = Self { db: Arc::new(db) };
        // Initialize tables so later reads never hit a missing-table error.
        database.init_tables()?;
        Ok(database)
    }

    /// Resolve the default database path inside the per-user app data dir.
    fn get_db_path() -> DbResult<PathBuf> {
        let proj_dirs = ProjectDirs::from("com", "xyroscar", "resona")
            .ok_or_else(|| DbError::Io(std::io::Error::new(
                std::io::ErrorKind::NotFound,
                "Could not determine application data directory",
            )))?;
        Ok(proj_dirs.data_dir().join("resona.redb"))
    }

    /// Create every table inside a single write transaction.
    ///
    /// Opening a table in a write transaction creates it if absent, so this
    /// is idempotent on an existing database.
    fn init_tables(&self) -> DbResult<()> {
        let write_txn = self.db.begin_write()?;
        // Create main tables
        write_txn.open_table(WORKSPACES)?;
        write_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
        write_txn.open_table(COLLECTIONS)?;
        write_txn.open_table(REQUESTS)?;
        write_txn.open_table(VARIABLES)?;
        write_txn.open_table(APP_SETTINGS)?;
        // Create index tables
        write_txn.open_table(COLLECTIONS_BY_WORKSPACE)?;
        write_txn.open_table(REQUESTS_BY_COLLECTION)?;
        write_txn.open_table(REQUESTS_BY_WORKSPACE)?;
        write_txn.open_table(VARIABLES_BY_SCOPE)?;
        write_txn.open_table(WORKSPACES_BY_SYNC_GROUP)?;
        write_txn.commit()?;
        Ok(())
    }

    /// Get a reference to the underlying redb database.
    #[allow(dead_code)]
    pub fn inner(&self) -> &RedbDatabase {
        &self.db
    }

    /// Begin a read transaction.
    pub fn begin_read(&self) -> DbResult<redb::ReadTransaction> {
        Ok(self.db.begin_read()?)
    }

    /// Begin a write transaction.
    pub fn begin_write(&self) -> DbResult<redb::WriteTransaction> {
        Ok(self.db.begin_write()?)
    }
}
impl Clone for Database {
fn clone(&self) -> Self {
Self {
db: Arc::clone(&self.db),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::env::temp_dir;

    /// Smoke test: opening a database creates the file and all tables.
    #[test]
    fn test_database_creation() {
        // NOTE(review): fixed filename — parallel test runs on the same
        // machine could collide on this path; consider a unique suffix.
        let path = temp_dir().join("resona_test.redb");
        let _ = std::fs::remove_file(&path); // Clean up any previous test
        let db = Database::open_at(path.clone()).expect("Failed to create database");
        // Verify tables exist by attempting to read from them
        let read_txn = db.begin_read().expect("Failed to begin read transaction");
        let _ = read_txn.open_table(WORKSPACES).expect("Workspaces table should exist");
        // Clean up
        drop(db);
        let _ = std::fs::remove_file(&path);
    }
}

39
src-tauri/src/db/error.rs Normal file
View File

@@ -0,0 +1,39 @@
//! Database error types
use thiserror::Error;
/// Unified error type for the database layer.
///
/// Wraps every redb error family plus serde and IO failures so callers
/// only deal with one type via `DbResult`.
#[derive(Error, Debug)]
pub enum DbError {
    /// Failure opening or creating the database file.
    #[error("Database error: {0}")]
    Database(#[from] redb::DatabaseError),
    /// Low-level storage failure during a read/write.
    #[error("Storage error: {0}")]
    Storage(#[from] redb::StorageError),
    /// Failure opening a table inside a transaction.
    #[error("Table error: {0}")]
    Table(#[from] redb::TableError),
    /// Failure beginning a transaction.
    #[error("Transaction error: {0}")]
    Transaction(#[from] redb::TransactionError),
    /// Failure committing a write transaction.
    #[error("Commit error: {0}")]
    Commit(#[from] redb::CommitError),
    /// A requested record does not exist; the message names the id.
    #[error("Not found: {0}")]
    NotFound(String),
    /// JSON (de)serialization of a stored value failed.
    #[error("Serialization error: {0}")]
    Serialization(String),
    /// Filesystem error (e.g. creating the data directory).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
}

/// Convenience alias used throughout the db and service layers.
pub type DbResult<T> = Result<T, DbError>;
/// Convert a `DbError` into its display string.
///
/// Tauri commands return `Result<T, String>`, so this lets `?` style
/// conversion produce frontend-friendly error messages.
impl From<DbError> for String {
    fn from(err: DbError) -> Self {
        format!("{}", err)
    }
}

11
src-tauri/src/db/mod.rs Normal file
View File

@@ -0,0 +1,11 @@
//! Database module for Resona
//!
//! This module handles all database operations using redb as the storage backend.

mod database;
mod error;
mod tables;

// Public surface: the wrapper type, error/result aliases, and every table
// definition (service modules open tables directly by constant).
pub use database::Database;
pub use error::{DbError, DbResult};
pub use tables::*;

View File

@@ -0,0 +1,47 @@
//! Table definitions for redb
//!
//! All tables are defined here as constants for consistent access across the
//! application. Every table maps a `&str` key to a JSON-encoded `&str` value.
//! Index tables store JSON arrays of ids and are maintained manually by the
//! service layer — they are not kept in sync automatically by redb.

use redb::TableDefinition;

/// Workspaces table: workspace_id -> workspace JSON
pub const WORKSPACES: TableDefinition<&str, &str> = TableDefinition::new("workspaces");

/// Workspace sync groups table: sync_group_id -> sync_group JSON
pub const WORKSPACE_SYNC_GROUPS: TableDefinition<&str, &str> =
    TableDefinition::new("workspace_sync_groups");

/// Collections table: collection_id -> collection JSON
pub const COLLECTIONS: TableDefinition<&str, &str> = TableDefinition::new("collections");

/// Requests table: request_id -> request JSON
pub const REQUESTS: TableDefinition<&str, &str> = TableDefinition::new("requests");

/// Variables table: variable_id -> variable JSON
pub const VARIABLES: TableDefinition<&str, &str> = TableDefinition::new("variables");

/// App settings table: "settings" -> settings JSON (single row)
pub const APP_SETTINGS: TableDefinition<&str, &str> = TableDefinition::new("app_settings");

// Index tables for efficient lookups

/// Collections by workspace index: workspace_id -> collection_ids JSON array
pub const COLLECTIONS_BY_WORKSPACE: TableDefinition<&str, &str> =
    TableDefinition::new("idx_collections_by_workspace");

/// Requests by collection index: collection_id -> request_ids JSON array
pub const REQUESTS_BY_COLLECTION: TableDefinition<&str, &str> =
    TableDefinition::new("idx_requests_by_collection");

/// Requests by workspace (standalone) index: workspace_id -> request_ids JSON array
pub const REQUESTS_BY_WORKSPACE: TableDefinition<&str, &str> =
    TableDefinition::new("idx_requests_by_workspace");

/// Variables by scope index: scope_key -> variable_ids JSON array
/// scope_key format: "global", "workspace:{id}", "collection:{id}", "request:{id}"
pub const VARIABLES_BY_SCOPE: TableDefinition<&str, &str> =
    TableDefinition::new("idx_variables_by_scope");

/// Workspaces by sync group index: sync_group_id -> workspace_ids JSON array
pub const WORKSPACES_BY_SYNC_GROUP: TableDefinition<&str, &str> =
    TableDefinition::new("idx_workspaces_by_sync_group");

View File

@@ -1,14 +1,44 @@
// Learn more about Tauri commands at https://tauri.app/develop/calling-rust/
/// Demo command: build a greeting for `name`.
#[tauri::command]
fn greet(name: &str) -> String {
    let mut message = String::from("Hello, ");
    message.push_str(name);
    message.push_str("! You've been greeted from Rust!");
    message
}
// Resona - API Client Application
mod db;
mod workspaces;
use db::Database;
// Re-export workspace commands for generate_handler macro
use workspaces::{
add_workspace_to_sync_group, create_sync_group, create_workspace, delete_sync_group,
delete_workspace, get_sync_group, get_sync_group_for_workspace, get_sync_groups,
get_workspace, get_workspaces, get_workspaces_by_sync_group, remove_workspace_from_sync_group,
update_sync_group, update_workspace,
};
/// Application entry point: initialize storage, register commands, run Tauri.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    // Initializing the database before the builder so commands can rely on
    // managed state being present.
    // NOTE(review): `expect` aborts startup if the data dir is unwritable —
    // confirm this is the desired failure mode for end users.
    let db = Database::open().expect("Failed to initialize database");
    tauri::Builder::default()
        .plugin(tauri_plugin_opener::init())
        // Expose the database to commands as `State<Database>`.
        .manage(db)
        .invoke_handler(tauri::generate_handler![
            // Workspace commands
            get_workspaces,
            get_workspace,
            create_workspace,
            update_workspace,
            delete_workspace,
            // Sync group commands
            get_sync_groups,
            get_sync_group,
            get_sync_group_for_workspace,
            create_sync_group,
            update_sync_group,
            delete_sync_group,
            get_workspaces_by_sync_group,
            add_workspace_to_sync_group,
            remove_workspace_from_sync_group,
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}

View File

@@ -0,0 +1,143 @@
//! Tauri commands for operations on workspace
use tauri::State;
use crate::db::Database;
use super::types::{
CreateSyncGroupInput, CreateWorkspaceInput, UpdateSyncGroupInput, UpdateWorkspaceInput,
Workspace, WorkspaceSyncGroup,
};
use super::workspace::WorkspaceService;
/// Get all workspaces (sorted by name); errors are stringified for the frontend.
#[tauri::command]
pub fn get_workspaces(db: State<Database>) -> Result<Vec<Workspace>, String> {
    // `State::inner` yields `&Database`; cloning is cheap (Arc inside).
    let service = WorkspaceService::new(db.inner().clone());
    service.get_all().map_err(|e| e.to_string())
}
/// Get a workspace by ID; `Err` if it does not exist.
#[tauri::command]
pub fn get_workspace(db: State<Database>, id: String) -> Result<Workspace, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.get(&id).map_err(|e| e.to_string())
}
/// Create a new workspace from frontend input; returns the stored record
/// (with generated id and timestamps).
#[tauri::command]
pub fn create_workspace(
    db: State<Database>,
    input: CreateWorkspaceInput,
) -> Result<Workspace, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.create(input).map_err(|e| e.to_string())
}
/// Update an existing workspace; only `Some(..)` fields in the input change.
#[tauri::command]
pub fn update_workspace(
    db: State<Database>,
    input: UpdateWorkspaceInput,
) -> Result<Workspace, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.update(input).map_err(|e| e.to_string())
}
/// Delete a workspace and detach it from its sync-group index.
#[tauri::command]
pub fn delete_workspace(db: State<Database>, id: String) -> Result<(), String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.delete(&id).map_err(|e| e.to_string())
}
/// Get all sync groups (unsorted).
#[tauri::command]
pub fn get_sync_groups(db: State<Database>) -> Result<Vec<WorkspaceSyncGroup>, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.get_all_sync_groups().map_err(|e| e.to_string())
}
/// Get a sync group by ID; `Err` if it does not exist.
#[tauri::command]
pub fn get_sync_group(db: State<Database>, id: String) -> Result<WorkspaceSyncGroup, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.get_sync_group(&id).map_err(|e| e.to_string())
}
/// Get the sync group a workspace belongs to, or `None` if it is ungrouped.
#[tauri::command]
pub fn get_sync_group_for_workspace(
    db: State<Database>,
    workspace_id: String,
) -> Result<Option<WorkspaceSyncGroup>, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service
        .get_sync_group_for_workspace(&workspace_id)
        .map_err(|e| e.to_string())
}
/// Create a new sync group and attach the listed workspaces to it.
#[tauri::command]
pub fn create_sync_group(
    db: State<Database>,
    input: CreateSyncGroupInput,
) -> Result<WorkspaceSyncGroup, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.create_sync_group(input).map_err(|e| e.to_string())
}
/// Update an existing sync group; membership changes go through
/// add/remove_workspace_to_sync_group instead.
#[tauri::command]
pub fn update_sync_group(
    db: State<Database>,
    input: UpdateSyncGroupInput,
) -> Result<WorkspaceSyncGroup, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.update_sync_group(input).map_err(|e| e.to_string())
}
/// Delete a sync group and clear `sync_group_id` on its member workspaces.
#[tauri::command]
pub fn delete_sync_group(db: State<Database>, id: String) -> Result<(), String> {
    let service = WorkspaceService::new(db.inner().clone());
    service.delete_sync_group(&id).map_err(|e| e.to_string())
}
/// Get the workspaces belonging to a sync group (empty vec if the group
/// has no index entry).
#[tauri::command]
pub fn get_workspaces_by_sync_group(
    db: State<Database>,
    sync_group_id: String,
) -> Result<Vec<Workspace>, String> {
    let service = WorkspaceService::new(db.inner().clone());
    service
        .get_workspaces_by_sync_group(&sync_group_id)
        .map_err(|e| e.to_string())
}
/// Add a workspace to a sync group (updates group, index, and workspace).
#[tauri::command]
pub fn add_workspace_to_sync_group(
    db: State<Database>,
    sync_group_id: String,
    workspace_id: String,
) -> Result<(), String> {
    let service = WorkspaceService::new(db.inner().clone());
    service
        .add_workspace_to_sync_group(&sync_group_id, &workspace_id)
        .map_err(|e| e.to_string())
}
/// Remove a workspace from a sync group (updates group, index, and workspace).
#[tauri::command]
pub fn remove_workspace_from_sync_group(
    db: State<Database>,
    sync_group_id: String,
    workspace_id: String,
) -> Result<(), String> {
    let service = WorkspaceService::new(db.inner().clone());
    service
        .remove_workspace_from_sync_group(&sync_group_id, &workspace_id)
        .map_err(|e| e.to_string())
}

View File

@@ -0,0 +1,21 @@
//! Workspaces module
//!
//! Handles workspace management including CRUD operations and sync groups.
mod commands;
mod types;
mod workspace;
// Re-export commands for use in lib.rs
pub use commands::*;
// Re-export types for external use (frontend bindings)
#[allow(unused_imports)]
pub use types::{
CreateSyncGroupInput, CreateWorkspaceInput, UpdateSyncGroupInput, UpdateWorkspaceInput,
Workspace, WorkspaceSyncGroup,
};
// WorkspaceService is used internally by commands
#[allow(unused_imports)]
pub(crate) use workspace::WorkspaceService;

View File

@@ -0,0 +1,97 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A workspace record as stored in the WORKSPACES table (JSON-encoded).
///
/// NOTE(review): this type serializes with PascalCase field names while the
/// sibling types in this file (inputs, sync groups) use default snake_case —
/// confirm the frontend expects this asymmetry.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Workspace {
    // UUID v4, generated in `Workspace::new`.
    pub id: String,
    pub name: String,
    pub description: String,
    // Free-form labels; replaces the old "environment" concept.
    #[serde(default)]
    pub tags: Vec<String>,
    // Id of the owning sync group, if any; omitted from JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sync_group_id: Option<String>,
    // Defaulted to "now" when deserializing records that lack the field.
    #[serde(default = "Utc::now")]
    pub created_at: DateTime<Utc>,
    #[serde(default = "Utc::now")]
    pub updated_at: DateTime<Utc>,
}
impl Workspace {
pub fn new(name: String, description: String) -> Self {
let now = Utc::now();
Self {
id: Uuid::new_v4().to_string(),
name,
description,
tags: Vec::new(),
sync_group_id: None,
created_at: now,
updated_at: now,
}
}
}
/// Frontend payload for creating a workspace; id/timestamps are generated
/// server-side.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateWorkspaceInput {
    pub name: String,
    pub description: String,
    // Optional in the JSON payload; defaults to an empty list.
    #[serde(default)]
    pub tags: Vec<String>,
}
/// Partial-update payload: only `Some(..)` fields are applied.
///
/// NOTE(review): `sync_group_id: Option<String>` cannot distinguish
/// "leave unchanged" from "clear the group" — a nested
/// `Option<Option<String>>` (or a separate command) would be needed to
/// allow clearing. Confirm intended.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateWorkspaceInput {
    pub id: String,
    pub name: Option<String>,
    pub description: Option<String>,
    pub tags: Option<Vec<String>>,
    pub sync_group_id: Option<String>,
}
/// A group of workspaces that share selected variables.
///
/// NOTE(review): `workspace_ids` duplicates the WORKSPACES_BY_SYNC_GROUP
/// index table; both are maintained manually and can drift (see
/// `WorkspaceService::delete`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkspaceSyncGroup {
    pub id: String,
    pub name: String,
    pub workspace_ids: Vec<String>,
    // Names of variables replicated across member workspaces.
    pub synced_variable_names: Vec<String>,
    // Whether secret-valued variables are synced too (defaults to false).
    pub sync_secrets: bool,
    #[serde(default = "Utc::now")]
    pub created_at: DateTime<Utc>,
    #[serde(default = "Utc::now")]
    pub updated_at: DateTime<Utc>,
}
impl WorkspaceSyncGroup {
pub fn new(name: String, workspace_ids: Vec<String>) -> Self {
let now = Utc::now();
Self {
id: Uuid::new_v4().to_string(),
name,
workspace_ids,
synced_variable_names: Vec::new(),
sync_secrets: false,
created_at: now,
updated_at: now,
}
}
}
/// Frontend payload for creating a sync group.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateSyncGroupInput {
    pub name: String,
    pub workspace_ids: Vec<String>,
    // Both optional in JSON; default to empty list / false.
    #[serde(default)]
    pub synced_variable_names: Vec<String>,
    #[serde(default)]
    pub sync_secrets: bool,
}
/// Partial-update payload for a sync group; membership is changed via the
/// dedicated add/remove commands, not here.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateSyncGroupInput {
    pub id: String,
    pub name: Option<String>,
    pub synced_variable_names: Option<Vec<String>>,
    pub sync_secrets: Option<bool>,
}

View File

@@ -0,0 +1,572 @@
use chrono::Utc;
use redb::ReadableTable;
use crate::db::{
Database, DbError, DbResult, WORKSPACES, WORKSPACE_SYNC_GROUPS, WORKSPACES_BY_SYNC_GROUP,
};
use super::types::{
CreateSyncGroupInput, CreateWorkspaceInput, UpdateSyncGroupInput, UpdateWorkspaceInput,
Workspace, WorkspaceSyncGroup,
};
/// Service layer for workspace and sync-group persistence.
///
/// Owns a `Database` handle (cheap to clone: `Arc` inside) and implements
/// all CRUD plus manual index maintenance.
pub struct WorkspaceService {
    db: Database,
}
impl WorkspaceService {
pub fn new(db: Database) -> Self {
Self { db }
}
pub fn get_all(&self) -> DbResult<Vec<Workspace>> {
let read_txn = self.db.begin_read()?;
let table = read_txn.open_table(WORKSPACES)?;
let mut workspaces = Vec::new();
for entry in table.iter()? {
let (_, value) = entry?;
let workspace: Workspace = serde_json::from_str(value.value())
.map_err(|e| DbError::Serialization(e.to_string()))?;
workspaces.push(workspace);
}
workspaces.sort_by(|a, b| a.name.cmp(&b.name));
Ok(workspaces)
}
pub fn get(&self, id: &str) -> DbResult<Workspace> {
let read_txn = self.db.begin_read()?;
let table = read_txn.open_table(WORKSPACES)?;
let value = table
.get(id)?
.ok_or_else(|| DbError::NotFound(format!("Workspace not found: {}", id)))?;
let workspace: Workspace = serde_json::from_str(value.value())
.map_err(|e| DbError::Serialization(e.to_string()))?;
Ok(workspace)
}
pub fn create(&self, input: CreateWorkspaceInput) -> DbResult<Workspace> {
let mut workspace = Workspace::new(input.name, input.description);
workspace.tags = input.tags;
let json = serde_json::to_string(&workspace)
.map_err(|e| DbError::Serialization(e.to_string()))?;
let write_txn = self.db.begin_write()?;
{
let mut table = write_txn.open_table(WORKSPACES)?;
table.insert(workspace.id.as_str(), json.as_str())?;
}
write_txn.commit()?;
Ok(workspace)
}
pub fn update(&self, input: UpdateWorkspaceInput) -> DbResult<Workspace> {
let mut workspace = self.get(&input.id)?;
if let Some(name) = input.name {
workspace.name = name;
}
if let Some(description) = input.description {
workspace.description = description;
}
if let Some(tags) = input.tags {
workspace.tags = tags;
}
if let Some(sync_group_id) = input.sync_group_id {
workspace.sync_group_id = Some(sync_group_id);
}
workspace.updated_at = Utc::now();
// Write back
let json = serde_json::to_string(&workspace)
.map_err(|e| DbError::Serialization(e.to_string()))?;
let write_txn = self.db.begin_write()?;
{
let mut table = write_txn.open_table(WORKSPACES)?;
table.insert(workspace.id.as_str(), json.as_str())?;
}
write_txn.commit()?;
Ok(workspace)
}
/// Delete a workspace
pub fn delete(&self, id: &str) -> DbResult<()> {
// First get the workspace to check sync_group_id
let workspace = self.get(id)?;
let sync_group_id = workspace.sync_group_id.clone();
let write_txn = self.db.begin_write()?;
// Remove from workspaces table
{
let mut table = write_txn.open_table(WORKSPACES)?;
table.remove(id)?;
}
// Remove from sync group index if applicable
if let Some(group_id) = sync_group_id {
self.remove_from_sync_index(&write_txn, &group_id, id)?;
}
write_txn.commit()?;
Ok(())
}
/// Helper to remove a workspace ID from the sync group index
fn remove_from_sync_index(
&self,
write_txn: &redb::WriteTransaction,
group_id: &str,
workspace_id: &str,
) -> DbResult<()> {
let mut idx_table = write_txn.open_table(WORKSPACES_BY_SYNC_GROUP)?;
// Read current IDs
let ids_json = match idx_table.get(group_id)? {
Some(value) => value.value().to_string(),
None => return Ok(()),
};
let mut ids: Vec<String> = serde_json::from_str(&ids_json)
.map_err(|e| DbError::Serialization(e.to_string()))?;
ids.retain(|i| i != workspace_id);
let new_json = serde_json::to_string(&ids)
.map_err(|e| DbError::Serialization(e.to_string()))?;
idx_table.insert(group_id, new_json.as_str())?;
Ok(())
}
/// Helper to add a workspace ID to the sync group index
fn add_to_sync_index(
&self,
write_txn: &redb::WriteTransaction,
group_id: &str,
workspace_id: &str,
) -> DbResult<()> {
let mut idx_table = write_txn.open_table(WORKSPACES_BY_SYNC_GROUP)?;
// Read current IDs or start with empty
let ids_json = match idx_table.get(group_id)? {
Some(value) => value.value().to_string(),
None => "[]".to_string(),
};
let mut ids: Vec<String> = serde_json::from_str(&ids_json)
.map_err(|e| DbError::Serialization(e.to_string()))?;
if !ids.contains(&workspace_id.to_string()) {
ids.push(workspace_id.to_string());
}
let new_json = serde_json::to_string(&ids)
.map_err(|e| DbError::Serialization(e.to_string()))?;
idx_table.insert(group_id, new_json.as_str())?;
Ok(())
}
// ==================== Sync Group Operations ====================
/// Get all sync groups
pub fn get_all_sync_groups(&self) -> DbResult<Vec<WorkspaceSyncGroup>> {
let read_txn = self.db.begin_read()?;
let table = read_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
let mut groups = Vec::new();
for entry in table.iter()? {
let (_, value) = entry?;
let group: WorkspaceSyncGroup = serde_json::from_str(value.value())
.map_err(|e| DbError::Serialization(e.to_string()))?;
groups.push(group);
}
Ok(groups)
}
/// Get a sync group by ID
pub fn get_sync_group(&self, id: &str) -> DbResult<WorkspaceSyncGroup> {
let read_txn = self.db.begin_read()?;
let table = read_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
let value = table
.get(id)?
.ok_or_else(|| DbError::NotFound(format!("Sync group not found: {}", id)))?;
let group: WorkspaceSyncGroup = serde_json::from_str(value.value())
.map_err(|e| DbError::Serialization(e.to_string()))?;
Ok(group)
}
/// Get sync group for a workspace
pub fn get_sync_group_for_workspace(
&self,
workspace_id: &str,
) -> DbResult<Option<WorkspaceSyncGroup>> {
let workspace = self.get(workspace_id)?;
match workspace.sync_group_id {
Some(group_id) => Ok(Some(self.get_sync_group(&group_id)?)),
None => Ok(None),
}
}
/// Create a new sync group
pub fn create_sync_group(&self, input: CreateSyncGroupInput) -> DbResult<WorkspaceSyncGroup> {
let mut group = WorkspaceSyncGroup::new(input.name, input.workspace_ids.clone());
group.synced_variable_names = input.synced_variable_names;
group.sync_secrets = input.sync_secrets;
let json = serde_json::to_string(&group)
.map_err(|e| DbError::Serialization(e.to_string()))?;
let write_txn = self.db.begin_write()?;
// Insert sync group
{
let mut table = write_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
table.insert(group.id.as_str(), json.as_str())?;
}
// Update index
{
let mut idx_table = write_txn.open_table(WORKSPACES_BY_SYNC_GROUP)?;
let ids_json = serde_json::to_string(&input.workspace_ids)
.map_err(|e| DbError::Serialization(e.to_string()))?;
idx_table.insert(group.id.as_str(), ids_json.as_str())?;
}
// Update each workspace's sync_group_id
{
let mut ws_table = write_txn.open_table(WORKSPACES)?;
for ws_id in &input.workspace_ids {
// Read workspace
let ws_json = match ws_table.get(ws_id.as_str())? {
Some(value) => value.value().to_string(),
None => continue,
};
let mut workspace: Workspace = serde_json::from_str(&ws_json)
.map_err(|e| DbError::Serialization(e.to_string()))?;
workspace.sync_group_id = Some(group.id.clone());
workspace.updated_at = Utc::now();
let new_ws_json = serde_json::to_string(&workspace)
.map_err(|e| DbError::Serialization(e.to_string()))?;
ws_table.insert(ws_id.as_str(), new_ws_json.as_str())?;
}
}
write_txn.commit()?;
Ok(group)
}
/// Update a sync group
pub fn update_sync_group(&self, input: UpdateSyncGroupInput) -> DbResult<WorkspaceSyncGroup> {
// Read existing
let mut group = self.get_sync_group(&input.id)?;
// Apply updates
if let Some(name) = input.name {
group.name = name;
}
if let Some(synced_variable_names) = input.synced_variable_names {
group.synced_variable_names = synced_variable_names;
}
if let Some(sync_secrets) = input.sync_secrets {
group.sync_secrets = sync_secrets;
}
group.updated_at = Utc::now();
// Write back
let json = serde_json::to_string(&group)
.map_err(|e| DbError::Serialization(e.to_string()))?;
let write_txn = self.db.begin_write()?;
{
let mut table = write_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
table.insert(group.id.as_str(), json.as_str())?;
}
write_txn.commit()?;
Ok(group)
}
/// Delete a sync group
pub fn delete_sync_group(&self, id: &str) -> DbResult<()> {
// Get the sync group to find associated workspaces
let group = self.get_sync_group(id)?;
let workspace_ids = group.workspace_ids.clone();
let write_txn = self.db.begin_write()?;
// Remove sync_group_id from all associated workspaces
{
let mut ws_table = write_txn.open_table(WORKSPACES)?;
for ws_id in &workspace_ids {
let ws_json = match ws_table.get(ws_id.as_str())? {
Some(value) => value.value().to_string(),
None => continue,
};
let mut workspace: Workspace = serde_json::from_str(&ws_json)
.map_err(|e| DbError::Serialization(e.to_string()))?;
workspace.sync_group_id = None;
workspace.updated_at = Utc::now();
let new_ws_json = serde_json::to_string(&workspace)
.map_err(|e| DbError::Serialization(e.to_string()))?;
ws_table.insert(ws_id.as_str(), new_ws_json.as_str())?;
}
}
// Remove from sync groups table
{
let mut table = write_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
table.remove(id)?;
}
// Remove from index
{
let mut idx_table = write_txn.open_table(WORKSPACES_BY_SYNC_GROUP)?;
idx_table.remove(id)?;
}
write_txn.commit()?;
Ok(())
}
/// Get workspaces by sync group
pub fn get_workspaces_by_sync_group(&self, sync_group_id: &str) -> DbResult<Vec<Workspace>> {
let read_txn = self.db.begin_read()?;
let idx_table = read_txn.open_table(WORKSPACES_BY_SYNC_GROUP)?;
let workspace_ids: Vec<String> = match idx_table.get(sync_group_id)? {
Some(value) => serde_json::from_str(value.value())
.map_err(|e| DbError::Serialization(e.to_string()))?,
None => return Ok(Vec::new()),
};
drop(idx_table);
drop(read_txn);
let mut workspaces = Vec::new();
for ws_id in workspace_ids {
if let Ok(workspace) = self.get(&ws_id) {
workspaces.push(workspace);
}
}
Ok(workspaces)
}
/// Add a workspace to a sync group.
///
/// Updates the group's membership list, the workspace's back-pointer, and the
/// `WORKSPACES_BY_SYNC_GROUP` reverse index in a single write transaction.
///
/// Fix: if the workspace already belongs to a *different* sync group, it is
/// now detached from that group first. Previously only the workspace's
/// back-pointer was overwritten, leaving the old group's `workspace_ids`
/// list and the reverse index with a stale reference.
pub fn add_workspace_to_sync_group(
    &self,
    sync_group_id: &str,
    workspace_id: &str,
) -> DbResult<()> {
    // Read existing data
    let mut group = self.get_sync_group(sync_group_id)?;
    let mut workspace = self.get(workspace_id)?;

    // Detach from any previous group. A previous id that no longer resolves
    // (group already deleted) simply means there is nothing to clean up.
    let previous_group_id = workspace
        .sync_group_id
        .take()
        .filter(|prev| prev != sync_group_id);
    let previous_group = previous_group_id.as_deref().and_then(|prev_id| {
        let mut prev = self.get_sync_group(prev_id).ok()?;
        prev.workspace_ids.retain(|id| id != workspace_id);
        prev.updated_at = Utc::now();
        Some(prev)
    });

    // Update in memory
    if !group.workspace_ids.contains(&workspace_id.to_string()) {
        group.workspace_ids.push(workspace_id.to_string());
        group.updated_at = Utc::now();
    }
    workspace.sync_group_id = Some(sync_group_id.to_string());
    workspace.updated_at = Utc::now();

    // Serialize before opening the write transaction so a serialization
    // failure cannot abandon a half-applied transaction.
    let group_json = serde_json::to_string(&group)
        .map_err(|e| DbError::Serialization(e.to_string()))?;
    let ws_json = serde_json::to_string(&workspace)
        .map_err(|e| DbError::Serialization(e.to_string()))?;
    let previous_group_json = previous_group
        .as_ref()
        .map(|g| serde_json::to_string(g).map_err(|e| DbError::Serialization(e.to_string())))
        .transpose()?;

    // Write all changes atomically
    let write_txn = self.db.begin_write()?;
    {
        let mut table = write_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
        table.insert(sync_group_id, group_json.as_str())?;
        if let (Some(prev_id), Some(prev_json)) =
            (previous_group_id.as_deref(), previous_group_json.as_deref())
        {
            table.insert(prev_id, prev_json)?;
        }
    }
    if let Some(prev_id) = previous_group_id.as_deref() {
        self.remove_from_sync_index(&write_txn, prev_id, workspace_id)?;
    }
    self.add_to_sync_index(&write_txn, sync_group_id, workspace_id)?;
    {
        let mut ws_table = write_txn.open_table(WORKSPACES)?;
        ws_table.insert(workspace_id, ws_json.as_str())?;
    }
    write_txn.commit()?;
    Ok(())
}
/// Remove a workspace from a sync group.
///
/// Updates the group's membership list, the reverse index, and the
/// workspace's back-pointer in a single write transaction.
///
/// Fix: the back-pointer is only cleared when it actually references
/// `sync_group_id`. Previously it was cleared unconditionally, so removing a
/// workspace from a group it had already left would clobber its membership
/// pointer to a different group.
pub fn remove_workspace_from_sync_group(
    &self,
    sync_group_id: &str,
    workspace_id: &str,
) -> DbResult<()> {
    // Read existing data
    let mut group = self.get_sync_group(sync_group_id)?;
    let mut workspace = self.get(workspace_id)?;

    // Update in memory
    group.workspace_ids.retain(|id| id != workspace_id);
    group.updated_at = Utc::now();
    // Only clear the back-pointer if it points at *this* group.
    if workspace.sync_group_id.as_deref() == Some(sync_group_id) {
        workspace.sync_group_id = None;
    }
    workspace.updated_at = Utc::now();

    // Serialize before opening the write transaction.
    let group_json = serde_json::to_string(&group)
        .map_err(|e| DbError::Serialization(e.to_string()))?;
    let ws_json = serde_json::to_string(&workspace)
        .map_err(|e| DbError::Serialization(e.to_string()))?;

    // Write all changes atomically
    let write_txn = self.db.begin_write()?;
    {
        let mut table = write_txn.open_table(WORKSPACE_SYNC_GROUPS)?;
        table.insert(sync_group_id, group_json.as_str())?;
    }
    self.remove_from_sync_index(&write_txn, sync_group_id, workspace_id)?;
    {
        let mut ws_table = write_txn.open_table(WORKSPACES)?;
        ws_table.insert(workspace_id, ws_json.as_str())?;
    }
    write_txn.commit()?;
    Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::env::temp_dir;

    /// Open a throwaway redb database under a unique temp-dir path so
    /// parallel test runs never collide.
    fn create_test_db() -> Database {
        let db_path = temp_dir().join(format!("resona_test_{}.redb", uuid::Uuid::new_v4()));
        Database::open_at(db_path).expect("Failed to create test database")
    }

    /// Exercises the full create → read → update → list → delete cycle.
    #[test]
    fn test_workspace_crud() {
        let service = WorkspaceService::new(create_test_db());

        // Create
        let created = service
            .create(CreateWorkspaceInput {
                name: "Test Workspace".to_string(),
                description: "A test workspace".to_string(),
                tags: vec!["Development".to_string()],
            })
            .expect("Failed to create workspace");
        assert_eq!(created.name, "Test Workspace");
        assert_eq!(created.tags, vec!["Development".to_string()]);

        // Read it back by id
        let fetched = service.get(&created.id).expect("Failed to get workspace");
        assert_eq!(fetched.id, created.id);

        // Update only the name; untouched fields must be preserved.
        let updated = service
            .update(UpdateWorkspaceInput {
                id: created.id.clone(),
                name: Some("Updated Workspace".to_string()),
                description: None,
                tags: None,
                sync_group_id: None,
            })
            .expect("Failed to update workspace");
        assert_eq!(updated.name, "Updated Workspace");
        assert_eq!(updated.description, "A test workspace");

        // Exactly one workspace should exist before deletion...
        let all = service.get_all().expect("Failed to get all workspaces");
        assert_eq!(all.len(), 1);

        // ...and none after.
        service
            .delete(&created.id)
            .expect("Failed to delete workspace");
        let remaining = service.get_all().expect("Failed to get all workspaces");
        assert_eq!(remaining.len(), 0);
    }

    /// Exercises sync-group creation, membership lookup, and deletion cleanup.
    #[test]
    fn test_sync_groups() {
        let service = WorkspaceService::new(create_test_db());

        let first = service
            .create(CreateWorkspaceInput {
                name: "Workspace 1".to_string(),
                description: "First workspace".to_string(),
                tags: vec!["Development".to_string()],
            })
            .expect("Failed to create workspace 1");
        let second = service
            .create(CreateWorkspaceInput {
                name: "Workspace 2".to_string(),
                description: "Second workspace".to_string(),
                tags: vec!["Production".to_string()],
            })
            .expect("Failed to create workspace 2");

        // Create a sync group containing both workspaces.
        let group = service
            .create_sync_group(CreateSyncGroupInput {
                name: "Test Sync Group".to_string(),
                workspace_ids: vec![first.id.clone(), second.id.clone()],
                synced_variable_names: vec!["API_KEY".to_string()],
                sync_secrets: false,
            })
            .expect("Failed to create sync group");

        // Each member workspace should now point back at the group.
        let linked = service.get(&first.id).expect("Failed to get workspace 1");
        assert_eq!(linked.sync_group_id, Some(group.id.clone()));

        // The reverse index should resolve both members.
        let members = service
            .get_workspaces_by_sync_group(&group.id)
            .expect("Failed to get workspaces by sync group");
        assert_eq!(members.len(), 2);

        // Deleting the group must clear the workspaces' back-pointers.
        service
            .delete_sync_group(&group.id)
            .expect("Failed to delete sync group");
        let unlinked = service.get(&first.id).expect("Failed to get workspace 1");
        assert_eq!(unlinked.sync_group_id, None);
    }
}

View File

@@ -32,7 +32,8 @@
let newName = $state("");
let newDescription = $state("");
let environment = $state("");
let tags = $state<string[]>([]);
let tagInput = $state("");
let copyVariables = $state(true);
let copySecrets = $state(false);
let createSyncGroup = $state(false);
@@ -46,7 +47,7 @@
if (open && sourceWorkspace) {
newName = `${sourceWorkspace.Name} (Copy)`;
newDescription = sourceWorkspace.Description;
environment = "";
tags = [...(sourceWorkspace.Tags ?? [])];
syncGroupName = `${sourceWorkspace.Name} Environments`;
loadVariables();
}
@@ -76,6 +77,18 @@
variablesToSync = [];
}
function addTag() {
const trimmed = tagInput.trim();
if (trimmed && !tags.includes(trimmed)) {
tags = [...tags, trimmed];
tagInput = "";
}
}
function removeTag(tag: string) {
tags = tags.filter((t) => t !== tag);
}
async function handleSubmit() {
if (!sourceWorkspace) return;
@@ -85,7 +98,7 @@
sourceWorkspaceId: sourceWorkspace.Id,
newName,
newDescription,
environment: environment || undefined,
tags,
copyVariables,
copySecrets,
createSyncGroup,
@@ -143,14 +156,44 @@
/>
</div>
<div class="grid grid-cols-4 items-center gap-4">
<Label for="environment" class="text-end">Environment</Label>
<Input
id="environment"
class="col-span-3"
placeholder="e.g., Development, Staging, Production"
bind:value={environment}
/>
<div class="grid grid-cols-4 items-start gap-4">
<Label for="tags" class="text-end pt-2">Tags</Label>
<div class="col-span-3 space-y-2">
<div class="flex gap-2">
<Input
id="tags"
placeholder="Add a tag..."
bind:value={tagInput}
onkeydown={(e) =>
e.key === "Enter" && (e.preventDefault(), addTag())}
/>
<Button
type="button"
variant="outline"
size="sm"
onclick={addTag}
>
Add
</Button>
</div>
{#if tags.length > 0}
<div class="flex flex-wrap gap-1">
{#each tags as tag}
<Badge variant="secondary" class="gap-1">
{tag}
<button
type="button"
class="ml-1 hover:text-destructive"
onclick={() => removeTag(tag)}
aria-label="Remove tag"
>
×
</button>
</Badge>
{/each}
</div>
{/if}
</div>
</div>
</div>

View File

@@ -1,7 +1,8 @@
import type { Workspace, WorkspaceSyncGroup } from "$lib/types/workspace";
import type { Collection } from "$lib/types/collection";
import type { Request } from "$lib/types/request";
import type { Variable } from "$lib/types/variable";
import type {
Workspace,
WorkspaceSyncGroup,
CreateWorkspaceInput,
} from "$lib/types/workspace";
import {
get_collections_by_workspace,
get_standalone_requests_by_workspace,
@@ -12,90 +13,17 @@ import {
get_workspace_variables,
create_variable,
update_variable,
get_variables_by_scope,
} from "./variables";
const syncGroups: Map<string, WorkspaceSyncGroup> = new Map();
export async function get_sync_groups(): Promise<WorkspaceSyncGroup[]> {
return [...syncGroups.values()];
}
export async function get_sync_group(
id: string
): Promise<WorkspaceSyncGroup | null> {
return syncGroups.get(id) ?? null;
}
export async function get_sync_group_for_workspace(
workspaceId: string
): Promise<WorkspaceSyncGroup | null> {
for (const group of syncGroups.values()) {
if (group.workspaceIds.includes(workspaceId)) {
return group;
}
}
return null;
}
export async function create_sync_group(
name: string,
workspaceIds: string[],
syncedVariableNames: string[] = [],
syncSecrets: boolean = false
): Promise<WorkspaceSyncGroup> {
const group: WorkspaceSyncGroup = {
id: crypto.randomUUID(),
name,
workspaceIds,
syncedVariableNames,
syncSecrets,
};
syncGroups.set(group.id, group);
return group;
}
export async function update_sync_group(
id: string,
updates: Partial<Omit<WorkspaceSyncGroup, "id">>
): Promise<boolean> {
const group = syncGroups.get(id);
if (!group) return false;
Object.assign(group, updates);
return true;
}
export async function delete_sync_group(id: string): Promise<boolean> {
return syncGroups.delete(id);
}
export async function add_workspace_to_sync_group(
groupId: string,
workspaceId: string
): Promise<boolean> {
const group = syncGroups.get(groupId);
if (!group) return false;
if (!group.workspaceIds.includes(workspaceId)) {
group.workspaceIds.push(workspaceId);
}
return true;
}
export async function remove_workspace_from_sync_group(
groupId: string,
workspaceId: string
): Promise<boolean> {
const group = syncGroups.get(groupId);
if (!group) return false;
group.workspaceIds = group.workspaceIds.filter((id) => id !== workspaceId);
return true;
}
import {
create_sync_group as createSyncGroupBackend,
get_sync_group as getSyncGroupBackend,
} from "./workspaces";
export type DuplicateWorkspaceOptions = {
sourceWorkspaceId: string;
newName: string;
newDescription: string;
environment?: string;
tags?: string[];
copyVariables: boolean;
copySecrets: boolean;
createSyncGroup: boolean;
@@ -110,13 +38,13 @@ export type DuplicateWorkspaceResult = {
export async function duplicate_workspace(
options: DuplicateWorkspaceOptions,
createWorkspaceFn: (workspace: Omit<Workspace, "Id">) => Promise<Workspace>
createWorkspaceFn: (input: CreateWorkspaceInput) => Promise<Workspace>
): Promise<DuplicateWorkspaceResult> {
const {
sourceWorkspaceId,
newName,
newDescription,
environment,
tags = [],
copyVariables,
copySecrets,
createSyncGroup: shouldCreateSyncGroup,
@@ -126,15 +54,13 @@ export async function duplicate_workspace(
// Create the new workspace
const newWorkspace = await createWorkspaceFn({
Name: newName,
Description: newDescription,
environment,
syncGroupId: null,
name: newName,
description: newDescription,
tags,
});
// Copy collections and requests
const collections = await get_collections_by_workspace(sourceWorkspaceId);
const collectionIdMap = new Map<string, string>();
for (const collection of collections) {
const newCollection = await create_collection({
@@ -142,9 +68,7 @@ export async function duplicate_workspace(
description: collection.description,
workspaceId: newWorkspace.Id,
});
collectionIdMap.set(collection.id, newCollection.id);
// Copy requests in collection
for (const request of collection.requests) {
await create_request({
name: request.name,
@@ -184,9 +108,7 @@ export async function duplicate_workspace(
if (copyVariables) {
const sourceVariables = await get_workspace_variables(sourceWorkspaceId);
for (const variable of sourceVariables) {
// Skip secrets if not copying them
if (variable.isSecret && !copySecrets) {
// Create variable with empty value for secrets
await create_variable({
name: variable.name,
value: "",
@@ -211,13 +133,12 @@ export async function duplicate_workspace(
// Create sync group if requested
let syncGroup: WorkspaceSyncGroup | undefined;
if (shouldCreateSyncGroup) {
syncGroup = await create_sync_group(
syncGroupName || `${newName} Sync Group`,
[sourceWorkspaceId, newWorkspace.Id],
variablesToSync,
copySecrets
);
newWorkspace.syncGroupId = syncGroup.id;
syncGroup = await createSyncGroupBackend({
name: syncGroupName || `${newName} Sync Group`,
workspace_ids: [sourceWorkspaceId, newWorkspace.Id],
synced_variable_names: variablesToSync,
sync_secrets: copySecrets,
});
}
return { workspace: newWorkspace, syncGroup };
@@ -227,43 +148,39 @@ export async function sync_variables_in_group(
groupId: string,
sourceWorkspaceId: string
): Promise<{ synced: number; skipped: number }> {
const group = syncGroups.get(groupId);
const group = await getSyncGroupBackend(groupId);
if (!group) return { synced: 0, skipped: 0 };
const sourceVariables = await get_workspace_variables(sourceWorkspaceId);
let synced = 0;
let skipped = 0;
for (const targetWorkspaceId of group.workspaceIds) {
for (const targetWorkspaceId of group.WorkspaceIds) {
if (targetWorkspaceId === sourceWorkspaceId) continue;
const targetVariables = await get_workspace_variables(targetWorkspaceId);
const targetVarMap = new Map(targetVariables.map((v) => [v.name, v]));
for (const sourceVar of sourceVariables) {
// Only sync variables that are in the sync list
if (!group.syncedVariableNames.includes(sourceVar.name)) {
if (!group.SyncedVariableNames.includes(sourceVar.name)) {
skipped++;
continue;
}
// Skip secrets if not syncing them
if (sourceVar.isSecret && !group.syncSecrets) {
if (sourceVar.isSecret && !group.SyncSecrets) {
skipped++;
continue;
}
const targetVar = targetVarMap.get(sourceVar.name);
if (targetVar) {
// Update existing variable
await update_variable(targetVar.id, { value: sourceVar.value });
synced++;
} else {
// Create new variable in target workspace
await create_variable({
name: sourceVar.name,
value:
sourceVar.isSecret && !group.syncSecrets ? "" : sourceVar.value,
sourceVar.isSecret && !group.SyncSecrets ? "" : sourceVar.value,
scope: "workspace",
scopeId: targetWorkspaceId,
isSecret: sourceVar.isSecret,

View File

@@ -1,96 +1,91 @@
import type { Workspace } from "$lib/types/workspace";
const ws: Map<string, Workspace> = new Map<string, Workspace>([
[
"1",
{
Id: "1",
Name: "Test 1",
Description: "This is a test description",
environment: "Development",
syncGroupId: null,
},
],
[
"2",
{
Id: "2",
Name: "Test 2",
Description: "This is a longer test description",
environment: "Production",
syncGroupId: null,
},
],
[
"3",
{
Id: "3",
Name: "Test 3",
Description: "This is a slightly longer test description",
syncGroupId: null,
},
],
[
"4",
{
Id: "4",
Name: "Test 4",
Description: "This is an even slightly longer test description",
syncGroupId: null,
},
],
[
"5",
{
Id: "5",
Name: "Test 5",
Description:
"This is a veryyyyyyyyyyyyyyyyyyyyyyyyyyy longggggggggggggggggggggggggggg test descriptionnnnnnnnnnnnnnnnnnnnnnnnnnnnnn",
syncGroupId: null,
},
],
]);
function convert_to_list(ws: Map<string, Workspace>): Workspace[] {
return [...ws.values()];
}
import { invoke } from "@tauri-apps/api/core";
import type {
Workspace,
CreateWorkspaceInput,
UpdateWorkspaceInput,
WorkspaceSyncGroup,
CreateSyncGroupInput,
UpdateSyncGroupInput,
} from "$lib/types/workspace";
export async function get_workspaces(): Promise<Workspace[]> {
return convert_to_list(ws);
return invoke<Workspace[]>("get_workspaces");
}
export async function get_workspace(id: string): Promise<Workspace | null> {
return ws.get(id) ?? null;
}
export async function update_workspace(workspace: Workspace): Promise<boolean> {
let w = ws.get(workspace.Id);
if (w != undefined) {
w.Name = workspace.Name;
w.Description = workspace.Description;
} else {
return false;
}
return true;
export async function get_workspace(id: string): Promise<Workspace> {
return invoke<Workspace>("get_workspace", { id });
}
export async function create_workspace(
workspace: Omit<Workspace, "Id">
input: CreateWorkspaceInput
): Promise<Workspace> {
const newWorkspace: Workspace = {
...workspace,
Id: crypto.randomUUID(),
};
ws.set(newWorkspace.Id, newWorkspace);
return newWorkspace;
return invoke<Workspace>("create_workspace", { input });
}
export async function delete_workspace(id: string): Promise<boolean> {
return ws.delete(id);
export async function update_workspace(
input: UpdateWorkspaceInput
): Promise<Workspace> {
return invoke<Workspace>("update_workspace", { input });
}
export async function delete_workspace(id: string): Promise<void> {
return invoke<void>("delete_workspace", { id });
}
export async function get_sync_groups(): Promise<WorkspaceSyncGroup[]> {
return invoke<WorkspaceSyncGroup[]>("get_sync_groups");
}
export async function get_sync_group(id: string): Promise<WorkspaceSyncGroup> {
return invoke<WorkspaceSyncGroup>("get_sync_group", { id });
}
export async function get_sync_group_for_workspace(
workspaceId: string
): Promise<WorkspaceSyncGroup | null> {
return invoke<WorkspaceSyncGroup | null>("get_sync_group_for_workspace", {
workspaceId,
});
}
export async function create_sync_group(
input: CreateSyncGroupInput
): Promise<WorkspaceSyncGroup> {
return invoke<WorkspaceSyncGroup>("create_sync_group", { input });
}
export async function update_sync_group(
input: UpdateSyncGroupInput
): Promise<WorkspaceSyncGroup> {
return invoke<WorkspaceSyncGroup>("update_sync_group", { input });
}
export async function delete_sync_group(id: string): Promise<void> {
return invoke<void>("delete_sync_group", { id });
}
export async function get_workspaces_by_sync_group(
syncGroupId: string
): Promise<Workspace[]> {
return [...ws.values()].filter((w) => w.syncGroupId === syncGroupId);
return invoke<Workspace[]>("get_workspaces_by_sync_group", { syncGroupId });
}
export async function add_workspace_to_sync_group(
syncGroupId: string,
workspaceId: string
): Promise<void> {
return invoke<void>("add_workspace_to_sync_group", {
syncGroupId,
workspaceId,
});
}
export async function remove_workspace_from_sync_group(
syncGroupId: string,
workspaceId: string
): Promise<void> {
return invoke<void>("remove_workspace_from_sync_group", {
syncGroupId,
workspaceId,
});
}

View File

@@ -2,16 +2,48 @@ export type Workspace = {
Id: string;
Name: string;
Description: string;
syncGroupId?: string | null;
environment?: string;
Tags: string[];
SyncGroupId?: string | null;
CreatedAt: string;
UpdatedAt: string;
};
export type CreateWorkspaceInput = {
name: string;
description: string;
tags?: string[];
};
export type UpdateWorkspaceInput = {
id: string;
name?: string;
description?: string;
tags?: string[];
sync_group_id?: string;
};
export type WorkspaceSyncGroup = {
id: string;
Id: string;
Name: string;
WorkspaceIds: string[];
SyncedVariableNames: string[];
SyncSecrets: boolean;
CreatedAt: string;
UpdatedAt: string;
};
export type CreateSyncGroupInput = {
name: string;
workspaceIds: string[];
syncedVariableNames: string[];
syncSecrets: boolean;
workspace_ids: string[];
synced_variable_names?: string[];
sync_secrets?: boolean;
};
export type UpdateSyncGroupInput = {
id: string;
name?: string;
synced_variable_names?: string[];
sync_secrets?: boolean;
};
export type AppSettings = {

View File

@@ -92,16 +92,16 @@
async function handleDialogSubmit() {
if (dialogMode === "create") {
await create_workspace({
Name: workspaceName,
Description: workspaceDescription,
name: workspaceName,
description: workspaceDescription,
tags: [],
});
} else if (selectedWorkspace != null) {
const w: Workspace = {
Id: selectedWorkspace.Id,
Name: workspaceName,
Description: workspaceDescription,
};
await update_workspace(w);
await update_workspace({
id: selectedWorkspace.Id,
name: workspaceName,
description: workspaceDescription,
});
}
await loadWorkspaces();
@@ -109,8 +109,8 @@
}
async function handleDuplicate(options: DuplicateWorkspaceOptions) {
await duplicate_workspace(options, async (ws) => {
return await create_workspace(ws);
await duplicate_workspace(options, async (input) => {
return await create_workspace(input);
});
await loadWorkspaces();
}
@@ -230,10 +230,12 @@
>
{workspace.Description}
</Card.Description>
{#if workspace.environment}
<Badge variant="secondary" class="w-fit mt-2"
>{workspace.environment}</Badge
>
{#if workspace.Tags && workspace.Tags.length > 0}
<div class="flex flex-wrap gap-1 mt-2">
{#each workspace.Tags as tag}
<Badge variant="secondary" class="text-xs">{tag}</Badge>
{/each}
</div>
{/if}
</Card.Header>
<Card.Footer class="flex items-center justify-center gap-2">