forked from trinitrix/core
feat(storage): added a central storage management system
This commit is contained in:
parent
6c3d250e4b
commit
fc74de33dc
File diff suppressed because it is too large
Load Diff
|
@ -44,6 +44,7 @@ rand = "0.8.5"
|
|||
serde = { version = "1.0.201", features = ["derive"] }
|
||||
rmp-serde = "1.3.0"
|
||||
strum = { version = "0.26.2", features = ["derive"] }
|
||||
sqlx = { version = "0.7.4", features = ["sqlite"] }
|
||||
|
||||
# config
|
||||
trinitry = { version = "0.1.0" }
|
||||
|
|
|
@ -23,12 +23,12 @@ pub mod command_interface;
|
|||
pub mod config;
|
||||
pub mod events;
|
||||
pub mod status;
|
||||
pub mod storage;
|
||||
|
||||
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use cli_log::{debug, warn};
|
||||
use directories::ProjectDirs;
|
||||
use keymaps::trie::Node;
|
||||
use tokio::sync::mpsc::{self, Sender};
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
@ -37,6 +37,7 @@ use tokio_util::sync::CancellationToken;
|
|||
// lua_command_manager::LuaCommandManager,
|
||||
// };
|
||||
|
||||
use crate::app::storage::Storage;
|
||||
use crate::{
|
||||
app::{
|
||||
events::{Event, EventStatus},
|
||||
|
@ -58,7 +59,7 @@ pub struct App<U: TrinitrixUi> {
|
|||
input_listener_killer: CancellationToken,
|
||||
|
||||
// lua: LuaCommandManager,
|
||||
project_dirs: ProjectDirs,
|
||||
storage: Storage,
|
||||
|
||||
key_mappings: HashMap<State, Node<extern "C" fn()>>,
|
||||
}
|
||||
|
@ -89,10 +90,7 @@ impl<U: TrinitrixUi> App<U> {
|
|||
// lua: LuaCommandManager::new(tx),
|
||||
|
||||
// TODO: We probably want to populate the strings below a bit more <2023-09-09>
|
||||
project_dirs: ProjectDirs::from("", "", "trinitrix").context(
|
||||
"Failed to allocate project direcectory paths, \
|
||||
please ensure your $HOME is set correctly",
|
||||
)?,
|
||||
storage: Storage::new().await?,
|
||||
key_mappings: HashMap::new(),
|
||||
})
|
||||
}
|
||||
|
@ -112,9 +110,10 @@ impl<U: TrinitrixUi> App<U> {
|
|||
config::lua::load(self.tx.clone(), config_file).await?;
|
||||
warn!("Loading cli config file, will ignore the default locations");
|
||||
} else {
|
||||
let config_file = config::lua::check_for_config_file(self.project_dirs.config_dir())
|
||||
.await
|
||||
.context("Failed to check for the config file")?;
|
||||
let config_file =
|
||||
config::lua::check_for_config_file(self.storage.project_dirs().config_dir())
|
||||
.await
|
||||
.context("Failed to check for the config file")?;
|
||||
|
||||
if let Some(config) = config_file {
|
||||
config::lua::load(self.tx.clone(), config).await?;
|
||||
|
|
|
@ -0,0 +1,82 @@
|
|||
use anyhow::Result;
|
||||
use sqlx::{FromRow, SqlitePool};
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryInto;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Database-side representation of an [`Account`]: both UUID columns are
/// stored as raw byte blobs, matching the `BINARY(16)` columns that
/// `Accounts::new` creates, so `sqlx` can derive `FromRow` directly.
#[derive(Debug, FromRow)]
pub struct RawAccount {
    // Raw bytes of the account UUID (expected to be exactly 16 bytes).
    id: Vec<u8>,
    // Raw bytes of the `cbs` UUID — presumably a reference to a backend id;
    // TODO(review): confirm what "cbs" stands for.
    cbs: Vec<u8>,
}
|
||||
|
||||
/// In-memory, strongly-typed account record.
///
/// Converted to/from [`RawAccount`] for persistence; the conversion fails
/// if a stored blob is not a valid 16-byte UUID.
#[derive(Debug, Clone)]
pub struct Account {
    // Unique identifier of this account.
    id: Uuid,
    // Identifier of the associated "cbs" — presumably a backend;
    // TODO(review): confirm the semantics against the callers.
    cbs: Uuid,
}
|
||||
|
||||
impl From<Account> for RawAccount {
|
||||
fn from(value: Account) -> Self {
|
||||
Self {
|
||||
id: value.id.into_bytes().to_vec(),
|
||||
cbs: value.cbs.into_bytes().to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<RawAccount> for Account {
|
||||
type Error = anyhow::Error;
|
||||
fn try_from(value: RawAccount) -> std::result::Result<Self, Self::Error> {
|
||||
Ok(Self {
|
||||
id: Uuid::from_slice(value.id.as_slice())?,
|
||||
cbs: Uuid::from_slice(value.cbs.as_slice())?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Write-through store of all known accounts.
///
/// Holds a connection pool to the backing SQLite database plus an in-memory
/// cache keyed by account id; `add` updates both.
#[derive(Debug)]
pub struct Accounts {
    // Shared SQLite connection pool (cloned from the one Storage opens).
    pool: SqlitePool,
    // In-memory cache of every row in the `Accounts` table, keyed by id.
    accounts: HashMap<Uuid, Account>,
}
|
||||
|
||||
impl Accounts {
|
||||
pub async fn new(pool: SqlitePool) -> Result<Self> {
|
||||
sqlx::query(
|
||||
r#"CREATE TABLE IF NOT EXIST Accounts (
|
||||
id BINARY(16) PRIMARY KEY,
|
||||
cbs BINARY(16)
|
||||
);"#,
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
let raw_accounts: Vec<RawAccount> = sqlx::query_as(r#"SELECT * FROM Accounts;"#)
|
||||
.fetch_all(&pool)
|
||||
.await?;
|
||||
let mut accounts = HashMap::new();
|
||||
for account in raw_accounts {
|
||||
let account: Account = account.try_into()?;
|
||||
accounts.insert(account.id, account);
|
||||
}
|
||||
|
||||
Ok(Self { pool, accounts })
|
||||
}
|
||||
|
||||
pub async fn add(&mut self, id: Uuid, cbs: Uuid) -> Result<()> {
|
||||
let account = Account { id, cbs };
|
||||
|
||||
self.accounts.insert(id, account.clone());
|
||||
|
||||
let raw: RawAccount = account.into();
|
||||
|
||||
sqlx::query(r#"INSERT INTO Accounts VALUES ?, ?;"#)
|
||||
.bind(raw.id)
|
||||
.bind(raw.cbs)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -0,0 +1,76 @@
|
|||
use anyhow::Result;
|
||||
use sqlx::{FromRow, SqlitePool};
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryInto;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Database-side representation of a [`Backend`]: the UUID is stored as a
/// raw byte blob, matching the `BINARY(16)` column that `Backends::new`
/// creates, so `sqlx` can derive `FromRow` directly.
#[derive(Debug, FromRow)]
pub struct RawBackend {
    // Raw bytes of the backend UUID (expected to be exactly 16 bytes).
    id: Vec<u8>,
}
|
||||
|
||||
/// In-memory, strongly-typed backend record.
///
/// Converted to/from [`RawBackend`] for persistence; the conversion fails
/// if the stored blob is not a valid 16-byte UUID.
#[derive(Debug, Clone)]
pub struct Backend {
    // Unique identifier of this backend.
    id: Uuid,
}
|
||||
|
||||
impl From<Backend> for RawBackend {
|
||||
fn from(value: Backend) -> Self {
|
||||
Self {
|
||||
id: value.id.into_bytes().to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<RawBackend> for Backend {
|
||||
type Error = anyhow::Error;
|
||||
fn try_from(value: RawBackend) -> std::result::Result<Self, Self::Error> {
|
||||
Ok(Self {
|
||||
id: Uuid::from_slice(value.id.as_slice())?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Write-through store of all known backends.
///
/// Holds a connection pool to the backing SQLite database plus an in-memory
/// cache keyed by backend id; `add` updates both.
#[derive(Debug)]
pub struct Backends {
    // Shared SQLite connection pool (cloned from the one Storage opens).
    pool: SqlitePool,
    // In-memory cache of every row in the `Backends` table, keyed by id.
    backends: HashMap<Uuid, Backend>,
}
|
||||
|
||||
impl Backends {
|
||||
pub async fn new(pool: SqlitePool) -> Result<Self> {
|
||||
sqlx::query(
|
||||
r#"CREATE TABLE IF NOT EXIST Backends (
|
||||
id BINARY(16) PRIMARY KEY
|
||||
);"#,
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
let raw_backends: Vec<RawBackend> = sqlx::query_as(r#"SELECT * FROM Backends;"#)
|
||||
.fetch_all(&pool)
|
||||
.await?;
|
||||
let mut backends = HashMap::new();
|
||||
for backend in raw_backends {
|
||||
let backend: Backend = backend.try_into()?;
|
||||
backends.insert(backend.id, backend);
|
||||
}
|
||||
|
||||
Ok(Self { pool, backends })
|
||||
}
|
||||
|
||||
pub async fn add(&mut self, id: Uuid) -> Result<()> {
|
||||
let backend = Backend { id };
|
||||
|
||||
self.backends.insert(id, backend.clone());
|
||||
|
||||
let raw: RawBackend = backend.into();
|
||||
|
||||
sqlx::query(r#"INSERT INTO Backends VALUES ?;"#)
|
||||
.bind(raw.id)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -0,0 +1,54 @@
|
|||
mod accounts;
|
||||
mod backends;
|
||||
|
||||
pub use accounts::{Account, Accounts};
|
||||
pub use backends::{Backend, Backends};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use directories::ProjectDirs;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
const DATABASE_NAME: &str = "storage.sqlite";
|
||||
const PROJECT_DIRS: (&str, &str, &str) = ("", "", "trinitrix");
|
||||
|
||||
/// Central storage handle: owns the project directory paths and the two
/// table-backed stores ([`Accounts`], [`Backends`]) that share one SQLite
/// connection pool.
pub struct Storage {
    // Platform-specific project directories (config, data, …).
    project_dirs: ProjectDirs,
    // Account store backed by the `Accounts` table.
    accounts: Accounts,
    // Backend store backed by the `Backends` table.
    backends: Backends,
}
|
||||
|
||||
impl Storage {
|
||||
pub async fn new() -> Result<Self> {
|
||||
let project_dirs = ProjectDirs::from("", "", "trinitrix").context(
|
||||
"Failed to allocate project directory paths, please ensure your $HOME is set correctly",
|
||||
)?;
|
||||
|
||||
let pool = SqlitePool::connect(&format!(
|
||||
"sqlite:{}/{}",
|
||||
project_dirs.data_dir().to_string_lossy(),
|
||||
DATABASE_NAME
|
||||
))
|
||||
.await?;
|
||||
|
||||
let accounts = Accounts::new(pool.clone()).await?;
|
||||
let backends = Backends::new(pool).await?;
|
||||
|
||||
Ok(Self {
|
||||
project_dirs,
|
||||
accounts,
|
||||
backends,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn project_dirs(&self) -> &ProjectDirs {
|
||||
&self.project_dirs
|
||||
}
|
||||
|
||||
pub fn accounts(&self) -> &Accounts {
|
||||
&self.accounts
|
||||
}
|
||||
|
||||
pub fn backends(&self) -> &Backends {
|
||||
&self.backends
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue