Mirror of https://github.com/atuinsh/atuin.git (synced 2025-01-11 16:59:09 +01:00)
perf(dotfiles): cache aliases and read straight from file (#1918)
* cache aliases when set locally
* handle rebuild on sync and tidy things a bit
* support all shells except nu
* make clippy happy
* fmt
* fix for no features
This commit is contained in:
parent 61daae27ab · commit 894eaa6faf
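
In short: alias configuration is rendered once into per-shell cache files under a new dotfiles cache directory, and shell init reads those files instead of decrypting the record store on every startup; alias mutations and store rebuilds refresh the cache. A condensed sketch of the read side that the hunks below implement (all names are taken from the diff; `alias_store` is assumed to be an already-constructed AliasStore):

    // `atuin init zsh` now prints the cached config when it exists, and only
    // falls back to building the cache (or generating aliases on the fly).
    let aliases = atuin_dotfiles::shell::zsh::config(&alias_store).await;
    println!("{aliases}");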
@@ -75,6 +75,14 @@ pub fn data_dir() -> PathBuf {
     data_dir.join("atuin")
 }
 
+pub fn dotfiles_cache_dir() -> PathBuf {
+    // In most cases, this will be ~/.local/share/atuin/dotfiles/cache
+    let data_dir = std::env::var("XDG_DATA_HOME")
+        .map_or_else(|_| home_dir().join(".local").join("share"), PathBuf::from);
+
+    data_dir.join("atuin").join("dotfiles").join("cache")
+}
+
 pub fn get_current_dir() -> String {
     // Prefer PWD environment variable over cwd if available to better support symbolic links
     match env::var("PWD") {
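
For orientation, a minimal usage sketch of the new helper (the home directory value is hypothetical): with XDG_DATA_HOME unset and a home of /home/ellie, the cache directory resolves to /home/ellie/.local/share/atuin/dotfiles/cache, and the per-shell caches written by AliasStore::build (aliases.zsh, aliases.bash, aliases.fish, aliases.xsh) live underneath it.

    // Sketch: the helper only computes a path; nothing is created on disk here.
    let dir = atuin_common::utils::dotfiles_cache_dir();
    assert!(dir.ends_with("atuin/dotfiles/cache"));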
@@ -1,12 +1,39 @@
-use super::Alias;
-
-// Configuration for bash
-pub fn build(aliases: &[Alias]) -> String {
-    let mut config = String::new();
-
-    for alias in aliases {
-        config.push_str(&format!("alias {}='{}'\n", alias.name, alias.value));
-    }
-
-    config
-}
+use std::path::PathBuf;
+
+use crate::store::AliasStore;
+
+async fn cached_aliases(path: PathBuf, store: &AliasStore) -> String {
+    match tokio::fs::read_to_string(path).await {
+        Ok(aliases) => aliases,
+        Err(r) => {
+            // we failed to read the file for some reason, but the file does exist
+            // fallback to generating new aliases on the fly
+
+            store.posix().await.unwrap_or_else(|e| {
+                format!("echo 'Atuin: failed to read and generate aliases: \n{r}\n{e}'",)
+            })
+        }
+    }
+}
+
+/// Return bash dotfile config
+///
+/// Do not return an error. We should not prevent the shell from starting.
+///
+/// In the worst case, Atuin should not function but the shell should start correctly.
+///
+/// While currently this only returns aliases, it will be extended to also return other synced dotfiles
+pub async fn config(store: &AliasStore) -> String {
+    // First try to read the cached config
+    let aliases = atuin_common::utils::dotfiles_cache_dir().join("aliases.bash");
+
+    if aliases.exists() {
+        return cached_aliases(aliases, store).await;
+    }
+
+    if let Err(e) = store.build().await {
+        return format!("echo 'Atuin: failed to generate aliases: {}'", e);
+    }
+
+    cached_aliases(aliases, store).await
+}
@@ -1,12 +1,40 @@
-use super::Alias;
-
-// Configuration for fish
-pub fn build(aliases: &[Alias]) -> String {
-    let mut config = String::new();
-
-    for alias in aliases {
-        config.push_str(&format!("alias {}='{}'\n", alias.name, alias.value));
-    }
-
-    config
-}
+use std::path::PathBuf;
+
+use crate::store::AliasStore;
+
+async fn cached_aliases(path: PathBuf, store: &AliasStore) -> String {
+    match tokio::fs::read_to_string(path).await {
+        Ok(aliases) => aliases,
+        Err(r) => {
+            // we failed to read the file for some reason, but the file does exist
+            // fallback to generating new aliases on the fly
+
+            store.posix().await.unwrap_or_else(|e| {
+                format!("echo 'Atuin: failed to read and generate aliases: \n{r}\n{e}'",)
+            })
+        }
+    }
+}
+
+/// Return fish dotfile config
+///
+/// Do not return an error. We should not prevent the shell from starting.
+///
+/// In the worst case, Atuin should not function but the shell should start correctly.
+///
+/// While currently this only returns aliases, it will be extended to also return other synced dotfiles
+pub async fn config(store: &AliasStore) -> String {
+    // First try to read the cached config
+    let aliases = atuin_common::utils::dotfiles_cache_dir().join("aliases.fish");
+
+    if aliases.exists() {
+        return cached_aliases(aliases, store).await;
+    }
+
+    if let Err(e) = store.build().await {
+        return format!("echo 'Atuin: failed to generate aliases: {}'", e);
+    }
+
+    cached_aliases(aliases, store).await
+}
@@ -1,12 +1,39 @@
-use super::Alias;
-
-// Configuration for xonsh
-pub fn build(aliases: &[Alias]) -> String {
-    let mut config = String::new();
-
-    for alias in aliases {
-        config.push_str(&format!("aliases['{}'] ='{}'\n", alias.name, alias.value));
-    }
-
-    config
-}
+use std::path::PathBuf;
+
+use crate::store::AliasStore;
+
+async fn cached_aliases(path: PathBuf, store: &AliasStore) -> String {
+    match tokio::fs::read_to_string(path).await {
+        Ok(aliases) => aliases,
+        Err(r) => {
+            // we failed to read the file for some reason, but the file does exist
+            // fallback to generating new aliases on the fly
+
+            store.xonsh().await.unwrap_or_else(|e| {
+                format!("echo 'Atuin: failed to read and generate aliases: \n{r}\n{e}'",)
+            })
+        }
+    }
+}
+
+/// Return xonsh dotfile config
+///
+/// Do not return an error. We should not prevent the shell from starting.
+///
+/// In the worst case, Atuin should not function but the shell should start correctly.
+///
+/// While currently this only returns aliases, it will be extended to also return other synced dotfiles
+pub async fn config(store: &AliasStore) -> String {
+    // First try to read the cached config
+    let aliases = atuin_common::utils::dotfiles_cache_dir().join("aliases.xsh");
+
+    if aliases.exists() {
+        return cached_aliases(aliases, store).await;
+    }
+
+    if let Err(e) = store.build().await {
+        return format!("echo 'Atuin: failed to generate aliases: {}'", e);
+    }
+
+    cached_aliases(aliases, store).await
+}
@@ -1,12 +1,39 @@
-use super::Alias;
-
-// Configuration for zsh
-pub fn build(aliases: &[Alias]) -> String {
-    let mut config = String::new();
-
-    for alias in aliases {
-        config.push_str(&format!("alias {}='{}'\n", alias.name, alias.value));
-    }
-
-    config
-}
+use std::path::PathBuf;
+
+use crate::store::AliasStore;
+
+async fn cached_aliases(path: PathBuf, store: &AliasStore) -> String {
+    match tokio::fs::read_to_string(path).await {
+        Ok(aliases) => aliases,
+        Err(r) => {
+            // we failed to read the file for some reason, but the file does exist
+            // fallback to generating new aliases on the fly
+
+            store.posix().await.unwrap_or_else(|e| {
+                format!("echo 'Atuin: failed to read and generate aliases: \n{r}\n{e}'",)
+            })
+        }
+    }
+}
+
+/// Return zsh dotfile config
+///
+/// Do not return an error. We should not prevent the shell from starting.
+///
+/// In the worst case, Atuin should not function but the shell should start correctly.
+///
+/// While currently this only returns aliases, it will be extended to also return other synced dotfiles
+pub async fn config(store: &AliasStore) -> String {
+    // First try to read the cached config
+    let aliases = atuin_common::utils::dotfiles_cache_dir().join("aliases.zsh");
+
+    if aliases.exists() {
+        return cached_aliases(aliases, store).await;
+    }
+
+    if let Err(e) = store.build().await {
+        return format!("echo 'Atuin: failed to generate aliases: {}'", e);
+    }
+
+    cached_aliases(aliases, store).await
+}
@@ -136,6 +136,54 @@ impl AliasStore {
         }
     }
 
+    pub async fn posix(&self) -> Result<String> {
+        let aliases = self.aliases().await?;
+
+        let mut config = String::new();
+
+        for alias in aliases {
+            config.push_str(&format!("alias {}='{}'\n", alias.name, alias.value));
+        }
+
+        Ok(config)
+    }
+
+    pub async fn xonsh(&self) -> Result<String> {
+        let aliases = self.aliases().await?;
+
+        let mut config = String::new();
+
+        for alias in aliases {
+            config.push_str(&format!("aliases['{}'] ='{}'\n", alias.name, alias.value));
+        }
+
+        Ok(config)
+    }
+
+    pub async fn build(&self) -> Result<()> {
+        let dir = atuin_common::utils::dotfiles_cache_dir();
+        tokio::fs::create_dir_all(dir.clone()).await?;
+
+        // Build for all supported shells
+        let posix = self.posix().await?;
+        let xonsh = self.xonsh().await?;
+
+        // All the same contents, maybe optimize in the future or perhaps there will be quirks
+        // per-shell
+        // I'd prefer separation atm
+        let zsh = dir.join("aliases.zsh");
+        let bash = dir.join("aliases.bash");
+        let fish = dir.join("aliases.fish");
+        let xsh = dir.join("aliases.xsh");
+
+        tokio::fs::write(zsh, &posix).await?;
+        tokio::fs::write(bash, &posix).await?;
+        tokio::fs::write(fish, &posix).await?;
+        tokio::fs::write(xsh, &xonsh).await?;
+
+        Ok(())
+    }
+
     pub async fn set(&self, name: &str, value: &str) -> Result<()> {
         if name.len() + value.len() > CONFIG_SHELL_ALIAS_FIELD_MAX_LEN {
             return Err(eyre!(
@@ -169,6 +217,9 @@ impl AliasStore {
             .push(&record.encrypt::<PASETO_V4>(&self.encryption_key))
             .await?;
 
+        // set mutates shell config, so build again
+        self.build().await?;
+
         Ok(())
     }
 
@@ -202,6 +253,9 @@ impl AliasStore {
             .push(&record.encrypt::<PASETO_V4>(&self.encryption_key))
             .await?;
 
+        // delete mutates shell config, so build again
+        self.build().await?;
+
         Ok(())
     }
 
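
Taken together with AliasStore::build above, these additions mean every alias mutation refreshes the on-disk cache immediately. A minimal sketch of the observable effect, assuming an already-constructed `alias_store: AliasStore` inside an async function returning an eyre::Result (the alias itself is hypothetical):

    // `set` pushes the encrypted record and then calls build(), so the new
    // alias shows up in the per-shell cache files right away.
    alias_store.set("k", "kubectl").await?;

    let cache = atuin_common::utils::dotfiles_cache_dir().join("aliases.bash");
    let contents = tokio::fs::read_to_string(&cache).await?;
    assert!(contents.contains("alias k='kubectl'"));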
@@ -365,7 +365,7 @@ impl Cmd {
             let (_, downloaded) = record::sync::sync(settings, &store).await?;
             Settings::save_sync_time()?;
 
-            history_store.incremental_build(db, &downloaded).await?;
+            crate::sync::build(settings, &store, db, Some(&downloaded)).await?;
         } else {
             debug!("running periodic background sync");
             sync::sync(settings, false, db).await?;
@@ -18,8 +18,7 @@ pub fn init_static(disable_up_arrow: bool, disable_ctrl_r: bool) {
 pub async fn init(store: AliasStore, disable_up_arrow: bool, disable_ctrl_r: bool) -> Result<()> {
     init_static(disable_up_arrow, disable_ctrl_r);
 
-    let aliases = store.aliases().await?;
-    let aliases = atuin_dotfiles::shell::bash::build(&aliases[..]);
+    let aliases = atuin_dotfiles::shell::bash::config(&store).await;
 
     println!("{aliases}");
 
@@ -37,8 +37,7 @@ bind -M insert \e\[A _atuin_bind_up";
 pub async fn init(store: AliasStore, disable_up_arrow: bool, disable_ctrl_r: bool) -> Result<()> {
     init_static(disable_up_arrow, disable_ctrl_r);
 
-    let aliases = store.aliases().await?;
-    let aliases = atuin_dotfiles::shell::fish::build(&aliases[..]);
+    let aliases = atuin_dotfiles::shell::fish::config(&store).await;
 
     println!("{aliases}");
 
@@ -23,8 +23,7 @@ pub fn init_static(disable_up_arrow: bool, disable_ctrl_r: bool) {
 pub async fn init(store: AliasStore, disable_up_arrow: bool, disable_ctrl_r: bool) -> Result<()> {
     init_static(disable_up_arrow, disable_ctrl_r);
 
-    let aliases = store.aliases().await?;
-    let aliases = atuin_dotfiles::shell::xonsh::build(&aliases[..]);
+    let aliases = atuin_dotfiles::shell::xonsh::config(&store).await;
 
     println!("{aliases}");
 
@@ -31,8 +31,7 @@ bindkey -M vicmd 'k' atuin-up-search-vicmd";
 pub async fn init(store: AliasStore, disable_up_arrow: bool, disable_ctrl_r: bool) -> Result<()> {
     init_static(disable_up_arrow, disable_ctrl_r);
 
-    let aliases = store.aliases().await?;
-    let aliases = atuin_dotfiles::shell::zsh::build(&aliases[..]);
+    let aliases = atuin_dotfiles::shell::zsh::config(&store).await;
 
     println!("{aliases}");
 
@@ -1,10 +1,8 @@
 use clap::Args;
-use eyre::{Result, WrapErr};
+use eyre::Result;
 
 use atuin_client::{
     database::Database,
-    encryption,
-    history::store::HistoryStore,
     record::store::Store,
     record::sync::Operation,
     record::{sqlite_store::SqliteStore, sync},
@@ -73,13 +71,7 @@ impl Pull {
 
         println!("Downloaded {} records", downloaded.len());
 
-        let encryption_key: [u8; 32] = encryption::load_key(settings)
-            .context("could not load encryption key")?
-            .into();
-
-        let host_id = Settings::host_id().expect("failed to get host_id");
-        let history_store = HistoryStore::new(store.clone(), host_id, encryption_key);
-        history_store.incremental_build(db, &downloaded).await?;
+        crate::sync::build(settings, &store, db, Some(&downloaded)).await?;
 
         Ok(())
     }
@@ -1,3 +1,4 @@
+use atuin_dotfiles::store::AliasStore;
 use clap::Args;
 use eyre::{bail, Result};
 
@@ -28,6 +29,10 @@ impl Rebuild {
                 .await?;
             }
 
+            "dotfiles" => {
+                self.rebuild_dotfiles(settings, store.clone()).await?;
+            }
+
             tag => bail!("unknown tag: {tag}"),
         }
 
@@ -49,4 +54,15 @@ impl Rebuild {
 
         Ok(())
     }
+
+    async fn rebuild_dotfiles(&self, settings: &Settings, store: SqliteStore) -> Result<()> {
+        let encryption_key: [u8; 32] = encryption::load_key(settings)?.into();
+
+        let host_id = Settings::host_id().expect("failed to get host_id");
+        let alias_store = AliasStore::new(store, host_id, encryption_key);
+
+        alias_store.build().await?;
+
+        Ok(())
+    }
 }
@@ -90,7 +90,7 @@ async fn run(
 
     let (uploaded, downloaded) = sync::sync(settings, &store).await?;
 
-    history_store.incremental_build(db, &downloaded).await?;
+    crate::sync::build(settings, &store, db, Some(&downloaded)).await?;
 
     println!("{uploaded}/{} up/down to record store", downloaded.len());
 
@@ -113,7 +113,7 @@ async fn run(
         // we'll want to run sync once more, as there will now be stuff to upload
         let (uploaded, downloaded) = sync::sync(settings, &store).await?;
 
-        history_store.incremental_build(db, &downloaded).await?;
+        crate::sync::build(settings, &store, db, Some(&downloaded)).await?;
 
         println!("{uploaded}/{} up/down to record store", downloaded.len());
     }
@@ -8,6 +8,9 @@ use command::AtuinCmd;
 
 mod command;
 
+#[cfg(feature = "sync")]
+mod sync;
+
 const VERSION: &str = env!("CARGO_PKG_VERSION");
 const SHA: &str = env!("GIT_HASH");
 
atuin/src/sync.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
+use atuin_dotfiles::store::AliasStore;
+use eyre::{Context, Result};
+
+use atuin_client::{
+    database::Database, history::store::HistoryStore, record::sqlite_store::SqliteStore,
+    settings::Settings,
+};
+use atuin_common::record::RecordId;
+
+/// This is the only crate that ties together all other crates.
+/// Therefore, it's the only crate where functions tying together all stores can live
+
+/// Rebuild all stores after a sync
+/// Note: for history, this only does an _incremental_ sync. Hence the need to specify downloaded
+/// records.
+pub async fn build(
+    settings: &Settings,
+    store: &SqliteStore,
+    db: &dyn Database,
+    downloaded: Option<&[RecordId]>,
+) -> Result<()> {
+    let encryption_key: [u8; 32] = atuin_client::encryption::load_key(settings)
+        .context("could not load encryption key")?
+        .into();
+
+    let host_id = Settings::host_id().expect("failed to get host_id");
+
+    let downloaded = downloaded.unwrap_or(&[]);
+
+    let history_store = HistoryStore::new(store.clone(), host_id, encryption_key);
+    let alias_store = AliasStore::new(store.clone(), host_id, encryption_key);
+
+    history_store.incremental_build(db, downloaded).await?;
+    alias_store.build().await?;
+
+    Ok(())
+}