Ok fuck this approach

I'm not a fan. It's starting to feel... wrong. Let's start again :)
Ellie Huxtable 2023-03-15 21:53:22 +00:00
parent d85620d827
commit 58e3d5db33
7 changed files with 44 additions and 23 deletions

View File

@@ -139,6 +139,21 @@ impl<'a> Client<'a> {
         Ok(count.count)
     }
 
+    pub async fn event_count(&self) -> Result<i64> {
+        let url = format!("{}/event/count", self.sync_addr);
+        let url = Url::parse(url.as_str())?;
+
+        let resp = self.client.get(url).send().await?;
+
+        if resp.status() != StatusCode::OK {
+            bail!("failed to get count (are you logged in?)");
+        }
+
+        let count = resp.json::<CountResponse>().await?;
+
+        Ok(count.count)
+    }
+
     pub async fn get_history(
         &self,
         sync_ts: chrono::DateTime<Utc>,
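For context, a minimal sketch of calling the new method — the surrounding setup (a constructed, logged-in Client) is assumed, not taken from this diff:

// Sketch only: assumes `client` is an already-built api_client::Client.
// event_count issues GET {sync_addr}/event/count and decodes CountResponse.
let events_on_remote = client.event_count().await?;
println!("remote knows about {} events", events_on_remote);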

View File

@@ -67,6 +67,7 @@ pub trait Database: Send + Sync {
     async fn update(&self, h: &History) -> Result<()>;
     async fn history_count(&self) -> Result<i64>;
     async fn event_count(&self) -> Result<i64>;
+    async fn merge_events(&self) -> Result<i64>;
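merge_events is new on the trait and its real (Sqlite) implementation isn't in this diff. For a custom backend, a placeholder might look like the fragment below — the no-op body and the meaning of the returned count are assumptions:

// Hypothetical stub inside an #[async_trait] impl for a backend that has
// nothing to merge yet; returns 0 merged events. Not the real implementation.
async fn merge_events(&self) -> Result<i64> {
    Ok(0)
}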

View File

@@ -129,6 +129,7 @@ pub struct Settings {
     pub scroll_context_lines: usize,
     #[serde(with = "serde_regex", default = "RegexSet::empty")]
     pub history_filter: RegexSet,
+    pub sync_events: bool,
 
     // This is automatically loaded when settings is created. Do not set in
     // config! Keep secrets and settings apart.
@@ -317,6 +318,7 @@ impl Settings {
                 "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789",
             )?
             .set_default("scroll_context_lines", 1)?
+            .set_default("sync_events", false)?
             .add_source(
                 Environment::with_prefix("atuin")
                     .prefix_separator("_")

View File

@@ -3,7 +3,7 @@ use std::convert::TryInto;
 use chrono::prelude::*;
 use eyre::Result;
 
-use atuin_common::{api::AddHistoryRequest, utils::hash_str};
+use atuin_common::{api::AddEventRequest, utils::hash_str};
 
 use crate::{
     api_client,
@@ -23,14 +23,14 @@ use crate::{
 
 // Check if remote has things we don't, and if so, download them.
 // Returns (num downloaded, total local)
-async fn sync_download(
+async fn sync_event_download(
     force: bool,
     client: &api_client::Client<'_>,
     db: &mut (impl Database + Send),
 ) -> Result<(i64, i64)> {
     debug!("starting sync download");
 
-    let remote_count = client.count().await?;
+    let remote_count = client.event_count().await?;
 
     let initial_local = db.event_count().await?;
     let mut local_count = initial_local;
@@ -52,7 +52,7 @@ async fn sync_download(
         db.save_bulk(&page).await?;
 
-        local_count = db.history_count().await?;
+        local_count = db.event_count().await?;
 
         if page.len() < HISTORY_PAGE_SIZE.try_into().unwrap() {
             break;
@@ -78,7 +78,7 @@ async fn sync_download(
 }
 
 // Check if we have things remote doesn't, and if so, upload them
-async fn sync_upload(
+async fn sync_event_upload(
     settings: &Settings,
     _force: bool,
     client: &api_client::Client<'_>,
@@ -89,7 +89,7 @@ async fn sync_upload(
     let initial_remote_count = client.count().await?;
     let mut remote_count = initial_remote_count;
 
-    let local_count = db.history_count().await?;
+    let local_count = db.event_count().await?;
 
     debug!("remote has {}, we have {}", remote_count, local_count);
@@ -111,18 +111,19 @@ async fn sync_upload(
             let data = encrypt(&i, &key)?;
             let data = serde_json::to_string(&data)?;
 
-            let add_hist = AddHistoryRequest {
+            let add_hist = AddEventRequest {
                 id: i.id,
                 timestamp: i.timestamp,
                 data,
                 hostname: hash_str(&i.hostname),
+                event_type: i.event_type,
             };
 
             buffer.push(add_hist);
         }
 
         // anything left over outside of the 100 block size
-        client.post_history(&buffer).await?;
+        client.post_event(&buffer).await?;
 
         cursor = buffer.last().unwrap().timestamp;
         remote_count = client.count().await?;
@@ -132,7 +133,11 @@ async fn sync_upload(
     Ok(())
 }
 
-pub async fn sync(settings: &Settings, force: bool, db: &mut (impl Database + Send)) -> Result<()> {
+pub async fn sync_event(
+    settings: &Settings,
+    force: bool,
+    db: &mut (impl Database + Send),
+) -> Result<()> {
+    db.merge_events().await?;
+
     let client = api_client::Client::new(
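The hunk cuts off inside Client::new, so the rest of sync_event isn't visible. Pieced together from the renames above, the assumed order is merge first, then the network passes — a rough sketch, with the wiring inferred rather than confirmed by this diff:

// Assumed flow only: the Client::new arguments are truncated in the hunk,
// and the download/upload calls are inferred from the renamed functions above.
db.merge_events().await?;
let (downloaded, local) = sync_event_download(force, &client, db).await?;
sync_event_upload(settings, force, &client, db).await?;
debug!("downloaded {} events, {} local", downloaded, local);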

View File

@@ -65,17 +65,9 @@ pub async fn list<DB: Database>(
             .with_status(StatusCode::INTERNAL_SERVER_ERROR));
     }
 
-    let events: Vec<String> = events
-        .unwrap()
-        .iter()
-        .map(|i| i.data.to_string())
-        .collect();
+    let events: Vec<String> = events.unwrap().iter().map(|i| i.data.to_string()).collect();
 
-    debug!(
-        "loaded {} events for user {}",
-        events.len(),
-        user.id
-    );
+    debug!("loaded {} events for user {}", events.len(), user.id);
 
     Ok(Json(SyncEventResponse { events }))
 }
@@ -90,7 +82,7 @@ pub async fn add<DB: Database>(
     let events: Vec<NewEvent> = req
         .into_iter()
-        .map(|h| NewEvent{
+        .map(|h| NewEvent {
             client_id: h.id,
             user_id: user.id,
             hostname: h.hostname,

View File

@@ -72,8 +72,9 @@ pub fn router<DB: Database + Clone + Send + Sync + 'static>(
         .route("/sync/count", get(handlers::history::count))
         .route("/sync/history", get(handlers::history::list))
         .route("/sync/calendar/:focus", get(handlers::history::calendar))
-        .route("/sync/event", get(handlers::event::list))
-        .route("/sync/event", post(handlers::event::add));
+        .route("/event/count", get(handlers::event::count))
+        .route("/event/sync", get(handlers::event::list))
+        .route("/event/sync", post(handlers::event::add));
 
     let path = settings.path.as_str();
     if path.is_empty() {
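Routing note: the event endpoints leave the /sync namespace — count lives at GET /event/count, and list/add share GET/POST /event/sync. A hand-rolled request for sanity-checking might look like this, where the address and the auth header are assumptions, not part of this diff:

// Illustrative only: assumes a locally running server and token auth;
// neither the address nor the Authorization scheme is shown in this diff.
let resp = reqwest::Client::new()
    .get("http://127.0.0.1:8888/event/count")
    .header("Authorization", "Token <session-key>")
    .send()
    .await?;
println!("{}", resp.text().await?);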

View File

@@ -60,7 +60,12 @@ impl Cmd {
 }
 
 async fn run(settings: &Settings, force: bool, db: &mut impl Database) -> Result<()> {
-    atuin_client::sync::sync(settings, force, db).await?;
+    if settings.sync_events {
+        atuin_client::sync_event::sync_event(settings, force, db).await?;
+    } else {
+        atuin_client::sync::sync(settings, force, db).await?;
+    }
+
     println!(
         "Sync complete! {} items in database, force: {}",
         db.history_count().await?,