Diffstat (limited to 'src')
-rw-r--r--  src/app.rs  9
-rw-r--r--  src/bin/hi-recanonicalize.rs  9
-rw-r--r--  src/bin/hi.rs (renamed from src/main.rs)  0
-rw-r--r--  src/boot/app.rs  36
-rw-r--r--  src/boot/routes.rs  27
-rw-r--r--  src/boot/routes/get.rs  27
-rw-r--r--  src/boot/routes/mod.rs  11
-rw-r--r--  src/boot/routes/test.rs  130
-rw-r--r--  src/channel/app.rs  89
-rw-r--r--  src/channel/history.rs  5
-rw-r--r--  src/channel/repo.rs  349
-rw-r--r--  src/channel/routes.rs  121
-rw-r--r--  src/channel/routes/channel/delete.rs  39
-rw-r--r--  src/channel/routes/channel/mod.rs  9
-rw-r--r--  src/channel/routes/channel/post.rs  58
-rw-r--r--  src/channel/routes/channel/test/delete.rs  154
-rw-r--r--  src/channel/routes/channel/test/mod.rs  2
-rw-r--r--  src/channel/routes/channel/test/post.rs (renamed from src/channel/routes/test/on_send.rs)  71
-rw-r--r--  src/channel/routes/mod.rs  19
-rw-r--r--  src/channel/routes/post.rs  60
-rw-r--r--  src/channel/routes/test.rs  221
-rw-r--r--  src/channel/routes/test/mod.rs  2
-rw-r--r--  src/channel/routes/test/on_create.rs  88
-rw-r--r--  src/channel/snapshot.rs  5
-rw-r--r--  src/cli/mod.rs (renamed from src/cli.rs)  4
-rw-r--r--  src/cli/recanonicalize.rs  86
-rw-r--r--  src/db/mod.rs  15
-rw-r--r--  src/error.rs  17
-rw-r--r--  src/event/app.rs  30
-rw-r--r--  src/event/repo.rs  5
-rw-r--r--  src/event/routes/get.rs (renamed from src/event/routes.rs)  51
-rw-r--r--  src/event/routes/mod.rs  11
-rw-r--r--  src/event/routes/test.rs  88
-rw-r--r--  src/event/sequence.rs  11
-rw-r--r--  src/expire.rs  2
-rw-r--r--  src/invite/app.rs  27
-rw-r--r--  src/invite/mod.rs  8
-rw-r--r--  src/invite/repo.rs  66
-rw-r--r--  src/invite/routes.rs  97
-rw-r--r--  src/invite/routes/invite/get.rs  39
-rw-r--r--  src/invite/routes/invite/mod.rs  4
-rw-r--r--  src/invite/routes/invite/post.rs  52
-rw-r--r--  src/invite/routes/mod.rs  16
-rw-r--r--  src/invite/routes/post.rs  19
-rw-r--r--  src/lib.rs  2
-rw-r--r--  src/login/app.rs  29
-rw-r--r--  src/login/extract.rs  15
-rw-r--r--  src/login/history.rs  6
-rw-r--r--  src/login/mod.rs  2
-rw-r--r--  src/login/password.rs  7
-rw-r--r--  src/login/repo.rs  140
-rw-r--r--  src/login/routes.rs  97
-rw-r--r--  src/login/routes/login/mod.rs  4
-rw-r--r--  src/login/routes/login/post.rs  52
-rw-r--r--  src/login/routes/login/test.rs (renamed from src/login/routes/test/login.rs)  48
-rw-r--r--  src/login/routes/logout/mod.rs  4
-rw-r--r--  src/login/routes/logout/post.rs  47
-rw-r--r--  src/login/routes/logout/test.rs  79
-rw-r--r--  src/login/routes/mod.rs  12
-rw-r--r--  src/login/routes/test/logout.rs  97
-rw-r--r--  src/login/routes/test/mod.rs  2
-rw-r--r--  src/login/snapshot.rs  3
-rw-r--r--  src/message/app.rs  47
-rw-r--r--  src/message/body.rs  30
-rw-r--r--  src/message/history.rs  5
-rw-r--r--  src/message/mod.rs  5
-rw-r--r--  src/message/repo.rs  234
-rw-r--r--  src/message/routes.rs  46
-rw-r--r--  src/message/routes/message/mod.rs  46
-rw-r--r--  src/message/routes/message/test.rs  160
-rw-r--r--  src/message/routes/mod.rs  9
-rw-r--r--  src/message/snapshot.rs  8
-rw-r--r--  src/name.rs  85
-rw-r--r--  src/normalize/mod.rs  36
-rw-r--r--  src/normalize/string.rs  112
-rw-r--r--  src/setup/app.rs  9
-rw-r--r--  src/setup/routes/mod.rs  9
-rw-r--r--  src/setup/routes/post.rs (renamed from src/setup/routes.rs)  45
-rw-r--r--  src/test/fixtures/channel.rs  36
-rw-r--r--  src/test/fixtures/cookie.rs  37
-rw-r--r--  src/test/fixtures/event.rs  7
-rw-r--r--  src/test/fixtures/filter.rs  11
-rw-r--r--  src/test/fixtures/identity.rs  46
-rw-r--r--  src/test/fixtures/login.rs  18
-rw-r--r--  src/test/fixtures/message.rs  30
-rw-r--r--  src/test/fixtures/mod.rs  7
-rw-r--r--  src/token/app.rs  49
-rw-r--r--  src/token/extract/cookie.rs (renamed from src/token/extract/identity_token.rs)  22
-rw-r--r--  src/token/extract/identity.rs  15
-rw-r--r--  src/token/extract/mod.rs  4
-rw-r--r--  src/token/repo/auth.rs  69
-rw-r--r--  src/token/repo/mod.rs  2
-rw-r--r--  src/token/repo/token.rs  66
-rw-r--r--  src/ui.rs  134
-rw-r--r--  src/ui/assets.rs  63
-rw-r--r--  src/ui/error.rs  18
-rw-r--r--  src/ui/middleware.rs  15
-rw-r--r--  src/ui/mime.rs  22
-rw-r--r--  src/ui/mod.rs  7
-rw-r--r--  src/ui/routes/ch/channel.rs  61
-rw-r--r--  src/ui/routes/ch/mod.rs  1
-rw-r--r--  src/ui/routes/get.rs  30
-rw-r--r--  src/ui/routes/invite/invite.rs  55
-rw-r--r--  src/ui/routes/invite/mod.rs  4
-rw-r--r--  src/ui/routes/login.rs  11
-rw-r--r--  src/ui/routes/mod.rs  26
-rw-r--r--  src/ui/routes/path.rs  9
-rw-r--r--  src/ui/routes/setup.rs  43
108 files changed, 3264 insertions, 1375 deletions
diff --git a/src/app.rs b/src/app.rs
index cb05061..6d71259 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -5,14 +5,12 @@ use crate::{
channel::app::Channels,
event::{self, app::Events},
invite::app::Invites,
+ login::app::Logins,
message::app::Messages,
setup::app::Setup,
token::{self, app::Tokens},
};
-#[cfg(test)]
-use crate::login::app::Logins;
-
#[derive(Clone)]
pub struct App {
db: SqlitePool,
@@ -49,6 +47,11 @@ impl App {
Invites::new(&self.db)
}
+ #[cfg(not(test))]
+ pub const fn logins(&self) -> Logins {
+ Logins::new(&self.db)
+ }
+
#[cfg(test)]
pub const fn logins(&self) -> Logins {
Logins::new(&self.db, &self.events)
diff --git a/src/bin/hi-recanonicalize.rs b/src/bin/hi-recanonicalize.rs
new file mode 100644
index 0000000..4081276
--- /dev/null
+++ b/src/bin/hi-recanonicalize.rs
@@ -0,0 +1,9 @@
+use clap::Parser;
+
+use hi::cli;
+
+#[tokio::main]
+async fn main() -> Result<(), cli::recanonicalize::Error> {
+ let args = cli::recanonicalize::Args::parse();
+ args.run().await
+}
diff --git a/src/main.rs b/src/bin/hi.rs
index d0830ff..d0830ff 100644
--- a/src/main.rs
+++ b/src/bin/hi.rs
diff --git a/src/boot/app.rs b/src/boot/app.rs
index ef48b2f..e716b58 100644
--- a/src/boot/app.rs
+++ b/src/boot/app.rs
@@ -2,8 +2,11 @@ use sqlx::sqlite::SqlitePool;
use super::Snapshot;
use crate::{
- channel::repo::Provider as _, event::repo::Provider as _, login::repo::Provider as _,
+ channel::{self, repo::Provider as _},
+ event::repo::Provider as _,
+ login::{self, repo::Provider as _},
message::repo::Provider as _,
+ name,
};
pub struct Boot<'a> {
@@ -15,12 +18,12 @@ impl<'a> Boot<'a> {
Self { db }
}
- pub async fn snapshot(&self) -> Result<Snapshot, sqlx::Error> {
+ pub async fn snapshot(&self) -> Result<Snapshot, Error> {
let mut tx = self.db.begin().await?;
let resume_point = tx.sequence().current().await?;
let logins = tx.logins().all(resume_point.into()).await?;
- let channels = tx.channels().all(resume_point.into()).await?;
+ let channels = tx.channels().all(resume_point).await?;
let messages = tx.messages().all(resume_point.into()).await?;
tx.commit().await?;
@@ -48,3 +51,30 @@ impl<'a> Boot<'a> {
})
}
}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum Error {
+ Name(#[from] name::Error),
+ Database(#[from] sqlx::Error),
+}
+
+impl From<login::repo::LoadError> for Error {
+ fn from(error: login::repo::LoadError) -> Self {
+ use login::repo::LoadError;
+ match error {
+ LoadError::Name(error) => error.into(),
+ LoadError::Database(error) => error.into(),
+ }
+ }
+}
+
+impl From<channel::repo::LoadError> for Error {
+ fn from(error: channel::repo::LoadError) -> Self {
+ use channel::repo::LoadError;
+ match error {
+ LoadError::Name(error) => error.into(),
+ LoadError::Database(error) => error.into(),
+ }
+ }
+}
diff --git a/src/boot/routes.rs b/src/boot/routes.rs
deleted file mode 100644
index 80f70bd..0000000
--- a/src/boot/routes.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use axum::{
- extract::{Json, State},
- routing::get,
- Router,
-};
-
-use super::Snapshot;
-use crate::{app::App, error::Internal, login::Login};
-
-#[cfg(test)]
-mod test;
-
-pub fn router() -> Router<App> {
- Router::new().route("/api/boot", get(boot))
-}
-
-async fn boot(State(app): State<App>, login: Login) -> Result<Json<Boot>, Internal> {
- let snapshot = app.boot().snapshot().await?;
- Ok(Boot { login, snapshot }.into())
-}
-
-#[derive(serde::Serialize)]
-struct Boot {
- login: Login,
- #[serde(flatten)]
- snapshot: Snapshot,
-}
diff --git a/src/boot/routes/get.rs b/src/boot/routes/get.rs
new file mode 100644
index 0000000..563fbf1
--- /dev/null
+++ b/src/boot/routes/get.rs
@@ -0,0 +1,27 @@
+use axum::{
+ extract::{Json, State},
+ response::{self, IntoResponse},
+};
+
+use crate::{app::App, boot::Snapshot, error::Internal, login::Login, token::extract::Identity};
+
+pub async fn handler(State(app): State<App>, identity: Identity) -> Result<Response, Internal> {
+ let snapshot = app.boot().snapshot().await?;
+ Ok(Response {
+ login: identity.login,
+ snapshot,
+ })
+}
+
+#[derive(serde::Serialize)]
+pub struct Response {
+ pub login: Login,
+ #[serde(flatten)]
+ pub snapshot: Snapshot,
+}
+
+impl IntoResponse for Response {
+ fn into_response(self) -> response::Response {
+ Json(self).into_response()
+ }
+}
diff --git a/src/boot/routes/mod.rs b/src/boot/routes/mod.rs
new file mode 100644
index 0000000..e4d5ac8
--- /dev/null
+++ b/src/boot/routes/mod.rs
@@ -0,0 +1,11 @@
+use axum::{routing::get, Router};
+
+use crate::app::App;
+
+mod get;
+#[cfg(test)]
+mod test;
+
+pub fn router() -> Router<App> {
+ Router::new().route("/api/boot", get(get::handler))
+}
diff --git a/src/boot/routes/test.rs b/src/boot/routes/test.rs
index 5f2ba6f..8808b70 100644
--- a/src/boot/routes/test.rs
+++ b/src/boot/routes/test.rs
@@ -1,14 +1,134 @@
-use axum::extract::{Json, State};
+use axum::extract::State;
-use crate::{boot::routes, test::fixtures};
+use super::get;
+use crate::test::fixtures;
#[tokio::test]
async fn returns_identity() {
let app = fixtures::scratch_app().await;
- let login = fixtures::login::fictitious();
- let Json(response) = routes::boot(State(app), login.clone())
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer.clone())
+ .await
+ .expect("boot always succeeds");
+
+ assert_eq!(viewer.login, response.login);
+}
+
+#[tokio::test]
+async fn includes_logins() {
+ let app = fixtures::scratch_app().await;
+ let spectator = fixtures::login::create(&app, &fixtures::now()).await;
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
+ .await
+ .expect("boot always succeeds");
+
+ assert!(response.snapshot.logins.contains(&spectator));
+}
+
+#[tokio::test]
+async fn includes_channels() {
+ let app = fixtures::scratch_app().await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
+ .await
+ .expect("boot always succeeds");
+
+ assert!(response.snapshot.channels.contains(&channel));
+}
+
+#[tokio::test]
+async fn includes_messages() {
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+ let message = fixtures::message::send(&app, &channel, &sender, &fixtures::now()).await;
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
+ .await
+ .expect("boot always succeeds");
+
+ assert!(response.snapshot.messages.contains(&message));
+}
+
+#[tokio::test]
+async fn excludes_expired_messages() {
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::ancient()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::ancient()).await;
+ let expired_message =
+ fixtures::message::send(&app, &channel, &sender, &fixtures::ancient()).await;
+
+ app.messages()
+ .expire(&fixtures::now())
+ .await
+ .expect("expiry never fails");
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
+ .await
+ .expect("boot always succeeds");
+
+ assert!(!response.snapshot.messages.contains(&expired_message));
+}
+
+#[tokio::test]
+async fn excludes_deleted_messages() {
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+ let deleted_message = fixtures::message::send(&app, &channel, &sender, &fixtures::now()).await;
+
+ app.messages()
+ .delete(&deleted_message.id, &fixtures::now())
+ .await
+ .expect("deleting valid message succeeds");
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
+ .await
+ .expect("boot always succeeds");
+
+ assert!(!response.snapshot.messages.contains(&deleted_message));
+}
+
+#[tokio::test]
+async fn excludes_expired_channels() {
+ let app = fixtures::scratch_app().await;
+ let expired_channel = fixtures::channel::create(&app, &fixtures::ancient()).await;
+
+ app.channels()
+ .expire(&fixtures::now())
+ .await
+ .expect("expiry never fails");
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
+ .await
+ .expect("boot always succeeds");
+
+ assert!(!response.snapshot.channels.contains(&expired_channel));
+}
+
+#[tokio::test]
+async fn excludes_deleted_channels() {
+ let app = fixtures::scratch_app().await;
+ let deleted_channel = fixtures::channel::create(&app, &fixtures::now()).await;
+
+ app.channels()
+ .delete(&deleted_channel.id, &fixtures::now())
+ .await
+ .expect("deleting a valid channel succeeds");
+
+ let viewer = fixtures::identity::fictitious();
+ let response = get::handler(State(app), viewer)
.await
.expect("boot always succeeds");
- assert_eq!(login, response.login);
+ assert!(!response.snapshot.channels.contains(&deleted_channel));
}
diff --git a/src/channel/app.rs b/src/channel/app.rs
index 5d6cada..7bfa0f7 100644
--- a/src/channel/app.rs
+++ b/src/channel/app.rs
@@ -2,12 +2,16 @@ use chrono::TimeDelta;
use itertools::Itertools;
use sqlx::sqlite::SqlitePool;
-use super::{repo::Provider as _, Channel, History, Id};
+use super::{
+ repo::{LoadError, Provider as _},
+ Channel, History, Id,
+};
use crate::{
clock::DateTime,
db::{Duplicate as _, NotFound as _},
event::{repo::Provider as _, Broadcaster, Event, Sequence},
message::repo::Provider as _,
+ name::{self, Name},
};
pub struct Channels<'a> {
@@ -20,14 +24,14 @@ impl<'a> Channels<'a> {
Self { db, events }
}
- pub async fn create(&self, name: &str, created_at: &DateTime) -> Result<Channel, CreateError> {
+ pub async fn create(&self, name: &Name, created_at: &DateTime) -> Result<Channel, CreateError> {
let mut tx = self.db.begin().await?;
let created = tx.sequence().next(created_at).await?;
let channel = tx
.channels()
.create(name, &created)
.await
- .duplicate(|| CreateError::DuplicateName(name.into()))?;
+ .duplicate(|| CreateError::DuplicateName(name.clone()))?;
tx.commit().await?;
self.events
@@ -38,12 +42,12 @@ impl<'a> Channels<'a> {
// This function is careless with respect to time, and gets you the channel as
// it exists in the specific moment when you call it.
- pub async fn get(&self, channel: &Id) -> Result<Option<Channel>, sqlx::Error> {
+ pub async fn get(&self, channel: &Id) -> Result<Option<Channel>, Error> {
let mut tx = self.db.begin().await?;
let channel = tx.channels().by_id(channel).await.optional()?;
tx.commit().await?;
- Ok(channel.iter().flat_map(History::events).collect())
+ Ok(channel.as_ref().and_then(History::as_snapshot))
}
pub async fn delete(&self, channel: &Id, deleted_at: &DateTime) -> Result<(), Error> {
@@ -54,13 +58,16 @@ impl<'a> Channels<'a> {
.by_id(channel)
.await
.not_found(|| Error::NotFound(channel.clone()))?;
+ channel
+ .as_snapshot()
+ .ok_or_else(|| Error::Deleted(channel.id().clone()))?;
let mut events = Vec::new();
- let messages = tx.messages().in_channel(&channel, None).await?;
+ let messages = tx.messages().live(&channel).await?;
for message in messages {
let deleted = tx.sequence().next(deleted_at).await?;
- let message = tx.messages().delete(message.id(), &deleted).await?;
+ let message = tx.messages().delete(&message, &deleted).await?;
events.extend(
message
.events()
@@ -70,7 +77,7 @@ impl<'a> Channels<'a> {
}
let deleted = tx.sequence().next(deleted_at).await?;
- let channel = tx.channels().delete(channel.id(), &deleted).await?;
+ let channel = tx.channels().delete(&channel, &deleted).await?;
events.extend(
channel
.events()
@@ -85,7 +92,7 @@ impl<'a> Channels<'a> {
Ok(())
}
- pub async fn expire(&self, relative_to: &DateTime) -> Result<(), sqlx::Error> {
+ pub async fn expire(&self, relative_to: &DateTime) -> Result<(), ExpireError> {
// Somewhat arbitrarily, expire after 90 days.
let expire_at = relative_to.to_owned() - TimeDelta::days(90);
@@ -115,26 +122,80 @@ impl<'a> Channels<'a> {
Ok(())
}
+
+ pub async fn purge(&self, relative_to: &DateTime) -> Result<(), sqlx::Error> {
+ // Somewhat arbitrarily, purge after 7 days.
+ let purge_at = relative_to.to_owned() - TimeDelta::days(7);
+
+ let mut tx = self.db.begin().await?;
+ tx.channels().purge(&purge_at).await?;
+ tx.commit().await?;
+
+ Ok(())
+ }
+
+ pub async fn recanonicalize(&self) -> Result<(), sqlx::Error> {
+ let mut tx = self.db.begin().await?;
+ tx.channels().recanonicalize().await?;
+ tx.commit().await?;
+
+ Ok(())
+ }
}
#[derive(Debug, thiserror::Error)]
pub enum CreateError {
#[error("channel named {0} already exists")]
- DuplicateName(String),
+ DuplicateName(Name),
+ #[error(transparent)]
+ Database(#[from] sqlx::Error),
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Name(#[from] name::Error),
+}
+
+impl From<LoadError> for CreateError {
+ fn from(error: LoadError) -> Self {
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("channel {0} not found")]
NotFound(Id),
+ #[error("channel {0} deleted")]
+ Deleted(Id),
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Database(#[from] sqlx::Error),
+ #[error(transparent)]
+ Name(#[from] name::Error),
+}
+
+impl From<LoadError> for Error {
+ fn from(error: LoadError) -> Self {
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
}
#[derive(Debug, thiserror::Error)]
-pub enum InternalError {
+pub enum ExpireError {
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Database(#[from] sqlx::Error),
+ #[error(transparent)]
+ Name(#[from] name::Error),
+}
+
+impl From<LoadError> for ExpireError {
+ fn from(error: LoadError) -> Self {
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
}
diff --git a/src/channel/history.rs b/src/channel/history.rs
index 78b3437..4b9fcc7 100644
--- a/src/channel/history.rs
+++ b/src/channel/history.rs
@@ -31,6 +31,11 @@ impl History {
.filter(Sequence::up_to(resume_point.into()))
.collect()
}
+
+ // Snapshot of this channel as of all events recorded in this history.
+ pub fn as_snapshot(&self) -> Option<Channel> {
+ self.events().collect()
+ }
}
// Event factories
diff --git a/src/channel/repo.rs b/src/channel/repo.rs
index 2f57581..a49db52 100644
--- a/src/channel/repo.rs
+++ b/src/channel/repo.rs
@@ -1,9 +1,12 @@
+use futures::stream::{StreamExt as _, TryStreamExt as _};
use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
use crate::{
channel::{Channel, History, Id},
clock::DateTime,
+ db::NotFound,
event::{Instant, ResumePoint, Sequence},
+ name::{self, Name},
};
pub trait Provider {
@@ -19,130 +22,162 @@ impl<'c> Provider for Transaction<'c, Sqlite> {
pub struct Channels<'t>(&'t mut SqliteConnection);
impl<'c> Channels<'c> {
- pub async fn create(&mut self, name: &str, created: &Instant) -> Result<History, sqlx::Error> {
+ pub async fn create(&mut self, name: &Name, created: &Instant) -> Result<History, sqlx::Error> {
let id = Id::generate();
- let channel = sqlx::query!(
+ let name = name.clone();
+ let display_name = name.display();
+ let canonical_name = name.canonical();
+ let created = *created;
+
+ sqlx::query!(
r#"
insert
- into channel (id, name, created_at, created_sequence)
- values ($1, $2, $3, $4)
- returning
- id as "id: Id",
- name,
- created_at as "created_at: DateTime",
- created_sequence as "created_sequence: Sequence"
+ into channel (id, created_at, created_sequence)
+ values ($1, $2, $3)
"#,
id,
- name,
created.at,
created.sequence,
)
- .map(|row| History {
+ .execute(&mut *self.0)
+ .await?;
+
+ sqlx::query!(
+ r#"
+ insert into channel_name (id, display_name, canonical_name)
+ values ($1, $2, $3)
+ "#,
+ id,
+ display_name,
+ canonical_name,
+ )
+ .execute(&mut *self.0)
+ .await?;
+
+ let channel = History {
channel: Channel {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
+ id,
+ name: name.clone(),
+ deleted_at: None,
},
+ created,
deleted: None,
- })
- .fetch_one(&mut *self.0)
- .await?;
+ };
Ok(channel)
}
- pub async fn by_id(&mut self, channel: &Id) -> Result<History, sqlx::Error> {
+ pub async fn by_id(&mut self, channel: &Id) -> Result<History, LoadError> {
let channel = sqlx::query!(
r#"
select
id as "id: Id",
- name,
- created_at as "created_at: DateTime",
- created_sequence as "created_sequence: Sequence"
+ name.display_name as "display_name?: String",
+ name.canonical_name as "canonical_name?: String",
+ channel.created_at as "created_at: DateTime",
+ channel.created_sequence as "created_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from channel
+ left join channel_name as name
+ using (id)
+ left join channel_deleted as deleted
+ using (id)
where id = $1
"#,
channel,
)
- .map(|row| History {
- channel: Channel {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
- deleted: None,
+ .map(|row| {
+ Ok::<_, name::Error>(History {
+ channel: Channel {
+ id: row.id,
+ name: Name::optional(row.display_name, row.canonical_name)?.unwrap_or_default(),
+ deleted_at: row.deleted_at,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
+ })
})
.fetch_one(&mut *self.0)
- .await?;
+ .await??;
Ok(channel)
}
- pub async fn all(&mut self, resume_at: ResumePoint) -> Result<Vec<History>, sqlx::Error> {
+ pub async fn all(&mut self, resume_at: Sequence) -> Result<Vec<History>, LoadError> {
let channels = sqlx::query!(
r#"
select
id as "id: Id",
- name,
- created_at as "created_at: DateTime",
- created_sequence as "created_sequence: Sequence"
+ name.display_name as "display_name?: String",
+ name.canonical_name as "canonical_name?: String",
+ channel.created_at as "created_at: DateTime",
+ channel.created_sequence as "created_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from channel
- where coalesce(created_sequence <= $1, true)
- order by channel.name
+ left join channel_name as name
+ using (id)
+ left join channel_deleted as deleted
+ using (id)
+ where channel.created_sequence <= $1
+ order by name.canonical_name
"#,
resume_at,
)
- .map(|row| History {
- channel: Channel {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
- deleted: None,
+ .map(|row| {
+ Ok::<_, name::Error>(History {
+ channel: Channel {
+ id: row.id,
+ name: Name::optional(row.display_name, row.canonical_name)?.unwrap_or_default(),
+ deleted_at: row.deleted_at,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
+ })
})
- .fetch_all(&mut *self.0)
+ .fetch(&mut *self.0)
+ .map(|res| Ok::<_, LoadError>(res??))
+ .try_collect()
.await?;
Ok(channels)
}
- pub async fn replay(
- &mut self,
- resume_at: Option<Sequence>,
- ) -> Result<Vec<History>, sqlx::Error> {
+ pub async fn replay(&mut self, resume_at: ResumePoint) -> Result<Vec<History>, LoadError> {
let channels = sqlx::query!(
r#"
select
id as "id: Id",
- name,
- created_at as "created_at: DateTime",
- created_sequence as "created_sequence: Sequence"
+ name.display_name as "display_name: String",
+ name.canonical_name as "canonical_name: String",
+ channel.created_at as "created_at: DateTime",
+ channel.created_sequence as "created_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from channel
- where coalesce(created_sequence > $1, true)
+ left join channel_name as name
+ using (id)
+ left join channel_deleted as deleted
+ using (id)
+ where coalesce(channel.created_sequence > $1, true)
"#,
resume_at,
)
- .map(|row| History {
- channel: Channel {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
- deleted: None,
+ .map(|row| {
+ Ok::<_, name::Error>(History {
+ channel: Channel {
+ id: row.id,
+ name: Name::optional(row.display_name, row.canonical_name)?.unwrap_or_default(),
+ deleted_at: row.deleted_at,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
+ })
})
- .fetch_all(&mut *self.0)
+ .fetch(&mut *self.0)
+ .map(|res| Ok::<_, LoadError>(res??))
+ .try_collect()
.await?;
Ok(channels)
@@ -150,53 +185,171 @@ impl<'c> Channels<'c> {
pub async fn delete(
&mut self,
- channel: &Id,
+ channel: &History,
deleted: &Instant,
- ) -> Result<History, sqlx::Error> {
- let channel = sqlx::query!(
+ ) -> Result<History, LoadError> {
+ let id = channel.id();
+ sqlx::query!(
r#"
- delete from channel
+ insert into channel_deleted (id, deleted_at, deleted_sequence)
+ values ($1, $2, $3)
+ "#,
+ id,
+ deleted.at,
+ deleted.sequence,
+ )
+ .execute(&mut *self.0)
+ .await?;
+
+ // Small social responsibility hack here: when a channel is deleted, its name is
+ // retconned to have been the empty string. Someone reading the event stream
+ // afterwards, or looking at channels via the API, cannot retrieve the
+ // "deleted" channel's information by ignoring the deletion event.
+ //
+ // This also avoids the need for a separate name reservation table to ensure
+    // that live channels have unique names, since deleted channels no longer have
+    // a row in `channel_name`, which is where name uniqueness is enforced.
+ sqlx::query!(
+ r#"
+ delete from channel_name
where id = $1
- returning
- id as "id: Id",
- name,
- created_at as "created_at: DateTime",
- created_sequence as "created_sequence: Sequence"
"#,
- channel,
+ id,
)
- .map(|row| History {
- channel: Channel {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
- deleted: Some(*deleted),
- })
- .fetch_one(&mut *self.0)
+ .execute(&mut *self.0)
.await?;
+ let channel = self.by_id(id).await?;
+
Ok(channel)
}
- pub async fn expired(&mut self, expired_at: &DateTime) -> Result<Vec<Id>, sqlx::Error> {
+ pub async fn purge(&mut self, purge_at: &DateTime) -> Result<(), sqlx::Error> {
let channels = sqlx::query_scalar!(
r#"
+ with has_messages as (
+ select channel
+ from message
+ group by channel
+ )
+ delete from channel_deleted
+ where deleted_at < $1
+ and id not in has_messages
+ returning id as "id: Id"
+ "#,
+ purge_at,
+ )
+ .fetch_all(&mut *self.0)
+ .await?;
+
+ for channel in channels {
+ // Wanted: a way to batch these up into one query.
+ sqlx::query!(
+ r#"
+ delete from channel
+ where id = $1
+ "#,
+ channel,
+ )
+ .execute(&mut *self.0)
+ .await?;
+ }
+
+ Ok(())
+ }
+
+ pub async fn expired(&mut self, expired_at: &DateTime) -> Result<Vec<History>, LoadError> {
+ let channels = sqlx::query!(
+ r#"
select
- channel.id as "id: Id"
+ channel.id as "id: Id",
+ name.display_name as "display_name?: String",
+ name.canonical_name as "canonical_name?: String",
+ channel.created_at as "created_at: DateTime",
+ channel.created_sequence as "created_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from channel
- left join message
- where created_at < $1
+ left join channel_name as name
+ using (id)
+ left join channel_deleted as deleted
+ using (id)
+ left join message
+ on channel.id = message.channel
+ where channel.created_at < $1
and message.id is null
+ and deleted.id is null
"#,
expired_at,
)
- .fetch_all(&mut *self.0)
+ .map(|row| {
+ Ok::<_, name::Error>(History {
+ channel: Channel {
+ id: row.id,
+ name: Name::optional(row.display_name, row.canonical_name)?.unwrap_or_default(),
+ deleted_at: row.deleted_at,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
+ })
+ })
+ .fetch(&mut *self.0)
+ .map(|res| Ok::<_, LoadError>(res??))
+ .try_collect()
.await?;
Ok(channels)
}
+
+ pub async fn recanonicalize(&mut self) -> Result<(), sqlx::Error> {
+ let channels = sqlx::query!(
+ r#"
+ select
+ id as "id: Id",
+ display_name as "display_name: String"
+ from channel_name
+ "#,
+ )
+ .fetch_all(&mut *self.0)
+ .await?;
+
+ for channel in channels {
+ let name = Name::from(channel.display_name);
+ let canonical_name = name.canonical();
+
+ sqlx::query!(
+ r#"
+ update channel_name
+ set canonical_name = $1
+ where id = $2
+ "#,
+ canonical_name,
+ channel.id,
+ )
+ .execute(&mut *self.0)
+ .await?;
+ }
+
+ Ok(())
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum LoadError {
+ Database(#[from] sqlx::Error),
+ Name(#[from] name::Error),
+}
+
+impl<T> NotFound for Result<T, LoadError> {
+ type Ok = T;
+ type Error = LoadError;
+
+ fn optional(self) -> Result<Option<T>, LoadError> {
+ match self {
+ Ok(value) => Ok(Some(value)),
+ Err(LoadError::Database(sqlx::Error::RowNotFound)) => Ok(None),
+ Err(other) => Err(other),
+ }
+ }
}
diff --git a/src/channel/routes.rs b/src/channel/routes.rs
deleted file mode 100644
index eaf7962..0000000
--- a/src/channel/routes.rs
+++ /dev/null
@@ -1,121 +0,0 @@
-use axum::{
- extract::{Json, Path, State},
- http::StatusCode,
- response::{IntoResponse, Response},
- routing::{delete, post},
- Router,
-};
-
-use super::{app, Channel, Id};
-use crate::{
- app::App,
- clock::RequestedAt,
- error::{Internal, NotFound},
- login::Login,
- message::app::SendError,
-};
-
-#[cfg(test)]
-mod test;
-
-pub fn router() -> Router<App> {
- Router::new()
- .route("/api/channels", post(on_create))
- .route("/api/channels/:channel", post(on_send))
- .route("/api/channels/:channel", delete(on_delete))
-}
-
-#[derive(Clone, serde::Deserialize)]
-struct CreateRequest {
- name: String,
-}
-
-async fn on_create(
- State(app): State<App>,
- _: Login, // requires auth, but doesn't actually care who you are
- RequestedAt(created_at): RequestedAt,
- Json(form): Json<CreateRequest>,
-) -> Result<Json<Channel>, CreateError> {
- let channel = app
- .channels()
- .create(&form.name, &created_at)
- .await
- .map_err(CreateError)?;
-
- Ok(Json(channel))
-}
-
-#[derive(Debug)]
-struct CreateError(app::CreateError);
-
-impl IntoResponse for CreateError {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- duplicate @ app::CreateError::DuplicateName(_) => {
- (StatusCode::CONFLICT, duplicate.to_string()).into_response()
- }
- other => Internal::from(other).into_response(),
- }
- }
-}
-
-#[derive(Clone, serde::Deserialize)]
-struct SendRequest {
- body: String,
-}
-
-async fn on_send(
- State(app): State<App>,
- Path(channel): Path<Id>,
- RequestedAt(sent_at): RequestedAt,
- login: Login,
- Json(request): Json<SendRequest>,
-) -> Result<StatusCode, SendErrorResponse> {
- app.messages()
- .send(&channel, &login, &sent_at, &request.body)
- .await?;
-
- Ok(StatusCode::ACCEPTED)
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error(transparent)]
-struct SendErrorResponse(#[from] SendError);
-
-impl IntoResponse for SendErrorResponse {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- not_found @ SendError::ChannelNotFound(_) => NotFound(not_found).into_response(),
- other => Internal::from(other).into_response(),
- }
- }
-}
-
-async fn on_delete(
- State(app): State<App>,
- Path(channel): Path<Id>,
- RequestedAt(deleted_at): RequestedAt,
- _: Login,
-) -> Result<StatusCode, ErrorResponse> {
- app.channels().delete(&channel, &deleted_at).await?;
-
- Ok(StatusCode::ACCEPTED)
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error(transparent)]
-struct ErrorResponse(#[from] app::Error);
-
-impl IntoResponse for ErrorResponse {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- not_found @ app::Error::NotFound(_) => {
- (StatusCode::NOT_FOUND, not_found.to_string()).into_response()
- }
- other => Internal::from(other).into_response(),
- }
- }
-}
diff --git a/src/channel/routes/channel/delete.rs b/src/channel/routes/channel/delete.rs
new file mode 100644
index 0000000..91eb506
--- /dev/null
+++ b/src/channel/routes/channel/delete.rs
@@ -0,0 +1,39 @@
+use axum::{
+ extract::{Path, State},
+ http::StatusCode,
+ response::{IntoResponse, Response},
+};
+
+use crate::{
+ app::App,
+ channel::app,
+ clock::RequestedAt,
+ error::{Internal, NotFound},
+ token::extract::Identity,
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ Path(channel): Path<super::PathInfo>,
+ RequestedAt(deleted_at): RequestedAt,
+ _: Identity,
+) -> Result<StatusCode, Error> {
+ app.channels().delete(&channel, &deleted_at).await?;
+
+ Ok(StatusCode::ACCEPTED)
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct Error(#[from] pub app::Error);
+
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ let Self(error) = self;
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match error {
+ app::Error::NotFound(_) | app::Error::Deleted(_) => NotFound(error).into_response(),
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/channel/routes/channel/mod.rs b/src/channel/routes/channel/mod.rs
new file mode 100644
index 0000000..31a9142
--- /dev/null
+++ b/src/channel/routes/channel/mod.rs
@@ -0,0 +1,9 @@
+use crate::channel::Id;
+
+pub mod delete;
+pub mod post;
+
+#[cfg(test)]
+mod test;
+
+type PathInfo = Id;
diff --git a/src/channel/routes/channel/post.rs b/src/channel/routes/channel/post.rs
new file mode 100644
index 0000000..b51e691
--- /dev/null
+++ b/src/channel/routes/channel/post.rs
@@ -0,0 +1,58 @@
+use axum::{
+ extract::{Json, Path, State},
+ http::StatusCode,
+ response::{self, IntoResponse},
+};
+
+use crate::{
+ app::App,
+ clock::RequestedAt,
+ error::{Internal, NotFound},
+ message::{app::SendError, Body, Message},
+ token::extract::Identity,
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ Path(channel): Path<super::PathInfo>,
+ RequestedAt(sent_at): RequestedAt,
+ identity: Identity,
+ Json(request): Json<Request>,
+) -> Result<Response, Error> {
+ let message = app
+ .messages()
+ .send(&channel, &identity.login, &sent_at, &request.body)
+ .await?;
+
+ Ok(Response(message))
+}
+
+#[derive(serde::Deserialize)]
+pub struct Request {
+ pub body: Body,
+}
+
+#[derive(Debug)]
+pub struct Response(pub Message);
+
+impl IntoResponse for Response {
+ fn into_response(self) -> response::Response {
+ let Self(message) = self;
+ (StatusCode::ACCEPTED, Json(message)).into_response()
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct Error(#[from] pub SendError);
+
+impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
+ let Self(error) = self;
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match error {
+ SendError::ChannelNotFound(_) => NotFound(error).into_response(),
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/channel/routes/channel/test/delete.rs b/src/channel/routes/channel/test/delete.rs
new file mode 100644
index 0000000..e9af12f
--- /dev/null
+++ b/src/channel/routes/channel/test/delete.rs
@@ -0,0 +1,154 @@
+use axum::{
+ extract::{Path, State},
+ http::StatusCode,
+};
+
+use crate::{
+ channel::{app, routes::channel::delete},
+ test::fixtures,
+};
+
+#[tokio::test]
+pub async fn delete_channel() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let response = delete::handler(
+ State(app.clone()),
+ Path(channel.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect("deleting a valid channel succeeds");
+
+ // Verify the response
+
+ assert_eq!(response, StatusCode::ACCEPTED);
+
+ // Verify the semantics
+
+ let snapshot = app.boot().snapshot().await.expect("boot always succeeds");
+ assert!(!snapshot.channels.contains(&channel));
+}
+
+#[tokio::test]
+pub async fn delete_invalid_channel_id() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::fictitious();
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(channel.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting a nonexistent channel fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::Error::NotFound(id) if id == channel));
+}
+
+#[tokio::test]
+pub async fn delete_deleted() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+
+ app.channels()
+ .delete(&channel.id, &fixtures::now())
+ .await
+ .expect("deleting a recently-sent channel succeeds");
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(channel.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting a deleted channel fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::Error::Deleted(id) if id == channel.id));
+}
+
+#[tokio::test]
+pub async fn delete_expired() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let channel = fixtures::channel::create(&app, &fixtures::ancient()).await;
+
+ app.channels()
+ .expire(&fixtures::now())
+ .await
+ .expect("expiring channels always succeeds");
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(channel.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting an expired channel fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::Error::Deleted(id) if id == channel.id));
+}
+
+#[tokio::test]
+pub async fn delete_purged() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let channel = fixtures::channel::create(&app, &fixtures::ancient()).await;
+
+ app.channels()
+ .expire(&fixtures::old())
+ .await
+ .expect("expiring channels always succeeds");
+
+ app.channels()
+ .purge(&fixtures::now())
+ .await
+ .expect("purging channels always succeeds");
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(channel.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting a purged channel fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::Error::NotFound(id) if id == channel.id));
+}
diff --git a/src/channel/routes/channel/test/mod.rs b/src/channel/routes/channel/test/mod.rs
new file mode 100644
index 0000000..78bf86e
--- /dev/null
+++ b/src/channel/routes/channel/test/mod.rs
@@ -0,0 +1,2 @@
+mod delete;
+mod post;
diff --git a/src/channel/routes/test/on_send.rs b/src/channel/routes/channel/test/post.rs
index 293cc56..67e7d36 100644
--- a/src/channel/routes/test/on_send.rs
+++ b/src/channel/routes/channel/test/post.rs
@@ -2,9 +2,8 @@ use axum::extract::{Json, Path, State};
use futures::stream::StreamExt;
use crate::{
- channel,
- channel::routes,
- event::{self, Sequenced},
+ channel::{self, routes::channel::post},
+ event::Sequenced,
message::{self, app::SendError},
test::fixtures::{self, future::Immediately as _},
};
@@ -14,7 +13,7 @@ async fn messages_in_order() {
// Set up the environment
let app = fixtures::scratch_app().await;
- let sender = fixtures::login::create(&app, &fixtures::now()).await;
+ let sender = fixtures::identity::create(&app, &fixtures::now()).await;
let channel = fixtures::channel::create(&app, &fixtures::now()).await;
// Call the endpoint (twice)
@@ -25,17 +24,17 @@ async fn messages_in_order() {
];
for (sent_at, body) in &requests {
- let request = routes::SendRequest { body: body.clone() };
+ let request = post::Request { body: body.clone() };
- routes::on_send(
+ let _ = post::handler(
State(app.clone()),
Path(channel.id.clone()),
sent_at.clone(),
sender.clone(),
- Json(request.clone()),
+ Json(request),
)
.await
- .expect("sending to a valid channel");
+ .expect("sending to a valid channel succeeds");
}
// Verify the semantics
@@ -44,8 +43,8 @@ async fn messages_in_order() {
.events()
.subscribe(None)
.await
- .expect("subscribing to a valid channel")
- .filter(fixtures::filter::messages())
+ .expect("subscribing to a valid channel succeeds")
+ .filter_map(fixtures::message::events)
.take(requests.len());
let events = events.collect::<Vec<_>>().immediately().await;
@@ -54,8 +53,8 @@ async fn messages_in_order() {
assert_eq!(*sent_at, event.at());
assert!(matches!(
event,
- event::Event::Message(message::Event::Sent(event))
- if event.message.sender == sender.id
+ message::Event::Sent(event)
+ if event.message.sender == sender.login.id
&& event.message.body == message
));
}
@@ -66,24 +65,62 @@ async fn nonexistent_channel() {
// Set up the environment
let app = fixtures::scratch_app().await;
- let login = fixtures::login::create(&app, &fixtures::now()).await;
+ let sender = fixtures::identity::create(&app, &fixtures::now()).await;
// Call the endpoint
let sent_at = fixtures::now();
let channel = channel::Id::generate();
- let request = routes::SendRequest {
+ let request = post::Request {
body: fixtures::message::propose(),
};
- let routes::SendErrorResponse(error) = routes::on_send(
+ let post::Error(error) = post::handler(
State(app),
Path(channel.clone()),
sent_at,
- login,
+ sender,
Json(request),
)
.await
- .expect_err("sending to a nonexistent channel");
+ .expect_err("sending to a nonexistent channel fails");
+
+ // Verify the structure of the response
+
+ assert!(matches!(
+ error,
+ SendError::ChannelNotFound(error_channel) if channel == error_channel
+ ));
+}
+
+#[tokio::test]
+async fn deleted_channel() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::identity::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+
+ app.channels()
+ .delete(&channel.id, &fixtures::now())
+ .await
+ .expect("deleting a new channel succeeds");
+
+ // Call the endpoint
+
+ let sent_at = fixtures::now();
+    let channel = channel.id.clone();
+ let request = post::Request {
+ body: fixtures::message::propose(),
+ };
+ let post::Error(error) = post::handler(
+ State(app),
+ Path(channel.clone()),
+ sent_at,
+ sender,
+ Json(request),
+ )
+ .await
+ .expect_err("sending to a deleted channel fails");
// Verify the structure of the response
diff --git a/src/channel/routes/mod.rs b/src/channel/routes/mod.rs
new file mode 100644
index 0000000..696bd72
--- /dev/null
+++ b/src/channel/routes/mod.rs
@@ -0,0 +1,19 @@
+use axum::{
+ routing::{delete, post},
+ Router,
+};
+
+use crate::app::App;
+
+mod channel;
+mod post;
+
+#[cfg(test)]
+mod test;
+
+pub fn router() -> Router<App> {
+ Router::new()
+ .route("/api/channels", post(post::handler))
+ .route("/api/channels/:channel", post(channel::post::handler))
+ .route("/api/channels/:channel", delete(channel::delete::handler))
+}
diff --git a/src/channel/routes/post.rs b/src/channel/routes/post.rs
new file mode 100644
index 0000000..810445c
--- /dev/null
+++ b/src/channel/routes/post.rs
@@ -0,0 +1,60 @@
+use axum::{
+ extract::{Json, State},
+ http::StatusCode,
+ response::{self, IntoResponse},
+};
+
+use crate::{
+ app::App,
+ channel::{app, Channel},
+ clock::RequestedAt,
+ error::Internal,
+ name::Name,
+ token::extract::Identity,
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ _: Identity, // requires auth, but doesn't actually care who you are
+ RequestedAt(created_at): RequestedAt,
+ Json(request): Json<Request>,
+) -> Result<Response, Error> {
+ let channel = app
+ .channels()
+ .create(&request.name, &created_at)
+ .await
+ .map_err(Error)?;
+
+ Ok(Response(channel))
+}
+
+#[derive(serde::Deserialize)]
+pub struct Request {
+ pub name: Name,
+}
+
+#[derive(Debug)]
+pub struct Response(pub Channel);
+
+impl IntoResponse for Response {
+ fn into_response(self) -> response::Response {
+ let Self(channel) = self;
+ (StatusCode::ACCEPTED, Json(channel)).into_response()
+ }
+}
+
+#[derive(Debug)]
+pub struct Error(pub app::CreateError);
+
+impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
+ let Self(error) = self;
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match error {
+ app::CreateError::DuplicateName(_) => {
+ (StatusCode::CONFLICT, error.to_string()).into_response()
+ }
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/channel/routes/test.rs b/src/channel/routes/test.rs
new file mode 100644
index 0000000..216eba1
--- /dev/null
+++ b/src/channel/routes/test.rs
@@ -0,0 +1,221 @@
+use std::future;
+
+use axum::extract::{Json, State};
+use futures::stream::StreamExt as _;
+
+use super::post;
+use crate::{
+ channel::app,
+ name::Name,
+ test::fixtures::{self, future::Immediately as _},
+};
+
+#[tokio::test]
+async fn new_channel() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let creator = fixtures::identity::create(&app, &fixtures::now()).await;
+
+ // Call the endpoint
+
+ let name = fixtures::channel::propose();
+ let request = post::Request { name: name.clone() };
+ let post::Response(response) =
+ post::handler(State(app.clone()), creator, fixtures::now(), Json(request))
+ .await
+ .expect("creating a channel in an empty app succeeds");
+
+ // Verify the structure of the response
+
+ assert_eq!(name, response.name);
+
+ // Verify the semantics
+
+ let snapshot = app.boot().snapshot().await.expect("boot always succeeds");
+ assert!(snapshot.channels.iter().any(|channel| channel == &response));
+
+ let channel = app
+ .channels()
+ .get(&response.id)
+ .await
+ .expect("searching for channels by ID never fails")
+ .expect("the newly-created channel exists");
+ assert_eq!(response, channel);
+
+ let mut events = app
+ .events()
+ .subscribe(None)
+ .await
+ .expect("subscribing never fails")
+ .filter_map(fixtures::channel::events)
+ .filter_map(fixtures::channel::created)
+ .filter(|event| future::ready(event.channel == response));
+
+ let event = events
+ .next()
+ .immediately()
+ .await
+ .expect("creation event published");
+
+ assert_eq!(event.channel, response);
+}
+
+#[tokio::test]
+async fn duplicate_name() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let creator = fixtures::identity::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+
+ // Call the endpoint
+
+ let request = post::Request {
+ name: channel.name.clone(),
+ };
+ let post::Error(error) =
+ post::handler(State(app.clone()), creator, fixtures::now(), Json(request))
+ .await
+ .expect_err("duplicate channel name should fail the request");
+
+ // Verify the structure of the response
+
+ assert!(matches!(
+ error,
+ app::CreateError::DuplicateName(name) if channel.name == name
+ ));
+}
+
+#[tokio::test]
+async fn conflicting_canonical_name() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let creator = fixtures::identity::create(&app, &fixtures::now()).await;
+
+ let existing_name = Name::from("rijksmuseum");
+ app.channels()
+ .create(&existing_name, &fixtures::now())
+ .await
+ .expect("creating a channel in an empty environment succeeds");
+
+ let conflicting_name = Name::from("r\u{0133}ksmuseum");
+
+ // Call the endpoint
+
+ let request = post::Request {
+ name: conflicting_name.clone(),
+ };
+ let post::Error(error) =
+ post::handler(State(app.clone()), creator, fixtures::now(), Json(request))
+ .await
+ .expect_err("duplicate channel name should fail the request");
+
+ // Verify the structure of the response
+
+ assert!(matches!(
+ error,
+ app::CreateError::DuplicateName(name) if conflicting_name == name
+ ));
+}
+
+#[tokio::test]
+async fn name_reusable_after_delete() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let creator = fixtures::identity::create(&app, &fixtures::now()).await;
+ let name = fixtures::channel::propose();
+
+ // Call the endpoint (first time)
+
+ let request = post::Request { name: name.clone() };
+ let post::Response(response) = post::handler(
+ State(app.clone()),
+ creator.clone(),
+ fixtures::now(),
+ Json(request),
+ )
+ .await
+ .expect("new channel in an empty app");
+
+ // Delete the channel
+
+ app.channels()
+ .delete(&response.id, &fixtures::now())
+ .await
+ .expect("deleting a newly-created channel succeeds");
+
+ // Call the endpoint (second time)
+
+ let request = post::Request { name: name.clone() };
+ let post::Response(response) =
+ post::handler(State(app.clone()), creator, fixtures::now(), Json(request))
+ .await
+ .expect("creation succeeds after original channel deleted");
+
+ // Verify the structure of the response
+
+ assert_eq!(name, response.name);
+
+ // Verify the semantics
+
+ let channel = app
+ .channels()
+ .get(&response.id)
+ .await
+ .expect("searching for channels by ID never fails")
+ .expect("the newly-created channel exists");
+ assert_eq!(response, channel);
+}
+
+#[tokio::test]
+async fn name_reusable_after_expiry() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let creator = fixtures::identity::create(&app, &fixtures::ancient()).await;
+ let name = fixtures::channel::propose();
+
+ // Call the endpoint (first time)
+
+ let request = post::Request { name: name.clone() };
+ let post::Response(_) = post::handler(
+ State(app.clone()),
+ creator.clone(),
+ fixtures::ancient(),
+ Json(request),
+ )
+ .await
+ .expect("new channel in an empty app");
+
+    // Expire the channel
+
+ app.channels()
+ .expire(&fixtures::now())
+ .await
+ .expect("expiry always succeeds");
+
+ // Call the endpoint (second time)
+
+ let request = post::Request { name: name.clone() };
+ let post::Response(response) =
+ post::handler(State(app.clone()), creator, fixtures::now(), Json(request))
+ .await
+ .expect("creation succeeds after original channel expired");
+
+ // Verify the structure of the response
+
+ assert_eq!(name, response.name);
+
+ // Verify the semantics
+
+ let channel = app
+ .channels()
+ .get(&response.id)
+ .await
+ .expect("searching for channels by ID never fails")
+ .expect("the newly-created channel exists");
+ assert_eq!(response, channel);
+}
diff --git a/src/channel/routes/test/mod.rs b/src/channel/routes/test/mod.rs
deleted file mode 100644
index 3e5aa17..0000000
--- a/src/channel/routes/test/mod.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-mod on_create;
-mod on_send;
diff --git a/src/channel/routes/test/on_create.rs b/src/channel/routes/test/on_create.rs
deleted file mode 100644
index eeecc7f..0000000
--- a/src/channel/routes/test/on_create.rs
+++ /dev/null
@@ -1,88 +0,0 @@
-use axum::extract::{Json, State};
-use futures::stream::StreamExt as _;
-
-use crate::{
- channel::{self, app, routes},
- event,
- test::fixtures::{self, future::Immediately as _},
-};
-
-#[tokio::test]
-async fn new_channel() {
- // Set up the environment
-
- let app = fixtures::scratch_app().await;
- let creator = fixtures::login::create(&app, &fixtures::now()).await;
-
- // Call the endpoint
-
- let name = fixtures::channel::propose();
- let request = routes::CreateRequest { name };
- let Json(response_channel) = routes::on_create(
- State(app.clone()),
- creator,
- fixtures::now(),
- Json(request.clone()),
- )
- .await
- .expect("new channel in an empty app");
-
- // Verify the structure of the response
-
- assert_eq!(request.name, response_channel.name);
-
- // Verify the semantics
-
- let snapshot = app.boot().snapshot().await.expect("boot always succeeds");
- assert!(snapshot
- .channels
- .iter()
- .any(|channel| channel.name == response_channel.name && channel.id == response_channel.id));
-
- let mut events = app
- .events()
- .subscribe(None)
- .await
- .expect("subscribing never fails")
- .filter(fixtures::filter::created());
-
- let event = events
- .next()
- .immediately()
- .await
- .expect("creation event published");
-
- assert!(matches!(
- event,
- event::Event::Channel(channel::Event::Created(event))
- if event.channel == response_channel
- ));
-}
-
-#[tokio::test]
-async fn duplicate_name() {
- // Set up the environment
-
- let app = fixtures::scratch_app().await;
- let creator = fixtures::login::create(&app, &fixtures::now()).await;
- let channel = fixtures::channel::create(&app, &fixtures::now()).await;
-
- // Call the endpoint
-
- let request = routes::CreateRequest { name: channel.name };
- let routes::CreateError(error) = routes::on_create(
- State(app.clone()),
- creator,
- fixtures::now(),
- Json(request.clone()),
- )
- .await
- .expect_err("duplicate channel name");
-
- // Verify the structure of the response
-
- assert!(matches!(
- error,
- app::CreateError::DuplicateName(name) if request.name == name
- ));
-}
diff --git a/src/channel/snapshot.rs b/src/channel/snapshot.rs
index d4d1d27..129c0d6 100644
--- a/src/channel/snapshot.rs
+++ b/src/channel/snapshot.rs
@@ -2,11 +2,14 @@ use super::{
event::{Created, Event},
Id,
};
+use crate::{clock::DateTime, name::Name};
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize)]
pub struct Channel {
pub id: Id,
- pub name: String,
+ pub name: Name,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub deleted_at: Option<DateTime>,
}
impl Channel {
diff --git a/src/cli.rs b/src/cli/mod.rs
index ade61ae..c75ce2b 100644
--- a/src/cli.rs
+++ b/src/cli/mod.rs
@@ -22,6 +22,8 @@ use crate::{
ui,
};
+pub mod recanonicalize;
+
/// Command-line entry point for running the `hi` server.
///
/// This is intended to be used as a Clap [Parser], to capture command-line
@@ -164,7 +166,7 @@ fn started_msg(listener: &net::TcpListener) -> io::Result<String> {
#[error(transparent)]
pub enum Error {
/// Failure due to `io::Error`. See [`io::Error`].
- IoError(#[from] io::Error),
+ Io(#[from] io::Error),
/// Failure due to a database initialization error. See [`db::Error`].
Database(#[from] db::Error),
}
diff --git a/src/cli/recanonicalize.rs b/src/cli/recanonicalize.rs
new file mode 100644
index 0000000..9db5b77
--- /dev/null
+++ b/src/cli/recanonicalize.rs
@@ -0,0 +1,86 @@
+use sqlx::sqlite::SqlitePool;
+
+use crate::{app::App, db};
+
+/// Command-line entry point for repairing canonical names in the `hi` database.
+/// This command may be necessary after an upgrade, if the canonical forms of
+/// names have changed. It will recalculate the canonical form of each name in
+/// the database, based on its display form, and store the results back to the
+/// database.
+///
+/// This is intended to be used as a Clap [Parser], to capture command-line
+/// arguments for the `hi-recanonicalize` command:
+///
+/// ```no_run
+/// # use hi::cli::recanonicalize::Error;
+/// #
+/// # #[tokio::main]
+/// # async fn main() -> Result<(), Error> {
+/// use clap::Parser;
+/// use hi::cli::recanonicalize::Args;
+///
+/// let args = Args::parse();
+/// args.run().await?;
+/// # Ok(())
+/// # }
+/// ```
+#[derive(clap::Parser)]
+#[command(
+ version,
+ about = "Recanonicalize names in the `hi` database.",
+ long_about = r#"Recanonicalize names in the `hi` database.
+
+The `hi` server must not be running while this command is run.
+
+The database at `--database-url` will also be created, or upgraded, automatically."#
+)]
+pub struct Args {
+ /// Sqlite URL or path for the `hi` database
+ #[arg(short, long, env, default_value = "sqlite://.hi")]
+ database_url: String,
+
+ /// Sqlite URL or path for a backup of the `hi` database during upgrades
+ #[arg(short = 'D', long, env, default_value = "sqlite://.hi.backup")]
+ backup_database_url: String,
+}
+
+impl Args {
+    /// Recanonicalizes the `hi` database, using the parsed configuration in
+ /// `self`.
+ ///
+ /// This will perform the following tasks:
+ ///
+ /// * Migrate the `hi` database (at `--database-url`).
+ /// * Recanonicalize names in the `login` and `channel` tables.
+ ///
+ /// # Errors
+ ///
+ /// Will return `Err` if the canonicalization or database upgrade processes
+ /// fail. The specific [`Error`] variant will expose the cause
+ /// of the failure.
+ pub async fn run(self) -> Result<(), Error> {
+ let pool = self.pool().await?;
+
+ let app = App::from(pool);
+ app.logins().recanonicalize().await?;
+ app.channels().recanonicalize().await?;
+
+ Ok(())
+ }
+
+ async fn pool(&self) -> Result<SqlitePool, db::Error> {
+ db::prepare(&self.database_url, &self.backup_database_url).await
+ }
+}
+
+/// Errors that can be raised by [`Args::run`].
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum Error {
+ // /// Failure due to `io::Error`. See [`io::Error`].
+ // Io(#[from] io::Error),
+ /// Failure due to a database initialization error. See [`db::Error`].
+ Database(#[from] db::Error),
+ /// Failure due to a data manipulation error. See [`sqlx::Error`].
+ Sqlx(#[from] sqlx::Error),
+}
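A note on the pass this new command drives: `Logins::recanonicalize` and its channel counterpart (both invoked above, with the login one defined later in this patch) reread each row's display name, rederive the canonical form, and write it back, so the pass is idempotent and safe to rerun. The crate's `Name` type lives in src/name.rs and is not shown here; the sketch below only illustrates the general idea, assuming canonicalization amounts to NFC normalization plus case folding via the `unicode-normalization` crate:

```rust
// Illustrative only; the real derivation lives in `Name::canonical`.
use unicode_normalization::UnicodeNormalization as _; // assumed dependency

fn canonicalize(display: &str) -> String {
    // NFC-normalize, then case-fold, so visually identical names collapse
    // onto a single canonical key.
    display.nfc().collect::<String>().to_lowercase()
}

fn main() {
    // "é" as one codepoint vs. "e" followed by a combining accent.
    assert_eq!(canonicalize("Am\u{e9}lie"), canonicalize("Ame\u{301}lie"));
}
```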
diff --git a/src/db/mod.rs b/src/db/mod.rs
index 6005813..e0522d4 100644
--- a/src/db/mod.rs
+++ b/src/db/mod.rs
@@ -130,14 +130,17 @@ pub enum Error {
Rejected(String, String),
}
-pub trait NotFound {
+pub trait NotFound: Sized {
type Ok;
type Error;
fn not_found<E, F>(self, map: F) -> Result<Self::Ok, E>
where
E: From<Self::Error>,
- F: FnOnce() -> E;
+ F: FnOnce() -> E,
+ {
+ self.optional()?.ok_or_else(map)
+ }
fn optional(self) -> Result<Option<Self::Ok>, Self::Error>;
}
@@ -153,14 +156,6 @@ impl<T> NotFound for Result<T, sqlx::Error> {
Err(other) => Err(other),
}
}
-
- fn not_found<E, F>(self, map: F) -> Result<T, E>
- where
- E: From<sqlx::Error>,
- F: FnOnce() -> E,
- {
- self.optional()?.ok_or_else(map)
- }
}
pub trait Duplicate {
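On the `NotFound` change above: `not_found` becomes a default method built on `optional`, so each implementation only has to say which error means "no row". A self-contained sketch of the same shape, with toy error types standing in for `sqlx::Error` and the caller's domain error:

```rust
#[derive(Debug)]
enum DbError {
    RowNotFound,
    Other(String),
}

#[derive(Debug)]
enum AppError {
    MissingUser,
    Db(DbError),
}

impl From<DbError> for AppError {
    fn from(error: DbError) -> Self {
        AppError::Db(error)
    }
}

trait NotFound: Sized {
    type Ok;
    type Error;

    // Default method: map "no row" into a domain error chosen by the caller.
    fn not_found<E, F>(self, map: F) -> Result<Self::Ok, E>
    where
        E: From<Self::Error>,
        F: FnOnce() -> E,
    {
        self.optional()?.ok_or_else(map)
    }

    fn optional(self) -> Result<Option<Self::Ok>, Self::Error>;
}

impl<T> NotFound for Result<T, DbError> {
    type Ok = T;
    type Error = DbError;

    fn optional(self) -> Result<Option<T>, DbError> {
        match self {
            Ok(value) => Ok(Some(value)),
            Err(DbError::RowNotFound) => Ok(None),
            Err(other) => Err(other),
        }
    }
}

fn main() {
    let missing: Result<u32, DbError> = Err(DbError::RowNotFound);
    let mapped = missing.not_found(|| AppError::MissingUser);
    assert!(matches!(mapped, Err(AppError::MissingUser)));
}
```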
diff --git a/src/error.rs b/src/error.rs
index 85573d4..f3399c6 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -28,15 +28,20 @@ where
}
}
+impl fmt::Display for Internal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let Self(id, _) = self;
+ writeln!(f, "internal server error")?;
+ writeln!(f, "error id: {id}")?;
+ Ok(())
+ }
+}
+
impl IntoResponse for Internal {
fn into_response(self) -> Response {
- let Self(id, error) = self;
+ let Self(id, error) = &self;
eprintln!("hi: [{id}] {error}");
- (
- StatusCode::INTERNAL_SERVER_ERROR,
- format!("internal server error\nerror id: {id}"),
- )
- .into_response()
+ (StatusCode::INTERNAL_SERVER_ERROR, self.to_string()).into_response()
}
}
diff --git a/src/event/app.rs b/src/event/app.rs
index 951ce25..c754388 100644
--- a/src/event/app.rs
+++ b/src/event/app.rs
@@ -11,6 +11,7 @@ use crate::{
channel::{self, repo::Provider as _},
login::{self, repo::Provider as _},
message::{self, repo::Provider as _},
+ name,
};
pub struct Events<'a> {
@@ -26,7 +27,7 @@ impl<'a> Events<'a> {
pub async fn subscribe(
&self,
resume_at: impl Into<ResumePoint>,
- ) -> Result<impl Stream<Item = Event> + std::fmt::Debug, sqlx::Error> {
+ ) -> Result<impl Stream<Item = Event> + std::fmt::Debug, Error> {
let resume_at = resume_at.into();
// Subscribe before retrieving, to catch messages broadcast while we're
// querying the DB. We'll prune out duplicates later.
@@ -81,3 +82,30 @@ impl<'a> Events<'a> {
move |event| future::ready(filter(event))
}
}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum Error {
+ Database(#[from] sqlx::Error),
+ Name(#[from] name::Error),
+}
+
+impl From<login::repo::LoadError> for Error {
+ fn from(error: login::repo::LoadError) -> Self {
+ use login::repo::LoadError;
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
+}
+
+impl From<channel::repo::LoadError> for Error {
+ fn from(error: channel::repo::LoadError) -> Self {
+ use channel::repo::LoadError;
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
+}
diff --git a/src/event/repo.rs b/src/event/repo.rs
index 40d6a53..56beeea 100644
--- a/src/event/repo.rs
+++ b/src/event/repo.rs
@@ -29,10 +29,7 @@ impl<'c> Sequences<'c> {
.fetch_one(&mut *self.0)
.await?;
- Ok(Instant {
- at: *at,
- sequence: next,
- })
+ Ok(Instant::new(*at, next))
}
pub async fn current(&mut self) -> Result<Sequence, sqlx::Error> {
diff --git a/src/event/routes.rs b/src/event/routes/get.rs
index de6d248..22e8762 100644
--- a/src/event/routes.rs
+++ b/src/event/routes/get.rs
@@ -1,41 +1,27 @@
use axum::{
extract::State,
response::{
+ self,
sse::{self, Sse},
- IntoResponse, Response,
+ IntoResponse,
},
- routing::get,
- Router,
};
use axum_extra::extract::Query;
use futures::stream::{Stream, StreamExt as _};
-use super::{extract::LastEventId, Event};
use crate::{
app::App,
error::{Internal, Unauthorized},
- event::{ResumePoint, Sequence, Sequenced as _},
+ event::{app, extract::LastEventId, Event, ResumePoint, Sequence, Sequenced as _},
token::{app::ValidateError, extract::Identity},
};
-#[cfg(test)]
-mod test;
-
-pub fn router() -> Router<App> {
- Router::new().route("/api/events", get(events))
-}
-
-#[derive(Default, serde::Deserialize)]
-struct EventsQuery {
- resume_point: ResumePoint,
-}
-
-async fn events(
+pub async fn handler(
State(app): State<App>,
identity: Identity,
last_event_id: Option<LastEventId<Sequence>>,
- Query(query): Query<EventsQuery>,
-) -> Result<Events<impl Stream<Item = Event> + std::fmt::Debug>, EventsError> {
+ Query(query): Query<QueryParams>,
+) -> Result<Response<impl Stream<Item = Event> + std::fmt::Debug>, Error> {
let resume_at = last_event_id
.map(LastEventId::into_inner)
.or(query.resume_point);
@@ -43,17 +29,22 @@ async fn events(
let stream = app.events().subscribe(resume_at).await?;
let stream = app.tokens().limit_stream(identity.token, stream).await?;
- Ok(Events(stream))
+ Ok(Response(stream))
+}
+
+#[derive(Default, serde::Deserialize)]
+pub struct QueryParams {
+ pub resume_point: ResumePoint,
}
#[derive(Debug)]
-struct Events<S>(S);
+pub struct Response<S>(pub S);
-impl<S> IntoResponse for Events<S>
+impl<S> IntoResponse for Response<S>
where
S: Stream<Item = Event> + Send + 'static,
{
- fn into_response(self) -> Response {
+ fn into_response(self) -> response::Response {
let Self(stream) = self;
let stream = stream.map(sse::Event::try_from);
Sse::new(stream)
@@ -77,15 +68,15 @@ impl TryFrom<Event> for sse::Event {
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
-pub enum EventsError {
- DatabaseError(#[from] sqlx::Error),
- ValidateError(#[from] ValidateError),
+pub enum Error {
+ Subscribe(#[from] app::Error),
+ Validate(#[from] ValidateError),
}
-impl IntoResponse for EventsError {
- fn into_response(self) -> Response {
+impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
match self {
- Self::ValidateError(ValidateError::InvalidToken) => Unauthorized.into_response(),
+ Self::Validate(ValidateError::InvalidToken) => Unauthorized.into_response(),
other => Internal::from(other).into_response(),
}
}
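For readers unfamiliar with the `Sse` plumbing this handler leans on: axum will stream any `Stream` of `sse::Event` results as a server-sent-events response. A minimal standalone sketch, with an illustrative route and payload, assuming an axum version that exposes the `response::sse` module used above:

```rust
use std::convert::Infallible;

use axum::{
    response::sse::{Event, KeepAlive, Sse},
    routing::get,
    Router,
};
use futures::stream::{self, Stream};

// A handler returning a short, finite SSE stream; the real handler feeds the
// response from the event subscription instead.
async fn demo() -> Sse<impl Stream<Item = Result<Event, Infallible>>> {
    let stream = stream::iter([Ok::<_, Infallible>(
        Event::default().event("message-sent").data("hello"),
    )]);
    Sse::new(stream).keep_alive(KeepAlive::default())
}

fn main() {
    // Wiring only; actually serving this requires a runtime and server setup.
    let _app: Router = Router::new().route("/api/events", get(demo));
}
```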
diff --git a/src/event/routes/mod.rs b/src/event/routes/mod.rs
new file mode 100644
index 0000000..57ab9db
--- /dev/null
+++ b/src/event/routes/mod.rs
@@ -0,0 +1,11 @@
+use axum::{routing::get, Router};
+
+use crate::app::App;
+
+mod get;
+#[cfg(test)]
+mod test;
+
+pub fn router() -> Router<App> {
+ Router::new().route("/api/events", get(get::handler))
+}
diff --git a/src/event/routes/test.rs b/src/event/routes/test.rs
index 209a016..49f8094 100644
--- a/src/event/routes/test.rs
+++ b/src/event/routes/test.rs
@@ -5,8 +5,9 @@ use futures::{
stream::{self, StreamExt as _},
};
+use super::get;
use crate::{
- event::{routes, Sequenced as _},
+ event::Sequenced as _,
test::fixtures::{self, future::Immediately as _},
};
@@ -21,16 +22,15 @@ async fn includes_historical_message() {
// Call the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber = fixtures::identity::identity(&app, &subscriber_creds, &fixtures::now()).await;
- let routes::Events(events) = routes::events(State(app), subscriber, None, Query::default())
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
+ let get::Response(events) = get::handler(State(app), subscriber, None, Query::default())
.await
.expect("subscribe never fails");
// Verify the structure of the response.
let event = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.next()
.immediately()
.await
@@ -48,10 +48,9 @@ async fn includes_live_message() {
// Call the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber = fixtures::identity::identity(&app, &subscriber_creds, &fixtures::now()).await;
- let routes::Events(events) =
- routes::events(State(app.clone()), subscriber, None, Query::default())
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
+ let get::Response(events) =
+ get::handler(State(app.clone()), subscriber, None, Query::default())
.await
.expect("subscribe never fails");
@@ -61,7 +60,7 @@ async fn includes_live_message() {
let message = fixtures::message::send(&app, &channel, &sender, &fixtures::now()).await;
let event = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.next()
.immediately()
.await
@@ -94,16 +93,15 @@ async fn includes_multiple_channels() {
// Call the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber = fixtures::identity::identity(&app, &subscriber_creds, &fixtures::now()).await;
- let routes::Events(events) = routes::events(State(app), subscriber, None, Query::default())
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
+ let get::Response(events) = get::handler(State(app), subscriber, None, Query::default())
.await
.expect("subscribe never fails");
// Verify the structure of the response.
let events = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.take(messages.len())
.collect::<Vec<_>>()
.immediately()
@@ -132,21 +130,22 @@ async fn sequential_messages() {
// Call the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber = fixtures::identity::identity(&app, &subscriber_creds, &fixtures::now()).await;
- let routes::Events(events) = routes::events(State(app), subscriber, None, Query::default())
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
+ let get::Response(events) = get::handler(State(app), subscriber, None, Query::default())
.await
.expect("subscribe never fails");
// Verify the structure of the response.
- let mut events = events.filter(|event| {
- future::ready(
- messages
- .iter()
- .any(|message| fixtures::event::message_sent(event, message)),
- )
- });
+ let mut events = events
+ .filter_map(fixtures::message::events)
+ .filter(|event| {
+ future::ready(
+ messages
+ .iter()
+ .any(|message| fixtures::event::message_sent(event, message)),
+ )
+ });
// Verify delivery in order
for message in &messages {
@@ -177,12 +176,11 @@ async fn resumes_from() {
// Call the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber = fixtures::identity::identity(&app, &subscriber_creds, &fixtures::now()).await;
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
let resume_at = {
// First subscription
- let routes::Events(events) = routes::events(
+ let get::Response(events) = get::handler(
State(app.clone()),
subscriber.clone(),
None,
@@ -192,7 +190,7 @@ async fn resumes_from() {
.expect("subscribe never fails");
let event = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.next()
.immediately()
.await
@@ -204,7 +202,7 @@ async fn resumes_from() {
};
// Resume after disconnect
- let routes::Events(resumed) = routes::events(
+ let get::Response(resumed) = get::handler(
State(app),
subscriber,
Some(resume_at.into()),
@@ -216,6 +214,7 @@ async fn resumes_from() {
// Verify the structure of the response.
let events = resumed
+ .filter_map(fixtures::message::events)
.take(later_messages.len())
.collect::<Vec<_>>()
.immediately()
@@ -254,8 +253,7 @@ async fn serial_resume() {
// Call the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber = fixtures::identity::identity(&app, &subscriber_creds, &fixtures::now()).await;
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
let resume_at = {
let initial_messages = [
@@ -264,7 +262,7 @@ async fn serial_resume() {
];
// First subscription
- let routes::Events(events) = routes::events(
+ let get::Response(events) = get::handler(
State(app.clone()),
subscriber.clone(),
None,
@@ -274,7 +272,7 @@ async fn serial_resume() {
.expect("subscribe never fails");
let events = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.take(initial_messages.len())
.collect::<Vec<_>>()
.immediately()
@@ -302,7 +300,7 @@ async fn serial_resume() {
];
// Second subscription
- let routes::Events(events) = routes::events(
+ let get::Response(events) = get::handler(
State(app.clone()),
subscriber.clone(),
Some(resume_at.into()),
@@ -312,7 +310,7 @@ async fn serial_resume() {
.expect("subscribe never fails");
let events = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.take(resume_messages.len())
.collect::<Vec<_>>()
.immediately()
@@ -340,7 +338,7 @@ async fn serial_resume() {
];
// Third subscription
- let routes::Events(events) = routes::events(
+ let get::Response(events) = get::handler(
State(app.clone()),
subscriber.clone(),
Some(resume_at.into()),
@@ -350,7 +348,7 @@ async fn serial_resume() {
.expect("subscribe never fails");
let events = events
- .filter(fixtures::filter::messages())
+ .filter_map(fixtures::message::events)
.take(final_messages.len())
.collect::<Vec<_>>()
.immediately()
@@ -378,10 +376,10 @@ async fn terminates_on_token_expiry() {
let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
let subscriber =
- fixtures::identity::identity(&app, &subscriber_creds, &fixtures::ancient()).await;
+ fixtures::identity::logged_in(&app, &subscriber_creds, &fixtures::ancient()).await;
- let routes::Events(events) =
- routes::events(State(app.clone()), subscriber, None, Query::default())
+ let get::Response(events) =
+ get::handler(State(app.clone()), subscriber, None, Query::default())
.await
.expect("subscribe never fails");
@@ -400,6 +398,7 @@ async fn terminates_on_token_expiry() {
];
assert!(events
+ .filter_map(fixtures::message::events)
.filter(|event| future::ready(
messages
.iter()
@@ -421,13 +420,9 @@ async fn terminates_on_logout() {
// Subscribe via the endpoint
- let subscriber_creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let subscriber_token =
- fixtures::identity::logged_in(&app, &subscriber_creds, &fixtures::now()).await;
- let subscriber =
- fixtures::identity::from_token(&app, &subscriber_token, &fixtures::now()).await;
+ let subscriber = fixtures::identity::create(&app, &fixtures::now()).await;
- let routes::Events(events) = routes::events(
+ let get::Response(events) = get::handler(
State(app.clone()),
subscriber.clone(),
None,
@@ -451,6 +446,7 @@ async fn terminates_on_logout() {
];
assert!(events
+ .filter_map(fixtures::message::events)
.filter(|event| future::ready(
messages
.iter()
diff --git a/src/event/sequence.rs b/src/event/sequence.rs
index bf6d5b8..9bc399b 100644
--- a/src/event/sequence.rs
+++ b/src/event/sequence.rs
@@ -10,6 +10,17 @@ pub struct Instant {
pub sequence: Sequence,
}
+impl Instant {
+ pub fn new(at: DateTime, sequence: Sequence) -> Self {
+ Self { at, sequence }
+ }
+
+ pub fn optional(at: Option<DateTime>, sequence: Option<Sequence>) -> Option<Self> {
+ at.zip(sequence)
+ .map(|(at, sequence)| Self::new(at, sequence))
+ }
+}
+
impl From<Instant> for Sequence {
fn from(instant: Instant) -> Self {
instant.sequence
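`Instant::optional` supports the left-joined message queries later in this patch, where `deleted_at` and `deleted_sequence` are either both present or both NULL; only a complete pair becomes an `Instant`. The same zip-then-map pattern, sketched on plain stand-in types:

```rust
#[derive(Debug, PartialEq)]
struct Instant {
    at: i64,       // stand-in for DateTime
    sequence: u64, // stand-in for Sequence
}

fn optional(at: Option<i64>, sequence: Option<u64>) -> Option<Instant> {
    at.zip(sequence).map(|(at, sequence)| Instant { at, sequence })
}

fn main() {
    assert_eq!(optional(Some(10), Some(3)), Some(Instant { at: 10, sequence: 3 }));
    // A half-present pair (e.g. a row that was never deleted) yields no Instant.
    assert_eq!(optional(None, Some(3)), None);
}
```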
diff --git a/src/expire.rs b/src/expire.rs
index eaedc44..1427a8d 100644
--- a/src/expire.rs
+++ b/src/expire.rs
@@ -16,6 +16,8 @@ pub async fn middleware(
app.tokens().expire(&expired_at).await?;
app.invites().expire(&expired_at).await?;
app.messages().expire(&expired_at).await?;
+ app.messages().purge(&expired_at).await?;
app.channels().expire(&expired_at).await?;
+ app.channels().purge(&expired_at).await?;
Ok(next.run(req).await)
}
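The middleware above now makes two passes per request: `expire` soft-deletes stale rows relative to the request time, and `purge` permanently drops rows that were soft-deleted more than a grace period earlier (six hours, in the message `purge` shown later in this patch). The cutoff arithmetic, sketched with plain chrono types rather than the crate's `DateTime` wrapper, assuming a chrono version that exports `TimeDelta` as the patch itself does:

```rust
use chrono::{DateTime, TimeDelta, Utc};

// Anything soft-deleted before this cutoff is removed for good.
fn purge_cutoff(relative_to: DateTime<Utc>) -> DateTime<Utc> {
    relative_to - TimeDelta::hours(6)
}

fn main() {
    let now = Utc::now();
    println!("purge rows deleted before {}", purge_cutoff(now));
}
```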
diff --git a/src/invite/app.rs b/src/invite/app.rs
index 6800d72..64ba753 100644
--- a/src/invite/app.rs
+++ b/src/invite/app.rs
@@ -7,6 +7,7 @@ use crate::{
db::{Duplicate as _, NotFound as _},
event::repo::Provider as _,
login::{repo::Provider as _, Login, Password},
+ name::Name,
token::{repo::Provider as _, Secret},
};
@@ -31,13 +32,9 @@ impl<'a> Invites<'a> {
Ok(invite)
}
- pub async fn get(&self, invite: &Id) -> Result<Summary, Error> {
+ pub async fn get(&self, invite: &Id) -> Result<Option<Summary>, sqlx::Error> {
let mut tx = self.db.begin().await?;
- let invite = tx
- .invites()
- .summary(invite)
- .await
- .not_found(|| Error::NotFound(invite.clone()))?;
+ let invite = tx.invites().summary(invite).await.optional()?;
tx.commit().await?;
Ok(invite)
@@ -46,10 +43,10 @@ impl<'a> Invites<'a> {
pub async fn accept(
&self,
invite: &Id,
- name: &str,
+ name: &Name,
password: &Password,
accepted_at: &DateTime,
- ) -> Result<Secret, AcceptError> {
+ ) -> Result<(Login, Secret), AcceptError> {
let mut tx = self.db.begin().await?;
let invite = tx
.invites()
@@ -72,11 +69,11 @@ impl<'a> Invites<'a> {
.logins()
.create(name, &password_hash, &created)
.await
- .duplicate(|| AcceptError::DuplicateLogin(name.into()))?;
+ .duplicate(|| AcceptError::DuplicateLogin(name.clone()))?;
let secret = tx.tokens().issue(&login, accepted_at).await?;
tx.commit().await?;
- Ok(secret)
+ Ok((login.as_created(), secret))
}
pub async fn expire(&self, relative_to: &DateTime) -> Result<(), sqlx::Error> {
@@ -92,19 +89,11 @@ impl<'a> Invites<'a> {
}
#[derive(Debug, thiserror::Error)]
-pub enum Error {
- #[error("invite not found: {0}")]
- NotFound(Id),
- #[error(transparent)]
- Database(#[from] sqlx::Error),
-}
-
-#[derive(Debug, thiserror::Error)]
pub enum AcceptError {
#[error("invite not found: {0}")]
NotFound(Id),
#[error("name in use: {0}")]
- DuplicateLogin(String),
+ DuplicateLogin(Name),
#[error(transparent)]
Database(#[from] sqlx::Error),
#[error(transparent)]
diff --git a/src/invite/mod.rs b/src/invite/mod.rs
index 5f9d490..d59fb9c 100644
--- a/src/invite/mod.rs
+++ b/src/invite/mod.rs
@@ -3,10 +3,7 @@ mod id;
mod repo;
mod routes;
-use crate::{
- clock::DateTime,
- login::{self, Login},
-};
+use crate::{clock::DateTime, login, normalize::nfc};
pub use self::{id::Id, routes::router};
@@ -19,6 +16,7 @@ pub struct Invite {
#[derive(serde::Serialize)]
pub struct Summary {
- pub issuer: Login,
+ pub id: Id,
+ pub issuer: nfc::String,
pub issued_at: DateTime,
}
diff --git a/src/invite/repo.rs b/src/invite/repo.rs
index 2ab993f..5f86e49 100644
--- a/src/invite/repo.rs
+++ b/src/invite/repo.rs
@@ -4,6 +4,7 @@ use super::{Id, Invite, Summary};
use crate::{
clock::DateTime,
login::{self, Login},
+ normalize::nfc,
};
pub trait Provider {
@@ -28,13 +29,13 @@ impl<'c> Invites<'c> {
let invite = sqlx::query_as!(
Invite,
r#"
- insert into invite (id, issuer, issued_at)
- values ($1, $2, $3)
- returning
- id as "id: Id",
- issuer as "issuer: login::Id",
- issued_at as "issued_at: DateTime"
- "#,
+ insert into invite (id, issuer, issued_at)
+ values ($1, $2, $3)
+ returning
+ id as "id: Id",
+ issuer as "issuer: login::Id",
+ issued_at as "issued_at: DateTime"
+ "#,
id,
issuer.id,
issued_at
@@ -49,13 +50,13 @@ impl<'c> Invites<'c> {
let invite = sqlx::query_as!(
Invite,
r#"
- select
- id as "id: Id",
- issuer as "issuer: login::Id",
- issued_at as "issued_at: DateTime"
- from invite
- where id = $1
- "#,
+ select
+ id as "id: Id",
+ issuer as "issuer: login::Id",
+ issued_at as "issued_at: DateTime"
+ from invite
+ where id = $1
+ "#,
invite,
)
.fetch_one(&mut *self.0)
@@ -67,21 +68,20 @@ impl<'c> Invites<'c> {
pub async fn summary(&mut self, invite: &Id) -> Result<Summary, sqlx::Error> {
let invite = sqlx::query!(
r#"
- select
- issuer.id as "issuer_id: login::Id",
- issuer.name as "issuer_name",
- invite.issued_at as "invite_issued_at: DateTime"
- from invite
- join login as issuer on (invite.issuer = issuer.id)
- where invite.id = $1
- "#,
+ select
+ invite.id as "invite_id: Id",
+ issuer.id as "issuer_id: login::Id",
+ issuer.display_name as "issuer_name: nfc::String",
+ invite.issued_at as "invite_issued_at: DateTime"
+ from invite
+ join login as issuer on (invite.issuer = issuer.id)
+ where invite.id = $1
+ "#,
invite,
)
.map(|row| Summary {
- issuer: Login {
- id: row.issuer_id,
- name: row.issuer_name,
- },
+ id: row.invite_id,
+ issuer: row.issuer_name,
issued_at: row.invite_issued_at,
})
.fetch_one(&mut *self.0)
@@ -93,10 +93,10 @@ impl<'c> Invites<'c> {
pub async fn accept(&mut self, invite: &Invite) -> Result<(), sqlx::Error> {
sqlx::query_scalar!(
r#"
- delete from invite
- where id = $1
- returning 1 as "deleted: bool"
- "#,
+ delete from invite
+ where id = $1
+ returning 1 as "deleted: bool"
+ "#,
invite.id,
)
.fetch_one(&mut *self.0)
@@ -108,9 +108,9 @@ impl<'c> Invites<'c> {
pub async fn expire(&mut self, expire_at: &DateTime) -> Result<(), sqlx::Error> {
sqlx::query!(
r#"
- delete from invite
- where issued_at < $1
- "#,
+ delete from invite
+ where issued_at < $1
+ "#,
expire_at,
)
.execute(&mut *self.0)
diff --git a/src/invite/routes.rs b/src/invite/routes.rs
deleted file mode 100644
index 977fe9b..0000000
--- a/src/invite/routes.rs
+++ /dev/null
@@ -1,97 +0,0 @@
-use axum::{
- extract::{Json, Path, State},
- http::StatusCode,
- response::{IntoResponse, Response},
- routing::{get, post},
- Router,
-};
-
-use super::{app, Id, Invite, Summary};
-use crate::{
- app::App,
- clock::RequestedAt,
- error::{Internal, NotFound},
- login::{Login, Password},
- token::extract::IdentityToken,
-};
-
-pub fn router() -> Router<App> {
- Router::new()
- .route("/api/invite", post(on_invite))
- .route("/api/invite/:invite", get(invite))
- .route("/api/invite/:invite", post(on_accept))
-}
-
-#[derive(serde::Deserialize)]
-struct InviteRequest {}
-
-async fn on_invite(
- State(app): State<App>,
- RequestedAt(issued_at): RequestedAt,
- login: Login,
- // Require `{}` as the only valid request for this endpoint.
- _: Json<InviteRequest>,
-) -> Result<Json<Invite>, Internal> {
- let invite = app.invites().create(&login, &issued_at).await?;
- Ok(Json(invite))
-}
-
-async fn invite(
- State(app): State<App>,
- Path(invite): Path<Id>,
-) -> Result<Json<Summary>, InviteError> {
- app.invites()
- .get(&invite)
- .await
- .map(Json)
- .map_err(InviteError)
-}
-
-struct InviteError(app::Error);
-
-impl IntoResponse for InviteError {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- error @ app::Error::NotFound(_) => NotFound(error).into_response(),
- other => Internal::from(other).into_response(),
- }
- }
-}
-
-#[derive(serde::Deserialize)]
-struct AcceptRequest {
- name: String,
- password: Password,
-}
-
-async fn on_accept(
- State(app): State<App>,
- RequestedAt(accepted_at): RequestedAt,
- identity: IdentityToken,
- Path(invite): Path<Id>,
- Json(request): Json<AcceptRequest>,
-) -> Result<(IdentityToken, StatusCode), AcceptError> {
- let secret = app
- .invites()
- .accept(&invite, &request.name, &request.password, &accepted_at)
- .await
- .map_err(AcceptError)?;
- let identity = identity.set(secret);
- Ok((identity, StatusCode::NO_CONTENT))
-}
-
-struct AcceptError(app::AcceptError);
-
-impl IntoResponse for AcceptError {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- error @ app::AcceptError::NotFound(_) => NotFound(error).into_response(),
- error @ app::AcceptError::DuplicateLogin(_) => {
- (StatusCode::CONFLICT, error.to_string()).into_response()
- }
- other => Internal::from(other).into_response(),
- }
- }
-}
diff --git a/src/invite/routes/invite/get.rs b/src/invite/routes/invite/get.rs
new file mode 100644
index 0000000..c8b52f1
--- /dev/null
+++ b/src/invite/routes/invite/get.rs
@@ -0,0 +1,39 @@
+use axum::{
+ extract::{Json, Path, State},
+ response::{IntoResponse, Response},
+};
+
+use crate::{
+ app::App,
+ error::{Internal, NotFound},
+ invite::{Id, Summary},
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ Path(invite): Path<super::PathInfo>,
+) -> Result<Json<Summary>, Error> {
+ app.invites()
+ .get(&invite)
+ .await?
+ .map(Json)
+ .ok_or_else(move || Error::NotFound(invite))
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ #[error("invite not found: {0}")]
+ NotFound(Id),
+ #[error(transparent)]
+ Database(#[from] sqlx::Error),
+}
+
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match self {
+ Self::NotFound(_) => NotFound(self).into_response(),
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/invite/routes/invite/mod.rs b/src/invite/routes/invite/mod.rs
new file mode 100644
index 0000000..04593fd
--- /dev/null
+++ b/src/invite/routes/invite/mod.rs
@@ -0,0 +1,4 @@
+pub mod get;
+pub mod post;
+
+type PathInfo = crate::invite::Id;
diff --git a/src/invite/routes/invite/post.rs b/src/invite/routes/invite/post.rs
new file mode 100644
index 0000000..3ca4e6b
--- /dev/null
+++ b/src/invite/routes/invite/post.rs
@@ -0,0 +1,52 @@
+use axum::{
+ extract::{Json, Path, State},
+ http::StatusCode,
+ response::{IntoResponse, Response},
+};
+
+use crate::{
+ app::App,
+ clock::RequestedAt,
+ error::{Internal, NotFound},
+ invite::app,
+ login::{Login, Password},
+ name::Name,
+ token::extract::IdentityCookie,
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ RequestedAt(accepted_at): RequestedAt,
+ identity: IdentityCookie,
+ Path(invite): Path<super::PathInfo>,
+ Json(request): Json<Request>,
+) -> Result<(IdentityCookie, Json<Login>), Error> {
+ let (login, secret) = app
+ .invites()
+ .accept(&invite, &request.name, &request.password, &accepted_at)
+ .await
+ .map_err(Error)?;
+ let identity = identity.set(secret);
+ Ok((identity, Json(login)))
+}
+
+#[derive(serde::Deserialize)]
+pub struct Request {
+ pub name: Name,
+ pub password: Password,
+}
+
+pub struct Error(app::AcceptError);
+
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ let Self(error) = self;
+ match error {
+ app::AcceptError::NotFound(_) => NotFound(error).into_response(),
+ app::AcceptError::DuplicateLogin(_) => {
+ (StatusCode::CONFLICT, error.to_string()).into_response()
+ }
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/invite/routes/mod.rs b/src/invite/routes/mod.rs
new file mode 100644
index 0000000..dae20ba
--- /dev/null
+++ b/src/invite/routes/mod.rs
@@ -0,0 +1,16 @@
+use axum::{
+ routing::{get, post},
+ Router,
+};
+
+use crate::app::App;
+
+mod invite;
+mod post;
+
+pub fn router() -> Router<App> {
+ Router::new()
+ .route("/api/invite", post(post::handler))
+ .route("/api/invite/:invite", get(invite::get::handler))
+ .route("/api/invite/:invite", post(invite::post::handler))
+}
diff --git a/src/invite/routes/post.rs b/src/invite/routes/post.rs
new file mode 100644
index 0000000..eb7d706
--- /dev/null
+++ b/src/invite/routes/post.rs
@@ -0,0 +1,19 @@
+use axum::extract::{Json, State};
+
+use crate::{
+ app::App, clock::RequestedAt, error::Internal, invite::Invite, token::extract::Identity,
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ RequestedAt(issued_at): RequestedAt,
+ identity: Identity,
+ _: Json<Request>,
+) -> Result<Json<Invite>, Internal> {
+ let invite = app.invites().create(&identity.login, &issued_at).await?;
+ Ok(Json(invite))
+}
+
+// Require `{}` as the only valid request for this endpoint.
+#[derive(Default, serde::Deserialize)]
+pub struct Request {}
diff --git a/src/lib.rs b/src/lib.rs
index 73a2cb0..84b8dfc 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -16,6 +16,8 @@ mod id;
mod invite;
mod login;
mod message;
+mod name;
+mod normalize;
mod setup;
#[cfg(test)]
mod test;
diff --git a/src/login/app.rs b/src/login/app.rs
index bb1419b..2f5896f 100644
--- a/src/login/app.rs
+++ b/src/login/app.rs
@@ -1,24 +1,37 @@
use sqlx::sqlite::SqlitePool;
-use super::{repo::Provider as _, Login, Password};
+use super::repo::Provider as _;
+
+#[cfg(test)]
+use super::{Login, Password};
+#[cfg(test)]
use crate::{
clock::DateTime,
event::{repo::Provider as _, Broadcaster, Event},
+ name::Name,
};
pub struct Logins<'a> {
db: &'a SqlitePool,
+ #[cfg(test)]
events: &'a Broadcaster,
}
impl<'a> Logins<'a> {
+ #[cfg(not(test))]
+ pub const fn new(db: &'a SqlitePool) -> Self {
+ Self { db }
+ }
+
+ #[cfg(test)]
pub const fn new(db: &'a SqlitePool, events: &'a Broadcaster) -> Self {
Self { db, events }
}
+ #[cfg(test)]
pub async fn create(
&self,
- name: &str,
+ name: &Name,
password: &Password,
created_at: &DateTime,
) -> Result<Login, CreateError> {
@@ -34,11 +47,19 @@ impl<'a> Logins<'a> {
Ok(login.as_created())
}
+
+ pub async fn recanonicalize(&self) -> Result<(), sqlx::Error> {
+ let mut tx = self.db.begin().await?;
+ tx.logins().recanonicalize().await?;
+ tx.commit().await?;
+
+ Ok(())
+ }
}
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub enum CreateError {
- DatabaseError(#[from] sqlx::Error),
- PasswordHashError(#[from] password_hash::Error),
+ Database(#[from] sqlx::Error),
+ PasswordHash(#[from] password_hash::Error),
}
diff --git a/src/login/extract.rs b/src/login/extract.rs
deleted file mode 100644
index c2d97f2..0000000
--- a/src/login/extract.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-use axum::{extract::FromRequestParts, http::request::Parts};
-
-use super::Login;
-use crate::{app::App, token::extract::Identity};
-
-#[async_trait::async_trait]
-impl FromRequestParts<App> for Login {
- type Rejection = <Identity as FromRequestParts<App>>::Rejection;
-
- async fn from_request_parts(parts: &mut Parts, state: &App) -> Result<Self, Self::Rejection> {
- let identity = Identity::from_request_parts(parts, state).await?;
-
- Ok(identity.login)
- }
-}
diff --git a/src/login/history.rs b/src/login/history.rs
index f8d81bb..daad579 100644
--- a/src/login/history.rs
+++ b/src/login/history.rs
@@ -20,7 +20,6 @@ impl History {
// if this returns a redacted or modified version of the login. If we implement
// renames by redacting the original name, then this should return the edited
// login, not the original, even if that's not how it was "as created.")
- #[cfg(test)]
pub fn as_created(&self) -> Login {
self.login.clone()
}
@@ -30,6 +29,11 @@ impl History {
.filter(Sequence::up_to(resume_point.into()))
.collect()
}
+
+ // Snapshot of this login, as of all events recorded in this history.
+ pub fn as_snapshot(&self) -> Option<Login> {
+ self.events().collect()
+ }
}
// Events interface
diff --git a/src/login/mod.rs b/src/login/mod.rs
index 98cc3d7..279e9a6 100644
--- a/src/login/mod.rs
+++ b/src/login/mod.rs
@@ -1,7 +1,5 @@
-#[cfg(test)]
pub mod app;
pub mod event;
-pub mod extract;
mod history;
mod id;
pub mod password;
diff --git a/src/login/password.rs b/src/login/password.rs
index 14fd981..c27c950 100644
--- a/src/login/password.rs
+++ b/src/login/password.rs
@@ -4,6 +4,8 @@ use argon2::Argon2;
use password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
use rand_core::OsRng;
+use crate::normalize::nfc;
+
#[derive(sqlx::Type)]
#[sqlx(transparent)]
pub struct StoredHash(String);
@@ -31,7 +33,7 @@ impl fmt::Debug for StoredHash {
#[derive(serde::Deserialize)]
#[serde(transparent)]
-pub struct Password(String);
+pub struct Password(nfc::String);
impl Password {
pub fn hash(&self) -> Result<StoredHash, password_hash::Error> {
@@ -56,9 +58,8 @@ impl fmt::Debug for Password {
}
}
-#[cfg(test)]
impl From<String> for Password {
fn from(password: String) -> Self {
- Self(password)
+ Password(password.into())
}
}
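`Password` now wraps the patch's `nfc::String`, so (assuming that type normalizes on construction, as its name suggests) the secret is Unicode-normalized at the type boundary and the same passphrase matches regardless of how the client composed it. A rough stand-in for that newtype (the real one lives in src/normalize/string.rs, not shown here), assuming normalization via the `unicode-normalization` crate:

```rust
use unicode_normalization::UnicodeNormalization as _; // assumed dependency

// Stand-in for nfc::String: normalize once at the boundary, then hash and
// verify the normalized bytes.
struct Normalized(String);

impl From<String> for Normalized {
    fn from(raw: String) -> Self {
        Normalized(raw.nfc().collect())
    }
}

fn main() {
    let composed = Normalized::from("p\u{e4}ss".to_string()); // "ä" as one codepoint
    let decomposed = Normalized::from("pa\u{308}ss".to_string()); // "a" + combining diaeresis
    assert_eq!(composed.0, decomposed.0);
}
```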
diff --git a/src/login/repo.rs b/src/login/repo.rs
index 6d6510c..611edd6 100644
--- a/src/login/repo.rs
+++ b/src/login/repo.rs
@@ -1,9 +1,11 @@
+use futures::stream::{StreamExt as _, TryStreamExt as _};
use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
use crate::{
clock::DateTime,
event::{Instant, ResumePoint, Sequence},
login::{password::StoredHash, History, Id, Login},
+ name::{self, Name},
};
pub trait Provider {
@@ -21,80 +23,80 @@ pub struct Logins<'t>(&'t mut SqliteConnection);
impl<'c> Logins<'c> {
pub async fn create(
&mut self,
- name: &str,
+ name: &Name,
password_hash: &StoredHash,
created: &Instant,
) -> Result<History, sqlx::Error> {
let id = Id::generate();
+ let display_name = name.display();
+ let canonical_name = name.canonical();
- let login = sqlx::query!(
+ sqlx::query!(
r#"
insert
- into login (id, name, password_hash, created_sequence, created_at)
- values ($1, $2, $3, $4, $5)
- returning
- id as "id: Id",
- name,
- created_sequence as "created_sequence: Sequence",
- created_at as "created_at: DateTime"
+ into login (id, display_name, canonical_name, password_hash, created_sequence, created_at)
+ values ($1, $2, $3, $4, $5, $6)
"#,
id,
- name,
+ display_name,
+ canonical_name,
password_hash,
created.sequence,
created.at,
)
- .map(|row| History {
+ .execute(&mut *self.0)
+ .await?;
+
+ let login = History {
+ created: *created,
login: Login {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
+ id,
+ name: name.clone(),
},
- })
- .fetch_one(&mut *self.0)
- .await?;
+ };
Ok(login)
}
- pub async fn all(&mut self, resume_at: ResumePoint) -> Result<Vec<History>, sqlx::Error> {
- let channels = sqlx::query!(
+ pub async fn all(&mut self, resume_at: ResumePoint) -> Result<Vec<History>, LoadError> {
+ let logins = sqlx::query!(
r#"
select
id as "id: Id",
- name,
+ display_name as "display_name: String",
+ canonical_name as "canonical_name: String",
created_sequence as "created_sequence: Sequence",
created_at as "created_at: DateTime"
from login
where coalesce(created_sequence <= $1, true)
- order by created_sequence
+ order by canonical_name
"#,
resume_at,
)
- .map(|row| History {
- login: Login {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
+ .map(|row| {
+ Ok::<_, LoadError>(History {
+ login: Login {
+ id: row.id,
+ name: Name::new(row.display_name, row.canonical_name)?,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ })
})
- .fetch_all(&mut *self.0)
+ .fetch(&mut *self.0)
+ .map(|res| res?)
+ .try_collect()
.await?;
- Ok(channels)
+ Ok(logins)
}
- pub async fn replay(&mut self, resume_at: ResumePoint) -> Result<Vec<History>, sqlx::Error> {
- let messages = sqlx::query!(
+
+ pub async fn replay(&mut self, resume_at: ResumePoint) -> Result<Vec<History>, LoadError> {
+ let logins = sqlx::query!(
r#"
select
id as "id: Id",
- name,
+ display_name as "display_name: String",
+ canonical_name as "canonical_name: String",
created_sequence as "created_sequence: Sequence",
created_at as "created_at: DateTime"
from login
@@ -102,25 +104,59 @@ impl<'c> Logins<'c> {
"#,
resume_at,
)
- .map(|row| History {
- login: Login {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
+ .map(|row| {
+ Ok::<_, name::Error>(History {
+ login: Login {
+ id: row.id,
+ name: Name::new(row.display_name, row.canonical_name)?,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ })
})
+ .fetch(&mut *self.0)
+ .map(|res| Ok::<_, LoadError>(res??))
+ .try_collect()
+ .await?;
+
+ Ok(logins)
+ }
+
+ pub async fn recanonicalize(&mut self) -> Result<(), sqlx::Error> {
+ let logins = sqlx::query!(
+ r#"
+ select
+ id as "id: Id",
+ display_name as "display_name: String"
+ from login
+ "#,
+ )
.fetch_all(&mut *self.0)
.await?;
- Ok(messages)
+ for login in logins {
+ let name = Name::from(login.display_name);
+ let canonical_name = name.canonical();
+
+ sqlx::query!(
+ r#"
+ update login
+ set canonical_name = $1
+ where id = $2
+ "#,
+ canonical_name,
+ login.id,
+ )
+ .execute(&mut *self.0)
+ .await?;
+ }
+
+ Ok(())
}
}
-impl<'t> From<&'t mut SqliteConnection> for Logins<'t> {
- fn from(tx: &'t mut SqliteConnection) -> Self {
- Self(tx)
- }
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum LoadError {
+ Database(#[from] sqlx::Error),
+ Name(#[from] name::Error),
}
diff --git a/src/login/routes.rs b/src/login/routes.rs
deleted file mode 100644
index 6579ae6..0000000
--- a/src/login/routes.rs
+++ /dev/null
@@ -1,97 +0,0 @@
-use axum::{
- extract::{Json, State},
- http::StatusCode,
- response::{IntoResponse, Response},
- routing::post,
- Router,
-};
-
-use crate::{
- app::App,
- clock::RequestedAt,
- error::{Internal, Unauthorized},
- login::Password,
- token::{app, extract::IdentityToken},
-};
-
-#[cfg(test)]
-mod test;
-
-pub fn router() -> Router<App> {
- Router::new()
- .route("/api/auth/login", post(on_login))
- .route("/api/auth/logout", post(on_logout))
-}
-
-#[derive(serde::Deserialize)]
-struct LoginRequest {
- name: String,
- password: Password,
-}
-
-async fn on_login(
- State(app): State<App>,
- RequestedAt(now): RequestedAt,
- identity: IdentityToken,
- Json(request): Json<LoginRequest>,
-) -> Result<(IdentityToken, StatusCode), LoginError> {
- let token = app
- .tokens()
- .login(&request.name, &request.password, &now)
- .await
- .map_err(LoginError)?;
- let identity = identity.set(token);
- Ok((identity, StatusCode::NO_CONTENT))
-}
-
-#[derive(Debug)]
-struct LoginError(app::LoginError);
-
-impl IntoResponse for LoginError {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- app::LoginError::Rejected => {
- // not error::Unauthorized due to differing messaging
- (StatusCode::UNAUTHORIZED, "invalid name or password").into_response()
- }
- other => Internal::from(other).into_response(),
- }
- }
-}
-
-#[derive(serde::Deserialize)]
-struct LogoutRequest {}
-
-async fn on_logout(
- State(app): State<App>,
- RequestedAt(now): RequestedAt,
- identity: IdentityToken,
- // This forces the only valid request to be `{}`, and not the infinite
- // variation allowed when there's no body extractor.
- Json(LogoutRequest {}): Json<LogoutRequest>,
-) -> Result<(IdentityToken, StatusCode), LogoutError> {
- if let Some(secret) = identity.secret() {
- let (token, _) = app.tokens().validate(&secret, &now).await?;
- app.tokens().logout(&token).await?;
- }
-
- let identity = identity.clear();
- Ok((identity, StatusCode::NO_CONTENT))
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error(transparent)]
-enum LogoutError {
- ValidateError(#[from] app::ValidateError),
- DatabaseError(#[from] sqlx::Error),
-}
-
-impl IntoResponse for LogoutError {
- fn into_response(self) -> Response {
- match self {
- Self::ValidateError(app::ValidateError::InvalidToken) => Unauthorized.into_response(),
- other => Internal::from(other).into_response(),
- }
- }
-}
diff --git a/src/login/routes/login/mod.rs b/src/login/routes/login/mod.rs
new file mode 100644
index 0000000..36b384e
--- /dev/null
+++ b/src/login/routes/login/mod.rs
@@ -0,0 +1,4 @@
+pub mod post;
+
+#[cfg(test)]
+mod test;
diff --git a/src/login/routes/login/post.rs b/src/login/routes/login/post.rs
new file mode 100644
index 0000000..96da5c5
--- /dev/null
+++ b/src/login/routes/login/post.rs
@@ -0,0 +1,52 @@
+use axum::{
+ extract::{Json, State},
+ http::StatusCode,
+ response::{IntoResponse, Response},
+};
+
+use crate::{
+ app::App,
+ clock::RequestedAt,
+ error::Internal,
+ login::{Login, Password},
+ name::Name,
+ token::{app, extract::IdentityCookie},
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ RequestedAt(now): RequestedAt,
+ identity: IdentityCookie,
+ Json(request): Json<Request>,
+) -> Result<(IdentityCookie, Json<Login>), Error> {
+ let (login, secret) = app
+ .tokens()
+ .login(&request.name, &request.password, &now)
+ .await
+ .map_err(Error)?;
+ let identity = identity.set(secret);
+ Ok((identity, Json(login)))
+}
+
+#[derive(serde::Deserialize)]
+pub struct Request {
+ pub name: Name,
+ pub password: Password,
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct Error(#[from] pub app::LoginError);
+
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ let Self(error) = self;
+ match error {
+ app::LoginError::Rejected => {
+ // not error::Unauthorized due to differing messaging
+ (StatusCode::UNAUTHORIZED, "invalid name or password").into_response()
+ }
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/login/routes/test/login.rs b/src/login/routes/login/test.rs
index 68c92de..7399796 100644
--- a/src/login/routes/test/login.rs
+++ b/src/login/routes/login/test.rs
@@ -1,9 +1,7 @@
-use axum::{
- extract::{Json, State},
- http::StatusCode,
-};
+use axum::extract::{Json, State};
-use crate::{login::routes, test::fixtures, token::app};
+use super::post;
+use crate::{test::fixtures, token::app};
#[tokio::test]
async fn correct_credentials() {
@@ -14,21 +12,23 @@ async fn correct_credentials() {
// Call the endpoint
- let identity = fixtures::identity::not_logged_in();
+ let identity = fixtures::cookie::not_logged_in();
let logged_in_at = fixtures::now();
- let request = routes::LoginRequest {
+ let request = post::Request {
name: name.clone(),
password,
};
- let (identity, status) =
- routes::on_login(State(app.clone()), logged_in_at, identity, Json(request))
+ let (identity, Json(response)) =
+ post::handler(State(app.clone()), logged_in_at, identity, Json(request))
.await
.expect("logged in with valid credentials");
// Verify the return value's basic structure
- assert_eq!(StatusCode::NO_CONTENT, status);
- let secret = identity.secret().expect("logged in with valid credentials");
+ assert_eq!(name, response.name);
+ let secret = identity
+ .secret()
+ .expect("logged in with valid credentials issues an identity cookie");
// Verify the semantics
@@ -39,7 +39,7 @@ async fn correct_credentials() {
.await
.expect("identity secret is valid");
- assert_eq!(name, validated_login.name);
+ assert_eq!(response, validated_login);
}
#[tokio::test]
@@ -50,17 +50,17 @@ async fn invalid_name() {
// Call the endpoint
- let identity = fixtures::identity::not_logged_in();
+ let identity = fixtures::cookie::not_logged_in();
let logged_in_at = fixtures::now();
let (name, password) = fixtures::login::propose();
- let request = routes::LoginRequest {
+ let request = post::Request {
name: name.clone(),
password,
};
- let routes::LoginError(error) =
- routes::on_login(State(app.clone()), logged_in_at, identity, Json(request))
+ let post::Error(error) =
+ post::handler(State(app.clone()), logged_in_at, identity, Json(request))
.await
- .expect_err("logged in with an incorrect password");
+ .expect_err("logged in with an incorrect password fails");
// Verify the return value's basic structure
@@ -77,13 +77,13 @@ async fn incorrect_password() {
// Call the endpoint
let logged_in_at = fixtures::now();
- let identity = fixtures::identity::not_logged_in();
- let request = routes::LoginRequest {
+ let identity = fixtures::cookie::not_logged_in();
+ let request = post::Request {
name: login.name,
password: fixtures::login::propose_password(),
};
- let routes::LoginError(error) =
- routes::on_login(State(app.clone()), logged_in_at, identity, Json(request))
+ let post::Error(error) =
+ post::handler(State(app.clone()), logged_in_at, identity, Json(request))
.await
.expect_err("logged in with an incorrect password");
@@ -102,9 +102,9 @@ async fn token_expires() {
// Call the endpoint
let logged_in_at = fixtures::ancient();
- let identity = fixtures::identity::not_logged_in();
- let request = routes::LoginRequest { name, password };
- let (identity, _) = routes::on_login(State(app.clone()), logged_in_at, identity, Json(request))
+ let identity = fixtures::cookie::not_logged_in();
+ let request = post::Request { name, password };
+ let (identity, _) = post::handler(State(app.clone()), logged_in_at, identity, Json(request))
.await
.expect("logged in with valid credentials");
let secret = identity.secret().expect("logged in with valid credentials");
diff --git a/src/login/routes/logout/mod.rs b/src/login/routes/logout/mod.rs
new file mode 100644
index 0000000..36b384e
--- /dev/null
+++ b/src/login/routes/logout/mod.rs
@@ -0,0 +1,4 @@
+pub mod post;
+
+#[cfg(test)]
+mod test;
diff --git a/src/login/routes/logout/post.rs b/src/login/routes/logout/post.rs
new file mode 100644
index 0000000..bb09b9f
--- /dev/null
+++ b/src/login/routes/logout/post.rs
@@ -0,0 +1,47 @@
+use axum::{
+ extract::{Json, State},
+ http::StatusCode,
+ response::{IntoResponse, Response},
+};
+
+use crate::{
+ app::App,
+ clock::RequestedAt,
+ error::{Internal, Unauthorized},
+ token::{app, extract::IdentityCookie},
+};
+
+pub async fn handler(
+ State(app): State<App>,
+ RequestedAt(now): RequestedAt,
+ identity: IdentityCookie,
+ Json(_): Json<Request>,
+) -> Result<(IdentityCookie, StatusCode), Error> {
+ if let Some(secret) = identity.secret() {
+ let (token, _) = app.tokens().validate(&secret, &now).await?;
+ app.tokens().logout(&token).await?;
+ }
+
+ let identity = identity.clear();
+ Ok((identity, StatusCode::NO_CONTENT))
+}
+
+// This forces the only valid request to be `{}`, and not the infinite
+// variation allowed when there's no body extractor.
+#[derive(Default, serde::Deserialize)]
+pub struct Request {}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct Error(#[from] pub app::ValidateError);
+
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ let Self(error) = self;
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match error {
+ app::ValidateError::InvalidToken => Unauthorized.into_response(),
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/login/routes/logout/test.rs b/src/login/routes/logout/test.rs
new file mode 100644
index 0000000..775fa9f
--- /dev/null
+++ b/src/login/routes/logout/test.rs
@@ -0,0 +1,79 @@
+use axum::{
+ extract::{Json, State},
+ http::StatusCode,
+};
+
+use super::post;
+use crate::{test::fixtures, token::app};
+
+#[tokio::test]
+async fn successful() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let now = fixtures::now();
+ let creds = fixtures::login::create_with_password(&app, &fixtures::now()).await;
+ let identity = fixtures::cookie::logged_in(&app, &creds, &now).await;
+ let secret = fixtures::cookie::secret(&identity);
+
+ // Call the endpoint
+
+ let (response_identity, response_status) = post::handler(
+ State(app.clone()),
+ fixtures::now(),
+ identity.clone(),
+ Json::default(),
+ )
+ .await
+ .expect("logged out with a valid token");
+
+ // Verify the return value's basic structure
+
+ assert!(response_identity.secret().is_none());
+ assert_eq!(StatusCode::NO_CONTENT, response_status);
+
+ // Verify the semantics
+ let error = app
+ .tokens()
+ .validate(&secret, &now)
+ .await
+ .expect_err("secret is invalid");
+ assert!(matches!(error, app::ValidateError::InvalidToken));
+}
+
+#[tokio::test]
+async fn no_identity() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+
+ // Call the endpoint
+
+ let identity = fixtures::cookie::not_logged_in();
+ let (identity, status) = post::handler(State(app), fixtures::now(), identity, Json::default())
+ .await
+ .expect("logged out with no token succeeds");
+
+ // Verify the return value's basic structure
+
+ assert!(identity.secret().is_none());
+ assert_eq!(StatusCode::NO_CONTENT, status);
+}
+
+#[tokio::test]
+async fn invalid_token() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+
+ // Call the endpoint
+
+ let identity = fixtures::cookie::fictitious();
+ let post::Error(error) = post::handler(State(app), fixtures::now(), identity, Json::default())
+ .await
+ .expect_err("logged out with an invalid token fails");
+
+ // Verify the return value's basic structure
+
+ assert!(matches!(error, app::ValidateError::InvalidToken));
+}
diff --git a/src/login/routes/mod.rs b/src/login/routes/mod.rs
new file mode 100644
index 0000000..8cb8852
--- /dev/null
+++ b/src/login/routes/mod.rs
@@ -0,0 +1,12 @@
+use axum::{routing::post, Router};
+
+use crate::app::App;
+
+mod login;
+mod logout;
+
+pub fn router() -> Router<App> {
+ Router::new()
+ .route("/api/auth/login", post(login::post::handler))
+ .route("/api/auth/logout", post(logout::post::handler))
+}
diff --git a/src/login/routes/test/logout.rs b/src/login/routes/test/logout.rs
deleted file mode 100644
index 611829e..0000000
--- a/src/login/routes/test/logout.rs
+++ /dev/null
@@ -1,97 +0,0 @@
-use axum::{
- extract::{Json, State},
- http::StatusCode,
-};
-
-use crate::{login::routes, test::fixtures, token::app};
-
-#[tokio::test]
-async fn successful() {
- // Set up the environment
-
- let app = fixtures::scratch_app().await;
- let now = fixtures::now();
- let login = fixtures::login::create_with_password(&app, &fixtures::now()).await;
- let identity = fixtures::identity::logged_in(&app, &login, &now).await;
- let secret = fixtures::identity::secret(&identity);
-
- // Call the endpoint
-
- let (response_identity, response_status) = routes::on_logout(
- State(app.clone()),
- fixtures::now(),
- identity.clone(),
- Json(routes::LogoutRequest {}),
- )
- .await
- .expect("logged out with a valid token");
-
- // Verify the return value's basic structure
-
- assert!(response_identity.secret().is_none());
- assert_eq!(StatusCode::NO_CONTENT, response_status);
-
- // Verify the semantics
-
- let error = app
- .tokens()
- .validate(&secret, &now)
- .await
- .expect_err("secret is invalid");
- match error {
- app::ValidateError::InvalidToken => (), // should be invalid
- other @ app::ValidateError::DatabaseError(_) => {
- panic!("expected ValidateError::InvalidToken, got {other:#}")
- }
- }
-}
-
-#[tokio::test]
-async fn no_identity() {
- // Set up the environment
-
- let app = fixtures::scratch_app().await;
-
- // Call the endpoint
-
- let identity = fixtures::identity::not_logged_in();
- let (identity, status) = routes::on_logout(
- State(app),
- fixtures::now(),
- identity,
- Json(routes::LogoutRequest {}),
- )
- .await
- .expect("logged out with no token");
-
- // Verify the return value's basic structure
-
- assert!(identity.secret().is_none());
- assert_eq!(StatusCode::NO_CONTENT, status);
-}
-
-#[tokio::test]
-async fn invalid_token() {
- // Set up the environment
-
- let app = fixtures::scratch_app().await;
-
- // Call the endpoint
-
- let identity = fixtures::identity::fictitious();
- let error = routes::on_logout(
- State(app),
- fixtures::now(),
- identity,
- Json(routes::LogoutRequest {}),
- )
- .await
- .expect_err("logged out with an invalid token");
-
- // Verify the return value's basic structure
-
- assert!(matches!(
- error,
- routes::LogoutError::ValidateError(app::ValidateError::InvalidToken)
- ));
-}
diff --git a/src/login/routes/test/mod.rs b/src/login/routes/test/mod.rs
deleted file mode 100644
index 90522c4..0000000
--- a/src/login/routes/test/mod.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-mod login;
-mod logout;
diff --git a/src/login/snapshot.rs b/src/login/snapshot.rs
index 1a92f5c..e1eb96c 100644
--- a/src/login/snapshot.rs
+++ b/src/login/snapshot.rs
@@ -2,6 +2,7 @@ use super::{
event::{Created, Event},
Id,
};
+use crate::name::Name;
// This also implements FromRequestParts (see `./extract.rs`). As a result, it
// can be used as an extractor for endpoints that want to require login, or for
@@ -10,7 +11,7 @@ use super::{
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize)]
pub struct Login {
pub id: Id,
- pub name: String,
+ pub name: Name,
// The omission of the hashed password is deliberate, to minimize the
// chance that it ends up tangled up in debug output or in some other chunk
// of logic elsewhere.
diff --git a/src/message/app.rs b/src/message/app.rs
index 3385af2..eed6ba4 100644
--- a/src/message/app.rs
+++ b/src/message/app.rs
@@ -2,13 +2,14 @@ use chrono::TimeDelta;
use itertools::Itertools;
use sqlx::sqlite::SqlitePool;
-use super::{repo::Provider as _, Id, Message};
+use super::{repo::Provider as _, Body, Id, Message};
use crate::{
channel::{self, repo::Provider as _},
clock::DateTime,
db::NotFound as _,
event::{repo::Provider as _, Broadcaster, Event, Sequence},
login::Login,
+ name,
};
pub struct Messages<'a> {
@@ -26,7 +27,7 @@ impl<'a> Messages<'a> {
channel: &channel::Id,
sender: &Login,
sent_at: &DateTime,
- body: &str,
+ body: &Body,
) -> Result<Message, SendError> {
let mut tx = self.db.begin().await?;
let channel = tx
@@ -46,8 +47,17 @@ impl<'a> Messages<'a> {
pub async fn delete(&self, message: &Id, deleted_at: &DateTime) -> Result<(), DeleteError> {
let mut tx = self.db.begin().await?;
+ let message = tx
+ .messages()
+ .by_id(message)
+ .await
+ .not_found(|| DeleteError::NotFound(message.clone()))?;
+ message
+ .as_snapshot()
+ .ok_or_else(|| DeleteError::Deleted(message.id().clone()))?;
+
let deleted = tx.sequence().next(deleted_at).await?;
- let message = tx.messages().delete(message, &deleted).await?;
+ let message = tx.messages().delete(&message, &deleted).await?;
tx.commit().await?;
self.events.broadcast(
@@ -91,6 +101,17 @@ impl<'a> Messages<'a> {
Ok(())
}
+
+ pub async fn purge(&self, relative_to: &DateTime) -> Result<(), sqlx::Error> {
+ // Somewhat arbitrarily, purge after 6 hours.
+ let purge_at = relative_to.to_owned() - TimeDelta::hours(6);
+
+ let mut tx = self.db.begin().await?;
+ tx.messages().purge(&purge_at).await?;
+ tx.commit().await?;
+
+ Ok(())
+ }
}
#[derive(Debug, thiserror::Error)]
@@ -98,15 +119,27 @@ pub enum SendError {
#[error("channel {0} not found")]
ChannelNotFound(channel::Id),
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Database(#[from] sqlx::Error),
+ #[error(transparent)]
+ Name(#[from] name::Error),
+}
+
+impl From<channel::repo::LoadError> for SendError {
+ fn from(error: channel::repo::LoadError) -> Self {
+ use channel::repo::LoadError;
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
}
#[derive(Debug, thiserror::Error)]
pub enum DeleteError {
- #[error("channel {0} not found")]
- ChannelNotFound(channel::Id),
#[error("message {0} not found")]
NotFound(Id),
+ #[error("message {0} deleted")]
+ Deleted(Id),
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Database(#[from] sqlx::Error),
}
diff --git a/src/message/body.rs b/src/message/body.rs
new file mode 100644
index 0000000..6dd224c
--- /dev/null
+++ b/src/message/body.rs
@@ -0,0 +1,30 @@
+use std::fmt;
+
+use crate::normalize::nfc;
+
+#[derive(
+ Clone, Debug, Default, Eq, PartialEq, serde::Deserialize, serde::Serialize, sqlx::Type,
+)]
+#[serde(transparent)]
+#[sqlx(transparent)]
+pub struct Body(nfc::String);
+
+impl fmt::Display for Body {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let Self(body) = self;
+ body.fmt(f)
+ }
+}
+
+impl From<String> for Body {
+ fn from(body: String) -> Self {
+ Self(body.into())
+ }
+}
+
+impl From<Body> for String {
+ fn from(body: Body) -> Self {
+ let Body(body) = body;
+ body.into()
+ }
+}
diff --git a/src/message/history.rs b/src/message/history.rs
index 09e69b7..0424d0d 100644
--- a/src/message/history.rs
+++ b/src/message/history.rs
@@ -30,6 +30,11 @@ impl History {
.filter(Sequence::up_to(resume_point.into()))
.collect()
}
+
+ // Snapshot of this message as of all events recorded in this history.
+ pub fn as_snapshot(&self) -> Option<Message> {
+ self.events().collect()
+ }
}
// Events interface
diff --git a/src/message/mod.rs b/src/message/mod.rs
index a8f51ab..c2687bc 100644
--- a/src/message/mod.rs
+++ b/src/message/mod.rs
@@ -1,4 +1,5 @@
pub mod app;
+mod body;
pub mod event;
mod history;
mod id;
@@ -6,4 +7,6 @@ pub mod repo;
mod routes;
mod snapshot;
-pub use self::{event::Event, history::History, id::Id, routes::router, snapshot::Message};
+pub use self::{
+ body::Body, event::Event, history::History, id::Id, routes::router, snapshot::Message,
+};
diff --git a/src/message/repo.rs b/src/message/repo.rs
index 71c6d10..c8ceceb 100644
--- a/src/message/repo.rs
+++ b/src/message/repo.rs
@@ -1,6 +1,6 @@
use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
-use super::{snapshot::Message, History, Id};
+use super::{snapshot::Message, Body, History, Id};
use crate::{
channel,
clock::DateTime,
@@ -26,24 +26,24 @@ impl<'c> Messages<'c> {
channel: &channel::History,
sender: &Login,
sent: &Instant,
- body: &str,
+ body: &Body,
) -> Result<History, sqlx::Error> {
let id = Id::generate();
let channel_id = channel.id();
let message = sqlx::query!(
r#"
- insert into message
- (id, channel, sender, sent_at, sent_sequence, body)
- values ($1, $2, $3, $4, $5, $6)
- returning
- id as "id: Id",
+ insert into message
+ (id, channel, sender, sent_at, sent_sequence, body)
+ values ($1, $2, $3, $4, $5, $6)
+ returning
+ id as "id: Id",
channel as "channel: channel::Id",
sender as "sender: login::Id",
sent_at as "sent_at: DateTime",
sent_sequence as "sent_sequence: Sequence",
- body
- "#,
+ body as "body: Body"
+ "#,
id,
channel_id,
sender.id,
@@ -53,14 +53,12 @@ impl<'c> Messages<'c> {
)
.map(|row| History {
message: Message {
- sent: Instant {
- at: row.sent_at,
- sequence: row.sent_sequence,
- },
+ sent: Instant::new(row.sent_at, row.sent_sequence),
channel: row.channel,
sender: row.sender,
id: row.id,
- body: row.body,
+ body: row.body.unwrap_or_default(),
+ deleted_at: None,
},
deleted: None,
})
@@ -70,41 +68,37 @@ impl<'c> Messages<'c> {
Ok(message)
}
- pub async fn in_channel(
- &mut self,
- channel: &channel::History,
- resume_at: ResumePoint,
- ) -> Result<Vec<History>, sqlx::Error> {
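+ // All messages in the channel that have not been deleted, i.e. rows with no
+ // matching tombstone in message_deleted.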
+ pub async fn live(&mut self, channel: &channel::History) -> Result<Vec<History>, sqlx::Error> {
let channel_id = channel.id();
let messages = sqlx::query!(
r#"
select
- channel as "channel: channel::Id",
- sender as "sender: login::Id",
+ message.channel as "channel: channel::Id",
+ message.sender as "sender: login::Id",
id as "id: Id",
- body,
- sent_at as "sent_at: DateTime",
- sent_sequence as "sent_sequence: Sequence"
+ message.body as "body: Body",
+ message.sent_at as "sent_at: DateTime",
+ message.sent_sequence as "sent_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from message
- where channel = $1
- and coalesce(sent_sequence <= $2, true)
- order by sent_sequence
+ left join message_deleted as deleted
+ using (id)
+ where message.channel = $1
+ and deleted.id is null
"#,
channel_id,
- resume_at,
)
.map(|row| History {
message: Message {
- sent: Instant {
- at: row.sent_at,
- sequence: row.sent_sequence,
- },
+ sent: Instant::new(row.sent_at, row.sent_sequence),
channel: row.channel,
sender: row.sender,
id: row.id,
- body: row.body,
+ body: row.body.unwrap_or_default(),
+ deleted_at: row.deleted_at,
},
- deleted: None,
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
})
.fetch_all(&mut *self.0)
.await?;
@@ -116,30 +110,32 @@ impl<'c> Messages<'c> {
let messages = sqlx::query!(
r#"
select
- channel as "channel: channel::Id",
- sender as "sender: login::Id",
+ message.channel as "channel: channel::Id",
+ message.sender as "sender: login::Id",
id as "id: Id",
- body,
- sent_at as "sent_at: DateTime",
- sent_sequence as "sent_sequence: Sequence"
+ message.body as "body: Body",
+ message.sent_at as "sent_at: DateTime",
+ message.sent_sequence as "sent_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from message
- where coalesce(sent_sequence <= $2, true)
- order by sent_sequence
+ left join message_deleted as deleted
+ using (id)
+ where coalesce(message.sent_sequence <= $2, true)
+ order by message.sent_sequence
"#,
resume_at,
)
.map(|row| History {
message: Message {
- sent: Instant {
- at: row.sent_at,
- sequence: row.sent_sequence,
- },
+ sent: Instant::new(row.sent_at, row.sent_sequence),
channel: row.channel,
sender: row.sender,
id: row.id,
- body: row.body,
+ body: row.body.unwrap_or_default(),
+ deleted_at: row.deleted_at,
},
- deleted: None,
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
})
.fetch_all(&mut *self.0)
.await?;
@@ -147,33 +143,35 @@ impl<'c> Messages<'c> {
Ok(messages)
}
- async fn by_id(&mut self, message: &Id) -> Result<History, sqlx::Error> {
+ pub async fn by_id(&mut self, message: &Id) -> Result<History, sqlx::Error> {
let message = sqlx::query!(
r#"
select
- channel as "channel: channel::Id",
- sender as "sender: login::Id",
+ message.channel as "channel: channel::Id",
+ message.sender as "sender: login::Id",
id as "id: Id",
- body,
- sent_at as "sent_at: DateTime",
- sent_sequence as "sent_sequence: Sequence"
+ message.body as "body: Body",
+ message.sent_at as "sent_at: DateTime",
+ message.sent_sequence as "sent_sequence: Sequence",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from message
+ left join message_deleted as deleted
+ using (id)
where id = $1
"#,
message,
)
.map(|row| History {
message: Message {
- sent: Instant {
- at: row.sent_at,
- sequence: row.sent_sequence,
- },
+ sent: Instant::new(row.sent_at, row.sent_sequence),
channel: row.channel,
sender: row.sender,
id: row.id,
- body: row.body,
+ body: row.body.unwrap_or_default(),
+ deleted_at: row.deleted_at,
},
- deleted: None,
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
})
.fetch_one(&mut *self.0)
.await?;
@@ -183,39 +181,101 @@ impl<'c> Messages<'c> {
pub async fn delete(
&mut self,
- message: &Id,
+ message: &History,
deleted: &Instant,
) -> Result<History, sqlx::Error> {
- let history = self.by_id(message).await?;
+ let id = message.id();
- sqlx::query_scalar!(
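+ // Record a deletion tombstone; the message row itself stays behind until it is
+ // purged.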
+ sqlx::query!(
r#"
- delete from message
- where
- id = $1
- returning 1 as "deleted: i64"
+ insert into message_deleted (id, deleted_at, deleted_sequence)
+ values ($1, $2, $3)
"#,
- history.message.id,
+ id,
+ deleted.at,
+ deleted.sequence,
)
- .fetch_one(&mut *self.0)
+ .execute(&mut *self.0)
.await?;
- Ok(History {
- deleted: Some(*deleted),
- ..history
- })
+ // Small social-responsibility hack: when a message is deleted, its body is
+ // retconned to have been the empty string. Someone reading the event stream
+ // afterwards, or looking at messages in the channel, cannot recover the deleted
+ // body simply by ignoring the deletion event.
+ sqlx::query!(
+ r#"
+ update message
+ set body = ''
+ where id = $1
+ "#,
+ id,
+ )
+ .execute(&mut *self.0)
+ .await?;
+
+ let message = self.by_id(id).await?;
+
+ Ok(message)
}
- pub async fn expired(&mut self, expire_at: &DateTime) -> Result<Vec<Id>, sqlx::Error> {
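+ // Permanently remove messages whose deletion tombstone is older than `purge_at`:
+ // drop the tombstones first, then the message rows they pointed at.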
+ pub async fn purge(&mut self, purge_at: &DateTime) -> Result<(), sqlx::Error> {
let messages = sqlx::query_scalar!(
r#"
+ delete from message_deleted
+ where deleted_at < $1
+ returning id as "id: Id"
+ "#,
+ purge_at,
+ )
+ .fetch_all(&mut *self.0)
+ .await?;
+
+ for message in messages {
+ sqlx::query!(
+ r#"
+ delete from message
+ where id = $1
+ "#,
+ message,
+ )
+ .execute(&mut *self.0)
+ .await?;
+ }
+
+ Ok(())
+ }
+
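+ // Live (not-yet-deleted) messages sent before `expire_at`, returned as full
+ // histories rather than bare ids.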
+ pub async fn expired(&mut self, expire_at: &DateTime) -> Result<Vec<History>, sqlx::Error> {
+ let messages = sqlx::query!(
+ r#"
select
- id as "message: Id"
+ id as "id: Id",
+ message.channel as "channel: channel::Id",
+ message.sender as "sender: login::Id",
+ message.sent_at as "sent_at: DateTime",
+ message.sent_sequence as "sent_sequence: Sequence",
+ message.body as "body: Body",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from message
- where sent_at < $1
+ left join message_deleted as deleted
+ using (id)
+ where message.sent_at < $1
+ and deleted.id is null
"#,
expire_at,
)
+ .map(|row| History {
+ message: Message {
+ sent: Instant::new(row.sent_at, row.sent_sequence),
+ id: row.id,
+ channel: row.channel,
+ sender: row.sender,
+ body: row.body.unwrap_or_default(),
+ deleted_at: row.deleted_at,
+ },
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
+ })
.fetch_all(&mut *self.0)
.await?;
@@ -226,29 +286,31 @@ impl<'c> Messages<'c> {
let messages = sqlx::query!(
r#"
select
- channel as "channel: channel::Id",
- sender as "sender: login::Id",
id as "id: Id",
- body,
- sent_at as "sent_at: DateTime",
- sent_sequence as "sent_sequence: Sequence"
+ message.channel as "channel: channel::Id",
+ message.sender as "sender: login::Id",
+ message.sent_at as "sent_at: DateTime",
+ message.sent_sequence as "sent_sequence: Sequence",
+ message.body as "body: Body",
+ deleted.deleted_at as "deleted_at?: DateTime",
+ deleted.deleted_sequence as "deleted_sequence?: Sequence"
from message
+ left join message_deleted as deleted
+ using (id)
where coalesce(message.sent_sequence > $1, true)
"#,
resume_at,
)
.map(|row| History {
message: Message {
- sent: Instant {
- at: row.sent_at,
- sequence: row.sent_sequence,
- },
+ sent: Instant::new(row.sent_at, row.sent_sequence),
channel: row.channel,
sender: row.sender,
id: row.id,
- body: row.body,
+ body: row.body.unwrap_or_default(),
+ deleted_at: row.deleted_at,
},
- deleted: None,
+ deleted: Instant::optional(row.deleted_at, row.deleted_sequence),
})
.fetch_all(&mut *self.0)
.await?;
diff --git a/src/message/routes.rs b/src/message/routes.rs
deleted file mode 100644
index e21c674..0000000
--- a/src/message/routes.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-use axum::{
- extract::{Path, State},
- http::StatusCode,
- response::{IntoResponse, Response},
- routing::delete,
- Router,
-};
-
-use crate::{
- app::App,
- clock::RequestedAt,
- error::{Internal, NotFound},
- login::Login,
- message::{self, app::DeleteError},
-};
-
-pub fn router() -> Router<App> {
- Router::new().route("/api/messages/:message", delete(on_delete))
-}
-
-async fn on_delete(
- State(app): State<App>,
- Path(message): Path<message::Id>,
- RequestedAt(deleted_at): RequestedAt,
- _: Login,
-) -> Result<StatusCode, ErrorResponse> {
- app.messages().delete(&message, &deleted_at).await?;
-
- Ok(StatusCode::ACCEPTED)
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error(transparent)]
-struct ErrorResponse(#[from] DeleteError);
-
-impl IntoResponse for ErrorResponse {
- fn into_response(self) -> Response {
- let Self(error) = self;
- match error {
- not_found @ (DeleteError::ChannelNotFound(_) | DeleteError::NotFound(_)) => {
- NotFound(not_found).into_response()
- }
- other => Internal::from(other).into_response(),
- }
- }
-}
diff --git a/src/message/routes/message/mod.rs b/src/message/routes/message/mod.rs
new file mode 100644
index 0000000..545ad26
--- /dev/null
+++ b/src/message/routes/message/mod.rs
@@ -0,0 +1,46 @@
+#[cfg(test)]
+mod test;
+
+pub mod delete {
+ use axum::{
+ extract::{Path, State},
+ http::StatusCode,
+ response::{IntoResponse, Response},
+ };
+
+ use crate::{
+ app::App,
+ clock::RequestedAt,
+ error::{Internal, NotFound},
+ message::{self, app::DeleteError},
+ token::extract::Identity,
+ };
+
+ pub async fn handler(
+ State(app): State<App>,
+ Path(message): Path<message::Id>,
+ RequestedAt(deleted_at): RequestedAt,
+ _: Identity,
+ ) -> Result<StatusCode, Error> {
+ app.messages().delete(&message, &deleted_at).await?;
+
+ Ok(StatusCode::ACCEPTED)
+ }
+
+ #[derive(Debug, thiserror::Error)]
+ #[error(transparent)]
+ pub struct Error(#[from] pub DeleteError);
+
+ impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ let Self(error) = self;
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match error {
+ DeleteError::NotFound(_) | DeleteError::Deleted(_) => {
+ NotFound(error).into_response()
+ }
+ other => Internal::from(other).into_response(),
+ }
+ }
+ }
+}
diff --git a/src/message/routes/message/test.rs b/src/message/routes/message/test.rs
new file mode 100644
index 0000000..2016fb8
--- /dev/null
+++ b/src/message/routes/message/test.rs
@@ -0,0 +1,160 @@
+use axum::{
+ extract::{Path, State},
+ http::StatusCode,
+};
+
+use super::delete;
+use crate::{message::app, test::fixtures};
+
+#[tokio::test]
+pub async fn delete_message() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+ let message = fixtures::message::send(&app, &channel, &sender, &fixtures::now()).await;
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let response = delete::handler(
+ State(app.clone()),
+ Path(message.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect("deleting a valid message succeeds");
+
+ // Verify the response
+
+ assert_eq!(response, StatusCode::ACCEPTED);
+
+ // Verify the semantics
+
+ let snapshot = app.boot().snapshot().await.expect("boot always succeeds");
+ assert!(!snapshot.messages.contains(&message));
+}
+
+#[tokio::test]
+pub async fn delete_invalid_message_id() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let message = fixtures::message::fictitious();
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(message.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting a nonexistent message fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::DeleteError::NotFound(id) if id == message));
+}
+
+#[tokio::test]
+pub async fn delete_deleted() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::now()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::now()).await;
+ let message = fixtures::message::send(&app, &channel, &sender, &fixtures::now()).await;
+
+ app.messages()
+ .delete(&message.id, &fixtures::now())
+ .await
+ .expect("deleting a recently-sent message succeeds");
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(message.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting a deleted message fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::DeleteError::Deleted(id) if id == message.id));
+}
+
+#[tokio::test]
+pub async fn delete_expired() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::ancient()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::ancient()).await;
+ let message = fixtures::message::send(&app, &channel, &sender, &fixtures::ancient()).await;
+
+ app.messages()
+ .expire(&fixtures::now())
+ .await
+ .expect("expiring messages always succeeds");
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(message.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting an expired message fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::DeleteError::Deleted(id) if id == message.id));
+}
+
+#[tokio::test]
+pub async fn delete_purged() {
+ // Set up the environment
+
+ let app = fixtures::scratch_app().await;
+ let sender = fixtures::login::create(&app, &fixtures::ancient()).await;
+ let channel = fixtures::channel::create(&app, &fixtures::ancient()).await;
+ let message = fixtures::message::send(&app, &channel, &sender, &fixtures::ancient()).await;
+
+ app.messages()
+ .expire(&fixtures::old())
+ .await
+ .expect("expiring messages always succeeds");
+
+ app.messages()
+ .purge(&fixtures::now())
+ .await
+ .expect("purging messages always succeeds");
+
+ // Send the request
+
+ let deleter = fixtures::identity::create(&app, &fixtures::now()).await;
+ let delete::Error(error) = delete::handler(
+ State(app.clone()),
+ Path(message.id.clone()),
+ fixtures::now(),
+ deleter,
+ )
+ .await
+ .expect_err("deleting a purged message fails");
+
+ // Verify the response
+
+ assert!(matches!(error, app::DeleteError::NotFound(id) if id == message.id));
+}
diff --git a/src/message/routes/mod.rs b/src/message/routes/mod.rs
new file mode 100644
index 0000000..dfe8628
--- /dev/null
+++ b/src/message/routes/mod.rs
@@ -0,0 +1,9 @@
+use axum::{routing::delete, Router};
+
+use crate::app::App;
+
+mod message;
+
+pub fn router() -> Router<App> {
+ Router::new().route("/api/messages/:message", delete(message::delete::handler))
+}
diff --git a/src/message/snapshot.rs b/src/message/snapshot.rs
index 0eb37bb..53b7176 100644
--- a/src/message/snapshot.rs
+++ b/src/message/snapshot.rs
@@ -1,8 +1,8 @@
use super::{
event::{Event, Sent},
- Id,
+ Body, Id,
};
-use crate::{channel, event::Instant, login};
+use crate::{channel, clock::DateTime, event::Instant, login};
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize)]
pub struct Message {
@@ -11,7 +11,9 @@ pub struct Message {
pub channel: channel::Id,
pub sender: login::Id,
pub id: Id,
- pub body: String,
+ pub body: Body,
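+ // Present once the message has been deleted; omitted from serialized output for
+ // live messages.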
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub deleted_at: Option<DateTime>,
}
impl Message {
diff --git a/src/name.rs b/src/name.rs
new file mode 100644
index 0000000..9187d33
--- /dev/null
+++ b/src/name.rs
@@ -0,0 +1,85 @@
+use std::fmt;
+
+use crate::normalize::{ident, nfc};
+
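+// A user-facing name stored in two forms: `display`, the NFC-normalized text as
+// entered, and `canonical`, its case-folded NFKC form used when looking rows up by
+// name (e.g. "Alice" and "alice" share the canonical form "alice").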
+#[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize, sqlx::Type)]
+#[serde(from = "String", into = "String")]
+pub struct Name {
+ display: nfc::String,
+ canonical: ident::String,
+}
+
+impl Name {
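+ // Rebuild a Name from a stored (display, canonical) pair, verifying that the
+ // stored canonical form still matches the one computed from the display form.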
+ pub fn new<D, C>(display: D, canonical: C) -> Result<Self, Error>
+ where
+ D: AsRef<str>,
+ C: AsRef<str>,
+ {
+ let name = Self::from(display);
+
+ if name.canonical.as_str() == canonical.as_ref() {
+ Ok(name)
+ } else {
+ Err(Error::CanonicalMismatch(
+ canonical.as_ref().into(),
+ name.canonical,
+ name.display,
+ ))
+ }
+ }
+
+ pub fn optional<D, C>(display: Option<D>, canonical: Option<C>) -> Result<Option<Self>, Error>
+ where
+ D: AsRef<str>,
+ C: AsRef<str>,
+ {
+ display
+ .zip(canonical)
+ .map(|(display, canonical)| Self::new(display, canonical))
+ .transpose()
+ }
+
+ pub fn display(&self) -> &nfc::String {
+ &self.display
+ }
+
+ pub fn canonical(&self) -> &ident::String {
+ &self.canonical
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ #[error("stored canonical form {0:#?} does not match computed canonical form {:#?} for name {:#?}", .1.as_str(), .2.as_str())]
+ CanonicalMismatch(String, ident::String, nfc::String),
+}
+
+impl Default for Name {
+ fn default() -> Self {
+ Self::from(String::default())
+ }
+}
+
+impl fmt::Display for Name {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.display.fmt(f)
+ }
+}
+
+impl<S> From<S> for Name
+where
+ S: AsRef<str>,
+{
+ fn from(name: S) -> Self {
+ let display = nfc::String::from(&name);
+ let canonical = ident::String::from(&name);
+
+ Self { display, canonical }
+ }
+}
+
+impl From<Name> for String {
+ fn from(name: Name) -> Self {
+ name.display.into()
+ }
+}
diff --git a/src/normalize/mod.rs b/src/normalize/mod.rs
new file mode 100644
index 0000000..6294201
--- /dev/null
+++ b/src/normalize/mod.rs
@@ -0,0 +1,36 @@
+mod string;
+
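+// Display-oriented normalization: Unicode NFC, preserving case and compatibility
+// distinctions.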
+pub mod nfc {
+ use std::string::String as StdString;
+
+ use unicode_normalization::UnicodeNormalization as _;
+
+ pub type String = super::string::String<Nfc>;
+
+ #[derive(Clone, Debug, Default, Eq, PartialEq)]
+ pub struct Nfc;
+
+ impl super::string::Normalize for Nfc {
+ fn normalize(&self, value: &str) -> StdString {
+ value.nfc().collect()
+ }
+ }
+}
+
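+// Identifier-oriented normalization: case folding followed by NFKC, so that strings
+// differing only in case or compatibility form (e.g. the "ﬁ" ligature vs "fi")
+// compare equal.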
+pub mod ident {
+ use std::string::String as StdString;
+
+ use unicode_casefold::UnicodeCaseFold as _;
+ use unicode_normalization::UnicodeNormalization as _;
+
+ pub type String = super::string::String<Ident>;
+
+ #[derive(Clone, Debug, Default, Eq, PartialEq)]
+ pub struct Ident;
+
+ impl super::string::Normalize for Ident {
+ fn normalize(&self, value: &str) -> StdString {
+ value.case_fold().nfkc().collect()
+ }
+ }
+}
diff --git a/src/normalize/string.rs b/src/normalize/string.rs
new file mode 100644
index 0000000..a0d178c
--- /dev/null
+++ b/src/normalize/string.rs
@@ -0,0 +1,112 @@
+use std::{fmt, string::String as StdString};
+
+use sqlx::{
+ encode::{Encode, IsNull},
+ Database, Decode, Type,
+};
+
+pub trait Normalize: Clone + Default {
+ fn normalize(&self, value: &str) -> StdString;
+}
+
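+// A string that is always in the normal form chosen by `N`: normalization is
+// applied when constructing from any `&str`/`String` and again when decoding a
+// value from the database.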
+#[derive(Clone, Debug, Default, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
+#[serde(into = "StdString", from = "StdString")]
+#[serde(bound = "N: Normalize")]
+pub struct String<N>(StdString, N);
+
+impl<N> fmt::Display for String<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let Self(value, _) = self;
+ value.fmt(f)
+ }
+}
+
+impl<S, N> From<S> for String<N>
+where
+ S: AsRef<str>,
+ N: Normalize,
+{
+ fn from(value: S) -> Self {
+ let normalizer = N::default();
+ let value = normalizer.normalize(value.as_ref());
+
+ Self(value, normalizer)
+ }
+}
+
+impl<N> From<String<N>> for StdString {
+ fn from(value: String<N>) -> Self {
+ let String(value, _) = value;
+ value
+ }
+}
+
+impl<N> std::ops::Deref for String<N> {
+ type Target = StdString;
+
+ fn deref(&self) -> &Self::Target {
+ let Self(value, _) = self;
+ value
+ }
+}
+
+// Type is manually implemented so that we can implement Decode to do
+// normalization on read. Implementation is otherwise based on
+// `#[derive(sqlx::Type)]` with the `#[sqlx(transparent)]` attribute.
+impl<DB, N> Type<DB> for String<N>
+where
+ DB: Database,
+ StdString: Type<DB>,
+{
+ fn type_info() -> <DB as Database>::TypeInfo {
+ <StdString as Type<DB>>::type_info()
+ }
+
+ fn compatible(ty: &<DB as Database>::TypeInfo) -> bool {
+ <StdString as Type<DB>>::compatible(ty)
+ }
+}
+
+impl<'r, DB, N> Decode<'r, DB> for String<N>
+where
+ DB: Database,
+ StdString: Decode<'r, DB>,
+ N: Normalize,
+{
+ fn decode(value: <DB as Database>::ValueRef<'r>) -> Result<Self, sqlx::error::BoxDynError> {
+ let value = StdString::decode(value)?;
+ Ok(Self::from(value))
+ }
+}
+
+impl<'q, DB, N> Encode<'q, DB> for String<N>
+where
+ DB: Database,
+ StdString: Encode<'q, DB>,
+{
+ fn encode_by_ref(
+ &self,
+ buf: &mut <DB as Database>::ArgumentBuffer<'q>,
+ ) -> Result<IsNull, sqlx::error::BoxDynError> {
+ let Self(value, _) = self;
+ value.encode_by_ref(buf)
+ }
+
+ fn encode(
+ self,
+ buf: &mut <DB as Database>::ArgumentBuffer<'q>,
+ ) -> Result<IsNull, sqlx::error::BoxDynError> {
+ let Self(value, _) = self;
+ value.encode(buf)
+ }
+
+ fn produces(&self) -> Option<<DB as Database>::TypeInfo> {
+ let Self(value, _) = self;
+ value.produces()
+ }
+
+ fn size_hint(&self) -> usize {
+ let Self(value, _) = self;
+ value.size_hint()
+ }
+}
diff --git a/src/setup/app.rs b/src/setup/app.rs
index 24e0010..030b5f6 100644
--- a/src/setup/app.rs
+++ b/src/setup/app.rs
@@ -4,7 +4,8 @@ use super::repo::Provider as _;
use crate::{
clock::DateTime,
event::{repo::Provider as _, Broadcaster, Event},
- login::{repo::Provider as _, Password},
+ login::{repo::Provider as _, Login, Password},
+ name::Name,
token::{repo::Provider as _, Secret},
};
@@ -20,10 +21,10 @@ impl<'a> Setup<'a> {
pub async fn initial(
&self,
- name: &str,
+ name: &Name,
password: &Password,
created_at: &DateTime,
- ) -> Result<Secret, Error> {
+ ) -> Result<(Login, Secret), Error> {
let password_hash = password.hash()?;
let mut tx = self.db.begin().await?;
@@ -39,7 +40,7 @@ impl<'a> Setup<'a> {
self.events
.broadcast(login.events().map(Event::from).collect::<Vec<_>>());
- Ok(secret)
+ Ok((login.as_created(), secret))
}
pub async fn completed(&self) -> Result<bool, sqlx::Error> {
diff --git a/src/setup/routes/mod.rs b/src/setup/routes/mod.rs
new file mode 100644
index 0000000..e1e1711
--- /dev/null
+++ b/src/setup/routes/mod.rs
@@ -0,0 +1,9 @@
+use axum::{routing::post, Router};
+
+use crate::app::App;
+
+mod post;
+
+pub fn router() -> Router<App> {
+ Router::new().route("/api/setup", post(post::handler))
+}
diff --git a/src/setup/routes.rs b/src/setup/routes/post.rs
index ff41734..f7b256e 100644
--- a/src/setup/routes.rs
+++ b/src/setup/routes/post.rs
@@ -2,44 +2,43 @@ use axum::{
extract::{Json, State},
http::StatusCode,
response::{IntoResponse, Response},
- routing::post,
- Router,
};
-use super::app;
use crate::{
- app::App, clock::RequestedAt, error::Internal, login::Password, token::extract::IdentityToken,
+ app::App,
+ clock::RequestedAt,
+ error::Internal,
+ login::{Login, Password},
+ name::Name,
+ setup::app,
+ token::extract::IdentityCookie,
};
-pub fn router() -> Router<App> {
- Router::new().route("/api/setup", post(on_setup))
-}
-
-#[derive(serde::Deserialize)]
-struct SetupRequest {
- name: String,
- password: Password,
-}
-
-async fn on_setup(
+pub async fn handler(
State(app): State<App>,
RequestedAt(setup_at): RequestedAt,
- identity: IdentityToken,
- Json(request): Json<SetupRequest>,
-) -> Result<(IdentityToken, StatusCode), SetupError> {
- let secret = app
+ identity: IdentityCookie,
+ Json(request): Json<Request>,
+) -> Result<(IdentityCookie, Json<Login>), Error> {
+ let (login, secret) = app
.setup()
.initial(&request.name, &request.password, &setup_at)
.await
- .map_err(SetupError)?;
+ .map_err(Error)?;
let identity = identity.set(secret);
- Ok((identity, StatusCode::NO_CONTENT))
+ Ok((identity, Json(login)))
+}
+
+#[derive(serde::Deserialize)]
+pub struct Request {
+ pub name: Name,
+ pub password: Password,
}
#[derive(Debug)]
-struct SetupError(app::Error);
+pub struct Error(pub app::Error);
-impl IntoResponse for SetupError {
+impl IntoResponse for Error {
fn into_response(self) -> Response {
let Self(error) = self;
match error {
diff --git a/src/test/fixtures/channel.rs b/src/test/fixtures/channel.rs
index b678717..8cb38ae 100644
--- a/src/test/fixtures/channel.rs
+++ b/src/test/fixtures/channel.rs
@@ -1,10 +1,18 @@
+use std::future;
+
use faker_rand::{
en_us::{addresses::CityName, names::FullName},
faker_impl_from_templates,
};
use rand;
-use crate::{app::App, channel::Channel, clock::RequestedAt};
+use crate::{
+ app::App,
+ channel::{self, Channel},
+ clock::RequestedAt,
+ event::Event,
+ name::Name,
+};
pub async fn create(app: &App, created_at: &RequestedAt) -> Channel {
let name = propose();
@@ -14,11 +22,29 @@ pub async fn create(app: &App, created_at: &RequestedAt) -> Channel {
.expect("should always succeed if the channel is actually new")
}
-pub fn propose() -> String {
- rand::random::<Name>().to_string()
+pub fn propose() -> Name {
+ rand::random::<NameTemplate>().to_string().into()
}
-struct Name(String);
+struct NameTemplate(String);
faker_impl_from_templates! {
- Name; "{} {}", CityName, FullName;
+ NameTemplate; "{} {}", CityName, FullName;
+}
+
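+// Stream adapters for tests: `events` keeps only channel events from the
+// application event stream, and `created` keeps only creation events. Both return
+// ready futures so they slot into `filter_map`-style stream combinators.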
+pub fn events(event: Event) -> future::Ready<Option<channel::Event>> {
+ future::ready(match event {
+ Event::Channel(channel) => Some(channel),
+ _ => None,
+ })
+}
+
+pub fn created(event: channel::Event) -> future::Ready<Option<channel::event::Created>> {
+ future::ready(match event {
+ channel::Event::Created(event) => Some(event),
+ channel::Event::Deleted(_) => None,
+ })
+}
+
+pub fn fictitious() -> channel::Id {
+ channel::Id::generate()
}
diff --git a/src/test/fixtures/cookie.rs b/src/test/fixtures/cookie.rs
new file mode 100644
index 0000000..58777c8
--- /dev/null
+++ b/src/test/fixtures/cookie.rs
@@ -0,0 +1,37 @@
+use uuid::Uuid;
+
+use crate::{
+ app::App,
+ clock::RequestedAt,
+ login::Password,
+ name::Name,
+ token::{extract::IdentityCookie, Secret},
+};
+
+pub fn not_logged_in() -> IdentityCookie {
+ IdentityCookie::new()
+}
+
+pub async fn logged_in(
+ app: &App,
+ credentials: &(Name, Password),
+ now: &RequestedAt,
+) -> IdentityCookie {
+ let (name, password) = credentials;
+ let (_, token) = app
+ .tokens()
+ .login(name, password, now)
+ .await
+ .expect("should succeed given known-valid credentials");
+
+ IdentityCookie::new().set(token)
+}
+
+pub fn secret(identity: &IdentityCookie) -> Secret {
+ identity.secret().expect("identity contained a secret")
+}
+
+pub fn fictitious() -> IdentityCookie {
+ let token = Uuid::new_v4().to_string();
+ IdentityCookie::new().set(token)
+}
diff --git a/src/test/fixtures/event.rs b/src/test/fixtures/event.rs
index 7fe2bf3..fa4fbc0 100644
--- a/src/test/fixtures/event.rs
+++ b/src/test/fixtures/event.rs
@@ -1,11 +1,8 @@
-use crate::{
- event::Event,
- message::{Event::Sent, Message},
-};
+use crate::message::{Event, Message};
pub fn message_sent(event: &Event, message: &Message) -> bool {
matches!(
&event,
- Event::Message(Sent(event)) if message == &event.into()
+ Event::Sent(event) if message == &event.into()
)
}
diff --git a/src/test/fixtures/filter.rs b/src/test/fixtures/filter.rs
deleted file mode 100644
index 84d27b0..0000000
--- a/src/test/fixtures/filter.rs
+++ /dev/null
@@ -1,11 +0,0 @@
-use futures::future;
-
-use crate::{channel::Event::Created, event::Event, message::Event::Sent};
-
-pub fn messages() -> impl FnMut(&Event) -> future::Ready<bool> {
- |event| future::ready(matches!(event, Event::Message(Sent(_))))
-}
-
-pub fn created() -> impl FnMut(&Event) -> future::Ready<bool> {
- |event| future::ready(matches!(event, Event::Channel(Created(_))))
-}
diff --git a/src/test/fixtures/identity.rs b/src/test/fixtures/identity.rs
index 56b4ffa..e438f2b 100644
--- a/src/test/fixtures/identity.rs
+++ b/src/test/fixtures/identity.rs
@@ -1,31 +1,21 @@
-use uuid::Uuid;
-
use crate::{
app::App,
clock::RequestedAt,
login::Password,
+ name::Name,
+ test::fixtures,
token::{
- extract::{Identity, IdentityToken},
- Secret,
+ self,
+ extract::{Identity, IdentityCookie},
},
};
-pub fn not_logged_in() -> IdentityToken {
- IdentityToken::new()
-}
-
-pub async fn logged_in(app: &App, login: &(String, Password), now: &RequestedAt) -> IdentityToken {
- let (name, password) = login;
- let token = app
- .tokens()
- .login(name, password, now)
- .await
- .expect("should succeed given known-valid credentials");
-
- IdentityToken::new().set(token)
+pub async fn create(app: &App, created_at: &RequestedAt) -> Identity {
+ let credentials = fixtures::login::create_with_password(app, created_at).await;
+ logged_in(app, &credentials, created_at).await
}
-pub async fn from_token(app: &App, token: &IdentityToken, issued_at: &RequestedAt) -> Identity {
+pub async fn from_cookie(app: &App, token: &IdentityCookie, issued_at: &RequestedAt) -> Identity {
let secret = token.secret().expect("identity token has a secret");
let (token, login) = app
.tokens()
@@ -36,16 +26,18 @@ pub async fn from_token(app: &App, token: &IdentityToken, issued_at: &RequestedA
Identity { token, login }
}
-pub async fn identity(app: &App, login: &(String, Password), issued_at: &RequestedAt) -> Identity {
- let secret = logged_in(app, login, issued_at).await;
- from_token(app, &secret, issued_at).await
+pub async fn logged_in(
+ app: &App,
+ credentials: &(Name, Password),
+ issued_at: &RequestedAt,
+) -> Identity {
+ let secret = fixtures::cookie::logged_in(app, credentials, issued_at).await;
+ from_cookie(app, &secret, issued_at).await
}
-pub fn secret(identity: &IdentityToken) -> Secret {
- identity.secret().expect("identity contained a secret")
-}
+pub fn fictitious() -> Identity {
+ let token = token::Id::generate();
+ let login = fixtures::login::fictitious();
-pub fn fictitious() -> IdentityToken {
- let token = Uuid::new_v4().to_string();
- IdentityToken::new().set(token)
+ Identity { token, login }
}
diff --git a/src/test/fixtures/login.rs b/src/test/fixtures/login.rs
index e5ac716..e308289 100644
--- a/src/test/fixtures/login.rs
+++ b/src/test/fixtures/login.rs
@@ -5,16 +5,18 @@ use crate::{
app::App,
clock::RequestedAt,
login::{self, Login, Password},
+ name::Name,
};
-pub async fn create_with_password(app: &App, created_at: &RequestedAt) -> (String, Password) {
+pub async fn create_with_password(app: &App, created_at: &RequestedAt) -> (Name, Password) {
let (name, password) = propose();
- app.logins()
+ let login = app
+ .logins()
.create(&name, &password, created_at)
.await
.expect("should always succeed if the login is actually new");
- (name, password)
+ (login.name, password)
}
pub async fn create(app: &App, created_at: &RequestedAt) -> Login {
@@ -28,16 +30,16 @@ pub async fn create(app: &App, created_at: &RequestedAt) -> Login {
pub fn fictitious() -> Login {
Login {
id: login::Id::generate(),
- name: name(),
+ name: propose_name(),
}
}
-pub fn propose() -> (String, Password) {
- (name(), propose_password())
+pub fn propose() -> (Name, Password) {
+ (propose_name(), propose_password())
}
-fn name() -> String {
- rand::random::<internet::Username>().to_string()
+fn propose_name() -> Name {
+ rand::random::<internet::Username>().to_string().into()
}
pub fn propose_password() -> Password {
diff --git a/src/test/fixtures/message.rs b/src/test/fixtures/message.rs
index 381b10b..3aebdd9 100644
--- a/src/test/fixtures/message.rs
+++ b/src/test/fixtures/message.rs
@@ -1,16 +1,36 @@
+use std::future;
+
use faker_rand::lorem::Paragraphs;
-use crate::{app::App, channel::Channel, clock::RequestedAt, login::Login, message::Message};
+use crate::{
+ app::App,
+ channel::Channel,
+ clock::RequestedAt,
+ event::Event,
+ login::Login,
+ message::{self, Body, Message},
+};
-pub async fn send(app: &App, channel: &Channel, login: &Login, sent_at: &RequestedAt) -> Message {
+pub async fn send(app: &App, channel: &Channel, sender: &Login, sent_at: &RequestedAt) -> Message {
let body = propose();
app.messages()
- .send(&channel.id, login, sent_at, &body)
+ .send(&channel.id, sender, sent_at, &body)
.await
.expect("should succeed if the channel exists")
}
-pub fn propose() -> String {
- rand::random::<Paragraphs>().to_string()
+pub fn propose() -> Body {
+ rand::random::<Paragraphs>().to_string().into()
+}
+
+pub fn events(event: Event) -> future::Ready<Option<message::Event>> {
+ future::ready(match event {
+ Event::Message(event) => Some(event),
+ _ => None,
+ })
+}
+
+pub fn fictitious() -> message::Id {
+ message::Id::generate()
}
diff --git a/src/test/fixtures/mod.rs b/src/test/fixtures/mod.rs
index 41f7e13..9111811 100644
--- a/src/test/fixtures/mod.rs
+++ b/src/test/fixtures/mod.rs
@@ -3,8 +3,8 @@ use chrono::{TimeDelta, Utc};
use crate::{app::App, clock::RequestedAt, db};
pub mod channel;
+pub mod cookie;
pub mod event;
-pub mod filter;
pub mod future;
pub mod identity;
pub mod login;
@@ -21,6 +21,11 @@ pub fn now() -> RequestedAt {
Utc::now().into()
}
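+// Ninety-five days in the past: between `ancient()` and `now()`, for tests that
+// need an intermediate timestamp (for example, expiring messages before purging
+// them).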
+pub fn old() -> RequestedAt {
+ let timestamp = Utc::now() - TimeDelta::days(95);
+ timestamp.into()
+}
+
pub fn ancient() -> RequestedAt {
let timestamp = Utc::now() - TimeDelta::days(365);
timestamp.into()
diff --git a/src/token/app.rs b/src/token/app.rs
index 15fd858..c19d6a0 100644
--- a/src/token/app.rs
+++ b/src/token/app.rs
@@ -7,12 +7,14 @@ use futures::{
use sqlx::sqlite::SqlitePool;
use super::{
- repo::auth::Provider as _, repo::Provider as _, Broadcaster, Event as TokenEvent, Id, Secret,
+ repo::{self, auth::Provider as _, Provider as _},
+ Broadcaster, Event as TokenEvent, Id, Secret,
};
use crate::{
clock::DateTime,
db::NotFound as _,
login::{Login, Password},
+ name::{self, Name},
};
pub struct Tokens<'a> {
@@ -27,10 +29,10 @@ impl<'a> Tokens<'a> {
pub async fn login(
&self,
- name: &str,
+ name: &Name,
password: &Password,
login_at: &DateTime,
- ) -> Result<Secret, LoginError> {
+ ) -> Result<(Login, Secret), LoginError> {
let mut tx = self.db.begin().await?;
let (login, stored_hash) = tx
.auth()
@@ -45,6 +47,8 @@ impl<'a> Tokens<'a> {
// if the account is deleted during that time.
tx.commit().await?;
+ let snapshot = login.as_snapshot().ok_or(LoginError::Rejected)?;
+
let token = if stored_hash.verify(password)? {
let mut tx = self.db.begin().await?;
let token = tx.tokens().issue(&login, login_at).await?;
@@ -54,7 +58,7 @@ impl<'a> Tokens<'a> {
Err(LoginError::Rejected)?
};
- Ok(token)
+ Ok((snapshot, token))
}
pub async fn validate(
@@ -63,14 +67,16 @@ impl<'a> Tokens<'a> {
used_at: &DateTime,
) -> Result<(Id, Login), ValidateError> {
let mut tx = self.db.begin().await?;
- let login = tx
+ let (token, login) = tx
.tokens()
.validate(secret, used_at)
.await
.not_found(|| ValidateError::InvalidToken)?;
tx.commit().await?;
- Ok(login)
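+ // A token may outlive its login; if the login's history no longer yields a
+ // current snapshot, report the login as deleted rather than the token as invalid.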
+ let login = login.as_snapshot().ok_or(ValidateError::LoginDeleted)?;
+
+ Ok((token, login))
}
pub async fn limit_stream<E>(
@@ -158,17 +164,42 @@ pub enum LoginError {
#[error("invalid login")]
Rejected,
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Database(#[from] sqlx::Error),
+ #[error(transparent)]
+ Name(#[from] name::Error),
#[error(transparent)]
- PasswordHashError(#[from] password_hash::Error),
+ PasswordHash(#[from] password_hash::Error),
+}
+
+impl From<repo::auth::LoadError> for LoginError {
+ fn from(error: repo::auth::LoadError) -> Self {
+ use repo::auth::LoadError;
+ match error {
+ LoadError::Database(error) => error.into(),
+ LoadError::Name(error) => error.into(),
+ }
+ }
}
#[derive(Debug, thiserror::Error)]
pub enum ValidateError {
#[error("invalid token")]
InvalidToken,
+ #[error("login deleted")]
+ LoginDeleted,
+ #[error(transparent)]
+ Database(#[from] sqlx::Error),
#[error(transparent)]
- DatabaseError(#[from] sqlx::Error),
+ Name(#[from] name::Error),
+}
+
+impl From<repo::LoadError> for ValidateError {
+ fn from(error: repo::LoadError) -> Self {
+ match error {
+ repo::LoadError::Database(error) => error.into(),
+ repo::LoadError::Name(error) => error.into(),
+ }
+ }
}
#[derive(Debug)]
diff --git a/src/token/extract/identity_token.rs b/src/token/extract/cookie.rs
index a1e900e..af5787d 100644
--- a/src/token/extract/identity_token.rs
+++ b/src/token/extract/cookie.rs
@@ -12,19 +12,21 @@ use crate::token::Secret;
// The usage pattern here - receive the extractor as an argument, return it in
// the response - is heavily modelled after CookieJar's own intended usage.
#[derive(Clone)]
-pub struct IdentityToken {
+pub struct Identity {
cookies: CookieJar,
}
-impl fmt::Debug for IdentityToken {
+impl fmt::Debug for Identity {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("IdentityToken")
+ f.debug_struct("IdentityCookie")
.field("identity", &self.secret())
.finish()
}
}
-impl IdentityToken {
+impl Identity {
+ const COOKIE_NAME: &str = "identity";
+
// Creates a new, unpopulated identity token store.
#[cfg(test)]
pub fn new() -> Self {
@@ -40,7 +42,7 @@ impl IdentityToken {
// included.
pub fn secret(&self) -> Option<Secret> {
self.cookies
- .get(IDENTITY_COOKIE)
+ .get(Self::COOKIE_NAME)
.map(Cookie::value)
.map(Secret::from)
}
@@ -49,7 +51,7 @@ impl IdentityToken {
// back to the client when this extractor is included in a response.
pub fn set(self, secret: impl Into<Secret>) -> Self {
let secret = secret.into().reveal();
- let identity_cookie = Cookie::build((IDENTITY_COOKIE, secret))
+ let identity_cookie = Cookie::build((Self::COOKIE_NAME, secret))
.http_only(true)
.path("/")
.permanent()
@@ -64,15 +66,13 @@ impl IdentityToken {
// extractor is included in a response.
pub fn clear(self) -> Self {
Self {
- cookies: self.cookies.remove(IDENTITY_COOKIE),
+ cookies: self.cookies.remove(Self::COOKIE_NAME),
}
}
}
-const IDENTITY_COOKIE: &str = "identity";
-
#[async_trait::async_trait]
-impl<S> FromRequestParts<S> for IdentityToken
+impl<S> FromRequestParts<S> for Identity
where
S: Send + Sync,
{
@@ -84,7 +84,7 @@ where
}
}
-impl IntoResponseParts for IdentityToken {
+impl IntoResponseParts for Identity {
type Error = <CookieJar as IntoResponseParts>::Error;
fn into_response_parts(self, res: ResponseParts) -> Result<ResponseParts, Self::Error> {
diff --git a/src/token/extract/identity.rs b/src/token/extract/identity.rs
index 60ad220..a69f509 100644
--- a/src/token/extract/identity.rs
+++ b/src/token/extract/identity.rs
@@ -4,7 +4,7 @@ use axum::{
response::{IntoResponse, Response},
};
-use super::IdentityToken;
+use super::IdentityCookie;
use crate::{
app::App,
@@ -25,19 +25,10 @@ impl FromRequestParts<App> for Identity {
type Rejection = LoginError<Internal>;
async fn from_request_parts(parts: &mut Parts, state: &App) -> Result<Self, Self::Rejection> {
- // After Rust 1.82 (and #[feature(min_exhaustive_patterns)] lands on
- // stable), the following can be replaced:
- //
- // ```
- // let Ok(identity_token) = IdentityToken::from_request_parts(
- // parts,
- // state,
- // ).await;
- // ```
- let identity_token = IdentityToken::from_request_parts(parts, state).await?;
+ let Ok(cookie) = IdentityCookie::from_request_parts(parts, state).await;
let RequestedAt(used_at) = RequestedAt::from_request_parts(parts, state).await?;
- let secret = identity_token.secret().ok_or(LoginError::Unauthorized)?;
+ let secret = cookie.secret().ok_or(LoginError::Unauthorized)?;
let app = State::<App>::from_request_parts(parts, state).await?;
match app.tokens().validate(&secret, &used_at).await {
diff --git a/src/token/extract/mod.rs b/src/token/extract/mod.rs
index b4800ae..fc0f52b 100644
--- a/src/token/extract/mod.rs
+++ b/src/token/extract/mod.rs
@@ -1,4 +1,4 @@
+mod cookie;
mod identity;
-mod identity_token;
-pub use self::{identity::Identity, identity_token::IdentityToken};
+pub use self::{cookie::Identity as IdentityCookie, identity::Identity};
diff --git a/src/token/repo/auth.rs b/src/token/repo/auth.rs
index 9aee81f..bdc4c33 100644
--- a/src/token/repo/auth.rs
+++ b/src/token/repo/auth.rs
@@ -2,8 +2,10 @@ use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
use crate::{
clock::DateTime,
+ db::NotFound,
event::{Instant, Sequence},
login::{self, password::StoredHash, History, Login},
+ name::{self, Name},
};
pub trait Provider {
@@ -19,38 +21,53 @@ impl<'c> Provider for Transaction<'c, Sqlite> {
pub struct Auth<'t>(&'t mut SqliteConnection);
impl<'t> Auth<'t> {
- pub async fn for_name(&mut self, name: &str) -> Result<(History, StoredHash), sqlx::Error> {
- let found = sqlx::query!(
+ pub async fn for_name(&mut self, name: &Name) -> Result<(History, StoredHash), LoadError> {
+ let name = name.canonical();
+ let row = sqlx::query!(
r#"
- select
- id as "id: login::Id",
- name,
- password_hash as "password_hash: StoredHash",
+ select
+ id as "id: login::Id",
+ display_name as "display_name: String",
+ canonical_name as "canonical_name: String",
created_sequence as "created_sequence: Sequence",
- created_at as "created_at: DateTime"
- from login
- where name = $1
- "#,
+ created_at as "created_at: DateTime",
+ password_hash as "password_hash: StoredHash"
+ from login
+ where canonical_name = $1
+ "#,
name,
)
- .map(|row| {
- (
- History {
- login: Login {
- id: row.id,
- name: row.name,
- },
- created: Instant {
- at: row.created_at,
- sequence: row.created_sequence,
- },
- },
- row.password_hash,
- )
- })
.fetch_one(&mut *self.0)
.await?;
- Ok(found)
+ let login = History {
+ login: Login {
+ id: row.id,
+ name: Name::new(row.display_name, row.canonical_name)?,
+ },
+ created: Instant::new(row.created_at, row.created_sequence),
+ };
+
+ Ok((login, row.password_hash))
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum LoadError {
+ Database(#[from] sqlx::Error),
+ Name(#[from] name::Error),
+}
+
+impl<T> NotFound for Result<T, LoadError> {
+ type Ok = T;
+ type Error = LoadError;
+
+ fn optional(self) -> Result<Option<T>, LoadError> {
+ match self {
+ Ok(value) => Ok(Some(value)),
+ Err(LoadError::Database(sqlx::Error::RowNotFound)) => Ok(None),
+ Err(other) => Err(other),
+ }
}
}
diff --git a/src/token/repo/mod.rs b/src/token/repo/mod.rs
index 9169743..d8463eb 100644
--- a/src/token/repo/mod.rs
+++ b/src/token/repo/mod.rs
@@ -1,4 +1,4 @@
pub mod auth;
mod token;
-pub use self::token::Provider;
+pub use self::token::{LoadError, Provider};
diff --git a/src/token/repo/token.rs b/src/token/repo/token.rs
index c592dcd..35ea385 100644
--- a/src/token/repo/token.rs
+++ b/src/token/repo/token.rs
@@ -3,7 +3,10 @@ use uuid::Uuid;
use crate::{
clock::DateTime,
+ db::NotFound,
+ event::{Instant, Sequence},
login::{self, History, Login},
+ name::{self, Name},
token::{Id, Secret},
};
@@ -100,53 +103,78 @@ impl<'c> Tokens<'c> {
}
// Validate a token by its secret, retrieving the associated Login record.
- // Will return [None] if the token is not valid. The token's last-used
- // timestamp will be set to `used_at`.
+ // Will return an error if the token is not valid. If successful, the
+ // retrieved token's last-used timestamp will be set to `used_at`.
pub async fn validate(
&mut self,
secret: &Secret,
used_at: &DateTime,
- ) -> Result<(Id, Login), sqlx::Error> {
+ ) -> Result<(Id, History), LoadError> {
// I would use `update … returning` to do this in one query, but
// sqlite3, as of this writing, does not allow an update's `returning`
// clause to reference columns from tables joined into the update. Two
// queries is fine, but it feels untidy.
- sqlx::query!(
+ let (token, login) = sqlx::query!(
r#"
update token
set last_used_at = $1
where secret = $2
+ returning
+ id as "token: Id",
+ login as "login: login::Id"
"#,
used_at,
secret,
)
- .execute(&mut *self.0)
+ .map(|row| (row.token, row.login))
+ .fetch_one(&mut *self.0)
.await?;
let login = sqlx::query!(
r#"
select
- token.id as "token_id: Id",
- login.id as "login_id: login::Id",
- login.name as "login_name"
+ id as "id: login::Id",
+ display_name as "display_name: String",
+ canonical_name as "canonical_name: String",
+ created_sequence as "created_sequence: Sequence",
+ created_at as "created_at: DateTime"
from login
- join token on login.id = token.login
- where token.secret = $1
+ where id = $1
"#,
- secret,
+ login,
)
.map(|row| {
- (
- row.token_id,
- Login {
- id: row.login_id,
- name: row.login_name,
+ Ok::<_, name::Error>(History {
+ login: Login {
+ id: row.id,
+ name: Name::new(row.display_name, row.canonical_name)?,
},
- )
+ created: Instant::new(row.created_at, row.created_sequence),
+ })
})
.fetch_one(&mut *self.0)
- .await?;
+ .await??;
+
+ Ok((token, login))
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub enum LoadError {
+ Database(#[from] sqlx::Error),
+ Name(#[from] name::Error),
+}
+
+impl<T> NotFound for Result<T, LoadError> {
+ type Ok = T;
+ type Error = LoadError;
- Ok(login)
+ fn optional(self) -> Result<Option<T>, LoadError> {
+ match self {
+ Ok(value) => Ok(Some(value)),
+ Err(LoadError::Database(sqlx::Error::RowNotFound)) => Ok(None),
+ Err(other) => Err(other),
+ }
}
}
diff --git a/src/ui.rs b/src/ui.rs
deleted file mode 100644
index 91d0eb8..0000000
--- a/src/ui.rs
+++ /dev/null
@@ -1,134 +0,0 @@
-use axum::{
- extract::{Path, Request, State},
- http::{header, StatusCode},
- middleware::{self, Next},
- response::{IntoResponse, Redirect, Response},
- routing::get,
- Router,
-};
-use mime_guess::Mime;
-use rust_embed::EmbeddedFile;
-
-use crate::{app::App, channel, error::Internal, invite, login::Login};
-
-#[derive(rust_embed::Embed)]
-#[folder = "target/ui"]
-struct Assets;
-
-impl Assets {
- fn load(path: impl AsRef<str>) -> Result<Asset, NotFound<String>> {
- let path = path.as_ref();
- let mime = mime_guess::from_path(path).first_or_octet_stream();
-
- Self::get(path)
- .map(|file| Asset(mime, file))
- .ok_or(NotFound(format!("not found: {path}")))
- }
-
- fn index() -> Result<Asset, Internal> {
- // "not found" in this case really is an internal error, as it should
- // never happen. `index.html` is a known-valid path.
- Ok(Self::load("index.html")?)
- }
-}
-
-pub fn router(app: &App) -> Router<App> {
- [
- Router::new()
- .route("/*path", get(asset))
- .route("/setup", get(setup)),
- Router::new()
- .route("/", get(root))
- .route("/login", get(login))
- .route("/ch/:channel", get(channel))
- .route("/invite/:invite", get(invite))
- .route_layer(middleware::from_fn_with_state(app.clone(), setup_required)),
- ]
- .into_iter()
- .fold(Router::default(), Router::merge)
-}
-
-async fn asset(Path(path): Path<String>) -> Result<Asset, NotFound<String>> {
- Assets::load(path)
-}
-
-async fn root(login: Option<Login>) -> Result<impl IntoResponse, Internal> {
- if login.is_none() {
- Ok(Redirect::temporary("/login").into_response())
- } else {
- Ok(Assets::index()?.into_response())
- }
-}
-
-async fn login() -> Result<impl IntoResponse, Internal> {
- Assets::index()
-}
-
-async fn setup(State(app): State<App>) -> Result<impl IntoResponse, Internal> {
- if app.setup().completed().await? {
- Ok(Redirect::to("/login").into_response())
- } else {
- Ok(Assets::index().into_response())
- }
-}
-
-async fn channel(
- State(app): State<App>,
- login: Option<Login>,
- Path(channel): Path<channel::Id>,
-) -> Result<impl IntoResponse, Internal> {
- if login.is_none() {
- Ok(Redirect::temporary("/").into_response())
- } else if app.channels().get(&channel).await?.is_none() {
- Ok(NotFound(Assets::index()?).into_response())
- } else {
- Ok(Assets::index()?.into_response())
- }
-}
-
-async fn invite(
- State(app): State<App>,
- Path(invite): Path<invite::Id>,
-) -> Result<impl IntoResponse, Internal> {
- match app.invites().get(&invite).await {
- Ok(_) => Ok(Assets::index()?.into_response()),
- Err(invite::app::Error::NotFound(_)) => Ok(NotFound(Assets::index()?).into_response()),
- Err(other) => Err(Internal::from(other)),
- }
-}
-
-struct Asset(Mime, EmbeddedFile);
-
-impl IntoResponse for Asset {
- fn into_response(self) -> Response {
- let Self(mime, file) = self;
- (
- StatusCode::OK,
- [(header::CONTENT_TYPE, mime.as_ref())],
- file.data,
- )
- .into_response()
- }
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error("{0}")]
-struct NotFound<E>(pub E);
-
-impl<E> IntoResponse for NotFound<E>
-where
- E: IntoResponse,
-{
- fn into_response(self) -> Response {
- let Self(response) = self;
- (StatusCode::NOT_FOUND, response).into_response()
- }
-}
-
-pub async fn setup_required(State(app): State<App>, request: Request, next: Next) -> Response {
- match app.setup().completed().await {
- Ok(true) => next.run(request).await,
- Ok(false) => Redirect::to("/setup").into_response(),
- Err(error) => Internal::from(error).into_response(),
- }
-}
diff --git a/src/ui/assets.rs b/src/ui/assets.rs
new file mode 100644
index 0000000..6a7563a
--- /dev/null
+++ b/src/ui/assets.rs
@@ -0,0 +1,63 @@
+use ::mime::{FromStrError, Mime};
+use axum::{
+ http::{header, StatusCode},
+ response::{IntoResponse, Response},
+};
+use rust_embed::EmbeddedFile;
+
+use super::{error::NotFound, mime};
+use crate::error::Internal;
+
+#[derive(rust_embed::Embed)]
+#[folder = "target/ui"]
+pub struct Assets;
+
+impl Assets {
+ pub fn load(path: impl AsRef<str>) -> Result<Asset, Error> {
+ let path = path.as_ref();
+ let mime = mime::from_path(path)?;
+
+ Self::get(path)
+ .map(|file| Asset(mime, file))
+ .ok_or(Error::NotFound(path.into()))
+ }
+
+ pub fn index() -> Result<Asset, Internal> {
+ // "not found" in this case really is an internal error, as it should
+ // never happen. `index.html` is a known-valid path with a known-valid
+ // file extension.
+ Ok(Self::load("index.html")?)
+ }
+}
+
+pub struct Asset(Mime, EmbeddedFile);
+
+impl IntoResponse for Asset {
+ fn into_response(self) -> Response {
+ let Self(mime, file) = self;
+ (
+ StatusCode::OK,
+ [(header::CONTENT_TYPE, mime.as_ref())],
+ file.data,
+ )
+ .into_response()
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ #[error("not found: {0}")]
+ NotFound(String),
+ #[error(transparent)]
+ Mime(#[from] FromStrError),
+}
+
+impl IntoResponse for Error {
+ fn into_response(self) -> Response {
+ #[allow(clippy::match_wildcard_for_single_variants)]
+ match self {
+ Self::NotFound(_) => NotFound(self.to_string()).into_response(),
+ other => Internal::from(other).into_response(),
+ }
+ }
+}
diff --git a/src/ui/error.rs b/src/ui/error.rs
new file mode 100644
index 0000000..2dc627f
--- /dev/null
+++ b/src/ui/error.rs
@@ -0,0 +1,18 @@
+use axum::{
+ http::StatusCode,
+ response::{IntoResponse, Response},
+};
+
+#[derive(Debug, thiserror::Error)]
+#[error("{0}")]
+pub struct NotFound<E>(pub E);
+
+impl<E> IntoResponse for NotFound<E>
+where
+ E: IntoResponse,
+{
+ fn into_response(self) -> Response {
+ let Self(response) = self;
+ (StatusCode::NOT_FOUND, response).into_response()
+ }
+}
diff --git a/src/ui/middleware.rs b/src/ui/middleware.rs
new file mode 100644
index 0000000..f60ee1c
--- /dev/null
+++ b/src/ui/middleware.rs
@@ -0,0 +1,15 @@
+use axum::{
+ extract::{Request, State},
+ middleware::Next,
+ response::{IntoResponse, Redirect, Response},
+};
+
+use crate::{app::App, error::Internal};
+
+pub async fn setup_required(State(app): State<App>, request: Request, next: Next) -> Response {
+ match app.setup().completed().await {
+ Ok(true) => next.run(request).await,
+ Ok(false) => Redirect::to("/setup").into_response(),
+ Err(error) => Internal::from(error).into_response(),
+ }
+}
diff --git a/src/ui/mime.rs b/src/ui/mime.rs
new file mode 100644
index 0000000..9c724f0
--- /dev/null
+++ b/src/ui/mime.rs
@@ -0,0 +1,22 @@
+use mime::Mime;
+use unix_path::Path;
+
+// Extremely manual. Using `std::path` here would give platform-dependent behaviour
+// where none is appropriate: the paths handled here are URL paths, which always use
+// `/` and parse like URLs, so `unix_path` is used instead, and the extension-to-MIME
+// mapping is kept explicit.
+pub fn from_path<P>(path: P) -> Result<Mime, mime::FromStrError>
+where
+ P: AsRef<Path>,
+{
+ let path = path.as_ref();
+ let extension = path.extension().and_then(|ext| ext.to_str());
+ let mime = match extension {
+ Some("css") => "text/css; charset=utf-8",
+ Some("js") => "text/javascript; charset=utf-8",
+ Some("json") => "application/json",
+ Some("html") => "text/html; charset=utf-8",
+ Some("png") => "image/png",
+ _ => "application/octet-stream",
+ };
+ let mime = mime.parse()?;
+
+ Ok(mime)
+}
diff --git a/src/ui/mod.rs b/src/ui/mod.rs
new file mode 100644
index 0000000..f8caa48
--- /dev/null
+++ b/src/ui/mod.rs
@@ -0,0 +1,7 @@
+mod assets;
+mod error;
+mod middleware;
+mod mime;
+mod routes;
+
+pub use self::routes::router;
diff --git a/src/ui/routes/ch/channel.rs b/src/ui/routes/ch/channel.rs
new file mode 100644
index 0000000..a338f1f
--- /dev/null
+++ b/src/ui/routes/ch/channel.rs
@@ -0,0 +1,61 @@
+pub mod get {
+ use axum::{
+ extract::{Path, State},
+ response::{self, IntoResponse, Redirect},
+ };
+
+ use crate::{
+ app::App,
+ channel,
+ error::Internal,
+ token::extract::Identity,
+ ui::{
+ assets::{Asset, Assets},
+ error::NotFound,
+ },
+ };
+
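+ // Serve the UI shell for /ch/:channel: anonymous visitors are redirected to
+ // /login, and unknown channels get a 404 response that still carries the shell.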
+ pub async fn handler(
+ State(app): State<App>,
+ identity: Option<Identity>,
+ Path(channel): Path<channel::Id>,
+ ) -> Result<Asset, Error> {
+ let _ = identity.ok_or(Error::NotLoggedIn)?;
+ app.channels()
+ .get(&channel)
+ .await
+ .map_err(Error::internal)?
+ .ok_or(Error::NotFound)?;
+
+ Assets::index().map_err(Error::Internal)
+ }
+
+ #[derive(Debug, thiserror::Error)]
+ pub enum Error {
+ #[error("requested channel not found")]
+ NotFound,
+ #[error("not logged in")]
+ NotLoggedIn,
+ #[error("{0}")]
+ Internal(Internal),
+ }
+
+ impl Error {
+ fn internal(err: impl Into<Internal>) -> Self {
+ Self::Internal(err.into())
+ }
+ }
+
+ impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
+ match self {
+ Self::NotFound => match Assets::index() {
+ Ok(asset) => NotFound(asset).into_response(),
+ Err(internal) => internal.into_response(),
+ },
+ Self::NotLoggedIn => Redirect::temporary("/login").into_response(),
+ Self::Internal(error) => error.into_response(),
+ }
+ }
+ }
+}
diff --git a/src/ui/routes/ch/mod.rs b/src/ui/routes/ch/mod.rs
new file mode 100644
index 0000000..ff02972
--- /dev/null
+++ b/src/ui/routes/ch/mod.rs
@@ -0,0 +1 @@
+pub mod channel;
diff --git a/src/ui/routes/get.rs b/src/ui/routes/get.rs
new file mode 100644
index 0000000..2fcb51c
--- /dev/null
+++ b/src/ui/routes/get.rs
@@ -0,0 +1,30 @@
+use axum::response::{self, IntoResponse, Redirect};
+
+use crate::{
+ error::Internal,
+ token::extract::Identity,
+ ui::assets::{Asset, Assets},
+};
+
+pub async fn handler(identity: Option<Identity>) -> Result<Asset, Error> {
+ let _ = identity.ok_or(Error::NotLoggedIn)?;
+
+ Assets::index().map_err(Error::Internal)
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ #[error("not logged in")]
+ NotLoggedIn,
+ #[error("{0}")]
+ Internal(Internal),
+}
+
+impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
+ match self {
+ Self::NotLoggedIn => Redirect::temporary("/login").into_response(),
+ Self::Internal(error) => error.into_response(),
+ }
+ }
+}
diff --git a/src/ui/routes/invite/invite.rs b/src/ui/routes/invite/invite.rs
new file mode 100644
index 0000000..06e5792
--- /dev/null
+++ b/src/ui/routes/invite/invite.rs
@@ -0,0 +1,55 @@
+pub mod get {
+ use axum::{
+ extract::{Path, State},
+ response::{self, IntoResponse},
+ };
+
+ use crate::{
+ app::App,
+ error::Internal,
+ invite,
+ ui::{
+ assets::{Asset, Assets},
+ error::NotFound,
+ },
+ };
+
+ pub async fn handler(
+ State(app): State<App>,
+ Path(invite): Path<invite::Id>,
+ ) -> Result<Asset, Error> {
+ app.invites()
+ .get(&invite)
+ .await
+ .map_err(Error::internal)?
+ .ok_or(Error::NotFound)?;
+
+ Assets::index().map_err(Error::Internal)
+ }
+
+ #[derive(Debug, thiserror::Error)]
+ pub enum Error {
+ #[error("invite not found")]
+ NotFound,
+ #[error("{0}")]
+ Internal(Internal),
+ }
+
+ impl Error {
+ fn internal(err: impl Into<Internal>) -> Self {
+ Self::Internal(err.into())
+ }
+ }
+
+ impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
+ match self {
+ Self::NotFound => match Assets::index() {
+ Ok(asset) => NotFound(asset).into_response(),
+ Err(internal) => internal.into_response(),
+ },
+ Self::Internal(error) => error.into_response(),
+ }
+ }
+ }
+}
diff --git a/src/ui/routes/invite/mod.rs b/src/ui/routes/invite/mod.rs
new file mode 100644
index 0000000..50af8be
--- /dev/null
+++ b/src/ui/routes/invite/mod.rs
@@ -0,0 +1,4 @@
+// The repetition is deliberate: the first `invite` is a literal path segment, and
+// the second reflects the `:invite` placeholder it serves.
+#[allow(clippy::module_inception)]
+pub mod invite;
diff --git a/src/ui/routes/login.rs b/src/ui/routes/login.rs
new file mode 100644
index 0000000..81a874c
--- /dev/null
+++ b/src/ui/routes/login.rs
@@ -0,0 +1,11 @@
+pub mod get {
+ use crate::{
+ error::Internal,
+ ui::assets::{Asset, Assets},
+ };
+
+ #[allow(clippy::unused_async)]
+ pub async fn handler() -> Result<Asset, Internal> {
+ Assets::index()
+ }
+}
diff --git a/src/ui/routes/mod.rs b/src/ui/routes/mod.rs
new file mode 100644
index 0000000..72d9a4a
--- /dev/null
+++ b/src/ui/routes/mod.rs
@@ -0,0 +1,26 @@
+use axum::{middleware, routing::get, Router};
+
+use crate::{app::App, ui::middleware::setup_required};
+
+mod ch;
+mod get;
+mod invite;
+mod login;
+mod path;
+mod setup;
+
+pub fn router(app: &App) -> Router<App> {
+ [
+ Router::new()
+ .route("/*path", get(path::get::handler))
+ .route("/setup", get(setup::get::handler)),
+ Router::new()
+ .route("/", get(get::handler))
+ .route("/login", get(login::get::handler))
+ .route("/ch/:channel", get(ch::channel::get::handler))
+ .route("/invite/:invite", get(invite::invite::get::handler))
+ .route_layer(middleware::from_fn_with_state(app.clone(), setup_required)),
+ ]
+ .into_iter()
+ .fold(Router::default(), Router::merge)
+}
diff --git a/src/ui/routes/path.rs b/src/ui/routes/path.rs
new file mode 100644
index 0000000..a387552
--- /dev/null
+++ b/src/ui/routes/path.rs
@@ -0,0 +1,9 @@
+pub mod get {
+ use axum::extract::Path;
+
+ use crate::ui::assets::{Asset, Assets, Error};
+
+ pub async fn handler(Path(path): Path<String>) -> Result<Asset, Error> {
+ Assets::load(path)
+ }
+}
diff --git a/src/ui/routes/setup.rs b/src/ui/routes/setup.rs
new file mode 100644
index 0000000..649cc5f
--- /dev/null
+++ b/src/ui/routes/setup.rs
@@ -0,0 +1,43 @@
+pub mod get {
+ use axum::{
+ extract::State,
+ response::{self, IntoResponse, Redirect},
+ };
+
+ use crate::{
+ app::App,
+ error::Internal,
+ ui::assets::{Asset, Assets},
+ };
+
+ pub async fn handler(State(app): State<App>) -> Result<Asset, Error> {
+ if app
+ .setup()
+ .completed()
+ .await
+ .map_err(Internal::from)
+ .map_err(Error::Internal)?
+ {
+ Err(Error::SetupCompleted)
+ } else {
+ Assets::index().map_err(Error::Internal)
+ }
+ }
+
+ #[derive(Debug, thiserror::Error)]
+ pub enum Error {
+ #[error("setup already completed")]
+ SetupCompleted,
+ #[error("{0}")]
+ Internal(Internal),
+ }
+
+ impl IntoResponse for Error {
+ fn into_response(self) -> response::Response {
+ match self {
+ Self::SetupCompleted => Redirect::to("/login").into_response(),
+ Self::Internal(error) => error.into_response(),
+ }
+ }
+ }
+}