-rw-r--r--  .sqlx/query-a811cce764f6e5c5b5565d5c29e56e2fd229c024dacb6ad817988eed9a8e0c22.json  |  20
-rw-r--r--  .sqlx/query-e7fcc523a4487a901460e904c0720b3c43c87fb335c2f7ca9fa694b9ca4822f7.json  |  20
-rw-r--r--  migrations/20240904135137_index_token_issued_at.sql  |   1
-rw-r--r--  migrations/20240904153315_channel.sql  |  20
-rw-r--r--  migrations/20240911230415_no_channel_membership.sql  |   1
-rw-r--r--  migrations/20240912013151_token_last_used.sql  |   6
-rw-r--r--  migrations/20240924232919_message_serial_numbers.sql  |  38
-rw-r--r--  migrations/20240928002608_channel_lifecycle.sql  |  57
-rw-r--r--  migrations/20240928012031_channel_stored_sequence.sql  |  60
-rw-r--r--  migrations/20240929013644_token_id.sql  |  29
-rw-r--r--  migrations/20241002003606_global_sequence.sql  | 126
-rw-r--r--  migrations/20241005020942_login.sql  |  10
-rw-r--r--  migrations/20241005020958_token.sql (renamed from migrations/20240831024047_login.sql)  |  17
-rw-r--r--  migrations/20241005021009_channel.sql  |  16
-rw-r--r--  migrations/20241005021022_message.sql (renamed from migrations/20240912145249_message.sql)  |   9
-rw-r--r--  migrations/20241005021100_event_sequence.sql  |  10
-rw-r--r--  src/cli.rs  |   9
-rw-r--r--  src/db.rs  |  65
18 files changed, 157 insertions(+), 357 deletions(-)
diff --git a/.sqlx/query-a811cce764f6e5c5b5565d5c29e56e2fd229c024dacb6ad817988eed9a8e0c22.json b/.sqlx/query-a811cce764f6e5c5b5565d5c29e56e2fd229c024dacb6ad817988eed9a8e0c22.json
new file mode 100644
index 0000000..c4d2414
--- /dev/null
+++ b/.sqlx/query-a811cce764f6e5c5b5565d5c29e56e2fd229c024dacb6ad817988eed9a8e0c22.json
@@ -0,0 +1,20 @@
+{
+ "db_name": "SQLite",
+ "query": "\n select count(*) as \"exists: bool\"\n from _sqlx_migrations\n where version = $1\n and description = $2\n and hex(checksum) = $3\n ",
+ "describe": {
+ "columns": [
+ {
+ "name": "exists: bool",
+ "ordinal": 0,
+ "type_info": "Integer"
+ }
+ ],
+ "parameters": {
+ "Right": 3
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "a811cce764f6e5c5b5565d5c29e56e2fd229c024dacb6ad817988eed9a8e0c22"
+}
diff --git a/.sqlx/query-e7fcc523a4487a901460e904c0720b3c43c87fb335c2f7ca9fa694b9ca4822f7.json b/.sqlx/query-e7fcc523a4487a901460e904c0720b3c43c87fb335c2f7ca9fa694b9ca4822f7.json
new file mode 100644
index 0000000..0309783
--- /dev/null
+++ b/.sqlx/query-e7fcc523a4487a901460e904c0720b3c43c87fb335c2f7ca9fa694b9ca4822f7.json
@@ -0,0 +1,20 @@
+{
+ "db_name": "SQLite",
+ "query": "\n select count(*) as \"exists: bool\"\n from sqlite_master\n where name = '_sqlx_migrations'\n ",
+ "describe": {
+ "columns": [
+ {
+ "name": "exists: bool",
+ "ordinal": 0,
+ "type_info": "Integer"
+ }
+ ],
+ "parameters": {
+ "Right": 0
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "e7fcc523a4487a901460e904c0720b3c43c87fb335c2f7ca9fa694b9ca4822f7"
+}
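
The two .sqlx files above are sqlx offline query metadata for the guard queries added in src/db.rs below; they are the kind of cache that sqlx-cli's `cargo sqlx prepare` generates so the query_scalar! macros compile without a live database. Unescaped, the cached SQL reads:

    -- query-e7fcc523...: does sqlx's migration bookkeeping table exist at all?
    select count(*) as "exists: bool"
    from sqlite_master
    where name = '_sqlx_migrations';

    -- query-a811cce7...: was the rejected legacy migration ever applied here?
    select count(*) as "exists: bool"
    from _sqlx_migrations
    where version = $1
      and description = $2
      and hex(checksum) = $3;

The "exists: bool" alias uses sqlx's column type-override syntax, telling the macro to decode the integer count as a boolean.
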
diff --git a/migrations/20240904135137_index_token_issued_at.sql b/migrations/20240904135137_index_token_issued_at.sql
deleted file mode 100644
index b60f691..0000000
--- a/migrations/20240904135137_index_token_issued_at.sql
+++ /dev/null
@@ -1 +0,0 @@
-create index token_issued_at on token (issued_at);
diff --git a/migrations/20240904153315_channel.sql b/migrations/20240904153315_channel.sql
deleted file mode 100644
index e62b51f..0000000
--- a/migrations/20240904153315_channel.sql
+++ /dev/null
@@ -1,20 +0,0 @@
-create table channel (
- id text
- not null
- primary key,
- name text
- not null
- unique
-);
-
-create table channel_member (
- channel text
- not null
- references channel,
- login text
- not null
- references login,
- primary key (channel, login)
-);
-
-create index channel_member_login on channel_member (login);
diff --git a/migrations/20240911230415_no_channel_membership.sql b/migrations/20240911230415_no_channel_membership.sql
deleted file mode 100644
index db5a054..0000000
--- a/migrations/20240911230415_no_channel_membership.sql
+++ /dev/null
@@ -1 +0,0 @@
-drop table channel_member;
diff --git a/migrations/20240912013151_token_last_used.sql b/migrations/20240912013151_token_last_used.sql
deleted file mode 100644
index 0b45cd9..0000000
--- a/migrations/20240912013151_token_last_used.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-alter table token
-add column last_used_at text
- not null;
-
-update token
-set last_used_at = issued_at;
diff --git a/migrations/20240924232919_message_serial_numbers.sql b/migrations/20240924232919_message_serial_numbers.sql
deleted file mode 100644
index a53e4a2..0000000
--- a/migrations/20240924232919_message_serial_numbers.sql
+++ /dev/null
@@ -1,38 +0,0 @@
-create table sequenced_message (
- id text
- not null
- primary key,
- sequence bigint
- not null,
- channel text
- not null
- references channel (id),
- sender text
- not null
- references login (id),
- body text
- not null,
- sent_at text
- not null,
- unique (channel, sequence)
-);
-
-insert into sequenced_message
-select
- id,
- rank() over (
- partition by channel
- order by sent_at
- ) as sequence,
- channel,
- sender,
- body,
- sent_at
-from message;
-
-drop table message;
-
-alter table sequenced_message
-rename to message;
-
-create index message_sent_at on message (channel, sent_at);
diff --git a/migrations/20240928002608_channel_lifecycle.sql b/migrations/20240928002608_channel_lifecycle.sql
deleted file mode 100644
index bc690d7..0000000
--- a/migrations/20240928002608_channel_lifecycle.sql
+++ /dev/null
@@ -1,57 +0,0 @@
-alter table channel
-rename to old_channel;
-
--- Add new columns
-create table channel (
- id text
- not null
- primary key,
- name text
- not null
- unique,
- created_at text
- not null
-);
-
--- Transfer data from original table
-insert into channel
-select
- channel.id,
- channel.name,
- coalesce(
- min(message.sent_at),
- strftime('%FT%R:%f+00:00', 'now', 'utc')
- ) as created_at
-from old_channel as channel
- left join message on channel.id = message.channel
-group by channel.id, channel.name;
-
--- Fix up `message` foreign keys
-alter table message
-rename to old_message;
-
-create table message (
- id text
- not null
- primary key,
- sequence bigint
- not null,
- channel text
- not null
- references channel (id),
- sender text
- not null
- references login (id),
- body text
- not null,
- sent_at text
- not null,
- unique (channel, sequence)
-);
-
-insert into message
-select * from old_message;
-
--- Bury the bodies respectfully
-drop table old_message;
-drop table old_channel;
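
The rename/copy/drop shape of this migration (and of the two deleted migrations that follow) is the standard SQLite workaround for the limits of ALTER TABLE, which cannot change an existing column's definition, add table constraints, or re-point foreign keys in place. A generic, hypothetical sketch of the pattern with placeholder names:

    -- 1. park the old table under a temporary name
    alter table t rename to old_t;

    -- 2. recreate it with the desired shape
    create table t (
        id text not null primary key,
        extra text not null
    );

    -- 3. copy the rows across, filling in the new column
    insert into t
    select id, '' as extra from old_t;

    -- 4. drop the parked copy
    drop table old_t;
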
diff --git a/migrations/20240928012031_channel_stored_sequence.sql b/migrations/20240928012031_channel_stored_sequence.sql
deleted file mode 100644
index badd88d..0000000
--- a/migrations/20240928012031_channel_stored_sequence.sql
+++ /dev/null
@@ -1,60 +0,0 @@
-alter table channel
-rename to old_channel;
-
--- Add new columns
-create table channel (
- id text
- not null
- primary key,
- name text
- not null
- unique,
- created_at text
- not null,
- last_sequence bigint
- not null
-);
-
--- Transfer data from original table
-insert into channel
-select
- channel.id,
- channel.name,
- channel.created_at,
- coalesce(
- max(message.sequence),
- 0
- ) as last_sequence
-from old_channel as channel
- left join message on channel.id = message.channel
-group by channel.id, channel.name;
-
--- Fix up `message` foreign keys
-alter table message
-rename to old_message;
-
-create table message (
- id text
- not null
- primary key,
- sequence bigint
- not null,
- channel text
- not null
- references channel (id),
- sender text
- not null
- references login (id),
- body text
- not null,
- sent_at text
- not null,
- unique (channel, sequence)
-);
-
-insert into message
-select * from old_message;
-
--- Bury the bodies respectfully
-drop table old_message;
-drop table old_channel;
diff --git a/migrations/20240929013644_token_id.sql b/migrations/20240929013644_token_id.sql
deleted file mode 100644
index ad4d8b4..0000000
--- a/migrations/20240929013644_token_id.sql
+++ /dev/null
@@ -1,29 +0,0 @@
-alter table token
-rename to old_token;
-
-create table token (
- id text
- not null
- primary key,
- secret text
- not null
- unique,
- login text
- not null,
- issued_at text
- not null,
- last_used_at text
- not null,
- foreign key (login)
- references login (id)
-);
-
-insert into token
-select
- 'T' || lower(hex(randomblob(8))) as id,
- *
-from old_token;
-
-drop table old_token;
-
-create index token_issued_at on token (issued_at);
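
The backfill above mints an id for each existing token from eight random bytes; SQLite's hex() returns uppercase digits, hence the lower(). A hypothetical illustration of the resulting shape:

    select 'T' || lower(hex(randomblob(8))) as id;
    -- e.g. T9f3c07b12ae45d68: 'T' plus 16 lowercase hex characters, different on every call
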
diff --git a/migrations/20241002003606_global_sequence.sql b/migrations/20241002003606_global_sequence.sql
deleted file mode 100644
index 198b585..0000000
--- a/migrations/20241002003606_global_sequence.sql
+++ /dev/null
@@ -1,126 +0,0 @@
-create table event_sequence (
- last_value bigint
- not null
-);
-
-create unique index event_sequence_singleton
-on event_sequence (0);
-
--- Attempt to assign events sent so far a globally-unique sequence number,
--- maintaining an approximation of the order they were sent in. This can
--- introduce small ordering anomalies (where the resulting sequence differs
--- from the order they were sent in) for events that were sent close in time;
--- I've gone with chronological order here as it's the closest thing we have to
--- a global ordering, and because the results will be intuitive to most users.
-create temporary table raw_event (
- type text
- not null,
- at text
- not null,
- channel text
- unique,
- message text
- unique,
- check ((channel is not null and message is null) or (message is not null and channel is null))
-);
-
-insert into raw_event (type, at, channel)
-select
- 'channel' as type,
- created_at as at,
- id as channel
-from channel;
-
-insert into raw_event (type, at, message)
-select
- 'message' as type,
- sent_at as at,
- id as message
-from message;
-
-create temporary table event (
- type text
- not null,
- sequence
- unique
- not null,
- at text
- not null,
- channel text
- unique,
- message text
- unique,
- check ((channel is not null and message is null) or (message is not null and channel is null))
-);
-
-insert into event
-select
- type,
- rank() over (order by at) - 1 as sequence,
- at,
- channel,
- message
-from raw_event;
-
-drop table raw_event;
-
-alter table channel rename to old_channel;
-alter table message rename to old_message;
-
-create table channel (
- id text
- not null
- primary key,
- name text
- unique
- not null,
- created_sequence bigint
- unique
- not null,
- created_at text
- not null
-);
-
-insert into channel
-select
- c.id,
- c.name,
- e.sequence,
- c.created_at
-from old_channel as c join event as e
- on e.channel = c.id;
-
-create table message (
- id text
- not null
- primary key,
- channel text
- not null
- references channel (id),
- sender text
- not null
- references login (id),
- sent_sequence bigint
- unique
- not null,
- sent_at text
- not null,
- body text
- not null
-);
-
-insert into message
-select
- m.id,
- m.channel,
- m.sender,
- e.sequence,
- m.sent_at,
- m.body
-from old_message as m join event as e
- on e.message = m.id;
-
-insert into event_sequence
-select coalesce(max(sequence), 0) from event;
-
-drop table event;
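
The numbering in the deleted backfill above comes from a zero-based rank over the merged event timestamps. A small standalone illustration with made-up rows, runnable on SQLite 3.25 or newer:

    with raw_event (type, at) as (
        values
            ('channel', '2024-09-05T01:00:00.000+00:00'),
            ('message', '2024-09-05T01:00:07.000+00:00'),
            ('message', '2024-09-05T01:00:09.000+00:00')
    )
    select
        type,
        rank() over (order by at) - 1 as sequence,
        at
    from raw_event;
    -- channel | 0 | 2024-09-05T01:00:00.000+00:00
    -- message | 1 | 2024-09-05T01:00:07.000+00:00
    -- message | 2 | 2024-09-05T01:00:09.000+00:00

Note that rank() assigns identical timestamps the same number, which the unique constraint on event.sequence would then reject; the millisecond-precision timestamps make that unlikely rather than impossible.
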
diff --git a/migrations/20241005020942_login.sql b/migrations/20241005020942_login.sql
new file mode 100644
index 0000000..92d0875
--- /dev/null
+++ b/migrations/20241005020942_login.sql
@@ -0,0 +1,10 @@
+create table login (
+ id text
+ not null
+ primary key,
+ name text
+ not null
+ unique,
+ password_hash text
+ not null
+);
diff --git a/migrations/20240831024047_login.sql b/migrations/20241005020958_token.sql
index 758e9c7..046638a 100644
--- a/migrations/20240831024047_login.sql
+++ b/migrations/20241005020958_token.sql
@@ -1,22 +1,19 @@
-create table login (
+create table token (
id text
not null
primary key,
- name text
- not null
- unique,
- password_hash text
- not null
-);
-
-create table token (
secret text
not null
- primary key,
+ unique,
login text
not null,
issued_at text
not null,
+ last_used_at text
+ not null,
foreign key (login)
references login (id)
);
+
+create index token_issued_at
+on token (issued_at);
diff --git a/migrations/20241005021009_channel.sql b/migrations/20241005021009_channel.sql
new file mode 100644
index 0000000..ac36884
--- /dev/null
+++ b/migrations/20241005021009_channel.sql
@@ -0,0 +1,16 @@
+create table channel (
+ id text
+ not null
+ primary key,
+ name text
+ unique
+ not null,
+ created_sequence bigint
+ unique
+ not null,
+ created_at text
+ not null
+);
+
+create index channel_created_at
+on channel (created_at);
diff --git a/migrations/20240912145249_message.sql b/migrations/20241005021022_message.sql
index ce9db0d..189612c 100644
--- a/migrations/20240912145249_message.sql
+++ b/migrations/20241005021022_message.sql
@@ -8,10 +8,15 @@ create table message (
sender text
not null
references login (id),
- body text
+ sent_sequence bigint
+ unique
not null,
sent_at text
+ not null,
+ body text
not null
);
-create index message_sent_at on message (channel, sent_at);
+create index message_sent_at
+on message (sent_at);
+
diff --git a/migrations/20241005021100_event_sequence.sql b/migrations/20241005021100_event_sequence.sql
new file mode 100644
index 0000000..ac6821d
--- /dev/null
+++ b/migrations/20241005021100_event_sequence.sql
@@ -0,0 +1,10 @@
+create table event_sequence (
+ last_value bigint
+ not null
+);
+
+create unique index event_sequence_singleton
+on event_sequence (0);
+
+insert into event_sequence
+values (0);
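
The unique index here is over the constant expression 0, so every row maps to the same index key: once the migration has inserted the initial row, further inserts are rejected while a row is present, and the table holds a single counter. A hypothetical session against a scratch copy of this schema:

    insert into event_sequence values (7);
    -- rejected: violates the unique index event_sequence_singleton

    -- updating the single existing row is the intended operation, e.g.
    update event_sequence
    set last_value = last_value + 1;
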
diff --git a/src/cli.rs b/src/cli.rs
index 2d9f512..d88916a 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -99,7 +99,7 @@ impl Args {
(self.address.as_str(), self.port)
}
- async fn pool(&self) -> sqlx::Result<SqlitePool> {
+ async fn pool(&self) -> Result<SqlitePool, db::Error> {
db::prepare(&self.database_url).await
}
}
@@ -126,9 +126,6 @@ fn started_msg(listener: &net::TcpListener) -> io::Result<String> {
pub enum Error {
/// Failure due to `io::Error`. See [`io::Error`].
IoError(#[from] io::Error),
- /// Failure due to a database error. See [`sqlx::Error`].
- DatabaseError(#[from] sqlx::Error),
- /// Failure due to a database migration error. See
- /// [`sqlx::migrate::MigrateError`].
- MigrateError(#[from] sqlx::migrate::MigrateError),
+ /// Failure due to a database initialization error. See [`db::Error`].
+ Database(#[from] db::Error),
}
diff --git a/src/db.rs b/src/db.rs
index 93a1169..e09b0ba 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -2,8 +2,13 @@ use std::str::FromStr;
use sqlx::sqlite::{SqliteConnectOptions, SqlitePool, SqlitePoolOptions};
-pub async fn prepare(url: &str) -> sqlx::Result<SqlitePool> {
+pub async fn prepare(url: &str) -> Result<SqlitePool, Error> {
let pool = create(url).await?;
+
+ // First migration of original migration series, from commit
+ // 9bd6d9862b1c243def02200bca2cfbf578ad2a2f or earlier.
+ reject_migration(&pool, "20240831024047", "login", "9949D238C4099295EC4BEE734BFDA8D87513B2973DFB895352A11AB01DD46CB95314B7F1B3431B77E3444A165FE3DC28").await?;
+
sqlx::migrate!().run(&pool).await?;
Ok(pool)
}
@@ -17,6 +22,64 @@ async fn create(database_url: &str) -> sqlx::Result<SqlitePool> {
Ok(pool)
}
+async fn reject_migration(
+ pool: &SqlitePool,
+ version: &str,
+ description: &str,
+ checksum_hex: &str,
+) -> Result<(), Error> {
+ if !sqlx::query_scalar!(
+ r#"
+ select count(*) as "exists: bool"
+ from sqlite_master
+ where name = '_sqlx_migrations'
+ "#
+ )
+ .fetch_one(pool)
+ .await?
+ {
+ // No migrations table; this is a fresh DB.
+ return Ok(());
+ }
+
+ if !sqlx::query_scalar!(
+ r#"
+ select count(*) as "exists: bool"
+ from _sqlx_migrations
+ where version = $1
+ and description = $2
+ and hex(checksum) = $3
+ "#,
+ version,
+ description,
+ checksum_hex,
+ )
+ .fetch_one(pool)
+ .await?
+ {
+ // Rejected migration does not exist; this DB never ran it.
+ return Ok(());
+ }
+
+ Err(Error::Rejected(version.into(), description.into()))
+}
+
+/// Errors occurring during database setup.
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ /// Failure due to a database error. See [`sqlx::Error`].
+ #[error(transparent)]
+ Database(#[from] sqlx::Error),
+ /// Failure due to a database migration error. See
+ /// [`sqlx::migrate::MigrateError`].
+ #[error(transparent)]
+ Migration(#[from] sqlx::migrate::MigrateError),
+ /// Failure because the database contains a migration from an unsupported
+ /// schema version.
+ #[error("database contains rejected migration {0}:{1}, move it aside")]
+ Rejected(String, String),
+}
+
pub trait NotFound {
type Ok;
fn not_found<E, F>(self, map: F) -> Result<Self::Ok, E>
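
For databases created by the old migration series, the check that reject_migration automates can be reproduced by hand against sqlx's bookkeeping table; the checksum column is stored as a raw blob, which is why the Rust side compares hex(checksum) with an uppercase hex string. A hedged sketch of that manual query:

    -- assumed manual equivalent of the guard; run against an existing database
    select version, description, hex(checksum)
    from _sqlx_migrations
    order by version;
    -- a row reading 20240831024047 | login | 9949D238...DC28 identifies a database
    -- built from the pre-squash migrations, which prepare() now refuses to touch
    -- until that history is moved aside.
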