author    Owen Jacobson <owen@grimoire.ca>  2024-10-01 22:30:04 -0400
committer Owen Jacobson <owen@grimoire.ca>  2024-10-01 22:43:14 -0400
commit    b8392a5fe824eff46f912a58885546e7b0f37e6f (patch)
tree      ff4061bbf4be30c53f84c179f86e8e6ab584dbda
parent    7645411bcf7201e3a4927566da78080dc6a84ccf (diff)
Track event sequences globally, not per channel.
Per-channel event sequences were a cute idea, but they made reasoning about event resumption much, much harder (case in point: recovering the order of events in a partially-ordered collection is quadratic, since it's basically a topological sort). The minor overhead of a global sequence number is likely tolerable, and this simplifies both the API and the internals.
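The replacement mechanism is a single-row event_sequence counter table, bumped inside the same transaction as each write. A minimal sketch of the pattern, assembled from the migration and src/repo/sequence.rs in the diff below (seeding the counter with a literal 0 is illustrative; the migration actually backfills it from the highest existing event sequence):

    -- One-row counter table; a unique index over the constant
    -- expression 0 ensures at most one row can ever exist.
    create table event_sequence (
        last_value bigint not null
    );
    create unique index event_sequence_singleton on event_sequence (0);
    insert into event_sequence (last_value) values (0);

    -- Allocating the next sequence number is a single atomic statement
    -- (RETURNING requires SQLite 3.35+); SQLite permits only one writer
    -- at a time, so concurrent allocations cannot collide.
    update event_sequence
    set last_value = last_value + 1
    returning last_value;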
-rw-r--r--  .sqlx/query-023b1e263b68a483704ae5f6b07e69ab792b7365f2eb2831f7a2ac13e2ecf323.json  38
-rw-r--r--  .sqlx/query-397bdfdb77651e3e65e9ec53cf075037c794cae08f79a689c7a037aa68d7c00c.json  20
-rw-r--r--  .sqlx/query-4715007e2395ad30433b7405a144db4901c302bbcd3e76da6c61742ac44345c9.json (renamed from .sqlx/query-2310fe5b8e88e314eb200d8f227b09c3e4b0c9c0202c7cbe3fba93213ea100cf.json)  16
-rw-r--r--  .sqlx/query-5244f04bc270fc8d3cd4116854398e2151ba2dba10c03a9d2d93184141f1425c.json (renamed from .sqlx/query-df3656771c3cb6851e0c54a2d368676f279af866d0840d6c2c093b87b1eadd8c.json)  12
-rw-r--r--  .sqlx/query-74f0bad30dcec743d77309b8df33083c2da765dfda3023c78c25c06735670457.json (renamed from .sqlx/query-6a782686e163e65f5e03e4aaf423b1fd14ed9e252d7d9c5323feafb0b9159259.json)  10
-rw-r--r--  .sqlx/query-760d3532e1613fd9f79ac98cb132479c6e7a2301d576af298da570f3effdc106.json  50
-rw-r--r--  .sqlx/query-7e816ede017bc2635c11ab72b18b7af92ac1f1faed9df41df90f57cb596cfe7c.json  74
-rw-r--r--  .sqlx/query-7f6b9c7d4ef3f540d594318a7a66fa8f9e3ddcf6d041be8d834db58f66a5aa88.json (renamed from .sqlx/query-7ccae3dde1aba5f22cf9e3926096285d50afb88a326cff0ecab96058a2f6d93a.json)  10
-rw-r--r--  .sqlx/query-7fc3094944d5133fd8b2d80aace35b06db0071c5f257b7f71349966bcdadfcb5.json  20
-rw-r--r--  .sqlx/query-9386cdaa2cb41f5a7e19d2fc8c187294a4661c18c2d820f4379dfd82138a8f77.json  38
-rw-r--r--  .sqlx/query-aeafe536f36593bfd1080ee61c4b10c6f90b1221e963db69c8e6d23e99012ecf.json  32
-rw-r--r--  .sqlx/query-f6909336ab05b7ad423c7b96a0e7b12a920f9827aff2b05ee0364ff7688a38ae.json (renamed from .sqlx/query-22f313d9afcdd02df74a8b8c64a38a3f73b112e74b7318ee8e52e475866d8cfd.json)  10
-rw-r--r--  migrations/20241002003606_global_sequence.sql  126
-rw-r--r--  src/channel/app.rs  14
-rw-r--r--  src/channel/routes/test/on_create.rs  5
-rw-r--r--  src/channel/routes/test/on_send.rs  4
-rw-r--r--  src/events/app.rs  76
-rw-r--r--  src/events/repo/message.rs  79
-rw-r--r--  src/events/routes.rs  26
-rw-r--r--  src/events/routes/test.rs  47
-rw-r--r--  src/events/types.rs  79
-rw-r--r--  src/repo/channel.rs  53
-rw-r--r--  src/repo/mod.rs  1
-rw-r--r--  src/repo/sequence.rs  45
-rw-r--r--  src/test/fixtures/filter.rs  12
25 files changed, 535 insertions, 362 deletions
diff --git a/.sqlx/query-023b1e263b68a483704ae5f6b07e69ab792b7365f2eb2831f7a2ac13e2ecf323.json b/.sqlx/query-023b1e263b68a483704ae5f6b07e69ab792b7365f2eb2831f7a2ac13e2ecf323.json
new file mode 100644
index 0000000..cc23359
--- /dev/null
+++ b/.sqlx/query-023b1e263b68a483704ae5f6b07e69ab792b7365f2eb2831f7a2ac13e2ecf323.json
@@ -0,0 +1,38 @@
+{
+ "db_name": "SQLite",
+ "query": "\n insert\n into channel (id, name, created_at, created_sequence)\n values ($1, $2, $3, $4)\n returning\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\",\n created_sequence as \"created_sequence: Sequence\"\n ",
+ "describe": {
+ "columns": [
+ {
+ "name": "id: Id",
+ "ordinal": 0,
+ "type_info": "Text"
+ },
+ {
+ "name": "name",
+ "ordinal": 1,
+ "type_info": "Text"
+ },
+ {
+ "name": "created_at: DateTime",
+ "ordinal": 2,
+ "type_info": "Text"
+ },
+ {
+ "name": "created_sequence: Sequence",
+ "ordinal": 3,
+ "type_info": "Integer"
+ }
+ ],
+ "parameters": {
+ "Right": 4
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "023b1e263b68a483704ae5f6b07e69ab792b7365f2eb2831f7a2ac13e2ecf323"
+}
diff --git a/.sqlx/query-397bdfdb77651e3e65e9ec53cf075037c794cae08f79a689c7a037aa68d7c00c.json b/.sqlx/query-397bdfdb77651e3e65e9ec53cf075037c794cae08f79a689c7a037aa68d7c00c.json
deleted file mode 100644
index 5cb7282..0000000
--- a/.sqlx/query-397bdfdb77651e3e65e9ec53cf075037c794cae08f79a689c7a037aa68d7c00c.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "db_name": "SQLite",
- "query": "\n update channel\n set last_sequence = last_sequence + 1\n where id = $1\n returning last_sequence as \"next_sequence: Sequence\"\n ",
- "describe": {
- "columns": [
- {
- "name": "next_sequence: Sequence",
- "ordinal": 0,
- "type_info": "Integer"
- }
- ],
- "parameters": {
- "Right": 1
- },
- "nullable": [
- false
- ]
- },
- "hash": "397bdfdb77651e3e65e9ec53cf075037c794cae08f79a689c7a037aa68d7c00c"
-}
diff --git a/.sqlx/query-2310fe5b8e88e314eb200d8f227b09c3e4b0c9c0202c7cbe3fba93213ea100cf.json b/.sqlx/query-4715007e2395ad30433b7405a144db4901c302bbcd3e76da6c61742ac44345c9.json
index 1bd4116..494e1db 100644
--- a/.sqlx/query-2310fe5b8e88e314eb200d8f227b09c3e4b0c9c0202c7cbe3fba93213ea100cf.json
+++ b/.sqlx/query-4715007e2395ad30433b7405a144db4901c302bbcd3e76da6c61742ac44345c9.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "\n\t\t\t\tinsert into message\n\t\t\t\t\t(id, channel, sequence, sender, body, sent_at)\n\t\t\t\tvalues ($1, $2, $3, $4, $5, $6)\n\t\t\t\treturning\n\t\t\t\t\tid as \"id: message::Id\",\n sequence as \"sequence: Sequence\",\n\t\t\t\t\tsender as \"sender: login::Id\",\n\t\t\t\t\tbody,\n\t\t\t\t\tsent_at as \"sent_at: DateTime\"\n\t\t\t",
+ "query": "\n\t\t\t\tinsert into message\n\t\t\t\t\t(id, channel, sender, sent_at, sent_sequence, body)\n\t\t\t\tvalues ($1, $2, $3, $4, $5, $6)\n\t\t\t\treturning\n\t\t\t\t\tid as \"id: message::Id\",\n\t\t\t\t\tsender as \"sender: login::Id\",\n sent_at as \"sent_at: DateTime\",\n sent_sequence as \"sent_sequence: Sequence\",\n\t\t\t\t\tbody\n\t\t\t",
"describe": {
"columns": [
{
@@ -9,22 +9,22 @@
"type_info": "Text"
},
{
- "name": "sequence: Sequence",
+ "name": "sender: login::Id",
"ordinal": 1,
- "type_info": "Integer"
+ "type_info": "Text"
},
{
- "name": "sender: login::Id",
+ "name": "sent_at: DateTime",
"ordinal": 2,
"type_info": "Text"
},
{
- "name": "body",
+ "name": "sent_sequence: Sequence",
"ordinal": 3,
- "type_info": "Text"
+ "type_info": "Integer"
},
{
- "name": "sent_at: DateTime",
+ "name": "body",
"ordinal": 4,
"type_info": "Text"
}
@@ -40,5 +40,5 @@
false
]
},
- "hash": "2310fe5b8e88e314eb200d8f227b09c3e4b0c9c0202c7cbe3fba93213ea100cf"
+ "hash": "4715007e2395ad30433b7405a144db4901c302bbcd3e76da6c61742ac44345c9"
}
diff --git a/.sqlx/query-df3656771c3cb6851e0c54a2d368676f279af866d0840d6c2c093b87b1eadd8c.json b/.sqlx/query-5244f04bc270fc8d3cd4116854398e2151ba2dba10c03a9d2d93184141f1425c.json
index 87e478e..820b43f 100644
--- a/.sqlx/query-df3656771c3cb6851e0c54a2d368676f279af866d0840d6c2c093b87b1eadd8c.json
+++ b/.sqlx/query-5244f04bc270fc8d3cd4116854398e2151ba2dba10c03a9d2d93184141f1425c.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "\n select\n channel.id as \"channel_id: channel::Id\",\n channel.name as \"channel_name\",\n channel.created_at as \"channel_created_at: DateTime\",\n message.id as \"message: message::Id\"\n from message\n join channel on message.channel = channel.id\n join login as sender on message.sender = sender.id\n where sent_at < $1\n ",
+ "query": "\n select\n channel.id as \"channel_id: channel::Id\",\n channel.name as \"channel_name\",\n channel.created_at as \"channel_created_at: DateTime\",\n channel.created_sequence as \"channel_created_sequence: Sequence\",\n message.id as \"message: message::Id\"\n from message\n join channel on message.channel = channel.id\n join login as sender on message.sender = sender.id\n where sent_at < $1\n ",
"describe": {
"columns": [
{
@@ -19,8 +19,13 @@
"type_info": "Text"
},
{
- "name": "message: message::Id",
+ "name": "channel_created_sequence: Sequence",
"ordinal": 3,
+ "type_info": "Integer"
+ },
+ {
+ "name": "message: message::Id",
+ "ordinal": 4,
"type_info": "Text"
}
],
@@ -31,8 +36,9 @@
false,
false,
false,
+ false,
false
]
},
- "hash": "df3656771c3cb6851e0c54a2d368676f279af866d0840d6c2c093b87b1eadd8c"
+ "hash": "5244f04bc270fc8d3cd4116854398e2151ba2dba10c03a9d2d93184141f1425c"
}
diff --git a/.sqlx/query-6a782686e163e65f5e03e4aaf423b1fd14ed9e252d7d9c5323feafb0b9159259.json b/.sqlx/query-74f0bad30dcec743d77309b8df33083c2da765dfda3023c78c25c06735670457.json
index ae298d6..b34443f 100644
--- a/.sqlx/query-6a782686e163e65f5e03e4aaf423b1fd14ed9e252d7d9c5323feafb0b9159259.json
+++ b/.sqlx/query-74f0bad30dcec743d77309b8df33083c2da765dfda3023c78c25c06735670457.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "\n select\n channel.id as \"id: Id\",\n channel.name,\n channel.created_at as \"created_at: DateTime\"\n from channel\n left join message\n where created_at < $1\n and message.id is null\n ",
+ "query": "\n select\n channel.id as \"id: Id\",\n channel.name,\n channel.created_at as \"created_at: DateTime\",\n channel.created_sequence as \"created_sequence: Sequence\"\n from channel\n left join message\n where created_at < $1\n and message.id is null\n ",
"describe": {
"columns": [
{
@@ -17,6 +17,11 @@
"name": "created_at: DateTime",
"ordinal": 2,
"type_info": "Text"
+ },
+ {
+ "name": "created_sequence: Sequence",
+ "ordinal": 3,
+ "type_info": "Integer"
}
],
"parameters": {
@@ -25,8 +30,9 @@
"nullable": [
false,
false,
+ false,
false
]
},
- "hash": "6a782686e163e65f5e03e4aaf423b1fd14ed9e252d7d9c5323feafb0b9159259"
+ "hash": "74f0bad30dcec743d77309b8df33083c2da765dfda3023c78c25c06735670457"
}
diff --git a/.sqlx/query-760d3532e1613fd9f79ac98cb132479c6e7a2301d576af298da570f3effdc106.json b/.sqlx/query-760d3532e1613fd9f79ac98cb132479c6e7a2301d576af298da570f3effdc106.json
deleted file mode 100644
index beb9234..0000000
--- a/.sqlx/query-760d3532e1613fd9f79ac98cb132479c6e7a2301d576af298da570f3effdc106.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
- "db_name": "SQLite",
- "query": "\n\t\t\t\tselect\n\t\t\t\t\tmessage.id as \"id: message::Id\",\n sequence as \"sequence: Sequence\",\n\t\t\t\t\tlogin.id as \"sender_id: login::Id\",\n\t\t\t\t\tlogin.name as sender_name,\n\t\t\t\t\tmessage.body,\n\t\t\t\t\tmessage.sent_at as \"sent_at: DateTime\"\n\t\t\t\tfrom message\n\t\t\t\t\tjoin login on message.sender = login.id\n\t\t\t\twhere channel = $1\n\t\t\t\t\tand coalesce(sequence > $2, true)\n\t\t\t\torder by sequence asc\n\t\t\t",
- "describe": {
- "columns": [
- {
- "name": "id: message::Id",
- "ordinal": 0,
- "type_info": "Text"
- },
- {
- "name": "sequence: Sequence",
- "ordinal": 1,
- "type_info": "Integer"
- },
- {
- "name": "sender_id: login::Id",
- "ordinal": 2,
- "type_info": "Text"
- },
- {
- "name": "sender_name",
- "ordinal": 3,
- "type_info": "Text"
- },
- {
- "name": "body",
- "ordinal": 4,
- "type_info": "Text"
- },
- {
- "name": "sent_at: DateTime",
- "ordinal": 5,
- "type_info": "Text"
- }
- ],
- "parameters": {
- "Right": 2
- },
- "nullable": [
- false,
- false,
- false,
- false,
- false,
- false
- ]
- },
- "hash": "760d3532e1613fd9f79ac98cb132479c6e7a2301d576af298da570f3effdc106"
-}
diff --git a/.sqlx/query-7e816ede017bc2635c11ab72b18b7af92ac1f1faed9df41df90f57cb596cfe7c.json b/.sqlx/query-7e816ede017bc2635c11ab72b18b7af92ac1f1faed9df41df90f57cb596cfe7c.json
new file mode 100644
index 0000000..f546438
--- /dev/null
+++ b/.sqlx/query-7e816ede017bc2635c11ab72b18b7af92ac1f1faed9df41df90f57cb596cfe7c.json
@@ -0,0 +1,74 @@
+{
+ "db_name": "SQLite",
+ "query": "\n\t\t\t\tselect\n\t\t\t\t\tmessage.id as \"id: message::Id\",\n channel.id as \"channel_id: channel::Id\",\n channel.name as \"channel_name\",\n channel.created_at as \"channel_created_at: DateTime\",\n channel.created_sequence as \"channel_created_sequence: Sequence\",\n\t\t\t\t\tsender.id as \"sender_id: login::Id\",\n\t\t\t\t\tsender.name as sender_name,\n message.sent_at as \"sent_at: DateTime\",\n message.sent_sequence as \"sent_sequence: Sequence\",\n message.body\n\t\t\t\tfrom message\n join channel on message.channel = channel.id\n\t\t\t\t\tjoin login as sender on message.sender = sender.id\n\t\t\t\twhere coalesce(message.sent_sequence > $1, true)\n\t\t\t\torder by sent_sequence asc\n\t\t\t",
+ "describe": {
+ "columns": [
+ {
+ "name": "id: message::Id",
+ "ordinal": 0,
+ "type_info": "Text"
+ },
+ {
+ "name": "channel_id: channel::Id",
+ "ordinal": 1,
+ "type_info": "Text"
+ },
+ {
+ "name": "channel_name",
+ "ordinal": 2,
+ "type_info": "Text"
+ },
+ {
+ "name": "channel_created_at: DateTime",
+ "ordinal": 3,
+ "type_info": "Text"
+ },
+ {
+ "name": "channel_created_sequence: Sequence",
+ "ordinal": 4,
+ "type_info": "Integer"
+ },
+ {
+ "name": "sender_id: login::Id",
+ "ordinal": 5,
+ "type_info": "Text"
+ },
+ {
+ "name": "sender_name",
+ "ordinal": 6,
+ "type_info": "Text"
+ },
+ {
+ "name": "sent_at: DateTime",
+ "ordinal": 7,
+ "type_info": "Text"
+ },
+ {
+ "name": "sent_sequence: Sequence",
+ "ordinal": 8,
+ "type_info": "Integer"
+ },
+ {
+ "name": "body",
+ "ordinal": 9,
+ "type_info": "Text"
+ }
+ ],
+ "parameters": {
+ "Right": 1
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "7e816ede017bc2635c11ab72b18b7af92ac1f1faed9df41df90f57cb596cfe7c"
+}
diff --git a/.sqlx/query-7ccae3dde1aba5f22cf9e3926096285d50afb88a326cff0ecab96058a2f6d93a.json b/.sqlx/query-7f6b9c7d4ef3f540d594318a7a66fa8f9e3ddcf6d041be8d834db58f66a5aa88.json
index 4ec7118..3cc33cf 100644
--- a/.sqlx/query-7ccae3dde1aba5f22cf9e3926096285d50afb88a326cff0ecab96058a2f6d93a.json
+++ b/.sqlx/query-7f6b9c7d4ef3f540d594318a7a66fa8f9e3ddcf6d041be8d834db58f66a5aa88.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "\n select\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\"\n from channel\n order by channel.name\n ",
+ "query": "\n select\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\",\n created_sequence as \"created_sequence: Sequence\"\n from channel\n order by channel.name\n ",
"describe": {
"columns": [
{
@@ -17,6 +17,11 @@
"name": "created_at: DateTime",
"ordinal": 2,
"type_info": "Text"
+ },
+ {
+ "name": "created_sequence: Sequence",
+ "ordinal": 3,
+ "type_info": "Integer"
}
],
"parameters": {
@@ -25,8 +30,9 @@
"nullable": [
false,
false,
+ false,
false
]
},
- "hash": "7ccae3dde1aba5f22cf9e3926096285d50afb88a326cff0ecab96058a2f6d93a"
+ "hash": "7f6b9c7d4ef3f540d594318a7a66fa8f9e3ddcf6d041be8d834db58f66a5aa88"
}
diff --git a/.sqlx/query-7fc3094944d5133fd8b2d80aace35b06db0071c5f257b7f71349966bcdadfcb5.json b/.sqlx/query-7fc3094944d5133fd8b2d80aace35b06db0071c5f257b7f71349966bcdadfcb5.json
new file mode 100644
index 0000000..b5bc371
--- /dev/null
+++ b/.sqlx/query-7fc3094944d5133fd8b2d80aace35b06db0071c5f257b7f71349966bcdadfcb5.json
@@ -0,0 +1,20 @@
+{
+ "db_name": "SQLite",
+ "query": "\n update event_sequence\n set last_value = last_value + 1\n returning last_value as \"next_value: Sequence\"\n ",
+ "describe": {
+ "columns": [
+ {
+ "name": "next_value: Sequence",
+ "ordinal": 0,
+ "type_info": "Integer"
+ }
+ ],
+ "parameters": {
+ "Right": 0
+ },
+ "nullable": [
+ false
+ ]
+ },
+ "hash": "7fc3094944d5133fd8b2d80aace35b06db0071c5f257b7f71349966bcdadfcb5"
+}
diff --git a/.sqlx/query-9386cdaa2cb41f5a7e19d2fc8c187294a4661c18c2d820f4379dfd82138a8f77.json b/.sqlx/query-9386cdaa2cb41f5a7e19d2fc8c187294a4661c18c2d820f4379dfd82138a8f77.json
new file mode 100644
index 0000000..e9c3967
--- /dev/null
+++ b/.sqlx/query-9386cdaa2cb41f5a7e19d2fc8c187294a4661c18c2d820f4379dfd82138a8f77.json
@@ -0,0 +1,38 @@
+{
+ "db_name": "SQLite",
+ "query": "\n select\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\",\n created_sequence as \"created_sequence: Sequence\"\n from channel\n where coalesce(created_sequence > $1, true)\n ",
+ "describe": {
+ "columns": [
+ {
+ "name": "id: Id",
+ "ordinal": 0,
+ "type_info": "Text"
+ },
+ {
+ "name": "name",
+ "ordinal": 1,
+ "type_info": "Text"
+ },
+ {
+ "name": "created_at: DateTime",
+ "ordinal": 2,
+ "type_info": "Text"
+ },
+ {
+ "name": "created_sequence: Sequence",
+ "ordinal": 3,
+ "type_info": "Integer"
+ }
+ ],
+ "parameters": {
+ "Right": 1
+ },
+ "nullable": [
+ false,
+ false,
+ false,
+ false
+ ]
+ },
+ "hash": "9386cdaa2cb41f5a7e19d2fc8c187294a4661c18c2d820f4379dfd82138a8f77"
+}
diff --git a/.sqlx/query-aeafe536f36593bfd1080ee61c4b10c6f90b1221e963db69c8e6d23e99012ecf.json b/.sqlx/query-aeafe536f36593bfd1080ee61c4b10c6f90b1221e963db69c8e6d23e99012ecf.json
deleted file mode 100644
index 5c27826..0000000
--- a/.sqlx/query-aeafe536f36593bfd1080ee61c4b10c6f90b1221e963db69c8e6d23e99012ecf.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "db_name": "SQLite",
- "query": "\n insert\n into channel (id, name, created_at, last_sequence)\n values ($1, $2, $3, $4)\n returning\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\"\n ",
- "describe": {
- "columns": [
- {
- "name": "id: Id",
- "ordinal": 0,
- "type_info": "Text"
- },
- {
- "name": "name",
- "ordinal": 1,
- "type_info": "Text"
- },
- {
- "name": "created_at: DateTime",
- "ordinal": 2,
- "type_info": "Text"
- }
- ],
- "parameters": {
- "Right": 4
- },
- "nullable": [
- false,
- false,
- false
- ]
- },
- "hash": "aeafe536f36593bfd1080ee61c4b10c6f90b1221e963db69c8e6d23e99012ecf"
-}
diff --git a/.sqlx/query-22f313d9afcdd02df74a8b8c64a38a3f73b112e74b7318ee8e52e475866d8cfd.json b/.sqlx/query-f6909336ab05b7ad423c7b96a0e7b12a920f9827aff2b05ee0364ff7688a38ae.json
index 3d5d06c..ded48e1 100644
--- a/.sqlx/query-22f313d9afcdd02df74a8b8c64a38a3f73b112e74b7318ee8e52e475866d8cfd.json
+++ b/.sqlx/query-f6909336ab05b7ad423c7b96a0e7b12a920f9827aff2b05ee0364ff7688a38ae.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "\n select\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\"\n from channel\n where id = $1\n ",
+ "query": "\n select\n id as \"id: Id\",\n name,\n created_at as \"created_at: DateTime\",\n created_sequence as \"created_sequence: Sequence\"\n from channel\n where id = $1\n ",
"describe": {
"columns": [
{
@@ -17,6 +17,11 @@
"name": "created_at: DateTime",
"ordinal": 2,
"type_info": "Text"
+ },
+ {
+ "name": "created_sequence: Sequence",
+ "ordinal": 3,
+ "type_info": "Integer"
}
],
"parameters": {
@@ -25,8 +30,9 @@
"nullable": [
false,
false,
+ false,
false
]
},
- "hash": "22f313d9afcdd02df74a8b8c64a38a3f73b112e74b7318ee8e52e475866d8cfd"
+ "hash": "f6909336ab05b7ad423c7b96a0e7b12a920f9827aff2b05ee0364ff7688a38ae"
}
diff --git a/migrations/20241002003606_global_sequence.sql b/migrations/20241002003606_global_sequence.sql
new file mode 100644
index 0000000..198b585
--- /dev/null
+++ b/migrations/20241002003606_global_sequence.sql
@@ -0,0 +1,126 @@
+create table event_sequence (
+ last_value bigint
+ not null
+);
+
+create unique index event_sequence_singleton
+on event_sequence (0);
+
+-- Attempt to assign events sent so far a globally-unique sequence number,
+-- maintaining an approximation of the order they were sent in. This can
+-- introduce small ordering anomalies (where the resulting sequence differs
+-- from the order they were sent in) for events that were sent close in time;
+-- I've gone with chronological order here as it's the closest thing we have to
+-- a global ordering, and because the results will be intuitive to most users.
+create temporary table raw_event (
+ type text
+ not null,
+ at text
+ not null,
+ channel text
+ unique,
+ message text
+ unique,
+ check ((channel is not null and message is null) or (message is not null and channel is null))
+);
+
+insert into raw_event (type, at, channel)
+select
+ 'channel' as type,
+ created_at as at,
+ id as channel
+from channel;
+
+insert into raw_event (type, at, message)
+select
+ 'message' as type,
+ sent_at as at,
+ id as message
+from message;
+
+create temporary table event (
+ type text
+ not null,
+ sequence
+ unique
+ not null,
+ at text
+ not null,
+ channel text
+ unique,
+ message text
+ unique,
+ check ((channel is not null and message is null) or (message is not null and channel is null))
+);
+
+insert into event
+select
+ type,
+ rank() over (order by at) - 1 as sequence,
+ at,
+ channel,
+ message
+from raw_event;
+
+drop table raw_event;
+
+alter table channel rename to old_channel;
+alter table message rename to old_message;
+
+create table channel (
+ id text
+ not null
+ primary key,
+ name text
+ unique
+ not null,
+ created_sequence bigint
+ unique
+ not null,
+ created_at text
+ not null
+);
+
+insert into channel
+select
+ c.id,
+ c.name,
+ e.sequence,
+ c.created_at
+from old_channel as c join event as e
+ on e.channel = c.id;
+
+create table message (
+ id text
+ not null
+ primary key,
+ channel text
+ not null
+ references channel (id),
+ sender text
+ not null
+ references login (id),
+ sent_sequence bigint
+ unique
+ not null,
+ sent_at text
+ not null,
+ body text
+ not null
+);
+
+insert into message
+select
+ m.id,
+ m.channel,
+ m.sender,
+ e.sequence,
+ m.sent_at,
+ m.body
+from old_message as m join event as e
+ on e.message = m.id;
+
+insert into event_sequence
+select coalesce(max(sequence), 0) from event;
+
+drop table event;
diff --git a/src/channel/app.rs b/src/channel/app.rs
index 70cda47..88f4170 100644
--- a/src/channel/app.rs
+++ b/src/channel/app.rs
@@ -3,8 +3,11 @@ use sqlx::sqlite::SqlitePool;
use crate::{
clock::DateTime,
- events::{broadcaster::Broadcaster, repo::message::Provider as _, types::ChannelEvent},
- repo::channel::{Channel, Provider as _},
+ events::{broadcaster::Broadcaster, types::ChannelEvent},
+ repo::{
+ channel::{Channel, Provider as _},
+ sequence::Provider as _,
+ },
};
pub struct Channels<'a> {
@@ -19,9 +22,10 @@ impl<'a> Channels<'a> {
pub async fn create(&self, name: &str, created_at: &DateTime) -> Result<Channel, CreateError> {
let mut tx = self.db.begin().await?;
+ let created_sequence = tx.sequence().next().await?;
let channel = tx
.channels()
- .create(name, created_at)
+ .create(name, created_at, created_sequence)
.await
.map_err(|err| CreateError::from_duplicate_name(err, name))?;
tx.commit().await?;
@@ -49,10 +53,10 @@ impl<'a> Channels<'a> {
let mut events = Vec::with_capacity(expired.len());
for channel in expired {
- let sequence = tx.message_events().assign_sequence(&channel).await?;
+ let deleted_sequence = tx.sequence().next().await?;
let event = tx
.channels()
- .delete_expired(&channel, sequence, relative_to)
+ .delete(&channel, relative_to, deleted_sequence)
.await?;
events.push(event);
}
diff --git a/src/channel/routes/test/on_create.rs b/src/channel/routes/test/on_create.rs
index e2610a5..5deb88a 100644
--- a/src/channel/routes/test/on_create.rs
+++ b/src/channel/routes/test/on_create.rs
@@ -38,18 +38,17 @@ async fn new_channel() {
let mut events = app
.events()
- .subscribe(types::ResumePoint::default())
+ .subscribe(None)
.await
.expect("subscribing never fails")
.filter(fixtures::filter::created());
- let types::ResumableEvent(_, event) = events
+ let event = events
.next()
.immediately()
.await
.expect("creation event published");
- assert_eq!(types::Sequence::default(), event.sequence);
assert!(matches!(
event.data,
types::ChannelEventData::Created(event)
diff --git a/src/channel/routes/test/on_send.rs b/src/channel/routes/test/on_send.rs
index 233518b..d37ed21 100644
--- a/src/channel/routes/test/on_send.rs
+++ b/src/channel/routes/test/on_send.rs
@@ -43,7 +43,7 @@ async fn messages_in_order() {
let events = app
.events()
- .subscribe(types::ResumePoint::default())
+ .subscribe(None)
.await
.expect("subscribing to a valid channel")
.filter(fixtures::filter::messages())
@@ -51,7 +51,7 @@ async fn messages_in_order() {
let events = events.collect::<Vec<_>>().immediately().await;
- for ((sent_at, message), types::ResumableEvent(_, event)) in requests.into_iter().zip(events) {
+ for ((sent_at, message), event) in requests.into_iter().zip(events) {
assert_eq!(*sent_at, event.at);
assert!(matches!(
event.data,
diff --git a/src/events/app.rs b/src/events/app.rs
index db7f430..c15f11e 100644
--- a/src/events/app.rs
+++ b/src/events/app.rs
@@ -1,5 +1,3 @@
-use std::collections::BTreeMap;
-
use chrono::TimeDelta;
use futures::{
future,
@@ -11,7 +9,7 @@ use sqlx::sqlite::SqlitePool;
use super::{
broadcaster::Broadcaster,
repo::message::Provider as _,
- types::{self, ChannelEvent, ResumePoint},
+ types::{self, ChannelEvent},
};
use crate::{
clock::DateTime,
@@ -19,6 +17,7 @@ use crate::{
channel::{self, Provider as _},
error::NotFound as _,
login::Login,
+ sequence::{Provider as _, Sequence},
},
};
@@ -45,9 +44,10 @@ impl<'a> Events<'a> {
.by_id(channel)
.await
.not_found(|| EventsError::ChannelNotFound(channel.clone()))?;
+ let sent_sequence = tx.sequence().next().await?;
let event = tx
.message_events()
- .create(login, &channel, body, sent_at)
+ .create(login, &channel, sent_at, sent_sequence, body)
.await?;
tx.commit().await?;
@@ -64,10 +64,10 @@ impl<'a> Events<'a> {
let mut events = Vec::with_capacity(expired.len());
for (channel, message) in expired {
- let sequence = tx.message_events().assign_sequence(&channel).await?;
+ let deleted_sequence = tx.sequence().next().await?;
let event = tx
.message_events()
- .delete_expired(&channel, &message, sequence, relative_to)
+ .delete(&channel, &message, relative_to, deleted_sequence)
.await?;
events.push(event);
}
@@ -83,42 +83,30 @@ impl<'a> Events<'a> {
pub async fn subscribe(
&self,
- resume_at: ResumePoint,
- ) -> Result<impl Stream<Item = types::ResumableEvent> + std::fmt::Debug, sqlx::Error> {
- let mut tx = self.db.begin().await?;
- let channels = tx.channels().all().await?;
-
- let created_events = {
- let resume_at = resume_at.clone();
- let channels = channels.clone();
- stream::iter(
- channels
- .into_iter()
- .map(ChannelEvent::created)
- .filter(move |event| resume_at.not_after(event)),
- )
- };
-
+ resume_at: Option<Sequence>,
+ ) -> Result<impl Stream<Item = types::ChannelEvent> + std::fmt::Debug, sqlx::Error> {
// Subscribe before retrieving, to catch messages broadcast while we're
// querying the DB. We'll prune out duplicates later.
let live_messages = self.events.subscribe();
- let mut replays = BTreeMap::new();
- let mut resume_live_at = resume_at.clone();
- for channel in channels {
- let replay = tx
- .message_events()
- .replay(&channel, resume_at.get(&channel.id))
- .await?;
+ let mut tx = self.db.begin().await?;
+ let channels = tx.channels().replay(resume_at).await?;
- if let Some(last) = replay.last() {
- resume_live_at.advance(last);
- }
+ let channel_events = channels
+ .into_iter()
+ .map(ChannelEvent::created)
+ .filter(move |event| resume_at.map_or(true, |resume_at| event.sequence > resume_at));
- replays.insert(channel.id.clone(), replay);
- }
+ let message_events = tx.message_events().replay(resume_at).await?;
+
+ let mut replay_events = channel_events
+ .into_iter()
+ .chain(message_events.into_iter())
+ .collect::<Vec<_>>();
+ replay_events.sort_by_key(|event| event.sequence);
+ let resume_live_at = replay_events.last().map(|event| event.sequence);
- let replay = stream::select_all(replays.into_values().map(stream::iter));
+ let replay = stream::iter(replay_events);
// no skip_expired or resume transforms for stored_messages, as it's
// constructed not to contain messages meeting either criterion.
@@ -132,25 +120,13 @@ impl<'a> Events<'a> {
// stored_messages.
.filter(Self::resume(resume_live_at));
- Ok(created_events.chain(replay).chain(live_messages).scan(
- resume_at,
- |resume_point, event| {
- match event.data {
- types::ChannelEventData::Deleted(_) => resume_point.forget(&event),
- _ => resume_point.advance(&event),
- }
-
- let event = types::ResumableEvent(resume_point.clone(), event);
-
- future::ready(Some(event))
- },
- ))
+ Ok(replay.chain(live_messages))
}
fn resume(
- resume_at: ResumePoint,
+ resume_at: Option<Sequence>,
) -> impl for<'m> FnMut(&'m types::ChannelEvent) -> future::Ready<bool> {
- move |event| future::ready(resume_at.not_after(event))
+ move |event| future::ready(resume_at < Some(event.sequence))
}
}
diff --git a/src/events/repo/message.rs b/src/events/repo/message.rs
index f8bae2b..3237553 100644
--- a/src/events/repo/message.rs
+++ b/src/events/repo/message.rs
@@ -2,11 +2,12 @@ use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
use crate::{
clock::DateTime,
- events::types::{self, Sequence},
+ events::types,
repo::{
channel::{self, Channel},
login::{self, Login},
message::{self, Message},
+ sequence::Sequence,
},
};
@@ -27,34 +28,33 @@ impl<'c> Events<'c> {
&mut self,
sender: &Login,
channel: &Channel,
- body: &str,
sent_at: &DateTime,
+ sent_sequence: Sequence,
+ body: &str,
) -> Result<types::ChannelEvent, sqlx::Error> {
- let sequence = self.assign_sequence(channel).await?;
-
let id = message::Id::generate();
let message = sqlx::query!(
r#"
insert into message
- (id, channel, sequence, sender, body, sent_at)
+ (id, channel, sender, sent_at, sent_sequence, body)
values ($1, $2, $3, $4, $5, $6)
returning
id as "id: message::Id",
- sequence as "sequence: Sequence",
sender as "sender: login::Id",
- body,
- sent_at as "sent_at: DateTime"
+ sent_at as "sent_at: DateTime",
+ sent_sequence as "sent_sequence: Sequence",
+ body
"#,
id,
channel.id,
- sequence,
sender.id,
- body,
sent_at,
+ sent_sequence,
+ body,
)
.map(|row| types::ChannelEvent {
- sequence: row.sequence,
+ sequence: row.sent_sequence,
at: row.sent_at,
data: types::MessageEvent {
channel: channel.clone(),
@@ -72,28 +72,12 @@ impl<'c> Events<'c> {
Ok(message)
}
- pub async fn assign_sequence(&mut self, channel: &Channel) -> Result<Sequence, sqlx::Error> {
- let next = sqlx::query_scalar!(
- r#"
- update channel
- set last_sequence = last_sequence + 1
- where id = $1
- returning last_sequence as "next_sequence: Sequence"
- "#,
- channel.id,
- )
- .fetch_one(&mut *self.0)
- .await?;
-
- Ok(next)
- }
-
- pub async fn delete_expired(
+ pub async fn delete(
&mut self,
channel: &Channel,
message: &message::Id,
- sequence: Sequence,
deleted_at: &DateTime,
+ deleted_sequence: Sequence,
) -> Result<types::ChannelEvent, sqlx::Error> {
sqlx::query_scalar!(
r#"
@@ -107,7 +91,7 @@ impl<'c> Events<'c> {
.await?;
Ok(types::ChannelEvent {
- sequence,
+ sequence: deleted_sequence,
at: *deleted_at,
data: types::MessageDeletedEvent {
channel: channel.clone(),
@@ -127,6 +111,7 @@ impl<'c> Events<'c> {
channel.id as "channel_id: channel::Id",
channel.name as "channel_name",
channel.created_at as "channel_created_at: DateTime",
+ channel.created_sequence as "channel_created_sequence: Sequence",
message.id as "message: message::Id"
from message
join channel on message.channel = channel.id
@@ -141,6 +126,7 @@ impl<'c> Events<'c> {
id: row.channel_id,
name: row.channel_name,
created_at: row.channel_created_at,
+ created_sequence: row.channel_created_sequence,
},
row.message,
)
@@ -153,32 +139,39 @@ impl<'c> Events<'c> {
pub async fn replay(
&mut self,
- channel: &Channel,
resume_at: Option<Sequence>,
) -> Result<Vec<types::ChannelEvent>, sqlx::Error> {
let events = sqlx::query!(
r#"
select
message.id as "id: message::Id",
- sequence as "sequence: Sequence",
- login.id as "sender_id: login::Id",
- login.name as sender_name,
- message.body,
- message.sent_at as "sent_at: DateTime"
+ channel.id as "channel_id: channel::Id",
+ channel.name as "channel_name",
+ channel.created_at as "channel_created_at: DateTime",
+ channel.created_sequence as "channel_created_sequence: Sequence",
+ sender.id as "sender_id: login::Id",
+ sender.name as sender_name,
+ message.sent_at as "sent_at: DateTime",
+ message.sent_sequence as "sent_sequence: Sequence",
+ message.body
from message
- join login on message.sender = login.id
- where channel = $1
- and coalesce(sequence > $2, true)
- order by sequence asc
+ join channel on message.channel = channel.id
+ join login as sender on message.sender = sender.id
+ where coalesce(message.sent_sequence > $1, true)
+ order by sent_sequence asc
"#,
- channel.id,
resume_at,
)
.map(|row| types::ChannelEvent {
- sequence: row.sequence,
+ sequence: row.sent_sequence,
at: row.sent_at,
data: types::MessageEvent {
- channel: channel.clone(),
+ channel: Channel {
+ id: row.channel_id,
+ name: row.channel_name,
+ created_at: row.channel_created_at,
+ created_sequence: row.channel_created_sequence,
+ },
sender: login::Login {
id: row.sender_id,
name: row.sender_name,
diff --git a/src/events/routes.rs b/src/events/routes.rs
index f09474c..e3a959f 100644
--- a/src/events/routes.rs
+++ b/src/events/routes.rs
@@ -9,14 +9,12 @@ use axum::{
};
use futures::stream::{Stream, StreamExt as _};
-use super::{
- extract::LastEventId,
- types::{self, ResumePoint},
-};
+use super::{extract::LastEventId, types};
use crate::{
app::App,
error::{Internal, Unauthorized},
login::{app::ValidateError, extract::Identity},
+ repo::sequence::Sequence,
};
#[cfg(test)]
@@ -29,11 +27,9 @@ pub fn router() -> Router<App> {
async fn events(
State(app): State<App>,
identity: Identity,
- last_event_id: Option<LastEventId<ResumePoint>>,
-) -> Result<Events<impl Stream<Item = types::ResumableEvent> + std::fmt::Debug>, EventsError> {
- let resume_at = last_event_id
- .map(LastEventId::into_inner)
- .unwrap_or_default();
+ last_event_id: Option<LastEventId<Sequence>>,
+) -> Result<Events<impl Stream<Item = types::ChannelEvent> + std::fmt::Debug>, EventsError> {
+ let resume_at = last_event_id.map(LastEventId::into_inner);
let stream = app.events().subscribe(resume_at).await?;
let stream = app.logins().limit_stream(identity.token, stream).await?;
@@ -46,7 +42,7 @@ struct Events<S>(S);
impl<S> IntoResponse for Events<S>
where
- S: Stream<Item = types::ResumableEvent> + Send + 'static,
+ S: Stream<Item = types::ChannelEvent> + Send + 'static,
{
fn into_response(self) -> Response {
let Self(stream) = self;
@@ -57,14 +53,12 @@ where
}
}
-impl TryFrom<types::ResumableEvent> for sse::Event {
+impl TryFrom<types::ChannelEvent> for sse::Event {
type Error = serde_json::Error;
- fn try_from(value: types::ResumableEvent) -> Result<Self, Self::Error> {
- let types::ResumableEvent(resume_at, data) = value;
-
- let id = serde_json::to_string(&resume_at)?;
- let data = serde_json::to_string_pretty(&data)?;
+ fn try_from(event: types::ChannelEvent) -> Result<Self, Self::Error> {
+ let id = serde_json::to_string(&event.sequence)?;
+ let data = serde_json::to_string_pretty(&event)?;
let event = Self::default().id(id).data(data);
diff --git a/src/events/routes/test.rs b/src/events/routes/test.rs
index 820192d..1cfca4f 100644
--- a/src/events/routes/test.rs
+++ b/src/events/routes/test.rs
@@ -5,7 +5,7 @@ use futures::{
};
use crate::{
- events::{routes, types},
+ events::routes,
test::fixtures::{self, future::Immediately as _},
};
@@ -28,7 +28,7 @@ async fn includes_historical_message() {
// Verify the structure of the response.
- let types::ResumableEvent(_, event) = events
+ let event = events
.filter(fixtures::filter::messages())
.next()
.immediately()
@@ -58,7 +58,7 @@ async fn includes_live_message() {
let sender = fixtures::login::create(&app).await;
let message = fixtures::message::send(&app, &sender, &channel, &fixtures::now()).await;
- let types::ResumableEvent(_, event) = events
+ let event = events
.filter(fixtures::filter::messages())
.next()
.immediately()
@@ -108,9 +108,7 @@ async fn includes_multiple_channels() {
.await;
for message in &messages {
- assert!(events
- .iter()
- .any(|types::ResumableEvent(_, event)| { event == message }));
+ assert!(events.iter().any(|event| { event == message }));
}
}
@@ -138,12 +136,11 @@ async fn sequential_messages() {
// Verify the structure of the response.
- let mut events =
- events.filter(|types::ResumableEvent(_, event)| future::ready(messages.contains(event)));
+ let mut events = events.filter(|event| future::ready(messages.contains(event)));
// Verify delivery in order
for message in &messages {
- let types::ResumableEvent(_, event) = events
+ let event = events
.next()
.immediately()
.await
@@ -179,7 +176,7 @@ async fn resumes_from() {
.await
.expect("subscribe never fails");
- let types::ResumableEvent(last_event_id, event) = events
+ let event = events
.filter(fixtures::filter::messages())
.next()
.immediately()
@@ -188,7 +185,7 @@ async fn resumes_from() {
assert_eq!(initial_message, event);
- last_event_id
+ event.sequence
};
// Resume after disconnect
@@ -205,9 +202,7 @@ async fn resumes_from() {
.await;
for message in &later_messages {
- assert!(events
- .iter()
- .any(|types::ResumableEvent(_, event)| event == message));
+ assert!(events.iter().any(|event| event == message));
}
}
@@ -259,14 +254,12 @@ async fn serial_resume() {
.await;
for message in &initial_messages {
- assert!(events
- .iter()
- .any(|types::ResumableEvent(_, event)| event == message));
+ assert!(events.iter().any(|event| event == message));
}
- let types::ResumableEvent(id, _) = events.last().expect("this vec is non-empty");
+ let event = events.last().expect("this vec is non-empty");
- id.to_owned()
+ event.sequence
};
// Resume after disconnect
@@ -296,14 +289,12 @@ async fn serial_resume() {
.await;
for message in &resume_messages {
- assert!(events
- .iter()
- .any(|types::ResumableEvent(_, event)| event == message));
+ assert!(events.iter().any(|event| event == message));
}
- let types::ResumableEvent(id, _) = events.last().expect("this vec is non-empty");
+ let event = events.last().expect("this vec is non-empty");
- id.to_owned()
+ event.sequence
};
// Resume after disconnect a second time
@@ -335,9 +326,7 @@ async fn serial_resume() {
// This set of messages, in particular, _should not_ include any prior
// messages from `initial_messages` or `resume_messages`.
for message in &final_messages {
- assert!(events
- .iter()
- .any(|types::ResumableEvent(_, event)| event == message));
+ assert!(events.iter().any(|event| event == message));
}
};
}
@@ -375,7 +364,7 @@ async fn terminates_on_token_expiry() {
];
assert!(events
- .filter(|types::ResumableEvent(_, event)| future::ready(messages.contains(event)))
+ .filter(|event| future::ready(messages.contains(event)))
.next()
.immediately()
.await
@@ -417,7 +406,7 @@ async fn terminates_on_logout() {
];
assert!(events
- .filter(|types::ResumableEvent(_, event)| future::ready(messages.contains(event)))
+ .filter(|event| future::ready(messages.contains(event)))
.next()
.immediately()
.await
diff --git a/src/events/types.rs b/src/events/types.rs
index d954512..aca3af4 100644
--- a/src/events/types.rs
+++ b/src/events/types.rs
@@ -1,84 +1,13 @@
-use std::collections::BTreeMap;
-
use crate::{
clock::DateTime,
repo::{
channel::{self, Channel},
login::Login,
message,
+ sequence::Sequence,
},
};
-#[derive(
- Debug,
- Default,
- Eq,
- Ord,
- PartialEq,
- PartialOrd,
- Clone,
- Copy,
- serde::Serialize,
- serde::Deserialize,
- sqlx::Type,
-)]
-#[serde(transparent)]
-#[sqlx(transparent)]
-pub struct Sequence(i64);
-
-impl Sequence {
- pub fn next(self) -> Self {
- let Self(current) = self;
- Self(current + 1)
- }
-}
-
-// For the purposes of event replay, a resume point is a vector of resume
-// elements. A resume element associates a channel (by ID) with the latest event
-// seen in that channel so far. Replaying the event stream can restart at a
-// predictable point - hence the name. These values can be serialized and sent
-// to the client as JSON dicts, then rehydrated to recover the resume point at a
-// later time.
-//
-// Using a sorted map ensures that there is a canonical representation for
-// each resume point.
-#[derive(Clone, Debug, Default, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)]
-#[serde(transparent)]
-pub struct ResumePoint(BTreeMap<channel::Id, Sequence>);
-
-impl ResumePoint {
- pub fn advance<'e>(&mut self, event: impl Into<ResumeElement<'e>>) {
- let Self(elements) = self;
- let ResumeElement(channel, sequence) = event.into();
- elements.insert(channel.clone(), sequence);
- }
-
- pub fn forget<'e>(&mut self, event: impl Into<ResumeElement<'e>>) {
- let Self(elements) = self;
- let ResumeElement(channel, _) = event.into();
- elements.remove(channel);
- }
-
- pub fn get(&self, channel: &channel::Id) -> Option<Sequence> {
- let Self(elements) = self;
- elements.get(channel).copied()
- }
-
- pub fn not_after<'e>(&self, event: impl Into<ResumeElement<'e>>) -> bool {
- let Self(elements) = self;
- let ResumeElement(channel, sequence) = event.into();
-
- elements
- .get(channel)
- .map_or(true, |resume_at| resume_at < &sequence)
- }
-}
-
-pub struct ResumeElement<'i>(&'i channel::Id, Sequence);
-
-#[derive(Clone, Debug)]
-pub struct ResumableEvent(pub ResumePoint, pub ChannelEvent);
-
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize)]
pub struct ChannelEvent {
#[serde(skip)]
@@ -92,7 +21,7 @@ impl ChannelEvent {
pub fn created(channel: Channel) -> Self {
Self {
at: channel.created_at,
- sequence: Sequence::default(),
+ sequence: channel.created_sequence,
data: CreatedEvent { channel }.into(),
}
}
@@ -107,9 +36,9 @@ impl ChannelEvent {
}
}
-impl<'c> From<&'c ChannelEvent> for ResumeElement<'c> {
+impl<'c> From<&'c ChannelEvent> for Sequence {
fn from(event: &'c ChannelEvent) -> Self {
- Self(event.channel_id(), event.sequence)
+ event.sequence
}
}
diff --git a/src/repo/channel.rs b/src/repo/channel.rs
index 3c7468f..efc2ced 100644
--- a/src/repo/channel.rs
+++ b/src/repo/channel.rs
@@ -2,9 +2,10 @@ use std::fmt;
use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
+use super::sequence::Sequence;
use crate::{
clock::DateTime,
- events::types::{self, Sequence},
+ events::types::{self},
id::Id as BaseId,
};
@@ -26,6 +27,8 @@ pub struct Channel {
pub name: String,
#[serde(skip)]
pub created_at: DateTime,
+ #[serde(skip)]
+ pub created_sequence: Sequence,
}
impl<'c> Channels<'c> {
@@ -33,25 +36,25 @@ impl<'c> Channels<'c> {
&mut self,
name: &str,
created_at: &DateTime,
+ created_sequence: Sequence,
) -> Result<Channel, sqlx::Error> {
let id = Id::generate();
- let sequence = Sequence::default();
-
let channel = sqlx::query_as!(
Channel,
r#"
insert
- into channel (id, name, created_at, last_sequence)
+ into channel (id, name, created_at, created_sequence)
values ($1, $2, $3, $4)
returning
id as "id: Id",
name,
- created_at as "created_at: DateTime"
+ created_at as "created_at: DateTime",
+ created_sequence as "created_sequence: Sequence"
"#,
id,
name,
created_at,
- sequence,
+ created_sequence,
)
.fetch_one(&mut *self.0)
.await?;
@@ -66,7 +69,8 @@ impl<'c> Channels<'c> {
select
id as "id: Id",
name,
- created_at as "created_at: DateTime"
+ created_at as "created_at: DateTime",
+ created_sequence as "created_sequence: Sequence"
from channel
where id = $1
"#,
@@ -85,7 +89,8 @@ impl<'c> Channels<'c> {
select
id as "id: Id",
name,
- created_at as "created_at: DateTime"
+ created_at as "created_at: DateTime",
+ created_sequence as "created_sequence: Sequence"
from channel
order by channel.name
"#,
@@ -96,11 +101,34 @@ impl<'c> Channels<'c> {
Ok(channels)
}
- pub async fn delete_expired(
+ pub async fn replay(
+ &mut self,
+ resume_at: Option<Sequence>,
+ ) -> Result<Vec<Channel>, sqlx::Error> {
+ let channels = sqlx::query_as!(
+ Channel,
+ r#"
+ select
+ id as "id: Id",
+ name,
+ created_at as "created_at: DateTime",
+ created_sequence as "created_sequence: Sequence"
+ from channel
+ where coalesce(created_sequence > $1, true)
+ "#,
+ resume_at,
+ )
+ .fetch_all(&mut *self.0)
+ .await?;
+
+ Ok(channels)
+ }
+
+ pub async fn delete(
&mut self,
channel: &Channel,
- sequence: Sequence,
deleted_at: &DateTime,
+ deleted_sequence: Sequence,
) -> Result<types::ChannelEvent, sqlx::Error> {
let channel = channel.id.clone();
sqlx::query_scalar!(
@@ -115,7 +143,7 @@ impl<'c> Channels<'c> {
.await?;
Ok(types::ChannelEvent {
- sequence,
+ sequence: deleted_sequence,
at: *deleted_at,
data: types::DeletedEvent { channel }.into(),
})
@@ -128,7 +156,8 @@ impl<'c> Channels<'c> {
select
channel.id as "id: Id",
channel.name,
- channel.created_at as "created_at: DateTime"
+ channel.created_at as "created_at: DateTime",
+ channel.created_sequence as "created_sequence: Sequence"
from channel
left join message
where created_at < $1
diff --git a/src/repo/mod.rs b/src/repo/mod.rs
index cb9d7c8..8f271f4 100644
--- a/src/repo/mod.rs
+++ b/src/repo/mod.rs
@@ -3,4 +3,5 @@ pub mod error;
pub mod login;
pub mod message;
pub mod pool;
+pub mod sequence;
pub mod token;
diff --git a/src/repo/sequence.rs b/src/repo/sequence.rs
new file mode 100644
index 0000000..8fe9dab
--- /dev/null
+++ b/src/repo/sequence.rs
@@ -0,0 +1,45 @@
+use sqlx::{sqlite::Sqlite, SqliteConnection, Transaction};
+
+pub trait Provider {
+ fn sequence(&mut self) -> Sequences;
+}
+
+impl<'c> Provider for Transaction<'c, Sqlite> {
+ fn sequence(&mut self) -> Sequences {
+ Sequences(self)
+ }
+}
+
+#[derive(
+ Clone,
+ Copy,
+ Debug,
+ Eq,
+ Ord,
+ PartialEq,
+ PartialOrd,
+ serde::Deserialize,
+ serde::Serialize,
+ sqlx::Type,
+)]
+#[serde(transparent)]
+#[sqlx(transparent)]
+pub struct Sequence(i64);
+
+pub struct Sequences<'t>(&'t mut SqliteConnection);
+
+impl<'c> Sequences<'c> {
+ pub async fn next(&mut self) -> Result<Sequence, sqlx::Error> {
+ let next = sqlx::query_scalar!(
+ r#"
+ update event_sequence
+ set last_value = last_value + 1
+ returning last_value as "next_value: Sequence"
+ "#,
+ )
+ .fetch_one(&mut *self.0)
+ .await?;
+
+ Ok(next)
+ }
+}
diff --git a/src/test/fixtures/filter.rs b/src/test/fixtures/filter.rs
index fbebced..c31fa58 100644
--- a/src/test/fixtures/filter.rs
+++ b/src/test/fixtures/filter.rs
@@ -2,14 +2,10 @@ use futures::future;
use crate::events::types;
-pub fn messages() -> impl FnMut(&types::ResumableEvent) -> future::Ready<bool> {
- |types::ResumableEvent(_, event)| {
- future::ready(matches!(event.data, types::ChannelEventData::Message(_)))
- }
+pub fn messages() -> impl FnMut(&types::ChannelEvent) -> future::Ready<bool> {
+ |event| future::ready(matches!(event.data, types::ChannelEventData::Message(_)))
}
-pub fn created() -> impl FnMut(&types::ResumableEvent) -> future::Ready<bool> {
- |types::ResumableEvent(_, event)| {
- future::ready(matches!(event.data, types::ChannelEventData::Created(_)))
- }
+pub fn created() -> impl FnMut(&types::ChannelEvent) -> future::Ready<bool> {
+ |event| future::ready(matches!(event.data, types::ChannelEventData::Created(_)))
}