diff --git a/client/testfixture/config.textproto b/client/testfixture/config.textproto
index 5e1034a..2652887 100644
--- a/client/testfixture/config.textproto
+++ b/client/testfixture/config.textproto
@@ -32,6 +32,12 @@ record_apis: [
name: "simple_subset_view"
table_name: "simple_subset_view"
acl_authenticated: [CREATE, READ, UPDATE, DELETE]
+ },
+ {
+ name: "movies"
+ table_name: "movies"
+ acl_world: [READ]
+ acl_authenticated: [CREATE, READ, UPDATE, DELETE]
}
]
schemas: [
diff --git a/client/testfixture/migrations/U1728810800__create_table_movies.sql b/client/testfixture/migrations/U1728810800__create_table_movies.sql
new file mode 100644
index 0000000..14c7fe1
--- /dev/null
+++ b/client/testfixture/migrations/U1728810800__create_table_movies.sql
@@ -0,0 +1,16 @@
+-- A table schema to hold the IMDB test dataset from:
+-- https://www.kaggle.com/datasets/inductiveanks/top-1000-imdb-movies-dataset/data
+--
+-- The only TrailBase API requirements are: "STRICT" typing and an INTEGER (or
+-- UUIDv7) PRIMARY KEY column.
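+--
+-- Note: in a STRICT table, columns declared as `ANY` accept values of any
+-- type; they are presumably used here because fields such as `year`,
+-- `metascore` and `gross` are not uniformly typed in the source CSV.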
+CREATE TABLE IF NOT EXISTS movies (
+ rank INTEGER PRIMARY KEY,
+ name TEXT NOT NULL,
+ year ANY NOT NULL,
+ watch_time INTEGER NOT NULL,
+ rating REAL NOT NULL,
+ metascore ANY,
+ gross ANY,
+ votes TEXT NOT NULL,
+ description TEXT NOT NULL
+) STRICT;
diff --git a/client/trailbase-dart/lib/src/client.dart b/client/trailbase-dart/lib/src/client.dart
index b7341f4..c467d94 100644
--- a/client/trailbase-dart/lib/src/client.dart
+++ b/client/trailbase-dart/lib/src/client.dart
@@ -103,8 +103,8 @@ class Pagination {
final int? limit;
const Pagination({
- required this.cursor,
- required this.limit,
+ this.cursor,
+ this.limit,
});
}
diff --git a/client/trailbase-dotnet/src/RecordApi.cs b/client/trailbase-dotnet/src/RecordApi.cs
index 84cbb13..e82b801 100644
--- a/client/trailbase-dotnet/src/RecordApi.cs
+++ b/client/trailbase-dotnet/src/RecordApi.cs
@@ -64,13 +64,13 @@ public override string ToString() {
/// Pagination state representation.
public class Pagination {
- /// Offset cursor.
- public string? cursor { get; }
/// Limit of elements per page.
public int? limit { get; }
+ /// Offset cursor.
+ public string? cursor { get; }
/// Pagination constructor.
- public Pagination(string? cursor, int? limit) {
+ public Pagination(int? limit = null, string? cursor = null) {
this.cursor = cursor;
this.limit = limit;
}
@@ -190,6 +190,7 @@ public ErrorEvent(string errorMsg) {
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(ResponseRecordId))]
+[JsonSerializable(typeof(ListResponse<JsonObject>))]
internal partial class SerializeResponseRecordIdContext : JsonSerializerContext {
}
@@ -286,9 +287,9 @@ private async Task CreateImpl(HttpContent recordJson) {
[RequiresDynamicCode(DynamicCodeMessage)]
[RequiresUnreferencedCode(UnreferencedCodeMessage)]
public async Task<ListResponse<T>> List<T>(
- Pagination? pagination,
- List<string>? order,
- List<string>? filters
+ Pagination? pagination = null,
+ List<string>? order = null,
+ List<string>? filters = null
) {
string json = await (await ListImpl(pagination, order, filters)).ReadAsStringAsync();
return JsonSerializer.Deserialize<ListResponse<T>>(json) ?? new ListResponse<T>(null, []);
@@ -297,20 +298,36 @@ public async Task> List(
/// <summary>
/// List records.
/// </summary>
+ /// <param name="jsonTypeInfo">Serialization type info for AOT mode.</param>
/// <param name="pagination">Pagination state.</param>
/// <param name="order">Sort results by the given columns in ascending/descending order, e.g. "-col_name".</param>
/// <param name="filters">Results filters, e.g. "col0[gte]=100".</param>
- /// <param name="jsonTypeInfo">Serialization type info for AOT mode.</param>
public async Task<ListResponse<T>> List<T>(
- Pagination? pagination,
- List<string>? order,
- List<string>? filters,
- JsonTypeInfo<ListResponse<T>> jsonTypeInfo
+ JsonTypeInfo<ListResponse<T>> jsonTypeInfo,
+ Pagination? pagination = null,
+ List<string>? order = null,
+ List<string>? filters = null
) {
string json = await (await ListImpl(pagination, order, filters)).ReadAsStringAsync();
return JsonSerializer.Deserialize<ListResponse<T>>(json, jsonTypeInfo) ?? new ListResponse<T>(null, []);
}
+ /// <summary>
+ /// List records.
+ /// </summary>
+ /// <param name="pagination">Pagination state.</param>
+ /// <param name="order">Sort results by the given columns in ascending/descending order, e.g. "-col_name".</param>
+ /// <param name="filters">Results filters, e.g. "col0[gte]=100".</param>
+ public async Task<ListResponse<JsonObject>> List(
+ Pagination? pagination = null,
+ List<string>? order = null,
+ List<string>? filters = null
+ ) {
+ string json = await (await ListImpl(pagination, order, filters)).ReadAsStringAsync();
+ return JsonSerializer.Deserialize<ListResponse<JsonObject>>(
+ json, SerializeResponseRecordIdContext.Default.ListResponseJsonObject) ?? new ListResponse<JsonObject>(null, []);
+ }
+
private async Task<HttpContent> ListImpl(
Pagination? pagination,
List<string>? order,
diff --git a/client/trailbase-dotnet/test/ClientTest.cs b/client/trailbase-dotnet/test/ClientTest.cs
index cd83df6..65328b3 100644
--- a/client/trailbase-dotnet/test/ClientTest.cs
+++ b/client/trailbase-dotnet/test/ClientTest.cs
@@ -236,10 +236,8 @@ public async Task RecordsTest() {
{
ListResponse<SimpleStrict> response = await api.List(
- null,
- null,
- [$"text_not_null={messages[0]}"],
- SerializeSimpleStrictContext.Default.ListResponseSimpleStrict
+ SerializeSimpleStrictContext.Default.ListResponseSimpleStrict,
+ filters: [$"text_not_null={messages[0]}"]
)!;
Assert.Single(response.records);
Assert.Equal(messages[0], response.records[0].text_not_null);
@@ -247,20 +245,18 @@ public async Task RecordsTest() {
{
var responseAsc = await api.List(
- null,
- ["+text_not_null"],
- [$"text_not_null[like]=% =?&{suffix}"],
- SerializeSimpleStrictContext.Default.ListResponseSimpleStrict
+ SerializeSimpleStrictContext.Default.ListResponseSimpleStrict,
+ order: ["+text_not_null"],
+ filters: [$"text_not_null[like]=% =?&{suffix}"]
)!;
var recordsAsc = responseAsc.records;
Assert.Equal(messages.Count, recordsAsc.Count);
Assert.Equal(messages, recordsAsc.ConvertAll((e) => e.text_not_null));
var responseDesc = await api.List(
- null,
- ["-text_not_null"],
- [$"text_not_null[like]=%{suffix}"],
- SerializeSimpleStrictContext.Default.ListResponseSimpleStrict
+ SerializeSimpleStrictContext.Default.ListResponseSimpleStrict,
+ order: ["-text_not_null"],
+ filters: [$"text_not_null[like]=%{suffix}"]
)!;
var recordsDesc = responseDesc.records;
Assert.Equal(messages.Count, recordsDesc.Count);
@@ -298,10 +294,8 @@ await api.Update(
await api.Delete(id);
var response = await api.List(
- null,
- null,
- [$"text_not_null[like]=%{suffix}"],
- SerializeSimpleStrictContext.Default.ListResponseSimpleStrict
+ SerializeSimpleStrictContext.Default.ListResponseSimpleStrict,
+ filters: [$"text_not_null[like]=%{suffix}"]
)!;
Assert.Single(response.records);
diff --git a/client/trailbase-rs/src/lib.rs b/client/trailbase-rs/src/lib.rs
index eacb77a..ef87242 100644
--- a/client/trailbase-rs/src/lib.rs
+++ b/client/trailbase-rs/src/lib.rs
@@ -49,7 +49,7 @@ pub struct Tokens {
pub csrf_token: Option<String>,
}
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
pub struct Pagination {
pub cursor: Option<String>,
pub limit: Option<usize>,
@@ -164,36 +164,32 @@ impl RecordApi {
pub async fn list<T: DeserializeOwned>(
&self,
- pagination: Option,
- order: Option<&[&str]>,
- filters: Option<&[&str]>,
+ pagination: Pagination,
+ order: &[&str],
+ filters: &[&str],
) -> Result<ListResponse<T>, Error> {
let mut params: Vec<(Cow<'static, str>, Cow<'static, str>)> = vec![];
- if let Some(pagination) = pagination {
- if let Some(cursor) = pagination.cursor {
- params.push((Cow::Borrowed("cursor"), Cow::Owned(cursor)));
- }
+ if let Some(cursor) = pagination.cursor {
+ params.push((Cow::Borrowed("cursor"), Cow::Owned(cursor)));
+ }
- if let Some(limit) = pagination.limit {
- params.push((Cow::Borrowed("limit"), Cow::Owned(limit.to_string())));
- }
+ if let Some(limit) = pagination.limit {
+ params.push((Cow::Borrowed("limit"), Cow::Owned(limit.to_string())));
}
- if let Some(order) = order {
+ if !order.is_empty() {
params.push((Cow::Borrowed("order"), Cow::Owned(order.join(","))));
}
- if let Some(filters) = filters {
- for filter in filters {
- let Some((name_op, value)) = filter.split_once("=") else {
- panic!("Filter '{filter}' does not match: 'name[op]=value'");
- };
+ for filter in filters {
+ let Some((name_op, value)) = filter.split_once("=") else {
+ panic!("Filter '{filter}' does not match: 'name[op]=value'");
+ };
- params.push((
- Cow::Owned(name_op.to_string()),
- Cow::Owned(value.to_string()),
- ));
- }
+ params.push((
+ Cow::Owned(name_op.to_string()),
+ Cow::Owned(value.to_string()),
+ ));
}
let response = self
diff --git a/client/trailbase-rs/tests/integration_test.rs b/client/trailbase-rs/tests/integration_test.rs
index ff28ec0..2db4f0b 100644
--- a/client/trailbase-rs/tests/integration_test.rs
+++ b/client/trailbase-rs/tests/integration_test.rs
@@ -123,7 +123,7 @@ async fn records_test() {
let filter = format!("text_not_null={}", messages[0]);
let filters = vec![filter.as_str()];
let response = api
- .list::(None, None, Some(filters.as_slice()))
+ .list::(Pagination::default(), &[], filters.as_slice())
.await
.unwrap();
@@ -131,12 +131,12 @@ async fn records_test() {
let second_response = api
.list::(
- Some(Pagination {
+ Pagination {
cursor: response.cursor,
- limit: None,
- }),
- None,
- Some(filters.as_slice()),
+ ..Default::default()
+ },
+ &[],
+ filters.as_slice(),
)
.await
.unwrap();
@@ -148,7 +148,7 @@ async fn records_test() {
// List all the messages
let filter = format!("text_not_null[like]=% =?&{now}");
let records_ascending: Vec = api
- .list(None, Some(&["+text_not_null"]), Some(&[&filter]))
+ .list(Pagination::default(), &["+text_not_null"], &[&filter])
.await
.unwrap()
.records;
@@ -160,7 +160,7 @@ async fn records_test() {
assert_eq!(messages, messages_ascending);
let records_descending: Vec = api
- .list(None, Some(&["-text_not_null"]), Some(&[&filter]))
+ .list(Pagination::default(), &["-text_not_null"], &[&filter])
.await
.unwrap()
.records;
diff --git a/docs/examples/record_api_curl/list.sh b/docs/examples/record_api_curl/list.sh
new file mode 100755
index 0000000..3dec6fb
--- /dev/null
+++ b/docs/examples/record_api_curl/list.sh
@@ -0,0 +1,5 @@
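+# --globoff (-g) disables curl's URL globbing so that the literal "[" and "]"
+# in the filter operators below (e.g. "[lt]", "[like]") are sent as-is.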
+curl --globoff \
+ --header "Content-Type: application/json" \
+ --header "Authorization: Bearer ${AUTH_TOKEN}" \
+ --request GET \
+ 'http://localhost:4000/api/records/v1/movies?limit=3&order=rank&watch_time[lt]=120&description[like]=%love%'
diff --git a/docs/examples/record_api_dart/lib/record_api.dart b/docs/examples/record_api_dart/lib/record_api.dart
index e6b027e..dfa67a4 100644
--- a/docs/examples/record_api_dart/lib/record_api.dart
+++ b/docs/examples/record_api_dart/lib/record_api.dart
@@ -1,5 +1,6 @@
export 'src/create.dart';
-export 'src/read.dart';
-export 'src/update.dart';
export 'src/delete.dart';
+export 'src/list.dart';
+export 'src/read.dart';
export 'src/subscribe.dart';
+export 'src/update.dart';
diff --git a/docs/examples/record_api_dart/lib/src/list.dart b/docs/examples/record_api_dart/lib/src/list.dart
new file mode 100644
index 0000000..ed6e5c1
--- /dev/null
+++ b/docs/examples/record_api_dart/lib/src/list.dart
@@ -0,0 +1,8 @@
+import 'package:trailbase/trailbase.dart';
+
+Future<ListResponse> list(Client client) async =>
+ await client.records('movies').list(
+ pagination: Pagination(limit: 3),
+ order: ['rank'],
+ filters: ['watch_time[lt]=120', 'description[like]=%love%'],
+ );
diff --git a/docs/examples/record_api_dart/test/record_api_test.dart b/docs/examples/record_api_dart/test/record_api_test.dart
index 279c798..718c68a 100644
--- a/docs/examples/record_api_dart/test/record_api_test.dart
+++ b/docs/examples/record_api_dart/test/record_api_test.dart
@@ -71,4 +71,12 @@ void main() {
}).toList();
expect(tableEventList.length, equals(3));
});
+
+ test('Test code listing example', () async {
+ final client = await connect();
+ final results = await list(client);
+
+ expect(results.records.length, 3);
+ expect(results.records[0]['name'], 'Casablanca');
+ });
}
diff --git a/docs/examples/record_api_dotnet/ExamplesTest.cs b/docs/examples/record_api_dotnet/ExamplesTest.cs
index ee6e609..0de3ad9 100644
--- a/docs/examples/record_api_dotnet/ExamplesTest.cs
+++ b/docs/examples/record_api_dotnet/ExamplesTest.cs
@@ -61,4 +61,13 @@ public async Task BasicTest() {
}
}
}
+
+ [Fact]
+ public async Task ListTest() {
+ var client = await Connect();
+ var response = await Examples.List(client);
+
+ Assert.Equal(3, response.records.Count);
+ Assert.Equal("Casablanca", response.records[0]["name"]!.ToString());
+ }
}
diff --git a/docs/examples/record_api_dotnet/List.cs b/docs/examples/record_api_dotnet/List.cs
new file mode 100644
index 0000000..82282b5
--- /dev/null
+++ b/docs/examples/record_api_dotnet/List.cs
@@ -0,0 +1,10 @@
+using TrailBase;
+using System.Text.Json.Nodes;
+
+public partial class Examples {
+ public static async Task<ListResponse<JsonObject>> List(Client client) =>
+ await client.Records("movies").List(
+ pagination: new Pagination(limit: 3),
+ order: ["rank"],
+ filters: ["watch_time[lt]=120", "description[like]=%love%"]);
+}
diff --git a/docs/examples/record_api_rs/src/lib.rs b/docs/examples/record_api_rs/src/lib.rs
index 828524e..b2da0e6 100644
--- a/docs/examples/record_api_rs/src/lib.rs
+++ b/docs/examples/record_api_rs/src/lib.rs
@@ -1,5 +1,6 @@
pub mod create;
pub mod delete;
+pub mod list;
pub mod read;
pub mod subscribe;
pub mod update;
@@ -11,6 +12,7 @@ mod test {
use crate::create::*;
use crate::delete::*;
+ use crate::list::*;
use crate::read::*;
use crate::subscribe::*;
use crate::update::*;
@@ -58,4 +60,18 @@ mod test {
let table_events = table_stream.take(3).collect::<Vec<_>>().await;
assert_eq!(table_events.len(), 3);
}
+
+ #[ignore]
+ #[tokio::test]
+ async fn list_test() {
+ let client = connect().await;
+
+ let response = list(&client).await.unwrap();
+
+ assert_eq!(response.records.len(), 3);
+ assert_eq!(
+ response.records[0].get("name").unwrap(),
+ &serde_json::Value::String("Casablanca".into())
+ );
+ }
}
diff --git a/docs/examples/record_api_rs/src/list.rs b/docs/examples/record_api_rs/src/list.rs
new file mode 100644
index 0000000..0d2322e
--- /dev/null
+++ b/docs/examples/record_api_rs/src/list.rs
@@ -0,0 +1,17 @@
+use trailbase_client::{Client, ListResponse, Pagination};
+
+pub async fn list(client: &Client) -> anyhow::Result<ListResponse<serde_json::Value>> {
+ Ok(
+ client
+ .records("movies")
+ .list(
+ Pagination {
+ limit: Some(3),
+ ..Default::default()
+ },
+ &["rank"],
+ &["watch_time[lt]=120", "description[like]=%love%"],
+ )
+ .await?,
+ )
+}
diff --git a/docs/examples/record_api_ts/src/list.ts b/docs/examples/record_api_ts/src/list.ts
new file mode 100644
index 0000000..2638fc8
--- /dev/null
+++ b/docs/examples/record_api_ts/src/list.ts
@@ -0,0 +1,10 @@
+import { Client, type ListResponse } from "trailbase";
+
+export const list = async (client: Client): Promise<ListResponse<object>> =>
+ await client.records("movies").list({
+ pagination: {
+ limit: 3,
+ },
+ order: ["rank"],
+ filters: ["watch_time[lt]=120", "description[like]=%love%"],
+ });
diff --git a/docs/examples/record_api_ts/tests/basic.test.ts b/docs/examples/record_api_ts/tests/basic.test.ts
index da34875..6b8fa4b 100644
--- a/docs/examples/record_api_ts/tests/basic.test.ts
+++ b/docs/examples/record_api_ts/tests/basic.test.ts
@@ -5,6 +5,7 @@ import { create } from "../src/create.ts";
import { read } from "../src/read.ts";
import { update } from "../src/update.ts";
import { remove } from "../src/delete.ts";
+import { list } from "../src/list.ts";
import { subscribe, subscribeAll } from "../src/subscribe.ts";
async function connect(): Promise<Client> {
@@ -58,3 +59,18 @@ test("Test code examples", async () => {
}
});
+
+test("Test list examples", async () => {
+ const client = await connect();
+
+ const response = await list(client);
+
+ expect(response.records.length).toBe(3);
+
+ type Movie = {
+ name: string;
+ };
+
+ const record = response.records[0] as Movie;
+ expect(record.name).toBe("Casablanca");
+});
diff --git a/docs/src/content/docs/documentation/APIs/record_apis.mdx b/docs/src/content/docs/documentation/APIs/record_apis.mdx
index 0badaee..9356057 100644
--- a/docs/src/content/docs/documentation/APIs/record_apis.mdx
+++ b/docs/src/content/docs/documentation/APIs/record_apis.mdx
@@ -91,7 +91,7 @@ of a group "mygroup":
Independently, you can use `VIEW`s to filter which rows and columns of
your `TABLE`s should be accessible.
-#### Building access groups and capabilities
+#### Building Access Groups and Capabilities
As hinted at by the example above, the SQL access rules can be used to
build higher-level access protection such as group ACLs or capabilities.
@@ -277,15 +277,17 @@ and query parameters.
Parameters:
-* Pagination can be controlled with two parameters: `limit=N` (with a hard
- limit of 1024) and `cursor=<cursor>`.
-* Ordering can be controlled via `order=[[+-]?<column_name>]+`, e.g.
- `order=created,-rank`, which would sort records first by their `created`
- column in ascending order (same as "+") and then by the `rank` column in
- descending order due to the "-".
-* Lastly, one can filter records by matching against one or more columns like
- `<column_name>[op]=<value>`, e.g. `revenue[gt]=0` to request only records
- with revenue values "greater than" 0. The supported operators are:
+* Pagination can be controlled via two query parameters:
+ * `limit=N` (with a built-in hard limit of 1024 to avoid abuse) and
+ * `cursor=<cursor>`.
+* Ordering can be controlled using the `order=[[+-]?<column_name>]+` parameter, e.g.
+ `order=created,-rank`, which sorts records first by their `created` column in
+ ascending order (equivalent to a "+" prefix) and then by their `rank` column in
+ descending order due to the "-" prefix.
+* Filtering can be controlled by passing one or more
+ `<column_name>[op]=<value>` parameters.
+ For example, `revenue[gt]=0` would list records with a positive `revenue` only.
+ Supported operators are:
* equal, is the empty operator, e.g. `?success=TRUE`.
* **not**|**ne**: not equal
* **gte**: greater-than-equal
@@ -295,13 +297,36 @@ Parameters:
* **like**: SQL `LIKE` operator
* **re**: SQL `REGEXP` operator
-For example, to query the 10 highest grossing movies with a watch time less
-than 2 hours and an actor called John, one could query:
+For example, to query the top-3 ranked movies with a watch time below 2 hours
+and "love" in their description:
-export const curlListRecords =
- `curl -g '${apiPath({name: "movies?limit=10&order=grossing&watch_time_min[lt]=120&actors[like]=%John%", prefix: "http://"})}'`;
+import listDartCode from "@examples/record_api_dart/lib/src/list.dart?raw";
+import listTsCode from "@examples/record_api_ts/src/list.ts?raw";
+import listRustCode from "@examples/record_api_rs/src/list.rs?raw";
+import listDotnetCode from "@examples/record_api_dotnet/List.cs?raw";
+import listCurlCode from "@examples/record_api_curl/list.sh?raw";
-<Code lang="sh" code={curlListRecords} />
+<Tabs>
+  <TabItem label="Dart">
+    <Code lang="dart" code={listDartCode} />
+  </TabItem>
+
+  <TabItem label="TypeScript">
+    <Code lang="ts" code={listTsCode} />
+  </TabItem>
+
+  <TabItem label="Rust">
+    <Code lang="rust" code={listRustCode} />
+  </TabItem>
+
+  <TabItem label="C#">
+    <Code lang="csharp" code={listDotnetCode} />
+  </TabItem>
+
+  <TabItem label="curl">
+    <Code lang="sh" code={listCurlCode} />
+  </TabItem>
+</Tabs>
### Subscribe