Implement issues for v1.1.0 #20

Merged
phlaym merged 24 commits from v1.1.0 into main 2023-04-07 15:07:41 +00:00
Showing only changes of commit fee1475779 - Show all commits

View File

@ -13,13 +13,22 @@ let interval: NodeJS.Timer | null = null;
let moreOlderPostsAvailable = true; let moreOlderPostsAvailable = true;
let loadingOlderPosts = false; let loadingOlderPosts = false;
async function fetchPosts(filter: { since?: string, before?: string }): Promise<Post[]> { interface FetchOptions {
since?: string,
before?: string,
count?: number
}
async function fetchPosts(options: FetchOptions): Promise<Post[]> {
const params = new URLSearchParams(); const params = new URLSearchParams();
if (filter?.since !== undefined) { if (options?.since !== undefined) {
params.set('since', filter.since); params.set('since', options.since);
} }
if (filter?.before !== undefined) { if (options?.before !== undefined) {
params.set('before', filter.before); params.set('before', options.before);
}
if (options?.count !== undefined) {
params.set('count', options.count.toFixed(0));
} }
const response = await fetch(`/api/posts?${params}`); const response = await fetch(`/api/posts?${params}`);
@ -33,7 +42,7 @@ function filterDuplicates(posts: Post[]): Post[] {
} }
function refresh() { function refresh() {
let filter = {}; let filter: FetchOptions = {};
if (data.posts.length > 0) { if (data.posts.length > 0) {
filter = { since: data.posts[0].created_at }; filter = { since: data.posts[0].created_at };
} }
@ -72,9 +81,9 @@ onMount(async () => {
function loadOlderPosts() { function loadOlderPosts() {
loadingOlderPosts = true; loadingOlderPosts = true;
let filter = {}; const filter: FetchOptions = { count: 20 };
if (data.posts.length > 0) { if (data.posts.length > 0) {
filter = { before: data.posts[data.posts.length - 1].created_at }; filter.before = data.posts[data.posts.length - 1].created_at;
} }
@ -83,7 +92,8 @@ function loadOlderPosts() {
// Append old posts, filter dupes // Append old posts, filter dupes
// There shouldn't be any duplicates, but better be safe than sorry // There shouldn't be any duplicates, but better be safe than sorry
data.posts = filterDuplicates(data.posts.concat(resp)); data.posts = filterDuplicates(data.posts.concat(resp));
moreOlderPostsAvailable = true; // If we got less than we expected, there are no older posts available
moreOlderPostsAvailable = resp.length >= (filter.count ?? 20);
} else { } else {
moreOlderPostsAvailable = false; moreOlderPostsAvailable = false;
} }