Compare commits

...

2 Commits

@@ -13,22 +13,44 @@ let interval: NodeJS.Timer | null = null;
 let moreOlderPostsAvailable = true;
 let loadingOlderPosts = false;
 
-function refresh() {
+interface FetchOptions {
+    since?: string,
+    before?: string,
+    count?: number
+}
+
+async function fetchPosts(options: FetchOptions): Promise<Post[]> {
     const params = new URLSearchParams();
-    if (data.posts.length > 0) {
-        params.set('since', data.posts[0].created_at);
+    if (options?.since !== undefined) {
+        params.set('since', options.since);
     }
-    fetch(`/api/posts?${params}`)
-        .then(r => r.json())
-        .then((resp: Post[]) => {
+    if (options?.before !== undefined) {
+        params.set('before', options.before);
+    }
+    if (options?.count !== undefined) {
+        params.set('count', options.count.toFixed(0));
+    }
+    const response = await fetch(`/api/posts?${params}`);
+    return await response.json();
+}
+
+function filterDuplicates(posts: Post[]): Post[] {
+    return posts.filter((obj, index, arr) => {
+        return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
+    });
+}
+
+function refresh() {
+    let filter: FetchOptions = {};
+    if (data.posts.length > 0) {
+        filter = { since: data.posts[0].created_at };
+    }
+    fetchPosts(filter).then(resp => {
         if (resp.length > 0) {
             // Prepend new posts, filter dupes
             // There shouldn't be any duplicates, but better be safe than sorry
-            const combined = resp.concat(data.posts);
-            const filteredPosts = combined.filter((obj, index, arr) => {
-                return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
-            });
-            data.posts = filteredPosts;
+            data.posts = filterDuplicates(resp.concat(data.posts));
        }
    })
    .catch(e => {
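
Aside on the new FetchOptions type introduced in this hunk: it maps one-to-one onto the query string that fetchPosts builds. Below is a minimal standalone sketch of that serialization, assuming a runtime with a global URLSearchParams (Node or a browser); toQueryString and the sample values are illustrative only and not part of the change.

interface FetchOptions {
    since?: string,
    before?: string,
    count?: number
}

// Hypothetical helper mirroring the branches in fetchPosts above.
function toQueryString(options: FetchOptions): string {
    const params = new URLSearchParams();
    if (options.since !== undefined) params.set('since', options.since);
    if (options.before !== undefined) params.set('before', options.before);
    // toFixed(0) rounds to a whole number of posts before it reaches the API.
    if (options.count !== undefined) params.set('count', options.count.toFixed(0));
    return params.toString();
}

console.log(toQueryString({ before: '2024-05-01T12:00:00Z', count: 20 }));
// before=2024-05-01T12%3A00%3A00Z&count=20
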
@@ -57,24 +79,21 @@ onMount(async () => {
     }
 });
 
-async function loadOlderPosts() {
+function loadOlderPosts() {
     loadingOlderPosts = true;
-    const params = new URLSearchParams();
+    const filter: FetchOptions = { count: 20 };
     if (data.posts.length > 0) {
-        params.set('before', data.posts[data.posts.length - 1].created_at);
+        filter.before = data.posts[data.posts.length - 1].created_at;
     }
-    await fetch(`/api/posts?${params}`)
-        .then(r => r.json())
-        .then((resp: Post[]) => {
+    fetchPosts(filter).then(resp => {
        if (resp.length > 0) {
            // Append old posts, filter dupes
            // There shouldn't be any duplicates, but better be safe than sorry
-            const combined = data.posts.concat(resp);
-            const filteredPosts = combined.filter((obj, index, arr) => {
-                return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
-            });
-            data.posts = filteredPosts;
-            moreOlderPostsAvailable = true;
+            data.posts = filterDuplicates(data.posts.concat(resp));
+            // If we got less than we expected, there are no older posts available
+            moreOlderPostsAvailable = resp.length >= (filter.count ?? 20);
        } else {
            moreOlderPostsAvailable = false;
        }
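
For completeness, the dedup helper keeps the first occurrence of each url, which is why refresh() puts the fresh posts first and loadOlderPosts() puts the existing posts first when concatenating. A self-contained sketch of that behaviour follows; the Post shape is an assumption based on the fields the diff references (url, created_at).

interface Post {
    url: string;
    created_at: string;
}

function filterDuplicates(posts: Post[]): Post[] {
    return posts.filter((obj, index, arr) => {
        // Keep a post only when this index is the first occurrence of its url.
        return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
    });
}

const merged: Post[] = [
    { url: '/posts/1', created_at: '2024-05-02T09:00:00Z' },
    { url: '/posts/2', created_at: '2024-05-01T18:30:00Z' },
    { url: '/posts/1', created_at: '2024-05-02T09:00:00Z' }, // duplicate url, dropped
];

console.log(filterDuplicates(merged).map(p => p.url)); // [ '/posts/1', '/posts/2' ]
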