Refactor loading logic so that more code is shared between loading older and newer posts
parent 2409fa2b8d
commit a3fb47a329
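
For orientation before reading the diff: a minimal, self-contained sketch of the shared loading path this commit introduces. The names Post, fetchPosts, filterDuplicates, and the since/before query parameters are taken from the diff below; the demo() wrapper and the exact Post field list are illustrative assumptions only.

// Sketch only: Post is assumed to have at least url and created_at,
// matching how the component code in the diff uses it.
interface Post {
  url: string;
  created_at: string;
}

// Shared fetch helper: 'since' loads newer posts, 'before' loads older ones.
async function fetchPosts(filter: { since?: string, before?: string } = {}): Promise<Post[]> {
  const params = new URLSearchParams();
  if (filter.since !== undefined) {
    params.set('since', filter.since);
  }
  if (filter.before !== undefined) {
    params.set('before', filter.before);
  }
  const response = await fetch(`/api/posts?${params}`);
  return await response.json();
}

// Keep only the first post seen for each URL.
function filterDuplicates(posts: Post[]): Post[] {
  return posts.filter((obj, index, arr) => {
    return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
  });
}

// Both loading directions now go through the same helper (hypothetical demo):
async function demo(posts: Post[]): Promise<Post[]> {
  const newer = await fetchPosts({ since: posts[0]?.created_at });                 // as in refresh()
  const older = await fetchPosts({ before: posts[posts.length - 1]?.created_at }); // as in loadOlderPosts()
  return filterDuplicates(newer.concat(posts, older));
}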
@@ -13,22 +13,35 @@ let interval: NodeJS.Timer | null = null;
 let moreOlderPostsAvailable = true;
 let loadingOlderPosts = false;
 
-function refresh() {
+async function fetchPosts(filter: { since?: string, before?: string }): Promise<Post[]> {
   const params = new URLSearchParams();
-  if (data.posts.length > 0) {
-    params.set('since', data.posts[0].created_at);
+  if (filter?.since !== undefined) {
+    params.set('since', filter.since);
   }
-  fetch(`/api/posts?${params}`)
-    .then(r => r.json())
-    .then((resp: Post[]) => {
+  if (filter?.before !== undefined) {
+    params.set('before', filter.before);
+  }
+
+  const response = await fetch(`/api/posts?${params}`);
+  return await response.json();
+}
+
+function filterDuplicates(posts: Post[]): Post[] {
+  return posts.filter((obj, index, arr) => {
+    return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
+  });
+}
+
+function refresh() {
+  let filter = {};
+  if (data.posts.length > 0) {
+    filter = { since: data.posts[0].created_at };
+  }
+  fetchPosts(filter).then(resp => {
     if (resp.length > 0) {
       // Prepend new posts, filter dupes
       // There shouldn't be any duplicates, but better be safe than sorry
-      const combined = resp.concat(data.posts);
-      const filteredPosts = combined.filter((obj, index, arr) => {
-        return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
-      });
-      data.posts = filteredPosts;
+      data.posts = filterDuplicates(resp.concat(data.posts));
     }
   })
   .catch(e => {
@@ -57,23 +70,19 @@ onMount(async () => {
   }
 });
 
-async function loadOlderPosts() {
+function loadOlderPosts() {
   loadingOlderPosts = true;
-  const params = new URLSearchParams();
+  let filter = {};
   if (data.posts.length > 0) {
-    params.set('before', data.posts[data.posts.length - 1].created_at);
+    filter = { before: data.posts[data.posts.length - 1].created_at };
   }
-  await fetch(`/api/posts?${params}`)
-    .then(r => r.json())
-    .then((resp: Post[]) => {
+  fetchPosts(filter).then(resp => {
     if (resp.length > 0) {
       // Append old posts, filter dupes
       // There shouldn't be any duplicates, but better be safe than sorry
-      const combined = data.posts.concat(resp);
-      const filteredPosts = combined.filter((obj, index, arr) => {
-        return arr.map(mapObj => mapObj.url).indexOf(obj.url) === index;
-      });
-      data.posts = filteredPosts;
+      data.posts = filterDuplicates(data.posts.concat(resp));
       moreOlderPostsAvailable = true;
     } else {
       moreOlderPostsAvailable = false;