Caching actually works

This commit is contained in:
parent 99b4721bf3
commit 9a587e4391

lib.js (56 changed lines)
@@ -131,6 +131,7 @@ export const delayedFetch = async (url, options, courtesyWait = 5 * 1000) => {
 	waitingList.set(domain, waitFor + courtesyWait)
 	if(waitFor !== 0) {
 		console.log(`Waiting ${waitFor}ms to download ${url}`)
 		await sleep(waitFor)
 	}
 
@@ -155,8 +156,8 @@ export const retryDelayedFetch = async (url, options, courtesyWait, retryAttempt
 export const getCacheFilename = (source) =>
 	source.name + '.xml'
 
-export const getCachePath = (source, { directory }) =>
-	Path.join(directory, getCacheFilename(source))
+export const getCachePath = (source, cache) =>
+	Path.join(cache.directory, getCacheFilename(source))
 
 export const cacheSource = (source, cache) =>
 	write(getCachePath(source, cache), renderCache(source, cache))
@@ -180,19 +181,21 @@ export const openCache = async (source, cache) => {
 	if(exists)
 		rss = await FS.readFile(path, { encoding: 'utf8' })
 
-	if(exists & rss) {
+	if(exists && rss) {
 		// if(source.user == 'nanoraptor') {
-		// 	source.asdf = 'b'
+		// 	source.cache.asdf = 'b'
 		// }
 		let channel = createChannel(rss)
-		let date = readPubDate(channel.querySelector('pubDate'))
-		let link = new URL(channel.querySelector('link').textContent)
 
 		source.cache = {
			channel,
-			date,
-			link
+			date: readPubDate(channel.querySelector('pubDate')),
+			link: new URL(channel.querySelector('link').textContent),
 		}
 	} else {
 		source.cache = {
-			date: new Date(0),
+			date: new Date(0)
 		}
 
 		if(source.hostname)
@@ -207,6 +210,7 @@ export const openCache = async (source, cache) => {
 export const buildCacheLink = source =>
 	new URL('https://' + source.hostname)
 
+// .replaceAll(/\n\s*/g, '')
 export const renderCache = (source, cache) => `\
 <?xml version="1.0" encoding="UTF-8" ?>
 <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
@@ -240,14 +244,18 @@ export const createPosts = async (channel, source, fromDate, reducerCallback) =>
 	let promises = []
 
 	for(let item of items) {
+		let post = createPost(item, source)
+
+		if(post.date <= fromDate)
+			continue
+
 		source.items.push(item)
-		let promise = createPost(item, source, reducerCallback)
+
+		let promise = reducerCallback(post)
 			.then(post => {
-				if(post && post.date > fromDate) {
+				if(post) {
 					source.posts.push(post)
 				}
 
 				return post
 			})
 
 		promises.push(promise)
@@ -257,7 +265,7 @@ export const createPosts = async (channel, source, fromDate, reducerCallback) =>
 	return source
 }
 
-export const createPost = async (item, source, reducerCallback) => {
+export const createPost = (item, source) => {
 	let description = new JSDOM(item.querySelector('description').textContent).window.document
 	let date = readPubDate(item.querySelector('pubDate'))
 	let link = item.querySelector('link').textContent
@@ -275,7 +283,7 @@ export const createPost = async (item, source, reducerCallback) => {
 		occurances: []
 	}
 
-	return await reducerCallback(post)
+	return post
 }
 
 export const processCategories = (post) => {
@@ -289,7 +297,7 @@ export const processCategories = (post) => {
 	return post
 }
 
-export const extractImages = (post, cache = true) => {
+export const extractImages = (post) => {
 	let images = post.description.querySelectorAll('img')
 
 	if(images) {
@@ -519,7 +527,7 @@ export const downloadImage = async (url, basename, { courtesyWait, retryAttempts
 	let extension = imageExtensions[mimetype]
 
 	if(typeof extension !== 'string') {
-		console.error(`Unknown image mimetype for ${url}: ${mimetype}. Cannot download`)
+		console.error(`Unknown mimetype for ${url}: ${mimetype}. Cannot download`)
 		return url
 	}
 
@@ -612,12 +620,12 @@ export const fetchChannelFromInstances = async (source) => {
 	return channel
 }
 
-export const populateSource = async (channel, source, postReducerCallback, useCache = true) => {
+export const populateSource = async (channel, source, postReducerCallback, cache) => {
 	let fromDate = 0
 	source.items = []
 	source.posts = []
 
-	if(useCache) {
+	if(cache.enabled) {
 		fromDate = source.latestPostDate
 
 		if(source.cache.channel)
@@ -657,14 +665,16 @@ export const writeView = (sources, feeds, view) => {
 }
 
 export const createSource = async (source, getChannel, postReducerCallback, cache) => {
-	source = await openCache(source, cache)
-	source = await populateSource(await getChannel(source), source, postReducerCallback, cache.populate)
+	if(cache.enabled)
+		source = await openCache(source, cache)
+	source = await populateSource(await getChannel(source), source, postReducerCallback, cache)
 
-	cache.batch.add(cacheSource(source, cache))
+	if(cache.enabled)
+		cache.batch.add(cacheSource(source, cache))
 	return source
 }
 
-export const createSourceOptions = (options) => {
+export const createSourceOptions = (options, view) => {
 	if(isUnset(options.courtesyWait))
 		options.courtesyWait = 1000
 
@@ -817,4 +827,4 @@ export const mastodon = {
 		return post
 	}
 }
-}
+}
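
For reference, a minimal usage sketch of how the reworked cache argument might be wired up after this commit. It is not part of the commit itself: the field names mirror what the diff reads (cache.enabled, cache.directory, cache.batch.add), while the Set used as the batch collector, the getChannel and postReducer stand-ins, the source fields, and the final Promise.all flush are assumptions for illustration.

// Hedged sketch only: field names below mirror what the diff reads
// (cache.enabled, cache.directory, cache.batch.add); everything else is assumed.
const cache = {
	enabled: true,         // gates openCache() and the cacheSource() write in createSource()
	directory: './cache',  // consumed by getCachePath() via cache.directory
	batch: new Set(),      // createSource() adds the pending cacheSource() result here
}

// Hypothetical stand-ins for the real channel getter and post reducer.
const getChannel = async (source) => fetchChannelFromInstances(source)
const postReducer = async (post) => post  // must return a promise; createPosts() chains .then() on it

const source = await createSource(
	{ name: 'example', hostname: 'example.social' },  // illustrative source fields
	getChannel,
	postReducer,
	cache,
)

await Promise.all(cache.batch)  // assumption: flush the batched cache writes once sources are created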