import fetch from "node-fetch"
import Path from "path"
import FS from "fs/promises"
import { JSDOM } from "jsdom"
import config from "./config.js"
// Cache of the last working Nitter instance per user. Fall back to an empty
// cache when ./cache.json is missing or unreadable instead of crashing at startup.
let cache = await FS.readFile('./cache.json', { encoding: 'utf-8' })
	.then(json => JSON.parse(json))
	.catch(() => ({ nitter: {} }))
// Guarantee the shape the rest of the file relies on (cache.nitter[user])
cache.nitter ??= {}
// Per-hostname courtesy delay (ms) to wait before the next request to that host
let waitingList = new Map()
/**
 * Write `content` to `path`, creating parent directories as needed.
 * @param {string} path - destination file path
 * @param {string|Buffer} content - data to write
 * @returns {Promise<void>}
 */
const write = async (path, content) => {
	// mkdir with recursive: true succeeds silently when the directory already
	// exists, avoiding the check-then-create race of access() followed by mkdir()
	await FS.mkdir(Path.dirname(path), { recursive: true })
	return FS.writeFile(path, content)
}
/**
 * Fetch a Nitter user's feed, trying the cached working instance first and
 * falling back through the configured instances.
 * @param {string} user - username without leading '@'
 * @returns {Promise<object[]|undefined>} posts, or undefined if no instance worked
 */
const handleNitterUser = async user => {
	let data
	let index = 0
	// Try the instance that worked last time before the configured list
	let sources = cache.nitter[user] ?
		[ cache.nitter[user] ].concat(config.sources.nitter) :
		config.sources.nitter
	while(!data && index < sources.length) {
		let source = sources[index]
		let rss = await fetchRss(source, user + '/rss')
		try {
			data = processNitter(rss, user)
		} catch(err) {
			// NoMatchesError is same-realm so instanceof is reliable; a DOMException
			// may originate from jsdom's own realm, so compare by constructor name
			if(err instanceof NoMatchesError || err.constructor.name == DOMException.name) {
				console.warn(`Failed to fetch ${user} from ${source}`)
				index++
			} else {
				throw err
			}
		}
	}
	// Every instance failed: don't log success or cache an undefined source
	if(!data) {
		console.warn(`Could not fetch ${user} from any source`)
		return
	}
	console.log(`Found ${user} at ${sources[index]}`)
	cache.nitter[user] = sources[index]
	return data
}
const sleep = delay => new Promise(resolve => setTimeout(() => resolve(), delay) )
class NoMatchesError extends Error {}
/**
 * Parse an RSS document and reduce each <item> to a post object.
 * @param {string} rss - raw RSS/XML text
 * @param {Function} reducerCallback - (item, descriptionDocument, dateString, link) => post|undefined
 * @param {boolean} [cdata] - unused; kept for call-site compatibility
 * @returns {object[]} posts with `date` (ms epoch, 0 if unparseable) and `link` attached
 * @throws {NoMatchesError} when the feed contains no <item> elements
 */
const processRss = (rss, reducerCallback, cdata) => {
	let { document } = new JSDOM(rss, {
		contentType: 'text/xml'
	}).window
	let items = document.querySelectorAll('channel item')
	if(items.length == 0) {
		throw new NoMatchesError('Got no matches')
	}
	let posts = []
	for(let item of items) {
		// The description is HTML embedded as text; parse it into its own document
		let description = new JSDOM(item.querySelector('description').textContent).window.document
		let dateString = item.querySelector('pubDate').textContent
		let link = item.querySelector('link').textContent
		let post = reducerCallback(item, description, dateString, link)
		if(post) {
			// An unparseable pubDate yields NaN (not null/undefined), so the old
			// `?? 0` fallback never fired — check for NaN explicitly
			let timestamp = new Date(dateString).valueOf()
			post.date = Number.isNaN(timestamp) ? 0 : timestamp
			post.link = link
			posts.push(post)
		}
	}
	return posts
}
/**
 * Fetch an RSS feed over HTTPS, honouring a per-host courtesy delay.
 * @param {string} hostname - host to fetch from
 * @param {string} path - feed path on that host
 * @returns {Promise<string|undefined>} response body, or undefined on network error
 */
const fetchRss = async (hostname, path) => {
	// First request to a host has no waitingList entry yet — treat as "no wait"
	// (previously `sleep(undefined)` was awaited, relying on setTimeout coercion)
	let waitFor = waitingList.get(hostname) ?? 0
	if(waitFor !== 0) {
		await sleep(waitFor)
		waitingList.set(hostname, 0)
	}
	let url = new URL(path, 'https://' + hostname)
	try {
		let response = await fetch(url)
		// The next request to this host must wait out the courtesy delay
		waitingList.set(hostname, config.courtesyWait)
		return await response.text()
	} catch(err) {
		// Best effort, matching the original swallow-and-continue behavior,
		// but log which URL failed so the warning is actionable
		console.error(`Failed to fetch ${url}:`, err)
	}
}
/**
 * Collect image URLs from a post's description document.
 * @param {string} user - username to tag the result with
 * @param {Document} description - parsed description HTML
 * @returns {{images: string[], user: string}|undefined} undefined when no images found
 */
const getImages = (user, description) => {
	// querySelectorAll always returns a NodeList (possibly empty, never null),
	// so the old truthiness check on it was dead code
	let images = description.querySelectorAll('img')
	let imageUrls = []
	for(let image of images) {
		let { src } = image
		if(!src) {
			// No src: fall back to the last (typically largest) srcset candidate,
			// stripping its width/density descriptor
			let finalSrc = image.srcset.split(', ').pop()
			src = finalSrc.slice(0, finalSrc.indexOf(' ') )
		}
		imageUrls.push(src)
	}
	if(imageUrls.length > 0) {
		return {
			images: imageUrls,
			user
		}
	}
}
/**
 * Extract image posts authored by `user` from a Nitter RSS feed, skipping
 * retweets (items whose dc:creator is a different handle).
 * @param {string} rss - raw RSS text
 * @param {string} user - username without leading '@'
 * @returns {object[]} posts
 */
const processNitter = (rss, user) => {
	return processRss(rss, (item, description) => {
		let creator = item.getElementsByTagName('dc:creator')[0]
		// Guard: an item without dc:creator previously threw a TypeError, which
		// is not one of the error types the caller treats as retryable.
		// slice(1) strips the leading '@' from the creator handle.
		if(creator && creator.innerHTML.slice(1) === user)
			return getImages(user, description)
	}, true)
}
/**
 * Fetch and process a Tumblr user's RSS feed.
 * @param {string} user - Tumblr blog name
 * @returns {Promise<object[]>} original (non-reblog) image posts
 */
const handleTumblrUser = async user => {
	const rss = await fetchRss(`${user}.tumblr.com`, 'rss')
	console.log('Found ' + user)
	return processTumblr(rss, user)
}
/**
 * Extract original (non-reblog) image posts from a Tumblr RSS feed.
 * @param {string} rss - raw RSS text
 * @param {string} user - Tumblr blog name
 * @returns {object[]} posts
 */
const processTumblr = (rss, user) => {
	return processRss(rss, (item, description) => {
		const reblog = description.querySelector('p > a.tumblr_blog')
		// Reblogged posts lead with a link to the source blog; keep only posts
		// the user authored themselves
		const isForeignReblog = reblog !== null && reblog.innerHTML !== user
		if(isForeignReblog)
			return
		return getImages(user, description)
	})
}
const oneDay = 1000 * 60 * 60 * 24
/**
 * Collate posts from all sources, paginate, render, and write HTML pages.
 * @param {Array<object[]|undefined>} sources - per-source post arrays (undefined = failed source)
 * @param {string} directory - output directory for the rendered pages
 * @param {string} header - page header markup
 * @param {{tooLongAgo?: number, pageSize: number}} viewOptions - tooLongAgo is in days
 * @param {*} error - optional error to surface when no posts are available
 * @returns {Promise<void[]>} resolves when all pages are written
 */
const printFeed = async (sources, directory, header, viewOptions, error) => {
	// Collate: drop failed sources and posts older than the day-aligned cutoff
	let feed = []
	let tooLongAgo = viewOptions.tooLongAgo ?
		(Date.now() - (Date.now() % oneDay)) - oneDay * viewOptions.tooLongAgo :
		0
	for(let source of sources) {
		if(source == undefined) {
			continue
		}
		for(let post of source) {
			if(post.date > tooLongAgo)
				feed.push(post)
		}
	}
	// Sort newest first. The previous comparator returned a boolean, which
	// coerces only to 0/1 and never -1, so the order was unreliable.
	feed.sort((a, b) => b.date - a.date)
	// Render
	let pages = []
	for(let i = 0; i < Math.ceil(feed.length / viewOptions.pageSize); i++) {
		pages.push(feed.slice(i * viewOptions.pageSize, (i + 1) * viewOptions.pageSize) )
	}
	// Write: page 0 becomes index.html, the rest are 1.html, 2.html, …
	let promises = []
	const writePage = (index, content) =>
		promises.push(
			write(Path.join(directory, index == 0 ? 'index' : index.toString() ) + '.html', content)
		)
	for(let i = 0; i < pages.length; i++) {
		let nextPage = i + 1
		let link = nextPage === pages.length ?
			`end` :
			`next`
		writePage(i, renderPage(`Page ${i + 1}`, pages[i], header, link) )
	}
	if(pages.length == 0) {
		let message = 'No posts available'
		if(error) {
			// Put in an iframe to prevent potential XSS through response body? Who knows.
			message += `
`
		}
		writePage(0, renderPage('No posts', [], header, message) )
	}
	return Promise.all(promises)
}
const renderPage = (title, posts, header, footer) => {
let html = `\
${post.user} ${config.printDate(date)} open