Ensure view dirs exist, cleanup

This commit is contained in:
Dakedres 2024-04-11 08:36:27 -06:00
parent 448fd6dc30
commit cd3e970b51

115
lib.js
View File

@ -4,7 +4,6 @@ import Path from "path"
import FS from "fs/promises"
import { JSDOM } from "jsdom"
// | o |
// . . |- . | ,-.
// | | | | | `-.
@ -65,30 +64,25 @@ export const testWhitelist = (array, whitelist) =>
// True when any element of `array` appears in `blacklist`.
// Uses .some() rather than `.find(...) !== undefined`, which misreported
// a match when the matching blacklist entry was itself `undefined`.
export const testBlacklist = (array, blacklist) =>
  blacklist.some(tag => array.includes(tag))
export const createView = (directory, pageSize, extra = {}) => {
let view = {
batch: new PromiseBatch(),
directory,
pageSize,
...extra
}
// Report whether `path` exists on disk.
// FS.access resolves when the path is reachable and rejects otherwise,
// so the try/catch converts that into a boolean instead of throwing.
export const doesExist = async (path) => {
  let exists

  try {
    await FS.access(path)
    exists = true
  } catch(err) {
    exists = false
  }

  // BUG FIX: without this return the function always resolved to
  // undefined, so every caller's existence check was falsy.
  return exists
}
export const openImageStore = async view => {
let imageStorePath = Path.join(view.directory, view.imageStoreDirectory)
let dirents = await FS.readdir(imageStorePath, { withFileTypes: true })
view.imageStore = new Map()
// Create `path` (and any missing parents) when it does not already exist.
// Resolves to whether the directory existed beforehand, so callers (e.g.
// openImageStore) can skip scans that only make sense for a pre-existing dir.
export const ensureDir = async (path) => {
  // BUG FIX: doesExist is async — without `await`, `exists` was a Promise,
  // which is always truthy, so the mkdir branch never ran and the
  // directory was never actually created.
  let exists = await doesExist(path)

  if(!exists) {
    await FS.mkdir(path, { recursive: true })
  }

  return exists
}
export const isUnset = (value) => {
@ -184,14 +178,7 @@ export const cacheSources = (sources, cache) =>
export const openCache = async (source, cache) => {
let path = getCachePath(source, cache)
let exists
try {
await FS.access(path)
exists = true
} catch(err) {
exists = false
}
let exists = await doesExist(path)
let rss
@ -351,6 +338,61 @@ export const extractImages = (post) => {
// |/ | |-' |/|/
// ' ' `-' ' '
// Assemble a view object and prepare its on-disk layout.
// `extra` is merged over the base fields; when it supplies an
// imageStoreDirectory, the image store is opened as well.
export const createView = async (directory, pageSize, extra = {}) => {
  const view = Object.assign(
    {
      batch: new PromiseBatch(),
      directory,
      pageSize
    },
    extra
  )

  await ensureDir(view.directory)

  if(view.imageStoreDirectory) {
    await openImageStore(view)
  }

  return view
}
// Populate view.imageStore: a Map from image basename (extension stripped)
// to the store-relative path of each file in the image store directory.
// Creates the store directory if missing; a freshly created (empty) store
// skips the directory scan entirely.
export const openImageStore = async view => {
  let imageStorePath = Path.join(view.directory, view.imageStoreDirectory)
  view.imageStore = new Map()

  if(!await ensureDir(imageStorePath)) {
    return view
  }

  let dirents = await FS.readdir(imageStorePath, { withFileTypes: true })

  for(let dirent of dirents) {
    if(dirent.isFile()) {
      // BUG FIX: lastIndexOf('.') is -1 for extensionless names, and
      // slice(0, -1) then silently dropped the final character of the key.
      let dotIndex = dirent.name.lastIndexOf('.')
      let basename = dotIndex === -1 ? dirent.name : dirent.name.slice(0, dotIndex)
      view.imageStore.set(basename, Path.join(view.imageStoreDirectory, dirent.name))
    }
  }

  return view
}
// Render the shared nav header, build the paginated output for every feed
// and every source (feeds first, matching nav order), write each page, and
// finally copy the stylesheet into the view.
export const writeView = (sources, feeds, view) => {
  view.header = renderNav(feeds, sources)

  const pages = [...feeds, ...sources]
    .flatMap(entry => createPages(entry, view))

  for(const page of pages) {
    writePage(page, view)
  }

  writeStylesheet(Path.join(import.meta.dirname, 'assets/style.css'), view)
}
export const createPages = (list, { pageSize }) => {
let posts = []
let pages = []
@ -675,25 +717,6 @@ export const populateSource = async (channel, source, postReducerCallback, cache
return source
}
// NOTE(review): this is a second, byte-identical copy of writeView — the
// same definition appears earlier in this file; one of the two should be
// removed, as the later binding silently shadows the earlier one.
// Renders the shared nav header, builds paginated output for every feed
// and source, writes each page, then copies the stylesheet into the view.
export const writeView = (sources, feeds, view) => {
view.header = renderNav(feeds, sources)
let pages = []
// Feeds are paginated before sources, matching the nav order.
for(let feed of feeds) {
pages = pages.concat(createPages(feed, view))
}
for(let source of sources) {
pages = pages.concat(createPages(source, view))
}
for(let page of pages) {
writePage(page, view)
}
// Stylesheet ships alongside this module under assets/.
writeStylesheet(Path.join(import.meta.dirname, 'assets/style.css'), view)
}
export const createSource = async (source, getChannel, postReducerCallback, cache) => {
if(cache.enabled)
source = await openCache(source, cache)