v2 mostly working
commit 2288085649
parent 040bd8f6fa
assets/style.css (new file, 38 lines)
@@ -0,0 +1,38 @@
body {
  max-width: 640px;
  margin: 0 0 0 auto;
  padding: 8px;
  font-family: sans-serif;
}

ul {
  padding-inline-start: 30px;
  list-style-type: none;
}

p {
  padding: 30px;
}

img {
  margin: 10px auto;
  max-width: 100%;
}

p a, footer a {
  float: right
}

hr {
  clear: both
}

footer a {
  padding-bottom: 10px;
}

@media (prefers-color-scheme: dark) {
  body { background: #000; color: #eee }
  a { color: #ccf }
  hr { border-color: #555 }
}
@@ -105,7 +105,7 @@ const sources = {
   ]
 }
 
-module.exports = {
+export default {
   feeds,
   sources,
   courtesyWait,
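The change here flips the config module's export from CommonJS to an ES module default export (matching "type": "module" in package.json). Consumers switch from require() to an import; a sketch, assuming the file is named config.js (its name isn't shown in this hunk):

// Before: const { feeds, sources, courtesyWait } = require('./config.js')
import config from './config.js'

const { feeds, sources, courtesyWait } = config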
index.js (5 changed lines)
@@ -175,7 +175,7 @@ const printFeed = async (sources, directory, header, viewOptions, error) => {
     }
   }
 
-  feed = feed.sort((a, b) => a.date < b.date)
+  feed = feed.sort((a, b) => a.date > b.date)
 
   // Render
 
@@ -187,11 +187,12 @@ const printFeed = async (sources, directory, header, viewOptions, error) => {
 
   // Write
 
+  let lastIndex = getLastIndex()
   let promises = []
 
   const writePage = (index, content) =>
     promises.push(
-      write(Path.join(directory, index == 0 ? 'index' : index.toString() ) + '.html', content)
+      write(Path.join(directory, index == (feed.length - 1) ? 'index' : index.toString() ) + '.html', content)
     )
 
   for(let i = 0; i < pages.length; i++) {
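One caveat on the sort tweak above: Array.prototype.sort expects the comparator to return a negative, zero, or positive number, and a boolean like a.date > b.date only ever yields the equivalent of 0 or 1, so the resulting order is engine-dependent. Since post.date is stored as a millisecond timestamp (see processRss in lib.js below), a numeric comparator expresses the same intent reliably (a sketch, not what the commit ships):

feed = feed.sort((a, b) => a.date - b.date) // ascending; use b.date - a.date for newest first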
lib.js (new file, 489 lines)
@@ -0,0 +1,489 @@
// Ascii font used is "Shimrod"

import Path from "path"
import FS from "fs/promises"
import { JSDOM } from "jsdom"

let cache = await FS.readFile('./cache.json', { encoding: 'utf-8' })
  .then(json => JSON.parse(json) )


// | o |
// . . |- . | ,-.
// | | | | | `-.
// `-` `-' ' ' `-'

export function PromiseBatch() {
  let promises = []

  this.add = (promise) =>
    promises.push(promise)

  this.complete = () =>
    Promise.all(promises)
}

export const annotate = annotation =>
  data => {
    console.log(annotation),
    data
  }

export const write = async (path, content) => {
  let dir = Path.dirname(path)

  try {
    await FS.access(dir)
  } catch(e) {
    await FS.mkdir(dir, { recursive: true })
  }

  return await FS.writeFile(path, content)
}

export const download = async (url, path, courtesyWait) => {
  let response = await delayedFetch(url, {}, courtesyWait)
    .catch(err => console.error(`Failed download of ${url}:`, err) )

  if(response.ok) {
    await write(path, response.body)
  } else {
    throw createNetworkingError(response)
  }
}

export const createNetworkingError = response => {
  return new Error(`Request failed, ${response.status}: ${response.statusText}`)
}

export const getLinkExtname = link =>
  Path.extname(new URL(link).pathname)

export const buildImagePathHandler = (source, id) => (url, i, array) => {
  let path = `images/${source.name}-${id}`

  if(array.length > 1)
    path += `-${i}`

  return path + getLinkExtname(url)
}

export const addStylesheet = (path, { viewDir, batch }) =>
  batch.add(
    FS.readFile(path)
      .then(content => write(Path.join(viewDir, 'style.css'), content))
  )

export const postIdFromPathname = post => {
  let { pathname } = new URL(post.link)
  return pathname.slice(pathname.lastIndexOf('/') + 1)
}

export const createLock = async renderer => {
  let lockExists = false

  try {
    await FS.access(renderer.lockPath)
    lockExists = true
  } catch(err) {
    lockExists = false
  }

  renderer.lock = {
    sources: {},
    lists: {}
  }

  if(lockExists) {
    let lock = JSON.parse(await FS.readFile(renderer.lockPath, { encoding: 'utf8' }))

    Object.assign(renderer.lock, lock)
  }
}

export const writeLock = renderer =>
  write(renderer.lockPath, JSON.stringify(renderer.lock) )
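A couple of notes on the helpers above. PromiseBatch just accumulates promises so work can be kicked off eagerly and awaited once at the end; a hypothetical call site (not part of this commit) looks like:

let batch = new PromiseBatch()
batch.add(write('out/index.html', '<html></html>'))
batch.add(write('out/style.css', 'body {}'))
await batch.complete()

Also worth flagging: annotate logs its annotation but never returns data. Its body is a block, so the comma expression is just a statement and the arrow yields undefined. The call sites below only use it for logging, but a true pass-through would read data => (console.log(annotation), data).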

//
// ;-. ,-. ,-.
// | `-. `-.
// ' `-' `-'

class NoMatchesError extends Error {}
export const processRss = (source, reducerCallback) => {
  let { document } = new JSDOM(source.rss, {
    contentType: 'text/xml'
  }).window
  let items = document.querySelectorAll('channel item')

  if(items.length == 0) {
    throw new NoMatchesError('Got no matches')
  }

  source.posts = []

  for(let item of items) {
    let description = new JSDOM(item.querySelector('description').textContent).window.document
    let dateString = item.querySelector('pubDate').textContent
    let link = item.querySelector('link').textContent
    let guid = item.querySelector('guid').textContent

    let post = {
      source,
      item,
      description,
      dateString,
      date: new Date(dateString).valueOf() ?? 0,
      link,
      guid
    }

    post = reducerCallback(post)

    if(post) {
      source.posts.push(post)
    }
  }

  return source
}

let waitingList = new Map()
export const sleep = delay => new Promise(resolve => setTimeout(() => resolve(), delay) )
export const delayedFetch = async (url, options, courtesyWait = 5 * 1000) => {
  let [ domain ] = /[\w-]+.[\w-]+$/.exec(url.hostname)
  let timeout = waitingList.get(domain) ?? 0
  let now = Date.now()

  if(timeout == null || timeout <= now) {
    waitingList.set(domain, timeout + courtesyWait)
  } else {
    await sleep(timeout - now)
  }

  return await fetch(url, options)
}

export async function fetchRss(source) {
  let { hostname } = source
  let error
  let response

  try {
    response = await delayedFetch(
      new URL(source.pathname, 'https://' + hostname),
      {},
      source.courtesyWait ?? 5 * 1000
    )
  } catch(err) {
    error = err
  }

  source.errored = error !== undefined || !response.ok
  if(source.errored) {
    source.error = error ?? createNetworkingError(response)
    source.rss = ''
  } else {
    source.rss = await response.text()
  }

  return source
}

export const extractImages = (post, cache = true) => {
  let images = post.description.querySelectorAll('img')

  if(images) {
    let imageUrls = []

    for(let image of images) {
      let { src } = image

      if(!src) {
        let finalSrc = image.srcset.split(', ').pop()

        src = finalSrc.slice(0, finalSrc.indexOf(' ') )
      }

      // Sending through URL prevents potential XSS
      imageUrls.push(new URL(src).href)
    }

    return imageUrls
  }
}
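A note on delayedFetch: the per-domain throttle reads url.hostname, so it really wants a WHATWG URL object, which is what fetchRss passes it. download() above hands it the raw image URL string, whose .hostname is undefined, so those calls appear to fall into a single shared bucket. The URL-object form keeps the throttling per host (sketch, not part of the commit):

let response = await delayedFetch(new URL(urlString), {}, courtesyWait)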

// o
// . , . ,-. , , ,
// |/ | |-' |/|/
// ' ' `-' ' '

export const createPages = (list, { pageSize, header = '', viewDir, batch, getPageFilename, getPageTitle, lock }) => {
  let posts = []
  let lastPageLink = 'about:blank'
  let pageIndex = 0
  // let pageIndex = Math.ceil(list.posts.length / pageSize)
  // let {
  //   index: pageIndex = 0,
  //   lastPostDate
  // } = lock.lists[list.name]?.lastPage ?? {}

  // let sinceDate = posts[0]?.date ?? 0
  // posts = list.posts
  //   .filter(post => post.date > sinceDate)
  //   .concat(posts)
  //   .sort((a, b) => b.date - a.date)

  // let firstPageSize =

  list.posts.sort((a, b) => a.date - b.date)

  for(let i = 0; i < list.posts.length; i++) {
  // for(let i = list.posts.length - 1; i >= 0; i--) {
    posts.push(list.posts[i])

    if(i % pageSize == 0) {
      let isLastPage = list.main && i < pageSize
      let title = getPageTitle(list, pageIndex)
      let html = renderPage(title, posts.reverse(), header, renderNextPageLink(lastPageLink))
      let filename = isLastPage ? 'index.html' : getPageFilename(list, pageIndex)
      let promise = write(Path.join(viewDir, filename), html)

      batch.add(promise.then(annotate(`Created "${title}" (${filename})`)))
      posts = []
      lastPageLink = filename
      pageIndex++
    }
  }

  // lock.lists[list.name] = {
  //   pageIndex,
  //   lastPostDate: posts[0]?.date ?? lastPostDate
  // }
}

export const renderPage = (title, posts, header, footer) => `\
<html>
<head>

<title>${title}</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="./style.css">

</head>
<body>

<header>
${header}
</header>
${posts.map(renderPost).join('\n')}
<footer>
${footer}
</footer>

</body>
</html>`

export const renderPost = post => {
  let date = new Date(post.date)

  return `\
${post.images.map(renderImage).join('\n')}
<p><b>${post.source.displayName}</b> ${renderDate(date)} <a href="${post.link}">open</a></p><hr>`
}

export const renderImage = href => {
  return `\
<a href="${href}"><img src="${href}" loading="lazy"></img></a>`
}

export const renderDate = date =>
  (date.getMonth() + 1) + '.' + date.getDate() + '.' + date.getFullYear()

export const renderNextPageLink = link => `\
<a href="${link}">next</a>`

export const renderNav = (feeds, sources) => `\
<details>

<summary>Feeds</summary>
<section>
<ul>

${feeds.map(renderNavEntry).join('\n')}

</ul>
<hr>
<ul>

${sources.map(renderNavEntry).join('\n')}

</ul>
</section>

</details>
<hr>`

export const renderNavEntry = (list) => {
  let extra = ''

  if(list.error) {
    extra += ' (errored)'
  } else if (list.posts.length == 0) {
    extra += ' (empty)'
  }

  return `<li><a href="${list.link}">${list.displayName}</a>${extra}</li>`
}
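For orientation, createPages expects a list with a name and a posts array (what createFeed below returns) plus a renderer-style options bag; a hypothetical wiring, with every value beyond the lib.js exports being an assumption rather than something this commit contains:

import { PromiseBatch, createFeed, createPages } from './lib.js'

// `sources` is assumed to be an array of already fetched and processed sources,
// e.g. from tumblr.createSources(...) or nitter.createSources(...) below.
let batch = new PromiseBatch()
let feed = createFeed('everything', sources)

createPages(feed, {
  pageSize: 10,
  header: '<h1>rssssing</h1>',
  viewDir: './view',
  batch,
  getPageFilename: (list, i) => `${list.name}-${i}.html`,
  getPageTitle: (list, i) => `${list.name}, page ${i}`
})

await batch.complete()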

// | | | o
// ,-. ,-. | | ,-: |- . ,-. ;-.
// | | | | | | | | | | | | |
// `-' `-' ' ' `-` `-' ' `-' ' '

export const downloadImages = (images, getImagePath, courtesyWait, { viewDir, batch }) => {
  let out = []

  for(let i = 0; i < images.length; i ++) {
    let url = images[i]
    let relativePath = getImagePath(url, i, images)
    let fullPath = Path.join(viewDir, relativePath)

    let promise = FS.access(fullPath)
      .catch(() =>
        download(url, fullPath, courtesyWait)
          .then(annotate( `Downloaded ${relativePath}`))
      )

    out.push(relativePath)
    batch.add(promise)
  }

  return out
}

export const pullImages = (post, renderer, discardPostIfNoImages = false, getPostId = postIdFromPathname) => {
  let images = extractImages(post)

  if(!discardPostIfNoImages || images.length > 0) {
    post.images = downloadImages(
      images,
      buildImagePathHandler(post.source, getPostId(post)),
      post.source.courtesyWait,
      renderer
    )
    return post
  }
}

export const createFeed = (name, sources) => {
  return {
    name,
    posts: sources.reduce((posts, source) => posts.concat(source.posts), [])
  }
}

export const fetchRssFromInstances = async (source, renderer) => {
  let index = 0
  let instances = source.instances
  let lockHostname = renderer.lock.sources[source.name]?.hostname

  if(lockHostname) {
    instances.unshift(lockHostname)
  }

  while(!source.rss && index != instances.length) {
    source.hostname = instances[index]
    source = await fetchRss(source)

    if(source.errored) {
      console.error(`Failed to fetch ${source.name} from ${source.hostname}: `, source.error)
      index++
    } else {
      break
    }
  }

  (renderer.lock.sources[source.name] ??= {}).hostname = source.hostname

  return source
}

const addPostsToLock = (source, renderer) => {
  (renderer.lock.sources[source.name] ??= {}).postData = source.posts.map(post => post.description)
}
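pullImages is shaped to act as the reducerCallback that processRss applies to every post: it attaches post.images (relative paths whose downloads get queued on the batch) and returns the post, or returns undefined — which drops the post — when discardPostIfNoImages is set and nothing was found. A hypothetical reducer, where renderer is assumed to be the { viewDir, batch, lock, lockPath } object the other helpers take:

const reducerCallback = post => pullImages(post, renderer, true)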

// | | ,-
// ;-. | ,-: |- | ,-. ;-. ;-.-. ,-.
// | | | | | | |- | | | | | | `-.
// |-' ' `-` `-' | `-' ' ' ' ' `-'
// ' -'

export const tumblr = {
  async createSource(user, courtesyWait, postReducerCallback, renderer) {
    let source = {
      hostname: user + '.tumblr.com',
      pathname: 'rss',
      courtesyWait,
      name: `tumblr-${user}`,
      displayName: user,
      user
    }

    source = await fetchRss(source)
    source = processRss(source, postReducerCallback)
    addPostsToLock(source, renderer)
    return source
  },

  createSources(users, ...args) {
    return Promise.all(users.map(user => tumblr.createSource(user, ...args)))
  },

  isRepost(post) {
    let reblog = post.description.querySelector('p > a.tumblr_blog')

    return reblog && reblog.innerHTML !== post.source.user
  },

  pullImages
}

export const nitter = {
  async createSource(user, instances, courtesyWait, postReducerCallback, renderer) {
    let source = {
      instances,
      pathname: user + '/rss',
      courtesyWait,
      name: `nitter-${user}`,
      displayName: user,
      user
    }

    source = await fetchRssFromInstances(source, renderer)
    source = processRss(source, postReducerCallback)
    return source
  },

  createSources(users, ...args) {
    return Promise.all(users.map(user => nitter.createSource(user, ...args)))
  },

  isRepost(post) {
    let creator = post.item.getElementsByTagName('dc:creator')[0]

    return creator.innerHTML.slice(1) === post.source.user
  },

  pullImages
}

// TODO: Mastodon support
//
// "Turns out Mastodon has built-in RSS; your feed URL is [instance]/@[username].rss, so for example I'm
// https://mastodon.social/@brownpau.rss (note the "@")"
// - https://mastodon.social/@brownpau/100523448408374430
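Given that TODO, Mastodon support could plausibly reuse the plain fetchRss path, since the quoted feed URL is just [instance]/@[username].rss. A hedged sketch following the tumblr source shape; nothing like this is implemented in the commit:

export const mastodon = {
  async createSource(user, instance, courtesyWait, postReducerCallback, renderer) {
    let source = {
      hostname: instance,            // e.g. 'mastodon.social'
      pathname: '@' + user + '.rss', // per the quoted feed URL format
      courtesyWait,
      name: `mastodon-${user}`,
      displayName: user,
      user
    }

    source = await fetchRss(source)
    source = processRss(source, postReducerCallback)
    addPostsToLock(source, renderer)
    return source
  }
}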
@@ -10,8 +10,7 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "jsdom": "^22.1.0",
-    "node-fetch": "^3.3.1"
+    "jsdom": "^22.1.0"
   },
   "type": "module"
 }
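This dependency hunk (evidently from package.json) drops node-fetch and keeps only jsdom: lib.js calls the global fetch() directly, which Node has shipped since v18 (and shell.nix below pins nodejs_21), so the polyfill and its transitive packages can go. The yarn.lock hunks further down remove exactly those entries.

// No import needed on Node >= 18; the URL here is only an example.
let response = await fetch('https://example.com/feed.rss')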
shell.nix (new file, 13 lines)
@@ -0,0 +1,13 @@
{ pkgs ? import <nixpkgs> {} }:

pkgs.stdenv.mkDerivation {
  name = "rssssing-dev";
  buildInputs = [
    pkgs.nodejs_21
    pkgs.yarn
  ];
  shellHook = ''
    export PATH="$PWD/node_modules/.bin/:$PATH"
    export NPM_PACKAGES="$HOME/.npm-packages"
  '';
}
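Running nix-shell in the repository root drops you into this environment, with node and yarn from nixpkgs on PATH plus ./node_modules/.bin for locally installed tools.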
test.html (new file, 43 lines)
@@ -0,0 +1,43 @@
<html>

<head>

<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>muses</title>
<link rel="stylesheet" href="style.css">
<style>

</style>

</head>
<body>

<main>

<article>
</article>

</main>
<header><a href="index.html">art regurgitor</a></header>
<nav>

<ul>

<li><a href="index.html">index</a></li>
<li><a href="meta.html">meta</a></li>
<li><a href="muses.html"><i>muses</i></a></li>
<li><a href="musings.html">musings</a></li>

</ul>
<ul>

<li><a href="computing.html">computing</a></li>

</ul>
<hr><p><a href="https://git.sys42.net/dakedres/website/raw/branch/v2/site/muses.md">[source]</a></p>

</nav>

</body>

</html>
yarn.lock (39 changed lines)
@@ -38,11 +38,6 @@ cssstyle@^3.0.0:
   dependencies:
     rrweb-cssom "^0.6.0"
 
-data-uri-to-buffer@^4.0.0:
-  version "4.0.1"
-  resolved "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz"
-  integrity sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==
-
 data-urls@^4.0.0:
   version "4.0.0"
   resolved "https://registry.npmjs.org/data-urls/-/data-urls-4.0.0.tgz"
@@ -81,14 +76,6 @@ entities@^4.4.0:
   resolved "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz"
   integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==
 
-fetch-blob@^3.1.2, fetch-blob@^3.1.4:
-  version "3.2.0"
-  resolved "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz"
-  integrity sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==
-  dependencies:
-    node-domexception "^1.0.0"
-    web-streams-polyfill "^3.0.3"
-
 form-data@^4.0.0:
   version "4.0.0"
   resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz"
@@ -98,13 +85,6 @@ form-data@^4.0.0:
     combined-stream "^1.0.8"
     mime-types "^2.1.12"
 
-formdata-polyfill@^4.0.10:
-  version "4.0.10"
-  resolved "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz"
-  integrity sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==
-  dependencies:
-    fetch-blob "^3.1.2"
-
 html-encoding-sniffer@^3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz"
@@ -187,20 +167,6 @@ ms@2.1.2:
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
 
-node-domexception@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz"
-  integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
-
-node-fetch@^3.3.1:
-  version "3.3.2"
-  resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz"
-  integrity sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==
-  dependencies:
-    data-uri-to-buffer "^4.0.0"
-    fetch-blob "^3.1.4"
-    formdata-polyfill "^4.0.10"
-
 nwsapi@^2.2.4:
   version "2.2.7"
   resolved "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.7.tgz"
@@ -292,11 +258,6 @@ w3c-xmlserializer@^4.0.0:
   dependencies:
     xml-name-validator "^4.0.0"
 
-web-streams-polyfill@^3.0.3:
-  version "3.2.1"
-  resolved "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz"
-  integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==
-
 webidl-conversions@^7.0.0:
   version "7.0.0"
   resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz"