These changes add a layered hook to the `markdown-it` renderer rules that resolves `<img>` element `src` attributes and `<a>` element `href` attributes to path-absolute URLs, so that those URLs always point to the same resource regardless of the URL the current document is viewed at. This fixes an issue where relative links and image sources broke when a blog post was viewed at a URL without a trailing slash: browsers change the base path against which relative URLs are resolved depending on whether that trailing slash is present, and we didn't account for that. While at it, I've rebuilt all the blog posts and their associated RSS feed.
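To illustrate the underlying behavior, here is a minimal sketch of why the trailing slash matters and what the hook normalizes to. The `https://modrinth.com` origin, the `my-post` slug, and `./header.webp` are made-up example values, not taken from the actual site config or any article:

```ts
// Illustration only: origin, slug, and image path below are hypothetical examples.
const origin = 'https://modrinth.com'

// Browsers resolve relative URLs against the *directory* of the current document,
// which changes depending on whether the document URL ends with a slash:
console.log(new URL('./header.webp', `${origin}/news/article/my-post`).href)
// -> https://modrinth.com/news/article/header.webp          (wrong directory, image 404s)
console.log(new URL('./header.webp', `${origin}/news/article/my-post/`).href)
// -> https://modrinth.com/news/article/my-post/header.webp  (article directory)

// The renderer hook sidesteps this by resolving every src/href against the article's
// canonical directory URL at compile time and stripping the origin, yielding a
// path-absolute URL that is stable however the page is reached:
const resolved = new URL('./header.webp', `${origin}/news/article/my-post/`).href.replace(origin, '')
console.log(resolved) // -> /news/article/my-post/header.webp
```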
286 lines · 7.9 KiB · TypeScript
import { md } from '@modrinth/utils'
import { promises as fs } from 'fs'
import { glob } from 'glob'
import matter from 'gray-matter'
import { minify } from 'html-minifier-terser'
import type { Options } from 'markdown-it'
import type Renderer from 'markdown-it/lib/renderer.mjs'
import type Token from 'markdown-it/lib/token.mjs'
import * as path from 'path'
import RSS from 'rss'
import { parseStringPromise } from 'xml2js'

import {
  ARTICLES_GLOB,
  COMPILED_DIR,
  JSON_PATH,
  PUBLIC_LOCATIONS,
  PUBLIC_SRC,
  ROOT_FILE,
  RSS_PATH,
  SITE_URL,
} from './blog.config'
import { copyDir, toVarName } from './utils'

async function ensureCompiledDir() {
  await fs.mkdir(COMPILED_DIR, { recursive: true })
}

async function hasThumbnail(slug: string): Promise<boolean> {
  const thumbnailPath = path.posix.join(PUBLIC_SRC, slug, 'thumbnail.webp')
  try {
    await fs.access(thumbnailPath)
    return true
  } catch {
    return false
  }
}

function getArticleLink(slug: string): string {
  return `${SITE_URL}/news/article/${slug}`
}

function getThumbnailUrl(slug: string, hasThumb: boolean): string {
  if (hasThumb) {
    return `${SITE_URL}/news/article/${slug}/thumbnail.webp`
  } else {
    return `${SITE_URL}/news/default.webp`
  }
}

// Compiles every markdown article into generated TypeScript modules and collects
// the metadata needed for the RSS feed and the JSON article index.
async function compileArticles() {
  await ensureCompiledDir()

  const files = await glob(ARTICLES_GLOB)
  console.log(`🔎 Found ${files.length} markdown articles!`)
  const articleExports: string[] = []
  const articlesArray: string[] = []
  const articlesForRss = []
  const articlesForJson = []

  for (const file of files) {
    const src = await fs.readFile(file, 'utf8')
    const { content, data } = matter(src)

    const { title, summary, date, slug: frontSlug, authors: authorsData, ...rest } = data
    if (!title || !summary || !date) {
      console.error(`❌ Missing required frontmatter in ${file}. Required: title, summary, date`)
      process.exit(1)
    }

    const mdIt = md()
    const slug = frontSlug || path.basename(file, '.md')

    // Normalizes relative URL resolution to occur in the context of the article's directory.
    // This prevents user agents from resolving relative URLs differently based on whether
    // the current document URL has a trailing slash or not.
    function normalizeRendererHtmlUriAttribute(ruleName: string, attrName: string) {
      const defaultRenderer =
        mdIt.renderer.rules[ruleName] ||
        function (tokens, idx, options, _env, self) {
          return self.renderToken(tokens, idx, options)
        }

      return (tokens: Token[], idx: number, options: Options, env: object, self: Renderer) => {
        const attrUrlValue = tokens[idx].attrGet(attrName)
        if (attrUrlValue) {
          tokens[idx].attrSet(
            attrName,
            new URL(attrUrlValue, `${SITE_URL}/news/article/${slug}/`).href.replace(SITE_URL, ''),
          )
        }
        return defaultRenderer(tokens, idx, options, env, self)
      }
    }

    mdIt.renderer.rules.image = normalizeRendererHtmlUriAttribute('image', 'src')
    mdIt.renderer.rules.link_open = normalizeRendererHtmlUriAttribute('link_open', 'href')

    const minifiedHtml = await minify(mdIt.render(content), {
      collapseWhitespace: true,
      removeComments: true,
    })

    const authors = authorsData ? authorsData : []

    const varName = toVarName(slug)
    const exportFile = path.posix.join(COMPILED_DIR, `${varName}.ts`)
    const contentFile = path.posix.join(COMPILED_DIR, `${varName}.content.ts`)
    const thumbnailPresent = await hasThumbnail(slug)

    const contentTs = `
// AUTO-GENERATED FILE - DO NOT EDIT
export const html = \`${minifiedHtml}\`;
`.trimStart()
    await fs.writeFile(contentFile, contentTs, 'utf8')

    const ts = `
// AUTO-GENERATED FILE - DO NOT EDIT
export const article = {
  html: () => import(\`./${varName}.content\`).then(m => m.html),
  title: ${JSON.stringify(title)},
  summary: ${JSON.stringify(summary)},
  date: ${JSON.stringify(date)},
  slug: ${JSON.stringify(slug)},
  authors: ${JSON.stringify(authors)},
  thumbnail: ${thumbnailPresent},
  ${Object.keys(rest)
    .map((k) => `${k}: ${JSON.stringify(rest[k])},`)
    .join('\n  ')}
};
`.trimStart()

    await fs.writeFile(exportFile, ts, 'utf8')
    articleExports.push(`import { article as ${varName} } from "./${varName}";`)
    articlesArray.push(varName)

    articlesForRss.push({
      title,
      summary,
      date,
      slug,
      html: minifiedHtml,
    } as never)

    articlesForJson.push({
      title,
      summary,
      thumbnail: getThumbnailUrl(slug, thumbnailPresent),
      date: new Date(date).toISOString(),
      link: getArticleLink(slug),
    } as never)
  }

  console.log(`📂 Compiled ${files.length} articles.`)

  const rootExport = `
// AUTO-GENERATED FILE - DO NOT EDIT
${articleExports.join('\n')}

export const articles = [
  ${articlesArray.join(',\n  ')}
];
`.trimStart()

  await fs.writeFile(ROOT_FILE, rootExport, 'utf8')
  console.log(`🌟 Done! Wrote root articles export.`)

  await generateRssFeed(articlesForRss)
  await generateJsonFile(articlesForJson)
}

// Regenerates the RSS feed, skipping the write when the feed on disk already has
// identical articles (same titles and rendered HTML) to avoid needless feed churn.
async function generateRssFeed(articles): Promise<void> {
  const sorted = [...articles].sort(
    (a, b) => new Date(b.date).getTime() - new Date(a.date).getTime(),
  )

  let currentRssArticles: { title: string; html: string }[] = []
  try {
    const xml = await fs.readFile(RSS_PATH, 'utf8')
    const parsed = await parseStringPromise(xml)
    const items = parsed.rss?.channel?.[0]?.item || []
    currentRssArticles = items.map((item) => ({
      title: (item.title?.[0] ?? '').trim(),
      html: (item['content:encoded']?.[0] ?? '').replace(/^<!\[CDATA\[|\]\]>$/g, '').trim(),
    }))
  } catch {
    currentRssArticles = []
  }

  const newArr = sorted.map((a) => ({
    title: (a.title ?? '').trim(),
    html: (a.html ?? '').trim(),
  }))

  let isEqual = currentRssArticles.length === newArr.length
  if (isEqual) {
    for (let i = 0; i < newArr.length; ++i) {
      if (
        currentRssArticles[i].title !== newArr[i].title ||
        currentRssArticles[i].html !== newArr[i].html
      ) {
        isEqual = false
        break
      }
    }
  }

  if (isEqual) {
    console.log(`⏭️ RSS feed not regenerated (articles unchanged)`)
    return
  }

  const feed = new RSS({
    title: 'Modrinth News',
    description: 'Keep up-to-date on the latest news from Modrinth.',
    feed_url: `${SITE_URL}/news/feed/rss.xml`,
    site_url: `${SITE_URL}/news/`,
    language: 'en',
    generator: '@modrinth/blog',
  })

  for (const article of sorted) {
    feed.item({
      title: article.title,
      description: article.summary,
      url: `${SITE_URL}/news/article/${article.slug}/`,
      guid: `${SITE_URL}/news/article/${article.slug}/`,
      date: article.date,
      custom_elements: [{ 'content:encoded': `<![CDATA[${article.html}]]>` }],
    })
  }

  await fs.mkdir(path.dirname(RSS_PATH), { recursive: true })
  await fs.writeFile(RSS_PATH, feed.xml({ indent: true }), 'utf8')
  console.log(`📂 RSS feed written to ${RSS_PATH}`)
}

async function generateJsonFile(articles): Promise<void> {
  const sorted = [...articles].sort(
    (a, b) => new Date(b.date).getTime() - new Date(a.date).getTime(),
  )
  const json = { articles: sorted }
  await fs.mkdir(path.dirname(JSON_PATH), { recursive: true })
  await fs.writeFile(JSON_PATH, JSON.stringify(json, null, '\t') + '\n', 'utf8')
  console.log(`📝 Wrote JSON articles to ${JSON_PATH}`)
}

async function deleteDirContents(dir: string) {
  try {
    const entries = await fs.readdir(dir, { withFileTypes: true })
    await Promise.all(
      entries.map(async (entry) => {
        const fullPath = path.posix.join(dir, entry.name)
        if (entry.isDirectory()) {
          await fs.rm(fullPath, { recursive: true, force: true })
        } else {
          await fs.unlink(fullPath)
        }
      }),
    )
  } catch (error) {
    console.error(`❌ Error deleting contents of ${dir}:`, error)
    throw error
  }
}

async function copyPublicAssets() {
  console.log('🚚 Copying ./public to all PUBLIC_LOCATIONS...')
  for (const loc of PUBLIC_LOCATIONS) {
    await deleteDirContents(loc)
    await copyDir(PUBLIC_SRC, loc)
    console.log(`📂 Copied ./public to ${loc}`)
  }
  console.log('🎉 All public assets copied!')
}

async function main() {
  await compileArticles()
  await copyPublicAssets()
}

main().catch((e) => {
  console.error('❌ Error in compile.ts:', e)
  process.exit(1)
})