update spinners

Jacky Zhao 2023-06-04 13:37:43 -04:00
parent 9ad89997a5
commit 1406ee0f05
7 changed files with 128 additions and 102 deletions
quartz/processors


@@ -12,9 +12,11 @@ import path from 'path'
 import os from 'os'
 import workerpool, { Promise as WorkerPromise } from 'workerpool'
 import { QuartzTransformerPlugin } from '../plugins/types'
+import { QuartzLogger } from '../log'
 import chalk from 'chalk'
 export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
-export function createProcessor(transformers: QuartzTransformerPlugin[]): any {
+export function createProcessor(transformers: QuartzTransformerPlugin[]): QuartzProcessor {
   // base Markdown -> MD AST
   let processor = unified().use(remarkParse)
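
The new QuartzLogger import is what the commit title refers to: parseMarkdown further down wraps the parsing work in log.start(...) and log.success(...) so a spinner runs while files are processed. quartz/log.ts is one of the other changed files and is not shown in this view, so the following is only a minimal sketch (not part of the diff) of a spinner-backed logger with that start/success interface, assuming a plain setInterval-driven animation:

// Hypothetical sketch, not the actual quartz/log.ts: a spinner-backed logger
// exposing the start()/success() calls used in parseMarkdown below.
export class QuartzLogger {
  verbose: boolean
  private spinner: ReturnType<typeof setInterval> | undefined

  constructor(verbose: boolean) {
    this.verbose = verbose
  }

  start(text: string) {
    if (this.verbose) {
      // verbose mode already prints per-file [process] lines, so skip the animation
      console.log(text)
      return
    }
    const frames = ["◐", "◓", "◑", "◒"]
    let i = 0
    this.spinner = setInterval(() => {
      process.stdout.write(`\r${frames[i]} ${text}`)
      i = (i + 1) % frames.length
    }, 80)
  }

  success(text: string) {
    if (this.spinner) {
      clearInterval(this.spinner)
      process.stdout.write("\r\x1b[K") // clear the spinner line before the final message
    }
    console.log(text)
  }
}
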
@@ -41,15 +43,11 @@ function* chunks<T>(arr: T[], n: number) {
   }
 }
-async function transpileWorkerScript(verbose: boolean) {
+async function transpileWorkerScript() {
   // transpile worker script
   const cacheFile = "./.quartz-cache/transpiled-worker.mjs"
   const fp = "./quartz/worker.ts"
-  if (verbose) {
-    console.log("Transpiling worker script")
-  }
-  await esbuild.build({
+  return esbuild.build({
     entryPoints: [fp],
     outfile: path.join("quartz", cacheFile),
     bundle: true,
@@ -75,31 +73,50 @@ async function transpileWorkerScript(verbose: boolean) {
   })
 }
+export function createFileParser(baseDir: string, fps: string[], verbose: boolean) {
+  return async (processor: QuartzProcessor) => {
+    const res: ProcessedContent[] = []
+    for (const fp of fps) {
+      try {
+        const file = await read(fp)
+        // base data properties that plugins may use
+        file.data.slug = slugify(path.relative(baseDir, file.path))
+        file.data.filePath = fp
+        const ast = processor.parse(file)
+        const newAst = await processor.run(ast, file)
+        res.push([newAst, file])
+        if (verbose) {
+          console.log(`[process] ${fp} -> ${file.data.slug}`)
+        }
+      } catch (err) {
+        console.log(chalk.red(`Failed to process \`${fp}\`: `) + err)
+        process.exit(1)
+      }
+    }
+    return res
+  }
+}
 export async function parseMarkdown(transformers: QuartzTransformerPlugin[], baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> {
   const perf = new PerfTimer()
+  const log = new QuartzLogger(verbose)
   const CHUNK_SIZE = 128
   let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
-  const res: ProcessedContent[] = []
+  let res: ProcessedContent[] = []
+  log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
     // single-thread
     const processor = createProcessor(transformers)
-    for (const fp of fps) {
-      const file = await read(fp)
-      // base data properties that plugins may use
-      file.data.slug = slugify(path.relative(baseDir, file.path))
-      file.data.filePath = fp
-      const ast = processor.parse(file)
-      res.push([await processor.run(ast, file), file])
-      if (verbose) {
-        console.log(`[process] ${fp} -> ${file.data.slug}`)
-      }
-    }
+    const parse = createFileParser(baseDir, fps, verbose)
+    res = await parse(processor)
   } else {
-    await transpileWorkerScript(verbose)
+    await transpileWorkerScript()
     const pool = workerpool.pool(
       './quartz/bootstrap-worker.mjs',
       {
@@ -113,11 +130,12 @@ export async function parseMarkdown(transformers: QuartzTransformerPlugin[], bas
     for (const chunk of chunks(fps, CHUNK_SIZE)) {
       childPromises.push(pool.exec('parseFiles', [baseDir, chunk, verbose]))
     }
     const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
-    res.push(...results.flat())
+    res = results.flat()
     await pool.terminate()
   }
-  console.log(`Parsed and transformed ${res.length} Markdown files with ${concurrency} cores in ${perf.timeSince()}`)
+  log.success(`Parsed ${res.length} Markdown files in ${perf.timeSince()}`)
   return res
 }
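
Pulling the per-file loop out into createFileParser lets the single-threaded branch and the worker pool share the same parsing code: the main thread calls it directly, while each worker runs it on its own chunk via pool.exec('parseFiles', ...). quartz/worker.ts (the source transpiled into bootstrap-worker.mjs above) is not shown in this commit view; a rough sketch of how its parseFiles entry point could reuse the two exports, assuming the transformer list comes from the site config, looks like:

// Hypothetical sketch of the worker entry point behind pool.exec('parseFiles', ...);
// the actual quartz/worker.ts in this commit may differ.
import cfg from "../quartz.config"
import { createFileParser, createProcessor } from "./processors/parse"

// parse one chunk of files on a worker thread using the shared loop above
export async function parseFiles(baseDir: string, fps: string[], verbose: boolean) {
  const processor = createProcessor(cfg.plugins.transformers)
  const parse = createFileParser(baseDir, fps, verbose)
  return parse(processor)
}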