fix watch-mode batching

Jacky Zhao 2023-07-24 00:04:01 -07:00
parent 569ff1a801
commit 041a4ce7bc
14 changed files with 91 additions and 77 deletions
quartz/processors


@@ -7,23 +7,24 @@ import { Root as HTMLRoot } from "hast"
 import { ProcessedContent } from "../plugins/vfile"
 import { PerfTimer } from "../perf"
 import { read } from "to-vfile"
-import { FilePath, QUARTZ, ServerSlug, slugifyFilePath } from "../path"
+import { FilePath, QUARTZ, slugifyFilePath } from "../path"
 import path from "path"
 import os from "os"
 import workerpool, { Promise as WorkerPromise } from "workerpool"
 import { QuartzTransformerPluginInstance } from "../plugins/types"
 import { QuartzLogger } from "../log"
 import { trace } from "../trace"
 import { BuildCtx } from "../ctx"
 export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
-export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor {
+export function createProcessor(ctx: BuildCtx): QuartzProcessor {
+  const transformers = ctx.cfg.plugins.transformers
   // base Markdown -> MD AST
   let processor = unified().use(remarkParse)
   // MD AST -> MD AST transforms
   for (const plugin of transformers.filter((p) => p.markdownPlugins)) {
-    processor = processor.use(plugin.markdownPlugins!())
+    processor = processor.use(plugin.markdownPlugins!(ctx))
   }
   // MD AST -> HTML AST
@@ -31,7 +32,7 @@ export function createProcessor(transformers: QuartzTransformerPluginInstance[])
   // HTML AST -> HTML AST transforms
   for (const plugin of transformers.filter((p) => p.htmlPlugins)) {
-    processor = processor.use(plugin.htmlPlugins!())
+    processor = processor.use(plugin.htmlPlugins!(ctx))
   }
   return processor
@@ -73,7 +74,8 @@ async function transpileWorkerScript() {
   })
 }
-export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) {
+export function createFileParser(ctx: BuildCtx, fps: FilePath[]) {
+  const { argv, cfg } = ctx
   return async (processor: QuartzProcessor) => {
     const res: ProcessedContent[] = []
     for (const fp of fps) {
@@ -85,12 +87,11 @@ export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSl
         // Text -> Text transforms
         for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) {
-          file.value = plugin.textTransform!(file.value)
+          file.value = plugin.textTransform!(ctx, file.value)
         }
         // base data properties that plugins may use
         file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath)
-        file.data.allSlugs = allSlugs
         file.data.filePath = fp
         const ast = processor.parse(file)
@@ -111,24 +112,19 @@ export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSl
 }
 export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
-  const { argv, cfg } = ctx
+  const { argv } = ctx
   const perf = new PerfTimer()
   const log = new QuartzLogger(argv.verbose)
   const CHUNK_SIZE = 128
   let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
-  // get all slugs ahead of time as each thread needs a copy
-  const allSlugs = fps.map((fp) =>
-    slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath),
-  )
   let res: ProcessedContent[] = []
   log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
     try {
-      const processor = createProcessor(cfg.plugins.transformers)
-      const parse = createFileParser(ctx, fps, allSlugs)
+      const processor = createProcessor(ctx)
+      const parse = createFileParser(ctx, fps)
       res = await parse(processor)
     } catch (error) {
       log.end()
@@ -144,7 +140,7 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<Pro
     const childPromises: WorkerPromise<ProcessedContent[]>[] = []
     for (const chunk of chunks(fps, CHUNK_SIZE)) {
-      childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs]))
+      childPromises.push(pool.exec("parseFiles", [argv, chunk, ctx.allSlugs]))
     }
     const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
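
Taken together, these hunks move per-build state onto a single BuildCtx object: createProcessor and createFileParser now take ctx instead of individual arguments, and the shared slug list lives at ctx.allSlugs instead of being recomputed inside parseMarkdown. Below is a hedged sketch, not the actual quartz/ctx.ts, of the minimal shape BuildCtx needs for these hunks to type-check, plus one way a caller might seed allSlugs once per rebuild; the field names come from the diff, while the placeholder types, the slugifyFilePath declaration, and the seedSlugs helper are illustrative assumptions.

import path from "path"

// Hedged sketch only -- not the real quartz/ctx.ts. Field names come from the
// hunks above; the aliases below are placeholders for the actual quartz types.
type ServerSlug = string // stand-in for the ServerSlug type exported by quartz/path
type TransformerPlugin = { name: string } // stand-in for QuartzTransformerPluginInstance

interface BuildCtx {
  argv: { directory: string; verbose: boolean } // only the argv fields the diff reads
  cfg: { plugins: { transformers: TransformerPlugin[] } } // read as ctx.cfg.plugins.transformers
  allSlugs: ServerSlug[] // shared slug list, no longer recomputed inside parseMarkdown
}

// Placeholder for the real slugifyFilePath exported by quartz/path.
declare function slugifyFilePath(fp: string): ServerSlug

// The slug computation deleted from parseMarkdown presumably moves to a caller in
// one of the other changed files; this hypothetical helper mirrors the removed lines.
function seedSlugs(ctx: BuildCtx, fps: string[]): void {
  ctx.allSlugs = fps.map((fp) =>
    slugifyFilePath(path.relative(ctx.argv.directory, path.resolve(fp))),
  )
}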