base path refactor, more docs

This commit is contained in:
Jacky Zhao 2023-07-13 00:19:35 -07:00
parent 08f8e3b4a4
commit 906f91f8ee
37 changed files with 1861 additions and 156 deletions
quartz/processors

View file

@@ -5,7 +5,7 @@ import { PerfTimer } from "../perf"
import { ComponentResources, emitComponentResources, getComponentResources, getStaticResourcesFromPlugins } from "../plugins"
import { EmitCallback } from "../plugins/types"
import { ProcessedContent } from "../plugins/vfile"
import { QUARTZ, slugify } from "../path"
import { FilePath, QUARTZ, slugifyFilePath } from "../path"
import { globbyStream } from "globby"
import chalk from "chalk"
@@ -71,7 +71,7 @@ export async function emitContent(contentFolder: string, output: string, cfg: Qu
log.start(`Emitting output files`)
const emit: EmitCallback = async ({ slug, ext, content }) => {
const pathToPage = path.join(output, slug + ext)
const pathToPage = path.join(output, slug + ext) as FilePath
const dir = path.dirname(pathToPage)
await fs.promises.mkdir(dir, { recursive: true })
await fs.promises.writeFile(pathToPage, content)
@@ -123,15 +123,16 @@ export async function emitContent(contentFolder: string, output: string, cfg: Qu
// glob all non MD/MDX/HTML files in content folder and copy it over
const assetsPath = path.join(output, "assets")
for await (const fp of globbyStream("**", {
for await (const rawFp of globbyStream("**", {
ignore: ["**/*.md"],
cwd: contentFolder,
})) {
const ext = path.extname(fp as string)
const src = path.join(contentFolder, fp as string)
const name = slugify(fp as string) + ext
const dest = path.join(assetsPath, name)
const dir = path.dirname(dest)
const fp = rawFp as FilePath
const ext = path.extname(fp)
const src = path.join(contentFolder, fp) as FilePath
const name = slugifyFilePath(fp as FilePath) + ext as FilePath
const dest = path.join(assetsPath, name) as FilePath
const dir = path.dirname(dest) as FilePath
await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
await fs.promises.copyFile(src, dest)
emittedFiles += 1

View file

@@ -7,7 +7,7 @@ import { Root as HTMLRoot } from 'hast'
import { ProcessedContent } from '../plugins/vfile'
import { PerfTimer } from '../perf'
import { read } from 'to-vfile'
import { slugify } from '../path'
import { FilePath, ServerSlug, slugifyFilePath } from '../path'
import path from 'path'
import os from 'os'
import workerpool, { Promise as WorkerPromise } from 'workerpool'
@@ -73,7 +73,7 @@ async function transpileWorkerScript() {
})
}
export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: string[], allSlugs: string[], verbose: boolean) {
export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: FilePath[], allSlugs: ServerSlug[], verbose: boolean) {
return async (processor: QuartzProcessor) => {
const res: ProcessedContent[] = []
for (const fp of fps) {
@@ -89,7 +89,7 @@ export function createFileParser(transformers: QuartzTransformerPluginInstance[]
}
// base data properties that plugins may use
file.data.slug = slugify(path.relative(baseDir, file.path))
file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath)
file.data.allSlugs = allSlugs
file.data.filePath = fp
@@ -110,7 +110,7 @@ export function createFileParser(transformers: QuartzTransformerPluginInstance[]
}
}
export async function parseMarkdown(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> {
export async function parseMarkdown(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: FilePath[], verbose: boolean): Promise<ProcessedContent[]> {
const perf = new PerfTimer()
const log = new QuartzLogger(verbose)
@@ -118,8 +118,7 @@ export async function parseMarkdown(transformers: QuartzTransformerPluginInstanc
let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
// get all slugs ahead of time as each thread needs a copy
// const slugs: string[] = fps.map(fp => slugify(path))
const allSlugs = fps.map(fp => slugify(path.relative(baseDir, path.resolve(fp))))
const allSlugs = fps.map(fp => slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath))
let res: ProcessedContent[] = []
log.start(`Parsing input files using ${concurrency} threads`)