fix: builds should not accumulate on repeated changes (closes #404)
commit a1a1e7e1e0
parent 3209f7c3b7

2 changed files with 47 additions and 47 deletions
@@ -393,10 +393,16 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
     })
 
     const buildMutex = new Mutex()
-    const timeoutIds = new Set()
+    let lastBuildMs = 0
     let cleanupBuild = null
     const build = async (clientRefresh) => {
+      const buildStart = new Date().getTime()
+      lastBuildMs = buildStart
       const release = await buildMutex.acquire()
+      if (lastBuildMs > buildStart) {
+        release()
+        return
+      }
 
       if (cleanupBuild) {
         await cleanupBuild()
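The first hunk introduces a "latest build wins" guard: every call stamps lastBuildMs before queueing on the mutex, and a build that finally acquires the lock but sees a newer stamp releases it and returns, so queued builds collapse into one instead of accumulating. A minimal self-contained sketch of the pattern, assuming the async-mutex package (the Mutex import sits outside this diff) and hypothetical names:

    import { Mutex } from "async-mutex"

    const mutex = new Mutex()
    let lastRequestMs = 0 // plays the role of lastBuildMs in the hunk above

    async function build(run: () => Promise<void>) {
      const start = Date.now()
      lastRequestMs = start // stamp before queueing on the mutex
      const release = await mutex.acquire() // concurrent callers park here
      if (lastRequestMs > start) {
        release() // a newer call arrived while we waited; let it do the work
        return
      }
      try {
        await run()
      } finally {
        release() // the diff releases explicitly rather than via finally
      }
    }

If several saves land while a build is running, every waiter except the one carrying the newest stamp returns immediately, which is exactly the accumulation the commit title describes.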
@@ -428,12 +434,6 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
       clientRefresh()
     }
 
-    const rebuild = (clientRefresh) => {
-      timeoutIds.forEach((id) => clearTimeout(id))
-      timeoutIds.clear()
-      timeoutIds.add(setTimeout(() => build(clientRefresh), 250))
-    }
-
     if (argv.serve) {
       const connections = []
       const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
@@ -539,7 +539,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
           ignoreInitial: true,
         })
         .on("all", async () => {
-          rebuild(clientRefresh)
+          build(clientRefresh)
        })
     } else {
       await build(() => {})
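Taken together, the second and third hunks remove the timer-based debounce from the CLI entirely: the rebuild wrapper and its timeoutIds bookkeeping are deleted, and the chokidar callback invokes build(clientRefresh) directly, since the stamp-and-mutex guard inside build now does the coalescing that setTimeout used to approximate.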
@@ -81,7 +81,7 @@ async function startServing(
   }
 
   const initialSlugs = ctx.allSlugs
-  const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
+  let lastBuildMs = 0
   const toRebuild: Set<FilePath> = new Set()
   const toRemove: Set<FilePath> = new Set()
   const trackedAssets: Set<FilePath> = new Set()
@@ -111,49 +111,50 @@ async function startServing(
     }
 
     // debounce rebuilds every 250ms
-    timeoutIds.add(
-      setTimeout(async () => {
-        const release = await mut.acquire()
-        timeoutIds.forEach((id) => clearTimeout(id))
-        timeoutIds.clear()
-
-        const perf = new PerfTimer()
-        console.log(chalk.yellow("Detected change, rebuilding..."))
-        try {
-          const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
-
-          const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
-            .filter((fp) => !toRemove.has(fp))
-            .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-
-          ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
-          const parsedContent = await parseMarkdown(ctx, filesToRebuild)
-          for (const content of parsedContent) {
-            const [_tree, vfile] = content
-            contentMap.set(vfile.data.filePath!, content)
-          }
-
-          for (const fp of toRemove) {
-            contentMap.delete(fp)
-          }
-
-          // TODO: we can probably traverse the link graph to figure out what's safe to delete here
-          // instead of just deleting everything
-          await rimraf(argv.output)
-          const parsedFiles = [...contentMap.values()]
-          const filteredContent = filterContent(ctx, parsedFiles)
-          await emitContent(ctx, filteredContent)
-          console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
-        } catch {
-          console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
-        }
-
-        clientRefresh()
-        toRebuild.clear()
-        toRemove.clear()
-        release()
-      }, 250),
-    )
+    const buildStart = new Date().getTime()
+    lastBuildMs = buildStart
+    const release = await mut.acquire()
+    if (lastBuildMs > buildStart) {
+      release()
+      return
+    }
+
+    const perf = new PerfTimer()
+    console.log(chalk.yellow("Detected change, rebuilding..."))
+    try {
+      const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
+
+      const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+        .filter((fp) => !toRemove.has(fp))
+        .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
+
+      ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
+      const parsedContent = await parseMarkdown(ctx, filesToRebuild)
+      for (const content of parsedContent) {
+        const [_tree, vfile] = content
+        contentMap.set(vfile.data.filePath!, content)
+      }
+
+      for (const fp of toRemove) {
+        contentMap.delete(fp)
+      }
+
+      const parsedFiles = [...contentMap.values()]
+      const filteredContent = filterContent(ctx, parsedFiles)
+
+      // TODO: we can probably traverse the link graph to figure out what's safe to delete here
+      // instead of just deleting everything
+      await rimraf(argv.output)
+      await emitContent(ctx, filteredContent)
+      console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+    } catch {
+      console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
+    }
+
+    clientRefresh()
+    toRebuild.clear()
+    toRemove.clear()
+    release()
   }
 
   const watcher = chokidar.watch(".", {
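The second file applies the same replacement inside startServing: the setTimeout/timeoutIds debounce is gone, and the rebuild handler runs the stamp check against the mut mutex instead. Two details are worth noting. First, toRebuild and toRemove keep accumulating paths across watcher events, so whichever queued call survives the check processes the whole backlog in one pass. Second, the bottom of the hunk reorders the pipeline so that parsedFiles and filteredContent are computed before rimraf clears the output directory. A condensed sketch of the batching half, under the same assumptions as the earlier sketch (async-mutex, hypothetical names, a logging stub standing in for the parse/filter/emit pipeline):

    import { Mutex } from "async-mutex"

    const mutex = new Mutex()
    let lastRequestMs = 0
    const pending = new Set<string>() // plays the role of toRebuild

    // stand-in for parseMarkdown/filterContent/emitContent above
    async function processBatch(files: string[]): Promise<void> {
      console.log(`rebuilding ${files.length} file(s)`)
    }

    function onChange(fp: string): void {
      pending.add(fp) // cheap bookkeeping on every watcher event
      void rebuild() // fire and forget; superseded calls exit early
    }

    async function rebuild(): Promise<void> {
      const start = Date.now()
      lastRequestMs = start
      const release = await mutex.acquire()
      if (lastRequestMs > start) {
        release() // a newer event won; it will handle the backlog
        return
      }
      const batch = [...pending] // snapshot the whole backlog
      pending.clear()
      try {
        await processBatch(batch)
      } finally {
        release()
      }
    }

Snapshotting and clearing pending before processing keeps the sketch simple; the diff itself clears its sets only after the build and the client refresh have run.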
@@ -168,7 +169,6 @@ async function startServing(
     .on("unlink", (fp) => rebuild(fp, "delete"))
 
   return async () => {
-    timeoutIds.forEach((id) => clearTimeout(id))
     await watcher.close()
   }
 }
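With the timers gone from both files, the cleanup function returned by startServing has nothing left to cancel, which is why the final hunk reduces teardown to awaiting watcher.close().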