fix: builds should not accumulate on repeated changes (closes #404)
This commit is contained in:
		
							parent
							
								
									3209f7c3b7
								
							
						
					
					
						commit
						a1a1e7e1e0
					
				| @ -393,10 +393,16 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started. | ||||
|     }) | ||||
| 
 | ||||
|     const buildMutex = new Mutex() | ||||
|     const timeoutIds = new Set() | ||||
|     let lastBuildMs = 0 | ||||
|     let cleanupBuild = null | ||||
|     const build = async (clientRefresh) => { | ||||
|       const buildStart = new Date().getTime() | ||||
|       lastBuildMs = buildStart | ||||
|       const release = await buildMutex.acquire() | ||||
|       if (lastBuildMs > buildStart) { | ||||
|         release() | ||||
|         return | ||||
|       } | ||||
| 
 | ||||
|       if (cleanupBuild) { | ||||
|         await cleanupBuild() | ||||
| @ -428,12 +434,6 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started. | ||||
|       clientRefresh() | ||||
|     } | ||||
| 
 | ||||
|     const rebuild = (clientRefresh) => { | ||||
|       timeoutIds.forEach((id) => clearTimeout(id)) | ||||
|       timeoutIds.clear() | ||||
|       timeoutIds.add(setTimeout(() => build(clientRefresh), 250)) | ||||
|     } | ||||
| 
 | ||||
|     if (argv.serve) { | ||||
|       const connections = [] | ||||
|       const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild")) | ||||
| @ -539,7 +539,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started. | ||||
|           ignoreInitial: true, | ||||
|         }) | ||||
|         .on("all", async () => { | ||||
|           rebuild(clientRefresh) | ||||
|           build(clientRefresh) | ||||
|         }) | ||||
|     } else { | ||||
|       await build(() => {}) | ||||
|  | ||||
| @ -81,7 +81,7 @@ async function startServing( | ||||
|   } | ||||
| 
 | ||||
|   const initialSlugs = ctx.allSlugs | ||||
|   const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set() | ||||
|   let lastBuildMs = 0 | ||||
|   const toRebuild: Set<FilePath> = new Set() | ||||
|   const toRemove: Set<FilePath> = new Set() | ||||
|   const trackedAssets: Set<FilePath> = new Set() | ||||
| @ -111,49 +111,50 @@ async function startServing( | ||||
|     } | ||||
| 
 | ||||
|     // debounce rebuilds every 250ms
 | ||||
|     timeoutIds.add( | ||||
|       setTimeout(async () => { | ||||
|         const release = await mut.acquire() | ||||
|         timeoutIds.forEach((id) => clearTimeout(id)) | ||||
|         timeoutIds.clear() | ||||
| 
 | ||||
|         const perf = new PerfTimer() | ||||
|         console.log(chalk.yellow("Detected change, rebuilding...")) | ||||
|         try { | ||||
|           const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) | ||||
|     const buildStart = new Date().getTime() | ||||
|     lastBuildMs = buildStart | ||||
|     const release = await mut.acquire() | ||||
|     if (lastBuildMs > buildStart) { | ||||
|       release() | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|           const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] | ||||
|             .filter((fp) => !toRemove.has(fp)) | ||||
|             .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) | ||||
|     const perf = new PerfTimer() | ||||
|     console.log(chalk.yellow("Detected change, rebuilding...")) | ||||
|     try { | ||||
|       const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) | ||||
| 
 | ||||
|           ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] | ||||
|           const parsedContent = await parseMarkdown(ctx, filesToRebuild) | ||||
|           for (const content of parsedContent) { | ||||
|             const [_tree, vfile] = content | ||||
|             contentMap.set(vfile.data.filePath!, content) | ||||
|           } | ||||
|       const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] | ||||
|         .filter((fp) => !toRemove.has(fp)) | ||||
|         .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) | ||||
| 
 | ||||
|           for (const fp of toRemove) { | ||||
|             contentMap.delete(fp) | ||||
|           } | ||||
|       ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] | ||||
|       const parsedContent = await parseMarkdown(ctx, filesToRebuild) | ||||
|       for (const content of parsedContent) { | ||||
|         const [_tree, vfile] = content | ||||
|         contentMap.set(vfile.data.filePath!, content) | ||||
|       } | ||||
| 
 | ||||
|           // TODO: we can probably traverse the link graph to figure out what's safe to delete here
 | ||||
|           // instead of just deleting everything
 | ||||
|           await rimraf(argv.output) | ||||
|           const parsedFiles = [...contentMap.values()] | ||||
|           const filteredContent = filterContent(ctx, parsedFiles) | ||||
|           await emitContent(ctx, filteredContent) | ||||
|           console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) | ||||
|         } catch { | ||||
|           console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) | ||||
|         } | ||||
|       for (const fp of toRemove) { | ||||
|         contentMap.delete(fp) | ||||
|       } | ||||
| 
 | ||||
|         clientRefresh() | ||||
|         toRebuild.clear() | ||||
|         toRemove.clear() | ||||
|         release() | ||||
|       }, 250), | ||||
|     ) | ||||
|       const parsedFiles = [...contentMap.values()] | ||||
|       const filteredContent = filterContent(ctx, parsedFiles) | ||||
|       // TODO: we can probably traverse the link graph to figure out what's safe to delete here
 | ||||
|       // instead of just deleting everything
 | ||||
|       await rimraf(argv.output) | ||||
|       await emitContent(ctx, filteredContent) | ||||
|       console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) | ||||
|     } catch { | ||||
|       console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) | ||||
|     } | ||||
| 
 | ||||
|     clientRefresh() | ||||
|     toRebuild.clear() | ||||
|     toRemove.clear() | ||||
|     release() | ||||
|   } | ||||
| 
 | ||||
|   const watcher = chokidar.watch(".", { | ||||
| @ -168,7 +169,6 @@ async function startServing( | ||||
|     .on("unlink", (fp) => rebuild(fp, "delete")) | ||||
| 
 | ||||
|   return async () => { | ||||
|     timeoutIds.forEach((id) => clearTimeout(id)) | ||||
|     await watcher.close() | ||||
|   } | ||||
| } | ||||
|  | ||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user