@@ -1,10 +1,20 @@
-import {cache} from 'react';
 import matter from 'gray-matter';
 import {s} from 'hastscript';
 import yaml from 'js-yaml';
 import {bundleMDX} from 'mdx-bundler';
-import {access, opendir, readFile} from 'node:fs/promises';
+import {BinaryLike, createHash} from 'node:crypto';
+import {createReadStream, createWriteStream, mkdirSync} from 'node:fs';
+import {access, cp, mkdir, opendir, readFile} from 'node:fs/promises';
 import path from 'node:path';
+// @ts-expect-error ts(2305) -- For some reason "compose" is not recognized in the types
+import {compose, Readable} from 'node:stream';
+import {json} from 'node:stream/consumers';
+import {pipeline} from 'node:stream/promises';
+import {
+  constants as zlibConstants,
+  createBrotliCompress,
+  createBrotliDecompress,
+} from 'node:zlib';
 import {limitFunction} from 'p-limit';
 import rehypeAutolinkHeadings from 'rehype-autolink-headings';
 import rehypePresetMinify from 'rehype-preset-minify';
@@ -48,6 +58,34 @@ const root = process.cwd();
 // Functions which looks like AWS Lambda and we get `EMFILE` errors when trying to open
 // so many files at once.
 const FILE_CONCURRENCY_LIMIT = 200;
+const CACHE_COMPRESS_LEVEL = 4;
+const CACHE_DIR = path.join(root, '.next', 'cache', 'mdx-bundler');
+mkdirSync(CACHE_DIR, {recursive: true});
+
+const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex');
+
+async function readCacheFile<T>(file: string): Promise<T> {
+  const reader = createReadStream(file);
+  const decompressor = createBrotliDecompress();
+
+  return (await json(compose(reader, decompressor))) as T;
+}
+
+async function writeCacheFile(file: string, data: string) {
+  const bufferData = Buffer.from(data);
+  await pipeline(
+    Readable.from(bufferData),
+    createBrotliCompress({
+      chunkSize: 32 * 1024,
+      params: {
+        [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
+        [zlibConstants.BROTLI_PARAM_QUALITY]: CACHE_COMPRESS_LEVEL,
+        [zlibConstants.BROTLI_PARAM_SIZE_HINT]: bufferData.length,
+      },
+    }),
+    createWriteStream(file)
+  );
+}

 function formatSlug(slug: string) {
   return slug.replace(/\.(mdx|md)/, '');
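For readers who want to try the cache helpers in isolation, here is a minimal self-contained sketch of the same Brotli round trip (compress a JSON payload to a `.br` file, then stream it back and parse it). The temp directory, file name, and payload are assumptions made up for illustration; this is not part of the diff itself.

```ts
import {createReadStream, createWriteStream} from 'node:fs';
import {mkdtemp} from 'node:fs/promises';
import {tmpdir} from 'node:os';
import path from 'node:path';
import {Readable} from 'node:stream';
import {json} from 'node:stream/consumers';
import {pipeline} from 'node:stream/promises';
import {
  constants as zlibConstants,
  createBrotliCompress,
  createBrotliDecompress,
} from 'node:zlib';

async function main() {
  // Hypothetical cache location and payload, for illustration only.
  const dir = await mkdtemp(path.join(tmpdir(), 'mdx-cache-'));
  const file = path.join(dir, 'example.br');
  const payload = JSON.stringify({slug: 'example', title: 'Example'});
  const buf = Buffer.from(payload);

  // Compress: in-memory buffer -> Brotli (text mode, quality 4) -> .br file.
  await pipeline(
    Readable.from(buf),
    createBrotliCompress({
      params: {
        [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
        [zlibConstants.BROTLI_PARAM_QUALITY]: 4,
        [zlibConstants.BROTLI_PARAM_SIZE_HINT]: buf.length,
      },
    }),
    createWriteStream(file)
  );

  // Decompress: .br file -> Brotli decompressor -> parsed JSON object.
  const restored = await json(createReadStream(file).pipe(createBrotliDecompress()));
  console.log(restored); // { slug: 'example', title: 'Example' }
}

main().catch(console.error);
```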
@@ -484,6 +522,36 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     );
   }

+  let cacheKey: string | null = null;
+  let cacheFile: string | null = null;
+  let assetsCacheDir: string | null = null;
+  const outdir = path.join(root, 'public', 'mdx-images');
+  await mkdir(outdir, {recursive: true});
+
+  if (process.env.CI) {
+    cacheKey = md5(source);
+    cacheFile = path.join(CACHE_DIR, `${cacheKey}.br`);
+    assetsCacheDir = path.join(CACHE_DIR, cacheKey);
+
+    try {
+      const [cached, _] = await Promise.all([
+        readCacheFile<SlugFile>(cacheFile),
+        cp(assetsCacheDir, outdir, {recursive: true}),
+      ]);
+      return cached;
+    } catch (err) {
+      if (
+        err.code !== 'ENOENT' &&
+        err.code !== 'ABORT_ERR' &&
+        err.code !== 'Z_BUF_ERROR'
+      ) {
+        // If cache is corrupted, ignore and proceed
+        // eslint-disable-next-line no-console
+        console.warn(`Failed to read MDX cache: ${cacheFile}`, err);
+      }
+    }
+  }
+
   process.env.ESBUILD_BINARY_PATH = path.join(
     root,
     'node_modules',
@@ -578,8 +646,12 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
         '.svg': 'dataurl',
       };
       // Set the `outdir` to a public location for this bundle.
-      // this where this images will be copied
-      options.outdir = path.join(root, 'public', 'mdx-images');
+      // this is where these images will be copied
+      // the reason we use the cache folder when it's
+      // enabled is because mdx-images is a dumping ground
+      // for all images, so we cannot filter it out only
+      // for this specific slug easily
+      options.outdir = assetsCacheDir || outdir;

       // Set write to true so that esbuild will output the files.
       options.write = true;
@@ -609,12 +681,30 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     },
   };

+  if (assetsCacheDir && cacheFile) {
+    await cp(assetsCacheDir, outdir, {recursive: true});
+    writeCacheFile(cacheFile, JSON.stringify(resultObj)).catch(e => {
+      // eslint-disable-next-line no-console
+      console.warn(`Failed to write MDX cache: ${cacheFile}`, e);
+    });
+  }
+
   return resultObj;
 }

+const fileBySlugCache = new Map<string, Promise<SlugFile>>();
+
 /**
  * Cache the result of {@link getFileBySlug}.
  *
  * This is useful for performance when rendering the same file multiple times.
  */
-export const getFileBySlugWithCache = cache(getFileBySlug);
+export function getFileBySlugWithCache(slug: string): Promise<SlugFile> {
+  let cached = fileBySlugCache.get(slug);
+  if (!cached) {
+    cached = getFileBySlug(slug);
+    fileBySlugCache.set(slug, cached);
+  }
+
+  return cached;
+}
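The new `getFileBySlugWithCache` replaces React's `cache()` with a module-level `Map` keyed by slug that stores the Promise itself, so concurrent renders of the same slug await a single in-flight `getFileBySlug` call. Below is a minimal, generic sketch of that promise-memoization pattern; `loadExpensive` and `loadWithCache` are hypothetical stand-ins, not code from this PR.

```ts
// Generic promise-memoization sketch; `loadExpensive` stands in for an
// expensive async function like getFileBySlug.
const inflight = new Map<string, Promise<string>>();

async function loadExpensive(key: string): Promise<string> {
  // Simulate slow work (e.g. bundling MDX).
  await new Promise(resolve => setTimeout(resolve, 100));
  console.log(`computed ${key}`);
  return `result for ${key}`;
}

function loadWithCache(key: string): Promise<string> {
  let pending = inflight.get(key);
  if (!pending) {
    // Caching the Promise (not the resolved value) means concurrent callers
    // for the same key share one in-flight computation.
    pending = loadExpensive(key);
    inflight.set(key, pending);
  }
  return pending;
}

// Logs "computed a" only once; both calls resolve to the same result.
void Promise.all([loadWithCache('a'), loadWithCache('a')]);
```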