|
1 |
| -import {BinaryLike, createHash} from 'crypto'; |
2 |
| - |
3 | 1 | import {cache} from 'react';
|
4 | 2 | import matter from 'gray-matter';
|
5 | 3 | import {s} from 'hastscript';
|
6 | 4 | import yaml from 'js-yaml';
|
7 | 5 | import {bundleMDX} from 'mdx-bundler';
|
8 |
| -import {createReadStream, createWriteStream, mkdirSync} from 'node:fs'; |
9 | 6 | import {access, opendir, readFile} from 'node:fs/promises';
|
10 | 7 | import path from 'node:path';
|
11 |
| -// @ts-expect-error ts(2305) -- For some reason "compose" is not recognized in the types |
12 |
| -import {compose, Readable} from 'node:stream'; |
13 |
| -import {json} from 'node:stream/consumers'; |
14 |
| -import {pipeline} from 'node:stream/promises'; |
15 |
| -import { |
16 |
| - constants as zlibConstants, |
17 |
| - createBrotliCompress, |
18 |
| - createBrotliDecompress, |
19 |
| -} from 'node:zlib'; |
20 | 8 | import {limitFunction} from 'p-limit';
|
21 | 9 | import rehypeAutolinkHeadings from 'rehype-autolink-headings';
|
22 | 10 | import rehypePresetMinify from 'rehype-preset-minify';
|
@@ -60,33 +48,6 @@ const root = process.cwd();
|
60 | 48 | // Functions which look like AWS Lambda, and we get `EMFILE` errors when trying to open
|
61 | 49 | // so many files at once.
|
62 | 50 | const FILE_CONCURRENCY_LIMIT = 200;
|
63 |
| -const CACHE_COMPRESS_LEVEL = 4; |
64 |
| -const CACHE_DIR = path.join(root, '.next', 'cache', 'mdx-bundler'); |
65 |
| -mkdirSync(CACHE_DIR, {recursive: true}); |
66 |
| - |
67 |
| -const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex'); |
68 |
| - |
69 |
| -async function readCacheFile<T>(file: string): Promise<T> { |
70 |
| - const reader = createReadStream(file); |
71 |
| - const decompressor = createBrotliDecompress(); |
72 |
| - |
73 |
| - return (await json(compose(reader, decompressor))) as T; |
74 |
| -} |
75 |
| - |
76 |
| -async function writeCacheFile(file: string, data: string) { |
77 |
| - await pipeline( |
78 |
| - Readable.from(data), |
79 |
| - createBrotliCompress({ |
80 |
| - chunkSize: 32 * 1024, |
81 |
| - params: { |
82 |
| - [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT, |
83 |
| - [zlibConstants.BROTLI_PARAM_QUALITY]: CACHE_COMPRESS_LEVEL, |
84 |
| - [zlibConstants.BROTLI_PARAM_SIZE_HINT]: data.length, |
85 |
| - }, |
86 |
| - }), |
87 |
| - createWriteStream(file) |
88 |
| - ); |
89 |
| -} |
90 | 51 |
|
91 | 52 | function formatSlug(slug: string) {
|
92 | 53 | return slug.replace(/\.(mdx|md)/, '');
|
@@ -523,20 +484,6 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
|
523 | 484 | );
|
524 | 485 | }
|
525 | 486 |
|
526 |
| - const cacheKey = md5(source); |
527 |
| - const cacheFile = path.join(CACHE_DIR, cacheKey); |
528 |
| - |
529 |
| - try { |
530 |
| - const cached = await readCacheFile<SlugFile>(cacheFile); |
531 |
| - return cached; |
532 |
| - } catch (err) { |
533 |
| - if (err.code !== 'ENOENT' && err.code !== 'ABORT_ERR') { |
534 |
| - // Cache miss or unreadable cache entry — warn on unexpected errors, then fall through and rebuild |
535 |
| - // eslint-disable-next-line no-console |
536 |
| - console.warn(`Failed to read MDX cache: ${cacheFile}`, err); |
537 |
| - } |
538 |
| - } |
539 |
| - |
540 | 487 | process.env.ESBUILD_BINARY_PATH = path.join(
|
541 | 488 | root,
|
542 | 489 | 'node_modules',
|
@@ -662,11 +609,6 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
|
662 | 609 | },
|
663 | 610 | };
|
664 | 611 |
|
665 |
| - writeCacheFile(cacheFile, JSON.stringify(resultObj)).catch(e => { |
666 |
| - // eslint-disable-next-line no-console |
667 |
| - console.warn(`Failed to write MDX cache: ${cacheFile}`, e); |
668 |
| - }); |
669 |
| - |
670 | 612 | return resultObj;
|
671 | 613 | }
|
672 | 614 |
|
|
0 commit comments