@@ -9,7 +9,7 @@ const path = require('path');
const pug = require('pug');
const pkg = require('../package.json');
const transform = require('acquit-require');
- const childProcess = require("child_process");
+ const childProcess = require('child_process');

// using "__dirname" and ".." to have a consistent CWD, this script should not be runnable, even when not being in the root of the project
// also a consistent root path so that it is easy to change later when the script should be moved
@@ -183,8 +183,8 @@ function parseVersion(str) {

  const match = versionReg.exec(str);

-   if (!!match) {
-     const parsed = [parseInt(match[1]), parseInt(match[2]), parseInt(match[3])]
+   if (match) {
+     const parsed = [parseInt(match[1]), parseInt(match[2]), parseInt(match[3])];

    // fallback just in case some number did not parse
    if (Number.isNaN(parsed[0]) || Number.isNaN(parsed[1]) || Number.isNaN(parsed[2])) {
@@ -195,7 +195,7 @@ function parseVersion(str) {
  }

  // special case, to not log a warning
-   if (str === "test") {
+   if (str === 'test') {
    return undefined;
  }

@@ -210,27 +210,27 @@ function parseVersion(str) {
function getVersions() {
  // get all tags from git
  // "trim" is used to remove the ending new-line
-   const res = childProcess.execSync("git tag").toString().trim();
+   const res = childProcess.execSync('git tag').toString().trim();

  filteredTags = res.split('\n')
    // map all gotten tags if they match the regular expression
-   .map(parseVersion)
+     .map(parseVersion)
    // filter out all null / undefined / falsy values
-   .filter(v => !!v)
+     .filter(v => !!v)
    // sort tags with latest (highest) first
-   .sort((a, b) => {
-     if (a[0] === b[0]) {
-       if (a[1] === b[1]) {
-         return b[2] - a[2];
+     .sort((a, b) => {
+       if (a[0] === b[0]) {
+         if (a[1] === b[1]) {
+           return b[2] - a[2];
+         }
+         return b[1] - a[1];
      }
-       return b[1] - a[1];
-     }
-     return b[0] - a[0];
-   });
+       return b[0] - a[0];
+     });

  if (filteredTags.length === 0) {
-     console.error("no tags found!");
-     filteredTags.push([0, 0, 0]);
+     console.error('no tags found!');
+     filteredTags.push([0, 0, 0]);
  }
}

@@ -269,7 +269,7 @@ function getLatestVersionOf(version) {
    foundVersion = [0, 0, 0];
  }

-   return { listed: stringifySemverNumber(foundVersion), path: stringifySemverNumber(foundVersion, true) };
+   return { listed: stringifySemverNumber(foundVersion), path: stringifySemverNumber(foundVersion, true) };
}

/**
@@ -281,11 +281,11 @@ function getCurrentVersion() {

  // i dont think this will ever happen, but just in case
  if (!pkg.version) {
-     console.log("no version from package?");
+     console.log('no version from package?');
    versionToUse = getLatestVersion();
  }

-   return { listed: versionToUse, path: stringifySemverNumber(parseVersion(versionToUse), true) };
+   return { listed: versionToUse, path: stringifySemverNumber(parseVersion(versionToUse), true) };
}

// execute function to get all tags from git
@@ -314,7 +314,7 @@ const versionObj = (() => {
      getLatestVersionOf(6)
    ]
  };
-   const versionedDeploy = !!process.env.DOCS_DEPLOY ? !(base.currentVersion.listed === base.latestVersion.listed) : false;
+   const versionedDeploy = process.env.DOCS_DEPLOY ? !(base.currentVersion.listed === base.latestVersion.listed) : false;

  const versionedPath = versionedDeploy ? `/docs/${base.currentVersion.path}` : '';

@@ -380,20 +380,20 @@ function mapURLs(block, currentUrl) {
  while ((match = mongooseComRegex.exec(block)) !== null) {
    // console.log("match", match);
    // cant just use "match.index" byitself, because of the extra "href=\"" condition, which is not factored in in "match.index"
-     let startIndex = match.index + match[0].length - match[1].length;
+     const startIndex = match.index + match[0].length - match[1].length;
    out += block.slice(lastIndex, startIndex);
    lastIndex = startIndex + match[1].length;

    // somewhat primitive gathering of the url, but should be enough for now
-     let fullUrl = /^\/[^"]+/.exec(block.slice(lastIndex - 1));
+     const fullUrl = /^\/[^"]+/.exec(block.slice(lastIndex - 1));

    let noPrefix = false;

    if (fullUrl) {
      // extra processing to only use "#otherId" instead of using full url for the same page
      // at least firefox does not make a difference between a full path and just "#", but it makes debugging paths easier
      if (fullUrl[0].startsWith(currentUrl)) {
-         let indexMatch = /#/.exec(fullUrl);
+         const indexMatch = /#/.exec(fullUrl);

        if (indexMatch) {
          lastIndex += indexMatch.index - 1;
@@ -404,10 +404,10 @@ function mapURLs(block, currentUrl) {

      if (!noPrefix) {
        // map all to the versioned-path, unless a explicit version is given
-         if (!versionedDocs.test(block.slice(lastIndex, lastIndex + 10))) {
-           out += versionObj.versionedPath + "/";
+         if (!versionedDocs.test(block.slice(lastIndex, lastIndex + 10))) {
+           out += versionObj.versionedPath + '/';
        } else {
-           out += "/";
+           out += '/';
        }
      }
    }
@@ -429,7 +429,7 @@ async function pugify(filename, options, isReload = false) {
  let newfile = undefined;
  options = options || {};
  options.package = pkg;
-   const isAPI = options.api && !filename.endsWith('docs/api.pug');
+   // const isAPI = options.api && !filename.endsWith('docs/api.pug');

  const _editLink = 'https://github.com/Automattic/mongoose/blob/master' +
    filename.replace(cwd, '');
@@ -443,7 +443,7 @@ async function pugify(filename, options, isReload = false) {
    if (isReload) {
      apiReq.parseFile(options.file);
      // overwrite original options because of reload
-       options = { ...options, ...apiReq.docs.get(options.file) };
+       options = { ...options, ...apiReq.docs.get(options.file) };
    }
    inputFile = path.resolve(cwd, 'docs/api_split.pug');
  }
@@ -453,7 +453,7 @@ async function pugify(filename, options, isReload = false) {
  if (options.acquit) {
    contents = transform(contents, getTests());

-     contents = contents.replaceAll(/^```acquit$/gmi, "```javascript");
+     contents = contents.replaceAll(/^```acquit$/gmi, '```javascript');
  }
  if (options.markdown) {
    const lines = contents.split('\n');
@@ -486,7 +486,7 @@ async function pugify(filename, options, isReload = false) {

  if (versionObj.versionedDeploy) {
    newfile = path.resolve(cwd, path.join('.', versionObj.versionedPath), path.relative(cwd, newfile));
-     await fs.promises.mkdir(path.dirname(newfile), { recursive:true });
+     await fs.promises.mkdir(path.dirname(newfile), { recursive: true });
  }

  options.outputUrl = newfile.replace(cwd, '');
@@ -497,11 +497,11 @@ async function pugify(filename, options, isReload = false) {

  let str = await pugRender(contents, options).catch(console.error);

-   if (typeof str !== "string") {
+   if (typeof str !== 'string') {
    return;
  }

-   str = mapURLs(str, '/' + path.relative(cwd, docsPath))
+   str = mapURLs(str, '/' + path.relative(cwd, docsPath));

  await fs.promises.writeFile(newfile, str).catch((err) => {
    console.error('could not write', err.stack);
@@ -534,20 +534,20 @@ function startWatch() {
    }
  });

-   fs.watchFile(path.join(cwd, 'docs/api_split.pug'), { interval: 1000 }, (cur, prev) => {
+   fs.watchFile(path.join(cwd, 'docs/api_split.pug'), { interval: 1000 }, (cur, prev) => {
    if (cur.mtime > prev.mtime) {
      console.log('docs/api_split.pug modified, reloading all api files');
-       Promise.all(files.filter(v => v.startsWith('docs/api')).map(async (file) => {
+       Promise.all(files.filter(v => v.startsWith('docs/api')).map(async (file) => {
        const filename = path.join(cwd, file);
        await pugify(filename, docsFilemap.fileMap[file], true);
      }));
    }
  });

-   fs.watchFile(path.join(cwd, 'docs/api_split.pug'), { interval: 1000 }, (cur, prev) => {
+   fs.watchFile(path.join(cwd, 'docs/api_split.pug'), { interval: 1000 }, (cur, prev) => {
    if (cur.mtime > prev.mtime) {
      console.log('docs/api_split.pug modified, reloading all api files');
-       Promise.all(files.filter(v => v.startsWith('docs/api')).map(async (file) => {
+       Promise.all(files.filter(v => v.startsWith('docs/api')).map(async (file) => {
        const filename = path.join(cwd, file);
        await pugify(filename, docsFilemap.fileMap[file]);
      }));
@@ -561,7 +561,7 @@ function startWatch() {
 * @param {Boolean} isReload Indicate this is a reload of all files
 */
async function pugifyAllFiles(noWatch, isReload = false) {
-   await Promise.all(files.map(async (file) => {
+   await Promise.all(files.map(async (file) => {
    const filename = path.join(cwd, file);
    await pugify(filename, docsFilemap.fileMap[file], isReload);
  }));
@@ -590,7 +590,7 @@ async function copyAllRequiredFiles() {
  await Promise.all(pathsToCopy.map(async v => {
    const resultPath = path.resolve(cwd, path.join('.', versionObj.versionedPath, v));
    await fsextra.copy(v, resultPath);
-   }))
+   }));
}

exports.default = pugify;
@@ -626,7 +626,10 @@ if (isMain) {
    }

    console.log('Done Processing');
-   })();
+   })().catch((err) => {
+     console.error('Website Generation failed:', err);
+     process.exit(-1);
+   });
}

// Modified from github-slugger