@@ -4,9 +4,6 @@ import fs from 'fs';
 import os from 'os';
 import path from 'path';
 import syntaxCheck from 'syntax-error';
-import { parse } from '@babel/parser';
-import babelGenerator from '@babel/generator';
-import babelTraverse from '@babel/traverse';
 import R from 'ramda';
 import workerpool from 'workerpool';
@@ -114,12 +111,11 @@ export class DataSchemaCompiler {
     const errorsReport = new ErrorReporter(null, [], this.errorReport);
     this.errorsReport = errorsReport;

-    const transpilationWorkerThreads = getEnv('transpilationWorkerThreads');
     const transpilationNative = getEnv('transpilationNative');
     const transpilationNativeThreadsCount = getThreadsCount();
     const { compilerId } = this;

-    if (!transpilationNative && transpilationWorkerThreads) {
+    if (!transpilationNative) {
       const wc = getEnv('transpilationWorkerThreadsCount');
       this.workerPool = workerpool.pool(
         path.join(__dirname, 'transpilers/transpiler_worker'),
@@ -132,32 +128,27 @@ export class DataSchemaCompiler {
      * @returns {Promise<*>}
      */
     const transpile = async (stage) => {
-      let cubeNames;
-      let cubeSymbols;
-      let transpilerNames;
       let results;

-      if (transpilationNative || transpilationWorkerThreads) {
-        cubeNames = Object.keys(this.cubeDictionary.byId);
-        // We need only cubes and all its member names for transpiling.
-        // Cubes doesn't change during transpiling, but are changed during compilation phase,
-        // so we can prepare them once for every phase.
-        // Communication between main and worker threads uses
-        // The structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
-        // which doesn't allow passing any function objects, so we need to sanitize the symbols.
-        // Communication with native backend also involves deserialization.
-        cubeSymbols = Object.fromEntries(
-          Object.entries(this.cubeSymbols.symbols)
-            .map(
-              ([key, value]) => [key, Object.fromEntries(
-                Object.keys(value).map((k) => [k, true]),
-              )],
-            ),
-        );
+      const cubeNames = Object.keys(this.cubeDictionary.byId);
+      // We need only cubes and all its member names for transpiling.
+      // Cubes doesn't change during transpiling, but are changed during compilation phase,
+      // so we can prepare them once for every phase.
+      // Communication between main and worker threads uses
+      // The structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
+      // which doesn't allow passing any function objects, so we need to sanitize the symbols.
+      // Communication with native backend also involves deserialization.
+      const cubeSymbols = Object.fromEntries(
+        Object.entries(this.cubeSymbols.symbols)
+          .map(
+            ([key, value]) => [key, Object.fromEntries(
+              Object.keys(value).map((k) => [k, true]),
+            )],
+          ),
+      );

-        // Transpilers are the same for all files within phase.
-        transpilerNames = this.transpilers.map(t => t.constructor.name);
-      }
+      // Transpilers are the same for all files within phase.
+      const transpilerNames = this.transpilers.map(t => t.constructor.name);

       if (transpilationNative) {
         // Warming up swc compiler cache
@@ -192,10 +183,8 @@ export class DataSchemaCompiler {
         }

         results = (await Promise.all([...nonJsFilesTasks, ...JsFilesTasks])).flat();
-      } else if (transpilationWorkerThreads) {
-        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
       } else {
-        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, {})));
+        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
       }

       return results.filter(f => !!f);
@@ -225,7 +214,7 @@ export class DataSchemaCompiler {
             errorsReport,
             { cubeNames: [], cubeSymbols: {}, transpilerNames: [], contextSymbols: {}, compilerId: this.compilerId, stage: 0 }
           );
-        } else if (transpilationWorkerThreads && this.workerPool) {
+        } else if (this.workerPool) {
           this.workerPool.terminate();
         }
@@ -334,7 +323,7 @@ export class DataSchemaCompiler {
         errorsReport.exitFile();

         return { ...file, content: res[0].code };
-      } else if (getEnv('transpilationWorkerThreads')) {
+      } else {
         const data = {
           fileName: file.fileName,
           content: file.content,
@@ -348,24 +337,6 @@ export class DataSchemaCompiler {
         errorsReport.addWarnings(res.warnings);

         return { ...file, content: res.content };
-      } else {
-        const ast = parse(
-          file.content,
-          {
-            sourceFilename: file.fileName,
-            sourceType: 'module',
-            plugins: ['objectRestSpread'],
-          },
-        );
-
-        errorsReport.inFile(file);
-        this.transpilers.forEach((t) => {
-          babelTraverse(ast, t.traverseObject(errorsReport));
-        });
-        errorsReport.exitFile();
-
-        const content = babelGenerator(ast, {}, file.content).code;
-        return { ...file, content };
       }
     } catch (e) {
       if (e.toString().indexOf('SyntaxError') !== -1) {
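For readers outside the PR context, below is a minimal standalone sketch of the symbol sanitization described in the comments above: cube symbols are reduced to plain maps of member names so they survive the structured clone algorithm used for worker-thread messaging. The `symbols` fixture is a hypothetical stand-in for `this.cubeSymbols.symbols`, not the actual Cube.js structure, and this code is not part of the diff itself.

// Illustrative sketch only (not part of the diff above).
// The structured clone algorithm cannot serialize functions, so only cube and
// member names are kept; every member value is replaced with `true`.
const symbols = {
  // hypothetical cube whose members are backed by functions
  orders: { count: () => ({ type: 'count' }), status: { sql: () => 'status' } },
};

const cubeSymbols = Object.fromEntries(
  Object.entries(symbols).map(
    ([cubeName, members]) => [cubeName, Object.fromEntries(
      Object.keys(members).map((member) => [member, true]),
    )],
  ),
);

console.log(cubeSymbols); // { orders: { count: true, status: true } }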