
Commit b9c5d3a

chore(schema-compiler)!: Remove old blocking model compilation and make TRANSPILATION_WORKER_THREADS new default
1 parent bb6d655 commit b9c5d3a

4 files changed: +25, -76 lines changed

.github/workflows/push.yml

Lines changed: 3 additions & 6 deletions
@@ -63,11 +63,8 @@ jobs:
         node-version: [22.x]
         # Don't forget to update build-native-release
         python-version: [3.11]
-        transpile-worker-threads: [false, true]
       fail-fast: false

-    env:
-      CUBEJS_TRANSPILATION_WORKER_THREADS: ${{ matrix.transpile-worker-threads }}
     steps:
       - id: get-tag-out
         run: echo "$OUT"
@@ -121,16 +118,16 @@ jobs:
       - name: Lerna test
         run: yarn lerna run --concurrency 1 --stream --no-prefix unit
       - name: Fix lcov paths
-        if: (matrix.node-version == '22.x' && matrix.transpile-worker-threads == true)
+        if: (matrix.node-version == '22.x')
         run: |
           ./.github/actions/codecov-fix.sh
       - name: Combine all fixed LCOV files
-        if: (matrix.node-version == '22.x' && matrix.transpile-worker-threads == true)
+        if: (matrix.node-version == '22.x')
         run: |
           echo "" > ./combined-unit.lcov
           find ./packages -type f -name lcov.fixed.info -exec cat {} + >> ./combined-unit.lcov || true
       - name: Upload coverage artifact
-        if: (matrix.node-version == '22.x' && matrix.transpile-worker-threads == true)
+        if: (matrix.node-version == '22.x')
         uses: actions/upload-artifact@v4
         with:
           name: coverage-unit

docs/pages/product/configuration/reference/environment-variables.mdx

Lines changed: 0 additions & 15 deletions
@@ -1318,21 +1318,6 @@ learn more.
 | --------------- | ---------------------- | --------------------- |
 | A valid number  | 86400                  | 86400                 |

-## `CUBEJS_TRANSPILATION_WORKER_THREADS`
-
-If `true`, optimizes data model compilation by running critical parts of the
-code in worker threads.
-
-| Possible Values | Default in Development | Default in Production |
-| --------------- | ---------------------- | --------------------- |
-| `true`, `false` | `false`                | `false`               |
-
-<ReferenceBox>
-
-See [this issue](https://github.com/cube-js/cube/issues/9285) for details.
-
-</ReferenceBox>
-
 ## `CUBEJS_WEB_SOCKETS`

 If `true`, then use WebSocket for data fetching.

packages/cubejs-backend-shared/src/env.ts

Lines changed: 0 additions & 4 deletions
@@ -223,16 +223,12 @@ const variables: Record<string, (...args: any) => any> = {
   nativeOrchestrator: () => get('CUBEJS_TESSERACT_ORCHESTRATOR')
     .default('true')
     .asBoolStrict(),
-  transpilationWorkerThreads: () => get('CUBEJS_TRANSPILATION_WORKER_THREADS')
-    .default('false')
-    .asBoolStrict(),
   allowNonStrictDateRangeMatching: () => get('CUBEJS_PRE_AGGREGATIONS_ALLOW_NON_STRICT_DATE_RANGE_MATCH')
     .default('true')
     .asBoolStrict(),
   transpilationWorkerThreadsCount: () => get('CUBEJS_TRANSPILATION_WORKER_THREADS_COUNT')
     .default('0')
     .asInt(),
-  // This one takes precedence over CUBEJS_TRANSPILATION_WORKER_THREADS
   transpilationNative: () => get('CUBEJS_TRANSPILATION_NATIVE')
     .default('false')
     .asBoolStrict(),
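With the `transpilationWorkerThreads` getter removed, the transpilation knobs that remain are `CUBEJS_TRANSPILATION_WORKER_THREADS_COUNT` and `CUBEJS_TRANSPILATION_NATIVE`. As a rough sketch of how these chained getters behave, assuming the `env-var` package that this `get(...).default(...).asInt()` style appears to come from (the standalone helpers below are illustrative, not Cube's actual exports):

import { get } from 'env-var';

// Worker-thread count for data model transpilation; defaults to '0'
// exactly as in the hunk above.
const transpilationWorkerThreadsCount = (): number =>
  get('CUBEJS_TRANSPILATION_WORKER_THREADS_COUNT')
    .default('0')
    .asInt();

// Opt-in native transpilation, which bypasses the JS worker pool entirely.
const transpilationNative = (): boolean =>
  get('CUBEJS_TRANSPILATION_NATIVE')
    .default('false')
    .asBoolStrict();

// Example: CUBEJS_TRANSPILATION_WORKER_THREADS_COUNT=4 -> 4; unset -> 0.
console.log(transpilationWorkerThreadsCount(), transpilationNative());

Unset variables fall back to the string default before parsing, so the count resolves to 0 and the native flag to false unless explicitly overridden.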

packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.js

Lines changed: 22 additions & 51 deletions
@@ -4,9 +4,6 @@ import fs from 'fs';
 import os from 'os';
 import path from 'path';
 import syntaxCheck from 'syntax-error';
-import { parse } from '@babel/parser';
-import babelGenerator from '@babel/generator';
-import babelTraverse from '@babel/traverse';
 import R from 'ramda';
 import workerpool from 'workerpool';


@@ -114,12 +111,11 @@ export class DataSchemaCompiler {
     const errorsReport = new ErrorReporter(null, [], this.errorReport);
     this.errorsReport = errorsReport;

-    const transpilationWorkerThreads = getEnv('transpilationWorkerThreads');
     const transpilationNative = getEnv('transpilationNative');
     const transpilationNativeThreadsCount = getThreadsCount();
     const { compilerId } = this;

-    if (!transpilationNative && transpilationWorkerThreads) {
+    if (!transpilationNative) {
       const wc = getEnv('transpilationWorkerThreadsCount');
       this.workerPool = workerpool.pool(
         path.join(__dirname, 'transpilers/transpiler_worker'),
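The worker pool is now created whenever native transpilation is disabled, rather than only when the removed flag was set to true. For readers unfamiliar with the workerpool package, here is a minimal, self-contained sketch of the pattern; the worker script name and task name are hypothetical, not the actual transpiler_worker interface:

import path from 'path';
import workerpool from 'workerpool';

async function main(): Promise<void> {
  // Spin up a pool of worker threads; maxWorkers caps the thread count
  // (Cube derives its count from CUBEJS_TRANSPILATION_WORKER_THREADS_COUNT).
  const pool = workerpool.pool(path.join(__dirname, 'my-worker.js'), {
    maxWorkers: 4,
  });

  try {
    // exec() posts a task to a free worker and resolves with its result.
    // 'transform' must be a function the worker script has registered.
    const result = await pool.exec('transform', [{ fileName: 'orders.js', content: '...' }]);
    console.log(result);
  } finally {
    // Release the threads once compilation is done.
    await pool.terminate();
  }
}

main().catch(console.error);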
@@ -132,32 +128,27 @@
      * @returns {Promise<*>}
      */
     const transpile = async (stage) => {
-      let cubeNames;
-      let cubeSymbols;
-      let transpilerNames;
       let results;

-      if (transpilationNative || transpilationWorkerThreads) {
-        cubeNames = Object.keys(this.cubeDictionary.byId);
-        // We need only cubes and all its member names for transpiling.
-        // Cubes doesn't change during transpiling, but are changed during compilation phase,
-        // so we can prepare them once for every phase.
-        // Communication between main and worker threads uses
-        // The structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
-        // which doesn't allow passing any function objects, so we need to sanitize the symbols.
-        // Communication with native backend also involves deserialization.
-        cubeSymbols = Object.fromEntries(
-          Object.entries(this.cubeSymbols.symbols)
-            .map(
-              ([key, value]) => [key, Object.fromEntries(
-                Object.keys(value).map((k) => [k, true]),
-              )],
-            ),
-        );
+      const cubeNames = Object.keys(this.cubeDictionary.byId);
+      // We need only cubes and all its member names for transpiling.
+      // Cubes doesn't change during transpiling, but are changed during compilation phase,
+      // so we can prepare them once for every phase.
+      // Communication between main and worker threads uses
+      // The structured clone algorithm (@see https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)
+      // which doesn't allow passing any function objects, so we need to sanitize the symbols.
+      // Communication with native backend also involves deserialization.
+      const cubeSymbols = Object.fromEntries(
+        Object.entries(this.cubeSymbols.symbols)
+          .map(
+            ([key, value]) => [key, Object.fromEntries(
+              Object.keys(value).map((k) => [k, true]),
+            )],
+          ),
+      );

-        // Transpilers are the same for all files within phase.
-        transpilerNames = this.transpilers.map(t => t.constructor.name);
-      }
+      // Transpilers are the same for all files within phase.
+      const transpilerNames = this.transpilers.map(t => t.constructor.name);

       if (transpilationNative) {
         // Warming up swc compiler cache
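The retained comment block explains why cube symbols are flattened before they cross the thread (or native) boundary: the structured clone algorithm cannot copy functions, so only member names are kept. A small standalone sketch of that sanitization step, using made-up cube symbols for illustration:

// Hypothetical shape of compiler symbols: member names mapped to functions,
// which structured clone (postMessage) refuses to copy.
const symbols: Record<string, Record<string, unknown>> = {
  orders: { count: () => 'COUNT(*)', status: () => 'status' },
  users: { city: () => 'city' },
};

// Keep only the member names, exactly as the hunk above does:
// every value becomes `true`, so the object is safely cloneable.
const sanitized = Object.fromEntries(
  Object.entries(symbols).map(
    ([cube, members]) => [cube, Object.fromEntries(
      Object.keys(members).map((member) => [member, true]),
    )],
  ),
);

console.log(sanitized);
// { orders: { count: true, status: true }, users: { city: true } }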
@@ -192,10 +183,8 @@
         }

         results = (await Promise.all([...nonJsFilesTasks, ...JsFilesTasks])).flat();
-      } else if (transpilationWorkerThreads) {
-        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
       } else {
-        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, {})));
+        results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
       }

       return results.filter(f => !!f);
@@ -225,7 +214,7 @@
         errorsReport,
         { cubeNames: [], cubeSymbols: {}, transpilerNames: [], contextSymbols: {}, compilerId: this.compilerId, stage: 0 }
       );
-    } else if (transpilationWorkerThreads && this.workerPool) {
+    } else if (this.workerPool) {
       this.workerPool.terminate();
     }

@@ -334,7 +323,7 @@
       errorsReport.exitFile();

       return { ...file, content: res[0].code };
-    } else if (getEnv('transpilationWorkerThreads')) {
+    } else {
       const data = {
         fileName: file.fileName,
         content: file.content,
@@ -348,24 +337,6 @@
       errorsReport.addWarnings(res.warnings);

       return { ...file, content: res.content };
-    } else {
-      const ast = parse(
-        file.content,
-        {
-          sourceFilename: file.fileName,
-          sourceType: 'module',
-          plugins: ['objectRestSpread'],
-        },
-      );
-
-      errorsReport.inFile(file);
-      this.transpilers.forEach((t) => {
-        babelTraverse(ast, t.traverseObject(errorsReport));
-      });
-      errorsReport.exitFile();
-
-      const content = babelGenerator(ast, {}, file.content).code;
-      return { ...file, content };
     }
   } catch (e) {
     if (e.toString().indexOf('SyntaxError') !== -1) {
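For context, the deleted else branch was the old blocking path that transpiled each data model file in-process with Babel on the main thread. A condensed sketch of that parse, traverse, and generate pipeline, with a trivial logging visitor standing in for Cube's transpilers (the sample source and visitor are illustrative):

import { parse } from '@babel/parser';
import traverse from '@babel/traverse';
import generate from '@babel/generator';

const source = "cube('orders', { sql_table: 'orders' });";

// 1. Parse the data model file into an AST (the removed code also tagged the
//    source file name and enabled the objectRestSpread plugin).
const ast = parse(source, {
  sourceFilename: 'orders.js',
  sourceType: 'module',
});

// 2. Walk the AST with a visitor. The old branch ran one visitor per
//    registered transpiler; this one just logs call expressions.
traverse(ast, {
  CallExpression(nodePath) {
    console.log('call:', generate(nodePath.node).code.slice(0, 40));
  },
});

// 3. Print the (possibly rewritten) source back out, as the old branch did
//    with babelGenerator(ast, {}, file.content).code.
const { code } = generate(ast, {}, source);
console.log(code);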
