
Commit c227777

try build
1 parent f9f0af4 commit c227777

20 files changed: 969 additions, 4 deletions
Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
name: Download backend
description: Download backend for the specified architecture

inputs:
  arch:
    description: Architecture arm64 or x64
    required: false
    default: 'x64'

runs:
  using: 'composite'
  steps:
    - name: Download backend
      run: yarn download:backend ${{ inputs.arch }}
      shell: bash
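
A minimal sketch of a workflow that could consume this composite action. The workflow name, the trigger, and the action path ./.github/actions/download-backend are assumptions for illustration, not taken from this commit.

name: Build
on: push

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # hypothetical path to the action.yml added above
      - name: Download backend
        uses: ./.github/actions/download-backend
        with:
          arch: 'arm64'   # overrides the 'x64' default
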
Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
name: Install all libraries action
description: Install all libraries and dependencies

runs:
  using: 'composite'
  steps:
    # OS libraries
    - name: Setup Node
      uses: actions/setup-node@v4.0.4
      with:
        node-version: '20.15'
        # disable cache for Windows
        # https://github.com/actions/setup-node/issues/975
        cache: ${{ runner.os != 'Windows' && 'yarn' || '' }}
        cache-dependency-path: ${{ runner.os != 'Windows' && '**/yarn.lock' || '' }}

    # - name: Setup Python
    #   uses: actions/setup-python@v5
    #   with:
    #     python-version: '3.11'

    - name: Install TypeScript
      run: yarn global add typescript
      shell: bash

    - name: Install dependencies for root package.json
      run: yarn install --frozen-lockfile
      # run: yarn install
      shell: bash
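
Within a job like the one sketched above, this install action would be referenced without inputs; the action path ./.github/actions/install-all-libs is again an assumption.

      # hypothetical steps fragment; the action path is an assumption
      - uses: actions/checkout@v4
      - name: Install all libraries and dependencies
        uses: ./.github/actions/install-all-libs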

.github/build/sum_sha256.sh

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
#!/bin/bash
set -e

find ./release -type f -name '*.tar.gz' -execdir sh -c 'sha256sum "$1" > "$1.sha256"' _ {} \;
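
A sketch of release-job steps that could call this script and then verify its output; the step wiring is an assumption, and only the script path and the ./release layout come from the script itself.

      # hypothetical steps inside a release job
      - name: Generate SHA-256 checksums for release archives
        run: bash .github/build/sum_sha256.sh
      - name: Verify checksums
        run: cd release && sha256sum -c -- *.tar.gz.sha256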

.github/deps-licenses-report.js

Lines changed: 250 additions & 0 deletions
@@ -0,0 +1,250 @@
const fs = require('fs');
const { join } = require('path');
const { last, set } = require('lodash');
const { google } = require('googleapis');
const { exec } = require('child_process');
const csvParser = require('csv-parser');
const { stringify } = require('csv-stringify');

const licenseFolderName = 'licenses';
const spreadsheetId = process.env.SPREADSHEET_ID;
const summaryFilePath = `./${licenseFolderName}/summary.csv`;
const allData = [];
let csvFiles = [];

// Main function
async function main() {
  const folderPath = './';
  const packageJsons = findPackageJsonFiles(folderPath); // Find all package.json files in the given folder

  console.log('All package.json files were found:', packageJsons);

  // Create the folder if it doesn't exist
  if (!fs.existsSync(licenseFolderName)) {
    fs.mkdirSync(licenseFolderName);
  }

  try {
    await Promise.all(packageJsons.map(runLicenseCheck));
    console.log('All csv files were generated');
    await generateSummary();
    await sendLicensesToGoogleSheet();
  } catch (error) {
    console.error('An error occurred:', error);
    process.exit(1);
  }
}

main();

// Function to find all package.json files in a given folder
function findPackageJsonFiles(folderPath) {
  const packageJsonPaths = [];
  const packageJsonName = 'package.json';
  const excludeFolders = ['dist', 'node_modules', 'test-extensions'];

  // Recursive function to search for package.json files
  function searchForPackageJson(currentPath) {
    const files = fs.readdirSync(currentPath);

    for (const file of files) {
      const filePath = join(currentPath, file);
      const stats = fs.statSync(filePath);

      if (stats.isDirectory() && !excludeFolders.includes(file)) {
        searchForPackageJson(filePath);
      } else if (file === packageJsonName) {
        packageJsonPaths.push(`./${filePath.slice(0, -packageJsonName.length - 1)}`);
      }
    }
  }

  searchForPackageJson(folderPath);
  return packageJsonPaths;
}

// Function to run license check for a given package.json file
async function runLicenseCheck(path) {
  const name = last(path.split('/')) || 'vscode';

  const COMMANDS = [
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_prod.csv --production`,
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_dev.csv --development`,
  ];

  return await Promise.all(COMMANDS.map((command) =>
    new Promise((resolve, reject) => {
      exec(command, (error, stdout, stderr) => {
        if (error) {
          console.error(`Failed command: ${command}, error:`, stderr);
          reject(error);
        }
        resolve();
      });
    })
  ));
}

async function sendLicensesToGoogleSheet() {
  try {
    const serviceAccountKey = JSON.parse(fs.readFileSync('./gasKey.json', 'utf-8'));

    // Set up JWT client
    const jwtClient = new google.auth.JWT(
      serviceAccountKey.client_email,
      null,
      serviceAccountKey.private_key,
      ['https://www.googleapis.com/auth/spreadsheets']
    );

    const sheets = google.sheets('v4');

    // Read all .csv files in the 'licenses' folder
    csvFiles.forEach((csvFile) => {
      // Extract sheet name from file name
      const sheetName = csvFile.replace('.csv', '').replaceAll('_', ' ');

      const data = [];
      fs.createReadStream(`./${licenseFolderName}/${csvFile}`)
        .pipe(csvParser({ headers: false }))
        .on('data', (row) => {
          data.push(Object.values(row));
        })
        .on('end', async () => {
          const resource = { values: data };

          try {
            const response = await sheets.spreadsheets.get({
              auth: jwtClient,
              spreadsheetId,
            });

            const sheet = response.data.sheets.find((sheet) => sheet.properties.title === sheetName);
            if (sheet) {
              // Clear the existing contents of the sheet
              await sheets.spreadsheets.values.clear({
                auth: jwtClient,
                spreadsheetId,
                range: `${sheetName}!A1:Z`, // Assuming Z is the last column
              });
            } else {
              // Create the sheet if it doesn't exist
              await sheets.spreadsheets.batchUpdate({
                auth: jwtClient,
                spreadsheetId,
                resource: set({}, 'requests[0].addSheet.properties.title', sheetName),
              });
            }
          } catch (error) {
            console.error(`Error checking/creating sheet for ${sheetName}:`, error);
          }

          try {
            await sheets.spreadsheets.values.batchUpdate({
              auth: jwtClient,
              spreadsheetId,
              resource: {
                valueInputOption: 'RAW',
                data: [
                  {
                    range: `${sheetName}!A1`, // Use the sheet name as the range and start from A1
                    majorDimension: 'ROWS',
                    values: data,
                  },
                ],
              },
            });

            console.log(`CSV data has been inserted into ${sheetName} sheet.`);
          } catch (err) {
            console.error(`Error inserting data for ${sheetName}:`, err);
          }
        });
    });
  } catch (error) {
    console.error('Error loading service account key:', error);
  }
}

// Function to read and process each CSV file
const processCSVFile = (file) => {
  return new Promise((resolve, reject) => {
    const parser = csvParser({ columns: true, trim: true });
    const input = fs.createReadStream(`./${licenseFolderName}/${file}`);

    parser.on('data', (record) => {
      allData.push(record);
    });

    parser.on('end', () => {
      resolve();
    });

    parser.on('error', (err) => {
      reject(err);
    });

    input.pipe(parser);
  });
};

// Process and aggregate license data
const processLicenseData = () => {
  const licenseCountMap = {};
  for (const record of allData) {
    const license = record.license;
    licenseCountMap[license] = (licenseCountMap[license] || 0) + 1;
  }
  return licenseCountMap;
};

// Create summary CSV data
const createSummaryData = (licenseCountMap) => {
  const summaryData = [['License', 'Count']];
  for (const license in licenseCountMap) {
    summaryData.push([license, licenseCountMap[license]]);
  }
  return summaryData;
};

// Write summary CSV file
const writeSummaryCSV = async (summaryData) => {
  try {
    const summaryCsvString = await stringifyPromise(summaryData);
    fs.writeFileSync(summaryFilePath, summaryCsvString);
    csvFiles.push(last(summaryFilePath.split('/')));
    console.log(`Summary CSV saved as ${summaryFilePath}`);
  } catch (err) {
    console.error(`Error: ${err}`);
  }
};

// Stringify as a promise
const stringifyPromise = (data) => {
  return new Promise((resolve, reject) => {
    stringify(data, (err, csvString) => {
      if (err) {
        reject(err);
      } else {
        resolve(csvString);
      }
    });
  });
};

async function generateSummary() {
  csvFiles = fs.readdirSync(licenseFolderName).filter(file => file.endsWith('.csv')).sort();

  for (const file of csvFiles) {
    try {
      await processCSVFile(file);
    } catch (err) {
      console.error(`Error processing ${file}: ${err}`);
    }
  }

  const licenseCountMap = processLicenseData();
  const summaryData = createSummaryData(licenseCountMap);

  await writeSummaryCSV(summaryData);
}
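
A sketch of how this report script could be wired into a CI step. The secret names and the global license-checker install are assumptions; the script itself only expects SPREADSHEET_ID in the environment and a service-account key at ./gasKey.json.

      # hypothetical workflow step; secret names are assumptions
      - name: Generate dependency licenses report
        env:
          SPREADSHEET_ID: ${{ secrets.LICENSES_SPREADSHEET_ID }}
          GAS_KEY: ${{ secrets.GAS_SERVICE_ACCOUNT_KEY }}
        run: |
          npm install -g license-checker
          echo "$GAS_KEY" > gasKey.json
          node .github/deps-licenses-report.js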

.github/e2e/e2e-results.js

Lines changed: 57 additions & 0 deletions
@@ -0,0 +1,57 @@
const fs = require('fs');

let parallelNodeInfo = '';
const totalNodes = parseInt(process.env.CIRCLE_NODE_TOTAL, 10);
if (totalNodes > 1) {
  parallelNodeInfo = ` (node: ${parseInt(process.env.CIRCLE_NODE_INDEX, 10) + 1}/${totalNodes})`;
}

const file = fs.readdirSync('tests/e2e/mochawesome-report').find(file => file.endsWith('-setup-report.json'));
const appBuildType = process.env.APP_BUILD_TYPE || 'VSCode (Linux)';
const results = {
  message: {
    text: `*E2ETest - ${appBuildType}${parallelNodeInfo}* (Branch: *${process.env.CIRCLE_BRANCH}*)` +
      `\n<https://app.circleci.com/pipelines/workflows/${process.env.CIRCLE_WORKFLOW_ID}|View on CircleCI>`,
    attachments: [],
  },
};

const result = JSON.parse(fs.readFileSync(`tests/e2e/mochawesome-report/${file}`, 'utf-8'));
const testRunResult = {
  color: '#36a64f',
  title: `Started at: ${result.stats.start}`,
  text: `Executed ${result.stats.tests} tests in ${(new Date(result.stats.end) - new Date(result.stats.start)) / 1000}s`,
  fields: [
    {
      title: 'Passed',
      value: result.stats.passes,
      short: true,
    },
    {
      title: 'Skipped',
      value: result.stats.skipped,
      short: true,
    },
  ],
};
const failed = result.stats.failures;
if (failed) {
  results.passed = false;
  testRunResult.color = '#cc0000';
  testRunResult.fields.push({
    title: 'Failed',
    value: failed,
    short: true,
  });
}

results.message.attachments.push(testRunResult);

if (results.passed === false) {
  results.message.text = '<!here> ' + results.message.text;
}

fs.writeFileSync('e2e.report.json', JSON.stringify({
  channel: process.env.SLACK_TEST_REPORT_CHANNEL,
  ...results.message,
}));
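
A sketch of a CircleCI step that could run this script after the e2e suite; the script reads CIRCLE_* variables that CircleCI sets automatically, while the step name and the when: always policy are assumptions.

      # hypothetical CircleCI config fragment
      - run:
          name: Build Slack e2e report payload
          command: node .github/e2e/e2e-results.js
          when: always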

.github/e2e/test.app.sh

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
#!/bin/bash
set -e

# Create folder before tests run to prevent permissions issues
mkdir -p tests/e2e/remote

# Run RTE (Redis Test Environment)
docker compose -f tests/e2e/rte.docker-compose.yml build
docker compose -f tests/e2e/rte.docker-compose.yml up --force-recreate -d -V
./tests/e2e/wait-for-redis.sh localhost 12000

# Run tests
RI_SOCKETS_CORS=true \
yarn --cwd tests/e2e dotenv -e .ci.env yarn --cwd tests/e2e test:ci
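
A sketch of a CI step that could drive this script; the step wiring is an assumption, and it presumes Docker and Yarn are available on the executor.

      # hypothetical CircleCI config fragment
      - run:
          name: Run e2e tests against a local Redis Test Environment
          command: bash .github/e2e/test.app.sh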
