Skip to content

Commit 02b20c8

Browse files
Merge pull request #72 from AhmedBasem20/optimize-bandwidth
Optimize bandwidth
2 parents a55aec3 + 469e73f commit 02b20c8

File tree

4 files changed

+72
-11
lines changed

4 files changed

+72
-11
lines changed

.github/workflows/website.yml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,12 @@ jobs:
4646
with:
4747
name: 'Data'
4848

49+
- name: 'Filter and compress results file.'
50+
run: python utilities/reduce_output_size.py test_output.csv test_output.csv.gz
51+
4952
- name: move data to the dashboard folder
5053
run: |
51-
mv test_output.csv website/dashboard
54+
mv test_output.csv.gz website/dashboard
5255
5356
- name: Build documentation
5457
run: |

utilities/reduce_output_size.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
import os
2+
import gzip
3+
import csv
4+
import sys
5+
6+
def reduce_output_file_size(input_file: str, output_file: str):
    """
    Simplify the data generated by the analysis pipeline by retaining only
    the essential information required for the frontend.

    Drops every column whose name starts with ``bval_`` and rounds the
    remaining relevant numeric columns to 4 decimal places, writing the
    result as a gzip-compressed CSV.

    Parameters
    ----------
    input_file : str
        Path to the CSV file produced by the analysis pipeline.
    output_file : str
        Path of the gzip-compressed CSV file to create.
    """
    if not os.path.exists(input_file):
        print(f"File '{input_file}' not found.")
        return

    # newline='' is required by the csv module so it can handle line
    # endings itself (prevents blank rows / mangled newlines on Windows).
    with open(input_file, 'r', newline='') as infile, \
            gzip.open(output_file, 'wt', newline='') as outfile:
        reader = csv.DictReader(infile)

        # Drop b_values columns from the header.
        fieldnames = [field for field in reader.fieldnames if not field.startswith('bval_')]
        writer = csv.DictWriter(outfile, fieldnames=fieldnames)
        writer.writeheader()

        columns_to_round = ['f', 'Dp', 'D', 'f_fitted', 'Dp_fitted', 'D_fitted']

        for row in reader:
            # Delete columns starting with 'bval_'
            for key in list(row.keys()):
                if key.startswith('bval_'):
                    del row[key]

            # Round values in the remaining relevant columns. Blank cells
            # are passed through unchanged — float('') would raise
            # ValueError and abort the whole conversion.
            for column in columns_to_round:
                if column in row and row[column] not in (None, ''):
                    row[column] = round(float(row[column]), 4)
            writer.writerow(row)
35+
36+
if __name__ == '__main__':
    # CLI entry point: python reduce_output_size.py <input_file> <output_file>
    if len(sys.argv) != 3:
        print("Usage: python reduce_output_size.py <input_file> <output_file>")
        sys.exit(1)

    # argv[0] is the script name; the two positional arguments follow.
    _, src_path, dst_path = sys.argv
    reduce_output_file_size(src_path, dst_path)

website/dashboard/index.html

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
<script src="https://cdnjs.cloudflare.com/ajax/libs/jszip/3.7.1/jszip.min.js"></script>
99
<script src="https://cdn.plot.ly/plotly-2.30.0.min.js"></script>
1010
<script src="https://cdnjs.cloudflare.com/ajax/libs/PapaParse/5.3.0/papaparse.min.js"></script>
11+
<script src="https://cdn.jsdelivr.net/npm/pako@1.0.11/dist/pako.min.js"></script>
1112
<script src="index.js"></script>
1213
<link rel="stylesheet" href="index.css">
1314
</head>

website/dashboard/index.js

Lines changed: 24 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -205,17 +205,31 @@ document.addEventListener('DOMContentLoaded', function() {
205205

206206
showLoading();
207207

208-
Papa.parse('test_output.csv', {
209-
download: true,
210-
header: true,
211-
complete: results => {
212-
data = results;
213-
hideLoading();
214-
populateOptions(data);
215-
drawBoxPlot();
216-
drawRegionBoxPlot();
217-
208+
fetch('test_output.csv.gz')
209+
.then(response => {
210+
if (!response.ok) {
211+
throw new Error('Network response was not ok');
218212
}
213+
return response.arrayBuffer();
214+
})
215+
.then(buffer => {
216+
// Use pako to decompress the data
217+
var decompressed = pako.inflate(new Uint8Array(buffer), { to: 'string' });
218+
// Now use Papa Parse to parse the decompressed CSV data
219+
Papa.parse(decompressed, {
220+
header: true,
221+
complete: results => {
222+
console.log(results);
223+
data = results;
224+
hideLoading();
225+
populateOptions(data);
226+
drawBoxPlot();
227+
drawRegionBoxPlot();
228+
}
229+
});
230+
})
231+
.catch(error => {
232+
console.error('There has been a problem with your fetch operation:', error);
219233
});
220234

221235
function populateOptions(data) {

0 commit comments

Comments
 (0)