@@ -10,6 +10,7 @@
 import sys
 import time
 import unittest
+import json
 import zlib
 from pathlib import Path
 from typing import List
@@ -56,6 +57,9 @@


 class Benchmarker():
+  # Whether to record statistics. Set by SizeBenchmarker.
+  record_stats = False
+
   # called when we init the object, which is during startup, even if we are
   # not running benchmarks
   def __init__(self, name):
@@ -112,14 +116,39 @@ def display(self, baseline=None):

     # size

-    size = sum(os.path.getsize(f) for f in self.get_output_files())
-    gzip_size = sum(len(zlib.compress(read_binary(f))) for f in self.get_output_files())
+    recorded_stats = []
+
+    def add_stat(name, size, gzip_size):
+      recorded_stats.append({
+        'value': name,
+        'measurement': size,
+      })
+      recorded_stats.append({
+        'value': name + ' (gzipped)',
+        'measurement': gzip_size,
+      })
+
+    total_size = 0
+    total_gzip_size = 0

-    print(' size: %8s, compressed: %8s' % (size, gzip_size), end=' ')
+    for file in self.get_output_files():
+      size = os.path.getsize(file)
+      gzip_size = len(zlib.compress(read_binary(file)))
+      if self.record_stats:
+        add_stat(utils.removeprefix(os.path.basename(file), 'size_'), size, gzip_size)
+      total_size += size
+      total_gzip_size += gzip_size
+
+    if self.record_stats:
+      add_stat('total', total_size, total_gzip_size)
+
+    print(' size: %8s, compressed: %8s' % (total_size, total_gzip_size), end=' ')
     if self.get_size_text():
       print(' (' + self.get_size_text() + ')', end=' ')
     print()

+    return recorded_stats
+
   def get_size_text(self):
     return ''

@@ -240,6 +269,21 @@ def get_output_files(self):
     return ret


+# This benchmarker makes a test benchmark build with Emscripten and records
+# the output file sizes in out/test/stats.json. The file format is specified
+# at https://skia.googlesource.com/buildbot/+/refs/heads/main/perf/FORMAT.md
+# Running the benchmark itself is skipped.
+class SizeBenchmarker(EmscriptenBenchmarker):
+  record_stats = True
+
+  def __init__(self, name):
+    # do not set an engine, as we will not run the code
+    super().__init__(name, engine=None)
+
+  # we will not actually run the benchmarks
+  run = None
+
+
 CHEERP_BIN = '/opt/cheerp/bin/'

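Note: together with the tearDownClass and add_stats changes later in this
diff, SizeBenchmarker's output follows the Skia Perf format linked above. A
minimal sketch of a resulting stats.json, using illustrative test/file names
and byte counts rather than values from a real run:

  {
    "version": 1,
    "git_hash": "",
    "results": [
      {
        "key": {"test": "primes", "units": "bytes"},
        "measurements": {
          "stats": [
            {"value": "primes.js", "measurement": 12345},
            {"value": "primes.js (gzipped)", "measurement": 4096},
            {"value": "total", "measurement": 12345},
            {"value": "total (gzipped)", "measurement": 4096}
          ]
        }
      }
    ]
  }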
@@ -317,6 +361,7 @@ def get_output_files(self):
 named_benchmarkers = {
   'clang': NativeBenchmarker('clang', [CLANG_CC], [CLANG_CXX]),
   'gcc': NativeBenchmarker('gcc', ['gcc', '-no-pie'], ['g++', '-no-pie']),
+  'size': SizeBenchmarker('size'),
   'v8': EmscriptenBenchmarker('v8', aot_v8),
   'v8-lto': EmscriptenBenchmarker('v8-lto', aot_v8, ['-flto']),
   'v8-ctors': EmscriptenBenchmarker('v8-ctors', aot_v8, ['-sEVAL_CTORS']),
@@ -337,6 +382,7 @@ def get_output_files(self):

 class benchmark(common.RunnerCore):
   save_dir = True
+  stats = []  # type: ignore

   @classmethod
   def setUpClass(cls):
@@ -358,6 +404,17 @@ def setUpClass(cls):
     fingerprint.append('llvm: ' + config.LLVM_ROOT)
     print('Running Emscripten benchmarks... [ %s ]' % ' | '.join(fingerprint))

+  @classmethod
+  def tearDownClass(cls):
+    super().tearDownClass()
+    if cls.stats:
+      output = {
+        'version': 1,
+        'git_hash': '',
+        'results': cls.stats
+      }
+      utils.write_file('stats.json', json.dumps(output, indent=2) + '\n')
+
   # avoid depending on argument reception from the commandline
   def hardcode_arguments(self, code):
     if not code or 'int main()' in code:
@@ -397,11 +454,27 @@ def do_benchmark(self, name, src, expected_output='FAIL', args=None,
     for b in benchmarkers:
       if skip_native and isinstance(b, NativeBenchmarker):
         continue
+      if not b.run:
+        # If we won't run the benchmark, we don't need repetitions.
+        reps = 0
       baseline = b
       print('Running benchmarker: %s: %s' % (b.__class__.__name__, b.name))
       b.build(self, filename, args, shared_args, emcc_args, native_args, native_exec, lib_builder, has_output_parser=output_parser is not None)
       b.bench(args, output_parser, reps, expected_output)
-      b.display(baseline)
+      recorded_stats = b.display(baseline)
+      if recorded_stats:
+        self.add_stats(name, recorded_stats)
+
+  def add_stats(self, name, stats):
+    self.stats.append({
+      'key': {
+        'test': name,
+        'units': 'bytes'
+      },
+      'measurements': {
+        'stats': stats
+      }
+    })

   def test_primes(self, check=True):
     src = r'''
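Note: a hypothetical way to exercise the new benchmarker, assuming benchmarker
selection happens via an EMTEST_BENCHMARKERS environment variable (not shown
in this diff) and the usual test/runner entry point:

  # Build each benchmark and record output sizes; nothing is executed,
  # and stats.json is written when the test class tears down.
  EMTEST_BENCHMARKERS=size test/runner benchmark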