 import re
 import os
 
-def main():
+def filter(filters, record):
+    # Always filter non-records:
+    if not "\"ph\":" in record: return False
+    # Handle the case where there are no filters:
+    if len(filters) == 0: return True
+    # Always keep metadata records:
+    if "\"ph\":\"M\"" in record: return True
+    # Otherwise, check against all filters:
+    for filter in filters:
+        if filter in record: return True
+    return False
+
+def fixup(record):
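+    # Ensure the record ends with a trailing comma (plus newline) so that
+    # records from multiple traces can be concatenated into one JSON array.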
+    if not record.strip().endswith(','):
+        record = record.strip() + ",\n"
+    return record
 
+def main():
     printHelp = False
 
     if (len(sys.argv) < 4 or sys.argv[1] == '-h' or sys.argv[1] == '-?'):
         printHelp = True
 
     # Help message
     if (printHelp):
-        print("")
-        print(" A script to combine multiple Chrome traces captured by the opencl-intercept-layer.")
-        print(" The combined trace can be viewed on a common timeline in the Chrome browser.")
+        print(r"")
+        print(r" A script to combine multiple Chrome traces captured by the opencl-intercept-layer.")
+        print(r" The combined trace can be viewed on a common timeline in the Chrome browser.")
         print()
-        print(" This is useful for analyzing multi-process execution.")
-        print(" Set CLI_AppendPid=1 when collecting Chrome traces to obtain separate per-process traces.")
-        print(" Can also be useful to compare two or more single process executions on a common timeline.")
+        print(r" This is useful for analyzing multi-process execution.")
+        print(r" Set CLI_AppendPid=1 when collecting Chrome traces to obtain separate per-process traces.")
+        print(r" Can also be useful to compare two or more single process executions on a common timeline.")
         print()
-        print(" Use as:")
-        print(" combine_chrome_traces.py <number of traces> <space-separated paths to all json traces> [space-separated event filters]")
+        print(r" Use as:")
+        print(r" combine_chrome_traces.py <number of traces> <space-separated paths to all json traces> [space-separated event filters]")
         print()
-        print(" Optional arguments: event-filters are names of OpenCL kernels or OpenCL API calls")
-        print(" that should be retained in the filtered output.")
+        print(r" Optional arguments: event-filters are names of OpenCL kernels or OpenCL API calls")
+        print(r" that should be retained in the filtered output.")
         print()
-        print(" Example:")
+        print(r" Example:")
         print()
-        print(" combine_chrome_traces.py 4 \ # specifies 4 traces to combine")
-        print(" CLIntercept_Dump.45682/clintercept_trace.json \ # paths to the four traces follow")
-        print(" CLIntercept_Dump.45683/clintercept_trace.json \ ")
-        print(" CLIntercept_Dump.45684/clintercept_trace.json \ ")
-        print(" CLIntercept_Dump.45685/clintercept_trace.json \ ")
-        print(" kernelA kernelB clEnqueueWriteBuffer clEnqueueReadBuffer # specifies kernel/API names as filters [optional]")
+        print(r" combine_chrome_traces.py 4 \ # specifies 4 traces to combine")
+        print(r" CLIntercept_Dump.45682/clintercept_trace.json \ # paths to the four traces follow")
+        print(r" CLIntercept_Dump.45683/clintercept_trace.json \ ")
+        print(r" CLIntercept_Dump.45684/clintercept_trace.json \ ")
+        print(r" CLIntercept_Dump.45685/clintercept_trace.json \ ")
+        print(r" kernelA kernelB clEnqueueWriteBuffer clEnqueueReadBuffer # specifies kernel/API names as filters [optional]")
         print()
-        print(" Note: This script modifies events records so that all traces have a common epoch.")
+        print(r" Note: This script modifies events records so that all traces have a common epoch.")
         print()
         sys.exit(0)
 
     # Get input arguments
-    numFiles = int(sys.argv[1]);
-    numStrings = len(sys.argv) - numFiles - 2;
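+    # argv[1] is the number of traces: the next argv[1] arguments are trace
+    # paths, and any remaining arguments are treated as event filters.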
+    files = sys.argv[2:2+int(sys.argv[1])]
+    filters = sys.argv[2+int(sys.argv[1]):]
 
     # Sanity checks
-    if (numFiles < 2):
+    if len(files) < 2:
         print("ERROR: you must specify at least two traces to combine.")
         sys.exit(1)
-    for j in range(numFiles):
-        if (not os.path.isfile(sys.argv[j+2])):
-            print("ERROR: specified file " + sys.argv[j+2] + " cannot be found.")
+    for filename in files:
+        if not os.path.isfile(filename):
+            print("ERROR: specified file {} cannot be found.".format(filename))
             sys.exit(1)
 
     # Read input files
     inputFiles = []
-    for j in range(numFiles):
-        f = open(sys.argv[j+2],'r')
+    for filename in files:
+        f = open(filename,'r')
         currentFile = f.readlines()
         f.close()
         inputFiles.append(currentFile)
 
     # Figure out epoch (earliest start_time across all records)
     start_times = []
-    for j in range(numFiles):
+    for j in range(len(files)):
         for k in range(len(inputFiles[j])):
             if (inputFiles[j][k].find("start_time") != -1):
                 start_times.append(int(inputFiles[j][2].split(":")[-1].split("}")[0].strip('"')))
@@ -79,39 +95,43 @@ def main():
             print("ERROR: start_time not found in trace file " + sys.argv[j+2] + ". Please check if the trace is valid.")
             sys.exit(1)
     epoch = min(start_times)
+    print("Found minimum start time {}".format(epoch))
 
-    # Perform filtering if necessary
+    # Perform filtering
     filteredFiles = []
-    if (numStrings == 0):
-        filteredFiles = inputFiles
-    else:
-        for j in range(numFiles):
-            flt = [i for i in inputFiles[j] if "\"ph\":\"M\"" in i] # copy metadata
-            for k in range(numStrings):
-                flt = flt + [i for i in inputFiles[j] if sys.argv[2+numFiles+k] in i]
-            filteredFiles.append(flt)
+    for j in range(len(files)):
+        flt = [fixup(i) for i in inputFiles[j] if filter(filters, i)]
+        filteredFiles.append(flt)
 
     # Perform epoch normalization
-    for j in range(numFiles):
+    for j in range(len(files)):
         offset = start_times[j] - epoch
+        print("Processing file {} with offset {}".format(files[j], offset))
         for k in range(len(filteredFiles[j])):
             if (filteredFiles[j][k].find("\"ts\"") != -1):
-                ts = int(filteredFiles[j][k].split("\"ts\":")[-1].split(",")[0]) + offset
-                filteredFiles[j][k] = re.sub("\"ts\":\d+", "\"ts\":" + str(ts), filteredFiles[j][k])
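+                # "ts" values may contain a fractional part, so parse them as
+                # floats and match digits and '.' in the substitution below.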
+                ts = float(filteredFiles[j][k].split("\"ts\":")[-1].split(",")[0]) + offset
+                #print('old record was: {}'.format(filteredFiles[j][k].strip()))
+                filteredFiles[j][k] = re.sub("\"ts\":[\\d.]+", "\"ts\":" + str(ts), filteredFiles[j][k])
+                #print('new record is: {}'.format(filteredFiles[j][k].strip()))
             elif (filteredFiles[j][k].find("start_time") != -1):
-                filteredFiles[j][k] = re.sub('\"start_time\":["]?\d+["]?', "\"start_time\":" + str(epoch), filteredFiles[j][k])
-
+                #print('old record was: {}'.format(filteredFiles[j][k].strip()))
+                filteredFiles[j][k] = re.sub('\"start_time\":["]?\\d+["]?', "\"start_time\":" + str(epoch), filteredFiles[j][k])
+                #print('new record is: {}'.format(filteredFiles[j][k].strip()))
+
     # Write to output file
     tstamp = datetime.datetime.now()
     fName = "merged_" + str(tstamp.year) + '-' + str(tstamp.month) + '-' + str(tstamp.day) \
         + '-' + str(tstamp.hour) + '-' + str(tstamp.minute) + '-' + str(tstamp.second) + ".json"
-    print("Combining in " + fName)
+    print("Writing to combined file " + fName)
     fo = open(fName, 'w')
     fo.write("[\n")
-    for j in range(numFiles):
+    for j in range(len(files)):
         for k in range(1, len(filteredFiles[j])):
             fo.write("%s" % filteredFiles[j][k])
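+    # Every retained record now ends with a trailing comma (see fixup), so
+    # append a final metadata record without one and close the JSON array.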
+    fo.write("{\"ph\":\"M\",\"name\":\"clintercept_merged_eof\",\"pid\":0,\"tid\":0}\n")
+    fo.write("]\n")
     f.close()
+    print("Done.")
 
 if __name__ == "__main__":
     main()