@@ -35,7 +35,9 @@ def __init__(self):
         self.user_time_ms = None
         self.system_time = None
         self.rss = None
-        self.output_hash = None
+        self.result_hash = None
+        self.stdout_file_path = None
+        self.stderr_file_path = None
         self.perf_file_path = None
 
     def from_json(self, json):
@@ -64,6 +66,8 @@ def pretty_print(value):
         return f"Unwrap(DateTime::FromSeconds({int(delt.total_seconds())}))"
     if type(value) == datetime.timedelta:
         return f"DateTime::IntervalFromMicroseconds({int(value / datetime.timedelta(microseconds=1))})"
+    if isinstance(value, pathlib.Path):
+        return f'\"{value}\"'
     if type(value) == str:
         return f'\"{value}\"'
     if type(value) in [int, float]:
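For illustration only (not part of the patch): a minimal standalone sketch of what the new `pathlib.Path` branch is expected to return, mirroring the quoting already used for `str` values; the sample path is made up.

```python
# Sketch: reproduces only the Path and str branches of pretty_print.
import pathlib


def pretty_print(value):
    if isinstance(value, pathlib.Path):
        return f'\"{value}\"'
    if type(value) == str:
        return f'\"{value}\"'
    assert False, f"unrecognized type: {type(value)}"


print(pretty_print(pathlib.Path("try_2/q1/q1.svg")))  # "try_2/q1/q1.svg"
print(pretty_print("abc"))                            # "abc"
```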
@@ -74,7 +78,21 @@ def pretty_print(value):
     assert False, f"unrecognized type: {type(value)}"
 
 
-def upload_results(result_path, s3_folder, test_start):
+def upload_file_to_s3(s3_folder, result_path, file):
+    # copying files to folder that will be synced with s3
+    dst = file.relative_to(result_path)
+    s3_file = (s3_folder / dst).resolve()
+    s3_file.parent.mkdir(parents=True, exist_ok=True)
+    _ = shutil.copy2(str(file.resolve()), str(s3_file))
+    return dst
+
+
+def upload_results(result_path, s3_folder, test_start, try_num):
+    def add_try_num_to_path(path):
+        if try_num:
+            path = f"try_{try_num}" / path
+        return path
+
     results_map = {}
     for entry in result_path.glob("*/*"):
         if not entry.is_dir():
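Read together, the new helper and the `try_num` prefix compose as in the standalone sketch below (illustrative only, not part of the patch; the sample file layout and try number are made up):

```python
# Sketch: mirrors upload_file_to_s3 plus the add_try_num_to_path prefix.
import pathlib
import shutil
import tempfile

result_path = pathlib.Path(tempfile.mkdtemp())  # stands in for --result-path
s3_folder = pathlib.Path(tempfile.mkdtemp())    # stands in for --s3-folder
try_num = 2                                     # stands in for --try-num

# fake per-query artifact: <result_path>/q1/q1.svg
src = result_path / "q1" / "q1.svg"
src.parent.mkdir(parents=True)
src.write_text("<svg/>")

# mirrors upload_file_to_s3: copy into the folder that is later synced to S3,
# keeping the path relative to result_path
dst = src.relative_to(result_path)              # q1/q1.svg
s3_file = (s3_folder / dst).resolve()
s3_file.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(str(src), str(s3_file))

# mirrors add_try_num_to_path: the link stored in the results table gets a
# "try_<N>" prefix when a try number is given
link = f"try_{try_num}" / dst if try_num else dst
print(link)                                     # try_2/q1/q1.svg
```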
@@ -98,17 +116,22 @@ def upload_results(result_path, s3_folder, test_start):
             if query_num not in this_result:
                 this_result[query_num] = RunResults()
 
+            # q<num>.svg
             if file.suffix == ".svg":
-                dst = file.relative_to(result_path)
-                this_result[query_num].perf_file_path = dst
-                # copying files to folder that will be synced with s3
-                dst = (s3_folder / dst).resolve()
-                dst.parent.mkdir(parents=True, exist_ok=True)
-                _ = shutil.copy2(str(file.resolve()), str(dst))
+                this_result[query_num].perf_file_path = add_try_num_to_path(upload_file_to_s3(s3_folder, result_path, file))
+
+            # q<num>-result.yson
+            if file.stem == f"q{query_num}-result":
+                with open(file, "r") as result:
+                    this_result[query_num].result_hash = str(hash(result.read().strip()))
+
             # q<num>-stdout.txt
             if file.stem == f"q{query_num}-stdout":
-                with open(file, "r") as stdout:
-                    this_result[query_num].output_hash = str(hash(stdout.read().strip()))
+                this_result[query_num].stdout_file_path = add_try_num_to_path(upload_file_to_s3(s3_folder, result_path, file))
+
+            # q<num>-stderr.txt
+            if file.stem == f"q{query_num}-stderr":
+                this_result[query_num].stderr_file_path = add_try_num_to_path(upload_file_to_s3(s3_folder, result_path, file))
 
         summary_file = entry / "summary.json"
@@ -144,12 +167,14 @@ def upload_results(result_path, s3_folder, test_start):
             "WasSpillingInJoin": None,
             "WasSpillingInChannels": None,
             "MaxTasksPerStage": params.tasks,
-            "PerfFileLink": results.perf_file_path,
             "ExitCode": results.exitcode,
-            "ResultHash": results.output_hash,
+            "ResultHash": results.result_hash,
            "SpilledBytes": results.read_bytes,
             "UserTime": results.user_time,
-            "SystemTime": results.system_time
+            "SystemTime": results.system_time,
+            "StdoutFileLink": results.stdout_file_path,
+            "StderrFileLink": results.stderr_file_path,
+            "PerfFileLink": results.perf_file_path
         }
         sql = 'UPSERT INTO `perfomance/olap/dq_spilling_nightly_runs`\n\t({columns})\nVALUES\n\t({values})'.format(
             columns=", ".join(map(str, mapping.keys())),
@@ -164,14 +189,15 @@ def main():
 
     parser.add_argument("--result-path", type=pathlib.Path)
     parser.add_argument("--s3-folder", type=pathlib.Path)
+    parser.add_argument("--try-num", default=None)
 
     args = parser.parse_args()
 
     if "CI_YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS" not in os.environ:
         raise AttributeError("Env variable CI_YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS is missing, skipping uploading")
     os.environ["YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS"] = os.environ["CI_YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS"]
 
-    upload_results(args.result_path, args.s3_folder, upload_time)
+    upload_results(args.result_path, args.s3_folder, upload_time, args.try_num)
 
 
 if __name__ == "__main__":
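Finally, an illustrative sketch of how the new link columns are expected to reach the upserted row (the values are made up; only the `pathlib.Path` branch of `pretty_print` is reproduced, and how the `{values}` placeholder is filled is assumed here):

```python
# Sketch: reduced illustration of the UPSERT formatting with the new columns.
import pathlib


def pretty_print(value):
    # stand-in covering only the branch used below
    if isinstance(value, pathlib.Path):
        return f'\"{value}\"'
    assert False, f"unrecognized type: {type(value)}"


mapping = {
    "StdoutFileLink": pathlib.Path("try_2/q1/q1-stdout.txt"),  # made-up values
    "StderrFileLink": pathlib.Path("try_2/q1/q1-stderr.txt"),
    "PerfFileLink": pathlib.Path("try_2/q1/q1.svg"),
}

sql = 'UPSERT INTO `perfomance/olap/dq_spilling_nightly_runs`\n\t({columns})\nVALUES\n\t({values})'.format(
    columns=", ".join(map(str, mapping.keys())),
    values=", ".join(map(pretty_print, mapping.values())),  # assumed join for {values}
)
print(sql)
```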