|
1 |
| -using GPUArrays, Test, Pkg |
| 1 | +using Distributed |
| 2 | +using Dates |
| 3 | +import REPL |
| 4 | +using Printf: @sprintf |
2 | 5 |
|
3 |
| -include("testsuite.jl") |
# parse some command-line arguments
"""
    extract_flag!(args, flag, default=nothing)

Scan `args` for a command-line option named `flag`, given either as exactly
`flag` or as `flag=value`. On a match, the matching entry is removed from
`args` in place and `(true, value)` is returned; when a non-string `default`
is supplied, the textual value is parsed to `typeof(default)` (e.g.
`"--jobs=4"` with an `Int` default yields `4`). Without a match,
`(false, default)` is returned and `args` is left untouched.
"""
function extract_flag!(args, flag, default=nothing)
    for f in args
        # Only accept `--flag` exactly or `--flag=...`: a bare prefix match
        # (e.g. `--jobsx` for `--jobs`) is not an occurrence of this option,
        # and previously caused a BoundsError in the split below.
        if f == flag || startswith(f, flag * "=")
            # Check if it's just `--flag` or if it's `--flag=foo`
            if f != flag
                # `limit=2` keeps any '=' inside the value itself intact
                # (e.g. `--project=/path=odd`).
                val = split(f, '='; limit=2)[2]
                if default !== nothing && !(typeof(default) <: AbstractString)
                    val = parse(typeof(default), val)
                end
            else
                val = default
            end

            # Drop this value from our args
            filter!(x -> x != f, args)
            return (true, val)
        end
    end
    return (false, default)
end
# --help: print usage information and bail out before doing any real work.
do_help, _ = extract_flag!(ARGS, "--help")
if do_help
    println("""
        Usage: runtests.jl [--help] [--list] [--jobs=N] [TESTS...]

           --help             Show this text.
           --list             List all available tests.
           --quickfail        Fail the entire run as soon as a single test errored.
           --jobs=N           Launch `N` processes to perform tests (default: Sys.CPU_THREADS).

           Remaining arguments filter the tests that will be executed.""")
    exit(0)
end
# Number of worker processes (defaults to the CPU thread count), and whether to
# abort the whole run on the first test failure.
_, jobs = extract_flag!(ARGS, "--jobs", Sys.CPU_THREADS)
do_quickfail, _ = extract_flag!(ARGS, "--quickfail")

include("setup.jl") # make sure everything is precompiled

@info "Running $jobs tests in parallel. If this is too many, specify the `--jobs` argument to the tests, or set the JULIA_CPU_THREADS environment variable."
# choose tests
const tests = []
const test_runners = Dict()
## GPUArrays testsuite: register every testsuite entry once per array type,
## keyed as "<ArrayType><path separator><test name>".
for AT in (JLArray, Array), name in keys(TestSuite.tests)
    label = string(AT, Base.Filesystem.path_separator, name)
    push!(tests, label)
    test_runners[label] = () -> TestSuite.tests[name](AT)
end
unique!(tests)
| 56 | + |
# parse some more command-line arguments
## --list to list all available tests
do_list, _ = extract_flag!(ARGS, "--list")
if do_list
    println("Available tests:")
    for test in sort(tests)
        println(" - $test")
    end
    exit(0)
end
## no options should remain
optlike_args = filter(startswith("-"), ARGS)
if !isempty(optlike_args)
    error("Unknown test options `$(join(optlike_args, " "))` (try `--help` for usage instructions)")
end
## the remaining args filter tests
if !isempty(ARGS)
    # keep a test when any remaining positional argument is a prefix of its name
    filter!(tests) do test
        any(arg->startswith(test, arg), ARGS)
    end
end
| 78 | + |
# add workers
const test_exeflags = Base.julia_cmd()
# Strip flags we want to control ourselves, then re-add them with fixed values
# so every worker runs with bounds checking and depwarns enabled, no startup
# file, and the same active project as this driver process.
filter!(test_exeflags.exec) do c
    return !(startswith(c, "--depwarn") || startswith(c, "--check-bounds"))
end
push!(test_exeflags.exec, "--check-bounds=yes")
push!(test_exeflags.exec, "--startup-file=no")
push!(test_exeflags.exec, "--depwarn=yes")
push!(test_exeflags.exec, "--project=$(Base.active_project())")
const test_exename = popfirst!(test_exeflags.exec)
# Spawn `X` worker processes (forced single-threaded via the environment) and
# load setup.jl on each so they can run tests immediately.
function addworker(X; kwargs...)
    withenv("JULIA_NUM_THREADS" => 1, "OPENBLAS_NUM_THREADS" => 1) do
        procs = addprocs(X; exename=test_exename, exeflags=test_exeflags, kwargs...)
        @everywhere procs include($(joinpath(@__DIR__, "setup.jl")))
        procs
    end
end
addworker(min(jobs, length(tests)))
| 97 | + |
# pretty print information about gc and mem usage
testgroupheader = "Test"
workerheader = "(Worker)"
# Width of the name column: at least wide enough for the headers, and for the
# longest test name plus its " (NN)" worker-id suffix.
name_align = maximum([textwidth(testgroupheader) + textwidth(" ") +
                      textwidth(workerheader); map(x -> textwidth(x) +
                      3 + ndigits(nworkers()), tests)])
elapsed_align = textwidth("Time (s)")
gc_align = textwidth("GC (s)")
percent_align = textwidth("GC %")
alloc_align = textwidth("Alloc (MB)")
rss_align = textwidth("RSS (MB)")
printstyled(" "^(name_align + textwidth(testgroupheader) - 3), " | ")
printstyled(" | ---------------- CPU ---------------- |\n", color=:white)
printstyled(testgroupheader, color=:white)
printstyled(lpad(workerheader, name_align - textwidth(testgroupheader) + 1), " | ", color=:white)
printstyled("Time (s) | GC (s) | GC % | Alloc (MB) | RSS (MB) |\n", color=:white)
# Share one lock between stdout and stderr so rows printed from concurrent
# worker tasks are never interleaved.
print_lock = stdout isa Base.LibuvStream ? stdout.lock : ReentrantLock()
if stderr isa Base.LibuvStream
    stderr.lock = print_lock
end
# Print one aligned row of per-test resource statistics for a finished test,
# matching the column layout of the header printed at startup.
function print_testworker_stats(test, wrkr, resp)
    @nospecialize resp
    # Serialize output so rows from concurrent workers don't interleave.
    lock(print_lock) do
        printstyled(test, color=:white)
        printstyled(lpad("($wrkr)", name_align - textwidth(test) + 1, " "), " | ", color=:white)

        # resp layout assumed from the uses below: [2]=elapsed seconds,
        # [3]=bytes allocated, [4]=GC seconds, [6]=RSS bytes — confirm against
        # the `runtests` implementation in setup.jl.
        elapsed = @sprintf("%7.2f",resp[2])
        printstyled(lpad(elapsed, elapsed_align, " "), " | ", color=:white)

        gc_seconds = @sprintf("%5.2f", resp[4])
        printstyled(lpad(gc_seconds, gc_align, " "), " | ", color=:white)

        # since there may be quite a few digits in the percentage,
        # the left-padding here is less to make sure everything fits
        gc_percent = @sprintf("%4.1f", 100 * resp[4] / resp[2])
        printstyled(lpad(gc_percent, percent_align, " "), " | ", color=:white)

        alloc_mb = @sprintf("%5.2f", resp[3] / 2^20)
        printstyled(lpad(alloc_mb, alloc_align, " "), " | ", color=:white)

        rss_mb = @sprintf("%5.2f", resp[6] / 2^20)
        printstyled(lpad(rss_mb, rss_align, " "), " |\n", color=:white)
    end
end
# Hook invoked when a worker picks up a test; intentionally a no-op here.
global print_testworker_started = (name, wrkr) -> nothing
# Print a red failure row for `name`, padded so it lines up with the stats table.
function print_testworker_errored(name, wrkr)
    # Take the shared output lock so this row isn't interleaved with others.
    lock(print_lock) do
        printstyled(name, color=:red)
        printstyled(lpad("($wrkr)", name_align - textwidth(name) + 1, " "), " |",
                    " "^elapsed_align, " failed at $(now())\n", color=:red)
    end
end
| 154 | + |
# run tasks
t0 = now()
results = []
all_tasks = Task[]
all_tests = copy(tests)
try
    # Monitor stdin and kill this task on ^C
    # but don't do this on Windows, because it may deadlock in the kernel
    t = current_task()
    running_tests = Dict{String, DateTime}()
    if !Sys.iswindows() && isa(stdin, Base.TTY)
        stdin_monitor = @async begin
            term = REPL.Terminals.TTYTerminal("xterm", stdin, stdout, stderr)
            try
                REPL.Terminals.raw!(term, true)
                while true
                    c = read(term, Char)
                    if c == '\x3'
                        # ^C (ETX): interrupt the main task, aborting the run
                        Base.throwto(t, InterruptException())
                        break
                    elseif c == '?'
                        # '?' reports which tests are currently in flight
                        println("Currently running: ")
                        tests = sort(collect(running_tests), by=x->x[2])
                        foreach(tests) do (test, date)
                            println(test, " (running for ", round(now()-date, Minute), ")")
                        end
                    end
                end
            catch e
                isa(e, InterruptException) || rethrow()
            finally
                REPL.Terminals.raw!(term, false)
            end
        end
    end
    @sync begin
        # Tear down a worker process; always returns nothing so that
        # `p = recycle_worker(p)` marks the slot as needing a fresh worker.
        function recycle_worker(p)
            rmprocs(p, waitfor=30)
            return nothing
        end

        # One scheduler task per worker, all pulling from the shared `tests` queue.
        for p in workers()
            @async begin
                push!(all_tasks, current_task())
                while length(tests) > 0
                    test = popfirst!(tests)

                    # sometimes a worker failed, and we need to spawn a new one
                    if p === nothing
                        p = addworker(1)[1]
                    end
                    wrkr = p

                    local resp

                    # run the test
                    running_tests[test] = now()
                    try
                        resp = remotecall_fetch(runtests, wrkr, test_runners[test], test)
                    catch e
                        # an interrupt means the whole run is being torn down
                        isa(e, InterruptException) && return
                        resp = Any[e]
                    end
                    delete!(running_tests, test)
                    push!(results, (test, resp))

                    # act on the results
                    if resp[1] isa Exception
                        print_testworker_errored(test, wrkr)
                        do_quickfail && Base.throwto(t, InterruptException())

                        # the worker encountered some failure, recycle it
                        # so future tests get a fresh environment
                        p = recycle_worker(p)
                    else
                        print_testworker_stats(test, wrkr, resp)

                        cpu_rss = resp[6]
                        if haskey(ENV, "CI") && cpu_rss > 3*2^30
                            # XXX: collecting garbage
                            # after each test, we are leaking CPU memory somewhere.
                            # this is a problem on CI, where we don't have much RAM.
                            # work around this by periodically recycling the worker.
                            p = recycle_worker(p)
                        end
                    end
                end

                # queue drained: retire this worker if it's still alive
                if p !== nothing
                    recycle_worker(p)
                end
            end
        end
    end
catch e
    isa(e, InterruptException) || rethrow()
    # If the test suite was merely interrupted, still print the
    # summary, which can be useful to diagnose what's going on
    foreach(task -> begin
            istaskstarted(task) || return
            istaskdone(task) && return
            try
                schedule(task, InterruptException(); error=true)
            catch ex
                @error "InterruptException" exception=ex,catch_backtrace()
            end
        end, all_tasks)
    for t in all_tasks
        # NOTE: we can't just wait, but need to discard the exception,
        # because the throwto for --quickfail also kills the worker.
        try
            wait(t)
        catch e
            showerror(stderr, e)
        end
    end
finally
    if @isdefined stdin_monitor
        schedule(stdin_monitor, InterruptException(); error=true)
    end
end
|
# Report total wall-clock duration in human-readable units.
t1 = now()
elapsed = canonicalize(Dates.CompoundPeriod(t1-t0))
println("Testing finished in $elapsed")
12 | 279 |
|
13 |
| -@testset "Array" begin |
14 |
| - TestSuite.test(Array) |
# construct a testset to render the test results
o_ts = Test.DefaultTestSet("Overall")
Test.push_testset(o_ts)
completed_tests = Set{String}()
for (testname, (resp,)) in results
    push!(completed_tests, testname)
    if isa(resp, Test.DefaultTestSet)
        # The worker shipped back a full testset; record it verbatim.
        Test.push_testset(resp)
        Test.record(o_ts, resp)
        Test.pop_testset()
    elseif isa(resp, Tuple{Int,Int})
        # Only (pass, broken) counts came back; rebuild an equivalent testset.
        fake = Test.DefaultTestSet(testname)
        for i in 1:resp[1]
            Test.record(fake, Test.Pass(:test, nothing, nothing, nothing, nothing))
        end
        for i in 1:resp[2]
            Test.record(fake, Test.Broken(:test, nothing))
        end
        Test.push_testset(fake)
        Test.record(o_ts, fake)
        Test.pop_testset()
    elseif isa(resp, RemoteException) && isa(resp.captured.ex, Test.TestSetException)
        # The test failed remotely: reconstruct its passes, brokens, and
        # errors/failures from the captured TestSetException.
        println("Worker $(resp.pid) failed running test $(testname):")
        Base.showerror(stdout, resp.captured)
        println()
        fake = Test.DefaultTestSet(testname)
        for i in 1:resp.captured.ex.pass
            Test.record(fake, Test.Pass(:test, nothing, nothing, nothing, nothing))
        end
        for i in 1:resp.captured.ex.broken
            Test.record(fake, Test.Broken(:test, nothing))
        end
        for t in resp.captured.ex.errors_and_fails
            Test.record(fake, t)
        end
        Test.push_testset(fake)
        Test.record(o_ts, fake)
        Test.pop_testset()
    else
        if !isa(resp, Exception)
            resp = ErrorException(string("Unknown result type : ", typeof(resp)))
        end
        # If this test raised an exception that is not a remote testset exception,
        # i.e. not a RemoteException capturing a TestSetException that means
        # the test runner itself had some problem, so we may have hit a segfault,
        # deserialization errors or something similar. Record this testset as Errored.
        fake = Test.DefaultTestSet(testname)
        Test.record(fake, Test.Error(:nontest_error, testname, nothing, Any[(resp, [])], LineNumberNode(1)))
        Test.push_testset(fake)
        Test.record(o_ts, fake)
        Test.pop_testset()
    end
end
# Any test that never produced a result (e.g. the run was interrupted) is
# recorded in the overall testset as an error, so it shows up in the summary.
for test in all_tests
    (test in completed_tests) && continue
    fake = Test.DefaultTestSet(test)
    Test.record(fake, Test.Error(:test_interrupted, test, nothing,
                                 [("skipped", [])], LineNumberNode(1)))
    Test.push_testset(fake)
    Test.record(o_ts, fake)
    Test.pop_testset()
end
|
println()
Test.print_test_results(o_ts, 1)
if !o_ts.anynonpass
    # ANSI bold green
    println(" \033[32;1mSUCCESS\033[0m")
else
    # ANSI bold red; rethrow as a testset exception so the process exits nonzero
    println(" \033[31;1mFAILURE\033[0m\n")
    Test.print_test_errors(o_ts)
    throw(Test.FallbackTestSetException("Test run finished with errors"))
end
| 351 | + |
0 commit comments