 /* Copyright (C) 2020 NooBaa */
-/* eslint-disable no-invalid-this */
 'use strict';
 
-const mocha = require('mocha');
-const config = require('../../../config');
-const file_writer_hashing = require('../../tools/file_writer_hashing');
+const config = require('../../../../config');
+const file_writer_hashing = require('../../../tools/file_writer_hashing');
 const orig_iov_max = config.NSFS_DEFAULT_IOV_MAX;
 
 // on iov_max small tests we need to use smaller amount of parts and chunks to ensure that the test will finish
 // in a reasonable period of time because we will flush max 1/2 buffers at a time.
 const small_iov_num_parts = 20;
 
-
-mocha.describe('FileWriter', function() {
+describe('FileWriter', () => {
     const RUN_TIMEOUT = 10 * 60 * 1000;
 
-    mocha.afterEach(function() {
+    afterEach(() => {
         config.NSFS_DEFAULT_IOV_MAX = orig_iov_max;
     });
 
-    mocha.it('Concurrent FileWriter with hash target', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with hash target', async () => {
         await file_writer_hashing.hash_target();
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target', async () => {
         await file_writer_hashing.file_target();
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with hash target - iov_max=1', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with hash target - iov_max=1', async () => {
         await file_writer_hashing.hash_target(undefined, small_iov_num_parts, 1);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target - iov_max=1', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target - iov_max=1', async () => {
         await file_writer_hashing.file_target(undefined, small_iov_num_parts, 1);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with hash target - iov_max=2', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with hash target - iov_max=2', async () => {
         await file_writer_hashing.hash_target(undefined, small_iov_num_parts, 2);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target - iov_max=2', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target - iov_max=2', async () => {
         await file_writer_hashing.file_target(undefined, small_iov_num_parts, 2);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target - produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target - produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L', async () => {
         // The goal of this test is to produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L
         // so we will flush buffers because of reaching max num of buffers and not because we reached the max NSFS buf size
         // chunk size = 100, num_chunks = (10 * 1024 * 1024)/100 < 104587, 104587 = num_chunks > 1024
         // chunk size = 100, total_chunks_size after having 1024 chunks is = 100 * 1024 < config.NSFS_BUF_SIZE_L
         const chunk_size = 100;
         const parts_s = 50;
         await file_writer_hashing.file_target(chunk_size, parts_s);
-    });
+    }, RUN_TIMEOUT);
 });
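The conversion pattern repeated throughout the diff: Mocha sets a per-test timeout with this.timeout(), which requires a regular function so that "this" is bound to the test context (hence the const self = this lines and the no-invalid-this eslint override), while Jest accepts the timeout as an optional third argument to it(name, fn, timeout), so the callbacks can become arrow functions. A minimal side-by-side sketch, with a hypothetical do_work() standing in for the real test body:

// Mocha: the timeout is set on the test context, so the callback must be a regular function.
mocha.it('example test', async function() {
    this.timeout(10 * 60 * 1000);
    await do_work(); // do_work() is a hypothetical test body
});

// Jest: the timeout is the optional third argument to it(), so an arrow function works.
it('example test', async () => {
    await do_work(); // do_work() is a hypothetical test body
}, 10 * 60 * 1000);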
|