@@ -46,13 +46,14 @@ describe('test versioning concurrency', () => {
 
    beforeEach(async () => {
        await fs_utils.create_fresh_path(tmp_fs_path);
+       nsfs.versioning = 'ENABLED';
    });
 
    afterEach(async () => {
        await fs_utils.folder_delete(tmp_fs_path);
    });
 
-   it('multiple puts of the same key', async () => {
+   it('multiple puts of the same key - enabled', async () => {
        const bucket = 'bucket1';
        const key = 'key1';
        const failed_operations = [];
@@ -434,6 +435,61 @@ describe('test versioning concurrency', () => {
        const num_of_latest_versions = (merged_versions.filter(version => version.is_latest === true)).length;
        expect(num_of_latest_versions).toBe(initial_num_of_objects);
    }, test_timeout);
+
+   it('multiple puts of the same key - suspended', async () => {
+       const bucket = 'bucket-s';
+       const key = 'key-s';
+       nsfs.versioning = 'SUSPENDED';
+       const failed_operations = [];
+       const successful_operations = [];
+       const num_of_concurrency = 10;
+       for (let i = 0; i < num_of_concurrency; i++) {
+           const random_data = Buffer.from(String(i));
+           const body = buffer_utils.buffer_to_read_stream(random_data);
+           nsfs.upload_object({ bucket: bucket, key: key, source_stream: body }, DUMMY_OBJECT_SDK)
+               .catch(err => failed_operations.push(err))
+               .then(res => successful_operations.push(res));
+       }
+       await P.delay(2000);
+       expect(successful_operations).toHaveLength(num_of_concurrency);
+       expect(failed_operations).toHaveLength(0);
+       const versions = await nsfs.list_object_versions({ bucket: bucket }, DUMMY_OBJECT_SDK);
+       expect(versions.objects.length).toBe(1); // save only the null version
+   }, test_timeout);
+
+   it('multiple puts of the same key - enabled and suspended', async () => {
+       const bucket = 'bucket-es';
+       const key = 'key-es';
+       const failed_operations1 = [];
+       const successful_operations1 = [];
+       const num_of_concurrency1 = 2;
+       for (let i = 0; i < num_of_concurrency1; i++) {
+           const random_data = Buffer.from(String(i));
+           const body = buffer_utils.buffer_to_read_stream(random_data);
+           nsfs.upload_object({ bucket: bucket, key: key, source_stream: body }, DUMMY_OBJECT_SDK)
+               .catch(err => failed_operations1.push(err))
+               .then(res => successful_operations1.push(res));
+       }
+       await P.delay(2000);
+       nsfs.versioning = 'SUSPENDED';
+       const failed_operations2 = [];
+       const successful_operations2 = [];
+       const num_of_concurrency2 = 3;
+       for (let i = 0; i < num_of_concurrency2; i++) {
+           const random_data = Buffer.from(String(i));
+           const body = buffer_utils.buffer_to_read_stream(random_data);
+           nsfs.upload_object({ bucket: bucket, key: key, source_stream: body }, DUMMY_OBJECT_SDK)
+               .catch(err => failed_operations2.push(err))
+               .then(res => successful_operations2.push(res));
+       }
+       await P.delay(2000);
+       expect(successful_operations1).toHaveLength(num_of_concurrency1);
+       expect(failed_operations1).toHaveLength(0);
+       expect(successful_operations2).toHaveLength(num_of_concurrency2);
+       expect(failed_operations2).toHaveLength(0);
+       const versions = await nsfs.list_object_versions({ bucket: bucket }, DUMMY_OBJECT_SDK);
+       expect(versions.objects.length).toBe(num_of_concurrency1 + 1); // num_of_concurrency1 versions uploaded while versioning was ENABLED + 1 null version written while SUSPENDED
+   }, test_timeout);
 });
 
 /**