38
38
39
39
from servicex .models import ResultFile , TransformStatus
40
40
41
# Per-file S3 transfer tuning: at most five simultaneous streams per
# individual file download.
# NOTE(review): TransferConfig is presumably boto3/aiobotocore's S3 transfer
# configuration — confirm against this module's imports.
_transferconfig = TransferConfig(max_concurrency=5)

# Maximum ten files simultaneously being downloaded; callers may resize this
# limit at runtime via init_s3_config().
_file_transfer_sem = asyncio.Semaphore(10)

# Maximum five buckets being queried at once.
_bucket_list_sem = asyncio.Semaphore(5)
44
47
45
48
46
49
def init_s3_config(concurrency: int = 10) -> None:
    """Update the number of concurrent file downloads.

    Replaces the module-level ``_file_transfer_sem`` semaphore with a fresh
    one permitting *concurrency* simultaneous file downloads.

    Args:
        concurrency: Maximum number of files that may be downloaded at the
            same time. Defaults to 10, matching the module's initial value.

    Note:
        This only affects whole-file download concurrency. The per-file
        stream limit (``_transferconfig``) and the bucket-listing limit
        (``_bucket_list_sem``) are unchanged.
    """
    global _file_transfer_sem
    _file_transfer_sem = asyncio.Semaphore(concurrency)
50
53
51
54
52
55
def _sanitize_filename (fname : str ):
@@ -128,7 +131,7 @@ async def download_file(
128
131
if expected_size is not None :
129
132
remotesize = expected_size
130
133
else :
131
- async with _sem :
134
+ async with _file_transfer_sem :
132
135
info = await s3 .head_object (Bucket = self .bucket , Key = object_name )
133
136
remotesize = info ["ContentLength" ]
134
137
if path .exists ():
@@ -137,7 +140,7 @@ async def download_file(
137
140
localsize = path .stat ().st_size
138
141
if localsize == remotesize :
139
142
return path .resolve ()
140
- async with _sem :
143
+ async with _file_transfer_sem :
141
144
await s3 .download_file (
142
145
Bucket = self .bucket ,
143
146
Key = object_name ,
0 commit comments