@@ -20,7 +20,7 @@ def create_upload_dict(df:pandas.core.frame.DataFrame, lb_client:Client, base_cl
         divider : Optional (str) - String delimiter for all name keys generated
         verbose : Optional (bool) - If True, prints information about code execution
     Returns:
-        Two items - the global_key, and a dictionary with "row_data", "global_key", "external_id" and "metadata_fields" keys
+        Two values - a boolean success flag, and a dictionary where {key=global_key : value={"row_data", "global_key", "external_id", "metadata_fields"}}
     """
     if verbose:
         print(f'Creating upload list - {len(df)} rows in Pandas DataFrame')
@@ -29,28 +29,35 @@ def create_upload_dict(df:pandas.core.frame.DataFrame, lb_client:Client, base_cl
     metadata_schema_to_name_key = base_client.get_metadata_schema_to_name_key(lb_mdo=False, divider=divider, invert=False)
     metadata_name_key_to_schema = base_client.get_metadata_schema_to_name_key(lb_mdo=False, divider=divider, invert=True)
     global_key_to_upload_dict = {}
-    with ThreadPoolExecutor() as exc:
-        futures = []
-        x = 0
-        dupe_print = 0
-        if verbose:
-            print(f'Submitting data rows...')
-        for index, row in df.iterrows():
-            futures.append(exc.submit(create_data_rows, lb_client, base_client, row, metadata_name_key_to_schema, metadata_schema_to_name_key, row_data_col, global_key_col, external_id_col, metadata_index, local_files, divider))
-        if verbose:
-            print(f'Processing data rows...')
-        for f in as_completed(futures):
-            res = f.result()
-            global_key_to_upload_dict[str(res["global_key"])] = res
+    try:
+        with ThreadPoolExecutor() as exc:
+            futures = []
+            x = 0
+            dupe_print = 0
             if verbose:
-                x += 1
-                percent_complete = math.ceil((x / len(df)*100))
-                if percent_complete % 1 == 0 and (percent_complete != dupe_print):
-                    print(f'{str(percent_complete)}% complete')
-                    dupe_print = percent_complete
-    if verbose:
-        print(f'Generated upload list - {len(global_key_to_upload_dict)} data rows to upload')
-    return global_key_to_upload_dict
+                print(f'Submitting data rows...')
+            for index, row in df.iterrows():
+                futures.append(exc.submit(create_data_rows, lb_client, base_client, row, metadata_name_key_to_schema, metadata_schema_to_name_key, row_data_col, global_key_col, external_id_col, metadata_index, local_files, divider))
+            if verbose:
+                print(f'Processing data rows...')
+            for f in as_completed(futures):
+                res = f.result()
+                global_key_to_upload_dict[str(res["global_key"])] = res
+                if verbose:
+                    x += 1
+                    percent_complete = math.ceil((x / len(df)*100))
+                    if percent_complete % 1 == 0 and (percent_complete != dupe_print):
+                        print(f'{str(percent_complete)}% complete')
+                        dupe_print = percent_complete
+        if verbose:
+            print(f'Generated upload list - {len(global_key_to_upload_dict)} data rows to upload')
+        return True, global_key_to_upload_dict
+    except Exception as e:
+        print(e)
+        if res:
+            return False, res
+        else:
+            return False, False
 
 def create_data_rows(lb_client:Client, base_client:baseClient, row:pandas.core.series.Series,
                      metadata_name_key_to_schema:dict, metadata_schema_to_name_key:dict, row_data_col:str,
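
With this change the function no longer returns the upload dictionary directly; it returns a (success, payload) pair, so callers have to unpack the tuple and branch on the flag. A minimal caller-side sketch of the new contract follows - the import path, the DataFrame columns, the client objects, and the keyword-argument names (inferred from the function body) are illustrative assumptions, not part of this commit:

import pandas

# Hypothetical module path; adjust to wherever create_upload_dict lives in this repo.
# from labelpandas import connector

# lb_client / base_client are assumed to be already-authenticated Labelbox clients.
df = pandas.DataFrame({
    "row_data": ["https://example.com/image-1.png"],  # placeholder asset URL
    "global_key": ["image-1"],
    "external_id": ["image-1"],
})

success, payload = connector.create_upload_dict(
    df=df, lb_client=lb_client, base_client=base_client,
    row_data_col="row_data", global_key_col="global_key", external_id_col="external_id",
    metadata_index={}, local_files=False, divider="///", verbose=True,
)

if success:
    # payload maps global_key -> {"row_data", "global_key", "external_id", "metadata_fields"}
    print(f"Prepared {len(payload)} data rows for upload")
else:
    # On failure the exception is printed inside create_upload_dict; payload is either
    # the last row result that completed before the error or False, so guard before use.
    print("Upload dict creation failed", payload)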