@@ -20,7 +20,9 @@ def create_upload_dict(df:pandas.core.frame.DataFrame, lb_client:Client, base_cl
         divider : Optional (str) - String delimiter for all name keys generated
         verbose : Optional (bool) - If True, prints information about code execution
     Returns:
-        Two values - a success value, and a dictionary where {key=global_key : value = {"row_data", "global_key", "external_id", "metadata_fields"}}
+        Two values:
+        - global_key_to_upload_dict - Dictionary where {key=global_key : value=data row dictionary in upload format}
+        - errors - List of dictionaries containing conversion error information; see connector.create_data_rows() for more information
     """
     if verbose:
         print(f'Creating upload list - {len(df)} rows in Pandas DataFrame')
@@ -72,27 +74,29 @@ def create_data_rows(lb_client:Client, base_client:baseClient, row:pandas.core.s
         local_files : Optional (bool) - If True, will create urls for local files; if False, uploads `row_data_col` as urls
         divider : Optional (str) - String delimiter for all name keys generated
     Returns:
-        Two items - the global_key, and a dictionary with "row_data", "global_key", "external_id" and "metadata_fields" keys
+        A dictionary with "error" and "data_row" keys:
+        - "error" - If there's a value in the "error" key, the script will skip the row on upload and return the error at the end
+        - "data_row" - Dictionary with "global_key", "external_id", "row_data" and "metadata_fields" keys in the proper format to be uploaded
"""
81
+ return_value = {"error" : None , "data_row" : {}}
77
82
try :
78
- row_data = lb_client .upload_file (str (row [row_data_col ])) if local_files else str (row [row_data_col ])
83
+ return_value ["result" ]["row_data" ] = lb_client .upload_file (str (row [row_data_col ])) if local_files else str (row [row_data_col ])
84
+ return_value ["result" ]["global_key" ] = str (row [global_key_col ])
85
+ return_value ["result" ]["external_id" ] = str (row [external_id_col ])
79
86
         metadata_fields = [{"schema_id" : metadata_name_key_to_schema['lb_integration_source'], "value" : "Pandas"}]
         if metadata_index:
             for metadata_field_name in metadata_index.keys():
                 input_metadata = base_client.process_metadata_value(
-                    metadata_value=row[metadata_field_name],
-                    metadata_type=metadata_index[metadata_field_name],
-                    parent_name=metadata_field_name,
-                    metadata_name_key_to_schema=metadata_name_key_to_schema,
-                    divider=divider
+                    metadata_value=row[metadata_field_name], metadata_type=metadata_index[metadata_field_name],
+                    parent_name=metadata_field_name, metadata_name_key_to_schema=metadata_name_key_to_schema, divider=divider
                 )
                 if input_metadata:
                     metadata_fields.append({"schema_id" : metadata_name_key_to_schema[metadata_field_name], "value" : input_metadata})
                 else:
                     continue
-        return_value = {"error" : None, "result" : {"row_data" : row_data, "global_key" : str(row[global_key_col]), "external_id" : str(row[external_id_col]), "metadata_fields" : metadata_fields}}
+        return_value["data_row"]["metadata_fields"] = metadata_fields
     except Exception as e:
-        return_value["error"] = e
     return return_value


 def get_columns_function(df):
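
Note: the changed return contract above lends itself to a simple aggregation pattern. Below is a minimal sketch, assuming only what the docstrings state; the sample values and variable names are illustrative, not part of this commit. Results whose "error" key holds a value are skipped and collected, all others are keyed by their global_key - exactly the two values create_upload_dict() now returns.

# Hedged sketch: aggregating results shaped like create_data_rows()'s
# {"error": ..., "data_row": ...} return value. Sample data is made up.
results = [
    {"error": None,
     "data_row": {"row_data": "https://example.com/1.png",
                  "global_key": "image-1",
                  "external_id": "image-1",
                  "metadata_fields": []}},
    {"error": ValueError("bad row_data"), "data_row": {}},
]

global_key_to_upload_dict = {}
errors = []
for res in results:
    if res["error"]:
        errors.append(res)  # surfaced at the end, per the docstring
    else:
        data_row = res["data_row"]
        global_key_to_upload_dict[data_row["global_key"]] = data_row

print(f"{len(global_key_to_upload_dict)} rows ready, {len(errors)} errors")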