@@ -29,21 +29,11 @@ def __init__(
 
     # def create_table_from_dataset():
     #     return table
 
-    def create_data_rows_from_table(
-            self,
-            table: pd.core.frame.DataFrame,
-            lb_dataset: labelboxDataset,
-            row_data_col: str,
-            local_files: bool = False,
-            global_key_col: str = "",
-            external_id_col: str = "",
-            metadata_index: dict = {},
-            skip_duplicates: bool = False,
-            divider="___",
-            verbose: bool = False):
+    def create_data_rows_from_table(self, df: pd.core.frame.DataFrame, lb_dataset: labelboxDataset, row_data_col: str, local_files: bool = False,
+                                    global_key_col=None, external_id_col=None, metadata_index: dict = {}, skip_duplicates: bool = False, divider="___", verbose: bool = False):
         """ Creates Labelbox data rows given a Pandas table and a Labelbox Dataset
         Args:
-            table           : Required (pandas.core.frame.DataFrame) - Pandas dataframe to-be-uploaded
+            df              : Required (pandas.core.frame.DataFrame) - Pandas dataframe to-be-uploaded
             lb_dataset      : Required (labelbox.schema.dataset.Dataset) - Labelbox dataset to add data rows to
             row_data_col    : Required (str) - Column name where the data row row data URL is located
             local_files     : Required (bool) - If True, will create urls for local files / If False, treats the values in `row_data_col` as urls
@@ -56,35 +46,24 @@ def create_data_rows_from_table(
         Returns:
             List of errors from data row upload - if successful, is an empty list
         """
-        table = self.base_client.sync_metadata_fields(
-            table=table,
-            get_columns_function=connector.get_columns_function,
-            add_column_function=connector.add_column_function,
-            get_unique_values_function=connector.get_unique_values_function,
-            metadata_index=metadata_index,
-            verbose=verbose
+        df = self.base_client.sync_metadata_fields(
+            table=df, get_columns_function=connector.get_columns_function, add_column_function=connector.add_column_function,
+            get_unique_values_function=connector.get_unique_values_function, metadata_index=metadata_index, verbose=verbose
         )
 
-        if type(table) == bool:
+        if type(df) == bool:
             return None
 
         global_key_to_upload_dict = connector.create_upload_dict(
-            table=table,
-            local_files=local_files,
-            lb_client=self.lb_client,
-            row=row,
-            row_data_col=row_data_col,
-            global_key_col=global_key_col,
-            external_id_col=external_id_col,
-            metadata_index=metadata_index,
-            divider=divider
+            df=df, local_files=local_files, lb_client=self.lb_client,
+            row=row, row_data_col=row_data_col, global_key_col=global_key_col,
+            external_id_col=external_id_col, metadata_index=metadata_index, divider=divider
         )
 
         upload_results = self.base_client.batch_create_data_rows(
-            dataset=lb_dataset,
-            global_key_to_upload_dict=global_key_to_upload_dict,
-            skip_duplicates=skip_duplicates,
-            divider=divider)
+            dataset=lb_dataset, global_key_to_upload_dict=global_key_to_upload_dict,
+            skip_duplicates=skip_duplicates, divider=divider
+        )
 
         return upload_results
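
A minimal usage sketch of the new df-based signature introduced by this diff. The call parameters are taken from the signature above; the wrapper instance (called `client` below), the API key, the dataset name, and the DataFrame columns are hypothetical placeholders, not part of this commit.

# Usage sketch -- assumptions are marked in comments.
import pandas as pd
import labelbox

lb_client = labelbox.Client(api_key="YOUR_LB_API_KEY")      # Labelbox SDK client
lb_dataset = lb_client.create_dataset(name="demo-dataset")  # dataset that will receive the data rows

df = pd.DataFrame({
    "row_data": ["https://example.com/image-1.jpg", "https://example.com/image-2.jpg"],
    "global_key": ["image-1", "image-2"],
})

# `client` stands in for an instance of the class this diff modifies (hypothetical name)
upload_errors = client.create_data_rows_from_table(
    df=df,
    lb_dataset=lb_dataset,
    row_data_col="row_data",      # column holding the data row URLs
    global_key_col="global_key",  # optional; defaults to None in the new signature
    skip_duplicates=True,
)
print(upload_errors)  # empty list on success, per the docstring

Note that `global_key_col` and `external_id_col` now default to None instead of empty strings, so callers that relied on the old "" defaults should pass these columns explicitly.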