 
 
 def get_execution_engine_type(
-    data_frame: Union[DataFrame, pd.DataFrame]
+    data_frame: Union[DataFrame, pd.DataFrame]
 ) -> ExecutionEngine:
     """
     Determines the execution engine type for a given DataFrame.
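
The Union[DataFrame, pd.DataFrame] signature means the helper has to sniff the concrete type at runtime. A minimal sketch of that dispatch, assuming an ExecutionEngine enum with SPARK and PANDAS members (the member names are an assumption, not taken from this diff):

    # Sketch only: mirrors the runtime dispatch implied by the Union type.
    # The ExecutionEngine members SPARK/PANDAS are assumed for illustration.
    from enum import Enum

    import pandas as pd
    from pyspark.sql import DataFrame


    class ExecutionEngine(Enum):
        SPARK = "spark"
        PANDAS = "pandas"


    def get_execution_engine_type_sketch(data_frame) -> ExecutionEngine:
        # pandas input is handled locally; Spark input goes to the cluster.
        if isinstance(data_frame, pd.DataFrame):
            return ExecutionEngine.PANDAS
        if isinstance(data_frame, DataFrame):
            return ExecutionEngine.SPARK
        raise TypeError("expected a Spark or pandas DataFrame")
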
@@ -89,7 +89,7 @@ def get_metastore_id(feature_store_id: str):
 
 
 def validate_delta_format_parameters(
-    timestamp: datetime = None, version_number: int = None, is_restore: bool = False
+    timestamp: datetime = None, version_number: int = None, is_restore: bool = False
 ):
     """
     Validate the user input provided as part of preview, restore APIs for ingested data. Ingested data is
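
A preview or restore can be pinned to either a point in time or a Delta version, so a natural rule set is mutual exclusion between the two selectors. A sketch of plausible checks, assuming exactly those rules (the actual rules are not visible in this hunk):

    # Sketch only: assumed validation rules for the preview/restore inputs.
    from datetime import datetime


    def validate_delta_format_parameters_sketch(
        timestamp: datetime = None, version_number: int = None, is_restore: bool = False
    ):
        # A query can be pinned to a timestamp or a version, never both.
        if timestamp is not None and version_number is not None:
            raise ValueError("provide either timestamp or version_number, not both")
        # Delta version numbers are non-negative integers.
        if version_number is not None and version_number < 0:
            raise ValueError("version_number cannot be negative")
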
@@ -123,9 +123,9 @@ def validate_delta_format_parameters(
 
 
 def show_ingestion_summary(
-    entity_id: str,
-    entity_type: EntityType = EntityType.FEATURE_GROUP,
-    error_details: str = None,
+    entity_id: str,
+    entity_type: EntityType = EntityType.FEATURE_GROUP,
+    error_details: str = None,
 ):
     """
     Displays an ingestion summary table with the given entity type and error details.
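
A sketch of how such a summary table can be rendered; whether the library actually uses tabulate, and the exact column set, are assumptions:

    # Sketch only: rendering an ingestion summary with tabulate (assumed).
    from tabulate import tabulate


    def show_ingestion_summary_sketch(entity_id, entity_type="FEATURE_GROUP", error_details=None):
        # Presence of error details is treated as a failed ingestion.
        status = "FAILED" if error_details else "SUCCEEDED"
        rows = [[entity_type, entity_id, status, error_details or "-"]]
        headers = ["entity_type", "entity_id", "status", "error_details"]
        print(tabulate(rows, headers=headers, tablefmt="grid"))
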
@@ -165,7 +165,7 @@ def show_validation_summary(ingestion_status: str, validation_output, expectatio
     statistics = validation_output["statistics"]
 
     table_headers = (
-        ["expectation_type"] + list(statistics.keys()) + ["ingestion_status"]
+        ["expectation_type"] + list(statistics.keys()) + ["ingestion_status"]
     )
 
     table_values = [expectation_type] + list(statistics.values()) + [ingestion_status]
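
The headers are derived from whatever keys the statistics block carries, so headers and values stay aligned by construction. With a Great Expectations-style result (the payload below is made up), the two lists come out like this:

    # Illustration only: a made-up validation_output shaped like a
    # Great Expectations validation result's statistics block.
    validation_output = {
        "statistics": {
            "evaluated_expectations": 2,
            "successful_expectations": 2,
            "unsuccessful_expectations": 0,
            "success_percent": 100.0,
        }
    }
    statistics = validation_output["statistics"]

    table_headers = ["expectation_type"] + list(statistics.keys()) + ["ingestion_status"]
    table_values = ["STRICT"] + list(statistics.values()) + ["SUCCEEDED"]
    # table_headers: expectation_type, evaluated_expectations, ..., ingestion_status
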
@@ -209,9 +209,9 @@ def show_validation_summary(ingestion_status: str, validation_output, expectatio
 
 
 def get_features(
-    output_columns: List[dict],
-    parent_id: str,
-    entity_type: EntityType = EntityType.FEATURE_GROUP,
+    output_columns: List[dict],
+    parent_id: str,
+    entity_type: EntityType = EntityType.FEATURE_GROUP,
 ) -> List[Feature]:
     """
     Returns a list of features, given a list of output_columns and a feature_group_id.
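
A stand-in sketch of the mapping from output_columns dictionaries to Feature objects; the real Feature constructor and the dictionary keys are not shown in this diff, so both are assumptions:

    # Sketch only: a hypothetical Feature stand-in and assumed column keys.
    from dataclasses import dataclass
    from typing import List


    @dataclass
    class Feature:
        name: str
        feature_type: str
        parent_id: str


    def get_features_sketch(output_columns: List[dict], parent_id: str) -> List[Feature]:
        # Each output column dict becomes one Feature tied to its parent entity.
        return [
            Feature(col.get("name"), col.get("featureType"), parent_id)
            for col in output_columns
        ]
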
@@ -268,7 +268,7 @@ def get_schema_from_spark_df(df: DataFrame):
 
 
 def get_schema_from_df(
-    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
+    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
 ) -> List[dict]:
     """
     Given a DataFrame, returns a list of dictionaries that describe its schema.
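
For the pandas half of that Union, a schema can be read straight off the dtypes. A minimal sketch of the idea (the real helper also takes feature_store_id and has a Spark path via get_schema_from_spark_df, neither of which is reproduced here):

    # Sketch only: pandas dtypes flattened into schema dictionaries.
    import pandas as pd


    def get_schema_from_pandas_df_sketch(df: pd.DataFrame) -> list:
        return [{"name": name, "type": str(dtype)} for name, dtype in df.dtypes.items()]


    print(get_schema_from_pandas_df_sketch(pd.DataFrame({"id": [1], "label": ["a"]})))
    # [{'name': 'id', 'type': 'int64'}, {'name': 'label', 'type': 'object'}]
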
@@ -282,7 +282,7 @@ def get_schema_from_df(
 
 
 def get_input_features_from_df(
-    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
+    data_frame: Union[DataFrame, pd.DataFrame], feature_store_id: str
 ) -> List[FeatureDetail]:
     """
     Given a DataFrame, returns a list of FeatureDetail objects that represent its input features.
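
Per the docstring, this wraps the schema into FeatureDetail objects; the constructor below is a hypothetical stand-in, since the real class is only named in this diff:

    # Sketch only: a hypothetical FeatureDetail stand-in built from a schema.
    from dataclasses import dataclass
    from typing import List


    @dataclass
    class FeatureDetail:
        feature_name: str
        feature_type: str


    def get_input_features_sketch(schema: List[dict]) -> List[FeatureDetail]:
        # One FeatureDetail per schema entry, carrying name and type.
        return [FeatureDetail(c["name"], c["type"]) for c in schema]
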
@@ -299,7 +299,7 @@ def get_input_features_from_df(
 
 
 def convert_expectation_suite_to_expectation(
-    expectation_suite: ExpectationSuite, expectation_type: ExpectationType
+    expectation_suite: ExpectationSuite, expectation_type: ExpectationType
 ):
     """
     Convert an ExpectationSuite object to an Expectation object with detailed rule information.
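
An ExpectationSuite (Great Expectations, pre-1.0 API) carries a list of ExpectationConfiguration objects, each with a rule name and its arguments, so the per-rule detail the docstring mentions can be read off as below; the output shape is an assumption, not the library's Expectation object:

    # Sketch only: extracting rule details from a pre-1.0 Great
    # Expectations suite; the real conversion target may differ.
    from great_expectations.core import ExpectationSuite


    def suite_rules_sketch(expectation_suite: ExpectationSuite) -> list:
        # Each configuration carries the rule name and its keyword arguments.
        return [
            {"rule": cfg.expectation_type, "arguments": cfg.kwargs}
            for cfg in expectation_suite.expectations
        ]
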
@@ -358,7 +358,7 @@ def largest_matching_subset_of_primary_keys(left_feature_group, right_feature_gr
 
 
 def convert_pandas_datatype_with_schema(
-    raw_feature_details: List[dict], input_df: pd.DataFrame
+    raw_feature_details: List[dict], input_df: pd.DataFrame
 ) -> pd.DataFrame:
     feature_detail_map = {}
     columns_to_remove = []
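
The two locals suggest the conversion keys the feature details by column name and drops anything the schema does not cover. A sketch under exactly those assumptions (the detail-dict keys are invented):

    # Sketch only: assumed cast-and-drop behaviour for the pandas path.
    from typing import List

    import pandas as pd


    def convert_pandas_sketch(raw_feature_details: List[dict], input_df: pd.DataFrame) -> pd.DataFrame:
        feature_detail_map = {d["name"]: d for d in raw_feature_details}
        # Columns the schema does not mention are dropped.
        columns_to_remove = [c for c in input_df.columns if c not in feature_detail_map]
        df = input_df.drop(columns=columns_to_remove)
        # Remaining columns are cast to the schema's declared dtype.
        for name, detail in feature_detail_map.items():
            if name in df.columns and "pandas_type" in detail:
                df[name] = df[name].astype(detail["pandas_type"])
        return df
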
@@ -383,7 +383,7 @@ def convert_pandas_datatype_with_schema(
 
 
 def convert_spark_dataframe_with_schema(
-    raw_feature_details: List[dict], input_df: DataFrame
+    raw_feature_details: List[dict], input_df: DataFrame
 ) -> DataFrame:
     feature_detail_map = {}
     columns_to_remove = []
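
The Spark variant starts from the same two locals, so the analogous sketch uses Column.cast instead of astype (again, the detail keys are assumptions):

    # Sketch only: assumed Spark-side analogue using withColumn/cast.
    from typing import List

    from pyspark.sql import DataFrame
    from pyspark.sql.functions import col


    def convert_spark_sketch(raw_feature_details: List[dict], input_df: DataFrame) -> DataFrame:
        feature_detail_map = {d["name"]: d for d in raw_feature_details}
        columns_to_remove = [c for c in input_df.columns if c not in feature_detail_map]
        df = input_df.drop(*columns_to_remove)
        # Cast each remaining column to its declared Spark type.
        for name, detail in feature_detail_map.items():
            if "spark_type" in detail:
                df = df.withColumn(name, col(name).cast(detail["spark_type"]))
        return df
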
@@ -402,4 +402,4 @@ def convert_spark_dataframe_with_schema(
 def validate_input_feature_details(input_feature_details, data_frame):
     if isinstance(data_frame, pd.DataFrame):
         return convert_pandas_datatype_with_schema(input_feature_details, data_frame)
-    return convert_spark_dataframe_with_schema(input_feature_details, data_frame)
+    return convert_spark_dataframe_with_schema(input_feature_details, data_frame)
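
The dispatcher is small enough to restate whole; a runnable restatement with the two converters stubbed out, just to show which branch a pandas input takes:

    # Runnable restatement; the two converters are stand-in stubs for the
    # real functions defined earlier in this module.
    import pandas as pd


    def convert_pandas_datatype_with_schema(details, df):  # stub
        return df


    def convert_spark_dataframe_with_schema(details, df):  # stub
        return df


    def validate_input_feature_details(input_feature_details, data_frame):
        if isinstance(data_frame, pd.DataFrame):
            return convert_pandas_datatype_with_schema(input_feature_details, data_frame)
        return convert_spark_dataframe_with_schema(input_feature_details, data_frame)


    print(validate_input_feature_details([{"name": "id"}], pd.DataFrame({"id": [1]})))
    # A pandas input takes the pandas branch and comes back unchanged.
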