@@ -70,7 +70,7 @@
 
 # Training data for baseline model
 baseline_train_x = tf.constant(X_train)
-baseline_train_y = tf.constant(y_train)
+baseline_train_y = tf.constant(y_train, dtype=tf.int8)
 
 # Packaged for Cerebros (multimodal, takes inputs as a list)
 training_x = [baseline_train_x]
@@ -142,7 +142,10 @@ def from_config(cls, config):
 gpt_baseline_model.compile(
     optimizer=Adam(learning_rate=1e-4),  # Small LR since we're fine-tuning GPT
     loss='binary_crossentropy',
-    metrics=['accuracy', tf.keras.metrics.AUC(name='auc')]
+    # metrics=['accuracy', tf.keras.metrics.AUC(name='auc')]
+    metrics=[tf.keras.metrics.BinaryAccuracy(),
+             tf.keras.metrics.Precision(),
+             tf.keras.metrics.Recall()]
 )
 
 gpt_t0 = time.time()
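Note: the baseline's metrics now mirror the set used in the Cerebros search further down (BinaryAccuracy, Precision, Recall), all of which threshold sigmoid outputs at 0.5 by default. A short, self-contained sketch with hypothetical predictions shows how each metric scores the same batch:

import tensorflow as tf

y_true = [0., 1., 1., 0.]      # hypothetical binary labels
y_pred = [0.1, 0.8, 0.4, 0.6]  # hypothetical sigmoid outputs

for metric in [tf.keras.metrics.BinaryAccuracy(),
               tf.keras.metrics.Precision(),
               tf.keras.metrics.Recall()]:
    metric.update_state(y_true, y_pred)
    print(metric.name, metric.result().numpy())
# binary_accuracy 0.5  (2 of 4 predictions fall on the correct side of 0.5)
# precision 0.5        (1 true positive out of 2 predicted positives)
# recall 0.5           (1 true positive out of 2 actual positives)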
@@ -303,9 +306,9 @@ def from_config(cls, config):
     num_lateral_connection_tries_per_unit=num_lateral_connection_tries_per_unit,
     learning_rate=learning_rate,
     loss=tf.keras.losses.CategoricalHinge(),
-    metrics=[tf.keras.metrics.Accuracy(),
-             tf.keras.metrics.Precision(),
-             tf.keras.metrics.Recall()],
+    metrics=[tf.keras.metrics.BinaryAccuracy(),
+             tf.keras.metrics.Precision(),
+             tf.keras.metrics.Recall()],
     epochs=epochs,
     project_name=f"{PROJECT_NAME}_meta_{meta_trial_number}",
     model_graphs='model_graphs',
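Note: the last hunk swaps tf.keras.metrics.Accuracy for tf.keras.metrics.BinaryAccuracy in the Cerebros search. The distinction matters for continuous model outputs: Accuracy counts only exact matches between predictions and labels, while BinaryAccuracy thresholds predictions at 0.5 before comparing, which is the appropriate behavior for a binary classifier. A minimal sketch with hypothetical values:

import tensorflow as tf

y_true = [0., 1., 1., 0.]
y_pred = [0.1, 0.8, 0.6, 0.4]  # hypothetical probability-like outputs

exact = tf.keras.metrics.Accuracy()              # exact-match comparison
thresholded = tf.keras.metrics.BinaryAccuracy()  # thresholds predictions at 0.5

exact.update_state(y_true, y_pred)
thresholded.update_state(y_true, y_pred)

print(exact.result().numpy())        # 0.0, no continuous prediction exactly equals its label
print(thresholded.result().numpy())  # 1.0, every prediction lands on the correct side of 0.5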