
Commit be71e84

[CLN] A bit of cleaning
1 parent 225a78f commit be71e84

2 files changed: +5 −12 lines changed


gempy_engine/modules/activator/activator_interface.py

Lines changed: 0 additions & 12 deletions
@@ -17,18 +17,6 @@ def activate_formation_block(exported_fields: ExportedFields, ids: np.ndarray,
     if LEGACY := False and not sigmoid_slope_negative:  # * Here we branch to the experimental activation function with hard sigmoid
         sigm = activate_formation_block_from_args(Z_x, ids, scalar_value_at_sp, sigmoid_slope)
     else:
-        # from .torch_activation import activate_formation_block_from_args_hard_sigmoid
-        # sigm = activate_formation_block_from_args_hard_sigmoid(Z_x, ids, scalar_value_at_sp)
-
-        # assume scalar_value_at_sp is shape (K-1,)
-        # bt.t.array
-        # edges = bt.t.concatenate([
-        #     bt.t.array([0.], dtype=BackendTensor.dtype_obj),
-        #     scalar_value_at_sp,
-        #     bt.t.array([float('inf')], dtype=BackendTensor.dtype_obj)
-        # ])  # now length K+1
-        # ids = torch.arange(K, dtype=scalar_value_at_sp.dtype, device=scalar_value_at_sp.device)
-
         sigm = soft_segment_unbounded(
             Z=Z_x,
             edges=scalar_value_at_sp,
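The commented-out scaffold removed above was padding `scalar_value_at_sp` into an `edges` array by hand; the live code now delegates that to `soft_segment_unbounded`. Its implementation is not part of this commit, so the following is only an illustrative NumPy sketch of what an unbounded soft segmentation can look like: the K−1 interface scalar values are padded so the outer segments are unbounded, each point's membership in each unit is the difference of two sigmoids, and the unit ids are blended by those weights. The function name, the `slope` parameter, the −inf/+inf padding (the deleted comments padded with 0. and inf) and the id blending are assumptions for illustration, not gempy_engine's API.

import numpy as np

def soft_segment_unbounded_sketch(Z, edges, ids, slope=50.0):
    # Pad the K-1 interface values so that the first and last segments
    # have no lower/upper bound (hypothetical choice for "unbounded").
    edges = np.concatenate(([-np.inf], np.asarray(edges, dtype=float), [np.inf]))  # length K+1
    Z = np.asarray(Z, dtype=float).reshape(-1, 1)                                  # (N, 1)
    # Soft membership of each point in each of the K segments: the difference of
    # two sigmoids centred on the segment's lower and upper edge.  The argument
    # is clipped so the +/-inf padding stays numerically well behaved.
    arg_lower = np.clip(slope * (Z - edges[:-1]), -500.0, 500.0)
    arg_upper = np.clip(slope * (Z - edges[1:]), -500.0, 500.0)
    weights = 1.0 / (1.0 + np.exp(-arg_lower)) - 1.0 / (1.0 + np.exp(-arg_upper))  # (N, K)
    # Weighted combination of the unit ids gives a smooth, differentiable
    # stand-in for the hard segmentation.
    return weights @ np.asarray(ids, dtype=float)

# Example: two interfaces at scalar values 1.0 and 2.5 separate three units with ids 1, 2, 3.
# soft_segment_unbounded_sketch(Z=[0.2, 1.7, 9.0], edges=[1.0, 2.5], ids=[1, 2, 3]) ~ [1., 2., 3.]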

gempy_engine/modules/activator/torch_activation.py

Lines changed: 5 additions & 0 deletions
@@ -1,3 +1,5 @@
+import warnings
+
 import torch
 from ...core.backend_tensor import BackendTensor as bt, BackendTensor

@@ -14,6 +16,9 @@


 def activate_formation_block_from_args_hard_sigmoid(Z_x, ids, scalar_value_at_sp):
+
+    warnings.warn(DeprecationWarning("This function is deprecated. Use activate_formation_block instead."))
+
     element_0 = bt.t.array([0], dtype=BackendTensor.dtype_obj)

     min_Z_x = BackendTensor.t.min(Z_x, axis=0).reshape(-1)  # ? Is this as good as it gets?
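The only change here is importing warnings and emitting a DeprecationWarning at the top of the old hard-sigmoid entry point. A minimal, self-contained illustration of how callers will see (or silence) that warning with the standard library; nothing below is gempy_engine-specific beyond the message text.

import warnings

# Reproduce the pattern added in this commit: warning with a DeprecationWarning
# *instance*, so the instance's class becomes the warning category.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")  # DeprecationWarning is often hidden by default
    warnings.warn(DeprecationWarning("This function is deprecated. Use activate_formation_block instead."))

assert issubclass(caught[-1].category, DeprecationWarning)

# Callers that still depend on the old entry point can silence just this warning:
warnings.filterwarnings(
    "ignore",
    message="This function is deprecated",
    category=DeprecationWarning,
)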

0 commit comments
