
Commit e0b5d8d

[DOC] Getting the examples running

Parent: 7c77dff

File tree (4 files changed: +118 -16 lines)

- docs/dev_logs/2025_May.md
- examples/examples/_aux_func_gravity_inversion.py
- examples/examples/_aux_func_gravity_inversion_II.py
- examples/examples/gravity_inversion.py

docs/dev_logs/2025_May.md

8 additions, 1 deletion

@@ -44,4 +44,11 @@

 ### Likelihood functions
 - ? Should I start to create a catalog of likelihood functions that goes with gempy?
-  - This would be useful for visualizing whatever the likelihood function represents independently of the probabilistic model.
+  - This would be useful for visualizing whatever the likelihood function represents independently of the probabilistic model.
+
+### What would I want to have to make the StonePark example as frictionless as possible?
+- A way to save the model with the correct **nugget effects** already applied, instead of having to "load nuggets"
+- An easy pykeops setup
+- Likelihood functions that are plug and play (sketched below)
+- Some sort of pre-analysis of whether the model looks good, maybe with metrics on how long the model takes to run
+- Some sort of easy setup for having a reduced number of priors
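To make the "plug and play" idea concrete, here is a minimal sketch of what one catalog entry could look like: a likelihood object that can be conditioned inside a Pyro model or visualized on its own. The `GravityLikelihood` name and its interface are hypothetical, not gempy API.

```python
# Hypothetical sketch of a plug-and-play likelihood "catalog entry".
# Nothing here is gempy API; it only illustrates that a likelihood can be
# defined once, then either plugged into a Pyro model or visualized alone.
import pyro
import pyro.distributions as dist
import torch


class GravityLikelihood:
    """A Normal likelihood over simulated gravity values."""

    def __init__(self, noise_std: float):
        self.noise_std = noise_std

    def __call__(self, simulated: torch.Tensor, observed: torch.Tensor):
        # Inside a Pyro model: condition on the observed gravity
        return pyro.sample("obs", dist.Normal(simulated, self.noise_std), obs=observed)

    def visualize(self, simulated: torch.Tensor):
        # Outside any probabilistic model: evaluate the density on a grid
        grid = torch.linspace(float(simulated.min()) - 3.0, float(simulated.max()) + 3.0, 200)
        return grid, dist.Normal(simulated.mean(), self.noise_std).log_prob(grid).exp()
```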

examples/examples/_aux_func_gravity_inversion.py

8 additions, 8 deletions

@@ -1,6 +1,13 @@
 import numpy as np
 from sklearn.linear_model import LinearRegression

+import numpy as np
+
+import gempy as gp
+import xarray as xr
+from vector_geology.model_building_functions import optimize_nuggets_for_group
+
+
 def calculate_scale_shift(a: np.ndarray, b: np.ndarray) -> tuple:
     # Reshape arrays for sklearn
     a_reshaped = a.reshape(-1, 1)
@@ -22,17 +29,10 @@ def gaussian_kernel(locations, length_scale, variance):
     locations = torch.tensor(locations.values)
     distance_squared = torch.cdist(locations, locations, p=2).pow(2)
     # Compute the covariance matrix using the Gaussian kernel
-    covariance_matrix = variance * torch.exp(-0.5 * distance_squared / length_scale**2)
+    covariance_matrix = variance * torch.exp(-0.5 * distance_squared / length_scale ** 2)
     return covariance_matrix


-import numpy as np
-
-import gempy as gp
-import xarray as xr
-from vector_geology.model_building_functions import optimize_nuggets_for_group
-
-
 def initialize_geo_model(structural_elements: list[gp.data.StructuralElement], extent: list[float],
                          topography: xr.Dataset, load_nuggets: bool = False
                          ) -> gp.data.GeoModel:
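For context, `gaussian_kernel` only needs an object whose `.values` attribute holds the device coordinates. A self-contained sketch of the computation it performs, using made-up coordinates (the column names and parameter values below are assumptions for illustration):

```python
# Minimal sketch of the RBF covariance gaussian_kernel computes.
import pandas as pd
import torch

# Assumed input: an (n, 2) table of gravity-device coordinates
locations = pd.DataFrame({"X": [0.0, 100.0, 200.0], "Y": [0.0, 50.0, 100.0]})
length_scale = torch.tensor(100.0)
variance = torch.tensor(25.0)

# Squared pairwise distances fed through variance * exp(-0.5 * d^2 / l^2)
loc_t = torch.tensor(locations.values)
distance_squared = torch.cdist(loc_t, loc_t, p=2).pow(2)
covariance_matrix = variance * torch.exp(-0.5 * distance_squared / length_scale ** 2)
print(covariance_matrix.shape)  # torch.Size([3, 3])
```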
examples/examples/_aux_func_gravity_inversion_II.py

94 additions, 0 deletions (new file)

@@ -0,0 +1,94 @@
+import os
+import xarray as xr
+import numpy as np
+import gempy as gp
+import subsurface
+
+
+def process_file(filename, global_extent, color_generator: gp.data.ColorsGenerator):
+    dataset: xr.Dataset = xr.open_dataset(filename)
+    file_extent = _calculate_extent(dataset)
+    global_extent = _update_global_extent(global_extent, file_extent)
+
+    base, ext = os.path.splitext(filename)
+    base = os.path.basename(base)
+    structural_element = _extract_surface_points_and_orientations(dataset, base, color_generator)
+
+    return structural_element, global_extent
+
+
+# Calculate the extent (ROI) for a single dataset
+def _calculate_extent(dataset):
+    x_coord, y_coord, z_coord = dataset.vertex[:, 0], dataset.vertex[:, 1], dataset.vertex[:, 2]
+    return [x_coord.min().values, x_coord.max().values,
+            y_coord.min().values, y_coord.max().values,
+            z_coord.min().values, z_coord.max().values]
+
+
+# Extract surface points and orientations from the dataset
+def _extract_surface_points_and_orientations(dataset, name, color_generator) -> gp.data.StructuralElement:
+    name = name.replace("Stonepark_", "")
+
+    # Triangulate the surface mesh
+    unstruct = subsurface.UnstructuredData(dataset)
+    ts = subsurface.TriSurf(mesh=unstruct)
+    triangulated_mesh = subsurface.visualization.to_pyvista_mesh(ts)
+
+    # Decimate the mesh and compute cell normals
+    decimated_mesh = triangulated_mesh.decimate_pro(0.95)
+    normals = decimated_mesh.compute_normals(point_normals=False, cell_normals=True, consistent_normals=True)
+    normals_array = np.array(normals.cell_data["Normals"])
+
+    # Filter points: keep cells whose normal points upward and whose
+    # z-component dominates the x and y components
+    vertex_sp = normals.cell_centers().points
+    vertex_grads = normals_array
+    positive_z = vertex_grads[:, 2] > 0
+    largest_component_z = np.all(vertex_grads[:, 2:3] >= np.abs(vertex_grads[:, :2]), axis=1)
+    filter_mask = np.logical_and(positive_z, largest_component_z)
+
+    surface_points_xyz = vertex_sp[filter_mask]
+    orientations_gxyz = vertex_grads[filter_mask]
+    nuggets = np.ones(len(surface_points_xyz)) * 0.000001
+
+    # Create SurfacePointsTable and OrientationsTable, subsampling every 10th point
+    every = 10
+    surface_points = gp.data.SurfacePointsTable.from_arrays(
+        x=surface_points_xyz[::every, 0],
+        y=surface_points_xyz[::every, 1],
+        z=surface_points_xyz[::every, 2],
+        names=name,
+        nugget=nuggets[::every]
+    )
+
+    orientations = gp.data.OrientationsTable.from_arrays(
+        x=surface_points_xyz[::every, 0],
+        y=surface_points_xyz[::every, 1],
+        z=surface_points_xyz[::every, 2],
+        G_x=orientations_gxyz[::every, 0],
+        G_y=orientations_gxyz[::every, 1],
+        G_z=orientations_gxyz[::every, 2],
+        nugget=4,
+        names=name
+    )
+
+    structural_element = gp.data.StructuralElement(
+        name=name,
+        surface_points=surface_points,
+        orientations=orientations,
+        color=next(color_generator)
+    )
+
+    return structural_element
+
+
+# Update the global extent based on each file's extent
+def _update_global_extent(global_extent, file_extent):
+    if not global_extent:
+        return file_extent
+    else:
+        return [min(global_extent[0], file_extent[0]), max(global_extent[1], file_extent[1]),
+                min(global_extent[2], file_extent[2]), max(global_extent[3], file_extent[3]),
+                min(global_extent[4], file_extent[4]), max(global_extent[5], file_extent[5])]
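A typical driver loop for `process_file` might look like the sketch below: iterate over the surface `.nc` files, accumulating structural elements and growing the global extent. The directory path and file-naming convention are assumptions, not part of the commit.

```python
# Hypothetical usage sketch of process_file; paths and file names are assumed.
import os
import gempy as gp
from examples.examples._aux_func_gravity_inversion_II import process_file

path = "data/stonepark"  # assumed directory holding the .nc surface meshes
structural_elements = []
global_extent = None  # _update_global_extent treats a falsy value as "first file"
color_generator = gp.data.ColorsGenerator()

for filename in sorted(os.listdir(path)):
    if filename.endswith(".nc") and filename != "Topography.nc":
        element, global_extent = process_file(
            os.path.join(path, filename), global_extent, color_generator
        )
        structural_elements.append(element)
```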

examples/examples/gravity_inversion.py

8 additions, 7 deletions

@@ -1,4 +1,3 @@
-
 """
 Probabilistic Inversion Example: Geological Model
 --------------------------------------------------
@@ -22,12 +21,12 @@
 import gempy as gp
 import gempy_engine
 import gempy_viewer as gpv
+
+from examples.examples._aux_func_gravity_inversion import calculate_scale_shift, gaussian_kernel, initialize_geo_model, setup_geophysics
+from examples.examples._aux_func_gravity_inversion_II import process_file
+
 from gempy_engine.core.backend_tensor import BackendTensor
 from gempy_probability.modules.plot.plot_posterior import default_red, default_blue
-from vector_geology.bayesian_helpers import calculate_scale_shift, gaussian_kernel
-from vector_geology.model_1_builder import initialize_geo_model, setup_geophysics
-from vector_geology.omf_to_gempy import process_file
-

 # %%
 # Config
@@ -65,7 +64,7 @@
     structural_elements=structural_elements,
     extent=(np.array(global_extent)),
     topography=(xr.open_dataset(os.path.join(path, "Topography.nc"))),
-    load_nuggets=True
+    load_nuggets=False
 )

 # %%
@@ -138,6 +137,7 @@
     densities=prior_tensor,
 )

+
 # %%
 # Define the Pyro probabilistic model for inversion
 def model(y_obs_list, interpolation_input):
@@ -168,6 +168,7 @@ def model(y_obs_list, interpolation_input):
         obs=y_obs_list
     )

+
 # %%
 # Prepare observed data for Pyro model and optimize mesh settings
 y_obs_list = torch.tensor(adapted_observed_grav.values).view(1, 17)
@@ -216,7 +217,7 @@ def model(y_obs_list, interpolation_input):
 )
 plt.show()

-#%%
+# %%
 az.plot_density(
     data=[data.posterior_predictive, data.prior_predictive],
     shade=.9,
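For readers who have not opened the full script, the `model` function referenced in the hunks above follows the standard Pyro pattern: sample priors, run a forward simulation, condition on observations. A stripped-down sketch, where the prior name, distribution parameters, and the stand-in forward operator are assumptions rather than the script's actual code:

```python
# Illustrative sketch only: the real script pushes the sampled parameters
# through the gempy engine; a dummy forward operator stands in for it here.
import pyro
import pyro.distributions as dist
import torch


def model(y_obs_list: torch.Tensor):
    # Prior over a single model parameter (e.g. a layer density); name assumed
    density = pyro.sample("density", dist.Normal(2.6, 0.3))
    # Stand-in forward operator producing simulated gravity at the 17 devices
    simulated_grav = density * torch.ones(1, 17)
    # Condition the simulated response on the observed gravity
    pyro.sample("obs", dist.Normal(simulated_grav, 0.1), obs=y_obs_list)
```

Inference would then proceed as in the script, e.g. by handing this `model` to a Pyro MCMC kernel together with the `y_obs_list` tensor shaped `(1, 17)`.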
