Commit

Update with spremberg

Leguark committed Jun 13, 2024
1 parent 75f4f09 commit 11140e7

Showing 124 changed files with 4,825 additions and 1,457 deletions.
2 changes: 2 additions & 0 deletions .idea/.gitignore

Some generated files are not rendered by default.

@@ -26,7 +26,25 @@
},
"outputs": [],
"source": [
"import time\nimport os\nimport numpy as np\nimport xarray as xr\nimport pandas as pd\nimport torch\nimport pyro\nimport pyro.distributions as dist\nimport gempy as gp\nimport gempy_viewer as gpv\nfrom matplotlib import pyplot as plt\nfrom dotenv import dotenv_values\nfrom pyro.infer import MCMC, NUTS, Predictive\nimport arviz as az\n\nfrom gempy_probability.plot_posterior import default_red, default_blue\nfrom vector_geology.bayesian_helpers import calculate_scale_shift, gaussian_kernel\nfrom vector_geology.model_1_builder import initialize_geo_model, setup_geophysics\nfrom vector_geology.omf_to_gempy import process_file\nfrom vector_geology.utils import extend_box\nimport gempy_engine\nfrom gempy_engine.core.backend_tensor import BackendTensor"
"import os\nimport time\n\nimport arviz as az\nimport numpy as np\nimport pyro\nimport pyro.distributions as dist\nimport torch\nimport xarray as xr\nfrom dotenv import dotenv_values\nfrom matplotlib import pyplot as plt\nfrom pyro.infer import MCMC, NUTS, Predictive\n\nimport gempy as gp\nimport gempy_engine\nimport gempy_viewer as gpv\nfrom gempy_engine.core.backend_tensor import BackendTensor\nfrom gempy_probability.plot_posterior import default_red, default_blue\nfrom vector_geology.bayesian_helpers import calculate_scale_shift, gaussian_kernel\nfrom vector_geology.model_1_builder import initialize_geo_model, setup_geophysics\nfrom vector_geology.omf_to_gempy import process_file"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Config\n\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"seed = 123456\ntorch.manual_seed(seed)\npyro.set_rng_seed(seed)"
]
},
{
@@ -296,7 +314,7 @@
},
"outputs": [],
"source": [
"y_obs_list = torch.tensor(adapted_observed_grav.values).view(1, 17)\ninterpolation_options.mesh_extraction = False\ninterpolation_options.number_octree_levels = 1\ngeo_model.grid.set_inactive(\"topography\")\ngeo_model.grid.set_inactive(\"regular\")"
"y_obs_list = torch.tensor(adapted_observed_grav.values).view(1, 17)\ninterpolation_options.mesh_extraction = False\ninterpolation_options.number_octree_levels = 1\ngeo_model.grid.active_grids ^= gp.data.Grid.GridTypes.TOPOGRAPHY\ngeo_model.grid.active_grids ^= gp.data.Grid.GridTypes.DENSE"
]
},
{
@@ -314,7 +332,7 @@
},
"outputs": [],
"source": [
"if True:\n prior = Predictive(model, num_samples=50)(y_obs_list, interpolation_input=geo_model.interpolation_input)\n data = az.from_pyro(prior=prior)\n az.plot_trace(data.prior)\n plt.show()"
"raise NotImplementedError(\"From this point we need to optimize the code again.\")\nif True:\n prior = Predictive(model, num_samples=50)(y_obs_list, interpolation_input=geo_model.interpolation_input_copy)\n data = az.from_pyro(prior=prior)\n az.plot_trace(data.prior)\n plt.show()"
]
},
{
@@ -332,7 +350,7 @@
},
"outputs": [],
"source": [
"pyro.primitives.enable_validation(is_validate=True)\nnuts_kernel = NUTS(model)\nmcmc = MCMC(nuts_kernel, num_samples=1000, warmup_steps=300)\nmcmc.run(y_obs_list, interpolation_input=geo_model.interpolation_input)"
"pyro.primitives.enable_validation(is_validate=True)\nnuts_kernel = NUTS(model)\nmcmc = MCMC(nuts_kernel, num_samples=1000, warmup_steps=300)\nmcmc.run(y_obs_list, interpolation_input=geo_model.interpolation_input_copy)"
]
},
{
@@ -350,7 +368,7 @@
},
"outputs": [],
"source": [
"posterior_samples = mcmc.get_samples(50)\nposterior_predictive = Predictive(model, posterior_samples)(y_obs_list, interpolation_input=geo_model.interpolation_input)\ndata = az.from_pyro(\n posterior=mcmc,\n prior=prior,\n posterior_predictive=posterior_predictive\n)\naz.plot_trace(data)\nplt.show()"
"posterior_samples = mcmc.get_samples(50)\nposterior_predictive = Predictive(model, posterior_samples)(y_obs_list, interpolation_input=geo_model.interpolation_input_copy)\ndata = az.from_pyro(\n posterior=mcmc,\n prior=prior,\n posterior_predictive=posterior_predictive\n)\naz.plot_trace(data)\nplt.show()"
]
},
{
@@ -14,10 +14,12 @@
import pyvista
import subsurface
from subsurface import TriSurf
from subsurface.visualization import to_pyvista_mesh, pv_plot
from subsurface.writer import base_structs_to_binary_file
from dotenv import dotenv_values

from subsurface.modules.visualization import to_pyvista_mesh, pv_plot
from subsurface.modules.writer import base_structs_to_binary_file


# %%
# Load OMF Project
# ----------------
@@ -0,0 +1,222 @@
"""
Construct Spremberg: Importing borehole data
--------------------------------------------
This example demonstrates how to construct a 3D geological model of the Spremberg deposit using GemPy.
It leverages custom APIs to streamline the modeling process.
"""
# %% [markdown]
# Import the necessary libraries for geological modeling and visualization.
# sphinx_gallery_thumbnail_number = -1
import os
import pandas as pd
import pyvista

import gempy as gp
import gempy_viewer as gpv
from subsurface.core.geological_formats.boreholes.boreholes import BoreholeSet, MergeOptions
from subsurface.core.geological_formats.boreholes.collars import Collars
from subsurface.core.geological_formats.boreholes.survey import Survey
from subsurface.core.reader_helpers.readers_data import GenericReaderFilesHelper
from subsurface.modules.reader.wells.read_borehole_interface import read_lith, read_survey, read_collar
from subsurface.modules.visualization import to_pyvista_line, to_pyvista_points, init_plotter

# %% [markdown]
# Initialize the reader for the lithological data. Specify the file path and column mappings.
import dotenv
dotenv.load_dotenv()
reader: GenericReaderFilesHelper = GenericReaderFilesHelper(
file_or_buffer=os.getenv("PATH_TO_SPREMBERG_STRATIGRAPHY"),
columns_map={
'hole_id' : 'id',
'depth_from': 'top',
'depth_to' : 'base',
'lit_code' : 'component lith'
}
)

# Read the lithological data into a DataFrame.
lith: pd.DataFrame = read_lith(reader)
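
# %% [markdown]
# Optional sanity check: inspect the parsed intervals before moving on. This is
# only an illustrative sketch; the exact columns depend on the `columns_map`
# above and on the subsurface version in use.
print(lith.head())
print(lith.columns)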

# %% [markdown]
# Initialize the reader for the survey data. Specify the file path and column mappings.
reader: GenericReaderFilesHelper = GenericReaderFilesHelper(
file_or_buffer=os.getenv("PATH_TO_SPREMBERG_SURVEY"),
columns_map={
'depth' : 'md',
'dip' : 'dip',
'azimuth': 'azi'
},
)

# Read the survey data into a DataFrame.
df = read_survey(reader)

# %% [markdown]
# Create a Survey object from the DataFrame and update it with lithological data.
survey: Survey = Survey.from_df(df)
survey.update_survey_with_lith(lith)
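
# %% [markdown]
# A quick look at the merged object can help confirm that the lithology was
# attached to the survey (illustrative only; the printed summary depends on the
# subsurface version).
print(survey)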

# %% [markdown]
# Initialize the reader for the collar data. Specify the file path, header, and column mappings.
reader_collar: GenericReaderFilesHelper = GenericReaderFilesHelper(
file_or_buffer=os.getenv("PATH_TO_SPREMBERG_COLLAR"),
header=0,
usecols=[0, 1, 2, 4],
columns_map={
"hole_id" : "id",
"X_GK5_incl_inserted": "x",
"Y__incl_inserted" : "y",
"Z_GK" : "z"
}
)

# Read the collar data into a DataFrame and create a Collars object.
df_collar = read_collar(reader_collar)
collar = Collars.from_df(df_collar)

# %% [markdown]
# Combine the collar and survey data into a BoreholeSet.
borehole_set = BoreholeSet(
collars=collar,
survey=survey,
merge_option=MergeOptions.INTERSECT
)
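
# %% [markdown]
# With MergeOptions.INTERSECT the set is built from the holes common to the
# collar and survey tables (a hedged reading of the option name; see the
# subsurface documentation for the authoritative behaviour). Listing the collar
# ids is a cheap way to check which holes made it in:
print(borehole_set.collars.ids)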

# %% [markdown]
# Visualize the borehole trajectories and collars using PyVista.
well_mesh = to_pyvista_line(
line_set=borehole_set.combined_trajectory,
active_scalar="lith_ids",
radius=10
)

collars = to_pyvista_points(
borehole_set.collars.collar_loc,
)

# Initialize the PyVista plotter.
pyvista_plotter = init_plotter()

# Define the units limit for thresholding the well mesh.
units_limit = [0, 20]
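
# Passing a (min, max) pair to PyVista's `threshold` filter keeps only the cells
# whose active scalar ("lith_ids") falls inside that range, so lithological units
# outside the limit are hidden from the plot.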

# Add the well mesh and collars to the plotter and display.
pyvista_plotter.add_mesh(
well_mesh.threshold(units_limit),
cmap="tab20c",
clim=units_limit
)

pyvista_plotter.add_mesh(
collars,
point_size=10,
render_points_as_spheres=True
)

pyvista_plotter.show()

# %% [markdown]
# Create structural elements from the borehole set for different lithological units.
elements: list[gp.data.StructuralElement] = gp.structural_elements_from_borehole_set(
borehole_set=borehole_set,
elements_dict={
"Buntsandstein" : {
"id" : 53_300,
"color": "#983999"
},
"Werra-Anhydrit" : {
"id" : 61_730,
"color": "#00923f"
},
"Kupfershiefer" : {
"id" : 61_760,
"color": "#da251d"
},
"Zechsteinkonglomerat": {
"id" : 61_770,
"color": "#f8c300"
},
"Rotliegend" : {
"id" : 62_000,
"color": "#bb825b"
}
}
)
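
# %% [markdown]
# Each returned element should carry the name, id, and color declared above. The
# loop below is an illustrative check and assumes that
# `gp.data.StructuralElement` exposes `name` and `color` attributes.
for element in elements:
    print(element.name, element.color)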

# %% [markdown]
# Group the structural elements into a StructuralGroup and create a StructuralFrame.
group = gp.data.StructuralGroup(
name="Stratigraphic Pile",
elements=elements,
structural_relation=gp.data.StackRelationType.ERODE
)
structural_frame = gp.data.StructuralFrame(
structural_groups=[group],
color_gen=gp.data.ColorsGenerator()
)

# %% [markdown]
# Determine the extent of the geological model from the coordinates of the surface points.
all_surface_points_coords: gp.data.SurfacePointsTable = structural_frame.surface_points_copy
extent_from_data = all_surface_points_coords.xyz.min(axis=0), all_surface_points_coords.xyz.max(axis=0)
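
# %% [markdown]
# `extent_from_data` is a (min_xyz, max_xyz) pair, while GemPy expects the grid
# extent as [x_min, x_max, y_min, y_max, z_min, z_max]. The explicit unpacking
# below is a sketch equivalent to the inline indexing used in the GeoModel
# definition that follows.
xyz_min, xyz_max = extent_from_data
extent_for_gempy = [xyz_min[0], xyz_max[0], xyz_min[1], xyz_max[1], xyz_min[2], xyz_max[2]]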

# %% [markdown]
# Create a GeoModel with the specified extent, grid resolution, and interpolation options.
geo_model = gp.data.GeoModel(
name="Stratigraphic Pile",
structural_frame=structural_frame,
grid=gp.data.Grid(
extent=[extent_from_data[0][0], extent_from_data[1][0], extent_from_data[0][1], extent_from_data[1][1], extent_from_data[0][2], extent_from_data[1][2]],
resolution=(50, 50, 50)
),
interpolation_options=gp.data.InterpolationOptions(
range=5,
c_o=10,
mesh_extraction=True,
number_octree_levels=3,
),
)

# %% [markdown]
# Visualize the 3D geological model using GemPy's plot_3d function.
gempy_plot = gpv.plot_3d(
model=geo_model,
kwargs_pyvista_bounds={
'show_xlabels': False,
'show_ylabels': False,
},
show=True,
image=True
)

# %% [markdown]
# Combine all visual elements and display them together.
sp_mesh: pyvista.PolyData = gempy_plot.surface_points_mesh

pyvista_plotter = init_plotter()
pyvista_plotter.show_bounds(all_edges=True)

pyvista_plotter.add_mesh(
well_mesh.threshold(units_limit),
cmap="tab20c",
clim=units_limit
)

pyvista_plotter.add_mesh(
collars,
point_size=10,
render_points_as_spheres=True
)

pyvista_plotter.add_point_labels(
points=collar.collar_loc.points,
labels=collar.ids,
point_size=10,
shape_opacity=0.5,
font_size=12,
bold=True
)
pyvista_plotter.add_actor(gempy_plot.surface_points_actor)

pyvista_plotter.show()
@@ -33,7 +33,7 @@
},
"outputs": [],
"source": [
"import pandas as pd\nimport pyvista\nimport xarray\n\nimport subsurface\nfrom subsurface import TriSurf, LineSet\nfrom subsurface.visualization import to_pyvista_mesh, to_pyvista_line, init_plotter\nfrom vector_geology.utils import load_omf"
"import pandas as pd\nimport pyvista\nimport xarray\n\nimport subsurface\nfrom subsurface import TriSurf, LineSet\nfrom subsurface.modules.visualization import to_pyvista_mesh, to_pyvista_line, init_plotter\nfrom vector_geology.utils import load_omf"
]
},
{