Replacing RunSummary and KernelDataset (KDS) with the mtpy versions
kujaku11 committed Aug 18, 2024
1 parent 17666cd commit c11dc46
Showing 14 changed files with 93 additions and 48 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/tests.yml
@@ -36,7 +36,8 @@ jobs:
conda install -c conda-forge pytest pytest-cov certifi">=2017.4.17" pandoc
pip install -r requirements-dev.txt
pip install git+https://github.com/kujaku11/mt_metadata.git@main
pip install git+https://github.com/kujaku11/mth5.git@add_aurora_tools
pip install git+https://github.com/kujaku11/mth5.git@add_aurora_tools
pip install git+https://github.com/MTgeophysics/mtpy-v2.git@aurora
- name: Install Our Package
run: |
2 changes: 1 addition & 1 deletion data/synthetic/emtf_results/test1.zss
@@ -2,7 +2,7 @@
********** WITH FULL ERROR COVARINCE**********
Robust Single station
station :test1
coordinate 1007.996 0.000 declination 0.00
coordinate 0 0.000 declination 0.00
number of channels 5 number of frequencies 25
orientations and tilts of each channel
1 0.00 0.00 tes Hx
6 changes: 4 additions & 2 deletions docs/examples/operate_aurora.ipynb
@@ -65,8 +65,10 @@
"from aurora.config import BANDS_DEFAULT_FILE\n",
"from aurora.config.config_creator import ConfigCreator\n",
"from aurora.pipelines.process_mth5 import process_mth5\n",
"from aurora.transfer_function.kernel_dataset import KernelDataset\n",
"from aurora.pipelines.run_summary import RunSummary\n",
"\n",
"from mtpy.processing.run_summary import RunSummary\n",
"from mtpy.processing.kernel_dataset import KernelDataset\n",
"\n",
"\n",
"warnings.filterwarnings('ignore')"
]
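
The cell above is the core of the migration: RunSummary and KernelDataset now come from mtpy.processing instead of aurora. Below is a minimal single-station sketch of the updated workflow; it assumes the mtpy classes keep the aurora-era interface (from_mth5s, from_run_summary) and that ConfigCreator.create_from_kernel_dataset accepts the kernel dataset plus the default band file. The MTH5 path is a placeholder and "test1" is the synthetic station id used throughout these tests.

```python
from aurora.config import BANDS_DEFAULT_FILE
from aurora.config.config_creator import ConfigCreator
from aurora.pipelines.process_mth5 import process_mth5

# New home of the bookkeeping classes (mtpy-v2, aurora branch)
from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

mth5_path = "test1.h5"  # placeholder path to an existing MTH5 file

# Summarize the runs available in the file(s)
run_summary = RunSummary()
run_summary.from_mth5s([mth5_path])  # assumed to match the aurora-era interface

# Select the station to process (single station, no remote reference)
kernel_dataset = KernelDataset()
kernel_dataset.from_run_summary(run_summary, "test1")

# Build a processing configuration and run the pipeline
cc = ConfigCreator()
config = cc.create_from_kernel_dataset(
    kernel_dataset, emtf_band_file=BANDS_DEFAULT_FILE
)
tf = process_mth5(config, kernel_dataset)
```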
5 changes: 3 additions & 2 deletions docs/tutorials/processing_configuration.ipynb
@@ -152,9 +152,10 @@
"metadata": {},
"outputs": [],
"source": [
"from aurora.pipelines.run_summary import RunSummary\n",
"from aurora.test_utils.synthetic.paths import SyntheticTestPaths\n",
"from aurora.transfer_function.kernel_dataset import KernelDataset\n",
"\n",
"from mtpy.processing.run_summary import RunSummary\n",
"from mtpy.processing.kernel_dataset import KernelDataset\n",
"\n",
"synthetic_test_paths = SyntheticTestPaths()\n",
"MTH5_PATH = synthetic_test_paths.mth5_path"
5 changes: 3 additions & 2 deletions docs/tutorials/synthetic_data_processing.ipynb
@@ -45,10 +45,11 @@
"\n",
"from aurora.config.config_creator import ConfigCreator\n",
"from aurora.pipelines.process_mth5 import process_mth5\n",
"from aurora.pipelines.run_summary import RunSummary\n",
"from aurora.test_utils.synthetic.make_mth5_from_asc import create_test12rr_h5\n",
"from aurora.test_utils.synthetic.paths import DATA_PATH\n",
"from aurora.transfer_function.kernel_dataset import KernelDataset\n",
"\n",
"from mtpy.processing.run_summary import RunSummary\n",
"from mtpy.processing.kernel_dataset import KernelDataset\n",
"\n",
"warnings.filterwarnings('ignore')"
]
13 changes: 9 additions & 4 deletions tests/cas04/02b_process_cas04_mth5.py
@@ -57,9 +57,10 @@
from aurora.config.config_creator import ConfigCreator
from aurora.general_helper_functions import get_test_path
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary
from aurora.transfer_function.plot.comparison_plots import compare_two_z_files
from aurora.transfer_function.kernel_dataset import KernelDataset

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

from loguru import logger

@@ -125,7 +126,9 @@ def z_file_name(self, target_dir):
return out_file


def process_station_runs(local_station_id, remote_station_id="", station_runs={}):
def process_station_runs(
local_station_id, remote_station_id="", station_runs={}
):
"""
Parameters
@@ -152,7 +155,9 @@ def process_station_runs(local_station_id, remote_station_id="", station_runs={}

# Pass the run_summary to a Dataset class
kernel_dataset = KernelDataset()
kernel_dataset.from_run_summary(run_summary, local_station_id, remote_station_id)
kernel_dataset.from_run_summary(
run_summary, local_station_id, remote_station_id
)

# reduce station_runs_dict to only relevant stations

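
The re-wrapped from_run_summary call above is behaviorally identical; it is the point where the local station is paired with an optional remote reference. A short sketch under the same assumption that the mtpy KernelDataset and RunSummary keep the aurora-era signatures; the file name and remote station id are placeholders.

```python
from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset


def make_kernel_dataset(mth5_paths, local_station_id, remote_station_id=""):
    """Pair a local station with an optional remote reference station."""
    run_summary = RunSummary()
    run_summary.from_mth5s(mth5_paths)  # assumed to match the aurora-era interface

    kernel_dataset = KernelDataset()
    # An empty remote_station_id means single-station processing
    kernel_dataset.from_run_summary(run_summary, local_station_id, remote_station_id)
    return kernel_dataset


# Placeholder file name and remote station id for illustration
kernel_dataset = make_kernel_dataset(
    ["cas04_and_remote.h5"], "CAS04", remote_station_id="REM01"
)
```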
5 changes: 3 additions & 2 deletions tests/parkfield/test_process_parkfield_run.py
@@ -1,10 +1,11 @@
from aurora.config.config_creator import ConfigCreator
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary
from aurora.test_utils.parkfield.make_parkfield_mth5 import ensure_h5_exists
from aurora.test_utils.parkfield.path_helpers import PARKFIELD_PATHS
from aurora.transfer_function.kernel_dataset import KernelDataset
from aurora.transfer_function.plot.comparison_plots import compare_two_z_files

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset
from loguru import logger
from mth5.helpers import close_open_files

14 changes: 10 additions & 4 deletions tests/parkfield/test_process_parkfield_run_rr.py
@@ -1,13 +1,15 @@
from aurora.config.config_creator import ConfigCreator
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary
from aurora.sandbox.mth5_channel_summary_helpers import (
channel_summary_to_make_mth5,
)
from aurora.test_utils.parkfield.make_parkfield_mth5 import ensure_h5_exists
from aurora.test_utils.parkfield.path_helpers import PARKFIELD_PATHS
from aurora.transfer_function.kernel_dataset import KernelDataset
from aurora.transfer_function.plot.comparison_plots import compare_two_z_files

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

from loguru import logger
from mth5.mth5 import MTH5
from mth5.helpers import close_open_files
@@ -90,7 +92,9 @@ def test():
test_processing(z_file_path=z_file_path)

# COMPARE WITH ARCHIVED Z-FILE
auxilliary_z_file = PARKFIELD_PATHS["emtf_results"].joinpath("PKD_272_00.zrr")
auxilliary_z_file = PARKFIELD_PATHS["emtf_results"].joinpath(
"PKD_272_00.zrr"
)
if z_file_path.exists():
compare_two_z_files(
z_file_path,
@@ -104,7 +108,9 @@ def test():
xlims=[0.05, 500],
)
else:
logger.error("Z-File not found - Parkfield tests failed to generate output")
logger.error(
"Z-File not found - Parkfield tests failed to generate output"
)
logger.warning("NCEDC probably not returning data")
close_open_files()

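
The wrapped lines above only reflow the comparison against the archived EMTF result. A condensed sketch of that check follows; it assumes the two z-file paths are the leading positional arguments of compare_two_z_files (the test also passes labeling and scaling keywords that are omitted here), and the processed z-file path is a placeholder.

```python
from pathlib import Path

from loguru import logger

from aurora.test_utils.parkfield.path_helpers import PARKFIELD_PATHS
from aurora.transfer_function.plot.comparison_plots import compare_two_z_files

z_file_path = Path("pkd_272_rr.zrr")  # placeholder for the freshly processed z-file
auxilliary_z_file = PARKFIELD_PATHS["emtf_results"].joinpath("PKD_272_00.zrr")

if z_file_path.exists():
    # Overlay the new result on the archived EMTF z-file, using the x-axis
    # limits that appear in the test; other plotting keywords are omitted
    compare_two_z_files(z_file_path, auxilliary_z_file, xlims=[0.05, 500])
else:
    logger.error("Z-File not found - Parkfield tests failed to generate output")
    logger.warning("NCEDC probably not returning data")
```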
4 changes: 2 additions & 2 deletions tests/synthetic/test_compare_aurora_vs_archived_emtf.py
@@ -1,5 +1,5 @@
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary
from mtpy.processing.run_summary import RunSummary
from aurora.sandbox.io_helpers.zfile_murphy import read_z_file
from mth5.data.make_mth5_from_asc import create_test1_h5
from mth5.data.make_mth5_from_asc import create_test2_h5
@@ -14,7 +14,7 @@
from aurora.transfer_function.emtf_z_file_helpers import (
merge_tf_collection_to_match_z_file,
)
from aurora.transfer_function.kernel_dataset import KernelDataset
from mtpy.processing.kernel_dataset import KernelDataset
from plot_helpers_synthetic import plot_rho_phi
from loguru import logger
from mth5.helpers import close_open_files
26 changes: 20 additions & 6 deletions tests/synthetic/test_fourier_coefficients.py
@@ -5,14 +5,19 @@
from aurora.pipelines.fourier_coefficients import fc_decimations_creator
from aurora.pipelines.fourier_coefficients import read_back_fcs
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary

from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test2_h5
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test3_h5
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test12rr_h5
from aurora.test_utils.synthetic.make_processing_configs import create_test_run_config
from aurora.test_utils.synthetic.make_processing_configs import (
create_test_run_config,
)
from aurora.test_utils.synthetic.paths import SyntheticTestPaths
from aurora.transfer_function.kernel_dataset import KernelDataset

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

from loguru import logger
from mth5.helpers import close_open_files

@@ -55,7 +60,12 @@ def setUpClass(self):
mth5_path_2 = create_test2_h5(file_version=self.file_version)
mth5_path_3 = create_test3_h5(file_version=self.file_version)
mth5_path_12rr = create_test12rr_h5(file_version=self.file_version)
self.mth5_paths = [mth5_path_1, mth5_path_2, mth5_path_3, mth5_path_12rr]
self.mth5_paths = [
mth5_path_1,
mth5_path_2,
mth5_path_3,
mth5_path_12rr,
]

def test_123(self):
"""
@@ -85,7 +95,9 @@ def test_123(self):
]:
station_id = mth5_path.stem
tfk_dataset.from_run_summary(run_summary, station_id)
processing_config = create_test_run_config(station_id, tfk_dataset)
processing_config = create_test_run_config(
station_id, tfk_dataset
)
elif mth5_path.stem in [
"test3",
]:
@@ -135,7 +147,9 @@ def test_create_then_use_stored_fcs_for_processing(self):
tf1 = process_synthetic_2(
force_make_mth5=True, z_file_path=z_file_path_1, save_fc=True
)
tf2 = process_synthetic_2(force_make_mth5=False, z_file_path=z_file_path_2)
tf2 = process_synthetic_2(
force_make_mth5=False, z_file_path=z_file_path_2
)
assert tf1 == tf2


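
The reformatted setUpClass and test_123 fragments above build the synthetic fixtures that the Fourier-coefficient tests run on. The sketch below strings those calls together for one station; the RunSummary construction is assumed to follow the aurora-era from_mth5s interface, and file_version "0.2.0" is one of the versions exercised elsewhere in this changeset.

```python
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5
from aurora.test_utils.synthetic.make_processing_configs import (
    create_test_run_config,
)

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

# Build the synthetic single-station MTH5 (one of the file versions used in the tests)
mth5_path = create_test1_h5(file_version="0.2.0")

# Summarize its runs and wrap them in a kernel dataset for that station
run_summary = RunSummary()
run_summary.from_mth5s([mth5_path])  # assumed interface

station_id = mth5_path.stem  # "test1"
tfk_dataset = KernelDataset()
tfk_dataset.from_run_summary(run_summary, station_id)

# Processing configuration for that station, as in test_123 above
processing_config = create_test_run_config(station_id, tfk_dataset)
```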
2 changes: 1 addition & 1 deletion tests/synthetic/test_metadata_values_set_correctly.py
@@ -7,7 +7,7 @@
import pandas as pd
import unittest

from aurora.pipelines.run_summary import RunSummary
from mtpy.processing.run_summary import RunSummary
from mth5.data.make_mth5_from_asc import create_test3_h5
from mth5.data.station_config import make_station_03
from mth5.helpers import close_open_files
18 changes: 7 additions & 11 deletions tests/synthetic/test_multi_run.py
@@ -2,10 +2,11 @@
import unittest
from aurora.config.config_creator import ConfigCreator
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary

from aurora.test_utils.synthetic.make_mth5_from_asc import create_test3_h5
from aurora.test_utils.synthetic.paths import SyntheticTestPaths
from aurora.transfer_function.kernel_dataset import KernelDataset
from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset
from mth5.helpers import close_open_files


@@ -137,13 +138,8 @@ def test_works_with_truncated_run(self):
tf_cls.write(fn=xml_file_name, file_type="emtfxml")


def main():
# tmp = TestMultiRunProcessing()
# tmp.setUp()
# tmp.test_works_with_truncated_run()
# tmp.test_all_runs()
unittest.main()


# =============================================================================
# run
# =============================================================================
if __name__ == "__main__":
main()
unittest.main()
27 changes: 20 additions & 7 deletions tests/synthetic/test_processing.py
@@ -2,17 +2,20 @@
import unittest

from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary

from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test2_h5
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test12rr_h5
from aurora.test_utils.synthetic.make_processing_configs import (
create_test_run_config,
)
from aurora.test_utils.synthetic.paths import SyntheticTestPaths
from aurora.transfer_function.kernel_dataset import KernelDataset

from mth5.helpers import close_open_files

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

synthetic_test_paths = SyntheticTestPaths()
synthetic_test_paths.mkdirs()
AURORA_RESULTS_PATH = synthetic_test_paths.aurora_results_path
@@ -60,20 +63,26 @@ def test_can_output_tf_class_and_write_tf_xml(self):

def test_can_use_channel_nomenclature(self):
channel_nomencalture = "LEMI12"
z_file_path = AURORA_RESULTS_PATH.joinpath(f"syn1-{channel_nomencalture}.zss")
z_file_path = AURORA_RESULTS_PATH.joinpath(
f"syn1-{channel_nomencalture}.zss"
)
tf_cls = process_synthetic_1(
z_file_path=z_file_path,
file_version=self.file_version,
channel_nomenclature=channel_nomencalture,
)
xml_file_base = f"syn1_mth5-{self.file_version}_{channel_nomencalture}.xml"
xml_file_base = (
f"syn1_mth5-{self.file_version}_{channel_nomencalture}.xml"
)
xml_file_name = AURORA_RESULTS_PATH.joinpath(xml_file_base)
tf_cls.write(fn=xml_file_name, file_type="emtfxml")

def test_can_use_mth5_file_version_020(self):
file_version = "0.2.0"
z_file_path = AURORA_RESULTS_PATH.joinpath(f"syn1-{file_version}.zss")
tf_cls = process_synthetic_1(z_file_path=z_file_path, file_version=file_version)
tf_cls = process_synthetic_1(
z_file_path=z_file_path, file_version=file_version
)
xml_file_base = f"syn1_mth5v{file_version}.xml"
xml_file_name = AURORA_RESULTS_PATH.joinpath(xml_file_base)
tf_cls.write(fn=xml_file_name, file_type="emtfxml")
@@ -106,7 +115,9 @@ def test_can_use_scale_factor_dictionary(self):
)

def test_simultaneous_regression(self):
z_file_path = AURORA_RESULTS_PATH.joinpath("syn1_simultaneous_estimate.zss")
z_file_path = AURORA_RESULTS_PATH.joinpath(
"syn1_simultaneous_estimate.zss"
)
tf_cls = process_synthetic_1(
z_file_path=z_file_path, simultaneous_regression=True
)
@@ -228,7 +239,9 @@ def process_synthetic_1(
if return_collection:
z_figure_name = z_file_path.name.replace("zss", "png")
for xy_or_yx in ["xy", "yx"]:
ttl_str = f"{xy_or_yx} component, test_scale_factor = {test_scale_factor}"
ttl_str = (
f"{xy_or_yx} component, test_scale_factor = {test_scale_factor}"
)
out_png_name = f"{xy_or_yx}_{z_figure_name}"
tf_result.rho_phi_plot(
xy_or_yx=xy_or_yx,
11 changes: 8 additions & 3 deletions tests/synthetic/test_stft_methods_agree.py
@@ -2,17 +2,20 @@
See aurora issue #3. This test confirms that the internal aurora stft
method returns the same array as scipy.signal.spectrogram
"""

import numpy as np

from aurora.pipelines.run_summary import RunSummary
from aurora.pipelines.time_series_helpers import prototype_decimate
from aurora.pipelines.time_series_helpers import run_ts_to_stft
from aurora.pipelines.time_series_helpers import run_ts_to_stft_scipy
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5
from aurora.test_utils.synthetic.make_processing_configs import (
create_test_run_config,
)
from aurora.transfer_function.kernel_dataset import KernelDataset

from mtpy.processing.run_summary import RunSummary
from mtpy.processing.kernel_dataset import KernelDataset

from loguru import logger
from mth5.mth5 import MTH5
from mth5.helpers import close_open_files
@@ -65,7 +68,9 @@ def test_stft_methods_agree():
run_ts = run_obj.to_runts(start=None, end=None)
local_run_xrts = run_ts.dataset
else:
local_run_xrts = prototype_decimate(dec_config.decimation, local_run_xrts)
local_run_xrts = prototype_decimate(
dec_config.decimation, local_run_xrts
)

dec_config.extra_pre_fft_detrend_type = ""
local_stft_obj = run_ts_to_stft(dec_config, local_run_xrts)
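
The final reflowed call above sits inside the loop that decimates the synthetic run and compares the two STFT implementations named in the module docstring. A compressed sketch of the comparison step; it assumes run_ts_to_stft_scipy takes the same (decimation config, xarray run) arguments as run_ts_to_stft and that both return xarray Datasets keyed by channel.

```python
import numpy as np

from aurora.pipelines.time_series_helpers import run_ts_to_stft
from aurora.pipelines.time_series_helpers import run_ts_to_stft_scipy


def stft_methods_agree(dec_config, local_run_xrts, rtol=1e-10):
    """Return True if aurora's STFT matches the scipy.signal.spectrogram-based one."""
    dec_config.extra_pre_fft_detrend_type = ""  # as set in the test above
    local_stft_obj = run_ts_to_stft(dec_config, local_run_xrts)
    # Assumed to accept the same arguments as run_ts_to_stft
    local_stft_obj_scipy = run_ts_to_stft_scipy(dec_config, local_run_xrts)
    # Compare the spectrogram of every channel (assumes xarray Dataset outputs)
    return all(
        np.allclose(local_stft_obj[ch].data, local_stft_obj_scipy[ch].data, rtol=rtol)
        for ch in local_stft_obj.data_vars
    )
```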
