Skip to content

Commit

Permalink
merge from #337 branch for testing
Browse files Browse the repository at this point in the history
  • Loading branch information
kkappler committed Jul 23, 2024
1 parent aff7852 commit cf88f7c
Show file tree
Hide file tree
Showing 11 changed files with 335 additions and 122 deletions.
59 changes: 28 additions & 31 deletions aurora/test_utils/dataset_definitions.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
"""
This module contains methods that are used to define datasets to build from FDSN servers.
These datasets are in turn used for testing.
"""
from obspy import UTCDateTime
from aurora.sandbox.io_helpers.fdsn_dataset import FDSNDataset


def make_pkdsao_test_00_config(minitest=False):
def make_pkdsao_test_00_config(minitest=False) -> FDSNDataset:
"""
Populate a FDSNDataset() object for 2h of 40Hz data
Return a description of a 2h PKD SAO 40Hz dataset from NCEDC.
Parameters
----------
Expand All @@ -30,7 +36,14 @@ def make_pkdsao_test_00_config(minitest=False):
return test_data_set


def make_cas04_nvr08_test_00_config():
def make_cas04_nvr08_test_00_config() -> FDSNDataset:
"""
Return a description of a CAS04,NVR08 dataset from IRIS.
Returns
-------
"""
test_data_set = FDSNDataset()
test_data_set.dataset_id = "cas_nvr_test_00"
test_data_set.network = "ZU"
Expand All @@ -52,7 +65,8 @@ def make_cas04_nvr08_test_00_config():
return test_data_set


def make_iak34_test_00_config():
def make_iak34_test_00_config() -> FDSNDataset:
"""Return a description of a IAK34 dataset from IRIS."""
test_data_set = FDSNDataset()
test_data_set.dataset_id = "iak34_test_00"
test_data_set.network = "EM"
Expand All @@ -70,7 +84,8 @@ def make_iak34_test_00_config():
return test_data_set


def make_iak34_test_01_config():
def make_iak34_test_01_config() -> FDSNDataset:
"""Return a description of a IAK34 dataset from IRIS."""
test_data_set = FDSNDataset()
test_data_set.dataset_id = "iak34_test_01_long_ss"
test_data_set.network = "EM"
Expand All @@ -87,7 +102,8 @@ def make_iak34_test_01_config():
return test_data_set


def make_iak34_test_02_config():
def make_iak34_test_02_config() -> FDSNDataset:
"""Return a description of a IAK34 dataset from IRIS."""
test_data_set = FDSNDataset()
test_data_set.dataset_id = "iak34_test_02_long_rr"
test_data_set.network = "EM"
Expand All @@ -104,7 +120,8 @@ def make_iak34_test_02_config():
return test_data_set


def make_iak34_test_03_config():
def make_iak34_test_03_config() -> FDSNDataset:
"""Return a description of a IAK34 dataset from IRIS."""
test_data_set = FDSNDataset()
test_data_set.dataset_id = "iak34_test_03_long_rr"
test_data_set.network = "EM"
Expand All @@ -121,7 +138,8 @@ def make_iak34_test_03_config():
return test_data_set


def make_iak34_test_04_config():
def make_iak34_test_04_config() -> FDSNDataset:
"""Return a description of a IAK34 dataset from IRIS."""
test_data_set = FDSNDataset()
test_data_set.dataset_id = "iak34_test_04_rr"
test_data_set.network = "EM"
Expand All @@ -138,29 +156,8 @@ def make_iak34_test_04_config():
return test_data_set


# def make_iak34_nen34_test_00_config():
# test_data_set = FDSNDataset()
# test_data_set.dataset_id = "iak34_nen34_test_00"
# test_data_set.network = "ZU"
# test_data_set.station = "IAK34,NEN34"
# # <ORIGINAL>
# # test_data_set.starttime = UTCDateTime("2020-06-02T18:41:43.000000Z")
# # test_data_set.endtime = UTCDateTime("2020-07-13T21:46:12.000000Z")
# # </ORIGINAL>
# test_data_set.starttime = UTCDateTime("2020-06-04T00:00:00.000000Z")
# test_data_set.endtime = UTCDateTime("2020-06-05T00:00:00.000000Z") # minitest
# # test_data_set.endtime = UTCDateTime("2020-06-24T15:55:46.000000Z")
#
# # test_data_set.starttime = UTCDateTime("2004-09-28T00:00:00.000000Z")
# # test_data_set.endtime = UTCDateTime("2004-09-28T01:59:59.975000Z")
# # test_data_set.endtime = UTCDateTime("2004-09-28T00:01:59.999000Z") #small test
# test_data_set.channel_codes = None
# test_data_set.description = "earthscope example dataset"
# test_data_set.components_list = ["hx", "hy", "ex", "ey"]
# return test_data_set


def make_test_configs():
def make_test_configs() -> dict:
"""Make all the test dataset configs and put them in a dict"""
test_data_set_configs = {}

# pkd_sao_test_00 Remote Reference
Expand Down
31 changes: 22 additions & 9 deletions aurora/test_utils/mth5/fc_helpers.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,35 @@
"""
This module contains functions used by tests that involve Fourier Coefficients in MTH5.
"""

from typing import Optional, Union
import numpy as np
import pandas as pd
import pathlib
import xarray as xr


def read_fc_csv(csv_name, as_xarray=True):
def read_fc_csv(
csv_name: Union[pathlib.Path, str], as_xarray: Optional[bool] = True
) -> Union[xr.Dataset, pd.DataFrame]:
"""
Load Fourier coefficients from a csv file and return as xarray or dataframe
Usage:
xrds_obj = read_fc_csv(csv_name)
df = read_fc_csv(csv_name, as_xarray=False)
Returns the Fourier coefficient data as an xr.Dataset (default) or pd.DataFrame.
Parameters
----------
csv_name: str or pathlib.Path
as_xarray: bool'
csv_name: Union[pathlib.Path, str]
Path to csv file to read
as_xarray: Optional[bool]
If true return xr.Dataset
Returns
-------
output: xr.Dataset or pd.DataFrame
"""
df = pd.read_csv(
csv_name,
Expand All @@ -29,7 +42,7 @@ def read_fc_csv(csv_name, as_xarray=True):
for col in df.columns:
df[col] = np.complex128(df[col])
if as_xarray:
xrds_out = df.to_xarray()
return xrds_out
output = df.to_xarray()
else:
return df
output = df
return output
31 changes: 18 additions & 13 deletions aurora/test_utils/parkfield/calibration_helpers.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
"""
This module contains methods that are used in the Parkfield calibration tests.
"""
import matplotlib.pyplot as plt
import numpy as np

Expand All @@ -8,25 +11,26 @@
plt.ion()


def load_bf4_fap_for_parkfield_test_using_mt_metadata(frequencies):
def load_bf4_fap_for_parkfield_test_using_mt_metadata(frequencies: np.ndarray):
"""
The hardware responses (AAF and digitizer) are not included in this response,
but these do not make any significant difference away from the Nyquist frequency.
Loads a csv format response file for a BF4 coil and return the calibration function.
Uses an mt_metadata filter object.
Near the Nyquist calibration is inadequate anyhow. Looking at the output plots,
which show the "full calibration" vs "response table (EMI)", neither one is
realistic at high frequency. The fap ("response table (EMI)") curve does not
compensate for AAF and plunges down at high frequency. The full calibration
from the PZ response on the other hand rises unrealistically. The PZ rising
signal amplitude at high frequency is an artefact of calibrating noise.
- Anti-alias filter and digitizer responses are not included in the csv -- it is coil only.
- We ignore the AAF, and hard-code a counts-per-volt value for now
Development Notes:
TODO: Add doc showing where counts per volt is accessed in FDSN metadata.
Parameters
----------
frequencies : numpy array
Array of frequencies at which to evaluate the bf response function
frequencies: np.ndarray
Frequencies at which to evaluate the bf response function
Returns
-------
bf4_resp: np.ndarray
Complex response of the filter at the input frequencies
"""
from aurora.general_helper_functions import DATA_PATH
from mt_metadata.timeseries.filters.helper_functions import (
Expand All @@ -49,6 +53,7 @@ def plot_responses(
show_response_curves,
):
"""
Makes a sanity check plot to show the response of the calibration curves
Parameters
----------
Expand Down Expand Up @@ -99,7 +104,7 @@ def parkfield_sanity_check(
include_decimation=False,
):
"""
loop over channels in fft obj and make calibrated spectral plots
Loop over channels in fft obj and make calibrated spectral plots
Parameters
----------
Expand Down
26 changes: 22 additions & 4 deletions aurora/test_utils/parkfield/make_parkfield_mth5.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
"""
Create Parkfield / Hollister mth5 to use as test data
This module contains methods for building an MTH5 file from data at Parkfield (PKD) and Hollister
(SAO) long term monitoring stations to use as test data.
"""
import pathlib

from aurora.test_utils.dataset_definitions import TEST_DATA_SET_CONFIGS
from mth5.utils.helpers import read_back_data
from mth5.helpers import close_open_files
Expand All @@ -16,6 +19,16 @@


def select_data_source():
"""
Identifies appropriate web client to use for NCEDC data requests.
This was used for debugging data access issues in the past -- may no longer be needed.
Returns
-------
data_source: str
A responsive NCEDC client.
"""
from obspy.clients.fdsn import Client

ok = False
Expand All @@ -35,7 +48,9 @@ def select_data_source():


def make_pkdsao_mth5(fdsn_dataset):
""" """
"""
Makes MTH5 file with data from Parkfield and Hollister stations to use for testing.
"""
close_open_files()
fdsn_dataset.data_source = select_data_source()
fdsn_dataset.initialize_client()
Expand All @@ -51,12 +66,14 @@ def make_pkdsao_mth5(fdsn_dataset):
return h5_path


def ensure_h5_exists():
def ensure_h5_exists() -> pathlib.Path:
"""
Make sure that the PKD SAO MTH5 file exists. If it does not, build it.
Returns
-------
h5_path: pathlib.Path
The path to the PKD SAO mth5 file to be used for testing.
"""

h5_path = PARKFIELD_PATHS["data"].joinpath(FDSN_DATASET.h5_filebase)
Expand All @@ -73,6 +90,7 @@ def ensure_h5_exists():


def main():
"""allows the make to be run by calling this module from the command line"""
make_pkdsao_mth5(FDSN_DATASET)


Expand Down
14 changes: 13 additions & 1 deletion aurora/test_utils/parkfield/path_helpers.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,19 @@
"""
This module contains helper functions to control where the parkfield test data
and test results are stored /accessed.
"""
from aurora.general_helper_functions import DATA_PATH


def make_parkfield_paths():
def make_parkfield_paths() -> dict:
"""
Makes a dictionary with information about where to store/access PKD test data and results.
Returns
-------
parkfield_paths: dict
Dict containing paths to "data", "aurora_results", "config", "emtf_results"
"""
base_path = DATA_PATH.joinpath("parkfield")
parkfield_paths = {}
parkfield_paths["data"] = base_path
Expand Down
Loading

0 comments on commit cf88f7c

Please sign in to comment.