Merge pull request #200 from kujaku11/patches
- Run the example Jupyter notebooks (ipynb) as part of the test workflow
- Fix an issue with `mixed` datetime formats in the `channel_summary` table
kujaku11 authored Apr 12, 2024
2 parents 9dd8d2b + 8ae4d55 commit 03583af
Showing 15 changed files with 248 additions and 51 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.4.2
current_version = 0.4.3
files = setup.py mth5/__init__.py README.md docs/README.rst
commit = True
tag = True
16 changes: 15 additions & 1 deletion .github/workflows/tests.yml
@@ -42,6 +42,20 @@ jobs:
source activate mth5-test
pip install -e .
conda list
- name: Install Jupyter and dependencies
run: |
source activate mth5-test
pip install jupyter
pip install ipykernel
pip install ipympl
python -m ipykernel install --user --name mth5-test
# Install any other dependencies you need
- name: Execute Jupyter Notebooks
run: |
source activate mth5-test
jupyter nbconvert --to notebook --execute docs/examples/notebooks/make_mth5_driver_v0.2.0.ipynb
- name: Run Tests
shell: bash
@@ -53,6 +67,6 @@ jobs:
uses: codecov/codecov-action@v3
with:
flags: tests
fail_ci_if_error: true
fail_ci_if_error: false
verbose: true
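
The step added above runs the example notebook with `jupyter nbconvert` inside the `mth5-test` environment. For reference, a rough local equivalent using nbconvert's Python API (a sketch, not part of this PR; the notebook path and kernel name are taken from the workflow step):

```python
# Sketch only: execute the example notebook in place, mirroring the new CI step.
# Assumes jupyter/nbconvert are installed and the "mth5-test" kernel is registered.
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor

nb_path = "docs/examples/notebooks/make_mth5_driver_v0.2.0.ipynb"
nb = nbformat.read(nb_path, as_version=4)

executor = ExecutePreprocessor(timeout=600, kernel_name="mth5-test")
executor.preprocess(nb, {"metadata": {"path": "docs/examples/notebooks"}})

nbformat.write(nb, nb_path)
```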

22 changes: 22 additions & 0 deletions LICENSE.md
@@ -0,0 +1,22 @@
MIT License
--------------

Copyright (c) 2020 JP

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
2 changes: 1 addition & 1 deletion README.md
@@ -15,7 +15,7 @@ MTH5 uses [h5py](https://www.h5py.org/) to interact with the HDF5 file, [xarray]
This project is in cooperation with the Incorporated Research Institutes of Seismology, the U.S. Geological Survey, and other collaborators. Facilities of the IRIS Consortium are supported by the National Science Foundation’s Seismological Facilities for the Advancement of Geoscience (SAGE) Award under Cooperative Support Agreement EAR-1851048. USGS is partially funded through the Community for Data Integration and IMAGe through the Minerals Resources Program.


* **Version**: 0.4.2
* **Version**: 0.4.3
* **Free software**: MIT license
* **Documentation**: https://mth5.readthedocs.io.
* **Examples**: Click the `Binder` badge above and Jupyter Notebook examples are in **docs/examples/notebooks**
2 changes: 1 addition & 1 deletion docs/README.rst
@@ -28,7 +28,7 @@ MTH5 uses `h5py <https://www.h5py.org/>`_ to interact with the HDF5 file, `xarra

This project is in cooperation with the Incorporated Research Institutes of Seismology, the U.S. Geological Survey, and other collaborators. Facilities of the IRIS Consortium are supported by the National Science Foundation’s Seismological Facilities for the Advancement of Geoscience (SAGE) Award under Cooperative Support Agreement EAR-1851048. USGS is partially funded through the Community for Data Integration and IMAGe through the Minerals Resources Program.

* **Version**: 0.4.2
* **Version**: 0.4.3
* **Free software**: MIT license
* **Documentation**: `<https://mth5.readthedocs.io>`_.
* **Examples**: Click the `Binder` badge above and Jupyter Notebook examples are in **docs/examples/notebooks**
14 changes: 9 additions & 5 deletions docs/examples/notebooks/make_mth5_from_phoenix_real.ipynb
@@ -25,10 +25,14 @@
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"2022-08-31 13:19:07,431 [line 135] mth5.setup_logger - INFO: Logging file can be found C:\\Users\\jpeacock\\OneDrive - DOI\\Documents\\GitHub\\mth5\\logs\\mth5_debug.log\n"
"ename": "ImportError",
"evalue": "cannot import name 'ReceiverMetadataJSON' from 'mth5.io.phoenix' (C:\\Users\\peaco\\Documents\\GitHub\\mth5\\mth5\\io\\phoenix\\__init__.py)",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mImportError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[1], line 5\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mmth5\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmth5\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m MTH5\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mmth5\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m read_file\n\u001b[1;32m----> 5\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mmth5\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mio\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mphoenix\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m ReceiverMetadataJSON, PhoenixCollection\n",
"\u001b[1;31mImportError\u001b[0m: cannot import name 'ReceiverMetadataJSON' from 'mth5.io.phoenix' (C:\\Users\\peaco\\Documents\\GitHub\\mth5\\mth5\\io\\phoenix\\__init__.py)"
]
}
],
@@ -57,7 +61,7 @@
"metadata": {},
"outputs": [],
"source": [
"station_dir = Path(r\"c:\\Users\\jpeacock\\OneDrive - DOI\\mt\\phoenix_example_data\\10291_2019-09-06-015630\")"
"station_dir = Path(r\"c:\\MT\\ST2024\\1014\\Data\\10643_2024-03-19-190728\")"
]
},
{
3 changes: 2 additions & 1 deletion mth5/__init__.py
@@ -17,7 +17,7 @@

__author__ = """Jared Peacock"""
__email__ = "[email protected]"
__version__ = "0.4.2"
__version__ = "0.4.3"


# =============================================================================
@@ -46,6 +46,7 @@
# =============================================================================
CHUNK_SIZE = 8196
ACCEPTABLE_FILE_TYPES = ["mth5", "MTH5", "h5", "H5"]
ACCEPTABLE_FILE_SUFFIXES = [f".{x}" for x in ACCEPTABLE_FILE_TYPES]
ACCEPTABLE_FILE_VERSIONS = ["0.1.0", "0.2.0"]
ACCEPTABLE_DATA_LEVELS = [0, 1, 2, 3]

51 changes: 43 additions & 8 deletions mth5/clients/phoenix.py
@@ -23,15 +23,17 @@ class PhoenixClient:
def __init__(
self,
data_path,
sample_rates=[130, 24000],
sample_rates=[150, 24000],
save_path=None,
calibration_path=None,
receiver_calibration_dict={},
sensor_calibration_dict={},
):
self.data_path = data_path
self.sample_rates = sample_rates
self.save_path = save_path
self.mth5_filename = "from_phoenix.h5"
self.calibration_path = calibration_path
self.receiver_calibration_dict = receiver_calibration_dict
self.sensor_calibration_dict = sensor_calibration_dict

self.collection = PhoenixCollection(self.data_path)

@@ -62,12 +64,38 @@ def data_path(self, value):
raise ValueError("data_path cannot be None")

@property
def calibration_path(self):
def receiver_calibration_dict(self):
"""receiver calibrations"""
return self._receiver_calibration_dict

@receiver_calibration_dict.setter
def receiver_calibration_dict(self, value):
if isinstance(value, dict):
self._receiver_calibration_dict = value

elif isinstance(value, (str, Path)):
receiver_path = Path(value)
if receiver_path.is_dir():
self._receiver_calibration_dict = {}
for fn in receiver_path.glob("*.rx_cal.json"):
self._receiver_calibration_dict[fn.stem.split("_")[0]] = (
fn
)
elif receiver_path.is_file():
self._receiver_calibration_dict = {}
self._receiver_calibration_dict[fn.stem.split("_")[0]] = (
receiver_path
)
else:
raise TypeError(f"type {type(value)} not supported.")

@property
def sensor_calibration_dict(self):
"""Path to calibration data"""
return self._calibration_path
return self._sensor_calibration_dict

@calibration_path.setter
def calibration_path(self, value):
@sensor_calibration_dict.setter
def sensor_calibration_dict(self, value):
"""
:param value: DESCRIPTION
@@ -77,7 +105,14 @@ def calibration_path(self, value):
"""

if value is not None:
if isinstance(value, dict):
self._sensor_calibration_dict = value
elif isinstance(value, (str, Path)):
self._sensor_calibration_dict = {}
cal_path = Path(value)
if cal_path.is_dir():
for fn in cal_path.glob("*scal.json"):
self._sensor_calibration_dict[fn.stem.split("_")[0]]
self._calibration_path = Path(value)
if not self._calibration_path.exists():
raise IOError(f"Could not find {self._calibration_path}")
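
The `calibration_path` argument is replaced by two dictionaries whose setters accept either a prebuilt dict or a path (a directory of `*.rx_cal.json` / `*scal.json` files, or a single file). A hypothetical construction call based on the new signature (the paths are placeholders):

```python
# Hypothetical usage of the new calibration keyword arguments; all paths are
# placeholders, and the file layout follows the globs used in the setters above.
from mth5.clients.phoenix import PhoenixClient

client = PhoenixClient(
    data_path="path/to/phoenix/station_data",
    sample_rates=[150, 24000],
    # a directory of *.rx_cal.json files, or a prebuilt {receiver_id: path} dict
    receiver_calibration_dict="path/to/receiver_calibrations",
    # a directory of *scal.json files, or a prebuilt {sensor_id: path} dict
    sensor_calibration_dict="path/to/sensor_calibrations",
)
```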
9 changes: 6 additions & 3 deletions mth5/groups/fourier_coefficients.py
@@ -433,14 +433,16 @@ def from_xarray(self, data_array, sample_rate_decimation_level):
ch,
fc_data=data_array[ch].to_numpy(),
fc_metadata=ch_metadata,
dtype=data_array[ch].dtype,
)
elif data_array[ch].time.size == data_array[ch].shape[1]:
self.add_channel(
ch,
fc_data=data_array[ch].to_numpy().T,
fc_metadata=ch_metadata,
dtype=data_array[ch].dtype,
)
return
return

def to_xarray(self, channels=None):
"""
@@ -494,6 +496,7 @@ def add_channel(
fc_metadata=None,
max_shape=(None, None),
chunks=True,
dtype=complex,
**kwargs,
):
"""
@@ -548,12 +551,12 @@ def add_channel(
raise TypeError(msg)
else:
chunks = True
fc_data = np.zeros((1, 1), dtype=complex)
fc_data = np.zeros((1, 1), dtype=dtype)
try:
dataset = self.hdf5_group.create_dataset(
fc_name,
data=fc_data,
dtype=complex,
dtype=dtype,
chunks=chunks,
maxshape=max_shape,
**self.dataset_options,
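
`from_xarray` now passes each channel's dtype through to `add_channel` instead of always storing `complex` data. An illustrative call (`fc_group` and the channel name are placeholders, not part of this PR):

```python
# Illustrative only: forward the source array's dtype to add_channel rather
# than the previously hard-coded complex. `fc_group` is a placeholder for an
# open Fourier-coefficient decimation-level group in an MTH5 file.
import numpy as np

fc_values = np.zeros((4, 64), dtype=np.float32)

fc_group.add_channel(
    "ex",
    fc_data=fc_values,
    dtype=fc_values.dtype,
)
```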
22 changes: 16 additions & 6 deletions mth5/io/phoenix/phoenix_collection.py
@@ -118,12 +118,19 @@ def to_dataframe(
for folder in station_folders:
rec_fn = folder.joinpath(self._receiver_metadata_name)
receiver_metadata = self._read_receiver_metadata_json(rec_fn)
self.metadata_dict[
receiver_metadata.station_metadata.id
] = receiver_metadata
self.metadata_dict[receiver_metadata.station_metadata.id] = (
receiver_metadata
)

for sr in sample_rates:
for fn in folder.rglob(f"*{self._file_extension_map[int(sr)]}"):
for fn in folder.rglob(
f"*{self._file_extension_map[int(sr)]}"
):
if "calibration" in fn.as_posix().lower():
self.logger.debug(
f"skipping calibration time series {fn}"
)
continue
try:
phx_obj = open_phoenix(fn)
except OSError:
@@ -204,7 +211,9 @@ def assign_run_names(self, df, zeros=4):
starts = np.sort(
sdf.loc[sdf.sample_rate == sr].start.unique()
)
ends = np.sort(sdf.loc[sdf.sample_rate == sr].end.unique())
ends = np.sort(
sdf.loc[sdf.sample_rate == sr].end.unique()
)

# find any breaks in the data
diff = ends[0:-1] - starts[1:]
@@ -229,7 +238,8 @@ def assign_run_names(self, df, zeros=4):

else:
rdf.loc[
(rdf.station == station) & (rdf.sample_rate == sr),
(rdf.station == station)
& (rdf.sample_rate == sr),
"run",
] = f"sr{run_stem}_{count:0{zeros}}"

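
`assign_run_names` flags gaps by comparing each segment's end time with the next segment's start (`diff = ends[0:-1] - starts[1:]`). A standalone illustration of that comparison with made-up timestamps; the exact break criterion applied downstream is not shown in this hunk:

```python
# Made-up timestamps: back-to-back segments give a zero difference, while a
# gap between segments shows up as a negative value. The library's actual
# threshold for declaring a new run is outside this hunk.
import numpy as np
import pandas as pd

starts = pd.to_datetime(
    ["2024-01-01 00:00", "2024-01-01 01:00", "2024-01-01 05:00"]
).values
ends = pd.to_datetime(
    ["2024-01-01 01:00", "2024-01-01 02:00", "2024-01-01 06:00"]
).values

diff = ends[0:-1] - starts[1:]
gaps = np.where(diff < np.timedelta64(0, "s"))[0]  # -> array([1]): break before the third segment
```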
3 changes: 2 additions & 1 deletion mth5/mth5.py
@@ -34,6 +34,7 @@
from mth5 import (
CHANNEL_DTYPE,
TF_DTYPE,
ACCEPTABLE_FILE_SUFFIXES,
ACCEPTABLE_FILE_TYPES,
ACCEPTABLE_FILE_VERSIONS,
ACCEPTABLE_DATA_LEVELS,
@@ -334,7 +335,7 @@ def filename(self, value):
if value is not None:
if not isinstance(value, Path):
value = Path(value)
if value.suffix not in ACCEPTABLE_FILE_TYPES:
if value.suffix not in ACCEPTABLE_FILE_SUFFIXES:
msg = (
f"file extension {value.suffix} is not correct. "
"Changing to default .h5"
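
`Path.suffix` keeps its leading dot, so comparing it against the bare extensions in `ACCEPTABLE_FILE_TYPES` could never pass and every filename was treated as having the wrong extension; the new `ACCEPTABLE_FILE_SUFFIXES` list (added in `mth5/__init__.py` above) fixes the comparison. A quick demonstration:

```python
# Why the new constant matters: Path.suffix includes the leading dot.
from pathlib import Path

ACCEPTABLE_FILE_TYPES = ["mth5", "MTH5", "h5", "H5"]
ACCEPTABLE_FILE_SUFFIXES = [f".{x}" for x in ACCEPTABLE_FILE_TYPES]

suffix = Path("survey.h5").suffix           # ".h5"
print(suffix in ACCEPTABLE_FILE_TYPES)      # False -- the old check never passed
print(suffix in ACCEPTABLE_FILE_SUFFIXES)   # True  -- the new check behaves as intended
```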
8 changes: 6 additions & 2 deletions mth5/tables/channel_table.py
@@ -47,8 +47,12 @@ def to_dataframe(self):
"units",
]:
setattr(df, key, getattr(df, key).str.decode("utf-8"))
df.start = pd.to_datetime(df.start.str.decode("utf-8"), format="mixed")
df.end = pd.to_datetime(df.end.str.decode("utf-8"), format="mixed")
try:
df.start = pd.to_datetime(df.start.str.decode("utf-8"), format="mixed")
df.end = pd.to_datetime(df.end.str.decode("utf-8"), format="mixed")
except ValueError:
df.start = pd.to_datetime(df.start.str.decode("utf-8"))
df.end = pd.to_datetime(df.end.str.decode("utf-8"))

return df

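
`format="mixed"` only exists in pandas 2.0 and later, so the new `try/except` falls back to the default parser when it raises. A minimal standalone sketch of the same pattern (example timestamps only):

```python
# Minimal sketch of the fallback above: pandas < 2.0 raises ValueError for
# format="mixed", in which case the default parser is used instead.
import pandas as pd

values = pd.Series(["2020-01-01T00:00:00", "2020-01-01 00:00:00.500"])

try:
    parsed = pd.to_datetime(values, format="mixed")
except ValueError:
    parsed = pd.to_datetime(values)
```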
4 changes: 4 additions & 0 deletions mth5/utils/helpers.py
@@ -71,6 +71,10 @@ def get_channel_summary(m, show=True):
"""
logger.info(f"{m.filename} channel summary")
df = m.channel_summary.to_dataframe()
if len(df) <= 1:
logger.warning("channel summary smaller than expected -- re-summarizing")
m.channel_summary.summarize()
df = m.channel_summary.to_dataframe()
if show:
logger.info(f"{df}")
return df
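
`get_channel_summary` now rebuilds the summary table when the cached version looks too small. A hedged usage sketch (`example.h5` is a placeholder for an existing MTH5 file):

```python
# Hedged usage sketch; "example.h5" stands in for an existing MTH5 file.
from mth5.mth5 import MTH5
from mth5.utils.helpers import get_channel_summary

m = MTH5()
m.open_mth5("example.h5", mode="a")  # append mode so the summary table can be rebuilt
df = get_channel_summary(m, show=False)
m.close_mth5()
```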
2 changes: 1 addition & 1 deletion setup.py
@@ -52,6 +52,6 @@
test_suite="tests",
tests_require=test_requirements,
url="https://github.com/kujaku11/mth5",
version="0.4.2",
version="0.4.3",
zip_safe=False,
)