From b1c99aa67db999e09fe034faa4ea4bd8a35c29c2 Mon Sep 17 00:00:00 2001
From: Sven-Erik Enno
Date: Thu, 2 Dec 2021 16:53:52 +0100
Subject: [PATCH 01/71] Add Vaisala GLD360 UALF2 lightning stroke/pulse data
 reader.

---
 satpy/etc/readers/gld360_ualf2.yaml           | 157 ++++++++++++
 satpy/readers/gld360_ualf2.py                 | 152 ++++++++++++
 satpy/tests/reader_tests/test_gld360_ualf2.py | 224 ++++++++++++++++++
 3 files changed, 533 insertions(+)
 create mode 100644 satpy/etc/readers/gld360_ualf2.yaml
 create mode 100644 satpy/readers/gld360_ualf2.py
 create mode 100644 satpy/tests/reader_tests/test_gld360_ualf2.py

diff --git a/satpy/etc/readers/gld360_ualf2.yaml b/satpy/etc/readers/gld360_ualf2.yaml
new file mode 100644
index 0000000000..d1805ca3c2
--- /dev/null
+++ b/satpy/etc/readers/gld360_ualf2.yaml
@@ -0,0 +1,157 @@
+reader:
+  description: Gld360Ualf2 reader
+  name: gld360_ualf2
+  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader ''
+  sensors: [gld360]
+
+file_types:
+  gld360:
+    file_reader: !!python/name:satpy.readers.gld360_ualf2.Gld360Ualf2FileHandler ''
+    file_patterns: ['{start_time:%Y.%m.%d.%H.%M}.txt']
+
+datasets:
+  time:
+    name: time
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  ualf_record_type:
+    name: ualf_record_type
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  network_type:
+    name: network_type
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  latitude:
+    name: latitude
+    sensor: gld360
+    file_type: gld360
+    units: degree_north
+    standard_name: latitude
+
+  longitude:
+    name: longitude
+    sensor: gld360
+    file_type: gld360
+    units: degree_east
+    standard_name: longitude
+
+  altitude:
+    name: altitude
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  altitude_uncertainty:
+    name: altitude_uncertainty
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  peak_current:
+    name: peak_current
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  vhf_range:
+    name: vhf_range
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  multiplicity_flash:
+    name: multiplicity_flash
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  cloud_pulse_count:
+    name: cloud_pulse_count
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  number_of_sensors:
+    name: number_of_sensors
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  degree_freedom_for_location:
+    name: degree_freedom_for_location
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  error_ellipse_angle:
+    name: error_ellipse_angle
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  error_ellipse_max_axis_length:
+    name: error_ellipse_max_axis_length
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  error_ellipse_min_axis_length:
+    name: error_ellipse_min_axis_length
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  chi_squared_value_location_optimization:
+    name: chi_squared_value_location_optimization
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  wave_form_rise_time:
+    name: wave_form_rise_time
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  wave_form_peak_to_zero_time:
+    name: wave_form_peak_to_zero_time
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  wave_form_max_rate_of_rise:
+    name: wave_form_max_rate_of_rise
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  cloud_indicator:
+    name: cloud_indicator
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  angle_indicator:
+    name: angle_indicator
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  signal_indicator:
+    name: signal_indicator
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
+
+  timing_indicator:
+    name: timing_indicator
+    sensor: gld360
+    file_type: gld360
+    coordinates: [ longitude, latitude ]
diff --git a/satpy/readers/gld360_ualf2.py b/satpy/readers/gld360_ualf2.py
new file mode 100644
index 0000000000..21608d4401
--- /dev/null
+++ b/satpy/readers/gld360_ualf2.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+
+"""Gld360Ualf2 reader.
+
+Vaisala Global Lightning Dataset GLD360 is a data-as-a-service
+offering that provides real-time lightning data for accurate and
+early detection and tracking of severe weather. The data are
+generated by a worldwide lightning detection sensor network owned
+and operated by Vaisala.
+
+References:
+- [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360
+
+"""
+
+import logging
+from datetime import timedelta
+
+import dask.dataframe as dd
+import numpy as np
+import xarray as xr
+
+from satpy.readers.file_handlers import BaseFileHandler
+
+logger = logging.getLogger(__name__)
+
+
+class Gld360Ualf2FileHandler(BaseFileHandler):
+    """ASCII reader for Vaisala GLD360 UALF2 data."""
+
+    def __init__(self, filename, filename_info, filetype_info):
+        """Initialize Vaisala Gld360Ualf2FileHandler."""
+        super(Gld360Ualf2FileHandler, self).__init__(filename, filename_info, filetype_info)
+
+        def pad_nanoseconds(nanoseconds):
+            """Read ns values for less than 0.1s correctly (these are not zero-padded in the input files)."""
+            return str(nanoseconds).zfill(9)
+
+        names = [
+            'ualf_record_type',
+            'network_type',
+            'year',
+            'month',
+            'day',
+            'hour',
+            'minute',
+            'second',
+            'nanosecond',
+            'latitude',
+            'longitude',
+            'altitude',
+            'altitude_uncertainty',
+            'peak_current',
+            'vhf_range',
+            'multiplicity_flash',
+            'cloud_pulse_count',
+            'number_of_sensors',
+            'degree_freedom_for_location',
+            'error_ellipse_angle',
+            'error_ellipse_max_axis_length',
+            'error_ellipse_min_axis_length',
+            'chi_squared_value_location_optimization',
+            'wave_form_rise_time',
+            'wave_form_peak_to_zero_time',
+            'wave_form_max_rate_of_rise',
+            'cloud_indicator',
+            'angle_indicator',
+            'signal_indicator',
+            'timing_indicator',
+        ]
+
+        dtype = {
+            'ualf_record_type': np.uint8,
+            'network_type': np.uint8,
+            'year': str,
+            'month': str,
+            'day': str,
+            'hour': str,
+            'minute': str,
+            'second': str,
+            'latitude': np.float32,
+            'longitude': np.float32,
+            'altitude': np.uint16,
+            'altitude_uncertainty': np.uint16,
+            'peak_current': np.int16,
+            'vhf_range': np.float32,
+            'multiplicity_flash': np.uint8,
+            'cloud_pulse_count': np.int16,
+            'number_of_sensors': np.uint8,
+            'degree_freedom_for_location': np.uint8,
+            'error_ellipse_angle': np.float32,
+            'error_ellipse_max_axis_length': np.float32,
+            'error_ellipse_min_axis_length': np.float32,
+            'chi_squared_value_location_optimization': np.float32,
+            'wave_form_rise_time': np.float32,
+            'wave_form_peak_to_zero_time': np.float32,
+            'wave_form_max_rate_of_rise': np.float32,
+            'cloud_indicator': bool,
+            'angle_indicator': bool,
+            'signal_indicator': bool,
+            'timing_indicator': bool,
+        }
+
+        # Combine 'year', 'month', 'day', 'hour', 'minute', 'second' and 'nanosecond' into a datetime object.
+        parse_dates = {'time': ['year', 'month', 'day', 'hour', 'minute', 'second', 'nanosecond']}
+
+        self.data = dd.read_csv(filename,
+                                sep='\t',
+                                header=None,
+                                names=names,
+                                dtype=dtype,
+                                parse_dates=parse_dates,
+                                converters={'nanosecond': pad_nanoseconds}
+                                )
+
+        self.data['time'] = dd.to_datetime(self.data['time'], format='%Y %m %d %H %M %S %f')
+        self.data = self.data.drop_duplicates()
+        self.data = self.data.sort_values('time')
+
+    @property
+    def start_time(self):
+        """Return start time."""
+        return self.filename_info['start_time']
+
+    @property
+    def end_time(self):
+        """Return end time."""
+        return self.filename_info['start_time'] + timedelta(hours=1)
+
+    def get_dataset(self, dataset_id, dataset_info):
+        """Return the dataset."""
+        # create xarray and place along y dimension
+        data_array = xr.DataArray(self.data[dataset_id['name']].to_dask_array(lengths=True), dims=['y'])
+        # assign dataset infos to xarray attrs
+        data_array.attrs.update(dataset_info)
+        return data_array
diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py
new file mode 100644
index 0000000000..827539f1f1
--- /dev/null
+++ b/satpy/tests/reader_tests/test_gld360_ualf2.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Unittesting the GLD360 ualf2 reader.""" + +import tempfile +import unittest + +import numpy as np + +from satpy.readers.gld360_ualf2 import Gld360Ualf2FileHandler +from satpy.tests.utils import make_dataid + + +class TestVaisalaGLD360TextFileHandler(unittest.TestCase): + """Test the Gld360Ualf2FileHandler.""" + + def test_vaisala_gld360(self): + """Test basic functionality for vaisala file handler.""" + expected_ualf_record_type = np.array([2, 2, 2]) + expected_network_type = np.array([3, 3, 3]) + expected_time = np.array(['2021-01-04T08:00:01.000000051', '2021-01-04T08:00:01.864782486', + '2021-01-04T08:00:01.897014133'], dtype='datetime64[ns]') + expected_latitude = np.array([-20.8001, 0.4381, 66.8166]) + expected_longitude = np.array([-158.3439, -0.85, 42.4914]) + expected_altitude = np.array([0, 0, 0]) + expected_altitude_uncertainty = np.array([0, 0, 0]) + expected_peak_current = np.array([10, -20, 15]) + expected_vhf_range = np.array([0, 0, 0]) + expected_multiplicity_flash = np.array([0, 1, 0]) + expected_cloud_pulse_count = np.array([1, 0, 1]) + expected_number_of_sensors = np.array([3, 4, 5]) + expected_degree_freedom_for_location = np.array([3, 5, 7]) + expected_error_ellipse_angle = np.array([9.47, 24.99, 103.87]) + expected_error_ellipse_max_axis_length = np.array([1.91, 1.95, 4.33]) + expected_error_ellipse_min_axis_length = np.array([1.59, 1.53, 1.46]) + expected_chi_squared_value_location_optimization = np.array([0.19, 1.53, 0.48]) + expected_wave_form_rise_time = np.array([11.4, 14., 22.]) + expected_wave_form_peak_to_zero_time = np.array([8.8, 12.9, 12.3]) + expected_wave_form_max_rate_of_rise = np.array([0, 0, 0]) + expected_cloud_indicator = np.array([1, 0, 1]) + expected_angle_indicator = np.array([1, 1, 1]) + expected_signal_indicator = np.array([0, 0, 0]) + expected_timing_indicator = np.array([1, 1, 1]) + + with tempfile.NamedTemporaryFile(mode='w') as t: + t.write( + u'2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t1.91\t1.59\t' + '0.19\t11.4\t8.8\t0.0\t1\t1\t0\t1\n' + '2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t' + '1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n' + '2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t' + '1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n' + '2\t3\t2021\t1\t4\t8\t0\t1\t897014133\t66.8166\t42.4914\t0\t0\t15\t0\t0\t1\t5\t7\t103.87\t4.33\t1.46\t' + '0.48\t22.0\t12.3\t0.0\t1\t1\t0\t1' + ) + + t.seek(0) + filename_info = {} + filetype_info = {} + + self.handler = Gld360Ualf2FileHandler( + t.name, filename_info, filetype_info + ) + + # Test ualf record type. + dataset_id = make_dataid(name='ualf_record_type') + dataset_info = {} + result = self.handler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(result, expected_ualf_record_type) + + # Test network type. + dataset_id = make_dataid(name='network_type') + dataset_info = {} + result = self.handler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(result, expected_network_type) + + # Test time. + dataset_id = make_dataid(name='time') + dataset_info = {} + result = self.handler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(result, expected_time) + + # Test latitude. + dataset_id = make_dataid(name='latitude') + dataset_info = {} + result = self.handler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(result, expected_latitude, rtol=1e-05) + + # Test longitude. 
+            dataset_id = make_dataid(name='longitude')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_longitude, rtol=1e-05)
+
+            # Test altitude.
+            dataset_id = make_dataid(name='altitude')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_altitude)
+
+            # Test altitude uncertainty.
+            dataset_id = make_dataid(name='altitude_uncertainty')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_altitude_uncertainty)
+
+            # Test peak current.
+            dataset_id = make_dataid(name='peak_current')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_peak_current)
+
+            # Test vhf range.
+            dataset_id = make_dataid(name='vhf_range')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_vhf_range)
+
+            # Test multiplicity flash.
+            dataset_id = make_dataid(name='multiplicity_flash')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_multiplicity_flash)
+
+            # Test cloud pulse count.
+            dataset_id = make_dataid(name='cloud_pulse_count')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_cloud_pulse_count)
+
+            # Test number of sensors.
+            dataset_id = make_dataid(name='number_of_sensors')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_number_of_sensors)
+
+            # Test degree freedom for location.
+            dataset_id = make_dataid(name='degree_freedom_for_location')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_degree_freedom_for_location)
+
+            # Test error ellipse angle.
+            dataset_id = make_dataid(name='error_ellipse_angle')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_error_ellipse_angle, rtol=1e-05)
+
+            # Test error ellipse max axis length.
+            dataset_id = make_dataid(name='error_ellipse_max_axis_length')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_error_ellipse_max_axis_length, rtol=1e-05)
+
+            # Test error ellipse min axis length.
+            dataset_id = make_dataid(name='error_ellipse_min_axis_length')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_error_ellipse_min_axis_length, rtol=1e-05)
+
+            # Test chi squared value location optimization.
+            dataset_id = make_dataid(name='chi_squared_value_location_optimization')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_chi_squared_value_location_optimization, rtol=1e-05)
+
+            # Test wave form rise time.
+            dataset_id = make_dataid(name='wave_form_rise_time')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_wave_form_rise_time, rtol=1e-05)
+
+            # Test wave form peak to zero time.
+            dataset_id = make_dataid(name='wave_form_peak_to_zero_time')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_allclose(result, expected_wave_form_peak_to_zero_time, rtol=1e-05)
+
+            # Test wave form max rate of rise.
+            dataset_id = make_dataid(name='wave_form_max_rate_of_rise')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_wave_form_max_rate_of_rise)
+
+            # Test cloud indicator.
+            dataset_id = make_dataid(name='cloud_indicator')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_cloud_indicator)
+
+            # Test angle indicator.
+            dataset_id = make_dataid(name='angle_indicator')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_angle_indicator)
+
+            # Test signal indicator.
+            dataset_id = make_dataid(name='signal_indicator')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_signal_indicator)
+
+            # Test timing indicator.
+            dataset_id = make_dataid(name='timing_indicator')
+            dataset_info = {}
+            result = self.handler.get_dataset(dataset_id, dataset_info).values
+            np.testing.assert_array_equal(result, expected_timing_indicator)
+
+            t.close()
From 596959773dc2215b47404972e4e0c18283c5debc Mon Sep 17 00:00:00 2001
From: Sven-Erik Enno
Date: Thu, 2 Dec 2021 17:19:02 +0100
Subject: [PATCH 02/71] Add my name to AUTHORS.md

---
 AUTHORS.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/AUTHORS.md b/AUTHORS.md
index 9383fceda3..189c216496 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -76,4 +76,5 @@ The following people have made contributions to this project:
 - [oananicola (oananicola)](https://github.com/oananicola)
 - [praerien (praerien)](https://github.com/praerien)
 - [Xin Zhang (zxdawn)](https://github.com/zxdawn)
-- [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600)
\ No newline at end of file
+- [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600)
+- [Sven-Erik Enno (seenno)](https://github.com/seenno)
From dace153d4a1d8915e2b44dbc089f84b23e3410a9 Mon Sep 17 00:00:00 2001
From: Sven-Erik Enno
Date: Thu, 2 Dec 2021 17:32:37 +0100
Subject: [PATCH 03/71] Add new reader to readers list.

---
 doc/source/index.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/doc/source/index.rst b/doc/source/index.rst
index f689674394..667dba9245 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -226,6 +226,9 @@ the base Satpy installation.
    * - Vaisala Global Lightning Dataset GLD360 data in ASCII format
      - `vaisala_gld360`
      - Beta
+   * - Vaisala Global Lightning Dataset GLD360 data in UALF2 format
+     - `gld360_ualf2`
+     - Beta
    * - TROPOMI L2 data in NetCDF4 format
      - `tropomi_l2`
      - Beta
From 18c0e626f22cbce8061e3118c5f0b70a22cbd781 Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Thu, 13 Jun 2024 10:12:44 +0200
Subject: [PATCH 04/71] Rename FileHandler, clean-up docstrings.
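A minimal usage sketch of the handler through Satpy's generic Scene interface
(the reader name "gld360_ualf2" comes from the YAML configuration above; the
example filename is hypothetical and merely matches the declared
"{start_time:%Y.%m.%d.%H.%M}.txt" pattern):

    from satpy import Scene

    # One hour of GLD360 UALF2 records, loaded like any other Satpy dataset.
    scn = Scene(filenames=["2021.01.04.08.00.txt"], reader="gld360_ualf2")
    scn.load(["latitude", "longitude", "peak_current"])
    print(scn["peak_current"].values)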
---
 satpy/etc/readers/gld360_ualf2.yaml           |  11 +-
 satpy/readers/gld360_ualf2.py                 | 147 +++++++++---------
 satpy/tests/reader_tests/test_gld360_ualf2.py |  80 +++++-----
 3 files changed, 121 insertions(+), 117 deletions(-)

diff --git a/satpy/etc/readers/gld360_ualf2.yaml b/satpy/etc/readers/gld360_ualf2.yaml
index d1805ca3c2..fb6e34a6a2 100644
--- a/satpy/etc/readers/gld360_ualf2.yaml
+++ b/satpy/etc/readers/gld360_ualf2.yaml
@@ -1,13 +1,16 @@
 reader:
-  description: Gld360Ualf2 reader
   name: gld360_ualf2
+  short_name: gld360_ualf2
+  long_name: Vaisala GLD360 UALF2
+  description: >
+    Vaisala GLD360 reader for Universal ASCII Lightning Format 2.
-  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader ''
+  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
   sensors: [gld360]
 
 file_types:
   gld360:
-    file_reader: !!python/name:satpy.readers.gld360_ualf2.Gld360Ualf2FileHandler ''
-    file_patterns: ['{start_time:%Y.%m.%d.%H.%M}.txt']
+    file_reader: !!python/name:satpy.readers.gld360_ualf2.VaisalaGld360Ualf2FileHandler
+    file_patterns: ["{start_time:%Y.%m.%d.%H.%M}.txt"]
 
 datasets:
   time:
diff --git a/satpy/readers/gld360_ualf2.py b/satpy/readers/gld360_ualf2.py
index 21608d4401..f281f84960 100644
--- a/satpy/readers/gld360_ualf2.py
+++ b/satpy/readers/gld360_ualf2.py
@@ -16,7 +16,7 @@
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
 
-"""Gld360Ualf2 reader.
+"""Vaisala Global Lightning Dataset 360 reader for Universal ASCII Lightning Format 2 (UALF2).
 
 Vaisala Global Lightning Dataset GLD360 is a data-as-a-service
 offering that provides real-time lightning data for accurate and
@@ -26,6 +26,7 @@
 
 References:
 - [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360
+- [SMHI] https://opendata.smhi.se/apidocs/lightning/parameters.html
 
 """
 
 import logging
@@ -41,112 +42,112 @@
 logger = logging.getLogger(__name__)
 
 
-class Gld360Ualf2FileHandler(BaseFileHandler):
-    """ASCII reader for Vaisala GLD360 UALF2 data."""
+class VaisalaGld360Ualf2FileHandler(BaseFileHandler):
+    """FileHandler for Vaisala GLD360 data in UALF2 format."""
 
     def __init__(self, filename, filename_info, filetype_info):
-        """Initialize Vaisala Gld360Ualf2FileHandler."""
-        super(Gld360Ualf2FileHandler, self).__init__(filename, filename_info, filetype_info)
+        """Initialize FileHandler."""
+        super(VaisalaGld360Ualf2FileHandler, self).__init__(filename, filename_info, filetype_info)
 
         def pad_nanoseconds(nanoseconds):
             """Read ns values for less than 0.1s correctly (these are not zero-padded in the input files)."""
             return str(nanoseconds).zfill(9)
 
         names = [
             "ualf_record_type",
             "network_type",
             "year",
             "month",
             "day",
             "hour",
             "minute",
             "second",
             "nanosecond",
             "latitude",
             "longitude",
             "altitude",
             "altitude_uncertainty",
             "peak_current",
             "vhf_range",
             "multiplicity_flash",
             "cloud_pulse_count",
             "number_of_sensors",
             "degree_freedom_for_location",
             "error_ellipse_angle",
             "error_ellipse_max_axis_length",
             "error_ellipse_min_axis_length",
             "chi_squared_value_location_optimization",
             "wave_form_rise_time",
             "wave_form_peak_to_zero_time",
             "wave_form_max_rate_of_rise",
             "cloud_indicator",
             "angle_indicator",
             "signal_indicator",
             "timing_indicator",
         ]
 
         dtype = {
             "ualf_record_type": np.uint8,
             "network_type": np.uint8,
             "year": str,
             "month": str,
             "day": str,
             "hour": str,
             "minute": str,
             "second": str,
             "latitude": np.float32,
             "longitude": np.float32,
             "altitude": np.uint16,
             "altitude_uncertainty": np.uint16,
             "peak_current": np.int16,
             "vhf_range": np.float32,
             "multiplicity_flash": np.uint8,
             "cloud_pulse_count": np.int16,
             "number_of_sensors": np.uint8,
             "degree_freedom_for_location": np.uint8,
             "error_ellipse_angle": np.float32,
             "error_ellipse_max_axis_length": np.float32,
             "error_ellipse_min_axis_length": np.float32,
             "chi_squared_value_location_optimization": np.float32,
             "wave_form_rise_time": np.float32,
             "wave_form_peak_to_zero_time": np.float32,
             "wave_form_max_rate_of_rise": np.float32,
             "cloud_indicator": bool,
             "angle_indicator": bool,
             "signal_indicator": bool,
             "timing_indicator": bool,
         }
 
-        # Combine 'year', 'month', 'day', 'hour', 'minute', 'second' and 'nanosecond' into a datetime object.
-        parse_dates = {'time': ['year', 'month', 'day', 'hour', 'minute', 'second', 'nanosecond']}
+        # Combine "year", "month", "day", "hour", "minute", "second" and "nanosecond" into a datetime object.
+        parse_dates = {"time": ["year", "month", "day", "hour", "minute", "second", "nanosecond"]}
 
         self.data = dd.read_csv(filename,
-                                sep='\t',
+                                sep="\t",
                                 header=None,
                                 names=names,
                                 dtype=dtype,
                                 parse_dates=parse_dates,
-                                converters={'nanosecond': pad_nanoseconds}
+                                converters={"nanosecond": pad_nanoseconds}
                                 )
 
-        self.data['time'] = dd.to_datetime(self.data['time'], format='%Y %m %d %H %M %S %f')
+        self.data["time"] = dd.to_datetime(self.data["time"], format="%Y %m %d %H %M %S %f")
         self.data = self.data.drop_duplicates()
-        self.data = self.data.sort_values('time')
+        self.data = self.data.sort_values("time")
 
     @property
     def start_time(self):
         """Return start time."""
-        return self.filename_info['start_time']
+        return self.filename_info["start_time"]
 
     @property
     def end_time(self):
         """Return end time."""
-        return self.filename_info['start_time'] + timedelta(hours=1)
+        return self.filename_info["start_time"] + timedelta(hours=1)
 
     def get_dataset(self, dataset_id, dataset_info):
         """Return the dataset."""
         # create xarray and place along y dimension
-        data_array = xr.DataArray(self.data[dataset_id['name']].to_dask_array(lengths=True), dims=['y'])
+        data_array = xr.DataArray(self.data[dataset_id["name"]].to_dask_array(lengths=True), dims=["y"])
         # assign dataset infos to xarray attrs
         data_array.attrs.update(dataset_info)
         return data_array
diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py
index 827539f1f1..1764e1d0ff 100644
--- a/satpy/tests/reader_tests/test_gld360_ualf2.py
+++ b/satpy/tests/reader_tests/test_gld360_ualf2.py
@@ -15,26 +15,26 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Unit testing the GLD360 UALF2 reader."""
+"""Tests for the Vaisala GLD360 UALF2 reader."""
 
 import tempfile
 import unittest
 
 import numpy as np
 
-from satpy.readers.gld360_ualf2 import Gld360Ualf2FileHandler
+from satpy.readers.gld360_ualf2 import VaisalaGld360Ualf2FileHandler
 from satpy.tests.utils import make_dataid
 
 
-class TestVaisalaGLD360TextFileHandler(unittest.TestCase):
-    """Test the Gld360Ualf2FileHandler."""
+class TestVaisalaGld360Ualf2FileHandler(unittest.TestCase):
+    """Test class for the FileHandler."""
 
     def test_vaisala_gld360(self):
         """Test basic functionality for vaisala file handler."""
         expected_ualf_record_type = np.array([2, 2, 2])
         expected_network_type = np.array([3, 3, 3])
-        expected_time = np.array(['2021-01-04T08:00:01.000000051', '2021-01-04T08:00:01.864782486',
-                                  '2021-01-04T08:00:01.897014133'], dtype='datetime64[ns]')
+        expected_time = np.array(["2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486",
+                                  "2021-01-04T08:00:01.897014133"], dtype="datetime64[ns]")
         expected_latitude = np.array([-20.8001, 0.4381, 66.8166])
         expected_longitude = np.array([-158.3439, -0.85, 42.4914])
         expected_altitude = np.array([0, 0, 0])
@@ -57,166 +57,166 @@ def test_vaisala_gld360(self):
         expected_signal_indicator = np.array([0, 0, 0])
         expected_timing_indicator = np.array([1, 1, 1])
 
-        with tempfile.NamedTemporaryFile(mode='w') as t:
+        with tempfile.NamedTemporaryFile(mode="w") as t:
             t.write(
-                u'2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t1.91\t1.59\t'
-                '0.19\t11.4\t8.8\t0.0\t1\t1\t0\t1\n'
-                '2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t'
-                '1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n'
-                '2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t'
-                '1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n'
-                '2\t3\t2021\t1\t4\t8\t0\t1\t897014133\t66.8166\t42.4914\t0\t0\t15\t0\t0\t1\t5\t7\t103.87\t4.33\t1.46\t'
-                '0.48\t22.0\t12.3\t0.0\t1\t1\t0\t1'
+                u"2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t1.91\t1.59\t"
+                "0.19\t11.4\t8.8\t0.0\t1\t1\t0\t1\n"
+                "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t"
+                "1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n"
+                "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t"
+                "1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n"
+                "2\t3\t2021\t1\t4\t8\t0\t1\t897014133\t66.8166\t42.4914\t0\t0\t15\t0\t0\t1\t5\t7\t103.87\t4.33\t1.46\t"
+                "0.48\t22.0\t12.3\t0.0\t1\t1\t0\t1"
             )
 
             t.seek(0)
             filename_info = {}
             filetype_info = {}
 
-            self.handler = Gld360Ualf2FileHandler(
+            self.handler = VaisalaGld360Ualf2FileHandler(
                 t.name, filename_info, filetype_info
             )
 
             # Test ualf record type.
-            dataset_id = make_dataid(name='ualf_record_type')
+            dataset_id = make_dataid(name="ualf_record_type")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_ualf_record_type)
 
             # Test network type.
-            dataset_id = make_dataid(name='network_type')
+            dataset_id = make_dataid(name="network_type")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_network_type)
 
             # Test time.
-            dataset_id = make_dataid(name='time')
+            dataset_id = make_dataid(name="time")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_time)
 
             # Test latitude.
-            dataset_id = make_dataid(name='latitude')
+            dataset_id = make_dataid(name="latitude")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_latitude, rtol=1e-05)
 
             # Test longitude.
-            dataset_id = make_dataid(name='longitude')
+            dataset_id = make_dataid(name="longitude")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_longitude, rtol=1e-05)
 
             # Test altitude.
-            dataset_id = make_dataid(name='altitude')
+            dataset_id = make_dataid(name="altitude")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_altitude)
 
             # Test altitude uncertainty.
-            dataset_id = make_dataid(name='altitude_uncertainty')
+            dataset_id = make_dataid(name="altitude_uncertainty")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_altitude_uncertainty)
 
             # Test peak current.
-            dataset_id = make_dataid(name='peak_current')
+            dataset_id = make_dataid(name="peak_current")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_peak_current)
 
             # Test vhf range.
-            dataset_id = make_dataid(name='vhf_range')
+            dataset_id = make_dataid(name="vhf_range")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_vhf_range)
 
             # Test multiplicity flash.
-            dataset_id = make_dataid(name='multiplicity_flash')
+            dataset_id = make_dataid(name="multiplicity_flash")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_multiplicity_flash)
 
             # Test cloud pulse count.
-            dataset_id = make_dataid(name='cloud_pulse_count')
+            dataset_id = make_dataid(name="cloud_pulse_count")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_cloud_pulse_count)
 
             # Test number of sensors.
-            dataset_id = make_dataid(name='number_of_sensors')
+            dataset_id = make_dataid(name="number_of_sensors")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_number_of_sensors)
 
             # Test degree freedom for location.
-            dataset_id = make_dataid(name='degree_freedom_for_location')
+            dataset_id = make_dataid(name="degree_freedom_for_location")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_degree_freedom_for_location)
 
             # Test error ellipse angle.
-            dataset_id = make_dataid(name='error_ellipse_angle')
+            dataset_id = make_dataid(name="error_ellipse_angle")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_error_ellipse_angle, rtol=1e-05)
 
             # Test error ellipse max axis length.
-            dataset_id = make_dataid(name='error_ellipse_max_axis_length')
+            dataset_id = make_dataid(name="error_ellipse_max_axis_length")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_error_ellipse_max_axis_length, rtol=1e-05)
 
             # Test error ellipse min axis length.
-            dataset_id = make_dataid(name='error_ellipse_min_axis_length')
+            dataset_id = make_dataid(name="error_ellipse_min_axis_length")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_error_ellipse_min_axis_length, rtol=1e-05)
 
             # Test chi squared value location optimization.
-            dataset_id = make_dataid(name='chi_squared_value_location_optimization')
+            dataset_id = make_dataid(name="chi_squared_value_location_optimization")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_chi_squared_value_location_optimization, rtol=1e-05)
 
             # Test wave form rise time.
-            dataset_id = make_dataid(name='wave_form_rise_time')
+            dataset_id = make_dataid(name="wave_form_rise_time")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_wave_form_rise_time, rtol=1e-05)
 
             # Test wave form peak to zero time.
-            dataset_id = make_dataid(name='wave_form_peak_to_zero_time')
+            dataset_id = make_dataid(name="wave_form_peak_to_zero_time")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_allclose(result, expected_wave_form_peak_to_zero_time, rtol=1e-05)
 
             # Test wave form max rate of rise.
-            dataset_id = make_dataid(name='wave_form_max_rate_of_rise')
+            dataset_id = make_dataid(name="wave_form_max_rate_of_rise")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_wave_form_max_rate_of_rise)
 
             # Test cloud indicator.
-            dataset_id = make_dataid(name='cloud_indicator')
+            dataset_id = make_dataid(name="cloud_indicator")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_cloud_indicator)
 
             # Test angle indicator.
-            dataset_id = make_dataid(name='angle_indicator')
+            dataset_id = make_dataid(name="angle_indicator")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_angle_indicator)
 
             # Test signal indicator.
-            dataset_id = make_dataid(name='signal_indicator')
+            dataset_id = make_dataid(name="signal_indicator")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_signal_indicator)
 
             # Test timing indicator.
-            dataset_id = make_dataid(name='timing_indicator')
+            dataset_id = make_dataid(name="timing_indicator")
             dataset_info = {}
             result = self.handler.get_dataset(dataset_id, dataset_info).values
             np.testing.assert_array_equal(result, expected_timing_indicator)
 
             t.close()
From 83678ac95f2983a4a9c0e09a281ad37ef796b833 Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Thu, 13 Jun 2024 11:29:38 +0200
Subject: [PATCH 05/71] Refactor unittests.

---
 satpy/tests/reader_tests/test_gld360_ualf2.py | 445 ++++++++++--------
 1 file changed, 246 insertions(+), 199 deletions(-)

diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py
index 1764e1d0ff..574e46c874 100644
--- a/satpy/tests/reader_tests/test_gld360_ualf2.py
+++ b/satpy/tests/reader_tests/test_gld360_ualf2.py
@@ -17,208 +17,255 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
 """Tests for the Vaisala GLD360 UALF2 reader."""
 
-import tempfile
-import unittest
-
 import numpy as np
+import pytest
 
 from satpy.readers.gld360_ualf2 import VaisalaGld360Ualf2FileHandler
 from satpy.tests.utils import make_dataid
 
 
-class TestVaisalaGld360Ualf2FileHandler(unittest.TestCase):
-    """Test class for the FileHandler."""
-
-    def test_vaisala_gld360(self):
-        """Test basic functionality for vaisala file handler."""
-        expected_ualf_record_type = np.array([2, 2, 2])
-        expected_network_type = np.array([3, 3, 3])
-        expected_time = np.array(["2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486",
-                                  "2021-01-04T08:00:01.897014133"], dtype="datetime64[ns]")
-        expected_latitude = np.array([-20.8001, 0.4381, 66.8166])
-        expected_longitude = np.array([-158.3439, -0.85, 42.4914])
-        expected_altitude = np.array([0, 0, 0])
-        expected_altitude_uncertainty = np.array([0, 0, 0])
-        expected_peak_current = np.array([10, -20, 15])
-        expected_vhf_range = np.array([0, 0, 0])
-        expected_multiplicity_flash = np.array([0, 1, 0])
-        expected_cloud_pulse_count = np.array([1, 0, 1])
-        expected_number_of_sensors = np.array([3, 4, 5])
-        expected_degree_freedom_for_location = np.array([3, 5, 7])
-        expected_error_ellipse_angle = np.array([9.47, 24.99, 103.87])
-        expected_error_ellipse_max_axis_length = np.array([1.91, 1.95, 4.33])
-        expected_error_ellipse_min_axis_length = np.array([1.59, 1.53, 1.46])
-        expected_chi_squared_value_location_optimization = np.array([0.19, 1.53, 0.48])
-        expected_wave_form_rise_time = np.array([11.4, 14., 22.])
-        expected_wave_form_peak_to_zero_time = np.array([8.8, 12.9, 12.3])
-        expected_wave_form_max_rate_of_rise = np.array([0, 0, 0])
-        expected_cloud_indicator = np.array([1, 0, 1])
-        expected_angle_indicator = np.array([1, 1, 1])
-        expected_signal_indicator = np.array([0, 0, 0])
-        expected_timing_indicator = np.array([1, 1, 1])
-
-        with tempfile.NamedTemporaryFile(mode="w") as t:
-            t.write(
-                u"2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t1.91\t1.59\t"
-                "0.19\t11.4\t8.8\t0.0\t1\t1\t0\t1\n"
-                "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t"
-                "1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n"
-                "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t"
-                "1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n"
-                "2\t3\t2021\t1\t4\t8\t0\t1\t897014133\t66.8166\t42.4914\t0\t0\t15\t0\t0\t1\t5\t7\t103.87\t4.33\t1.46\t"
-                "0.48\t22.0\t12.3\t0.0\t1\t1\t0\t1"
-            )
-
-            t.seek(0)
-            filename_info = {}
-            filetype_info = {}
-
-            self.handler = VaisalaGld360Ualf2FileHandler(
-                t.name, filename_info, filetype_info
-            )
-
-            # Test ualf record type.
-            dataset_id = make_dataid(name="ualf_record_type")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_ualf_record_type)
-
-            # Test network type.
-            dataset_id = make_dataid(name="network_type")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_network_type)
-
-            # Test time.
-            dataset_id = make_dataid(name="time")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_time)
-
-            # Test latitude.
-            dataset_id = make_dataid(name="latitude")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_latitude, rtol=1e-05)
-
-            # Test longitude.
-            dataset_id = make_dataid(name="longitude")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_longitude, rtol=1e-05)
-
-            # Test altitude.
-            dataset_id = make_dataid(name="altitude")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_altitude)
-
-            # Test altitude uncertainty.
-            dataset_id = make_dataid(name="altitude_uncertainty")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_altitude_uncertainty)
-
-            # Test peak current.
-            dataset_id = make_dataid(name="peak_current")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_peak_current)
-
-            # Test vhf range.
-            dataset_id = make_dataid(name="vhf_range")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_vhf_range)
-
-            # Test multiplicity flash.
-            dataset_id = make_dataid(name="multiplicity_flash")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_multiplicity_flash)
-
-            # Test cloud pulse count.
-            dataset_id = make_dataid(name="cloud_pulse_count")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_cloud_pulse_count)
-
-            # Test number of sensors.
-            dataset_id = make_dataid(name="number_of_sensors")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_number_of_sensors)
-
-            # Test degree freedom for location.
-            dataset_id = make_dataid(name="degree_freedom_for_location")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_degree_freedom_for_location)
-
-            # Test error ellipse angle.
-            dataset_id = make_dataid(name="error_ellipse_angle")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_error_ellipse_angle, rtol=1e-05)
-
-            # Test error ellipse max axis length.
-            dataset_id = make_dataid(name="error_ellipse_max_axis_length")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_error_ellipse_max_axis_length, rtol=1e-05)
-
-            # Test error ellipse min axis length.
-            dataset_id = make_dataid(name="error_ellipse_min_axis_length")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_error_ellipse_min_axis_length, rtol=1e-05)
-
-            # Test chi squared value location optimization.
-            dataset_id = make_dataid(name="chi_squared_value_location_optimization")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_chi_squared_value_location_optimization, rtol=1e-05)
-
-            # Test wave form rise time.
-            dataset_id = make_dataid(name="wave_form_rise_time")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_wave_form_rise_time, rtol=1e-05)
-
-            # Test wave form peak to zero time.
-            dataset_id = make_dataid(name="wave_form_peak_to_zero_time")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_allclose(result, expected_wave_form_peak_to_zero_time, rtol=1e-05)
-
-            # Test wave form max rate of rise.
-            dataset_id = make_dataid(name="wave_form_max_rate_of_rise")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_wave_form_max_rate_of_rise)
-
-            # Test cloud indicator.
-            dataset_id = make_dataid(name="cloud_indicator")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_cloud_indicator)
-
-            # Test angle indicator.
-            dataset_id = make_dataid(name="angle_indicator")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_angle_indicator)
-
-            # Test signal indicator.
-            dataset_id = make_dataid(name="signal_indicator")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_signal_indicator)
-
-            # Test timing indicator.
-            dataset_id = make_dataid(name="timing_indicator")
-            dataset_info = {}
-            result = self.handler.get_dataset(dataset_id, dataset_info).values
-            np.testing.assert_array_equal(result, expected_timing_indicator)
-
-            t.close()
+@pytest.fixture()
+def gld360_ualf2_file(tmp_path):
+    """Create UALF2 file for the tests."""
+    fname = tmp_path / "gld360_ualf2.asc"
+    with open(fname, "w") as fid:
+        fid.write(
+            u"2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t1.91\t1.59\t"
+            "0.19\t11.4\t8.8\t0.0\t1\t1\t0\t1\n"
+            "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t"
+            "1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n"
+            "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t24.99\t1.95\t1.53\t"
+            "1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n"
+            "2\t3\t2021\t1\t4\t8\t0\t1\t897014133\t66.8166\t42.4914\t0\t0\t15\t0\t0\t1\t5\t7\t103.87\t4.33\t1.46\t"
+            "0.48\t22.0\t12.3\t0.0\t1\t1\t0\t1"
+        )
+
+    return fname
+
+
+@pytest.fixture()
+def gld360_ualf2_filehandler(gld360_ualf2_file):
+    """Create FileHandler for the tests."""
+    filename_info = {}
+    filetype_info = {}
+
+    return VaisalaGld360Ualf2FileHandler(
+        gld360_ualf2_file, filename_info, filetype_info
+    )
+
+
+def test_ualf2_record_type(gld360_ualf2_filehandler):
+    """Test ualf record type."""
+    expected_ualf_record_type = np.array([2, 2, 2])
+    dataset_id = make_dataid(name="ualf_record_type")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_ualf_record_type)
+
+
+def test_network_type(gld360_ualf2_filehandler):
+    """Test network type."""
+    expected_network_type = np.array([3, 3, 3])
+    dataset_id = make_dataid(name="network_type")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_network_type)
+
+
+def test_time(gld360_ualf2_filehandler):
+    """Test time."""
+    expected_time = np.array(["2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486",
+                              "2021-01-04T08:00:01.897014133"], dtype="datetime64[ns]")
+    dataset_id = make_dataid(name="time")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_time)
+
+
+def test_latitude(gld360_ualf2_filehandler):
+    """Test latitude."""
+    expected_latitude = np.array([-20.8001, 0.4381, 66.8166])
+    dataset_id = make_dataid(name="latitude")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_latitude, rtol=1e-05)
+
+
+def test_longitude(gld360_ualf2_filehandler):
+    """Test longitude."""
+    expected_longitude = np.array([-158.3439, -0.85, 42.4914])
+    dataset_id = make_dataid(name="longitude")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_longitude, rtol=1e-05)
+
+
+def test_altitude(gld360_ualf2_filehandler):
+    """Test altitude."""
+    expected_altitude = np.array([0, 0, 0])
+    dataset_id = make_dataid(name="altitude")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_altitude)
+
+
+def test_altitude_uncertainty(gld360_ualf2_filehandler):
+    """Test altitude uncertainty."""
+    expected_altitude_uncertainty = np.array([0, 0, 0])
+    dataset_id = make_dataid(name="altitude_uncertainty")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_altitude_uncertainty)
+
+
+def test_peak_current(gld360_ualf2_filehandler):
+    """Test peak current."""
+    expected_peak_current = np.array([10, -20, 15])
+    dataset_id = make_dataid(name="peak_current")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_peak_current)
+
+
+def test_vhf_range(gld360_ualf2_filehandler):
+    """Test vhf range."""
+    expected_vhf_range = np.array([0, 0, 0])
+    dataset_id = make_dataid(name="vhf_range")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_vhf_range)
+
+
+def test_multiplicity_flash(gld360_ualf2_filehandler):
+    """Test multiplicity flash."""
+    expected_multiplicity_flash = np.array([0, 1, 0])
+    dataset_id = make_dataid(name="multiplicity_flash")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_multiplicity_flash)
+
+
+def test_cloud_pulse_count(gld360_ualf2_filehandler):
+    """Test cloud pulse count."""
+    expected_cloud_pulse_count = np.array([1, 0, 1])
+    dataset_id = make_dataid(name="cloud_pulse_count")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_cloud_pulse_count)
+
+
+def test_number_of_sensors(gld360_ualf2_filehandler):
+    """Test number of sensors."""
+    expected_number_of_sensors = np.array([3, 4, 5])
+    dataset_id = make_dataid(name="number_of_sensors")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_number_of_sensors)
+
+
+def test_degree_freedom_for_location(gld360_ualf2_filehandler):
+    """Test degree freedom for location."""
+    expected_degree_freedom_for_location = np.array([3, 5, 7])
+    dataset_id = make_dataid(name="degree_freedom_for_location")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_degree_freedom_for_location)
+
+
+def test_error_ellipse_angle(gld360_ualf2_filehandler):
+    """Test error ellipse angle."""
+    expected_error_ellipse_angle = np.array([9.47, 24.99, 103.87])
+    dataset_id = make_dataid(name="error_ellipse_angle")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_error_ellipse_angle, rtol=1e-05)
+
+
+def test_error_ellipse_max_axis_length(gld360_ualf2_filehandler):
+    """Test error ellipse max axis length."""
+    expected_error_ellipse_max_axis_length = np.array([1.91, 1.95, 4.33])
+    dataset_id = make_dataid(name="error_ellipse_max_axis_length")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_error_ellipse_max_axis_length, rtol=1e-05)
+
+
+def test_error_ellipse_min_axis_length(gld360_ualf2_filehandler):
+    """Test error ellipse min axis length."""
+    expected_error_ellipse_min_axis_length = np.array([1.59, 1.53, 1.46])
+    dataset_id = make_dataid(name="error_ellipse_min_axis_length")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_error_ellipse_min_axis_length, rtol=1e-05)
+
+
+def test_chi_squared_value_location_optimization(gld360_ualf2_filehandler):
+    """Test chi squared value location optimization."""
+    expected_chi_squared_value_location_optimization = np.array([0.19, 1.53, 0.48])
+    dataset_id = make_dataid(name="chi_squared_value_location_optimization")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_chi_squared_value_location_optimization, rtol=1e-05)
+
+
+def test_wave_form_rise_time(gld360_ualf2_filehandler):
+    """Test wave form rise time."""
+    expected_wave_form_rise_time = np.array([11.4, 14., 22.])
+    dataset_id = make_dataid(name="wave_form_rise_time")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_wave_form_rise_time, rtol=1e-05)
+
+
+def test_wave_form_peak_to_zero_time(gld360_ualf2_filehandler):
+    """Test wave form peak to zero time."""
+    expected_wave_form_peak_to_zero_time = np.array([8.8, 12.9, 12.3])
+    dataset_id = make_dataid(name="wave_form_peak_to_zero_time")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_allclose(result, expected_wave_form_peak_to_zero_time, rtol=1e-05)
+
+
+def test_wave_form_max_rate_of_rise(gld360_ualf2_filehandler):
+    """Test wave form max rate of rise."""
+    expected_wave_form_max_rate_of_rise = np.array([0, 0, 0])
+    dataset_id = make_dataid(name="wave_form_max_rate_of_rise")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_wave_form_max_rate_of_rise)
+
+
+def test_cloud_indicator(gld360_ualf2_filehandler):
+    """Test cloud indicator."""
+    expected_cloud_indicator = np.array([1, 0, 1])
+    dataset_id = make_dataid(name="cloud_indicator")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_cloud_indicator)
+
+
+def test_angle_indicator(gld360_ualf2_filehandler):
+    """Test angle indicator."""
+    expected_angle_indicator = np.array([1, 1, 1])
+    dataset_id = make_dataid(name="angle_indicator")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_angle_indicator)
+
+
+def test_signal_indicator(gld360_ualf2_filehandler):
+    """Test signal indicator."""
+    expected_signal_indicator = np.array([0, 0, 0])
+    dataset_id = make_dataid(name="signal_indicator")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_signal_indicator)
+
+
+def test_timing_indicator(gld360_ualf2_filehandler):
+    """Test timing indicator."""
+    expected_timing_indicator = np.array([1, 1, 1])
+    dataset_id = make_dataid(name="timing_indicator")
+    dataset_info = {}
+    result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values
+    np.testing.assert_array_equal(result, expected_timing_indicator)
From 4018d5fc9004ec352df290a7697991eb49d18d40 Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Thu, 13 Jun 2024 13:56:26 +0200
Subject: [PATCH 06/71] Refactor FileHandler initialization, add unittests.
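A standalone sketch of the column-name construction this commit introduces
(plain Python, runnable on its own; the abbreviated dict is a stand-in for
the full UALF2_DTYPES mapping): the "nanosecond" field is deliberately
absent from the dtype mapping because it is handled by a converter, so it is
spliced into the column names right after "second".

    dtypes = {"second": str, "latitude": float}  # abbreviated stand-in for UALF2_DTYPES
    names = [*dtypes]                            # dict keys, in insertion order
    names.insert(names.index("second") + 1, "nanosecond")
    assert names == ["second", "nanosecond", "latitude"]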
---
 satpy/readers/gld360_ualf2.py                 | 124 ++++++++----------
 satpy/tests/reader_tests/test_gld360_ualf2.py |  23 +++-
 2 files changed, 74 insertions(+), 73 deletions(-)

diff --git a/satpy/readers/gld360_ualf2.py b/satpy/readers/gld360_ualf2.py
index f281f84960..555d19d96b 100644
--- a/satpy/readers/gld360_ualf2.py
+++ b/satpy/readers/gld360_ualf2.py
@@ -41,6 +41,50 @@
 logger = logging.getLogger(__name__)
 
+UALF2_DTYPES = {
+    "ualf_record_type": np.uint8,
+    "network_type": np.uint8,
+    "year": str,
+    "month": str,
+    "day": str,
+    "hour": str,
+    "minute": str,
+    "second": str,
+    "latitude": np.float32,
+    "longitude": np.float32,
+    "altitude": np.uint16,
+    "altitude_uncertainty": np.uint16,
+    "peak_current": np.int16,
+    "vhf_range": np.float32,
+    "multiplicity_flash": np.uint8,
+    "cloud_pulse_count": np.int16,
+    "number_of_sensors": np.uint8,
+    "degree_freedom_for_location": np.uint8,
+    "error_ellipse_angle": np.float32,
+    "error_ellipse_max_axis_length": np.float32,
+    "error_ellipse_min_axis_length": np.float32,
+    "chi_squared_value_location_optimization": np.float32,
+    "wave_form_rise_time": np.float32,
+    "wave_form_peak_to_zero_time": np.float32,
+    "wave_form_max_rate_of_rise": np.float32,
+    "cloud_indicator": bool,
+    "angle_indicator": bool,
+    "signal_indicator": bool,
+    "timing_indicator": bool,
+}
+
+
+def _create_column_names():
+    """Insert the 'nanosecond' column name at the correct index."""
+    tmp = [*UALF2_DTYPES]
+    idx = tmp.index("second") + 1
+    tmp.insert(idx, "nanosecond")
+
+    return tmp
+
+
+UALF2_COLUMN_NAMES = _create_column_names()
+
 
 class VaisalaGld360Ualf2FileHandler(BaseFileHandler):
     """FileHandler for Vaisala GLD360 data in UALF2-format."""
@@ -49,85 +93,16 @@ def __init__(self, filename, filename_info, filetype_info):
         """Initialize FileHandler."""
         super(VaisalaGld360Ualf2FileHandler, self).__init__(filename, filename_info, filetype_info)
 
-        def pad_nanoseconds(nanoseconds):
-            """Read ns values for less than 0.1s correctly (these are not zero-padded in the input files)."""
-            return str(nanoseconds).zfill(9)
-
-        names = [
-            "ualf_record_type",
-            "network_type",
-            "year",
-            "month",
-            "day",
-            "hour",
-            "minute",
-            "second",
-            "nanosecond",
-            "latitude",
-            "longitude",
-            "altitude",
-            "altitude_uncertainty",
-            "peak_current",
-            "vhf_range",
-            "multiplicity_flash",
-            "cloud_pulse_count",
-            "number_of_sensors",
-            "degree_freedom_for_location",
-            "error_ellipse_angle",
-            "error_ellipse_max_axis_length",
-            "error_ellipse_min_axis_length",
-            "chi_squared_value_location_optimization",
-            "wave_form_rise_time",
-            "wave_form_peak_to_zero_time",
-            "wave_form_max_rate_of_rise",
-            "cloud_indicator",
-            "angle_indicator",
-            "signal_indicator",
-            "timing_indicator",
-        ]
-
-        dtype = {
-            "ualf_record_type": np.uint8,
-            "network_type": np.uint8,
-            "year": str,
-            "month": str,
-            "day": str,
-            "hour": str,
-            "minute": str,
-            "second": str,
-            "latitude": np.float32,
-            "longitude": np.float32,
-            "altitude": np.uint16,
-            "altitude_uncertainty": np.uint16,
-            "peak_current": np.int16,
-            "vhf_range": np.float32,
-            "multiplicity_flash": np.uint8,
-            "cloud_pulse_count": np.int16,
-            "number_of_sensors": np.uint8,
-            "degree_freedom_for_location": np.uint8,
-            "error_ellipse_angle": np.float32,
-            "error_ellipse_max_axis_length": np.float32,
-            "error_ellipse_min_axis_length": np.float32,
-            "chi_squared_value_location_optimization": np.float32,
-            "wave_form_rise_time": np.float32,
-            "wave_form_peak_to_zero_time": np.float32,
-            "wave_form_max_rate_of_rise": np.float32,
-            "cloud_indicator": bool,
"angle_indicator": bool, - "signal_indicator": bool, - "timing_indicator": bool, - } - # Combine "year", "month", "day", "hour", "minute", "second" and "nanosecond" into a datetime object. parse_dates = {"time": ["year", "month", "day", "hour", "minute", "second", "nanosecond"]} self.data = dd.read_csv(filename, sep="\t", header=None, - names=names, - dtype=dtype, + names=UALF2_COLUMN_NAMES, + dtype=UALF2_DTYPES, parse_dates=parse_dates, - converters={"nanosecond": pad_nanoseconds} + converters={"nanosecond": self.pad_nanoseconds} ) self.data["time"] = dd.to_datetime(self.data["time"], format="%Y %m %d %H %M %S %f") @@ -151,3 +126,8 @@ def get_dataset(self, dataset_id, dataset_info): # assign dataset infos to xarray attrs data_array.attrs.update(dataset_info) return data_array + + @staticmethod + def pad_nanoseconds(nanoseconds): + """Read ns values for less than 0.1s correctly (these are not zero-padded in the input files).""" + return str(nanoseconds).zfill(9) diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py index 574e46c874..7603b548da 100644 --- a/satpy/tests/reader_tests/test_gld360_ualf2.py +++ b/satpy/tests/reader_tests/test_gld360_ualf2.py @@ -20,7 +20,7 @@ import numpy as np import pytest -from satpy.readers.gld360_ualf2 import VaisalaGld360Ualf2FileHandler +from satpy.readers.gld360_ualf2 import UALF2_COLUMN_NAMES, VaisalaGld360Ualf2FileHandler from satpy.tests.utils import make_dataid @@ -269,3 +269,24 @@ def test_timing_indicator(gld360_ualf2_filehandler): dataset_info = {} result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_timing_indicator) + + +def test_pad_nanoseconds(gld360_ualf2_filehandler): + """Test pad nanoseconds.""" + expected_nanoseconds = "000000013" + result = gld360_ualf2_filehandler.pad_nanoseconds(13) + np.testing.assert_string_equal(result, expected_nanoseconds) + + +def test_nanoseconds_index(): + """Test nanosecond column being after seconds.""" + expected_index = UALF2_COLUMN_NAMES.index("nanosecond") + result = UALF2_COLUMN_NAMES.index("second") + 1 + np.testing.assert_array_equal(result, expected_index) + + +def test_column_names_length(): + """Test correct number of column names.""" + expected_length = 30 + result = len(UALF2_COLUMN_NAMES) + np.testing.assert_equal(result, expected_length) From cb9389ab9f4859ddfc61572726582cb336666284 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 13 Jun 2024 15:33:04 +0200 Subject: [PATCH 07/71] Add unittests and refactor. --- satpy/etc/readers/gld360_ualf2.yaml | 7 +- satpy/tests/reader_tests/test_gld360_ualf2.py | 141 +++++++++++------- 2 files changed, 90 insertions(+), 58 deletions(-) diff --git a/satpy/etc/readers/gld360_ualf2.yaml b/satpy/etc/readers/gld360_ualf2.yaml index fb6e34a6a2..355b6eee9d 100644 --- a/satpy/etc/readers/gld360_ualf2.yaml +++ b/satpy/etc/readers/gld360_ualf2.yaml @@ -2,15 +2,14 @@ reader: name: gld360_ualf2 short_name: gld360_ualf2 long_name: Vaisala GLD360 UALF2 - description: > - Vaisala GLD360 reader for Universal ASCII Lightning Format 2. + description: Vaisala GLD360 reader for Universal ASCII Lightning Format 2. 
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [gld360] file_types: gld360: - file_reader: !!python/name:readers.gld360_ualf2.VaisalaGld360Ualf2FileHandler - file_patterns: ["{start_time:%Y.%m.%d.%H.%M}.txt"] + file_reader: !!python/name:satpy.readers.gld360_ualf2.VaisalaGld360Ualf2FileHandler + file_patterns: ['{start_time:%Y.%m.%d.%H.%M}.txt'] datasets: time: diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py index 7603b548da..6ae60b1e32 100644 --- a/satpy/tests/reader_tests/test_gld360_ualf2.py +++ b/satpy/tests/reader_tests/test_gld360_ualf2.py @@ -17,17 +17,22 @@ # satpy. If not, see . """Tests for the Vaisala GLD360 UALF2-reader.""" +import datetime as dt + import numpy as np import pytest from satpy.readers.gld360_ualf2 import UALF2_COLUMN_NAMES, VaisalaGld360Ualf2FileHandler from satpy.tests.utils import make_dataid +TEST_START_TIME = dt.datetime(2021, 1, 4, 8, 0) +TEST_END_TIME = TEST_START_TIME + dt.timedelta(hours=1) + @pytest.fixture() -def gld360_ualf2_file(tmp_path): +def fake_file(tmp_path): """Create UALF2 file for the tests.""" - fname = tmp_path / "gld360_ualf2.asc" + fname = tmp_path / "2021.01.04.08.00.txt" with open(fname, "w") as fid: fid.write( u"2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t1.91\t1.59\t" @@ -44,237 +49,237 @@ def gld360_ualf2_file(tmp_path): @pytest.fixture() -def gld360_ualf2_filehandler(gld360_ualf2_file): +def fake_filehandler(fake_file): """Create FileHandler for the tests.""" filename_info = {} filetype_info = {} return VaisalaGld360Ualf2FileHandler( - gld360_ualf2_file, filename_info, filetype_info + fake_file, filename_info, filetype_info ) -def test_ualf2_record_type(gld360_ualf2_filehandler): +def test_ualf2_record_type(fake_filehandler): """Test ualf record type.""" expected_ualf_record_type = np.array([2, 2, 2]) dataset_id = make_dataid(name="ualf_record_type") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_ualf_record_type) -def test_network_type(gld360_ualf2_filehandler): +def test_network_type(fake_filehandler): """Test network type.""" expected_network_type = np.array([3, 3, 3]) dataset_id = make_dataid(name="network_type") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_network_type) -def test_time(gld360_ualf2_filehandler): +def test_time(fake_filehandler): """Test time.""" expected_time = np.array(["2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486", "2021-01-04T08:00:01.897014133"], dtype="datetime64[ns]") dataset_id = make_dataid(name="time") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_time) -def test_latitude(gld360_ualf2_filehandler): +def test_latitude(fake_filehandler): """Test latitude.""" expected_latitude = np.array([-20.8001, 0.4381, 66.8166]) dataset_id = make_dataid(name="latitude") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values 
np.testing.assert_allclose(result, expected_latitude, rtol=1e-05) -def test_longitude(gld360_ualf2_filehandler): +def test_longitude(fake_filehandler): """Test longitude.""" expected_longitude = np.array([-158.3439, -0.85, 42.4914]) dataset_id = make_dataid(name="longitude") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_longitude, rtol=1e-05) -def test_altitude(gld360_ualf2_filehandler): +def test_altitude(fake_filehandler): """Test altitude.""" expected_altitude = np.array([0, 0, 0]) dataset_id = make_dataid(name="altitude") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_altitude) -def test_altitude_uncertainty(gld360_ualf2_filehandler): +def test_altitude_uncertainty(fake_filehandler): """Test altitude uncertainty.""" expected_altitude_uncertainty = np.array([0, 0, 0]) dataset_id = make_dataid(name="altitude_uncertainty") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_altitude_uncertainty) -def test_peak_current(gld360_ualf2_filehandler): +def test_peak_current(fake_filehandler): """Test peak current.""" expected_peak_current = np.array([10, -20, 15]) dataset_id = make_dataid(name="peak_current") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_peak_current) -def test_vhf_range(gld360_ualf2_filehandler): +def test_vhf_range(fake_filehandler): """Test vhf range.""" expected_vhf_range = np.array([0, 0, 0]) dataset_id = make_dataid(name="vhf_range") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_vhf_range) -def test_multiplicity_flash(gld360_ualf2_filehandler): +def test_multiplicity_flash(fake_filehandler): """Test multiplicity flash.""" expected_multiplicity_flash = np.array([0, 1, 0]) dataset_id = make_dataid(name="multiplicity_flash") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_multiplicity_flash) -def test_cloud_pulse_count(gld360_ualf2_filehandler): +def test_cloud_pulse_count(fake_filehandler): """Test cloud pulse count.""" expected_cloud_pulse_count = np.array([1, 0, 1]) dataset_id = make_dataid(name="cloud_pulse_count") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_cloud_pulse_count) -def test_number_of_sensors(gld360_ualf2_filehandler): +def test_number_of_sensors(fake_filehandler): """Test number of sensors.""" expected_number_of_sensors = np.array([3, 4, 5]) dataset_id = make_dataid(name="number_of_sensors") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, 
dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_number_of_sensors) -def test_degree_freedom_for_location(gld360_ualf2_filehandler): +def test_degree_freedom_for_location(fake_filehandler): """Test degree freedom for location.""" expected_degree_freedom_for_location = np.array([3, 5, 7]) dataset_id = make_dataid(name="degree_freedom_for_location") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_degree_freedom_for_location) -def test_error_ellipse_angle(gld360_ualf2_filehandler): +def test_error_ellipse_angle(fake_filehandler): """Test error ellipse angle.""" expected_error_ellipse_angle = np.array([9.47, 24.99, 103.87]) dataset_id = make_dataid(name="error_ellipse_angle") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_error_ellipse_angle, rtol=1e-05) -def test_error_ellipse_max_axis_length(gld360_ualf2_filehandler): +def test_error_ellipse_max_axis_length(fake_filehandler): """Test error ellipse max axis length.""" expected_error_ellipse_max_axis_length = np.array([1.91, 1.95, 4.33]) dataset_id = make_dataid(name="error_ellipse_max_axis_length") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_error_ellipse_max_axis_length, rtol=1e-05) -def test_error_ellipse_min_axis_length(gld360_ualf2_filehandler): +def test_error_ellipse_min_axis_length(fake_filehandler): """Test error ellipse min axis length.""" expected_error_ellipse_min_axis_length = np.array([1.59, 1.53, 1.46]) dataset_id = make_dataid(name="error_ellipse_min_axis_length") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_error_ellipse_min_axis_length, rtol=1e-05) -def test_chi_squared_value_location_optimization(gld360_ualf2_filehandler): +def test_chi_squared_value_location_optimization(fake_filehandler): """Test chi squared value location optimization.""" expected_chi_squared_value_location_optimization = np.array([0.19, 1.53, 0.48]) dataset_id = make_dataid(name="chi_squared_value_location_optimization") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_chi_squared_value_location_optimization, rtol=1e-05) -def test_wave_form_rise_time(gld360_ualf2_filehandler): +def test_wave_form_rise_time(fake_filehandler): """Test wave form rise time.""" expected_wave_form_rise_time = np.array([11.4, 14., 22.]) dataset_id = make_dataid(name="wave_form_rise_time") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_wave_form_rise_time, rtol=1e-05) -def test_wave_form_peak_to_zero_time(gld360_ualf2_filehandler): +def test_wave_form_peak_to_zero_time(fake_filehandler): 
"""Test wave form peak to zero time.""" expected_wave_form_peak_to_zero_time = np.array([8.8, 12.9, 12.3]) dataset_id = make_dataid(name="wave_form_peak_to_zero_time") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_wave_form_peak_to_zero_time, rtol=1e-05) -def test_wave_form_max_rate_of_rise(gld360_ualf2_filehandler): +def test_wave_form_max_rate_of_rise(fake_filehandler): """Test wave form max rate of rise.""" expected_wave_form_max_rate_of_rise = np.array([0, 0, 0]) dataset_id = make_dataid(name="wave_form_max_rate_of_rise") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_wave_form_max_rate_of_rise) -def test_cloud_indicator(gld360_ualf2_filehandler): +def test_cloud_indicator(fake_filehandler): """Test cloud indicator.""" expected_cloud_indicator = np.array([1, 0, 1]) dataset_id = make_dataid(name="cloud_indicator") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_cloud_indicator) -def test_angle_indicator(gld360_ualf2_filehandler): +def test_angle_indicator(fake_filehandler): """Test angle indicator.""" expected_angle_indicator = np.array([1, 1, 1]) dataset_id = make_dataid(name="angle_indicator") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_angle_indicator) -def test_signal_indicator(gld360_ualf2_filehandler): +def test_signal_indicator(fake_filehandler): """Test signal indicator.""" expected_signal_indicator = np.array([0, 0, 0]) dataset_id = make_dataid(name="signal_indicator") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_signal_indicator) -def test_timing_indicator(gld360_ualf2_filehandler): +def test_timing_indicator(fake_filehandler): """Test timing indicator.""" expected_timing_indicator = np.array([1, 1, 1]) dataset_id = make_dataid(name="timing_indicator") dataset_info = {} - result = gld360_ualf2_filehandler.get_dataset(dataset_id, dataset_info).values + result = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_timing_indicator) -def test_pad_nanoseconds(gld360_ualf2_filehandler): +def test_pad_nanoseconds(fake_filehandler): """Test pad nanoseconds.""" expected_nanoseconds = "000000013" - result = gld360_ualf2_filehandler.pad_nanoseconds(13) + result = fake_filehandler.pad_nanoseconds(13) np.testing.assert_string_equal(result, expected_nanoseconds) @@ -290,3 +295,31 @@ def test_column_names_length(): expected_length = 30 result = len(UALF2_COLUMN_NAMES) np.testing.assert_equal(result, expected_length) + + +@pytest.fixture() +def fake_scn(fake_file): + """Create fake file for tests.""" + from satpy import Scene + scn = Scene(reader="gld360_ualf2", filenames=[fake_file]) + return scn + +def test_scene_attributes(fake_scn): + """Test for correct start and end times.""" + 
np.testing.assert_equal(fake_scn.start_time, TEST_START_TIME) + np.testing.assert_equal(fake_scn.end_time, TEST_END_TIME) + + +def test_scene_load(fake_scn): + """Test data loading through Scene-object.""" + fake_scn.load(["time", "latitude", "longitude"]) + assert "time" in fake_scn + assert "latitude" in fake_scn + assert "longitude" in fake_scn + + +def test_area_(fake_scn): + """Test correct area instance type.""" + from pyresample.geometry import SwathDefinition + fake_scn.load(["time"]) + assert isinstance(fake_scn["time"].attrs["area"], SwathDefinition) From 04fa702fcbf27cfc64ecb48fbe6ddea62a3181d0 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 13 Jun 2024 15:46:57 +0200 Subject: [PATCH 08/71] Remove deprecated vaisala_gld360-reader, update reader-table in docs. --- satpy/etc/readers/vaisala_gld360.yaml | 47 ---------- satpy/readers/vaisala_gld360.py | 87 ------------------- .../tests/reader_tests/test_vaisala_gld360.py | 76 ---------------- 3 files changed, 210 deletions(-) delete mode 100644 satpy/etc/readers/vaisala_gld360.yaml delete mode 100644 satpy/readers/vaisala_gld360.py delete mode 100644 satpy/tests/reader_tests/test_vaisala_gld360.py diff --git a/satpy/etc/readers/vaisala_gld360.yaml b/satpy/etc/readers/vaisala_gld360.yaml deleted file mode 100644 index 6744d95c23..0000000000 --- a/satpy/etc/readers/vaisala_gld360.yaml +++ /dev/null @@ -1,47 +0,0 @@ -reader: - name: vaisala_gld360 - short_name: Vaisala GLD360 - long_name: Vaisala Global Lightning Dataset GLD360 data in ASCII format - description: Vaisala Global Lightning Dataset 360 reader - status: Beta - supports_fsspec: false - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [vaisala_gld360] - -file_types: - vaisala_gld360: - file_reader: !!python/name:satpy.readers.vaisala_gld360.VaisalaGLD360TextFileHandler - file_patterns: ['flashes_{start_time:%Y%m%d}.txt'] - -datasets: - time: - name: time - sensor: vaisala_gld360 - resolution: 2000 - file_type: vaisala_gld360 - - latitude: - name: latitude - sensor: vaisala_gld360 - resolution: 2000 - file_type: vaisala_gld360 - standard_name: latitude - units: degree_north - - longitude: - name: longitude - sensor: vaisala_gld360 - resolution: 2000 - file_type: vaisala_gld360 - standard_name: longitude - units: degree_east - - power: - name: power - sensor: vaisala_gld360 - resolution: 2000 - file_type: vaisala_gld360 - coordinates: - - longitude - - latitude - units: kA diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py deleted file mode 100644 index b0bde01573..0000000000 --- a/satpy/readers/vaisala_gld360.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2019 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Vaisala Global Lightning Dataset 360 reader. 
- -Vaisala Global Lightning Dataset GLD360 is data as a service -that provides real-time lightning data for accurate and early -detection and tracking of severe weather. The data provided is -generated by a Vaisala owned and operated world-wide lightning -detection sensor network. - -References: -- [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360 - -""" - -import logging - -import dask.array as da -import pandas as pd -import xarray as xr - -from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size - -logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() - - -class VaisalaGLD360TextFileHandler(BaseFileHandler): - """ASCII reader for Vaisala GDL360 data.""" - - def __init__(self, filename, filename_info, filetype_info): - """Initialize VaisalaGLD360TextFileHandler.""" - super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) - - names = ["gld360_date", "gld360_time", "latitude", "longitude", "power", "unit"] - types = ["str", "str", "float", "float", "float", "str"] - dtypes = dict(zip(names, types)) - # Combine 'date' and 'time' into a datetime object - parse_dates = {"time": ["gld360_date", "gld360_time"]} - - self.data = pd.read_csv(filename, delim_whitespace=True, header=None, - names=names, dtype=dtypes, parse_dates=parse_dates) - - @property - def start_time(self): - """Get start time.""" - return self.data["time"].iloc[0] - - @property - def end_time(self): - """Get end time.""" - return self.data["time"].iloc[-1] - - def get_dataset(self, dataset_id, dataset_info): - """Load a dataset.""" - xarr = xr.DataArray(da.from_array(self.data[dataset_id["name"]], - chunks=CHUNK_SIZE), dims=["y"]) - - # Add time, longitude, and latitude as non-dimensional y-coordinates - xarr["time"] = ("y", self.data["time"]) - xarr["longitude"] = ("y", self.data["longitude"]) - xarr["latitude"] = ("y", self.data["latitude"]) - - if dataset_id["name"] == "power": - # Check that units in the file match the unit specified in the - # reader yaml-file - if not (self.data.unit == dataset_info["units"]).all(): - raise ValueError("Inconsistent units found in file!") - xarr.attrs.update(dataset_info) - - return xarr diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py deleted file mode 100644 index c08450613a..0000000000 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2019 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
-"""Unittesting the Vaisala GLD360 reader.""" - -import unittest -from io import StringIO - -import numpy as np - -from satpy.readers.vaisala_gld360 import VaisalaGLD360TextFileHandler -from satpy.tests.utils import make_dataid - - -class TestVaisalaGLD360TextFileHandler(unittest.TestCase): - """Test the VaisalaGLD360TextFileHandler.""" - - def test_vaisala_gld360(self): - """Test basic functionality for vaisala file handler.""" - expected_power = np.array([12.3, 13.2, -31.]) - expected_lat = np.array([30.5342, -0.5727, 12.1529]) - expected_lon = np.array([-90.1152, 104.0688, -10.8756]) - expected_time = np.array(["2017-06-20T00:00:00.007178000", "2017-06-20T00:00:00.020162000", - "2017-06-20T00:00:00.023183000"], dtype="datetime64[ns]") - - filename = StringIO( - u"2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n" - "2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n" - "2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA" - ) - filename_info = {} - filetype_info = {} - - self.handler = VaisalaGLD360TextFileHandler( - filename, filename_info, filetype_info - ) - - filename.close() - - # test power - dataset_id = make_dataid(name="power") - dataset_info = {"units": "kA"} - result = self.handler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_power, rtol=1e-05) - - # test lat - dataset_id = make_dataid(name="latitude") - dataset_info = {} - result = self.handler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_lat, rtol=1e-05) - - # test lon - dataset_id = make_dataid(name="longitude") - dataset_info = {} - result = self.handler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_lon, rtol=1e-05) - - # test time - dataset_id = make_dataid(name="time") - dataset_info = {} - result = self.handler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_time) From 6576e6fc30de43485a86ded2348652cf6610e6f1 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 13 Jun 2024 16:18:19 +0200 Subject: [PATCH 09/71] Refactor and clean-up long variable names in tests. 
--- satpy/tests/reader_tests/test_gld360_ualf2.py | 165 +++++++++--------- 1 file changed, 83 insertions(+), 82 deletions(-) diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py index 6ae60b1e32..73443ff944 100644 --- a/satpy/tests/reader_tests/test_gld360_ualf2.py +++ b/satpy/tests/reader_tests/test_gld360_ualf2.py @@ -61,240 +61,240 @@ def fake_filehandler(fake_file): def test_ualf2_record_type(fake_filehandler): """Test ualf record type.""" - expected_ualf_record_type = np.array([2, 2, 2]) + expected = np.array([2, 2, 2]) dataset_id = make_dataid(name="ualf_record_type") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_ualf_record_type) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_network_type(fake_filehandler): """Test network type.""" - expected_network_type = np.array([3, 3, 3]) + expected = np.array([3, 3, 3]) dataset_id = make_dataid(name="network_type") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_network_type) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_time(fake_filehandler): """Test time.""" - expected_time = np.array(["2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486", - "2021-01-04T08:00:01.897014133"], dtype="datetime64[ns]") + expected = np.array(["2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486", + "2021-01-04T08:00:01.897014133"], dtype="datetime64[ns]") dataset_id = make_dataid(name="time") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_time) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_latitude(fake_filehandler): """Test latitude.""" - expected_latitude = np.array([-20.8001, 0.4381, 66.8166]) + expected = np.array([-20.8001, 0.4381, 66.8166]) dataset_id = make_dataid(name="latitude") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_latitude, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_longitude(fake_filehandler): """Test longitude.""" - expected_longitude = np.array([-158.3439, -0.85, 42.4914]) + expected = np.array([-158.3439, -0.85, 42.4914]) dataset_id = make_dataid(name="longitude") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_longitude, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_altitude(fake_filehandler): """Test altitude.""" - expected_altitude = np.array([0, 0, 0]) + expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="altitude") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_altitude) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_altitude_uncertainty(fake_filehandler): 
"""Test altitude uncertainty.""" - expected_altitude_uncertainty = np.array([0, 0, 0]) + expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="altitude_uncertainty") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_altitude_uncertainty) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_peak_current(fake_filehandler): """Test peak current.""" - expected_peak_current = np.array([10, -20, 15]) + expected = np.array([10, -20, 15]) dataset_id = make_dataid(name="peak_current") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_peak_current) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_vhf_range(fake_filehandler): """Test vhf range.""" - expected_vhf_range = np.array([0, 0, 0]) + expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="vhf_range") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_vhf_range) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_multiplicity_flash(fake_filehandler): """Test multiplicity flash.""" - expected_multiplicity_flash = np.array([0, 1, 0]) + expected = np.array([0, 1, 0]) dataset_id = make_dataid(name="multiplicity_flash") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_multiplicity_flash) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_cloud_pulse_count(fake_filehandler): """Test cloud pulse count.""" - expected_cloud_pulse_count = np.array([1, 0, 1]) + expected = np.array([1, 0, 1]) dataset_id = make_dataid(name="cloud_pulse_count") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_cloud_pulse_count) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_number_of_sensors(fake_filehandler): """Test number of sensors.""" - expected_number_of_sensors = np.array([3, 4, 5]) + expected = np.array([3, 4, 5]) dataset_id = make_dataid(name="number_of_sensors") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_number_of_sensors) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_degree_freedom_for_location(fake_filehandler): """Test degree freedom for location.""" - expected_degree_freedom_for_location = np.array([3, 5, 7]) + expected = np.array([3, 5, 7]) dataset_id = make_dataid(name="degree_freedom_for_location") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_degree_freedom_for_location) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_error_ellipse_angle(fake_filehandler): """Test error ellipse angle.""" - expected_error_ellipse_angle = np.array([9.47, 24.99, 
103.87]) + expected = np.array([9.47, 24.99, 103.87]) dataset_id = make_dataid(name="error_ellipse_angle") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_error_ellipse_angle, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_error_ellipse_max_axis_length(fake_filehandler): """Test error ellipse max axis length.""" - expected_error_ellipse_max_axis_length = np.array([1.91, 1.95, 4.33]) + expected = np.array([1.91, 1.95, 4.33]) dataset_id = make_dataid(name="error_ellipse_max_axis_length") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_error_ellipse_max_axis_length, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_error_ellipse_min_axis_length(fake_filehandler): """Test error ellipse min axis length.""" - expected_error_ellipse_min_axis_length = np.array([1.59, 1.53, 1.46]) + expected = np.array([1.59, 1.53, 1.46]) dataset_id = make_dataid(name="error_ellipse_min_axis_length") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_error_ellipse_min_axis_length, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_chi_squared_value_location_optimization(fake_filehandler): """Test chi squared value location optimization.""" - expected_chi_squared_value_location_optimization = np.array([0.19, 1.53, 0.48]) + expected = np.array([0.19, 1.53, 0.48]) dataset_id = make_dataid(name="chi_squared_value_location_optimization") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_chi_squared_value_location_optimization, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_wave_form_rise_time(fake_filehandler): """Test wave form rise time.""" - expected_wave_form_rise_time = np.array([11.4, 14., 22.]) + expected = np.array([11.4, 14., 22.]) dataset_id = make_dataid(name="wave_form_rise_time") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_wave_form_rise_time, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_wave_form_peak_to_zero_time(fake_filehandler): """Test wave form peak to zero time.""" - expected_wave_form_peak_to_zero_time = np.array([8.8, 12.9, 12.3]) + expected = np.array([8.8, 12.9, 12.3]) dataset_id = make_dataid(name="wave_form_peak_to_zero_time") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_allclose(result, expected_wave_form_peak_to_zero_time, rtol=1e-05) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_wave_form_max_rate_of_rise(fake_filehandler): """Test wave form max rate of rise.""" - expected_wave_form_max_rate_of_rise = np.array([0, 0, 0]) + expected = np.array([0, 0, 0]) dataset_id = 
make_dataid(name="wave_form_max_rate_of_rise") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_wave_form_max_rate_of_rise) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_cloud_indicator(fake_filehandler): """Test cloud indicator.""" - expected_cloud_indicator = np.array([1, 0, 1]) + expected = np.array([1, 0, 1]) dataset_id = make_dataid(name="cloud_indicator") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_cloud_indicator) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_angle_indicator(fake_filehandler): """Test angle indicator.""" - expected_angle_indicator = np.array([1, 1, 1]) + expected = np.array([1, 1, 1]) dataset_id = make_dataid(name="angle_indicator") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_angle_indicator) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_signal_indicator(fake_filehandler): """Test signal indicator.""" - expected_signal_indicator = np.array([0, 0, 0]) + expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="signal_indicator") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_signal_indicator) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_timing_indicator(fake_filehandler): """Test timing indicator.""" - expected_timing_indicator = np.array([1, 1, 1]) + expected = np.array([1, 1, 1]) dataset_id = make_dataid(name="timing_indicator") dataset_info = {} - result = fake_filehandler.get_dataset(dataset_id, dataset_info).values - np.testing.assert_array_equal(result, expected_timing_indicator) + actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values + np.testing.assert_array_equal(actual, expected) def test_pad_nanoseconds(fake_filehandler): """Test pad nanoseconds.""" - expected_nanoseconds = "000000013" - result = fake_filehandler.pad_nanoseconds(13) - np.testing.assert_string_equal(result, expected_nanoseconds) + expected = "000000013" + actual = fake_filehandler.pad_nanoseconds(13) + np.testing.assert_string_equal(actual, expected) def test_nanoseconds_index(): """Test nanosecond column being after seconds.""" - expected_index = UALF2_COLUMN_NAMES.index("nanosecond") - result = UALF2_COLUMN_NAMES.index("second") + 1 - np.testing.assert_array_equal(result, expected_index) + expected = UALF2_COLUMN_NAMES.index("nanosecond") + actual = UALF2_COLUMN_NAMES.index("second") + 1 + np.testing.assert_array_equal(actual, expected) def test_column_names_length(): """Test correct number of column names.""" - expected_length = 30 - result = len(UALF2_COLUMN_NAMES) - np.testing.assert_equal(result, expected_length) + expected = 30 + actual = len(UALF2_COLUMN_NAMES) + np.testing.assert_equal(actual, expected) @pytest.fixture() @@ -304,6 +304,7 @@ def fake_scn(fake_file): scn = Scene(reader="gld360_ualf2", filenames=[fake_file]) return scn + def test_scene_attributes(fake_scn): """Test for correct start and end times.""" 
     np.testing.assert_equal(fake_scn.start_time, TEST_START_TIME)

From e6b6f482449751d4af983d5d7be39f1f73d0392c Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Fri, 14 Jun 2024 09:53:59 +0200
Subject: [PATCH 10/71] Make time-parsing future-proof.

---
 satpy/readers/gld360_ualf2.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/gld360_ualf2.py b/satpy/readers/gld360_ualf2.py
index 555d19d96b..d26af8e611 100644
--- a/satpy/readers/gld360_ualf2.py
+++ b/satpy/readers/gld360_ualf2.py
@@ -93,19 +93,23 @@ def __init__(self, filename, filename_info, filetype_info):
         """Initialize FileHandler."""
         super(VaisalaGld360Ualf2FileHandler, self).__init__(filename, filename_info, filetype_info)
 
-        # Combine "year", "month", "day", "hour", "minute", "second" and "nanosecond" into a datetime object.
-        parse_dates = {"time": ["year", "month", "day", "hour", "minute", "second", "nanosecond"]}
-
         self.data = dd.read_csv(filename,
                                 sep="\t",
                                 header=None,
                                 names=UALF2_COLUMN_NAMES,
                                 dtype=UALF2_DTYPES,
-                                parse_dates=parse_dates,
                                 converters={"nanosecond": self.pad_nanoseconds}
                                 )
-        self.data["time"] = dd.to_datetime(self.data["time"], format="%Y %m %d %H %M %S %f")
+        combined_time = (self.data["year"] + " " +
+                         self.data["month"] + " " +
+                         self.data["day"] + " " +
+                         self.data["hour"] + " " +
+                         self.data["minute"] + " " +
+                         self.data["second"] + " " +
+                         self.data["nanosecond"])
+
+        self.data["time"] = dd.to_datetime(combined_time, format="%Y %m %d %H %M %S %f")
 
         self.data = self.data.drop_duplicates()
         self.data = self.data.sort_values("time")

From c6b5c4aeed848cd0c7a087d761ea81091c877aa5 Mon Sep 17 00:00:00 2001
From: andream
Date: Thu, 1 Aug 2024 09:31:49 +0000
Subject: [PATCH 11/71] remove mocking to enable actual testing

---
 satpy/tests/reader_tests/test_fci_l1c_nc.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py
index d31713b68b..2ddd930556 100644
--- a/satpy/tests/reader_tests/test_fci_l1c_nc.py
+++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py
@@ -897,7 +897,6 @@ def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_
             segpos_info = filetype_handler.get_segment_position_info()
             assert segpos_info == expected_pos_info
 
-    @mock.patch("satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader")
     @pytest.mark.parametrize(("channel", "resolution"), generate_parameters("radiance"))
     def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution):
         """Test that checks that the get_segment_position_info has not been called for AF data."""

From 0f77d57edc2747294cb027bd2c0297b7f3e6a9cf Mon Sep 17 00:00:00 2001
From: andream
Date: Thu, 1 Aug 2024 09:35:29 +0000
Subject: [PATCH 12/71] add try-except to work around the failures caused by
 the mocking

---
 satpy/tests/reader_tests/test_fci_l1c_nc.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py
index 2ddd930556..3f0dd6de3f 100644
--- a/satpy/tests/reader_tests/test_fci_l1c_nc.py
+++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py
@@ -903,7 +903,13 @@ def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader
         with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi:
             fh_param = FakeFCIFileHandlerAF_fixture
             reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
-            reader.load([channel])
+            try:
+ # attempt to load the channel + reader.load([channel]) + except KeyError: + # if get_segment_position_info is called, the code will fail with a KeyError because of the mocking. + # So we catch the error here for now, but the test will still fail with the following assert_not_called + pass gspi.assert_not_called() @pytest.mark.parametrize("calibration", ["index_map","pixel_quality"]) From ca0d996081139eeabe094f83602bd23a0e013863 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 09:36:52 +0000 Subject: [PATCH 13/71] change the count_in_repeat_cycle numbering for the African products to enable the nominal padding mechanism (which should not be actually activated anyway) --- satpy/readers/fci_l1c_nc.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index ec849fc96e..98da127d7d 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -222,6 +222,11 @@ def __init__(self, filename, filename_info, filetype_info): # change number of chunk so that padding gets activated correctly on missing chunks self.filename_info["count_in_repeat_cycle"] += 28 + if self.filename_info["coverage"] == "AF": + # change number of chunk from 0 to 1 so that the padding is not activated (chunk 1 is present and only 1 + # chunk is expected), as the African dissemination products come in one file per full disk. + self.filename_info["count_in_repeat_cycle"] = 1 + if self.filename_info["facility_or_tool"] == "IQTI": self.is_iqt = True else : From 18f8c43bc92264b0579a1aef3e9b1edd8e047557 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 09:37:39 +0000 Subject: [PATCH 14/71] update the test to check that the chunk numbering for the african products is modified correctly --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 3f0dd6de3f..1eafab81ac 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -1030,7 +1030,7 @@ def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param,comp reader_configs,compare_tuples) @pytest.mark.parametrize(("channel","resolution","compare_tuples"), - [("vis_06","3km",(0,10, + [("vis_06","3km",(1,10, datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime("2024-01-09 08:10:00", "%Y-%m-%d %H:%M:%S")))]) def test_count_in_repeat_cycle_rc_period_min_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, From 70a38384e300e65dcea2d24ffccfbe35cf63a490 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 09:45:24 +0000 Subject: [PATCH 15/71] reformat the test file --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 503 +++++++++++--------- 1 file changed, 270 insertions(+), 233 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 1eafab81ac..a8a6cbee5f 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -67,103 +67,103 @@ } LIST_CHANNEL_SOLAR = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", - "nir_13", "nir_16", "nir_22"] + "nir_13", "nir_16", "nir_22"] LIST_CHANNEL_TERRAN = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", - "ir_123", "ir_133"] + "ir_123", "ir_133"] LIST_TOTAL_CHANNEL = LIST_CHANNEL_SOLAR + LIST_CHANNEL_TERRAN -LIST_RESOLUTION_V06 = ["1km","3km"] +LIST_RESOLUTION_V06 = 
["1km", "3km"] LIST_RESOLUTION = ["3km"] EXPECTED_POS_INFO_FOR_FILETYPE = { - "fdhsi": {"1km": {"start_position_row": 1, - "end_position_row": 200, - "segment_height": 200, - "grid_width": 11136}, - "2km": {"start_position_row": 1, - "end_position_row": 100, - "segment_height": 100, - "grid_width": 5568}}, - "hrfi": {"500m": {"start_position_row": 1, - "end_position_row": 400, - "segment_height": 400, - "grid_width": 22272}, - "1km": {"start_position_row": 1, - "end_position_row": 200, - "grid_width": 11136, - "segment_height": 200}}, - "fci_af" : {"3km": {"start_position_row": 1, - "end_position_row": 67, - "segment_height": 67, - "grid_width": 3712 - }, - }, - "fci_af_vis_06" : {"3km": {"start_position_row": 1, - "end_position_row": 67, - "segment_height": 67, - "grid_width": 3712 - }, - "1km": {"start_position_row": 1, - "end_position_row": 200, - "grid_width": 11136, - "segment_height": 200} - } - } + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}}, + "fci_af": {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + }, + "fci_af_vis_06": {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200} + } +} CHANS_FHDSI = {"solar": LIST_CHANNEL_SOLAR, - "solar_grid_type": ["1km"] * 8, - "terran": LIST_CHANNEL_TERRAN, - "terran_grid_type": ["2km"] * 8} + "solar_grid_type": ["1km"] * 8, + "terran": LIST_CHANNEL_TERRAN, + "terran_grid_type": ["2km"] * 8} CHANS_HRFI = {"solar": ["vis_06", "nir_22"], - "solar_grid_type": ["500m"] * 2, - "terran": ["ir_38", "ir_105"], - "terran_grid_type": ["1km"] * 2} - -DICT_CALIBRATION = { "radiance" : {"dtype": np.float32, - "value_1": 15, - "value_0":9700, - "attrs_dict":{"calibration":"radiance", - "units":"mW m-2 sr-1 (cm-1)-1", - "radiance_unit_conversion_coefficient": np.float32(1234.56) - }, - }, - - "reflectance" : {"dtype": np.float32, - "attrs_dict":{"calibration":"reflectance", - "units":"%" + "solar_grid_type": ["500m"] * 2, + "terran": ["ir_38", "ir_105"], + "terran_grid_type": ["1km"] * 2} + +DICT_CALIBRATION = {"radiance": {"dtype": np.float32, + "value_1": 15, + "value_0": 9700, + "attrs_dict": {"calibration": "radiance", + "units": "mW m-2 sr-1 (cm-1)-1", + "radiance_unit_conversion_coefficient": np.float32(1234.56) + }, + }, + + "reflectance": {"dtype": np.float32, + "attrs_dict": {"calibration": "reflectance", + "units": "%" + }, }, - }, - "counts" : {"dtype": np.uint16, - "value_1": 1, - "value_0": 5000, - "attrs_dict":{"calibration":"counts", - "units":"count", - }, - }, - - "brightness_temperature" : {"dtype": np.float32, - "value_1": np.float32(209.68275), - "value_0": np.float32(1888.8513), - "attrs_dict":{"calibration":"brightness_temperature", - "units":"K", - }, - }, -} + "counts": {"dtype": np.uint16, + "value_1": 1, + "value_0": 5000, + "attrs_dict": {"calibration": "counts", + "units": "count", + }, + }, + + "brightness_temperature": {"dtype": np.float32, + "value_1": np.float32(209.68275), + "value_0": 
np.float32(1888.8513), + "attrs_dict": {"calibration": "brightness_temperature", + "units": "K", + }, + }, + } TEST_FILENAMES = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], "fdhsi_error": [ - "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FDD--" - "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" - "20170410113925_20170410113934_N__C_0070_0067.nc" -], + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FDD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc" + ], "fdhsi_iqti": [ - "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" - "CHK-BODY--MON-NC4_C_EUMT_20240307233956_IQTI_DEV_" - "20231016125007_20231016125017_N__C_0078_0001.nc" -], + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" + "CHK-BODY--MON-NC4_C_EUMT_20240307233956_IQTI_DEV_" + "20231016125007_20231016125017_N__C_0078_0001.nc" + ], "hrfi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" @@ -175,14 +175,15 @@ "20231016125007_20231016125017_N__C_0078_0001.nc" ], "fdhsi_q4": ["W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-Q4--" - "CHK-BODY--DIS-NC4E_C_EUMT_20230723025408_IDPFI_DEV_" - "20230722120000_20230722120027_N_JLS_C_0289_0001.nc" - ], + "CHK-BODY--DIS-NC4E_C_EUMT_20230723025408_IDPFI_DEV_" + "20230722120000_20230722120027_N_JLS_C_0289_0001.nc" + ], "hrfi_q4": ["W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-Q4--" "CHK-BODY--DIS-NC4E_C_EUMT_20230723025408_IDPFI_DEV" "_20230722120000_20230722120027_N_JLS_C_0289_0001.nc"] } + def resolutions(channel): """Get the resolutions.""" if channel == "vis_06": @@ -190,25 +191,29 @@ def resolutions(channel): else: return LIST_RESOLUTION + def fill_chans_af(): """Fill the dict CHANS_AF and the list TEST_FILENAMES with the right channel and resolution.""" CHANS_AF = {} for channel in LIST_TOTAL_CHANNEL: list_resol = resolutions(channel) for resol in list_resol: - chann_upp = channel.replace("_","").upper() + chann_upp = channel.replace("_", "").upper() TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" - f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" - f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] - if channel.split("_")[0] in ["vis","nir"]: - CHANS_AF[f"{channel}_{resol}"] = {"solar":[channel], - "solar_grid_type": [resol]} - elif channel.split("_")[0] in ["ir","wv"]: - CHANS_AF[f"{channel}_{resol}"] = {"terran":[channel], - "terran_grid_type": [resol]} - return CHANS_AF,TEST_FILENAMES - -CHANS_AF,TEST_FILENAMES = fill_chans_af() + f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" + f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] + if channel.split("_")[0] in ["vis", "nir"]: + CHANS_AF[f"{channel}_{resol}"] = {"solar": [channel], + "solar_grid_type": [resol]} + elif channel.split("_")[0] in ["ir", "wv"]: + CHANS_AF[f"{channel}_{resol}"] = {"terran": [channel], + "terran_grid_type": [resol]} + return CHANS_AF, TEST_FILENAMES + + +CHANS_AF, TEST_FILENAMES = fill_chans_af() + + # ---------------------------------------------------- # Filehandlers preparation --------------------------- # ---------------------------------------------------- @@ -465,6 +470,7 @@ class FakeFCIFileHandlerFDHSI(FakeFCIFileHandlerBase): "grid_type": "2km"}, } + class 
FakeFCIFileHandlerFDHSIIQTI(FakeFCIFileHandlerFDHSI): """Mock IQTI for FHDSI data.""" @@ -504,6 +510,7 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): "grid_type": "1km"}, } + class FakeFCIFileHandlerHRFIIQTI(FakeFCIFileHandlerHRFI): """Mock IQTI for HRFI data.""" @@ -518,6 +525,7 @@ class FakeFCIFileHandlerAF(FakeFCIFileHandlerBase): """Mock AF data.""" chan_patterns = {} + # ---------------------------------------------------- # Fixtures preparation ------------------------------- # ---------------------------------------------------- @@ -538,6 +546,7 @@ def _get_reader_with_filehandlers(filenames, reader_configs): clear_cache(reader) return reader + def clear_cache(reader): """Clear the cache for file handlres in reader.""" for key in reader.file_handlers: @@ -545,6 +554,7 @@ def clear_cache(reader): for fh in fhs: fh.cached_file_content = {} + def get_list_channel_calibration(calibration): """Get the channel's list according the calibration.""" if calibration == "reflectance": @@ -554,12 +564,14 @@ def get_list_channel_calibration(calibration): else: return LIST_TOTAL_CHANNEL + def generate_parameters(calibration): """Generate dinamicaly the parameters.""" for channel in get_list_channel_calibration(calibration): for resolution in resolutions(channel): yield (channel, resolution) + @contextlib.contextmanager def mocked_basefilehandler(filehandler): """Mock patch the base class of the FCIL1cNCFileHandler with the content of our fake files (filehandler).""" @@ -580,6 +592,7 @@ def FakeFCIFileHandlerFDHSI_fixture(): } yield param_dict + @pytest.fixture() def FakeFCIFileHandlerFDHSIError_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" @@ -603,6 +616,7 @@ def FakeFCIFileHandlerFDHSIIQTI_fixture(): } yield param_dict + @pytest.fixture() def FakeFCIFileHandlerFDHSIQ4_fixture(): """Get a fixture for the fake FDHSI Q4 filehandler, including channel and file names.""" @@ -614,6 +628,7 @@ def FakeFCIFileHandlerFDHSIQ4_fixture(): } yield param_dict + @pytest.fixture() def FakeFCIFileHandlerHRFI_fixture(): """Get a fixture for the fake HRFI filehandler, including channel and file names.""" @@ -625,6 +640,7 @@ def FakeFCIFileHandlerHRFI_fixture(): } yield param_dict + @pytest.fixture() def FakeFCIFileHandlerHRFIIQTI_fixture(): """Get a fixture for the fake HRFI IQTI filehandler, including channel and file names.""" @@ -636,6 +652,7 @@ def FakeFCIFileHandlerHRFIIQTI_fixture(): } yield param_dict + @pytest.fixture() def FakeFCIFileHandlerHRFIQ4_fixture(): """Get a fixture for the fake HRFI Q4 filehandler, including channel and file names.""" @@ -649,10 +666,10 @@ def FakeFCIFileHandlerHRFIQ4_fixture(): @pytest.fixture() -def FakeFCIFileHandlerAF_fixture(channel,resolution): +def FakeFCIFileHandlerAF_fixture(channel, resolution): """Get a fixture for the fake AF filehandler, it contains only one channel and one resolution.""" - chan_patterns = {channel.split("_")[0]+"_{:>02d}": {"channels": [int(channel.split("_")[1])], - "grid_type": f"{resolution}"},} + chan_patterns = {channel.split("_")[0] + "_{:>02d}": {"channels": [int(channel.split("_")[1])], + "grid_type": f"{resolution}"}, } FakeFCIFileHandlerAF.chan_patterns = chan_patterns with mocked_basefilehandler(FakeFCIFileHandlerAF): param_dict = { @@ -662,6 +679,7 @@ def FakeFCIFileHandlerAF_fixture(channel,resolution): } yield param_dict + # ---------------------------------------------------- # Tests ---------------------------------------------- # 
---------------------------------------------------- @@ -669,46 +687,46 @@ class ModuleTestFCIL1cNcReader: """Class containing parameters and modules useful for the test related to L1c reader.""" fh_param_for_filetype = {"hrfi": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi"]}, - "fdhsi": {"channels": CHANS_FHDSI, + "fdhsi": {"channels": CHANS_FHDSI, "filenames": TEST_FILENAMES["fdhsi"]}, - "fdhsi_iqti": {"channels": CHANS_FHDSI, - "filenames": TEST_FILENAMES["fdhsi_iqti"]}, - "hrfi_q4": {"channels": CHANS_HRFI, - "filenames": TEST_FILENAMES["hrfi_q4"]}, - "hrfi_iqti": {"channels": CHANS_HRFI, - "filenames": TEST_FILENAMES["hrfi_iqti"]}, - "fdhsi_q4": {"channels": CHANS_FHDSI, - "filenames": TEST_FILENAMES["fdhsi_q4"]}} - - def _get_type_ter_AF(self,channel): + "fdhsi_iqti": {"channels": CHANS_FHDSI, + "filenames": TEST_FILENAMES["fdhsi_iqti"]}, + "hrfi_q4": {"channels": CHANS_HRFI, + "filenames": TEST_FILENAMES["hrfi_q4"]}, + "hrfi_iqti": {"channels": CHANS_HRFI, + "filenames": TEST_FILENAMES["hrfi_iqti"]}, + "fdhsi_q4": {"channels": CHANS_FHDSI, + "filenames": TEST_FILENAMES["fdhsi_q4"]}} + + def _get_type_ter_AF(self, channel): """Get the type_ter.""" - if channel.split("_")[0] in ["vis","nir"]: + if channel.split("_")[0] in ["vis", "nir"]: return "solar" - elif channel.split("_")[0] in ["wv","ir"]: + elif channel.split("_")[0] in ["wv", "ir"]: return "terran" - def _get_assert_attrs(self,res,ch,attrs_dict): + def _get_assert_attrs(self, res, ch, attrs_dict): """Test the differents attributes values.""" - for key,item in attrs_dict.items(): + for key, item in attrs_dict.items(): assert res[ch].attrs[key] == item - def _get_assert_erased_attrs(self,res,ch): + def _get_assert_erased_attrs(self, res, ch): """Test that the attributes listed have been erased.""" - LIST_ATTRIBUTES = ["add_offset","warm_add_offset","scale_factor", - "warm_scale_factor","valid_range"] + LIST_ATTRIBUTES = ["add_offset", "warm_add_offset", "scale_factor", + "warm_scale_factor", "valid_range"] for atr in LIST_ATTRIBUTES: assert atr not in res[ch].attrs - def _reflectance_test(self,tab,filenames): + def _reflectance_test(self, tab, filenames): """Test of with the reflectance test.""" if "IQTI" in filenames: - numpy.testing.assert_array_almost_equal(tab, - 93.6462,decimal=4) - else : numpy.testing.assert_array_almost_equal(tab, - 100 * 15 * 1 * np.pi / 50) + 93.6462, decimal=4) + else: + numpy.testing.assert_array_almost_equal(tab, + 100 * 15 * 1 * np.pi / 50) - def _other_calibration_test(self,res,ch,dict_arg): + def _other_calibration_test(self, res, ch, dict_arg): """Test of other calibration test.""" if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) @@ -716,53 +734,51 @@ def _other_calibration_test(self,res,ch,dict_arg): else: numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) - def _shape_test(self,res,ch,grid_type,dict_arg): + def _shape_test(self, res, ch, grid_type, dict_arg): """Test the shape.""" assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == dict_arg["dtype"] - def _get_assert_load(self,res,ch,dict_arg,filenames): + def _get_assert_load(self, res, ch, dict_arg, filenames): """Test the value for differents channels.""" - self._get_assert_attrs(res,ch,dict_arg["attrs_dict"]) - if dict_arg["attrs_dict"]["calibration"] in ["radiance","brightness_temperature","reflectance"]: - self._get_assert_erased_attrs(res,ch) + self._get_assert_attrs(res, 
ch, dict_arg["attrs_dict"]) + if dict_arg["attrs_dict"]["calibration"] in ["radiance", "brightness_temperature", "reflectance"]: + self._get_assert_erased_attrs(res, ch) if dict_arg["attrs_dict"]["calibration"] == "reflectance": - self._reflectance_test(res[ch],filenames) - else : - self._other_calibration_test(res,ch,dict_arg) - - + self._reflectance_test(res[ch], filenames) + else: + self._other_calibration_test(res, ch, dict_arg) - def _get_res_AF(self,channel,fh_param,calibration,reader_configs): + def _get_res_AF(self, channel, fh_param, calibration, reader_configs): """Load the reader for AF data.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) type_ter = self._get_type_ter_AF(channel) res = reader.load([make_dataid(name=name, calibration=calibration) - for name in fh_param["channels"][type_ter]], pad_data=False) + for name in fh_param["channels"][type_ter]], pad_data=False) return res - def _compare_sun_earth_distance(self,filetype,fh_param,reader_configs): + def _compare_sun_earth_distance(self, filetype, fh_param, reader_configs): """Test the sun earth distance.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) if "IQTI" in fh_param["filenames"][0]: np.testing.assert_almost_equal( - reader.file_handlers[filetype][0]._compute_sun_earth_distance, - 0.996803423,decimal=7) - else : + reader.file_handlers[filetype][0]._compute_sun_earth_distance, + 0.996803423, decimal=7) + else: np.testing.assert_almost_equal( - reader.file_handlers[filetype][0]._compute_sun_earth_distance, - 1.0,decimal=7) + reader.file_handlers[filetype][0]._compute_sun_earth_distance, + 1.0, decimal=7) - def _compare_rc_period_min_count_in_repeat_cycle(self,filetype,fh_param, - reader_configs,compare_parameters_tuple): + def _compare_rc_period_min_count_in_repeat_cycle(self, filetype, fh_param, + reader_configs, compare_parameters_tuple): """Test the count_in_repeat_cycle, rc_period_min.""" - count_in_repeat_cycle_imp,rc_period_min_imp,start_nominal_time,end_nominal_time = compare_parameters_tuple + count_in_repeat_cycle_imp, rc_period_min_imp, start_nominal_time, end_nominal_time = compare_parameters_tuple reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) assert count_in_repeat_cycle_imp == \ - reader.file_handlers[filetype][0].filename_info["count_in_repeat_cycle"] + reader.file_handlers[filetype][0].filename_info["count_in_repeat_cycle"] assert rc_period_min_imp == \ - reader.file_handlers[filetype][0].rc_period_min + reader.file_handlers[filetype][0].rc_period_min assert start_nominal_time == reader.file_handlers[filetype][0].nominal_start_time assert end_nominal_time == reader.file_handlers[filetype][0].nominal_end_time @@ -793,15 +809,15 @@ def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 0 - @pytest.mark.parametrize("calibration", ["counts","radiance","brightness_temperature","reflectance"]) - @pytest.mark.parametrize(("fh_param","res_type"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"),"hdfi"), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"),"hrfi"), - (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"),"hrfi"), - (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"),"hdfi"), - (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"),"hrfi"), - (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"),"hdfi")]) + @pytest.mark.parametrize("calibration", ["counts", "radiance", "brightness_temperature", "reflectance"]) + 
@pytest.mark.parametrize(("fh_param", "res_type"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), "hdfi"), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), "hrfi"), + (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), "hrfi"), + (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), "hdfi"), + (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), "hrfi"), + (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), "hdfi")]) def test_load_calibration(self, reader_configs, fh_param, - caplog,calibration,res_type): + caplog, calibration, res_type): """Test loading with counts,radiance,reflectance and bt.""" expected_res_n = {} if calibration == "reflectance": @@ -828,37 +844,36 @@ def test_load_calibration(self, reader_configs, fh_param, assert expected_res_n[res_type] == len(res) for ch, grid_type in zip(list_chan, list_grid): - self._shape_test(res,ch,grid_type,DICT_CALIBRATION[calibration]) + self._shape_test(res, ch, grid_type, DICT_CALIBRATION[calibration]) self._get_assert_load(res, ch, DICT_CALIBRATION[calibration], fh_param["filenames"][0]) @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ - (calibration, channel, resolution) - for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"] - for channel, resolution in generate_parameters(calibration) + (calibration, channel, resolution) + for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"] + for channel, resolution in generate_parameters(calibration) ]) - def test_load_calibration_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,calibration,caplog): + def test_load_calibration_af(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, calibration, caplog): """Test loading with counts,radiance,reflectance and bt for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture type_ter = self._get_type_ter_AF(channel) with caplog.at_level(logging.WARNING): - res = self._get_res_AF(channel,fh_param,calibration,reader_configs) + res = self._get_res_AF(channel, fh_param, calibration, reader_configs) assert caplog.text == "" assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - self._shape_test(res,ch,grid_type,DICT_CALIBRATION[calibration]) - self._get_assert_load(res,ch,DICT_CALIBRATION[calibration], + self._shape_test(res, ch, grid_type, DICT_CALIBRATION[calibration]) + self._get_assert_load(res, ch, DICT_CALIBRATION[calibration], fh_param["filenames"][0]) - @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), - (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), - (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), - (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), - (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"))]) + (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), + (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), + (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"))]) def test_orbital_parameters_attr(self, reader_configs, fh_param): """Test the orbital parameter attribute.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) @@ -869,11 +884,11 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]: assert res[ch].attrs["orbital_parameters"] == { "satellite_actual_longitude": 
np.mean(np.arange(6000)) if "IQTI" not in - fh_param["filenames"][0] else 0.0, + fh_param["filenames"][0] else 0.0, "satellite_actual_latitude": np.mean(np.arange(6000)) if "IQTI" not in - fh_param["filenames"][0] else 0.0, + fh_param["filenames"][0] else 0.0, "satellite_actual_altitude": np.mean(np.arange(6000)) if "IQTI" not in - fh_param["filenames"][0] else 35786400.0, + fh_param["filenames"][0] else 35786400.0, "satellite_nominal_longitude": 0.0, "satellite_nominal_latitude": 0, "satellite_nominal_altitude": 35786400.0, @@ -885,7 +900,7 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), - (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"),EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), + (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]) @@ -898,7 +913,7 @@ def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_ assert segpos_info == expected_pos_info @pytest.mark.parametrize(("channel", "resolution"), generate_parameters("radiance")) - def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_not_get_segment_info_called_af(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, resolution): """Test that checks that the get_segment_position_info has not been called for AF data.""" with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: fh_param = FakeFCIFileHandlerAF_fixture @@ -912,14 +927,15 @@ def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader pass gspi.assert_not_called() - @pytest.mark.parametrize("calibration", ["index_map","pixel_quality"]) + @pytest.mark.parametrize("calibration", ["index_map", "pixel_quality"]) @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4), - (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), 4), - (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), 4), - (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), 16)]) - def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n,calibration): + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4), + (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), 4), + (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), 4), + ( + lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), 16)]) + def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n, calibration): """Test loading of index_map and pixel_quality.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( @@ -930,7 +946,7 @@ def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n,calib fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"]): assert res[f"{ch}_{calibration}"].shape == 
(GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) if calibration == "index_map": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) elif calibration == "pixel_quality": @@ -938,30 +954,29 @@ def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n,calib assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ - (calibration, channel, resolution) - for calibration in ["index_map","pixel_quality"] - for channel, resolution in generate_parameters(calibration) + (calibration, channel, resolution) + for calibration in ["index_map", "pixel_quality"] + for channel, resolution in generate_parameters(calibration) ]) - def test_load_map_and_pixel_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,calibration): + def test_load_map_and_pixel_af(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, calibration): """Test loading with of index_map and pixel_quality for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) type_ter = self._get_type_ter_AF(channel) res = reader.load([f"{name}_{calibration}" - for name in fh_param["channels"][type_ter]], pad_data=False) + for name in fh_param["channels"][type_ter]], pad_data=False) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) if calibration == "index_map": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) elif calibration == "pixel_quality": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" - @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), @@ -979,10 +994,10 @@ def test_load_aux_data(self, reader_configs, fh_param): assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) if (aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance") and ("IQTI" not in - fh_param["filenames"][0]): + fh_param["filenames"][0]): numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7) elif aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance": - numpy.testing.assert_array_equal(res[aux][1,1], np.nan) + numpy.testing.assert_array_equal(res[aux][1, 1], np.nan) else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) @@ -1003,62 +1018,85 @@ def test_platform_name(self, reader_configs, fh_param): res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" - - @pytest.mark.parametrize(("fh_param","compare_tuples"), - [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"),(67,10, - datetime.datetime.strptime("2017-04-10 11:30:00", "%Y-%m-%d %H:%M:%S"), - datetime.datetime.strptime("2017-04-10 11:40:00", "%Y-%m-%d %H:%M:%S"))), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"),(67,10, - datetime.datetime.strptime("2017-04-10 11:30:00", "%Y-%m-%d %H:%M:%S"), - 
datetime.datetime.strptime("2017-04-10 11:40:00", "%Y-%m-%d %H:%M:%S"))), - (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"),(29,2.5, - datetime.datetime.strptime("2023-07-22 12:00:00", "%Y-%m-%d %H:%M:%S"), - datetime.datetime.strptime("2023-07-22 12:02:30", "%Y-%m-%d %H:%M:%S"))), - (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"),(29,2.5, - datetime.datetime.strptime("2023-07-22 12:00:00", "%Y-%m-%d %H:%M:%S"), - datetime.datetime.strptime("2023-07-22 12:02:30", "%Y-%m-%d %H:%M:%S"))), - (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"),(1,10, - datetime.datetime.strptime("2023-10-16 12:50:00", "%Y-%m-%d %H:%M:%S"), - datetime.datetime.strptime("2023-10-16 13:00:00", "%Y-%m-%d %H:%M:%S"))), - (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"),(1,10, - datetime.datetime.strptime("2023-10-16 12:50:00", "%Y-%m-%d %H:%M:%S"), - datetime.datetime.strptime("2023-10-16 13:00:00", "%Y-%m-%d %H:%M:%S"))), - ]) - def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param,compare_tuples): + @pytest.mark.parametrize(("fh_param", "compare_tuples"), + [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), (67, 10, + datetime.datetime.strptime( + "2017-04-10 11:30:00", + "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime( + "2017-04-10 11:40:00", + "%Y-%m-%d %H:%M:%S"))), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), (67, 10, + datetime.datetime.strptime( + "2017-04-10 11:30:00", + "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime( + "2017-04-10 11:40:00", + "%Y-%m-%d %H:%M:%S"))), + (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), (29, 2.5, + datetime.datetime.strptime( + "2023-07-22 12:00:00", + "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime( + "2023-07-22 12:02:30", + "%Y-%m-%d %H:%M:%S"))), + (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), (29, 2.5, + datetime.datetime.strptime( + "2023-07-22 12:00:00", + "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime( + "2023-07-22 12:02:30", + "%Y-%m-%d %H:%M:%S"))), + (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), (1, 10, + datetime.datetime.strptime( + "2023-10-16 12:50:00", + "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime( + "2023-10-16 13:00:00", + "%Y-%m-%d %H:%M:%S"))), + (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), (1, 10, + datetime.datetime.strptime( + "2023-10-16 12:50:00", + "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime( + "2023-10-16 13:00:00", + "%Y-%m-%d %H:%M:%S"))), + ]) + def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param, compare_tuples): """Test the rc_period_min value for each configurations.""" - self._compare_rc_period_min_count_in_repeat_cycle(fh_param["filetype"],fh_param, - reader_configs,compare_tuples) - - @pytest.mark.parametrize(("channel","resolution","compare_tuples"), - [("vis_06","3km",(1,10, - datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), - datetime.datetime.strptime("2024-01-09 08:10:00", "%Y-%m-%d %H:%M:%S")))]) + self._compare_rc_period_min_count_in_repeat_cycle(fh_param["filetype"], fh_param, + reader_configs, compare_tuples) + + @pytest.mark.parametrize(("channel", "resolution", "compare_tuples"), + [("vis_06", "3km", (1, 10, + datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime("2024-01-09 08:10:00", + "%Y-%m-%d %H:%M:%S")))]) def test_count_in_repeat_cycle_rc_period_min_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, - channel,compare_tuples): + channel, compare_tuples): """Test the rc_period_min value for each configurations.""" fh_param = 
FakeFCIFileHandlerAF_fixture - self._compare_rc_period_min_count_in_repeat_cycle(f"{fh_param['filetype']}_{channel}",fh_param, - reader_configs,compare_tuples) + self._compare_rc_period_min_count_in_repeat_cycle(f"{fh_param['filetype']}_{channel}", fh_param, + reader_configs, compare_tuples) @pytest.mark.parametrize(("fh_param"), - [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), - (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), - (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), - (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), - (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture")), - ]) + [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), + (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), + (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture")), + ]) def test_compute_earth_sun_parameter(self, reader_configs, fh_param): """Test the computation of the sun_earth_parameter.""" - self._compare_sun_earth_distance(fh_param["filetype"],fh_param,reader_configs) - + self._compare_sun_earth_distance(fh_param["filetype"], fh_param, reader_configs) - @pytest.mark.parametrize(("channel","resolution"),[("vis_06","3km")]) + @pytest.mark.parametrize(("channel", "resolution"), [("vis_06", "3km")]) def test_compute_earth_sun_parameter_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, - channel): + channel): """Test the rc_period_min value for each configurations.""" fh_param = FakeFCIFileHandlerAF_fixture - self._compare_sun_earth_distance(f"{fh_param['filetype']}_{channel}",fh_param,reader_configs) + self._compare_sun_earth_distance(f"{fh_param['filetype']}_{channel}", fh_param, reader_configs) @pytest.mark.parametrize(("fh_param"), [(lazy_fixture("FakeFCIFileHandlerFDHSIError_fixture"))]) def test_rc_period_min_error(self, reader_configs, fh_param): @@ -1066,7 +1104,6 @@ def test_rc_period_min_error(self, reader_configs, fh_param): with pytest.raises(NotImplementedError): _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - @pytest.mark.parametrize(("fh_param", "expected_area"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), From e18a9bfd798b63319dcd74c8de89b7355c86ccf8 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:00:44 +0000 Subject: [PATCH 16/71] switch to staticmethod --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 26 ++++++++++++++------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index a8a6cbee5f..4c398936c1 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -698,26 +698,30 @@ class ModuleTestFCIL1cNcReader: "fdhsi_q4": {"channels": CHANS_FHDSI, "filenames": TEST_FILENAMES["fdhsi_q4"]}} - def _get_type_ter_AF(self, channel): + @staticmethod + def _get_type_ter_AF(channel): """Get the type_ter.""" if channel.split("_")[0] in ["vis", "nir"]: return "solar" elif channel.split("_")[0] in ["wv", "ir"]: return "terran" - def _get_assert_attrs(self, res, ch, attrs_dict): + @staticmethod + def _get_assert_attrs(res, ch, attrs_dict): """Test the differents attributes values.""" for key, item in attrs_dict.items(): assert 
res[ch].attrs[key] == item - def _get_assert_erased_attrs(self, res, ch): + @staticmethod + def _get_assert_erased_attrs(res, ch): """Test that the attributes listed have been erased.""" LIST_ATTRIBUTES = ["add_offset", "warm_add_offset", "scale_factor", "warm_scale_factor", "valid_range"] for atr in LIST_ATTRIBUTES: assert atr not in res[ch].attrs - def _reflectance_test(self, tab, filenames): + @staticmethod + def _reflectance_test(tab, filenames): """Test of with the reflectance test.""" if "IQTI" in filenames: numpy.testing.assert_array_almost_equal(tab, @@ -726,7 +730,8 @@ def _reflectance_test(self, tab, filenames): numpy.testing.assert_array_almost_equal(tab, 100 * 15 * 1 * np.pi / 50) - def _other_calibration_test(self, res, ch, dict_arg): + @staticmethod + def _other_calibration_test(res, ch, dict_arg): """Test of other calibration test.""" if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) @@ -734,7 +739,8 @@ def _other_calibration_test(self, res, ch, dict_arg): else: numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) - def _shape_test(self, res, ch, grid_type, dict_arg): + @staticmethod + def _shape_test(res, ch, grid_type, dict_arg): """Test the shape.""" assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) @@ -758,8 +764,9 @@ def _get_res_AF(self, channel, fh_param, calibration, reader_configs): for name in fh_param["channels"][type_ter]], pad_data=False) return res - def _compare_sun_earth_distance(self, filetype, fh_param, reader_configs): - """Test the sun earth distance.""" + @staticmethod + def _compare_sun_earth_distance(filetype, fh_param, reader_configs): + """Test the sun earth distance calculation.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) if "IQTI" in fh_param["filenames"][0]: np.testing.assert_almost_equal( @@ -770,7 +777,8 @@ def _compare_sun_earth_distance(self, filetype, fh_param, reader_configs): reader.file_handlers[filetype][0]._compute_sun_earth_distance, 1.0, decimal=7) - def _compare_rc_period_min_count_in_repeat_cycle(self, filetype, fh_param, + @staticmethod + def _compare_rc_period_min_count_in_repeat_cycle(filetype, fh_param, reader_configs, compare_parameters_tuple): """Test the count_in_repeat_cycle, rc_period_min.""" count_in_repeat_cycle_imp, rc_period_min_imp, start_nominal_time, end_nominal_time = compare_parameters_tuple From b69f42502d5afd3809afd7d9df6b28e130b9a262 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:01:30 +0000 Subject: [PATCH 17/71] clarify resolutions AF variable names --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 22 ++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 4c398936c1..54fc26c39f 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -71,8 +71,8 @@ LIST_CHANNEL_TERRAN = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", "ir_123", "ir_133"] LIST_TOTAL_CHANNEL = LIST_CHANNEL_SOLAR + LIST_CHANNEL_TERRAN -LIST_RESOLUTION_V06 = ["1km", "3km"] -LIST_RESOLUTION = ["3km"] +LIST_RESOLUTION_VIS06_AF = ["1km", "3km"] +LIST_RESOLUTION_AF = ["3km"] EXPECTED_POS_INFO_FOR_FILETYPE = { "fdhsi": {"1km": {"start_position_row": 1, "end_position_row": 200, @@ -184,19 +184,19 @@ } -def resolutions(channel): - """Get the resolutions.""" +def resolutions_AF_products(channel): 
+ """Get the resolutions of the African products.""" if channel == "vis_06": - return LIST_RESOLUTION_V06 + return LIST_RESOLUTION_VIS06_AF else: - return LIST_RESOLUTION + return LIST_RESOLUTION_AF def fill_chans_af(): """Fill the dict CHANS_AF and the list TEST_FILENAMES with the right channel and resolution.""" CHANS_AF = {} for channel in LIST_TOTAL_CHANNEL: - list_resol = resolutions(channel) + list_resol = resolutions_AF_products(channel) for resol in list_resol: chann_upp = channel.replace("_", "").upper() TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" @@ -208,10 +208,10 @@ def fill_chans_af(): elif channel.split("_")[0] in ["ir", "wv"]: CHANS_AF[f"{channel}_{resol}"] = {"terran": [channel], "terran_grid_type": [resol]} - return CHANS_AF, TEST_FILENAMES + return CHANS_AF -CHANS_AF, TEST_FILENAMES = fill_chans_af() +CHANS_AF = fill_chans_af() # ---------------------------------------------------- @@ -568,8 +568,8 @@ def get_list_channel_calibration(calibration): def generate_parameters(calibration): """Generate dinamicaly the parameters.""" for channel in get_list_channel_calibration(calibration): - for resolution in resolutions(channel): - yield (channel, resolution) + for resolution in resolutions_AF_products(channel): + yield channel, resolution @contextlib.contextmanager From ebb792ada4786e6e20d344f36df4a0ee8700ebf5 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:02:02 +0000 Subject: [PATCH 18/71] remove redundant parenthesis --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 54fc26c39f..013a4de52d 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -271,7 +271,7 @@ def _get_test_calib_for_channel_ir(data, meas_path): def _get_test_calib_for_channel_vis(data, meas): data["state/celestial/earth_sun_distance"] = FakeH5Variable( da.repeat(da.array([149597870.7]), 6000), dims=("index")) - data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array((50.0), dtype=np.float32)) + data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array(50.0, dtype=np.float32)) return data @@ -477,7 +477,7 @@ class FakeFCIFileHandlerFDHSIIQTI(FakeFCIFileHandlerFDHSI): def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() data.update({"state/celestial/earth_sun_distance": FakeH5Variable( - da.repeat(da.array([np.nan]), 6000), dims=("index"))}) + da.repeat(da.array([np.nan]), 6000), dims="index")}) return data @@ -517,7 +517,7 @@ class FakeFCIFileHandlerHRFIIQTI(FakeFCIFileHandlerHRFI): def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() data.update({"state/celestial/earth_sun_distance": FakeH5Variable( - da.repeat(da.array([np.nan]), 6000), dims=("x"))}) + da.repeat(da.array([np.nan]), 6000), dims="x")}) return data From 274f96119db3c3506704fa0c85ad8c2f851f8cb6 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:18:59 +0000 Subject: [PATCH 19/71] fix dtype passing --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 013a4de52d..8c64b9a31e 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ 
-397,7 +397,7 @@ def _get_test_content_aux_data(): # skip population of earth_sun_distance as this is already defined for reflectance calculation if key == "earth_sun_distance": continue - data[value] = xr.DataArray(da.arange(indices_dim, dtype="float32"), dims=("index")) + data[value] = xr.DataArray(da.arange(indices_dim, dtype=np.dtype("float32")), dims="index") # compute the last data entry to simulate the FCI caching # data[list(AUX_DATA.values())[-1]] = data[list(AUX_DATA.values())[-1]].compute() From f5f9b4255d36abf91630b8934e03b9acc9043cd9 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:19:33 +0000 Subject: [PATCH 20/71] fix typos --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 47 +++++++++++---------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 8c64b9a31e..9bc367a310 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -108,7 +108,7 @@ } } -CHANS_FHDSI = {"solar": LIST_CHANNEL_SOLAR, +CHANS_FDHSI = {"solar": LIST_CHANNEL_SOLAR, "solar_grid_type": ["1km"] * 8, "terran": LIST_CHANNEL_TERRAN, "terran_grid_type": ["2km"] * 8} @@ -194,21 +194,22 @@ def resolutions_AF_products(channel): def fill_chans_af(): """Fill the dict CHANS_AF and the list TEST_FILENAMES with the right channel and resolution.""" - CHANS_AF = {} + chans_af = {} for channel in LIST_TOTAL_CHANNEL: list_resol = resolutions_AF_products(channel) for resol in list_resol: chann_upp = channel.replace("_", "").upper() TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" - f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" + f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_" + f"20240125144655_DT_OPE" f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] if channel.split("_")[0] in ["vis", "nir"]: - CHANS_AF[f"{channel}_{resol}"] = {"solar": [channel], + chans_af[f"{channel}_{resol}"] = {"solar": [channel], "solar_grid_type": [resol]} elif channel.split("_")[0] in ["ir", "wv"]: - CHANS_AF[f"{channel}_{resol}"] = {"terran": [channel], + chans_af[f"{channel}_{resol}"] = {"terran": [channel], "terran_grid_type": [resol]} - return CHANS_AF + return chans_af CHANS_AF = fill_chans_af() @@ -235,7 +236,7 @@ def _set_meta(self): self.dtype = self._data.dtype def __array__(self): - """Get the array data..""" + """Get the array data.""" return self._data.__array__() def __getitem__(self, key): @@ -270,7 +271,7 @@ def _get_test_calib_for_channel_ir(data, meas_path): def _get_test_calib_for_channel_vis(data, meas): data["state/celestial/earth_sun_distance"] = FakeH5Variable( - da.repeat(da.array([149597870.7]), 6000), dims=("index")) + da.repeat(da.array([149597870.7]), 6000), dims="index") data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array(50.0, dtype=np.float32)) return data @@ -403,7 +404,7 @@ def _get_test_content_aux_data(): # data[list(AUX_DATA.values())[-1]] = data[list(AUX_DATA.values())[-1]].compute() data["index"] = xr.DataArray( - da.ones(indices_dim, dtype="uint16") * 100, dims=("index")) + da.ones(indices_dim, dtype="uint16") * 100, dims="index") return data @@ -566,7 +567,7 @@ def get_list_channel_calibration(calibration): def generate_parameters(calibration): - """Generate dinamicaly the parameters.""" + """Generate dynamically the parameters.""" for channel in get_list_channel_calibration(calibration): for resolution in 
resolutions_AF_products(channel): yield channel, resolution @@ -587,7 +588,7 @@ def FakeFCIFileHandlerFDHSI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": CHANS_FHDSI, + "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi"] } yield param_dict @@ -599,7 +600,7 @@ def FakeFCIFileHandlerFDHSIError_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": CHANS_FHDSI, + "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_error"] } yield param_dict @@ -611,7 +612,7 @@ def FakeFCIFileHandlerFDHSIIQTI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSIIQTI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": CHANS_FHDSI, + "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_iqti"] } yield param_dict @@ -623,7 +624,7 @@ def FakeFCIFileHandlerFDHSIQ4_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": CHANS_FHDSI, + "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_q4"] } yield param_dict @@ -687,15 +688,15 @@ class ModuleTestFCIL1cNcReader: """Class containing parameters and modules useful for the test related to L1c reader.""" fh_param_for_filetype = {"hrfi": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi"]}, - "fdhsi": {"channels": CHANS_FHDSI, + "fdhsi": {"channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi"]}, - "fdhsi_iqti": {"channels": CHANS_FHDSI, + "fdhsi_iqti": {"channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_iqti"]}, "hrfi_q4": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi_q4"]}, "hrfi_iqti": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi_iqti"]}, - "fdhsi_q4": {"channels": CHANS_FHDSI, + "fdhsi_q4": {"channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_q4"]}} @staticmethod @@ -708,7 +709,7 @@ def _get_type_ter_AF(channel): @staticmethod def _get_assert_attrs(res, ch, attrs_dict): - """Test the differents attributes values.""" + """Test the different attributes values.""" for key, item in attrs_dict.items(): assert res[ch].attrs[key] == item @@ -747,7 +748,7 @@ def _shape_test(res, ch, grid_type, dict_arg): assert res[ch].dtype == dict_arg["dtype"] def _get_assert_load(self, res, ch, dict_arg, filenames): - """Test the value for differents channels.""" + """Test the value for different channels.""" self._get_assert_attrs(res, ch, dict_arg["attrs_dict"]) if dict_arg["attrs_dict"]["calibration"] in ["radiance", "brightness_temperature", "reflectance"]: self._get_assert_erased_attrs(res, ch) @@ -930,7 +931,7 @@ def test_not_get_segment_info_called_af(self, FakeFCIFileHandlerAF_fixture, read # attempt to load the channel reader.load([channel]) except KeyError: - # if get_segment_position_info is called, the code will fail with a KeyError because of the mocking. + # If get_segment_position_info is called, the code will fail with a KeyError because of the mocking. 
# So we catch the error here for now, but the test will still fail with the following assert_not_called pass gspi.assert_not_called() @@ -1071,7 +1072,7 @@ def test_platform_name(self, reader_configs, fh_param): "%Y-%m-%d %H:%M:%S"))), ]) def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param, compare_tuples): - """Test the rc_period_min value for each configurations.""" + """Test the rc_period_min value for each configuration.""" self._compare_rc_period_min_count_in_repeat_cycle(fh_param["filetype"], fh_param, reader_configs, compare_tuples) @@ -1082,7 +1083,7 @@ def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param, com "%Y-%m-%d %H:%M:%S")))]) def test_count_in_repeat_cycle_rc_period_min_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, compare_tuples): - """Test the rc_period_min value for each configurations.""" + """Test the rc_period_min value for each configuration.""" fh_param = FakeFCIFileHandlerAF_fixture self._compare_rc_period_min_count_in_repeat_cycle(f"{fh_param['filetype']}_{channel}", fh_param, reader_configs, compare_tuples) @@ -1102,7 +1103,7 @@ def test_compute_earth_sun_parameter(self, reader_configs, fh_param): @pytest.mark.parametrize(("channel", "resolution"), [("vis_06", "3km")]) def test_compute_earth_sun_parameter_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel): - """Test the rc_period_min value for each configurations.""" + """Test the rc_period_min value for each configuration.""" fh_param = FakeFCIFileHandlerAF_fixture self._compare_sun_earth_distance(f"{fh_param['filetype']}_{channel}", fh_param, reader_configs) From 82f44f37af4ce929a49e2b42827c6c2ee04d60a8 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:22:24 +0000 Subject: [PATCH 21/71] reformat the reader file --- satpy/readers/fci_l1c_nc.py | 56 ++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 98da127d7d..034f852f19 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -229,7 +229,7 @@ def __init__(self, filename, filename_info, filetype_info): if self.filename_info["facility_or_tool"] == "IQTI": self.is_iqt = True - else : + else: self.is_iqt = False self._cache = {} @@ -239,7 +239,7 @@ def rc_period_min(self): """Get nominal repeat cycle duration.""" if "Q4" in self.filename_info["coverage"]: return 2.5 - elif self.filename_info["coverage"] in ["FD","AF"]: + elif self.filename_info["coverage"] in ["FD", "AF"]: return 10 else: raise NotImplementedError(f"coverage for {self.filename_info['coverage']}" @@ -250,7 +250,7 @@ def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) return rc_date + dt.timedelta( - minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) + minutes=(self.filename_info["repeat_cycle_in_day"] - 1) * self.rc_period_min) @property def nominal_end_time(self): @@ -302,21 +302,21 @@ def get_segment_position_info(self): vis_06_measured_path = self.get_channel_measured_group_path("vis_06") ir_105_measured_path = self.get_channel_measured_group_path("ir_105") segment_position_info = { - HIGH_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), - "segment_height": 
self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, - "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] - }, - LOW_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, - "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] - } - } + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] + }, + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] + } + } return segment_position_info def get_dataset(self, key, info=None): @@ -409,7 +409,7 @@ def _get_dataset_measurand(self, key, info=None): "nominal_end_time": self.nominal_end_time, "observation_start_time": self.observation_start_time, "observation_end_time": self.observation_end_time, - } + } res.attrs.update(self.orbital_param) return res @@ -428,7 +428,7 @@ def get_iqt_parameters_lon_lat_alt(self): " These parameters are taken from the projection's dictionary" f"satellite_actual_longitude = {actual_subsat_lon} ," f"satellite_sat_alt = {actual_sat_alt}") - return actual_subsat_lon,actual_subsat_lat,actual_sat_alt + return actual_subsat_lon, actual_subsat_lat, actual_sat_alt def get_parameters_lon_lat_alt(self): """Compute the orbital parameters. 
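
Background sketch for the hunk that follows: the "actual" orbital parameters are plain nanmeans over the per-row auxiliary LUT vectors. A minimal, self-contained illustration, with a stand-in `aux` dict replacing the reader's `_get_aux_data_lut_vector` lookups (the key names follow the reader, the sample values are invented):

import numpy as np

aux = {
    "subsatellite_longitude": np.array([0.05, 0.07, np.nan]),
    "subsatellite_latitude": np.array([0.00, 0.01, np.nan]),
    "platform_altitude": np.array([35786400.0, 35786410.0, np.nan]),
}

def parameters_lon_lat_alt(aux):
    # nanmean skips NaN fill entries instead of letting them poison the average
    lon = float(np.nanmean(aux["subsatellite_longitude"]))
    lat = float(np.nanmean(aux["subsatellite_latitude"]))
    alt = float(np.nanmean(aux["platform_altitude"]))
    return lon, lat, alt

print(parameters_lon_lat_alt(aux))  # roughly (0.06, 0.005, 35786405.0)

The NaN tolerance matters because, as the IQTI test fixtures above show, these auxiliary vectors can be entirely fill values for some products.
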
@@ -438,18 +438,17 @@ def get_parameters_lon_lat_alt(self): actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) - return actual_subsat_lon,actual_subsat_lat,actual_sat_alt - + return actual_subsat_lon, actual_subsat_lat, actual_sat_alt @cached_property def orbital_param(self): """Compute the orbital parameters for the current segment.""" if self.is_iqt: - actual_subsat_lon,actual_subsat_lat,actual_sat_alt = self.get_iqt_parameters_lon_lat_alt() + actual_subsat_lon, actual_subsat_lat, actual_sat_alt = self.get_iqt_parameters_lon_lat_alt() else: - actual_subsat_lon,actual_subsat_lat,actual_sat_alt = self.get_parameters_lon_lat_alt() + actual_subsat_lon, actual_subsat_lat, actual_sat_alt = self.get_parameters_lon_lat_alt() # The "try" is a temporary part of the code as long as the AF data are not modified - try : + try: nominal_and_proj_subsat_lon = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) except ValueError: @@ -674,7 +673,7 @@ def calibrate_counts_to_rad(self, data, key): measured = self.get_channel_measured_group_path(key["name"]) data.attrs.update({"radiance_unit_conversion_coefficient": - self.get_and_cache_npxr(measured + "/radiance_unit_conversion_coefficient")}) + self.get_and_cache_npxr(measured + "/radiance_unit_conversion_coefficient")}) return data def calibrate_rad_to_bt(self, radiance, key): @@ -725,10 +724,10 @@ def calibrate_rad_to_refl(self, radiance, key): return res @cached_property - def _compute_sun_earth_distance(self) -> float : + def _compute_sun_earth_distance(self) -> float: """Compute the sun_earth_distance.""" if self.is_iqt: - middle_time_diff = (self.observation_end_time-self.observation_start_time)/2 + middle_time_diff = (self.observation_end_time - self.observation_start_time) / 2 utc_date = self.observation_start_time + middle_time_diff sun_earth_distance = sun_earth_distance_correction(utc_date) logger.info(f"The value sun_earth_distance is set to {sun_earth_distance} AU.") @@ -737,6 +736,7 @@ def _compute_sun_earth_distance(self) -> float : self._get_aux_data_lut_vector("earth_sun_distance")) / 149597870.7 # [AU] return sun_earth_distance + def _ensure_dataarray(arr): if not isinstance(arr, xr.DataArray): attrs = dict(arr.attrs).copy() From b2860741fb63c6c4ad97fd3b9d6e45e22419d129 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:28:44 +0000 Subject: [PATCH 22/71] update docstrings --- satpy/readers/fci_l1c_nc.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 034f852f19..3fe326bf54 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -20,9 +20,9 @@ This module defines the :class:`FCIL1cNCFileHandler` file handler, to be used for reading Meteosat Third Generation (MTG) Flexible Combined -Imager (FCI) Level-1c data. FCI will fly +Imager (FCI) Level-1c data. FCI flies on the MTG Imager (MTG-I) series of satellites, with the first satellite (MTG-I1) -scheduled to be launched on the 13th of December 2022. +launched on the 13th of December 2022. For more information about FCI, see `EUMETSAT`_. For simulated test data to be used with this reader, see `test data releases`_. @@ -110,7 +110,7 @@ .. 
_AF PUG: https://www-cdn.eumetsat.int/files/2022-07/MTG%20EUMETCast%20Africa%20Product%20User%20Guide%20%5BAfricaPUG%5D_v2E.pdf .. _PUG: https://www-cdn.eumetsat.int/files/2020-07/pdf_mtg_fci_l1_pug.pdf -.. _EUMETSAT: https://www.eumetsat.int/mtg-flexible-combined-imager # noqa: E501 +.. _EUMETSAT: https://user.eumetsat.int/resources/user-guides/mtg-fci-level-1c-data-guide # noqa: E501 .. _test data releases: https://www.eumetsat.int/mtg-test-data """ @@ -219,7 +219,8 @@ def __init__(self, filename, filename_info, filetype_info): logger.debug("End: {}".format(self.end_time)) if self.filename_info["coverage"] == "Q4": - # change number of chunk so that padding gets activated correctly on missing chunks + # change the chunk number so that padding gets activated correctly for Q4, which corresponds to the upper + # quarter of the disc self.filename_info["count_in_repeat_cycle"] += 28 if self.filename_info["coverage"] == "AF": From 5dcc413edf8341f87b02627171afb51a018855fb Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 1 Aug 2024 10:32:40 +0000 Subject: [PATCH 23/71] update more docstrings --- satpy/readers/fci_l1c_nc.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 3fe326bf54..fc40916699 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -418,23 +418,23 @@ def _get_dataset_measurand(self, key, info=None): def get_iqt_parameters_lon_lat_alt(self): """Compute the orbital parameters for IQT data. - Compute satellite_actual_longitude,satellite_actual_latitude,satellite_actual_altitude.add_constant. + Compute satellite_actual_longitude, satellite_actual_latitude, satellite_actual_altitude. """ actual_subsat_lon = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/" "longitude_of_projection_origin")) actual_subsat_lat = 0.0 actual_sat_alt = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) - logger.info("IQT data the following parameter is hardcoded " - f" satellite_actual_latitude = {actual_subsat_lat} ," - " These parameters are taken from the projection's dictionary" - f"satellite_actual_longitude = {actual_subsat_lon} ," - f"satellite_sat_alt = {actual_sat_alt}") + logger.info("For IQT data, the following parameter is hardcoded:" + f" satellite_actual_latitude = {actual_subsat_lat}. " + "The following parameters are taken from the projection dictionary: " + f"satellite_actual_longitude = {actual_subsat_lon}, " + f"satellite_actual_altitude = {actual_sat_alt}") return actual_subsat_lon, actual_subsat_lat, actual_sat_alt def get_parameters_lon_lat_alt(self): """Compute the orbital parameters. - Compute satellite_actual_longitude,satellite_actual_latitude,satellite_actual_altitude. + Compute satellite_actual_longitude, satellite_actual_latitude, satellite_actual_altitude. 
""" actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) @@ -448,7 +448,7 @@ def orbital_param(self): actual_subsat_lon, actual_subsat_lat, actual_sat_alt = self.get_iqt_parameters_lon_lat_alt() else: actual_subsat_lon, actual_subsat_lat, actual_sat_alt = self.get_parameters_lon_lat_alt() - # The "try" is a temporary part of the code as long as the AF data are not modified + # The "try" is a temporary part of the code as long as the AF data are not fixed try: nominal_and_proj_subsat_lon = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) From 618fc0e498c7f1e1bfce011f1ed1b041895ed8ce Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 2 Aug 2024 16:54:14 +0200 Subject: [PATCH 24/71] update test to check that summation over disk works --- satpy/tests/reader_tests/test_li_l2_nc.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index cf4858c7ed..36cc930683 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -792,10 +792,12 @@ def test_with_area_def_pixel_placement(self, filetype_infos): # prepare reference array data = handler_without_area_def.get_dataset(dsid).values ref_arr = np.empty(LI_GRID_SHAPE, dtype=arr.dtype) - ref_arr[:] = np.nan + ref_arr[:] = 0 rows = (LI_GRID_SHAPE[0] - yarr) cols = xarr - 1 - ref_arr[rows, cols] = data + for n_entry in range(len(data)): + ref_arr[rows[n_entry], cols[n_entry]] += data[n_entry] + ref_arr = np.where(ref_arr > 0, ref_arr, np.nan) # Check all nan values are at the same locations: assert np.all(np.isnan(arr) == np.isnan(ref_arr)) From ad82b7b9e6a542c738f1b82d48dd99f2e9d11347 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 2 Aug 2024 17:09:22 +0200 Subject: [PATCH 25/71] update reader to perform np.add.at --- satpy/readers/li_l2_nc.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index c172e99a0f..488fbb07cc 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -130,21 +130,35 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): cols = self.get_measured_variable("x") attrs = data_array.attrs - rows, cols = da.compute(rows, cols) - # origin is in the south-west corner, so we flip the rows (applying # offset of 1 implicitly) # And we manually offset the columns by 1 too: rows = (LI_GRID_SHAPE[0] - rows.astype(int)) cols = cols.astype(int) - 1 - # Create an empyt 1-D array for the results - flattened_result = np.nan * da.zeros((LI_GRID_SHAPE[0] * LI_GRID_SHAPE[1]), dtype=data_array.dtype) - # Insert the data. Dask doesn't support this for more than one dimension at a time, so ... - flattened_result[rows * LI_GRID_SHAPE[0] + cols] = data_array + # Create an empty 1-D array for the results + li_grid_flat_size = LI_GRID_SHAPE[0] * LI_GRID_SHAPE[1] + flattened_result = da.zeros((LI_GRID_SHAPE[0] * LI_GRID_SHAPE[1]), dtype=data_array.dtype, + chunks=(li_grid_flat_size,)) + + # Insert the data. 
If a pixel has more than one entry, the values are added up (np.add.at functionality) + indices = xr.DataArray(da.asarray(rows * LI_GRID_SHAPE[0] + cols)) + flattened_result = da.map_blocks(_np_add_at_wrapper, flattened_result, indices, data_array, + dtype=data_array.dtype, + chunks=(li_grid_flat_size,)) + flattened_result = da.where(flattened_result > 0, flattened_result, np.nan) + # ... reshape to final 2D grid data_2d = da.reshape(flattened_result, LI_GRID_SHAPE) xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x")) xarr.attrs = attrs return xarr + + +def _np_add_at_wrapper(target_array, indices, data): + # copy needed for correct computation in-place inside the da.map_blocks + ta = target_array.copy() + # add.at is not implemented in xarray, so we explicitly need the np.array + np.add.at(ta, indices.values, data.values) + return ta From 3c22fb74dc7538e5b8ee6ea8c1514af3fcb76b75 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 2 Aug 2024 17:32:00 +0200 Subject: [PATCH 26/71] switch to direct 2-d computation --- satpy/readers/li_l2_nc.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 488fbb07cc..01127a3e05 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -130,26 +130,24 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): cols = self.get_measured_variable("x") attrs = data_array.attrs + rows, cols = da.compute(rows, cols) + # origin is in the south-west corner, so we flip the rows (applying # offset of 1 implicitly) # And we manually offset the columns by 1 too: rows = (LI_GRID_SHAPE[0] - rows.astype(int)) cols = cols.astype(int) - 1 - # Create an empty 1-D array for the results - li_grid_flat_size = LI_GRID_SHAPE[0] * LI_GRID_SHAPE[1] - flattened_result = da.zeros((LI_GRID_SHAPE[0] * LI_GRID_SHAPE[1]), dtype=data_array.dtype, - chunks=(li_grid_flat_size,)) + # initialise results array with zeros + data_2d = da.zeros((LI_GRID_SHAPE[0], LI_GRID_SHAPE[1]), dtype=data_array.dtype, + chunks=(LI_GRID_SHAPE[0], LI_GRID_SHAPE[1])) - # Insert the data. If a pixel has more than one entry, the values are added up (np.add.at functionality) - indices = xr.DataArray(da.asarray(rows * LI_GRID_SHAPE[0] + cols)) - flattened_result = da.map_blocks(_np_add_at_wrapper, flattened_result, indices, data_array, - dtype=data_array.dtype, - chunks=(li_grid_flat_size,)) - flattened_result = da.where(flattened_result > 0, flattened_result, np.nan) + # insert the data. If a pixel has more than one entry, the values are added up (np.add.at functionality) + data_2d = da.map_blocks(_np_add_at_wrapper, data_2d, (rows, cols), data_array, + dtype=data_array.dtype, + chunks=(LI_GRID_SHAPE[0], LI_GRID_SHAPE[1])) + data_2d = da.where(data_2d > 0, data_2d, np.nan) - # ... 
reshape to final 2D grid - data_2d = da.reshape(flattened_result, LI_GRID_SHAPE) xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x")) xarr.attrs = attrs @@ -157,8 +155,8 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): def _np_add_at_wrapper(target_array, indices, data): - # copy needed for correct computation in-place inside the da.map_blocks + # copy needed for correct computation in-place ta = target_array.copy() # add.at is not implemented in xarray, so we explicitly need the np.array - np.add.at(ta, indices.values, data.values) + np.add.at(ta, indices, data.values) return ta From cad4e60c439deca60632071c732dc3aa1df77c58 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Tue, 6 Aug 2024 17:04:21 +0200 Subject: [PATCH 27/71] Added test for NWCSAF GEO type promotion Added a test to make sure that cloud type (or others) are not accidentally transformed to an inflated dtype. This currently fails with numpy 2.0; see https://github.com/pytroll/satpy/issues/2872 --- satpy/tests/reader_tests/test_nwcsaf_nc.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 6a509f023f..922c47ed9a 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -103,7 +103,7 @@ def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs_geo): nc_file.attrs.update(attrs) var_name = "ct" - var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint16, + var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint8, chunks=(256, 256)) var[:] = RANDOM_GEN.integers(0, 255, size=(928, 1530), dtype=np.uint8) @@ -353,6 +353,13 @@ def test_end_time(self, nwcsaf_geo_ct_filehandler): """Test the end time property.""" assert nwcsaf_geo_ct_filehandler.end_time == read_nwcsaf_time(END_TIME) + def test_uint8_remains_uint8(self, nwcsaf_geo_ct_filehandler): + """Test that loading uint8 remains uint8.""" + ct = nwcsaf_geo_ct_filehandler.get_dataset( + {"name": "ct"}, + {"name": "ct", "file_type": "nc_nwcsaf_geo"}) + assert ct.dtype == np.dtype("uint8") + class TestNcNWCSAFPPS: """Test the NcNWCSAF reader for PPS products.""" From 7a294639a395d1f2c3ad8019730a642a8914c21c Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Tue, 6 Aug 2024 17:15:31 +0200 Subject: [PATCH 28/71] Fix dtype for nwcsaf geo Make sure that integer dtypes when reading NWCSAF GEO are not accidentally cast to int64. 
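A minimal sketch (not part of the original commit message) of the promotion behaviour this fix guards against: under NumPy >= 2 and the NEP 50 promotion rules, a bare ``np.array(1)`` default participates in promotion as the platform's default integer dtype and inflates uint8 data, so the defaults below are pinned to the variable's dtype::

    import numpy as np

    data = np.arange(3, dtype=np.uint8)

    scale = np.array(1)                    # platform default integer dtype, e.g. int64
    print((data * scale).dtype)            # int64 under NumPy >= 2 (NEP 50 promotion)

    scale = np.array(1, dtype=data.dtype)  # pin the default to the data's dtype instead
    print((data * scale).dtype)            # uint8 with any NumPy version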
---
 satpy/readers/nwcsaf_nc.py                 | 4 ++--
 satpy/tests/reader_tests/test_nwcsaf_nc.py | 8 ++++++++
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py
index 64a284200d..ef68e830fa 100644
--- a/satpy/readers/nwcsaf_nc.py
+++ b/satpy/readers/nwcsaf_nc.py
@@ -205,8 +205,8 @@ def scale_dataset(self, variable, info):
         """
         variable = remove_empties(variable)

-        scale = variable.attrs.get("scale_factor", np.array(1))
-        offset = variable.attrs.get("add_offset", np.array(0))
+        scale = variable.attrs.get("scale_factor", np.array(1, dtype=variable.dtype))
+        offset = variable.attrs.get("add_offset", np.array(0, dtype=variable.dtype))
         if "_FillValue" in variable.attrs:
             variable.attrs["scaled_FillValue"] = variable.attrs["_FillValue"] * scale + offset
         if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating):
diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py
index 922c47ed9a..d4f6dffedc 100644
--- a/satpy/tests/reader_tests/test_nwcsaf_nc.py
+++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py
@@ -326,6 +326,14 @@ def test_scale_dataset_floating_nwcsaf_geo_ctth(self, nwcsaf_geo_ct_filehandler)
         assert "add_offset" not in var.attrs
         np.testing.assert_equal(var.attrs["valid_range"], (-2000., 25000.))

+    def test_scale_dataset_uint8_noop(self, nwcsaf_geo_ct_filehandler):
+        """Test that uint8 is not accidentally cast when no scaling is done."""
+        attrs = {}
+        var = xr.DataArray(np.array([1, 2, 3], dtype=np.uint8), attrs=attrs)
+        var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy")
+        np.testing.assert_equal(var, np.array([1, 2, 3], dtype=np.uint8))
+        assert var.dtype == np.uint8
+
     def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler):
         """Test that orbital parameters are present in the dataset attributes."""
         dsid = {"name": "ct"}

From b25107d73a386d61a87e88909ba28a0dfb77e0ff Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 6 Aug 2024 10:39:22 -0500
Subject: [PATCH 29/71] Fix AWIPS tiled writer handling of odd units in VIIRS EDR products

---
 satpy/writers/awips_tiled.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py
index e5cd82ce7b..f697f8902d 100644
--- a/satpy/writers/awips_tiled.py
+++ b/satpy/writers/awips_tiled.py
@@ -246,6 +246,7 @@
     "percent": "%",
     "Kelvin": "kelvin",
     "K": "kelvin",
+    "Meter": "meters",
 }

 TileInfo = namedtuple("TileInfo", ["tile_count", "image_shape", "tile_shape",

From 2911b649c8604ad4e62232dfdb48e668f83888ec Mon Sep 17 00:00:00 2001
From: andream
Date: Wed, 7 Aug 2024 18:18:08 +0200
Subject: [PATCH 30/71] change to li only in composite yaml to avoid dependency issues

---
 satpy/etc/composites/li.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/etc/composites/li.yaml b/satpy/etc/composites/li.yaml
index 71b832375a..f7dce95e52 100644
--- a/satpy/etc/composites/li.yaml
+++ b/satpy/etc/composites/li.yaml
@@ -1,5 +1,5 @@
 ---
-sensor_name: visir/li
+sensor_name: li
 # these are tentative recipes that will need to be further tuned as we gain experience with LI data
 composites:
   acc_flash:

From a551b3568f6e655d07c76a8aa40d8bdea9899eef Mon Sep 17 00:00:00 2001
From: andream
Date: Wed, 7 Aug 2024 18:24:09 +0200
Subject: [PATCH 31/71] switch to suppress instead of try/except

---
 satpy/tests/reader_tests/test_fci_l1c_nc.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git
a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 9bc367a310..f7add161a5 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -927,13 +927,12 @@ def test_not_get_segment_info_called_af(self, FakeFCIFileHandlerAF_fixture, read with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - try: + with contextlib.suppress(KeyError): # attempt to load the channel - reader.load([channel]) - except KeyError: # If get_segment_position_info is called, the code will fail with a KeyError because of the mocking. - # So we catch the error here for now, but the test will still fail with the following assert_not_called - pass + # However, the point of the test is to check if the function has been called, not if the function + # would work with this case, so the expected KeyError is suppressed, and we assert_not_called below. + reader.load([channel]) gspi.assert_not_called() @pytest.mark.parametrize("calibration", ["index_map", "pixel_quality"]) From 04266c33e7b524a35c985ad255fce2f076faa44f Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 8 Aug 2024 11:49:56 +0200 Subject: [PATCH 32/71] fix WV filenaming that uses IR instead --- satpy/etc/readers/fci_l1c_nc.yaml | 8 ++++---- satpy/tests/reader_tests/test_fci_l1c_nc.py | 7 +++++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index d750e18622..5f5e1d0880 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -176,8 +176,8 @@ file_types: fci_l1c_af_wv_63: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" 
     expected_segments: 1
     required_netcdf_variables: *required-variables
     variable_name_replacements:
@@ -186,8 +186,8 @@ file_types:
   fci_l1c_af_wv_73:
     file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
     file_patterns:
-      - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc"
-      - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc"
+      - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc"
+      - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc"
     expected_segments: 1
     required_netcdf_variables: *required-variables
     variable_name_replacements:
diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py
index f7add161a5..b9c131ce02 100644
--- a/satpy/tests/reader_tests/test_fci_l1c_nc.py
+++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py
@@ -197,10 +197,13 @@ def fill_chans_af():
     chans_af = {}
     for channel in LIST_TOTAL_CHANNEL:
         list_resol = resolutions_AF_products(channel)
+        if channel in ["wv_63", "wv_73"]:
+            ch_name_for_file = channel.replace("wv", "ir").replace("_", "").upper()
+        else:
+            ch_name_for_file = channel.replace("_", "").upper()
         for resol in list_resol:
-            chann_upp = channel.replace("_", "").upper()
             TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD"
-                                                       f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_"
+                                                       f"-{resol.upper()}-AF-{ch_name_for_file}-x-x---NC4E_C_EUMT_"
                                                        f"20240125144655_DT_OPE"
                                                        f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"]
         if channel.split("_")[0] in ["vis", "nir"]:

From 6860030bf0140bb16f5ac30c5a01a95cf30d7b07 Mon Sep 17 00:00:00 2001
From: andream
Date: Thu, 8 Aug 2024 11:56:54 +0200
Subject: [PATCH 33/71] split AF vis_06 into two filetypes, one for each
 resolution, so that both files can be read at the same time and the highest
 resolution is picked, as with the normal data

---
 satpy/etc/readers/fci_l1c_nc.yaml           | 63 ++++++++++++---------
 satpy/tests/reader_tests/test_fci_l1c_nc.py | 18 ++++--
 2 files changed, 47 insertions(+), 34 deletions(-)

diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml
index
5f5e1d0880..fb461a517a 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -80,14 +80,21 @@ file_types: - ir_105_hr # Note: In The current file the 'MTI1-FCI-1C' which is a part of the file will be replaced by MTI1+FCI-1C, patterns have been added # to maanage this - fci_l1c_af_vis_06: + fci_l1c_af_vis_06_3km: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 + required_netcdf_variables: *required-variables + variable_name_replacements: + channel_name: + - vis_06 + fci_l1c_af_vis_06_1km: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -287,8 +294,8 @@ datasets: wavelength: [0.590, 0.640, 0.690] resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } calibration: counts: standard_name: counts @@ -556,8 +563,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_pixel_quality: name: vis_08_pixel_quality @@ -672,8 +679,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_index_map: name: vis_08_index_map @@ -791,8 +798,8 @@ datasets: sensor: fci resolution: 500: { file_type: 
fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_time: name: vis_08_time @@ -920,8 +927,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_swath_direction: name: vis_08_swath_direction @@ -1036,8 +1043,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_swath_number: name: vis_08_swath_number @@ -1155,8 +1162,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsatellite_latitude: name: vis_08_subsatellite_latitude @@ -1287,8 +1294,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsatellite_longitude: name: vis_08_subsatellite_longitude @@ -1419,8 +1426,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsolar_latitude: name: vis_08_subsolar_latitude @@ -1551,8 +1558,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsolar_longitude: name: vis_08_subsolar_longitude @@ -1683,8 +1690,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_platform_altitude: name: vis_08_platform_altitude @@ -1815,8 +1822,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_earth_sun_distance: name: vis_08_earth_sun_distance @@ -1947,8 +1954,8 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } - 3000: { file_type: fci_l1c_af_vis_06 } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } + 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_sun_satellite_distance: name: vis_08_sun_satellite_distance diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 
b9c131ce02..f7037752f3 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -1082,12 +1082,17 @@ def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param, com [("vis_06", "3km", (1, 10, datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime("2024-01-09 08:10:00", - "%Y-%m-%d %H:%M:%S")))]) + "%Y-%m-%d %H:%M:%S"))), + ("vis_06", "1km", (1, 10, + datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), + datetime.datetime.strptime("2024-01-09 08:10:00", + "%Y-%m-%d %H:%M:%S"))) + ]) def test_count_in_repeat_cycle_rc_period_min_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, - channel, compare_tuples): + channel, resolution, compare_tuples): """Test the rc_period_min value for each configuration.""" fh_param = FakeFCIFileHandlerAF_fixture - self._compare_rc_period_min_count_in_repeat_cycle(f"{fh_param['filetype']}_{channel}", fh_param, + self._compare_rc_period_min_count_in_repeat_cycle(f"{fh_param['filetype']}_{channel}_{resolution}", fh_param, reader_configs, compare_tuples) @pytest.mark.parametrize(("fh_param"), @@ -1102,12 +1107,13 @@ def test_compute_earth_sun_parameter(self, reader_configs, fh_param): """Test the computation of the sun_earth_parameter.""" self._compare_sun_earth_distance(fh_param["filetype"], fh_param, reader_configs) - @pytest.mark.parametrize(("channel", "resolution"), [("vis_06", "3km")]) + @pytest.mark.parametrize(("channel", "resolution"), [("vis_06", "3km"), + ("vis_06", "1km")]) def test_compute_earth_sun_parameter_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, - channel): + channel, resolution): """Test the rc_period_min value for each configuration.""" fh_param = FakeFCIFileHandlerAF_fixture - self._compare_sun_earth_distance(f"{fh_param['filetype']}_{channel}", fh_param, reader_configs) + self._compare_sun_earth_distance(f"{fh_param['filetype']}_{channel}_{resolution}", fh_param, reader_configs) @pytest.mark.parametrize(("fh_param"), [(lazy_fixture("FakeFCIFileHandlerFDHSIError_fixture"))]) def test_rc_period_min_error(self, reader_configs, fh_param): From bea93d1d735485e008d9082f946f57dde10a0fd8 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 8 Aug 2024 19:30:12 +0200 Subject: [PATCH 34/71] update docstring with note --- satpy/readers/li_l2_nc.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 01127a3e05..587039fa46 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -46,6 +46,18 @@ grid as per intended usage, with a ``pyresample.geometry.AreaDefinition`` area attribute containing the grid geolocation information. In this way, the products can directly be overlaid to FCI data. + +.. note:: + + L2 accumulated products retrieved from the archive + (that have "ARC" in the filename) contain data for 20 repeat cycles (timesteps) covering + 10 minutes of sensing time. For these files, when loading the main variables + (``accumulated_flash_area``, ``flash_accumulation``, ``flash_radiance``), + the reader will cumulate (sum up) the data for the entire sensing period of the file. + A solution to access easily each timestep is being worked on. See https://github.com/pytroll/satpy/issues/2878 + for possible workarounds in the meanwhile. 
+ + If needed, the accumulated products can also be accessed as 1-d array by setting the reader kwarg ``with_area_definition=False``, e.g.:: From 1b22d9cdbd0eb2e2bfa14f8df468b8d5d0abfaa8 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 8 Aug 2024 19:43:30 +0200 Subject: [PATCH 35/71] add note in yaml file --- satpy/etc/composites/li.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/etc/composites/li.yaml b/satpy/etc/composites/li.yaml index f7dce95e52..4d3cc88e95 100644 --- a/satpy/etc/composites/li.yaml +++ b/satpy/etc/composites/li.yaml @@ -1,5 +1,8 @@ --- +# we use li only here, and not visir/li, since the second can cause dependency issues when creating composites +# combined with imagers in a multi-reader Scene. visir composites do not apply to LI anyway. sensor_name: li + # these are tentative recipes that will need to be further tuned as we gain experience with LI data composites: acc_flash: From 5401d28011fb22543daea6928f376c13eb957c28 Mon Sep 17 00:00:00 2001 From: Pouria Khalaj Date: Fri, 26 Jul 2024 13:39:28 +0200 Subject: [PATCH 36/71] Add `readers.utils.fromfile()` for remote reading This function uses `readers.utils.generic_open()` and `np.frombuffer()` to achieve this. --- satpy/readers/utils.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 170cc5abcc..378f16de0b 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -361,6 +361,27 @@ def generic_open(filename, *args, **kwargs): fp.close() +def fromfile(filename, dtype, count=1, offset=0): + """Reads the numpy array from a (remote or local) file using a buffer. + + Note: + This function relies on the :func:`generic_open` context manager to read a file remotely. + + Args: + filename: Either the name of the file to read or a :class:`satpy.readers.FSFile` object. + dtype: The data type of the numpy array + count (Optional, default ``1``): Number of items to read + offset (Optional, default ``0``): Starting point for reading the buffer from + + Returns: + The content of the filename as a numpy array with the given data type. + """ + with generic_open(filename, mode="rb") as istream: + istream.seek(offset) + content = np.frombuffer(istream.read(dtype.itemsize * count), dtype=dtype, count=count) + return content + + def bbox(img): """Find the bounding box around nonzero elements in the given array. 
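A minimal usage sketch for the ``fromfile()`` helper added above (not part of the patch series; the file name and the ``<u4`` record layout are assumptions for illustration). Any ``np.dtype`` works, and because the helper goes through ``generic_open()`` the same call also accepts bzip2-compressed files and :class:`satpy.readers.FSFile` objects pointing at remote or zipped locations::

    import numpy as np

    from satpy.readers.utils import fromfile

    # hypothetical record layout: one little-endian 32-bit unsigned integer per item
    rec_dtype = np.dtype("<u4")

    # skip a 16-byte prefix, then read two items into a numpy array
    values = fromfile("/path/to/data.bin", dtype=rec_dtype, count=2, offset=16)
    print(values.shape)  # (2,)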
From 7142a682a1d75f27fdc6cb79cfc4bb80382bc44a Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Fri, 26 Jul 2024 13:51:02 +0200
Subject: [PATCH 37/71] Update `AUTHORS.md`

---
 AUTHORS.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/AUTHORS.md b/AUTHORS.md
index 461ce3ca1e..e764b96627 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -43,6 +43,7 @@ The following people have made contributions to this project:
 - [Jactry Zeng](https://github.com/jactry)
 - [Johannes Johansson (JohannesSMHI)](https://github.com/JohannesSMHI)
 - [Sauli Joro (sjoro)](https://github.com/sjoro)
+- [Pouria Khalaj](https://github.com/pkhalaj)
 - [Janne Kotro (jkotro)](https://github.com/jkotro)
 - [Ralph Kuehn (ralphk11)](https://github.com/ralphk11)
 - [Panu Lahtinen (pnuu)](https://github.com/pnuu)

From 8051d50703c366989a99618fd350d600e502ba40 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Fri, 26 Jul 2024 13:54:25 +0200
Subject: [PATCH 38/71] Reformat `readers.utils` to make it PEP8 compliant

---
 satpy/readers/utils.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py
index 378f16de0b..e45e78aef1 100644
--- a/satpy/readers/utils.py
+++ b/satpy/readers/utils.py
@@ -98,8 +98,8 @@ def get_geostationary_angle_extent(geos_area):
     h = float(h) / 1000 + req

     # compute some constants
-    aeq = 1 - req**2 / (h ** 2)
-    ap_ = 1 - rp**2 / (h ** 2)
+    aeq = 1 - req ** 2 / (h ** 2)
+    ap_ = 1 - rp ** 2 / (h ** 2)

     # generate points around the north hemisphere in satellite projection
     # make it a bit smaller so that we stay inside the valid area
@@ -142,15 +142,15 @@ def _lonlat_from_geos_angle(x, y, geos_area):
     b__ = (a / float(b)) ** 2

     sd = np.sqrt((h__ * np.cos(x) * np.cos(y)) ** 2 -
-                 (np.cos(y)**2 + b__ * np.sin(y)**2) *
-                 (h__**2 - (float(a) / 1000)**2))
+                 (np.cos(y) ** 2 + b__ * np.sin(y) ** 2) *
+                 (h__ ** 2 - (float(a) / 1000) ** 2))
     # sd = 0
-    sn = (h__ * np.cos(x) * np.cos(y) - sd) / (np.cos(y)**2 + b__ * np.sin(y)**2)
+    sn = (h__ * np.cos(x) * np.cos(y) - sd) / (np.cos(y) ** 2 + b__ * np.sin(y) ** 2)
     s1 = h__ - sn * np.cos(x) * np.cos(y)
     s2 = sn * np.sin(x) * np.cos(y)
     s3 = -sn * np.sin(y)
-    sxy = np.sqrt(s1**2 + s2**2)
+    sxy = np.sqrt(s1 ** 2 + s2 ** 2)

     lons = np.rad2deg(np.arctan2(s2, s1)) + lon_0
     lats = np.rad2deg(-np.arctan2(b__ * s3, sxy))
@@ -256,7 +256,7 @@ def _unzip_with_pbzip(filename, tmpfilepath, fdn):
     if n_thr:
         runner = [pbzip,
                   "-dc",
-                  "-p"+str(n_thr),
+                  "-p" + str(n_thr),
                   filename]
     else:
         runner = [pbzip,
@@ -416,7 +416,7 @@ def get_earth_radius(lon, lat, a, b):
     latlong = pyproj.CRS.from_dict({"proj": "latlong", "a": a, "b": b, "units": "m"})
     transformer = pyproj.Transformer.from_crs(latlong, geocent)
     x, y, z = transformer.transform(lon, lat, 0.0)
-    return np.sqrt(x**2 + y**2 + z**2)
+    return np.sqrt(x ** 2 + y ** 2 + z ** 2)


 def reduce_mda(mda, max_size=100):

From 16f26425f8a7d4460d9689f51b13264b46b31f8e Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Fri, 26 Jul 2024 14:14:39 +0200
Subject: [PATCH 39/71] Adapt `readers.seviri_l1b_native` for remote reading

In particular, the following functions/methods have been modified:
- `has_archive_header()` now uses `readers.utils.generic_open()` instead of `open()`.
- `read_header()` now uses `readers.utils.fromfile()` instead of `np.fromfile()`.
- `NativeMSGFileHandler._read_trailer()` now uses `readers.utils.fromfile()` instead of `np.fromfile()`.
- `NativeMSGFileHandler._get_memmap()` has been renamed to `NativeMSGFileHandler._get_array()` and now uses `readers.utils.fromfile()` instead of `np.memmap()`. --- satpy/readers/seviri_l1b_native.py | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 976cb7c338..d5f3ba2692 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -138,7 +138,7 @@ get_native_header, native_trailer, ) -from satpy.readers.utils import reduce_mda +from satpy.readers.utils import fromfile, generic_open, reduce_mda from satpy.utils import get_legacy_chunk_size logger = logging.getLogger("native_msg") @@ -193,7 +193,7 @@ def __init__(self, filename, filename_info, filetype_info, # Available channels are known only after the header has been read self.header_type = get_native_header(has_archive_header(self.filename)) self._read_header() - self.dask_array = da.from_array(self._get_memmap(), chunks=(CHUNK_SIZE,)) + self.dask_array = da.from_array(self._get_array(), chunks=(CHUNK_SIZE,)) self._read_trailer() self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) @@ -276,15 +276,11 @@ def get_lrec(cols): return np.dtype(drec) - def _get_memmap(self): - """Get the memory map for the SEVIRI data.""" - with open(self.filename) as fp: - data_dtype = self._get_data_dtype() - hdr_size = self.header_type.itemsize - - return np.memmap(fp, dtype=data_dtype, - shape=(self.mda["number_of_lines"],), - offset=hdr_size, mode="r") + def _get_array(self): + """Get the numpy array for the SEVIRI data.""" + data_dtype = self._get_data_dtype() + hdr_size = self.header_type.itemsize + return fromfile(self.filename, dtype=data_dtype, offset=hdr_size, count=self.mda["number_of_lines"]) def _read_header(self): """Read the header info.""" @@ -387,9 +383,7 @@ def _read_trailer(self): data_size = (self._get_data_dtype().itemsize * self.mda["number_of_lines"]) - with open(self.filename) as fp: - fp.seek(hdr_size + data_size) - data = np.fromfile(fp, dtype=native_trailer, count=1) + data = fromfile(self.filename, dtype=native_trailer, count=1, offset=hdr_size + data_size) self.trailer.update(recarray2dict(data)) @@ -888,12 +882,12 @@ def get_available_channels(header): def has_archive_header(filename): """Check whether the file includes an ASCII archive header.""" - with open(filename, mode="rb") as istream: + with generic_open(filename, mode="rb") as istream: return istream.read(36) == ASCII_STARTSWITH def read_header(filename): """Read SEVIRI L1.5 native header.""" dtype = get_native_header(has_archive_header(filename)) - hdr = np.fromfile(filename, dtype=dtype, count=1) + hdr = fromfile(filename, dtype=dtype, count=1) return recarray2dict(hdr) From 1f106903afae1a86aac3ac8e5e66636d752dc28f Mon Sep 17 00:00:00 2001 From: Pouria Khalaj Date: Fri, 26 Jul 2024 14:24:24 +0200 Subject: [PATCH 40/71] Update `test_seviri_l1b_native` with `_get_array` Reason: since `NativeMSGFileHandler._get_memmap()` has been renamed to `NativeMSGFileHandler._get_array()`. 
--- .../tests/reader_tests/test_seviri_l1b_native.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 8f4e46e2fb..c8501cb6a7 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -634,7 +634,7 @@ def prepare_area_definitions(test_dict): with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ - mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( "satpy.readers.seviri_l1b_native.has_archive_header" @@ -642,7 +642,7 @@ def prepare_area_definitions(test_dict): has_archive_header.return_value = True fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) + _get_array.return_value = np.arange(3) fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.fill_disk = fill_disk fh.header = header @@ -718,7 +718,7 @@ def prepare_is_roi(test_dict): with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ - mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( "satpy.readers.seviri_l1b_native.has_archive_header" @@ -726,7 +726,7 @@ def prepare_is_roi(test_dict): has_archive_header.return_value = True fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) + _get_array.return_value = np.arange(3) fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer @@ -1168,12 +1168,12 @@ def test_header_type(file_content, exp_header_size): header.pop("15_SECONDARY_PRODUCT_HEADER") with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ - mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) + _get_array.return_value = np.arange(3) fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size assert "15_SECONDARY_PRODUCT_HEADER" in fh.header @@ -1198,11 +1198,11 @@ def test_header_warning(): with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ - mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + 
mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=ASCII_STARTSWITH)): recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) + _get_array.return_value = np.arange(3) exp_warning = "The quality flag for this file indicates not OK. Use this data with caution!" From 7d516d82a94f0a6f74902b83ab9956405240686d Mon Sep 17 00:00:00 2001 From: Pouria Khalaj Date: Fri, 26 Jul 2024 14:49:00 +0200 Subject: [PATCH 41/71] Update mock.patch args in `test_seviri_l1b_native` to match changes in `seviri_l1b_native` --- .../tests/reader_tests/test_seviri_l1b_native.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index c8501cb6a7..38a4e16e77 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -632,7 +632,7 @@ def prepare_area_definitions(test_dict): trailer = create_test_trailer(is_rapid_scan) expected_area_def = test_dict["expected_area_def"] - with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ @@ -716,7 +716,7 @@ def prepare_is_roi(test_dict): trailer = create_test_trailer(is_rapid_scan) expected = test_dict["is_roi"] - with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ @@ -1166,11 +1166,11 @@ def test_header_type(file_content, exp_header_size): ) if file_content == b"foobar": header.pop("15_SECONDARY_PRODUCT_HEADER") - with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ - mock.patch("builtins.open", mock.mock_open(read_data=file_content)): + mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_array.return_value = np.arange(3) @@ -1196,11 +1196,11 @@ def test_header_warning(): good_qual="NOK" ) - with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), 
\
-            mock.patch("builtins.open", mock.mock_open(read_data=ASCII_STARTSWITH)):
+            mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=ASCII_STARTSWITH)):
         recarray2dict.side_effect = (lambda x: x)
         _get_array.return_value = np.arange(3)
@@ -1233,7 +1233,7 @@ def test_header_warning():
 )
 def test_has_archive_header(starts_with, expected):
     """Test if the file includes an ASCII archive header."""
-    with mock.patch("builtins.open", mock.mock_open(read_data=starts_with)):
+    with mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=starts_with)):
         actual = has_archive_header("filename")
         assert actual == expected
@@ -1248,7 +1248,7 @@ def test_read_header():
     dtypes = np.dtype([(k, t) for k, t in zip(keys, types)])
     hdr_data = np.array([values], dtype=dtypes)

-    with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile:
+    with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile:
         fromfile.return_value = hdr_data
         actual = recarray2dict(hdr_data)
         assert actual == expected

From d01a519c4220df9ec5079a42f38e4e179afb256e Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Sun, 28 Jul 2024 17:46:25 +0200
Subject: [PATCH 42/71] Update `test_seviri_l1b_native` with tests for remote
 reading

This includes generating an actual file on disk and attempting to read it.

---
 .../reader_tests/test_seviri_l1b_native.py | 122 ++++++++++++++++++
 1 file changed, 122 insertions(+)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 38a4e16e77..4a41162483 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -39,6 +39,8 @@
     get_available_channels,
     has_archive_header,
 )
+from satpy.readers.seviri_l1b_native_hdr import Msg15NativeHeaderRecord
+from satpy.scene import Scene
 from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS, ORBIT_POLYNOMIALS_INVALID
 from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase
 from satpy.tests.utils import assert_attrs_equal, make_dataid
@@ -1252,3 +1254,123 @@ def test_read_header():
         fromfile.return_value = hdr_data
         actual = recarray2dict(hdr_data)
         assert actual == expected
+
+
+def generate_seviri_native_null_header():
+    """Generates the header of the seviri native format which is filled with zeros, hence, the term null!"""
+    header_type = Msg15NativeHeaderRecord().get(True)
+    null_header = np.zeros(header_type.shape, dtype=header_type).reshape(1, )
+    return header_type, null_header
+
+
+def scene_from_physical_seviri_nat_file(filename):
+    """Generates a Scene object from the given seviri native file."""
+    return Scene([filename], reader="seviri_l1b_native", reader_kwargs={"fill_disk": True})
+
+
+def amend_seviri_native_null_header(hdr_null_numpy):
+    """Amends the given null header so that the ``seviri_l1b_native`` reader can properly parse it.
+
+    This is achieved by setting values for the bare minimum number of header fields so that the reader can make
+    sense of the given header. This function relies on a number of auxiliary functions, all of which are enclosed
+    in the body of the present function.
+
+    Note:
+        The naming scheme of the auxiliary functions is as follows: ``_amend_<key1>__<key2>__...``, where
+        ``<keyN>`` corresponds to keys in the header when it is represented as a dictionary, i.e. when calling
+        ``recarray2dict()`` on the given header array.
+ + For example, ``_amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId()`` corresponds to an + auxiliary function which manipulates the following entry: + ``hdr_null_numpy_as_dict["15_DATA_HEADER"]["SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"]`` + """ + + def _amend_15_MAIN_PRODUCT_HEADER(): + hdr_null_numpy[0][0][0] = (b"FormatName : ", b"NATIVE\n") + + def _amend_15_SECONDARY_PRODUCT_HEADER(): + hdr_null_numpy[0][1][9] = (b"SelectedBandIDs", b"XXXXXXXXXXXX") + hdr_null_numpy[0][1][10] = (b"SouthLineSelectedRectangle", b"1") + hdr_null_numpy[0][1][11] = (b"NorthLineSelectedRectangle", b"3712") + hdr_null_numpy[0][1][12] = (b"EastColumnSelectedRectangle", b"1") + hdr_null_numpy[0][1][13] = (b"WestColumnSelectedRectangle", b"3712") + hdr_null_numpy[0][1][14] = (b"NumberLinesVISIR", b"3712") + hdr_null_numpy[0][1][15] = (b"NumberColumnsVISIR", b"3712") + hdr_null_numpy[0][1][16] = (b"NumberLinesHRV", b"11136") + hdr_null_numpy[0][1][17] = (b"NumberColumnsHRV", b"11136") + + def _amend_GP_PK_SH1__PacketTime(): + hdr_null_numpy[0][3][5] = (23158, 27921912) + + def _amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId(): + hdr_null_numpy[0][4][1][0][0] = 324 + + def _amend_15_DATA_HEADER__GeometricProcessing__EarthModel(): + hdr_null_numpy[0][4][6][1] = (2, 6378.169, 6356.5838, 6356.5838) + + def _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime(): + hdr_null_numpy[0][4][2][0] = ( + (23158, 27911177, 286, 223), + (23158, 28663675, 401, 687), + (23158, 28810078, 157, 663) + ) + + # Apply all the header amendments + _amend_15_MAIN_PRODUCT_HEADER() + _amend_15_SECONDARY_PRODUCT_HEADER() + _amend_GP_PK_SH1__PacketTime() + _amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId() + _amend_15_DATA_HEADER__GeometricProcessing__EarthModel() + _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime() + + +@pytest.fixture() +def tmp_seviri_nat_filename(tmp_path): + """Creates a fully-qualified filename for a seviri native format file.""" + tmp_filename = "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-NA" + return tmp_path / f"{tmp_filename}.nat" + + +def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_numpy): + """Generates the data and trailer part (null content) of the file and appends them to the null header. + + The data and trailer are also null and appending them to the header results in a complete seviri nat file. + """ + # size of different parts of the seviri native file in bytes + size = dict(header_with_archive=450400, data=270344960, trailer=380363) + + zero_bytes = bytearray(size["data"] + size["trailer"]) + bytes_data = bytes(zero_bytes) + + hdr_null_numpy.tofile(filename) + with open(filename, "ab") as f: + f.write(bytes_data) + + +@pytest.fixture() +def physical_seviri_native_file(tmp_seviri_nat_filename): + """Creates a physical seviri native file on disk.""" + hdr_null_type, hdr_null = generate_seviri_native_null_header() + amend_seviri_native_null_header(hdr_null) + append_data_and_trailer_content_to_seviri_native_header(tmp_seviri_nat_filename, hdr_null) + + return dict(header_type=hdr_null_type, header=hdr_null, filename=tmp_seviri_nat_filename) + + +def test_read_physical_seviri_nat_file(physical_seviri_native_file): + """Tests that the physical seviri native file has been read successfully. + + Note: + The purpose of this function is not to fully test the properties of the scene. It only provides a test for + reading a physical file from disk. 
+ """ + scene = scene_from_physical_seviri_nat_file(physical_seviri_native_file["filename"]) + + assert physical_seviri_native_file["header_type"] == physical_seviri_native_file["header"].dtype + assert scene.sensor_names == {"seviri"} + assert len(scene.available_dataset_ids()) == 36 + assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST) + + scene.load(["VIS006"]) + assert scene["VIS006"].shape == (3712, 3712) + assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray) From 0e5f3e78131e4fbdeb3c9403f761aa5b70533867 Mon Sep 17 00:00:00 2001 From: Pouria Khalaj Date: Mon, 29 Jul 2024 10:23:51 +0200 Subject: [PATCH 43/71] Parametrize `test_read_physical_seviri_nat_file` to test zip files as well as plain files --- .../reader_tests/test_seviri_l1b_native.py | 33 ++++++++++++++----- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 4a41162483..aab1e95568 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -23,12 +23,14 @@ import os import unittest import warnings +import zipfile from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr +from pytest_lazy_fixtures import lf from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.seviri_l1b_native import ( @@ -1328,7 +1330,15 @@ def _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime(): def tmp_seviri_nat_filename(tmp_path): """Creates a fully-qualified filename for a seviri native format file.""" tmp_filename = "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-NA" - return tmp_path / f"{tmp_filename}.nat" + return dict(path=tmp_path, filename=tmp_filename, full_path=tmp_path / f"{tmp_filename}.nat") + + +def compress_seviri_native_file(path, seviri_native_filename): + """Compresses the given seviri native file into a zip file.""" + zip_full_path = path / f"{seviri_native_filename}.zip" + with zipfile.ZipFile(zip_full_path, mode="w") as archive: + archive.write(path / f"{seviri_native_filename}.nat", f"{seviri_native_filename}.nat") + return f"zip://*.nat::{zip_full_path}" def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_numpy): @@ -1347,26 +1357,31 @@ def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_n f.write(bytes_data) -@pytest.fixture() -def physical_seviri_native_file(tmp_seviri_nat_filename): +def physical_seviri_native_file(seviri_nat_full_file_path): """Creates a physical seviri native file on disk.""" hdr_null_type, hdr_null = generate_seviri_native_null_header() amend_seviri_native_null_header(hdr_null) - append_data_and_trailer_content_to_seviri_native_header(tmp_seviri_nat_filename, hdr_null) + append_data_and_trailer_content_to_seviri_native_header(seviri_nat_full_file_path, hdr_null) - return dict(header_type=hdr_null_type, header=hdr_null, filename=tmp_seviri_nat_filename) + return dict(header_type=hdr_null_type, header=hdr_null) -def test_read_physical_seviri_nat_file(physical_seviri_native_file): - """Tests that the physical seviri native file has been read successfully. 
+@pytest.mark.parametrize(("treat_native_file", "args"), [
+    (lambda path, filename: path / f"{filename}.nat", lf("tmp_seviri_nat_filename")),
+    (compress_seviri_native_file, lf("tmp_seviri_nat_filename"))
+])
+def test_read_physical_seviri_nat_file(tmp_seviri_nat_filename, treat_native_file, args):
+    """Tests that the physical seviri native file can be read successfully, in case of both a plain and a zip file.
 
     Note:
         The purpose of this function is not to fully test the properties of the scene. It only provides a test for
         reading a physical file from disk.
     """
-    scene = scene_from_physical_seviri_nat_file(physical_seviri_native_file["filename"])
+    native_file = physical_seviri_native_file(tmp_seviri_nat_filename["full_path"])
+    full_path = treat_native_file(args["path"], args["filename"])
+    scene = scene_from_physical_seviri_nat_file(full_path)
 
-    assert physical_seviri_native_file["header_type"] == physical_seviri_native_file["header"].dtype
+    assert native_file["header_type"] == native_file["header"].dtype
     assert scene.sensor_names == {"seviri"}
     assert len(scene.available_dataset_ids()) == 36
     assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST)

From 15d17d60db50c27ba5131e9c44dd34b0a8048be5 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 10:26:13 +0200
Subject: [PATCH 44/71] Reformat `test_seviri_l1b_native` to make it PEP8
 compliant

---
 satpy/tests/reader_tests/test_seviri_l1b_native.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index aab1e95568..ecb2b42122 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1004,12 +1004,12 @@ def test_get_dataset(self, file_handler):
     def test_time(self, file_handler):
         """Test start/end nominal/observation time handling."""
         assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time
-        assert dt.datetime(2006, 1, 1, 12, 15,) == file_handler.start_time
+        assert dt.datetime(2006, 1, 1, 12, 15, ) == file_handler.start_time
         assert file_handler.start_time == file_handler.nominal_start_time
 
         assert dt.datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time
         assert file_handler.end_time == file_handler.nominal_end_time
-        assert dt.datetime(2006, 1, 1, 12, 30,) == file_handler.end_time
+        assert dt.datetime(2006, 1, 1, 12, 30, ) == file_handler.end_time
 
     def test_repeat_cycle_duration(self, file_handler):
         """Test repeat cycle handling for FD or ReducedScan."""

From df4a6dbd861cd7a7b9d4e4eff94734b9525f86ed Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 10:42:27 +0200
Subject: [PATCH 45/71] Address @mraspaud comments on PR #2863

---
 AUTHORS.md                                 |   1 -
 .../reader_tests/test_seviri_l1b_native.py | 108 +++++++++---------
 2 files changed, 54 insertions(+), 55 deletions(-)

diff --git a/AUTHORS.md b/AUTHORS.md
index e764b96627..a8f2cb8e4b 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -95,5 +95,4 @@ The following people have made contributions to this project:
 - [Will Sharpe (wjsharpe)](https://github.com/wjsharpe)
 - [Sara Hörnquist (shornqui)](https://github.com/shornqui)
 - [Antonio Valentino](https://github.com/avalentino)
-- [Pouria Khalaj](https://github.com/pkhalaj)
 - [Clément (ludwigvonkoopa)](https://github.com/ludwigVonKoopa)
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index ecb2b42122..32b6c4fb32 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1258,11 +1258,44 @@ def test_read_header():
     assert actual == expected
 
 
-def generate_seviri_native_null_header():
-    """Generates the header of the seviri native format which is filled with zeros, hence, the term null!"""
-    header_type = Msg15NativeHeaderRecord().get(True)
-    null_header = np.zeros(header_type.shape, dtype=header_type).reshape(1, )
-    return header_type, null_header
+@pytest.fixture()
+def tmp_seviri_nat_filename(tmp_path):
+    """Creates a fully-qualified filename for a seviri native format file."""
+    tmp_filename = "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-NA"
+    return dict(path=tmp_path, filename=tmp_filename, full_path=tmp_path / f"{tmp_filename}.nat")
+
+
+def compress_seviri_native_file(path, seviri_native_filename):
+    """Compresses the given seviri native file into a zip file."""
+    zip_full_path = path / f"{seviri_native_filename}.zip"
+    with zipfile.ZipFile(zip_full_path, mode="w") as archive:
+        archive.write(path / f"{seviri_native_filename}.nat", f"{seviri_native_filename}.nat")
+    return f"zip://*.nat::{zip_full_path}"
+
+
+@pytest.mark.parametrize(("treat_native_file", "args"), [
+    (lambda path, filename: path / f"{filename}.nat", lf("tmp_seviri_nat_filename")),
+    (compress_seviri_native_file, lf("tmp_seviri_nat_filename"))
+])
+def test_read_physical_seviri_nat_file(tmp_seviri_nat_filename, treat_native_file, args):
+    """Tests that the physical seviri native file can be read successfully, in case of both a plain and a zip file.
+
+    Note:
+        The purpose of this function is not to fully test the properties of the scene. It only provides a test for
+        reading a physical file from disk.
+    """
+    native_file = physical_seviri_native_file(tmp_seviri_nat_filename["full_path"])
+    full_path = treat_native_file(args["path"], args["filename"])
+    scene = scene_from_physical_seviri_nat_file(full_path)
+
+    assert native_file["header_type"] == native_file["header"].dtype
+    assert scene.sensor_names == {"seviri"}
+    assert len(scene.available_dataset_ids()) == 36
+    assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST)
+
+    scene.load(["VIS006"])
+    assert scene["VIS006"].shape == (3712, 3712)
+    assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray)
 
 
 def scene_from_physical_seviri_nat_file(filename):
@@ -1270,6 +1303,22 @@ def scene_from_physical_seviri_nat_file(filename):
     return Scene([filename], reader="seviri_l1b_native", reader_kwargs={"fill_disk": True})
 
 
+def physical_seviri_native_file(seviri_nat_full_file_path):
+    """Creates a physical seviri native file on disk."""
+    header_type, header_null = generate_seviri_native_null_header()
+    amend_seviri_native_null_header(header_null)
+    append_data_and_trailer_content_to_seviri_native_header(seviri_nat_full_file_path, header_null)
+
+    return dict(header_type=header_type, header=header_null)
+
+
+def generate_seviri_native_null_header():
+    """Generates the header of the seviri native format which is filled with zeros, hence, the term null!"""
+    header_type = Msg15NativeHeaderRecord().get(True)
+    null_header = np.zeros(header_type.shape, dtype=header_type).reshape(1, )
+    return header_type, null_header
+
+
 def amend_seviri_native_null_header(hdr_null_numpy):
     """Amends the given null header so that the ``seviri_l1b_native`` reader can properly parse it.
@@ -1326,21 +1375,6 @@ def _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime():
     _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime()
 
 
-@pytest.fixture()
-def tmp_seviri_nat_filename(tmp_path):
-    """Creates a fully-qualified filename for a seviri native format file."""
-    tmp_filename = "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-NA"
-    return dict(path=tmp_path, filename=tmp_filename, full_path=tmp_path / f"{tmp_filename}.nat")
-
-
-def compress_seviri_native_file(path, seviri_native_filename):
-    """Compresses the given seviri native file into a zip file."""
-    zip_full_path = path / f"{seviri_native_filename}.zip"
-    with zipfile.ZipFile(zip_full_path, mode="w") as archive:
-        archive.write(path / f"{seviri_native_filename}.nat", f"{seviri_native_filename}.nat")
-    return f"zip://*.nat::{zip_full_path}"
-
-
 def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_numpy):
     """Generates the data and trailer part (null content) of the file and appends them to the null header.
 
@@ -1355,37 +1389,3 @@ def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_n
     hdr_null_numpy.tofile(filename)
     with open(filename, "ab") as f:
         f.write(bytes_data)
-
-
-def physical_seviri_native_file(seviri_nat_full_file_path):
-    """Creates a physical seviri native file on disk."""
-    hdr_null_type, hdr_null = generate_seviri_native_null_header()
-    amend_seviri_native_null_header(hdr_null)
-    append_data_and_trailer_content_to_seviri_native_header(seviri_nat_full_file_path, hdr_null)
-
-    return dict(header_type=hdr_null_type, header=hdr_null)
-
-
-@pytest.mark.parametrize(("treat_native_file", "args"), [
-    (lambda path, filename: path / f"{filename}.nat", lf("tmp_seviri_nat_filename")),
-    (compress_seviri_native_file, lf("tmp_seviri_nat_filename"))
-])
-def test_read_physical_seviri_nat_file(tmp_seviri_nat_filename, treat_native_file, args):
-    """Tests that the physical seviri native file can be read successfully, in case of both a plain and a zip file.
-
-    Note:
-        The purpose of this function is not to fully test the properties of the scene. It only provides a test for
-        reading a physical file from disk.
-    """
-    native_file = physical_seviri_native_file(tmp_seviri_nat_filename["full_path"])
-    full_path = treat_native_file(args["path"], args["filename"])
-    scene = scene_from_physical_seviri_nat_file(full_path)
-
-    assert native_file["header_type"] == native_file["header"].dtype
-    assert scene.sensor_names == {"seviri"}
-    assert len(scene.available_dataset_ids()) == 36
-    assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST)
-
-    scene.load(["VIS006"])
-    assert scene["VIS006"].shape == (3712, 3712)
-    assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray)

From 43b846b25cbaf8bc516df082e8f9e1ff8be5a8dd Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 11:32:22 +0200
Subject: [PATCH 46/71] Simplify fixtures and parameters of
 `test_read_physical_seviri_nat_file()`

---
 .../reader_tests/test_seviri_l1b_native.py | 42 ++++++++++---------
 1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 32b6c4fb32..b3117b91d9 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1258,37 +1258,44 @@ def test_read_header():
     assert actual == expected
 
 
-@pytest.fixture()
-def tmp_seviri_nat_filename(tmp_path):
+@pytest.fixture(scope="session")
+def session_tmp_path(tmp_path_factory):
+    """Generates a single temp path to use for the entire session."""
+    return tmp_path_factory.mktemp("data")
+
+
+@pytest.fixture(scope="session")
+def tmp_seviri_nat_filename(session_tmp_path):
     """Creates a fully-qualified filename for a seviri native format file."""
-    tmp_filename = "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-NA"
-    return dict(path=tmp_path, filename=tmp_filename, full_path=tmp_path / f"{tmp_filename}.nat")
+    full_file_path = session_tmp_path / "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-N.nat"
+    create_physical_seviri_native_file(full_file_path)
+    return full_file_path
 
 
-def compress_seviri_native_file(path, seviri_native_filename):
+@pytest.fixture(scope="session")
+def compress_seviri_native_file(tmp_seviri_nat_filename, session_tmp_path):
     """Compresses the given seviri native file into a zip file."""
-    zip_full_path = path / f"{seviri_native_filename}.zip"
+    zip_full_path = session_tmp_path / "test_seviri_native.zip"
     with zipfile.ZipFile(zip_full_path, mode="w") as archive:
-        archive.write(path / f"{seviri_native_filename}.nat", f"{seviri_native_filename}.nat")
+        archive.write(tmp_seviri_nat_filename, os.path.basename(tmp_seviri_nat_filename))
     return f"zip://*.nat::{zip_full_path}"
 
 
-@pytest.mark.parametrize(("treat_native_file", "args"), [
-    (lambda path, filename: path / f"{filename}.nat", lf("tmp_seviri_nat_filename")),
-    (compress_seviri_native_file, lf("tmp_seviri_nat_filename"))
+@pytest.mark.slow()
+@pytest.mark.order("last")
+@pytest.mark.parametrize(("full_path"), [
+    lf("tmp_seviri_nat_filename"),
+    lf("compress_seviri_native_file")
 ])
-def test_read_physical_seviri_nat_file(tmp_seviri_nat_filename, treat_native_file, args):
+def test_read_physical_seviri_nat_file(full_path):
     """Tests that the physical seviri native file can be read successfully, in case of both a plain and a zip file.
 
     Note:
        The purpose of this function is not to fully test the properties of the scene. It only provides a test for
        reading a physical file from disk.
     """
-    native_file = physical_seviri_native_file(tmp_seviri_nat_filename["full_path"])
-    full_path = treat_native_file(args["path"], args["filename"])
     scene = scene_from_physical_seviri_nat_file(full_path)
 
-    assert native_file["header_type"] == native_file["header"].dtype
     assert scene.sensor_names == {"seviri"}
     assert len(scene.available_dataset_ids()) == 36
     assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST)
@@ -1303,17 +1310,15 @@ def scene_from_physical_seviri_nat_file(filename):
     return Scene([filename], reader="seviri_l1b_native", reader_kwargs={"fill_disk": True})
 
 
-def physical_seviri_native_file(seviri_nat_full_file_path):
+def create_physical_seviri_native_file(seviri_nat_full_file_path):
     """Creates a physical seviri native file on disk."""
     header_type, header_null = generate_seviri_native_null_header()
     amend_seviri_native_null_header(header_null)
     append_data_and_trailer_content_to_seviri_native_header(seviri_nat_full_file_path, header_null)
 
-    return dict(header_type=header_type, header=header_null)
-
 
 def generate_seviri_native_null_header():
-    """Generates the header of the seviri native format which is filled with zeros, hence, the term null!"""
+    """Generates the header of the seviri native format which is filled with zeros, hence the term null!"""
     header_type = Msg15NativeHeaderRecord().get(True)
     null_header = np.zeros(header_type.shape, dtype=header_type).reshape(1, )
     return header_type, null_header
@@ -1382,7 +1387,6 @@ def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_n
     """
     # size of different parts of the seviri native file in bytes
     size = dict(header_with_archive=450400, data=270344960, trailer=380363)
-
     zero_bytes = bytearray(size["data"] + size["trailer"])
     bytes_data = bytes(zero_bytes)

From d4976c32e6336b9b0b84ca07e246489a5e5c001c Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 11:54:22 +0200
Subject: [PATCH 47/71] Use a header which leads to a smaller seviri nat file
 on disk

This concerns `test_read_physical_seviri_nat_file()`.
---
 .../reader_tests/test_seviri_l1b_native.py | 20 +++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index b3117b91d9..d9d54979df 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1345,15 +1345,15 @@ def _amend_15_MAIN_PRODUCT_HEADER():
         hdr_null_numpy[0][0][0] = (b"FormatName : ", b"NATIVE\n")
 
     def _amend_15_SECONDARY_PRODUCT_HEADER():
-        hdr_null_numpy[0][1][9] = (b"SelectedBandIDs", b"XXXXXXXXXXXX")
-        hdr_null_numpy[0][1][10] = (b"SouthLineSelectedRectangle", b"1")
-        hdr_null_numpy[0][1][11] = (b"NorthLineSelectedRectangle", b"3712")
-        hdr_null_numpy[0][1][12] = (b"EastColumnSelectedRectangle", b"1")
-        hdr_null_numpy[0][1][13] = (b"WestColumnSelectedRectangle", b"3712")
-        hdr_null_numpy[0][1][14] = (b"NumberLinesVISIR", b"3712")
-        hdr_null_numpy[0][1][15] = (b"NumberColumnsVISIR", b"3712")
-        hdr_null_numpy[0][1][16] = (b"NumberLinesHRV", b"11136")
-        hdr_null_numpy[0][1][17] = (b"NumberColumnsHRV", b"11136")
+        hdr_null_numpy[0][1][9] = (b"SelectedBandIDs", b"XXXXXXXXXXX-")
+        hdr_null_numpy[0][1][10] = (b"SouthLineSelectedRectangle", b"3360")
+        hdr_null_numpy[0][1][11] = (b"NorthLineSelectedRectangle", b"3373")
+        hdr_null_numpy[0][1][12] = (b"EastColumnSelectedRectangle", b"1714")
+        hdr_null_numpy[0][1][13] = (b"WestColumnSelectedRectangle", b"1729")
+        hdr_null_numpy[0][1][14] = (b"NumberLinesVISIR", b"14")
+        hdr_null_numpy[0][1][15] = (b"NumberColumnsVISIR", b"16")
+        hdr_null_numpy[0][1][16] = (b"NumberLinesHRV", b"42")
+        hdr_null_numpy[0][1][17] = (b"NumberColumnsHRV", b"48")
 
     def _amend_GP_PK_SH1__PacketTime():
         hdr_null_numpy[0][3][5] = (23158, 27921912)
@@ -1386,7 +1386,7 @@ def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_n
     The data and trailer are also null and appending them to the header results in a complete seviri nat file.
     """
     # size of different parts of the seviri native file in bytes
-    size = dict(header_with_archive=450400, data=270344960, trailer=380363)
+    size = {"header_with_archive": 450400, "data": 13090, "trailer": 380363}
     zero_bytes = bytearray(size["data"] + size["trailer"])
     bytes_data = bytes(zero_bytes)

From d22d2b6f17c850ce45f399d828435520530b4ff2 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 13:11:07 +0200
Subject: [PATCH 48/71] Make the returned path posix compliant in
 `compress_seviri_native_file`

This is to ensure that the remote reading tests can also run on Windows.
Note: `fsspec` expects a POSIX path.
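For illustration only, a minimal sketch of the chained URL form that `fsspec`
expects; the archive path below is hypothetical and serves just to show why
the POSIX conversion matters:

    from pathlib import PureWindowsPath

    import fsspec

    # Hypothetical archive location; raw backslashes would break the URL.
    zip_path = PureWindowsPath(r"C:\data\test_seviri_native.zip")

    # fsspec chains protocols with "::": zip:// globs inside the archive,
    # file:// locates the archive itself, and that path must be POSIX-style.
    url = f"zip://*.nat::file://{zip_path.as_posix()}"

    # open_files() resolves the chain, one OpenFile per matching member.
    open_files = fsspec.open_files(url)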
---
 satpy/tests/reader_tests/test_seviri_l1b_native.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index d9d54979df..04198f27e3 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1278,7 +1278,7 @@ def compress_seviri_native_file(tmp_seviri_nat_filename, session_tmp_path):
     zip_full_path = session_tmp_path / "test_seviri_native.zip"
     with zipfile.ZipFile(zip_full_path, mode="w") as archive:
         archive.write(tmp_seviri_nat_filename, os.path.basename(tmp_seviri_nat_filename))
-    return f"zip://*.nat::{zip_full_path}"
+    return f"zip://*.nat::file://{zip_full_path.as_posix()}"

From db3ddb856747fcc0122c55dbdd7de7da07fd188f Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 13:18:31 +0200
Subject: [PATCH 49/71] Resolve warnings raised as a result of `slow` & `order`
 being unknown pytest marks

We do not need these marks anymore, as we decreased the size of the
generated seviri native file. As a result, the tests now run fast enough.
---
 satpy/tests/reader_tests/test_seviri_l1b_native.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 04198f27e3..1c5faf5736 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1281,8 +1281,6 @@ def compress_seviri_native_file(tmp_seviri_nat_filename, session_tmp_path):
     return f"zip://*.nat::file://{zip_full_path.as_posix()}"
 
 
-@pytest.mark.slow()
-@pytest.mark.order("last")
 @pytest.mark.parametrize(("full_path"), [
     lf("tmp_seviri_nat_filename"),
     lf("compress_seviri_native_file")

From fbd02b5b743256ec72788b2c8919634cbcecabb2 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 13:28:28 +0200
Subject: [PATCH 50/71] Resolve warnings raised as a result of a failure in the
 orbit polynomial

This warning is totally benign. It is caused by the seviri native file
that we create, which is essentially filled with zeros.
---
 satpy/tests/reader_tests/test_seviri_l1b_native.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 1c5faf5736..8ba8fcb0c6 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1298,9 +1298,11 @@ def test_read_physical_seviri_nat_file(full_path):
     assert len(scene.available_dataset_ids()) == 36
     assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST)
 
-    scene.load(["VIS006"])
-    assert scene["VIS006"].shape == (3712, 3712)
-    assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray)
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        scene.load(["VIS006"])
+        assert scene["VIS006"].shape == (3712, 3712)
+        assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray)

From f002b00cd3c83ed25808ba5cebe02cf8d21a1647 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 13:57:27 +0200
Subject: [PATCH 51/71] Fix the issue with the docstring in
 `amend_seviri_native_null_header()`

The issue was an unexpected indentation on the last line of the docstring!
---
 satpy/tests/reader_tests/test_seviri_l1b_native.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 8ba8fcb0c6..e88ff059b3 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1338,7 +1338,7 @@ def amend_seviri_native_null_header(hdr_null_numpy):
     For example, ``_amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId()`` corresponds to an
     auxiliary function which manipulates the following entry:
-        ``hdr_null_numpy_as_dict["15_DATA_HEADER"]["SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"]``
+    ``hdr_null_numpy_as_dict["15_DATA_HEADER"]["SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"]``
     """
 
     def _amend_15_MAIN_PRODUCT_HEADER():

From a7596d3e6bd20e1672e18d87d73a68ff25abbaf0 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Mon, 29 Jul 2024 15:16:08 +0200
Subject: [PATCH 52/71] Update `fsspec` support column for `seviri_l1b_native`
 reader in the readers table

---
 satpy/etc/readers/seviri_l1b_native.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/etc/readers/seviri_l1b_native.yaml b/satpy/etc/readers/seviri_l1b_native.yaml
index 8fbecd4e59..4aea36c174 100644
--- a/satpy/etc/readers/seviri_l1b_native.yaml
+++ b/satpy/etc/readers/seviri_l1b_native.yaml
@@ -5,7 +5,7 @@ reader:
   description: >
     Reader for EUMETSAT MSG SEVIRI Level 1b native format files.
   status: Nominal
-  supports_fsspec: false
+  supports_fsspec: true
  sensors: [seviri]
   default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
   reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader

From 80f3925818be3b6972c5c3652b42f81c68425ef9 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Tue, 30 Jul 2024 15:53:50 +0200
Subject: [PATCH 53/71] Use dask `map_blocks()` in `_get_array()`

This includes:
- Extracting the `_get_array()` method so that it is now a function in the
  module and not a class method.
- Introduction of the `NativeMSGFileHandler._make_dask_array_with_map_blocks()`
  method to utilize the dask `map_blocks()` (a condensed sketch of the pattern
  follows below, after this list).
- Introduction of a new method, namely
  `NativeMSGFileHandler._number_of_visir_channels()`, to facilitate testing
  and mock patching.
- Adapting the mock patches in tests accordingly.
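A minimal, self-contained sketch of the `map_blocks()` reading pattern
adopted here. The record dtype, file layout, and helper names are toy stand-ins
rather than satpy's real ones; only the use of `block_info[None]["array-location"]`
to turn chunk boundaries into file offsets mirrors the actual change:

    import dask.array as da
    import numpy as np

    # Toy record type standing in for the reader's real line record.
    RECORD = np.dtype([("line_data", np.uint8, (8,))])


    def _read_records(filename=None, hdr_size=None, block_info=None):
        # block_info[None] describes the output chunk this call must produce.
        info = block_info[None]
        dtype = info["dtype"]
        offset = hdr_size + info["array-location"][0][0] * dtype.itemsize
        return np.fromfile(filename, dtype=dtype, offset=offset,
                           count=info["chunk-shape"][0])


    def lazy_records(filename, n_records, hdr_size):
        # Each chunk is read lazily and independently, straight from disk.
        chunks = da.core.normalize_chunks("auto", shape=(n_records,), dtype=RECORD)
        return da.map_blocks(
            _read_records,
            dtype=RECORD,
            chunks=chunks,
            meta=np.zeros(1, dtype=RECORD),
            # Extra keyword arguments are forwarded to _read_records().
            filename=filename,
            hdr_size=hdr_size,
        )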
---
 satpy/readers/seviri_l1b_native.py                 | 42 +++++++++++++++----
 .../reader_tests/test_seviri_l1b_native.py         | 21 ++++++++--
 2 files changed, 50 insertions(+), 13 deletions(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index d5f3ba2692..b09a947e1c 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -193,10 +193,27 @@ def __init__(self, filename, filename_info, filetype_info,
         # Available channels are known only after the header has been read
         self.header_type = get_native_header(has_archive_header(self.filename))
         self._read_header()
-        self.dask_array = da.from_array(self._get_array(), chunks=(CHUNK_SIZE,))
+        self._make_dask_array_with_map_blocks()
         self._read_trailer()
         self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda)
 
+    def _make_dask_array_with_map_blocks(self):
+        """Makes the dask array using the ``da.map_blocks()`` functionality."""
+        dtype = self._get_data_dtype()
+        chunks = da.core.normalize_chunks(
+            "auto",
+            shape=(self.mda["number_of_lines"],),
+            dtype=dtype)
+        self.dask_array = da.map_blocks(
+            _get_array,
+            dtype=dtype,
+            chunks=chunks,
+            meta=np.zeros(1, dtype=dtype),
+            # The following will be passed as keyword arguments to the `_get_array()` function.
+            filename=self.filename,
+            hdr_size=self.header_type.itemsize
+        )
+
     @property
     def _repeat_cycle_duration(self):
         """Get repeat cycle duration from the trailer."""
@@ -266,9 +283,7 @@ def get_lrec(cols):
         # each pixel is 10-bits -> one line of data has 25% more bytes
         # than the number of columns suggest (10/8 = 1.25)
         visir_rec = get_lrec(int(self.mda["number_of_columns"] * 1.25))
-        number_of_visir_channels = len(
-            [s for s in self.mda["channel_list"] if not s == "HRV"])
-        drec = [("visir", (visir_rec, number_of_visir_channels))]
+        drec = [("visir", (visir_rec, self._number_of_visir_channels()))]
 
         if self.mda["available_channels"]["HRV"]:
             hrv_rec = get_lrec(int(self.mda["hrv_number_of_columns"] * 1.25))
@@ -276,11 +291,9 @@ def get_lrec(cols):
 
         return np.dtype(drec)
 
-    def _get_array(self):
-        """Get the numpy array for the SEVIRI data."""
-        data_dtype = self._get_data_dtype()
-        hdr_size = self.header_type.itemsize
-        return fromfile(self.filename, dtype=data_dtype, offset=hdr_size, count=self.mda["number_of_lines"])
+    def _number_of_visir_channels(self):
+        """Returns the number of visir channels, i.e. all channels excluding ``HRV``."""
+        return len([s for s in self.mda["channel_list"] if not s == "HRV"])
 
     def _read_header(self):
         """Read the header info."""
@@ -891,3 +904,14 @@ def read_header(filename):
     dtype = get_native_header(has_archive_header(filename))
     hdr = fromfile(filename, dtype=dtype, count=1)
     return recarray2dict(hdr)
+
+
+def _get_array(filename=None, hdr_size=None, block_info=None):
+    """Get the numpy array for the SEVIRI data."""
+    output_block_info = block_info[None]
+    data_dtype = output_block_info["dtype"]
+    return fromfile(
+        filename,
+        dtype=data_dtype,
+        offset=hdr_size + output_block_info["array-location"][0][0] * data_dtype.itemsize,
+        count=output_block_info["chunk-shape"][0])
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index e88ff059b3..4c09ca0381 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -638,7 +638,9 @@ def prepare_area_definitions(test_dict):
 
     with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \
             mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \
-            mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \
+            mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \
+            mock.patch(
+                "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \
             mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \
             mock.patch(
                 "satpy.readers.seviri_l1b_native.has_archive_header"
             ) as has_archive_header:
         fromfile.return_value = header
         recarray2dict.side_effect = (lambda x: x)
         _get_array.return_value = np.arange(3)
+        _n_visir_ch.return_value = 11
         fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
         fh.fill_disk = fill_disk
         fh.header = header
@@ -722,7 +725,9 @@ def prepare_is_roi(test_dict):
 
     with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \
             mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \
-            mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \
+            mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \
+            mock.patch(
+                "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \
             mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \
             mock.patch(
                 "satpy.readers.seviri_l1b_native.has_archive_header"
             ) as has_archive_header:
         fromfile.return_value = header
         recarray2dict.side_effect = (lambda x: x)
         _get_array.return_value = np.arange(3)
+        _n_visir_ch.return_value = 11
         fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
         fh.header = header
         fh.trailer = trailer
@@ -1172,12 +1178,15 @@ def test_header_type(file_content, exp_header_size):
         header.pop("15_SECONDARY_PRODUCT_HEADER")
     with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \
             mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \
-            mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \
+            mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \
+            mock.patch(
+                "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \
             mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \
             mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=file_content)):
         fromfile.return_value = header
         recarray2dict.side_effect = (lambda x: x)
         _get_array.return_value = np.arange(3)
+        _n_visir_ch.return_value = 11
         fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
         assert fh.header_type.itemsize == exp_header_size
         assert "15_SECONDARY_PRODUCT_HEADER" in fh.header
@@ -1202,7 +1211,9 @@ def test_header_warning():
 
     with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \
             mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \
-            mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_array") as _get_array, \
+            mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \
+            mock.patch(
+                "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \
             mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \
             mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=ASCII_STARTSWITH)):
         recarray2dict.side_effect = (lambda x: x)
 
         exp_warning = "The quality flag for this file indicates not OK. Use this data with caution!"
         fromfile.return_value = header_good
+        _n_visir_ch.return_value = 11
+
         with warnings.catch_warnings():
             warnings.simplefilter("error")
             NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)

From abe48ae022a98a6f92807bc8b389ac4fbe2bbe91 Mon Sep 17 00:00:00 2001
From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com>
Date: Tue, 13 Aug 2024 09:13:07 +0200
Subject: [PATCH 54/71] support FIRG file-patterns coming from EUMETCast-Europe

---
 satpy/etc/readers/seviri_l2_grib.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml
index cbe6c81f09..5d7a204e24 100644
--- a/satpy/etc/readers/seviri_l2_grib.yaml
+++ b/satpy/etc/readers/seviri_l2_grib.yaml
@@ -60,6 +60,7 @@ file_types:
       - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}'
       - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb'
       - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb'
+      - 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{spacecraft:5s}+FIRG_C_{server:4s}_{start_time:%Y%m%d%H%M%S}_{ord_num:1s}.bin'
 
 # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product
 # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB

From 42dfc051b7aa4382748681a1908cbc8cd3ffc209 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Tue, 13 Aug 2024 10:49:07 +0200
Subject: [PATCH 55/71] Replace `np.zeros` with `np.empty` in the meta
 parameter of the dask array

---
 satpy/readers/seviri_l1b_native.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index b09a947e1c..f1bf5372a7 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -208,7 +208,7 @@ def _make_dask_array_with_map_blocks(self):
             _get_array,
             dtype=dtype,
             chunks=chunks,
-            meta=np.zeros(1, dtype=dtype),
+            meta=np.empty(1, dtype=dtype),
             # The following will be passed as keyword arguments to the `_get_array()` function.
             filename=self.filename,
             hdr_size=self.header_type.itemsize

From 1c00de7296ec2a354f4291757ec16c5ba25a46c4 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Tue, 13 Aug 2024 10:50:11 +0200
Subject: [PATCH 56/71] Making `dask_array` private by prefixing it with `_`

---
 satpy/readers/seviri_l1b_native.py                 | 14 +++++++-------
 satpy/tests/reader_tests/test_seviri_l1b_native.py |  2 +-
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index f1bf5372a7..2cda9a613a 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -204,7 +204,7 @@ def _make_dask_array_with_map_blocks(self):
             "auto",
             shape=(self.mda["number_of_lines"],),
             dtype=dtype)
-        self.dask_array = da.map_blocks(
+        self._dask_array = da.map_blocks(
             _get_array,
             dtype=dtype,
             chunks=chunks,
@@ -594,10 +594,10 @@ def _get_visir_channel(self, dataset_id):
         # Check if there is only 1 channel in the list as a change
         # is needed in the array assignment ie channel id is not present
         if len(self.mda["channel_list"]) == 1:
-            raw = self.dask_array["visir"]["line_data"]
+            raw = self._dask_array["visir"]["line_data"]
         else:
             i = self.mda["channel_list"].index(dataset_id["name"])
-            raw = self.dask_array["visir"]["line_data"][:, i, :]
+            raw = self._dask_array["visir"]["line_data"][:, i, :]
         data = dec10216(raw.flatten())
         data = data.reshape(shape)
         return data
@@ -608,7 +608,7 @@ def _get_hrv_channel(self):
 
         data_list = []
         for i in range(3):
-            raw = self.dask_array["hrv"]["line_data"][:, i, :]
+            raw = self._dask_array["hrv"]["line_data"][:, i, :]
             data = dec10216(raw.flatten())
             data = data.reshape(shape_layer)
             data_list.append(data)
@@ -667,7 +667,7 @@ def _add_scanline_acq_time(self, dataset, dataset_id):
 
     def _get_acq_time_hrv(self):
         """Get raw acquisition time for HRV channel."""
-        tline = self.dask_array["hrv"]["acq_time"]
+        tline = self._dask_array["hrv"]["acq_time"]
         tline0 = tline[:, 0]
         tline1 = tline[:, 1]
         tline2 = tline[:, 2]
@@ -679,9 +679,9 @@ def _get_acq_time_visir(self, dataset_id):
         # Check if there is only 1 channel in the list as a change
         # is needed in the array assignment, i.e. channel id is not present
         if len(self.mda["channel_list"]) == 1:
-            return self.dask_array["visir"]["acq_time"].compute()
+            return self._dask_array["visir"]["acq_time"].compute()
         i = self.mda["channel_list"].index(dataset_id["name"])
-        return self.dask_array["visir"]["acq_time"][:, i].compute()
+        return self._dask_array["visir"]["acq_time"][:, i].compute()
 
     def _update_attrs(self, dataset, dataset_info):
         """Update dataset attributes."""
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 4c09ca0381..10fb8ef726 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -928,7 +928,7 @@ def file_handler(self):
             fh.header = header
             fh.trailer = trailer
             fh.mda = mda
-            fh.dask_array = da.from_array(data)
+            fh._dask_array = da.from_array(data)
             fh.platform_id = 324
             fh.fill_disk = False
             fh.calib_mode = "NOMINAL"

From b58823c90e74dab21fe6a76e25f1e87918b58f4c Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Tue, 13 Aug 2024 11:01:00 +0200
Subject: [PATCH 57/71] Replace `np.empty` with `np.array` for performance
 (less memory footprint?)
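For context on the `meta` argument tuned in the last two patches: dask uses
`meta` only to infer the output's array type and dtype, so a zero-element
array such as `np.array([], dtype=dtype)` carries the same information as
`np.empty(1, dtype=dtype)` without allocating a record. A toy illustration
(made-up dtype, not the reader's real one):

    import dask.array as da
    import numpy as np

    dtype = np.dtype([("line_data", np.uint8, (4,))])

    arr = da.map_blocks(
        lambda: np.zeros(3, dtype=dtype),
        dtype=dtype,
        chunks=((3,),),
        # Zero-element meta: same type/dtype information, nothing allocated.
        meta=np.array([], dtype=dtype),
    )

    print(arr._meta.dtype, arr._meta.size)  # the structured dtype, size 0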
---
 satpy/readers/seviri_l1b_native.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index 2cda9a613a..340cba2028 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -208,7 +208,7 @@ def _make_dask_array_with_map_blocks(self):
             _get_array,
             dtype=dtype,
             chunks=chunks,
-            meta=np.empty(1, dtype=dtype),
+            meta=np.array([], dtype=dtype),
             # The following will be passed as keyword arguments to the `_get_array()` function.
             filename=self.filename,
             hdr_size=self.header_type.itemsize

From d22d2b6f17c850ce45f399d828435520530b4ff2 Mon Sep 17 00:00:00 2001
From: Pouria Khalaj
Date: Wed, 14 Aug 2024 08:59:21 +0200
Subject: [PATCH 58/71] Change the first line of docstrings to imperative mood.

---
 satpy/readers/seviri_l1b_native.py                 |  4 ++--
 satpy/readers/utils.py                             |  2 +-
 .../reader_tests/test_seviri_l1b_native.py         | 18 +++++++++---------
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index 340cba2028..3eaa9b4dfd 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -198,7 +198,7 @@ def __init__(self, filename, filename_info, filetype_info,
         self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda)
 
     def _make_dask_array_with_map_blocks(self):
-        """Makes the dask array using the ``da.map_blocks()`` functionality."""
+        """Make the dask array using the ``da.map_blocks()`` functionality."""
         dtype = self._get_data_dtype()
         chunks = da.core.normalize_chunks(
             "auto",
@@ -292,7 +292,7 @@ def get_lrec(cols):
         return np.dtype(drec)
 
     def _number_of_visir_channels(self):
-        """Returns the number of visir channels, i.e. all channels excluding ``HRV``."""
+        """Return the number of visir channels, i.e. all channels excluding ``HRV``."""
         return len([s for s in self.mda["channel_list"] if not s == "HRV"])
 
     def _read_header(self):
         """Read the header info."""
diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py
index e45e78aef1..983225acd5 100644
--- a/satpy/readers/utils.py
+++ b/satpy/readers/utils.py
@@ -362,7 +362,7 @@ def generic_open(filename, *args, **kwargs):
 
 
 def fromfile(filename, dtype, count=1, offset=0):
-    """Reads the numpy array from a (remote or local) file using a buffer.
+    """Read the numpy array from a (remote or local) file using a buffer.
 
     Note:
         This function relies on the :func:`generic_open` context manager to read a file remotely.
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 10fb8ef726..eab987d41f 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1273,13 +1273,13 @@ def test_read_header():
 
 @pytest.fixture(scope="session")
 def session_tmp_path(tmp_path_factory):
-    """Generates a single temp path to use for the entire session."""
+    """Generate a single temp path to use for the entire session."""
     return tmp_path_factory.mktemp("data")
 
 
 @pytest.fixture(scope="session")
 def tmp_seviri_nat_filename(session_tmp_path):
-    """Creates a fully-qualified filename for a seviri native format file."""
+    """Create a fully-qualified filename for a seviri native format file."""
     full_file_path = session_tmp_path / "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-N.nat"
     create_physical_seviri_native_file(full_file_path)
     return full_file_path
@@ -1287,7 +1287,7 @@ def tmp_seviri_nat_filename(session_tmp_path):
 
 @pytest.fixture(scope="session")
 def compress_seviri_native_file(tmp_seviri_nat_filename, session_tmp_path):
-    """Compresses the given seviri native file into a zip file."""
+    """Compress the given seviri native file into a zip file."""
     zip_full_path = session_tmp_path / "test_seviri_native.zip"
     with zipfile.ZipFile(zip_full_path, mode="w") as archive:
         archive.write(tmp_seviri_nat_filename, os.path.basename(tmp_seviri_nat_filename))
@@ -1299,7 +1299,7 @@ def compress_seviri_native_file(tmp_seviri_nat_filename, session_tmp_path):
     lf("compress_seviri_native_file")
 ])
 def test_read_physical_seviri_nat_file(full_path):
-    """Tests that the physical seviri native file can be read successfully, in case of both a plain and a zip file.
+    """Test that the physical seviri native file can be read successfully, in case of both a plain and a zip file.
 
     Note:
         The purpose of this function is not to fully test the properties of the scene. It only provides a test for
         reading a physical file from disk.
@@ -1319,26 +1319,26 @@ def test_read_physical_seviri_nat_file(full_path):
 
 
 def scene_from_physical_seviri_nat_file(filename):
-    """Generates a Scene object from the given seviri native file."""
+    """Generate a Scene object from the given seviri native file."""
     return Scene([filename], reader="seviri_l1b_native", reader_kwargs={"fill_disk": True})
 
 
 def create_physical_seviri_native_file(seviri_nat_full_file_path):
-    """Creates a physical seviri native file on disk."""
+    """Create a physical seviri native file on disk."""
     header_type, header_null = generate_seviri_native_null_header()
     amend_seviri_native_null_header(header_null)
     append_data_and_trailer_content_to_seviri_native_header(seviri_nat_full_file_path, header_null)
 
 
 def generate_seviri_native_null_header():
-    """Generates the header of the seviri native format which is filled with zeros, hence the term null!"""
+    """Generate the header of the seviri native format which is filled with zeros, hence the term null!"""
     header_type = Msg15NativeHeaderRecord().get(True)
     null_header = np.zeros(header_type.shape, dtype=header_type).reshape(1, )
     return header_type, null_header
 
 
 def amend_seviri_native_null_header(hdr_null_numpy):
-    """Amends the given null header so that the ``seviri_l1b_native`` reader can properly parse it.
+    """Amend the given null header so that the ``seviri_l1b_native`` reader can properly parse it.
 
     This is achieved by setting values for the bare minimum number of header fields so that the reader can make
     sense of the given header. This function relies on a number of auxiliary functions all of which are enclosed in the body of
@@ -1394,7 +1394,7 @@ def _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime():
 
 
 def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_numpy):
-    """Generates the data and trailer part (null content) of the file and appends them to the null header.
+    """Generate the data and trailer part (null content) of the file and append them to the null header.
 
     The data and trailer are also null and appending them to the header results in a complete seviri nat file.
     """

From 278d00b09a4885a9f52dbe6f57db6a64749ccc47 Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Thu, 15 Aug 2024 15:13:56 +0200
Subject: [PATCH 59/71] Update changelog for v0.51.0

---
 CHANGELOG.md | 37 +++++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d0f3f9fff6..b421696af8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,40 @@
+## Version 0.51.0 (2024/08/15)
+
+### Issues Closed
+
+* [Issue 2881](https://github.com/pytroll/satpy/issues/2881) - seviri_l2_grib-reader support for filename patterns of files coming from EUMETCast-Europe ([PR 2882](https://github.com/pytroll/satpy/pull/2882) by [@fwfichtner](https://github.com/fwfichtner))
+* [Issue 2877](https://github.com/pytroll/satpy/issues/2877) - NWCSAF GEO images black with numpy 2
+* [Issue 2872](https://github.com/pytroll/satpy/issues/2872) - nwcsaf-geo reader turns uint8 into int64 on numpy 2.0 ([PR 2874](https://github.com/pytroll/satpy/pull/2874) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2859](https://github.com/pytroll/satpy/issues/2859) - LI L2 Accumulated products retrieved from archive ("ARC" 10-min files) have faulty reading ([PR 2867](https://github.com/pytroll/satpy/pull/2867) by [@ameraner](https://github.com/ameraner))
+* [Issue 2836](https://github.com/pytroll/satpy/issues/2836) - Confusing documentation for creating a Scene without a filename or reader ([PR 2868](https://github.com/pytroll/satpy/pull/2868) by [@joleenf](https://github.com/joleenf))
+
+In this release 5 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2876](https://github.com/pytroll/satpy/pull/2876) - Fix AWIPS tiled writer handling of odd units in VIIRS EDR products
+* [PR 2874](https://github.com/pytroll/satpy/pull/2874) - Avoid accidental NWCSAF-GEO type promotion ([2872](https://github.com/pytroll/satpy/issues/2872))
+* [PR 2867](https://github.com/pytroll/satpy/pull/2867) - Fix LI L2 reader for accumulated products from archive ([2859](https://github.com/pytroll/satpy/issues/2859))
+* [PR 2866](https://github.com/pytroll/satpy/pull/2866) - Fix FCI L1c reader for African products
+
+#### Features added
+
+* [PR 2882](https://github.com/pytroll/satpy/pull/2882) - support FIRG file-patterns coming from EUMETCast-Europe ([2881](https://github.com/pytroll/satpy/issues/2881))
+* [PR 2867](https://github.com/pytroll/satpy/pull/2867) - Fix LI L2 reader for accumulated products from archive ([2859](https://github.com/pytroll/satpy/issues/2859))
+* [PR 2863](https://github.com/pytroll/satpy/pull/2863) - Adapt the SEVIRI native format reader in Satpy to support remote reading
+* [PR 2862](https://github.com/pytroll/satpy/pull/2862) - Update thresholds for FCI geo_color low-level cloud layer
+* [PR 2843](https://github.com/pytroll/satpy/pull/2843) - feat: Enable to read for the Q4 coverage and the IQTI files for the fci l1c data
+* [PR 1916](https://github.com/pytroll/satpy/pull/1916) - Add Gld360 ualf2 reader
+
+#### Documentation changes
+
+* [PR 2868](https://github.com/pytroll/satpy/pull/2868) - Clarify Scene Documentation without Readers ([2836](https://github.com/pytroll/satpy/issues/2836))
+
+In this release 11 pull requests were closed.
+
+
 ## Version 0.50.0 (2024/07/26)
 
 ### Issues Closed

From e7d5656e6da642958254eed6d466d2684ddb6e40 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Thu, 22 Aug 2024 12:19:30 -0500
Subject: [PATCH 60/71] Update pyhdf-based arrs to be manually tokenized

This avoids a bug in dask or cloudpickle that alters the state of the
pyhdf SDS object in some way, making it unusable.
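A reduced sketch of the manual-tokenization idea; the wrapper below is
hypothetical and only `tokenize` plus the `name=` argument mirror the actual
change. Naming the dask array from cheap, stable identifiers keeps dask from
hashing (and thereby pickling) the array-like object itself:

    import dask.array as da
    from dask.base import tokenize

    def lazy_wrap(array_like, src_path, var_name, chunks="auto"):
        # Build a deterministic task name from identifiers that are cheap to
        # hash, instead of letting dask tokenize `array_like` itself.
        token = tokenize(src_path, var_name, chunks)
        # Variable name first, to keep dask graphs easy to read while debugging.
        return da.from_array(array_like, chunks=chunks, name=f"{var_name}-{token}")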
--- satpy/readers/hdf4_utils.py | 24 +++++++++++++++++++----- satpy/readers/hdfeos_base.py | 2 +- satpy/readers/modis_l1b.py | 4 ++-- satpy/readers/modis_l2.py | 2 +- 4 files changed, 23 insertions(+), 9 deletions(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index d6258d9d62..2b836e9f26 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -18,10 +18,12 @@ """Helpers for reading hdf4-based files.""" import logging +import os import dask.array as da import numpy as np import xarray as xr +from dask.base import tokenize from pyhdf.SD import SD, SDC, SDS from satpy.readers.file_handlers import BaseFileHandler @@ -45,12 +47,24 @@ } -def from_sds(var, *args, **kwargs): +def from_sds(var, src_path, *args, **kwargs): """Create a dask array from a SD dataset.""" - var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) - shape = var.info()[2] + var_info = var.info() + var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var_info[3]]) + shape = var_info[2] var.__dict__["shape"] = shape if isinstance(shape, (tuple, list)) else tuple(shape) - return da.from_array(var, *args, **kwargs) + + name = kwargs.pop("name", None) + if name is None: + var_name = var_info[0] + tokenize_args = (os.fspath(src_path), var_name) + if args: + tokenize_args += (args,) + if kwargs: + tokenize_args += (kwargs,) + # put variable name in the front for easier dask debugging + name = var_name + "-" + tokenize(*tokenize_args) + return da.from_array(var, *args, name=name, **kwargs) class HDF4FileHandler(BaseFileHandler): @@ -92,7 +106,7 @@ def collect_metadata(self, name, obj): def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" - dask_arr = from_sds(val, chunks=chunks) + dask_arr = from_sds(val, self.filename, chunks=chunks) attrs = val.attributes() return xr.DataArray(dask_arr, dims=("y", "x"), attrs=attrs) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 3fd920c01f..7c25e1d09a 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -216,7 +216,7 @@ def load_dataset(self, dataset_name, is_category=False): dataset = self._read_dataset_in_file(dataset_name) chunks = self._chunks_for_variable(dataset) - dask_arr = from_sds(dataset, chunks=chunks) + dask_arr = from_sds(dataset, self.filename, chunks=chunks) dims = ("y", "x") if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index 8280b30065..17bf5d56ae 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -117,7 +117,7 @@ def get_dataset(self, key, info): var_attrs = subdata.attributes() uncertainty = self.sd.select(var_name + "_Uncert_Indexes") chunks = self._chunks_for_variable(subdata) - array = xr.DataArray(from_sds(subdata, chunks=chunks)[band_index, :, :], + array = xr.DataArray(from_sds(subdata, self.filename, chunks=chunks)[band_index, :, :], dims=["y", "x"]).astype(np.float32) valid_range = var_attrs["valid_range"] valid_min = np.float32(valid_range[0]) @@ -214,7 +214,7 @@ def _mask_uncertain_pixels(self, array, uncertainty, band_index): if not self._mask_saturated: return array uncertainty_chunks = self._chunks_for_variable(uncertainty) - band_uncertainty = from_sds(uncertainty, chunks=uncertainty_chunks)[band_index, :, :] + band_uncertainty = from_sds(uncertainty, self.filename, chunks=uncertainty_chunks)[band_index, :, :] array = array.where(band_uncertainty < 15) return array diff 
--git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 8fdf1c69bb..2f2555692d 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -111,7 +111,7 @@ def read_geo_resolution(metadata): def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): """Load a dataset from HDF-EOS level 2 file.""" dataset = self.sd.select(hdf_dataset_name) - dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) + dask_arr = from_sds(dataset, self.filename, chunks=CHUNK_SIZE) attrs = dataset.attributes() dims = ["y", "x"] if byte_dimension == 0: From 8a9d85cce7cb46436c77caad32c3b2d77ffa1425 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 23 Aug 2024 09:46:27 -0500 Subject: [PATCH 61/71] Remove unnecessary *args from from_sds function --- satpy/readers/hdf4_utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index 2b836e9f26..17c162b5de 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -47,7 +47,7 @@ } -def from_sds(var, src_path, *args, **kwargs): +def from_sds(var, src_path, **kwargs): """Create a dask array from a SD dataset.""" var_info = var.info() var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var_info[3]]) @@ -58,8 +58,6 @@ def from_sds(var, src_path, *args, **kwargs): if name is None: var_name = var_info[0] tokenize_args = (os.fspath(src_path), var_name) - if args: - tokenize_args += (args,) if kwargs: tokenize_args += (kwargs,) # put variable name in the front for easier dask debugging From 5e27be4449305933393ac6ed5477d292eeb1ddec Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 23 Aug 2024 09:55:37 -0500 Subject: [PATCH 62/71] Fix missed use of *args --- satpy/readers/hdf4_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index 17c162b5de..10f3b24b66 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -62,7 +62,7 @@ def from_sds(var, src_path, **kwargs): tokenize_args += (kwargs,) # put variable name in the front for easier dask debugging name = var_name + "-" + tokenize(*tokenize_args) - return da.from_array(var, *args, name=name, **kwargs) + return da.from_array(var, name=name, **kwargs) class HDF4FileHandler(BaseFileHandler): From 25f147a77edbe371b8fbdd267c61b1e4465b511d Mon Sep 17 00:00:00 2001 From: manu Date: Tue, 27 Aug 2024 17:07:05 +0200 Subject: [PATCH 63/71] Removing CONTRIBUTING.rst symlink --- doc/source/dev_guide/CONTRIBUTING.rst | 1 - 1 file changed, 1 deletion(-) delete mode 120000 doc/source/dev_guide/CONTRIBUTING.rst diff --git a/doc/source/dev_guide/CONTRIBUTING.rst b/doc/source/dev_guide/CONTRIBUTING.rst deleted file mode 120000 index ac9338fc25..0000000000 --- a/doc/source/dev_guide/CONTRIBUTING.rst +++ /dev/null @@ -1 +0,0 @@ -../../../CONTRIBUTING.rst \ No newline at end of file From f01b57bccfb1daa6dce06d9c55a9ec28529ceb1c Mon Sep 17 00:00:00 2001 From: manu Date: Tue, 27 Aug 2024 17:08:13 +0200 Subject: [PATCH 64/71] Adding new CONTRIBUTING.rst file. --- doc/source/dev_guide/CONTRIBUTING.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 doc/source/dev_guide/CONTRIBUTING.rst diff --git a/doc/source/dev_guide/CONTRIBUTING.rst b/doc/source/dev_guide/CONTRIBUTING.rst new file mode 100644 index 0000000000..7cea977c4d --- /dev/null +++ b/doc/source/dev_guide/CONTRIBUTING.rst @@ -0,0 +1,2 @@ +.. 
include:: ../../../CONTRIBUTING.rst + From 279a1033be5206a7875c48b4e6da4072090b0b47 Mon Sep 17 00:00:00 2001 From: administrator Date: Tue, 27 Aug 2024 17:12:25 +0200 Subject: [PATCH 65/71] Adding my name to authors.md --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 22ec5de7e4..d7e78eecae 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -67,6 +67,7 @@ The following people have made contributions to this project: - [Lars Ørum Rasmussen (loerum)](https://github.com/loerum) - [Martin Raspaud (mraspaud)](https://github.com/mraspaud) - [William Roberts (wroberts4)](https://github.com/wroberts4) +- [Emmanuel Roche (roche-emmanuel)](https://github.com/roche-emmanuel) - [Benjamin Rösner (BENR0)](https://github.com/BENR0) - [Pascale Roquet (roquetp)](https://github.com/roquetp) - [Kristian Rune Larsen](https://github.com/) From f6ba9a89be3469cd48bed92a09868b2b1382ad82 Mon Sep 17 00:00:00 2001 From: GMV - Emmanuel Roche Date: Tue, 27 Aug 2024 17:24:32 +0200 Subject: [PATCH 66/71] Applying fixes to README and CONTRIBUTING.rst --- README | 2 +- doc/source/dev_guide/CONTRIBUTING.rst | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/README b/README index 92cacd2853..a1320b1b4a 120000 --- a/README +++ b/README @@ -1 +1 @@ -README.rst \ No newline at end of file +README.rst diff --git a/doc/source/dev_guide/CONTRIBUTING.rst b/doc/source/dev_guide/CONTRIBUTING.rst index 7cea977c4d..b1cd2f37dc 100644 --- a/doc/source/dev_guide/CONTRIBUTING.rst +++ b/doc/source/dev_guide/CONTRIBUTING.rst @@ -1,2 +1 @@ .. include:: ../../../CONTRIBUTING.rst - From a25f056e745d172df8448fc806ee1e77203546d8 Mon Sep 17 00:00:00 2001 From: GMV - Emmanuel Roche Date: Tue, 27 Aug 2024 21:53:51 +0200 Subject: [PATCH 67/71] Updated contributing.rst content location. --- CONTRIBUTING.rst | 150 +------------------------- doc/source/dev_guide/CONTRIBUTING.rst | 149 ++++++++++++++++++++++++- 2 files changed, 151 insertions(+), 148 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 0474f5f9b5..4cb160f8bd 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -1,148 +1,4 @@ -================= -How to contribute -================= +.. If reading this .rst file as plain text please refer directly + to the file included below for contribution guidelines. -Thank you for considering contributing to Satpy! Satpy's development team -is made up of volunteers so any help we can get is very appreciated. - -Contributions from users are what keep this community going. We welcome -any contributions including bug reports, documentation fixes or updates, -bug fixes, and feature requests. By contributing to Satpy you are providing -code that everyone can use and benefit from. - -The following guidelines will describe how the Satpy project structures -its code contributions from discussion to code to package release. - -For more information on contributing to open source projects see -`GitHub's Guide `_. - -What can I do? -============== - -- Make sure you have a `GitHub account `_. -- Submit a ticket for your issue, assuming one does not already exist. -- If you're uncomfortable using Git/GitHub, see - `Learn Git Branching `_ or other - online tutorials. -- If you are uncomfortable contributing to an open source project see: - - * `How to Contribute to an Open Source Project on GitHub `_ - video series - * Aaron Meurer's `Git Workflow `_ - * `How to Contribute to Open Source `_ - -- See what `issues `_ already - exist. 
Issues marked - `good first issue `_ - or `help wanted `_ - can be good issues to start with. -- Read the :doc:`index` for more details on contributing code. -- `Fork `_ the repository on - GitHub and install the package in development mode. -- Update the Satpy documentation to make it clearer and more detailed. -- Contribute code to either fix a bug or add functionality and submit a - `Pull Request `_. -- Make an example Jupyter Notebook and add it to the - `available examples `_. - -What if I break something? -========================== - -Not possible. If something breaks because of your contribution it was our -fault. When you submit your changes to be merged as a GitHub -`Pull Request `_ -they will be automatically tested and checked against coding style rules. -Before they are merged they are reviewed by at least one maintainer of the -Satpy project. If anything needs updating, we'll let you know. - -What is expected? -================= - -You can expect the Satpy maintainers to help you. We are all volunteers, -have jobs, and occasionally go on vacations. We will try our best to answer -your questions as soon as possible. We will try our best to understand your -use case and add the features you need. Although we strive to make -Satpy useful for everyone there may be some feature requests that we can't -allow if they would require breaking existing features. Other features may -be best for a different package, PyTroll or otherwise. Regardless, we will -help you find the best place for your feature and to make it possible to do -what you want. - -We, the Satpy maintainers, expect you to be patient, understanding, and -respectful of both developers and users. Satpy can only be successful if -everyone in the community feels welcome. We also expect you to put in as -much work as you expect out of us. There is no dedicated PyTroll or Satpy -support team, so there may be times when you need to do most of the work -to solve your problem (trying different test cases, environments, etc). - -Being respectful includes following the style of the existing code for any -code submissions. Please follow -`PEP8 `_ style guidelines and -limit lines of code to 80 characters whenever possible and when it doesn't -hurt readability. Satpy follows -`Google Style Docstrings `_ -for all code API documentation. When in doubt use the existing code as a -guide for how coding should be done. - -.. _dev_help: - -How do I get help? -================== - -The Satpy developers (and all other PyTroll package developers) monitor the: - -- `Mailing List `_ -- `Slack chat `_ (get an `invitation `_) -- `GitHub issues `_ - -How do I submit my changes? -=========================== - -Any contributions should start with some form of communication (see above) to -let the Satpy maintainers know how you plan to help. The larger the -contribution the more important direct communication is so everyone can avoid -duplicate code and wasted time. -After talking to the Satpy developers any additional work like code or -documentation changes can be provided as a GitHub -`Pull Request `_. - -To make sure that your code complies with the pytroll python standard, you can -run the `flake8 `_ linter on your changes -before you submit them, or even better install a pre-commit hook that runs the -style check for you. To this aim, we provide a configuration file for the -`pre-commit `_ tool, that you can install with eg:: - - pip install pre-commit - pre-commit install - -running from your base satpy directory. 
This will automatically check code style for every commit. - -Code of Conduct -=============== - -Satpy follows the same code of conduct as the PyTroll project. For reference -it is copied to this repository in -`CODE_OF_CONDUCT.md `_. - -As stated in the PyTroll home page, this code of conduct applies to the -project space (GitHub) as well as the public space online and offline when -an individual is representing the project or the community. Online examples -of this include the PyTroll Slack team, mailing list, and the PyTroll twitter -account. This code of conduct also applies to in-person situations like -PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when -the project is being represented. - -Any violations of this code of conduct will be handled by the core maintainers -of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. -If you wish to report one of the maintainers for a violation and are -not comfortable with them seeing it, please contact one or more of the other -maintainers to report the violation. Responses to violations will be -determined by the maintainers and may include one or more of the following: - -- Verbal warning -- Ask for public apology -- Temporary or permanent ban from in-person events -- Temporary or permanent ban from online communication (Slack, mailing list, etc) - -For details see the official -`code of conduct document `_. +.. include:: doc/source/dev_guide/CONTRIBUTING.rst diff --git a/doc/source/dev_guide/CONTRIBUTING.rst b/doc/source/dev_guide/CONTRIBUTING.rst index b1cd2f37dc..0474f5f9b5 100644 --- a/doc/source/dev_guide/CONTRIBUTING.rst +++ b/doc/source/dev_guide/CONTRIBUTING.rst @@ -1 +1,148 @@ -.. include:: ../../../CONTRIBUTING.rst +================= +How to contribute +================= + +Thank you for considering contributing to Satpy! Satpy's development team +is made up of volunteers so any help we can get is very appreciated. + +Contributions from users are what keep this community going. We welcome +any contributions including bug reports, documentation fixes or updates, +bug fixes, and feature requests. By contributing to Satpy you are providing +code that everyone can use and benefit from. + +The following guidelines will describe how the Satpy project structures +its code contributions from discussion to code to package release. + +For more information on contributing to open source projects see +`GitHub's Guide `_. + +What can I do? +============== + +- Make sure you have a `GitHub account `_. +- Submit a ticket for your issue, assuming one does not already exist. +- If you're uncomfortable using Git/GitHub, see + `Learn Git Branching `_ or other + online tutorials. +- If you are uncomfortable contributing to an open source project see: + + * `How to Contribute to an Open Source Project on GitHub `_ + video series + * Aaron Meurer's `Git Workflow `_ + * `How to Contribute to Open Source `_ + +- See what `issues `_ already + exist. Issues marked + `good first issue `_ + or `help wanted `_ + can be good issues to start with. +- Read the :doc:`index` for more details on contributing code. +- `Fork `_ the repository on + GitHub and install the package in development mode. +- Update the Satpy documentation to make it clearer and more detailed. +- Contribute code to either fix a bug or add functionality and submit a + `Pull Request `_. +- Make an example Jupyter Notebook and add it to the + `available examples `_. + +What if I break something? +========================== + +Not possible. 
If something breaks because of your contribution it was our +fault. When you submit your changes to be merged as a GitHub +`Pull Request `_ +they will be automatically tested and checked against coding style rules. +Before they are merged they are reviewed by at least one maintainer of the +Satpy project. If anything needs updating, we'll let you know. + +What is expected? +================= + +You can expect the Satpy maintainers to help you. We are all volunteers, +have jobs, and occasionally go on vacations. We will try our best to answer +your questions as soon as possible. We will try our best to understand your +use case and add the features you need. Although we strive to make +Satpy useful for everyone there may be some feature requests that we can't +allow if they would require breaking existing features. Other features may +be best for a different package, PyTroll or otherwise. Regardless, we will +help you find the best place for your feature and to make it possible to do +what you want. + +We, the Satpy maintainers, expect you to be patient, understanding, and +respectful of both developers and users. Satpy can only be successful if +everyone in the community feels welcome. We also expect you to put in as +much work as you expect out of us. There is no dedicated PyTroll or Satpy +support team, so there may be times when you need to do most of the work +to solve your problem (trying different test cases, environments, etc). + +Being respectful includes following the style of the existing code for any +code submissions. Please follow +`PEP8 `_ style guidelines and +limit lines of code to 80 characters whenever possible and when it doesn't +hurt readability. Satpy follows +`Google Style Docstrings `_ +for all code API documentation. When in doubt use the existing code as a +guide for how coding should be done. + +.. _dev_help: + +How do I get help? +================== + +The Satpy developers (and all other PyTroll package developers) monitor the: + +- `Mailing List `_ +- `Slack chat `_ (get an `invitation `_) +- `GitHub issues `_ + +How do I submit my changes? +=========================== + +Any contributions should start with some form of communication (see above) to +let the Satpy maintainers know how you plan to help. The larger the +contribution the more important direct communication is so everyone can avoid +duplicate code and wasted time. +After talking to the Satpy developers any additional work like code or +documentation changes can be provided as a GitHub +`Pull Request `_. + +To make sure that your code complies with the pytroll python standard, you can +run the `flake8 `_ linter on your changes +before you submit them, or even better install a pre-commit hook that runs the +style check for you. To this aim, we provide a configuration file for the +`pre-commit `_ tool, that you can install with eg:: + + pip install pre-commit + pre-commit install + +running from your base satpy directory. This will automatically check code style for every commit. + +Code of Conduct +=============== + +Satpy follows the same code of conduct as the PyTroll project. For reference +it is copied to this repository in +`CODE_OF_CONDUCT.md `_. + +As stated in the PyTroll home page, this code of conduct applies to the +project space (GitHub) as well as the public space online and offline when +an individual is representing the project or the community. Online examples +of this include the PyTroll Slack team, mailing list, and the PyTroll twitter +account. 
+
+.. _dev_help:
+
+How do I get help?
+==================
+
+The Satpy developers (and all other PyTroll package developers) monitor the:
+
+- `Mailing List `_
+- `Slack chat `_ (get an `invitation `_)
+- `GitHub issues `_
+
+How do I submit my changes?
+===========================
+
+Any contribution should start with some form of communication (see above) to
+let the Satpy maintainers know how you plan to help. The larger the
+contribution, the more important direct communication is, so everyone can
+avoid duplicate code and wasted time.
+After talking to the Satpy developers, any additional work like code or
+documentation changes can be provided as a GitHub
+`Pull Request `_.
+
+To make sure that your code complies with the PyTroll Python coding
+standard, you can run the `flake8 `_ linter on your
+changes before you submit them, or, even better, install a pre-commit hook
+that runs the style check for you. To that end, we provide a configuration
+file for the `pre-commit `_ tool, which you can install
+with, e.g.::
+
+    pip install pre-commit
+    pre-commit install
+
+run from your base satpy directory. This will automatically check code style
+for every commit.
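+
+Once the hook is installed, the checks run on every ``git commit``. If you
+want to check all of your files at once without committing, you can also
+trigger the configured hooks manually, for example::
+
+    pre-commit run --all-files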
+
+Code of Conduct
+===============
+
+Satpy follows the same code of conduct as the PyTroll project. For reference
+it is copied to this repository in
+`CODE_OF_CONDUCT.md `_.
+
+As stated on the PyTroll home page, this code of conduct applies to the
+project space (GitHub) as well as the public space online and offline when
+an individual is representing the project or the community. Online examples
+of this include the PyTroll Slack team, mailing list, and the PyTroll Twitter
+account. This code of conduct also applies to in-person situations like
+PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when
+the project is being represented.
+
+Any violations of this code of conduct will be handled by the core maintainers
+of the project, including David Hoese, Martin Raspaud, and Adam Dybbroe.
+If you wish to report one of the maintainers for a violation and are
+not comfortable with them seeing it, please contact one or more of the other
+maintainers to report the violation. Responses to violations will be
+determined by the maintainers and may include one or more of the following:
+
+- Verbal warning
+- Request for a public apology
+- Temporary or permanent ban from in-person events
+- Temporary or permanent ban from online communication (Slack, mailing list, etc.)
+
+For details see the official
+`code of conduct document `_.

From dcf3d2a5da2d4ba491b9c505560dbb34740932e9 Mon Sep 17 00:00:00 2001
From: GMV - Emmanuel Roche
Date: Tue, 27 Aug 2024 22:02:24 +0200
Subject: [PATCH 68/71] Adding link in contributing.rst

---
 CONTRIBUTING.rst | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 4cb160f8bd..0eb7120a14 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,4 +1,12 @@
-.. If reading this .rst file as plain text please refer directly
-   to the file included below for contribution guidelines.
+Contributing Guidelines
+=======================
+
+For detailed contribution guidelines, please see our `Developer's Guide on Contributing `_.
+
+.. note::
+   If you're viewing this file on GitHub, you can access the full contribution guidelines by clicking the link above.
+
+.. If you're reading this file locally or in a context where file inclusions are supported,
+   the following include statement will display the guidelines below:
 
 .. include:: doc/source/dev_guide/CONTRIBUTING.rst

From b5d2f5126ab3ef7bfa34527fd4f07f3b478b6c0e Mon Sep 17 00:00:00 2001
From: GMV - Emmanuel Roche
Date: Wed, 28 Aug 2024 08:33:54 +0200
Subject: [PATCH 69/71] Updating contributing.rst content

---
 CONTRIBUTING.rst | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 0eb7120a14..44d272b198 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,12 +1,9 @@
 Contributing Guidelines
 =======================
 
-For detailed contribution guidelines, please see our `Developer's Guide on Contributing `_.
+For detailed contribution guidelines, please see our `Developer's Guide on ReadTheDocs `_.
 
-.. note::
-   If you're viewing this file on GitHub, you can access the full contribution guidelines by clicking the link above.
-
-.. If you're reading this file locally or in a context where file inclusions are supported,
-   the following include statement will display the guidelines below:
-
-.. include:: doc/source/dev_guide/CONTRIBUTING.rst
+.. If you're reading this file locally as plain text, you may also
+   refer directly to the file doc/source/dev_guide/CONTRIBUTING.rst for
+   any unmerged/pending changes to the contribution guidelines.

From 92153226e5feb1bf343444df1d5dea38a532d580 Mon Sep 17 00:00:00 2001
From: GMV - Emmanuel Roche
Date: Wed, 28 Aug 2024 20:40:54 +0200
Subject: [PATCH 70/71] Removing README element

---
 README | 1 -
 1 file changed, 1 deletion(-)
 delete mode 120000 README

diff --git a/README b/README
deleted file mode 120000
index a1320b1b4a..0000000000
--- a/README
+++ /dev/null
@@ -1 +0,0 @@
-README.rst

From 104e8fee0a2ce159f6db8f4aaf3fc09b0bf06446 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 1 Sep 2024 10:19:38 +0000
Subject: [PATCH 71/71] Bump pypa/gh-action-pypi-publish from 1.9.0 to 1.10.0

Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.9.0 to 1.10.0.
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases)
- [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.9.0...v1.10.0)

---
updated-dependencies:
- dependency-name: pypa/gh-action-pypi-publish
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]

---
 .github/workflows/deploy-sdist.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml
index 36ec551ea1..130b3a43b8 100644
--- a/.github/workflows/deploy-sdist.yaml
+++ b/.github/workflows/deploy-sdist.yaml
@@ -23,7 +23,7 @@ jobs:
       - name: Publish package to PyPI
         if: github.event.action == 'published'
-        uses: pypa/gh-action-pypi-publish@v1.9.0
+        uses: pypa/gh-action-pypi-publish@v1.10.0
         with:
           user: __token__
           password: ${{ secrets.pypi_password }}