
Commit

fixed pep8 issues
kperrynrel committed Feb 8, 2024
1 parent 93a59d5 commit bb800d9
Showing 3 changed files with 17 additions and 14 deletions.
docs/examples/pvfleets-qa-pipeline/pvfleets-irradiance-qa.py: 7 changes (4 additions, 3 deletions)
@@ -34,7 +34,7 @@
pvanalytics_dir = pathlib.Path(pvanalytics.__file__).parent
file = pvanalytics_dir / 'data' / 'system_15_poa_irradiance.parquet'
time_series = pd.read_parquet(file)
-time_series.set_index('measured_on', inplace = True)
+time_series.set_index('measured_on', inplace=True)
time_series.index = pd.to_datetime(time_series.index)
time_series = time_series['poa_irradiance__484']
latitude = 39.7406
@@ -55,7 +55,8 @@
# Now, let's run basic data checks to identify stale and abnormal/outlier
# data in the time series. Basic data checks include the following steps:
#
-# 1) Flatlined/stale data periods (:py:func:`pvanalytics.quality.gaps.stale_values_round`)
+# 1) Flatlined/stale data periods
+# (:py:func:`pvanalytics.quality.gaps.stale_values_round`)
# 2) Negative irradiance data
# 3) "Abnormal" data periods, which are defined as less than 10% of the
# daily time series mean OR greater than 1300
@@ -134,7 +135,7 @@
# Filter the time series, taking out all of the issues
issue_mask = ((~stale_data_mask) & (~negative_mask) & (~erroneous_mask) &
(~out_of_bounds_mask) & (~zscore_outlier_mask))
-time_series =time_series[issue_mask]
+time_series = time_series[issue_mask]
time_series = time_series.asfreq(data_freq)

# Visualize the time series post-filtering
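
For context on what this file does: the comments in the hunks above list the basic irradiance QA steps (flatlined/stale periods, negative readings, "abnormal" readings below 10% of the daily mean or above 1300 W/m^2, and z-score outliers). Below is a minimal sketch of those checks built on the `time_series` loaded by the script; the `window`, `decimals`, and `zmax` values are illustrative defaults rather than the published example's exact settings, and the physical-limits (out-of-bounds) check used in the full pipeline is omitted here.

from pvanalytics.quality import gaps
from pvanalytics.quality.outliers import zscore

# Flag flatlined/stale periods (True = stale sample).
stale_data_mask = gaps.stale_values_round(time_series, window=6, decimals=3)

# Flag negative irradiance readings.
negative_mask = (time_series < 0)

# Flag "abnormal" readings: below 10% of the daily mean or above 1300 W/m^2.
daily_mean = time_series.groupby(time_series.index.date).transform('mean')
erroneous_mask = (time_series < 0.1 * daily_mean) | (time_series > 1300)

# Flag statistical outliers via z-score (zmax chosen for illustration).
zscore_outlier_mask = zscore(time_series, zmax=4, nan_policy='omit')

# Keep only samples that pass every check.
issue_mask = ~(stale_data_mask | negative_mask | erroneous_mask
               | zscore_outlier_mask)
time_series = time_series[issue_mask]
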
docs/examples/pvfleets-qa-pipeline/pvfleets-power-qa.py: 17 changes (9 additions, 8 deletions)
@@ -37,7 +37,7 @@
pvanalytics_dir = pathlib.Path(pvanalytics.__file__).parent
file = pvanalytics_dir / 'data' / 'system_50_ac_power_2_full_DST.parquet'
time_series = pd.read_parquet(file)
-time_series.set_index('measured_on', inplace = True)
+time_series.set_index('measured_on', inplace=True)
time_series.index = pd.to_datetime(time_series.index)
time_series = time_series['ac_power_2']
latitude = 39.7406
@@ -58,7 +58,8 @@
# Now, let's run basic data checks to identify stale and abnormal/outlier
# data in the time series. Basic data checks include the following steps:
#
-# 1) Flatlined/stale data periods (:py:func:`pvanalytics.quality.gaps.stale_values_round`)
+# 1) Flatlined/stale data periods
+# (:py:func:`pvanalytics.quality.gaps.stale_values_round`)
# 2) Negative data
# 3) "Abnormal" data periods, which are defined as less than 10% of the
# daily time series mean
@@ -130,7 +131,7 @@

# Filter the time series, taking out all of the issues
issue_mask = ((~stale_data_mask) & (~negative_mask) &
-(~erroneous_mask) & (~zscore_outlier_mask))
+              (~erroneous_mask) & (~zscore_outlier_mask))

time_series = time_series[issue_mask]
time_series = time_series.asfreq(data_freq)
@@ -243,10 +244,10 @@
# Estimate the time shifts by comparing the modelled midday point to the
# measured midday point.
is_shifted, time_shift_series = shifts_ruptures(modeled_midday_series_daily,
-midday_series_daily,
-period_min=15,
-shift_min=15,
-zscore_cutoff=1.5)
+                                                midday_series_daily,
+                                                period_min=15,
+                                                shift_min=15,
+                                                zscore_cutoff=1.5)

# Create a midday difference series between modeled and measured midday, to
# visualize time shifts. First, resample each time series to daily frequency,
@@ -432,7 +433,7 @@
# PVLib: :py:func:`pvlib.iotools.get_psm3`
file = pvanalytics_dir / 'data' / 'system_50_ac_power_2_full_DST_psm3.parquet'
psm3 = pd.read_parquet(file)
-psm3.set_index('index', inplace = True)
+psm3.set_index('index', inplace=True)
psm3.index = pd.to_datetime(psm3.index)

psm3 = psm3.reindex(pd.date_range(psm3.index[0],
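
One step in pvfleets-power-qa.py worth unpacking is the time-shift check: the script compares a modeled daily midday point against the measured one with :py:func:`pvanalytics.quality.time.shifts_ruptures` (which relies on the optional ruptures package). Below is a minimal, self-contained sketch of that call on synthetic daily midday series expressed in minutes since midnight; the synthetic values, the random seed, and the 60-minute block (mimicking an unapplied DST transition) are purely illustrative.

import numpy as np
import pandas as pd
from pvanalytics.quality.time import shifts_ruptures

# Daily "midday" times in minutes since midnight for ~4 months of data.
days = pd.date_range('2022-01-01', periods=120, freq='D')
modeled_midday = pd.Series(725.0, index=days)  # modeled solar noon near 12:05

# Measured midday tracks the model with small noise, then jumps by 60 minutes
# for the last 40 days (e.g. a data logger that never switched to DST).
rng = np.random.default_rng(0)
measured_midday = modeled_midday + rng.normal(0, 2, size=len(days))
measured_midday.iloc[-40:] += 60

# Same call pattern as in the diff above: modeled series first, measured second.
is_shifted, time_shift_series = shifts_ruptures(modeled_midday,
                                                measured_midday,
                                                period_min=15,
                                                shift_min=15,
                                                zscore_cutoff=1.5)
# is_shifted flags the days in the shifted block; time_shift_series reports the
# estimated offset (about 60 minutes in magnitude for those days).
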
docs/examples/pvfleets-qa-pipeline/pvfleets-temperature-qa.py: 7 changes (4 additions, 3 deletions)
@@ -28,9 +28,9 @@
# This data is timezone-localized.

pvanalytics_dir = pathlib.Path(pvanalytics.__file__).parent
file = "C:/Users/kperry/Documents/source/repos/pvanalytics/pvanalytics/data/system_4_module_temperature.parquet"#pvanalytics_dir / 'data' / 'system_4_module_temperature.parquet'
file = pvanalytics_dir / 'data' / 'system_4_module_temperature.parquet'
time_series = pd.read_parquet(file)
-time_series.set_index('index', inplace = True)
+time_series.set_index('index', inplace=True)
time_series.index = pd.to_datetime(time_series.index)
time_series = time_series['module_temp_1']
latitude = 39.7406
@@ -54,7 +54,8 @@
# Now, let's run basic data checks to identify stale and abnormal/outlier
# data in the time series. Basic data checks include the following steps:
#
-# 1) Flatlined/stale data periods (:py:func:`pvanalytics.quality.gaps.stale_values_round`)
+# 1) Flatlined/stale data periods
+# (:py:func:`pvanalytics.quality.gaps.stale_values_round`)
# 2) "Abnormal" data periods, which are out of the temperature limits of
# -40 to 185 deg C. Additional checks based on thresholds are applied
# depending on the type of temperature sensor (ambient or module)
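
To make the temperature checks listed above concrete, here is a minimal sketch that flags stale periods with :py:func:`pvanalytics.quality.gaps.stale_values_round` and readings outside the -40 to 185 deg C limits with a plain pandas comparison, operating on the `time_series` loaded by the script. The sensor-type-specific threshold checks mentioned in the comments are omitted, and the window/decimals settings are illustrative defaults rather than the published example's exact values.

from pvanalytics.quality import gaps

# Flag flatlined/stale module-temperature periods (True = stale sample).
stale_data_mask = gaps.stale_values_round(time_series, window=6, decimals=3)

# Flag readings outside the -40 to 185 deg C physical limits.
out_of_limits_mask = (time_series < -40) | (time_series > 185)

# Keep only samples that pass both checks.
time_series = time_series[~(stale_data_mask | out_of_limits_mask)]
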
