Skip to content

Commit

Permalink
Merge pull request #21 from smartie2076/25.4_SDG_Leicester_intermediate
Browse files Browse the repository at this point in the history
Including release v2.0 into master
  • Loading branch information
smartie2076 authored May 3, 2019
2 parents ea62808 + 1c79405 commit f6c7266
Show file tree
Hide file tree
Showing 34 changed files with 53,844 additions and 35,574 deletions.
38 changes: 27 additions & 11 deletions A_main_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@


import pprint as pp

import os, sys
from oemof.tools import logger
import logging
# Logging
Expand All @@ -17,21 +17,28 @@
#screen_level=logging.DEBUG,
file_level=logging.DEBUG)

logging.info('\n \n MICRO GRID TOOL 1.0 \n Version: 28.03.2019 \n Coded by: Martha M. Hoffmann \n Reiner Lemoine Institute (Berlin) \n \n ')
logging.info('\n \n MICRO GRID TOOL 2.0 '
'\n Version: 26.04.2019 (SDG Leicester) '
'\n Coded by: Martha M. Hoffmann '
'\n Reiner Lemoine Institute (Berlin) \n \n ')

###############################################################################
# Get values from excel_template: #
# * Experiments: Settings, project sites #
# * List of simulated cases #
# Get values from excel_template called in terminal #
# python3 A_main_script.py PATH/file.xlsx
###############################################################################

os.system('clear')
input_excel_file = str(sys.argv[1])
#input_excel_file = './inputs/input_template_excel.xlsx'

#-------- Get all settings ---------------------------------------------------#
# General settings, general parameters, sensitivity parameters, project site #
# data including timeseries (no noise, not clipped to evaluated timeframe #
#-----------------------------------------------------------------------------#

from B_read_from_files import excel_template
settings, parameters_constant_values, parameters_sensitivity, project_site_s, case_definitions = \
excel_template.settings()
excel_template.settings(input_excel_file)

#---- Define all sensitivity_experiment_s, define result parameters ----------#
from C_sensitivity_experiments import generate_sensitvitiy_experiments, get_names
Expand Down Expand Up @@ -78,20 +85,21 @@
from G0_oemof_simulate import oemof_simulate

experiment_count = 0
total_number_of_simulations = settings['total_number_of_experiments'] * len(case_list)

for experiment in sensitivity_experiment_s:

experiment_count = experiment_count + 1
capacities_oem = {}

if 'grid_availability' in sensitivity_experiment_s[experiment].keys():
logging.debug('Using grid availability as included in timeseries file of project location.')
pass # grid availability timeseries from file already included in data
# grid availability timeseries from file already included in data
else:
# extend experiment with blackout timeseries according to blackout parameters
logging.debug('Using grid availability timeseries that was randomly generated.')
blackout_experiment_name = get_names.blackout_experiment_name(sensitivity_experiment_s[experiment])
sensitivity_experiment_s[experiment].update({'grid_availability': sensitivity_grid_availability[blackout_experiment_name]})
sensitivity_experiment_s[experiment].update({'grid_availability':
sensitivity_grid_availability[blackout_experiment_name]})

###############################################################################
# Simulations of all cases #
Expand All @@ -106,10 +114,11 @@
# Creating, simulating and storing micro grid energy systems with oemof #
# According to parameters set beforehand #
###############################################################################
experiment_count = experiment_count + 1
logging.info(
'Starting simulation of case ' + specific_case + ', '
+ 'project site ' + sensitivity_experiment_s[experiment]['project_site_name'] + ', '
+ 'experiment no. ' + str(experiment_count) + '/'+ str(settings['total_number_of_experiments'] * len(case_list)) + '...')
+ 'experiment no. ' + str(experiment_count) + '/'+ str(total_number_of_simulations) + '...')

# Run simulation, evaluate results
oemof_results = oemof_simulate.run(sensitivity_experiment_s[experiment], experiment_case_dict)
Expand All @@ -128,7 +137,14 @@
# Extend overall results dataframe with simulation results
overall_results = helpers.store_result_matrix(overall_results, sensitivity_experiment_s[experiment], oemof_results)
# Writing DataFrame with all results to csv file
overall_results.to_csv(sensitivity_experiment_s[experiment]['output_folder'] + '/results.csv') # moved from below
overall_results.to_csv(sensitivity_experiment_s[experiment]['output_folder'] + '/' + sensitivity_experiment_s[experiment]['output_file'] + '.csv') # moved from below

# Estimating simulation time left - more precise for greater number of simulations
logging.info(' Estimated simulation time left: '
+ str(round(sum(overall_results['evaluation_time'][:])
* (total_number_of_simulations-experiment_count)/experiment_count/60,1))
+ ' minutes.')
print('\n')

if settings['display_experiment'] == True:
logging.info('The experiment with following parameters has been analysed:')
Expand Down
98 changes: 56 additions & 42 deletions B_read_from_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,76 +4,45 @@
import shutil
# requires xlrd

class csv_input():

    def from_file(project_site):
        """Read the timeseries CSV referenced by *project_site* and attach its
        columns (demand, PV, wind, optional grid availability) to the dict
        in place. Returns None.
        """
        data_set = pd.read_csv(project_site['timeseries_file'], sep=project_site['seperator'])

        # Optional time column: the string 'None' marks "no time index".
        title_time = project_site['title_time']
        if title_time == 'None':
            file_index = None
        else:
            file_index = pd.DatetimeIndex(data_set[title_time].values)

        # Attach data to each project site analysed. Does NOT apply noise here,
        # as noise might be subject to sensitivity analysis.
        # Necessary: all of these input timeseries in the same unit (kWh).
        project_site['demand'] = data_set[project_site['title_demand']]
        # Reading pv_generation values - adjust to panel area or kWp and if in Wh!
        project_site['pv_generation_per_kWp'] = data_set[project_site['title_pv']]
        project_site['wind_generation_per_kW'] = data_set[project_site['title_wind']]

        # Grid availability is optional; only attach it when a column title is given.
        if project_site['title_grid_availability'] != 'None':
            project_site['grid_availability'] = data_set[project_site['title_grid_availability']]

        project_site['file_index'] = file_index
        return

class excel_template():

def settings():
def settings(input_excel_file):
#######################################
# Reads all input from excel template #
#######################################

# location of excel template
file = './inputs/input_template_excel.xlsx'
# Name of tabs
sheet_settings = 'settings'
sheet_input_constant = 'input_constant'
sheet_input_sensitivity = 'input_sensitivity'
sheet_project_sites = 'project_sites'
sheet_case_definitions = 'case_definitions'

settings = excel_template.get_settings(file, sheet_settings)
settings = excel_template.get_settings(input_excel_file, sheet_settings)

# -------- Check for, create or empty results directory -----------------------#
from Z_output_functions import output
output.check_output_directory(settings)
output.check_output_directory(settings, input_excel_file)

parameters_constant_units, parameters_constant_values = excel_template.get_parameters_constant(file, sheet_input_constant)
parameters_sensitivity = excel_template.get_parameters_sensitivity(file, sheet_input_sensitivity)
parameters_constant_units, parameters_constant_values = excel_template.get_parameters_constant(input_excel_file, sheet_input_constant)
parameters_sensitivity = excel_template.get_parameters_sensitivity(input_excel_file, sheet_input_sensitivity)

project_site_s = excel_template.get_project_sites(file, sheet_project_sites)
project_site_s = excel_template.get_project_sites(input_excel_file, sheet_project_sites)

necessity_for_blackout_timeseries_generation=False
# extend by timeseries
for project_site in project_site_s:
# copy input timeseries to new location
path_from = os.path.abspath(project_site_s[project_site]['timeseries_file'])
path_to = os.path.abspath(settings['output_folder'] + '/inputs/'+ project_site + '.csv')
path_from = os.path.abspath(settings['input_folder_timeseries'] + '/' + project_site_s[project_site]['timeseries_file'])
path_to = os.path.abspath(settings['output_folder'] + '/inputs/'+ project_site_s[project_site]['timeseries_file'])
shutil.copy(path_from, path_to)

csv_input.from_file(project_site_s[project_site])
csv_input.from_file(project_site_s[project_site], path_from)
if project_site_s[project_site]['title_grid_availability'] == 'None':
necessity_for_blackout_timeseries_generation=True

settings.update({'necessity_for_blackout_timeseries_generation': necessity_for_blackout_timeseries_generation})
case_definitions = excel_template.get_case_definitions(file, sheet_case_definitions)
case_definitions = excel_template.get_case_definitions(input_excel_file, sheet_case_definitions)
return settings, parameters_constant_values, parameters_sensitivity, project_site_s, case_definitions

def get_data(file, sheet, header_row, index_column, last_column):
Expand Down Expand Up @@ -159,4 +128,49 @@ def get_case_definitions(file, sheet_project_sites):
case_definitions[case].update({'max_shortage': float(case_definitions[case]['max_shortage'])})

case_definitions[case].update({'number_of_equal_generators': int(case_definitions[case]['number_of_equal_generators'])})
return case_definitions
return case_definitions

class csv_input():

    def from_file(project_site, path_from):
        """Read the timeseries CSV of a project site (at *path_from*) and
        attach its columns to the *project_site* dict in place.

        Optional columns (AC/DC demand, PV, wind) whose title entry is the
        string 'None' are replaced by an all-zero series so downstream code
        can rely on the keys always being present. The zero series is sized
        to the file length (the previous implementation hard-coded 8760
        entries, which silently mismatched any timeseries that is not exactly
        one year of hourly values).

        Returns None; all results are stored on *project_site*.
        """
        data_set = pd.read_csv(path_from, sep=project_site['seperator'])

        # Optional time column: the string 'None' marks "no time index".
        if project_site['title_time'] == 'None':
            file_index = None
        else:
            file_index = pd.DatetimeIndex(data_set[project_site['title_time']].values)

        def zero_series():
            # Fresh all-zero fallback series, one entry per row of the file,
            # created per column so the series are independent objects.
            return pd.Series([0 for i in range(0, len(data_set.index))])

        # Attach data to each project site analysed. Does NOT apply noise here,
        # as noise might be subject to sensitivity analysis.
        # Necessary: all of these input timeseries in same unit (kWh).
        # The 'None' title allows some timeseries to be absent from the csv file.
        optional_columns = {
            'demand_ac': 'title_demand_ac',
            'demand_dc': 'title_demand_dc',
            # Reading pv_generation values - adjust to panel area or kWp and if in Wh!
            'pv_generation_per_kWp': 'title_pv',
            'wind_generation_per_kW': 'title_wind',
        }
        for key, title_key in optional_columns.items():
            title = project_site[title_key]
            if title != 'None':
                project_site.update({key: data_set[title]})
            else:
                project_site.update({key: zero_series()})

        # Grid availability has no zero fallback: its absence triggers random
        # blackout generation elsewhere, so the key is only set when present.
        if project_site['title_grid_availability'] != 'None':
            project_site.update({'grid_availability': data_set[project_site['title_grid_availability']]})

        project_site.update({'file_index': file_index})

        return
36 changes: 20 additions & 16 deletions C_sensitivity_experiments.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@ def get(settings, parameters_constant_values, parameters_sensitivity, project_si
# Get sensitivity_experiment_s for sensitivity analysis #
#######################################################
if settings['sensitivity_all_combinations'] == True:
sensitivitiy_experiments_s, number_of_project_sites, sensitivity_array_dict, total_number_of_experiments = \
sensitivitiy_experiment_s, number_of_project_sites, sensitivity_array_dict, total_number_of_experiments = \
generate_experiments.all_possible(settings, parameters_constant_values, parameters_sensitivity, project_sites)

elif settings['sensitivity_all_combinations'] == False:
sensitivitiy_experiments_s, number_of_project_sites, sensitivity_array_dict, total_number_of_experiments = \
sensitivitiy_experiment_s, number_of_project_sites, sensitivity_array_dict, total_number_of_experiments = \
generate_experiments.with_base_case(settings, parameters_constant_values, parameters_sensitivity, project_sites)

else:
Expand All @@ -40,18 +40,26 @@ def get(settings, parameters_constant_values, parameters_sensitivity, project_si

logging.info(message[:-2])

for experiment in sensitivitiy_experiments_s:
for experiment in sensitivitiy_experiment_s:
# scaling demand according to scaling factor - used for tests regarding tool application
sensitivitiy_experiment_s[experiment].update({
'demand_ac': sensitivitiy_experiment_s[experiment]['demand_ac'] *
sensitivitiy_experiment_s[experiment]['demand_ac_scaling_factor']})
sensitivitiy_experiment_s[experiment].update({
'demand_dc': sensitivitiy_experiment_s[experiment]['demand_dc'] *
sensitivitiy_experiment_s[experiment]['demand_dc_scaling_factor']})

# Add economic values to sensitivity sensitivity_experiment_s
process_input.economic_values(sensitivitiy_experiments_s[experiment])
process_input.economic_values(sensitivitiy_experiment_s[experiment])
# Give a file name to the sensitivity_experiment_s
get_names.experiment_name(sensitivitiy_experiments_s[experiment], sensitivity_array_dict,
get_names.experiment_name(sensitivitiy_experiment_s[experiment], sensitivity_array_dict,
number_of_project_sites)

if 'comments' not in sensitivitiy_experiments_s[experiment]:
sensitivitiy_experiments_s[experiment].update({'comments': ''})
if 'comments' not in sensitivitiy_experiment_s[experiment]:
sensitivitiy_experiment_s[experiment].update({'comments': ''})

if sensitivitiy_experiments_s[experiment]['storage_soc_initial']=='None':
sensitivitiy_experiments_s[experiment].update({'storage_soc_initial': None})
if sensitivitiy_experiment_s[experiment]['storage_soc_initial']=='None':
sensitivitiy_experiment_s[experiment].update({'storage_soc_initial': None})
#######################################################
# Get blackout_experiment_s for sensitvitiy #
#######################################################
Expand All @@ -60,9 +68,9 @@ def get(settings, parameters_constant_values, parameters_sensitivity, project_si
= generate_experiments.blackout(sensitivity_array_dict, parameters_constant_values, settings)

# save all Experiments with all used input data to csv
csv_dict = deepcopy(sensitivitiy_experiments_s)
csv_dict = deepcopy(sensitivitiy_experiment_s)
# delete timeseries to make file readable
timeseries_names=['demand', 'pv_generation_per_kWp', 'wind_generation_per_kW', 'grid_availability']
timeseries_names=['demand_ac', 'demand_dc', 'pv_generation_per_kWp', 'wind_generation_per_kW', 'grid_availability']
for entry in csv_dict:
for series in timeseries_names:
if series in csv_dict[entry].keys():
Expand All @@ -89,7 +97,7 @@ def get(settings, parameters_constant_values, parameters_sensitivity, project_si
settings.update({'total_number_of_experiments': total_number_of_experiments})


return sensitivitiy_experiments_s, blackout_experiment_s, title_overall_results, names_sensitivities
return sensitivitiy_experiment_s, blackout_experiment_s, title_overall_results, names_sensitivities

class generate_experiments():
def all_possible(settings, parameters_constant_values, parameters_sensitivity, project_site_s):
Expand Down Expand Up @@ -278,8 +286,6 @@ def combinations_around_base(sensitivity_array_dict, universal_parameters, proje
sensitivity_experiment_s[experiment_number].update(deepcopy(project_site_s[project_site]))
# overwrite base case value by sensitivity value (only in case specific parameter is changed)
sensitivity_experiment_s[experiment_number].update({key: sensitivity_array_dict[key][interval_entry]})
# scaling demand according to scaling factor - used for tests regarding tool application
sensitivity_experiment_s[experiment_number].update({'demand': sensitivity_experiment_s[experiment_number]['demand'] * sensitivity_experiment_s[experiment_number]['demand_scaling_factor']})

elif sensitivity_array_dict[key][interval_entry] == key_value and defined_base == False:
# Defining scenario only with base case values for universal parameter / specific to project site (once!)
Expand All @@ -288,8 +294,6 @@ def combinations_around_base(sensitivity_array_dict, universal_parameters, proje
sensitivity_experiment_s[experiment_number].update({key: key_value})
sensitivity_experiment_s[experiment_number].update({'project_site_name': project_site})
sensitivity_experiment_s[experiment_number].update(deepcopy(project_site_s[project_site]))
# scaling demand according to scaling factor - used for tests regarding tool application
sensitivity_experiment_s[experiment_number].update({'demand': sensitivity_experiment_s[experiment_number]['demand'] * sensitivity_experiment_s[experiment_number]['demand_scaling_factor']})
defined_base = True
sensitivity_experiment_s[experiment_number].update({'comments': 'Base case, '})

Expand Down
Loading

0 comments on commit f6c7266

Please sign in to comment.