Spinncer update #2

Draft: wants to merge 14 commits into master
7 changes: 3 additions & 4 deletions .gitignore
@@ -1,8 +1,7 @@
application_generated_data_files/*
reports/*
*application_generated_data_files/
*reports/
__pycache__/
.idea/
spynnaker.cfg
*.egg-info
*.npz
*.out
*.out
8 changes: 6 additions & 2 deletions requirements.txt
@@ -1,5 +1,9 @@
numpy==1.15.4
scipy==1.3.0
numpy > 1.13, < 1.20; python_version == '3.6'
numpy > 1.13, < 1.21; python_version == '3.7'
numpy; python_version >= '3.8'
scipy >= 0.16.0, < 1.6; python_version == '3.6'
scipy >= 0.16.0, < 1.8; python_version == '3.7'
scipy >= 0.16.0; python_version >= '3.8'
brian2
neo==0.7.1
pynn
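
The new pins use PEP 508 environment markers, so pip keeps only the numpy/scipy line whose marker matches the running interpreter. A minimal sketch of how those markers resolve, assuming the packaging library is installed (it is not part of this PR):

from packaging.requirements import Requirement

# At most one of these requirement lines evaluates to True for the
# current interpreter; that is the pin pip applies.
for line in ("numpy > 1.13, < 1.20; python_version == '3.6'",
             "numpy > 1.13, < 1.21; python_version == '3.7'",
             "numpy; python_version >= '3.8'"):
    req = Requirement(line)
    print(req.name, req.specifier, req.marker.evaluate())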
10 changes: 7 additions & 3 deletions setup.py
@@ -2,8 +2,12 @@


install_requires = [
"numpy==1.15.4",
"scipy==1.3.0",
"numpy > 1.13, < 1.20; python_version == '3.6'",
"numpy > 1.13, < 1.21; python_version == '3.7'",
"numpy; python_version >= '3.8'",
"scipy >= 0.16.0, < 1.6; python_version == '3.6'",
"scipy >= 0.16.0, < 1.8; python_version == '3.7'",
"scipy >= 0.16.0; python_version >= '3.8'",
"brian2",
"neo==0.7.1",
"pynn==0.9.5",
@@ -38,7 +42,7 @@

"Programming Language :: Python :: 3"
"Programming Language :: Python :: 3.7"

"Topic :: Scientific/Engineering",
]
)
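
A side note on the classifiers block: adjacent string literals in Python concatenate implicitly, so each classifier needs its trailing comma or two entries silently merge into one. A toy illustration (not part of the diff):

# Without a comma between the literals, the list holds a single,
# merged (and invalid) classifier string.
merged = [
    "Programming Language :: Python :: 3"
    "Programming Language :: Python :: 3.7"
]
print(len(merged))  # 1
print(merged[0])    # 'Programming Language :: Python :: 3Programming Language :: Python :: 3.7'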
23 changes: 23 additions & 0 deletions spinncer/cerebellum.py
@@ -16,6 +16,9 @@
import numpy as np
import h5py

from spynnaker.pyNN.extra_algorithms.splitter_components import (
SplitterAbstractPopulationVertexNeuronsSynapses)

from spinncer.analysis_common import get_plot_order
from spinncer.circuit import Circuit
from spinncer.utilities.constants import *
@@ -45,6 +48,9 @@ def __init__(self, sim, connectivity, stimulus_information, reporting=True,
neuron_model="IF_cond_exp", force_number_of_neurons=None,
input_spikes=None,
rb_left_shifts=None,
use_split_model=False,
n_synapse_cores=1,
n_delay_slots=64,
no_loops=3,
round_input_spike_times=None,
id_remap=None,
@@ -78,6 +84,11 @@ def __init__(self, sim, connectivity, stimulus_information, reporting=True,
self.id_remap = id_remap
self.r_mem = r_mem

# Values for splitters
self.use_split_model = use_split_model
self.n_synapse_cores = n_synapse_cores
self.n_delay_slots = n_delay_slots

self.expected_max_spikes = expected_max_spikes
self.implicit_shift = implicit_shift
self.ensure_weight_is_representable = ensure_weight_is_representable
@@ -475,6 +486,18 @@ def build_populations(self, positions):
elif cell_model == "if_cond_alpha":
cell_model = self.sim.IF_cond_alpha

# Add splitters everywhere except for SSP populations
if self.use_split_model:
if cell_name in ["granule", "golgi", "stellate", "basket",
"purkinjie", "dcn"]:
print("Cell {} model {} using split synapse neuron model "
"with {} synapse cores and {} delay slots".format(
cell_name, cell_model, self.n_synapse_cores,
self.n_delay_slots))
additional_params["splitter"] = \
SplitterAbstractPopulationVertexNeuronsSynapses(
self.n_synapse_cores, self.n_delay_slots, False)

# Adding the population to the network
try:
self.populations[cell_name] = self.sim.Population(
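
For context, the splitter object ends up in the population's additional parameters. A minimal sketch of that wiring outside the Cerebellum class, assuming a working sPyNNaker install and mirroring the constructor arguments used in the diff (number of synapse cores, number of delay slots, and a final flag passed as False):

import pyNN.spiNNaker as sim
from spynnaker.pyNN.extra_algorithms.splitter_components import (
    SplitterAbstractPopulationVertexNeuronsSynapses)

sim.setup(timestep=1.0)
# One synapse core and 64 delay slots, matching the defaults passed in
# from cerebellum_experiment.py below.
splitter = SplitterAbstractPopulationVertexNeuronsSynapses(1, 64, False)
pop = sim.Population(100, sim.IF_cond_exp(), label="granule",
                     additional_parameters={"splitter": splitter})
sim.end()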
18 changes: 13 additions & 5 deletions spinncer/cerebellum_analysis.py
@@ -417,6 +417,8 @@ def spike_analysis(results_file, fig_folder,
try:
for pop in plot_order:
curr_spikes = neo_all_spikes[pop].segments[0].spiketrains
print("pop, curr_spikes: ", pop, curr_spikes)
print("elephant, timestep, simtime", elephant_timestep, elephant_simtime)
curr_inst_rates = \
elephant.statistics.instantaneous_rate(
curr_spikes,
@@ -437,6 +439,9 @@
except:
traceback.print_exc()

print("elephant_instantaneous_rates: ", elephant_instantaneous_rates)
print("all_neurons: ", all_neurons)

stim_period_start = {}
stim_period_end = {}
per_pop_stim_durations = {k: [] for k in plot_order}
@@ -465,6 +470,7 @@
# before, during and after stimulation
_filtered_spike_rates = np.zeros(stimulus_periods)
_spike_times = spikes[:, 1]
_spike_nids = spikes[:, 0]
# Initialise per_neuron_firing
per_neuron_firing[pop] = np.ones((all_neurons[pop],
stimulus_periods)) * -10
@@ -498,7 +504,7 @@
np.count_nonzero(_filtered_spike_times) / \
(current_period_duration * ms)
for nid in range(all_neurons[pop]):
_spikes_for_nid = spikes[spikes[:, 0] == nid][:, 1]
_spikes_for_nid = spikes[_spike_nids == nid][:, 1]
_no_spike_for_nid = np.count_nonzero(np.logical_and(
_spikes_for_nid >= time_filter_pre,
_spikes_for_nid < time_filter_post))
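
The hunk above precomputes _spike_nids = spikes[:, 0] once per population, so the per-neuron loop reuses the cached id column instead of re-slicing the full spike array on every iteration. A self-contained toy version of the same counting pattern (hypothetical data):

import numpy as np

# spikes: (n_spikes, 2) array of (neuron id, spike time) pairs.
spikes = np.array([[0, 1.0], [1, 2.5], [0, 3.0], [2, 7.5]])
_spike_nids = spikes[:, 0]
time_filter_pre, time_filter_post = 0.0, 5.0
for nid in range(3):
    _spikes_for_nid = spikes[_spike_nids == nid][:, 1]
    _no_spike_for_nid = np.count_nonzero(np.logical_and(
        _spikes_for_nid >= time_filter_pre,
        _spikes_for_nid < time_filter_post))
    print(nid, _no_spike_for_nid)  # 0 -> 2, 1 -> 1, 2 -> 0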
@@ -712,8 +718,10 @@ def spike_analysis(results_file, fig_folder,
print("Connection Name ")
print("{:27} | {:10} | ".format("Connection Name", "Def. W"),
"{:20}".format("SpiNN. W"))
sorted_keys = list(final_connectivity.keys())
sorted_keys.sort()
sorted_keys = []
if final_connectivity != []:
sorted_keys = list(final_connectivity.keys())
sorted_keys.sort()
for key in sorted_keys:
conn = conn_dict[key]
mean = np.abs(np.mean(conn[:, 2]))
@@ -802,8 +810,8 @@ def spike_analysis(results_file, fig_folder,
# Report statistics here
for key, v in all_voltages.items():
nid, tstep = np.unravel_index(np.argmax(v, axis=None), v.shape)
print("{:20}-> neuron {:>8d} received {:>6d}".format(
key, int(nid), int(np.max(v))),
print("{:20}-> neuron {:>8d} received {:4.2f}".format(
key, int(nid), np.max(v)),
"nA in timestep #{:8d}".format(int(tstep)))
# THIS IS BROKEN! it will be removed soon
# # Also treat voltage as if it's a piggybacked value packaging
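
The reporting loop above finds the peak input with np.argmax over the flattened array and np.unravel_index to map it back to a (neuron, timestep) pair, now printed with two decimals instead of being cast to int. A toy run of the same idiom (made-up data and label):

import numpy as np

v = np.array([[0.10, 0.30, 0.20],
              [0.40, 0.90, 0.05]])  # shape: (neurons, timesteps)
nid, tstep = np.unravel_index(np.argmax(v, axis=None), v.shape)
print("{:20}-> neuron {:>8d} received {:4.2f}".format(
    "toy_projection", int(nid), np.max(v)),
    "nA in timestep #{:8d}".format(int(tstep)))  # neuron 1, 0.90 nA, timestep 1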
20 changes: 15 additions & 5 deletions spinncer/cerebellum_experiment.py
@@ -12,11 +12,7 @@
# import simulator
spinnaker_sim = False
if str.lower(args.simulator) in ["spinnaker", "spynnaker"]:
try:
# this might be deprecated soon
import spynnaker8 as sim
except ImportError:
import pyNN.spynnaker as sim
import pyNN.spiNNaker as sim
spinnaker_sim = True
elif str.lower(args.simulator) in ["nest"]:
import pyNN.nest as sim
@@ -32,6 +28,7 @@
import pylab as plt
import os
import traceback
from spinncer.provenance_analysis import save_provenance_to_file_from_database

# Record SCRIPT start time (wall clock)
start_time = plt.datetime.datetime.now()
@@ -137,6 +134,8 @@
expected_max_spikes = EXPECTED_MAX_SPIKES_200
canonical_rbls = RMEM_RBLS if args.r_mem else VANILLA_RBLS
print("Canonical ring buffer left shifts:", canonical_rbls)
use_split_model = True
print("use_split_model is ", use_split_model)

# Instantiate a Cerebellum
cerebellum_circuit = Cerebellum(
@@ -149,6 +148,9 @@
neuron_model=args.neuron_model,
input_spikes=input_spikes,
rb_left_shifts=canonical_rbls,
use_split_model=use_split_model,
n_synapse_cores=1,
n_delay_slots=64,
no_loops=args.loops_grc,
round_input_spike_times=round_spike_times,
id_remap=args.id_remap,
@@ -234,6 +236,9 @@
recorded_spikes = {}
other_recordings = {}

# Store simulator name and run
sim_name = sim.name

# Record simulation start time (wall clock)
sim_start_time = plt.datetime.datetime.now()
current_error = None
@@ -311,6 +316,11 @@
# Appropriately end the simulation
sim.end()

# Get the provenance from the simulator's database
structured_provenance_filename = "structured_provenance.csv"
save_provenance_to_file_from_database(
structured_provenance_filename, sim_name)

# Analysis time!
spike_analysis(results_file=results_file, fig_folder=args.figures_dir,
worst_case=args.worst_case_spikes, delay_sensitive=True)
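
One thing worth checking in the new provenance step: save_provenance_to_file_from_database reads the simulator's provenance database, which only exists for SpiNNaker runs, while the script can also be launched with pyNN.nest. A hedged sketch of guarding the call with the spinnaker_sim flag already defined at the top of the file (an assumption, not part of the diff):

# Assumed guard, not verified against the PR: only SpiNNaker runs
# produce the provenance database, so skip the export for NEST runs.
if spinnaker_sim:
    structured_provenance_filename = "structured_provenance.csv"
    save_provenance_to_file_from_database(
        structured_provenance_filename, sim_name)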