author     Jan Huwald <jh@sotun.de>   2012-05-07 19:53:27 (GMT)
committer  Jan Huwald <jh@sotun.de>   2012-05-07 19:53:27 (GMT)
commit     00b209240138660db1ded3ef3870023964ce6e4e (patch)
tree       8ffaec780b060bdc478929aa714b8af2ee760671
Initial commit (HEAD, master)
-rw-r--r--     46  Makefile
-rw-r--r--      9  code/Makefile
-rwxr-xr-x     12  code/anatool/plot_spikes
-rw-r--r--     57  code/core/Makefile
-rw-r--r--      6  code/core/bin.cpp
-rw-r--r--     15  code/core/bin.h
-rw-r--r--    128  code/core/event.cpp
-rw-r--r--     74  code/core/event.h
-rw-r--r--     37  code/core/fileutils.cpp
-rw-r--r--     10  code/core/fileutils.h
-rw-r--r--    115  code/core/global.cpp
-rw-r--r--     63  code/core/global.h
-rw-r--r--    262  code/core/interface.cpp
-rw-r--r--     62  code/core/interface.h
-rw-r--r--      7  code/core/log.h
-rw-r--r--     13  code/core/max.h
-rw-r--r--     13  code/core/min.h
-rw-r--r--    164  code/core/neuron.cpp
-rw-r--r--     64  code/core/neuron.h
-rw-r--r--     10  code/core/print_defaults.cpp
-rw-r--r--    134  code/core/regex.cpp
-rw-r--r--      8  code/core/regex.h
-rw-r--r--    105  code/core/reward.cpp
-rw-r--r--     53  code/core/reward.h
-rw-r--r--    134  code/core/simulate.cpp
-rw-r--r--     70  code/core/simulate.h
-rw-r--r--     28  code/core/spike.cpp
-rw-r--r--     28  code/core/spike.h
-rw-r--r--    118  code/core/synapse.cpp
-rw-r--r--     48  code/core/synapse.h
-rw-r--r--     57  code/core/tracepoints.cpp
-rw-r--r--     25  code/core/tracepoints.h
-rw-r--r--     20  code/core/type2name.h
-rw-r--r--      8  code/glue/Makefile
-rwxr-xr-x     62  code/glue/da-controlled-sim-wrapper
-rwxr-xr-x      5  code/glue/distill-performance
-rwxr-xr-x      8  code/glue/exec-matlab
-rwxr-xr-x     43  code/glue/extract-matlab-matrix
-rwxr-xr-x     24  code/glue/plot_sliding_perf
-rwxr-xr-x     19  code/glue/plot_spike_time_hist
-rwxr-xr-x      3  code/glue/print-params
-rwxr-xr-x    bin  code/glue/repeat-trace-cmd  (0 -> 9863 bytes)
-rw-r--r--     27  code/glue/repeat-trace-cmd.c
-rwxr-xr-x     24  code/glue/sim-wrapper
-rw-r--r--     10  code/matlab/Makefile
-rw-r--r--     14  code/matlab/analye-perfomance.m
-rw-r--r--     12  code/matlab/analye-stdp-freq-dep.m
-rw-r--r--     30  code/matlab/analyze_weight_development.m
-rw-r--r--     51  code/matlab/plot_stdp_param_scout.m
-rw-r--r--     15  code/matlab/random_spikes.m
-rw-r--r--     60  code/matlab/random_topo.m
-rw-r--r--     29  code/trainer/Makefile
-rw-r--r--    160  code/trainer/check_stdp_freq-dep.cpp
-rw-r--r--     46  code/trainer/check_stdp_freq-dep.h
-rw-r--r--    412  code/trainer/mem1.cpp
-rw-r--r--     72  code/trainer/mem1.h
-rw-r--r--    302  code/trainer/reinforce_synapse.cpp
-rw-r--r--     55  code/trainer/reinforce_synapse.h
-rw-r--r--     13  code/trainer/test.cpp
-rw-r--r--      5  model_input/Makefile
-rw-r--r--      0  model_input/global/if/default
-rw-r--r--     11  model_input/neurons/Makefile
-rw-r--r--   1000  model_input/neurons/dalif/1000_randomcharge
-rw-r--r--      1  model_input/neurons/if/1_smallcharge
-rw-r--r--      3  model_input/neurons/if/3_supercharge
-rw-r--r--      7  model_input/spikes/Makefile
-rw-r--r--     12  model_input/topology/Makefile
-rw-r--r--      1  model_input/topology/README
-rw-r--r--      2  model_input/topology/if/2circle
-rw-r--r--      6  model_input/topology/if/3circle2
-rw-r--r--   1000  model_input/trace/10s_10ms_all
-rw-r--r--      1  model_input/trace/10s_10s_all
-rw-r--r--      3  model_input/trace/README
73 files changed, 5551 insertions, 0 deletions
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..55c9aac
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,46 @@
+.PHONY: all
+all:
+ @echo "All does not exist\nChoose of:\n\tbackup\t- safe code and model definition\n\tclean\n\tcode\t- create code for all simulations\ndata\t- plain data for all models\n\tresults\t- graphs & co (what you expect when typing \"make all\")"
+
+.PHONY: backup
+BAKDIR="BAK/`date '+%Y-%m-%d_%s'`"
+backup: clean
+ mkdir -p ${BAKDIR}
+ cp -a Makefile code model_input ${BAKDIR}
+
+.PHONY: clean
+clean:
+ rm -f *~
+ cd code && make clean
+
+.PHONY: code
+code:
+ cd code && make
+
+.PHONY: input
+input: code
+ cd model_input && make
+
+.PHONY: data
+data: code input data/leakyif_test/timestamp
+
+.PHONY: results
+results: data
+ @echo NOT IMPLEMENTED && false
+
+# HINT: the dataset depends on the executable it is using (simulator might change) as well as on the data sources
+# It also depends on a timestamp to know if the experiment is outdated
+
+# a test of the leakyif model with some random input (mostly to test the simulator and toolchain)
+INPUTFILES_LEAKYIF_TEST=model_input/neurons/if/1000_nocharge \
+ model_input/topology/if/1000_random \
+ model_input/spikes/1000N_10s_10Hz_random \
+ model_input/global/if/default
+data/leakyif_test/timestamp: code code/core/sim-if $(INPUTFILES_LEAKYIF_TEST) model_input/trace/10s_10ms_all
+ -rm -Rf data/leakyif_test
+ mkdir -p data/leakyif_test
+ ./code/glue/sim-wrapper if \
+ $(INPUTFILES_LEAKYIF_TEST) \
+ data/leakyif_test/neuron data/leakyif_test/synapse data/leakyif_test/spikes data/leakyif_test/global \
+ model_input/trace/10s_10ms_all
+ touch data/leakyif_test/timestamp
diff --git a/code/Makefile b/code/Makefile
new file mode 100644
index 0000000..804a0ec
--- /dev/null
+++ b/code/Makefile
@@ -0,0 +1,9 @@
+.PHONY: all clean
+
+SUBDIRS=core trainer matlab
+
+all:
+ for I in $(SUBDIRS); do cd $$I; make; cd ..; done
+
+clean:
+ for I in $(SUBDIRS); do cd $$I; make clean; cd ..; done
diff --git a/code/anatool/plot_spikes b/code/anatool/plot_spikes
new file mode 100755
index 0000000..58fe722
--- /dev/null
+++ b/code/anatool/plot_spikes
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+(
+ if [ $# -eq 1 ]; then
+ echo "set xrange [0:$1]"
+ fi
+ if [ $# -eq 2 ]; then
+ echo "set xrange [$1:$2]"
+ fi
+ echo "plot 'spikes.out' using 1:2 with dots"
+) | gnuplot -persist
+
diff --git a/code/core/Makefile b/code/core/Makefile
new file mode 100644
index 0000000..df0dd28
--- /dev/null
+++ b/code/core/Makefile
@@ -0,0 +1,57 @@
+# HINT: the paradigm is not to create object files, so that all optimizations can
+# take place even though the code is scattered across many files
+
+CC=g++
+CCFLAGS=-O3 -ggdb
+LDFLAGS=
+INCLUDE=-I/home/huwald/src/boost
+
+# flags to test
+# -ffast-math -fno-finite-math-only
+
+# debug options
+# normal program:
+#DEBUGOPTS=
+# verbose status line for every (!) spike:
+DEBUGOPTS=-DDEBUG_STATUSLINE
+# enable profiling:
+#DEBUGOPTS=-pg
+
+# list of files every build depends on
+#BASE_SRC_FILES=neuron.cpp simulate.cpp synapse.cpp
+BASE_SRC_FILES=\
+ model_switch.h \
+ reward.h \
+ reward.cpp \
+ event.h \
+ event.cpp \
+ interface.h \
+ interface.cpp \
+ min.h \
+ max.h \
+ neuron.cpp \
+ neuron.h \
+ simulate.cpp \
+ simulate.h \
+ synapse.cpp \
+ synapse.h \
+ tracepoints.h \
+ tracepoints.cpp \
+ fileutils.cpp
+
+.PHONY: all clean wordcount
+
+all: sim print_defaults
+
+clean:
+ rm -f *~ massif.*.* sim print_defaults
+
+
+sim: $(BASE_SRC_FILES) Makefile
+ $(CC) -o $@ $(CCFLAGS) simulate.cpp $(INCLUDE) $(LDFLAGS) $(DEBUGOPTS) -DMODEL_`echo $* | tr '[:lower:]' '[:upper:]'`
+
+print_defaults: $(BASE_SRC_FILES) print_defaults.cpp
+ $(CC) -o $@ $(CCFLAGS) print_defaults.cpp $(LDFLAGS) $(DEBUGOPTS) -DMODEL_`echo $* | tr '[:lower:]' '[:upper:]'`
+
+wordcount:
+ wc *h *cpp Makefile
diff --git a/code/core/bin.cpp b/code/core/bin.cpp
new file mode 100644
index 0000000..e93e4c8
--- /dev/null
+++ b/code/core/bin.cpp
@@ -0,0 +1,6 @@
+#include "bin.h"
+
+void Bin::bin(int neuron) {
+ if (neurons->count(neuron))
+ count++;
+}
diff --git a/code/core/bin.h b/code/core/bin.h
new file mode 100644
index 0000000..302df56
--- /dev/null
+++ b/code/core/bin.h
@@ -0,0 +1,15 @@
+#ifndef BIN_H
+#define BIN_H
+
+#include <set>
+
+class Bin {
+public:
+ Bin(std::set<int> * neurons) : neurons(neurons), count(0) {}
+ void bin(int neuron);
+
+ std::set<int> * neurons;
+ int count;
+};
+
+#endif // BIN_H
diff --git a/code/core/event.cpp b/code/core/event.cpp
new file mode 100644
index 0000000..cb6f5ff
--- /dev/null
+++ b/code/core/event.cpp
@@ -0,0 +1,128 @@
+#include "simulate.h"
+
+#include "event.h"
+
+void Event::execute() {
+ switch (type) {
+ case 0:
+ ((InternalSpike*) this)->execute();
+ break;
+
+ case 1:
+ ((ExternalSpike*) this)->execute();
+ break;
+
+ case 2:
+ ((ExternalNoise*) this)->execute();
+ break;
+
+ case 3:
+ ((IntrinsicNeuron*) this)->execute();
+ break;
+
+ case 4:
+ ((VirtualEvent*) this)->vexecute();
+ break;
+ }
+}
+
+void Event::free() {
+ switch (type) {
+ case 0:
+ delete (InternalSpike*) this;
+ break;
+
+ case 1:
+ delete (ExternalSpike*) this;
+ break;
+
+ case 2:
+ delete (ExternalNoise*) this;
+ break;
+
+ case 3:
+ delete (IntrinsicNeuron*) this;
+ break;
+
+ case 4:
+ delete (VirtualEvent*) this;
+ break;
+ }
+}
+
+// comparison of two events regarding their order in the event list; spike events are processed prior to non-spike events (as spikes are incoming, while non-spike events are intrinsic and depend on all events up to the present)
+bool Event::operator>(Event &e) {
+ return (time > e.time) || ((time == e.time) && (type > e.type));
+}
+
+void Spike::execute() {
+ double nextEventTime = s.neurons[dst].processCurrent(time, current);
+
+ // check if there is an event to occur
+ if (nextEventTime != INFINITY)
+ s.addEvent(new IntrinsicNeuron(nextEventTime, dst));
+}
+
+void ExternalSpike::execute() {
+ // exec common spike code
+ ((Spike *) this)->execute();
+}
+
+void InternalSpike::execute() {
+  // the spike has to be registered as delivered in the sending synapse
+ synapse->lastSpike = time;
+ if (synapse->firstSpike == -INFINITY)
+ synapse->firstSpike = time;
+
+ // exec common spike code
+ ((Spike *) this)->execute();
+
+ // synaptic scaling
+ s.neurons[dst].normalizeWeight(g.sumWeight);
+}
+
+void IntrinsicNeuron::execute() {
+ // check if a spike does occur
+ bool spikeDoesOccur;
+ double nextEventTime = s.neurons[dst].generateSpike(time, spikeDoesOccur);
+
+ // check if a spike should be generated
+ if (!spikeDoesOccur)
+ return;
+
+ // add next intrinsic event
+ if (nextEventTime != INFINITY)
+ s.addEvent(new IntrinsicNeuron(nextEventTime, dst));
+
+  // create the spike for every neuron where it will be received
+ for (SynapseDstList::iterator i = s.neurons[dst].sout.begin(); i != s.neurons[dst].sout.end(); i++) {
+ double ntime = time, ncurrent;
+ i->computePostsynapticPulse(ntime, ncurrent);
+ s.addEvent(new InternalSpike(ntime, i->dst, ncurrent, &(*i)));
+ }
+
+ // check if we should output a spike of this neuron
+  for (std::list<OutputInterface<SpikeMUX> *>::iterator i = s.spikeOIfList.begin(); i != s.spikeOIfList.end(); i++)
+    if ((*i)->isContained(dst)) {
+      SpikeMUX spike(dst, time);
+      (*i)->pushObject(&spike);
+    }
+
+ // bin spikes
+  for (std::set<Bin*>::iterator bin = s.binSets.begin(); bin != s.binSets.end(); bin++)
+ (*bin)->bin(dst);
+}
+
+void ExternalNoise::execute() {
+ // pick a random neuron to send noise to (using a temporary event)
+ s.addEvent(new ExternalSpike(time, rand() % (s.numNeurons * 4 / 5), g.en_current));
+
+ // select the next timepoint from a poisson process
+ s.addEvent(new ExternalNoise(time - (log(1.0 - drand48()) / (g.en_freq * s.numNeurons))));
+}
+
+void GlobalUpdate::vexecute() {
+ static const double td = 0.001;
+ g.evolve(td);
+ s.addEvent(new GlobalUpdate(time + td));
+}
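ExternalNoise::execute() above draws the time of the next noise event as time - log(1 - u)/rate with u uniform in [0,1), i.e. exponentially distributed inter-arrival times of a Poisson process with rate en_freq * numNeurons. A standalone check of that sampling step (not part of this commit; the rate value is illustrative):

#include <cmath>
#include <cstdio>
#include <cstdlib>

int main() {
  const double rate = 1000.0;              // stands in for g.en_freq * numNeurons
  const int n = 1000000;
  double sum = 0.0;
  for (int i = 0; i < n; i++)
    sum += -log(1.0 - drand48()) / rate;   // same draw as in ExternalNoise::execute()
  printf("mean interval: %g   expected 1/rate: %g\n", sum / n, 1.0 / rate);
  return 0;
}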
diff --git a/code/core/event.h b/code/core/event.h
new file mode 100644
index 0000000..20d348a
--- /dev/null
+++ b/code/core/event.h
@@ -0,0 +1,74 @@
+#ifndef EVENT_H
+#define EVENT_H
+
+class Event {
+public:
+ double time;
+ int type;
+
+ Event(double ti, int ty) : time(ti), type(ty) {}
+ void execute();
+ void free();
+  bool operator>(Event &e1); // comparison of two events regarding their order in the event list; spike events are processed prior to non-spike events (as spikes are incoming, while non-spike events are intrinsic and depend on all events up to the present)
+
+private:
+ Event() {};
+};
+
+class Spike : public Event {
+public:
+ int dst;
+ double current;
+
+ Spike(double time, int type, int dst, double current) : Event(time, type), dst(dst), current(current) {}
+ void execute();
+};
+
+
+class ExternalSpike : public Spike {
+public:
+ ExternalSpike(double time, int dst, double current) : Spike(time, 1, dst, current) {}
+ void execute();
+};
+
+class InternalSpike : public Spike {
+public:
+ Synapse *synapse;
+
+ InternalSpike(double time, int dst, double current, Synapse *synapse) : Spike(time, 0, dst, current), synapse(synapse) {}
+ void execute();
+};
+
+class IntrinsicNeuron : public Event {
+public:
+ int dst;
+
+ IntrinsicNeuron(double time, int dst) : Event(time, 3), dst(dst) {}
+ void execute();
+};
+
+class ExternalNoise : public Event {
+public:
+ ExternalNoise(double time) : Event(time, 2) {}
+ void execute();
+};
+
+class VirtualEvent : public Event {
+public:
+ VirtualEvent(double time) : Event(time, 4) {}
+ virtual void vexecute() {}
+ virtual ~VirtualEvent() {}
+};
+
+class GlobalUpdate : public VirtualEvent {
+public:
+ GlobalUpdate(double time) : VirtualEvent(time) {}
+ virtual void vexecute();
+};
+
+class PEventGreater {
+public:
+ bool operator() (Event *e1, Event *e2) { return *e1 > *e2; }
+};
+
+#endif // EVENT_H
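The ordering encoded by Event::operator> and wrapped by PEventGreater is what the simulation's pending-event queue relies on (cf. simulate.h below): earlier events come out first, and at equal times the smaller type code wins, so spikes are handled before intrinsic updates. A standalone sketch of that behaviour with made-up events (not part of this commit):

#include <cstdio>
#include <queue>
#include <vector>

struct Ev { double time; int type; };

struct EvGreater {                               // same comparison as PEventGreater
  bool operator()(const Ev *a, const Ev *b) const {
    return (a->time > b->time) || ((a->time == b->time) && (a->type > b->type));
  }
};

int main() {
  std::priority_queue<Ev*, std::vector<Ev*>, EvGreater> q;
  Ev e1 = {0.5, 3}, e2 = {0.5, 0}, e3 = {0.1, 2};
  q.push(&e1); q.push(&e2); q.push(&e3);
  while (!q.empty()) {                           // prints (0.1,2) (0.5,0) (0.5,3)
    printf("(%g,%d) ", q.top()->time, q.top()->type);
    q.pop();
  }
  printf("\n");
  return 0;
}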
diff --git a/code/core/fileutils.cpp b/code/core/fileutils.cpp
new file mode 100644
index 0000000..9d080ce
--- /dev/null
+++ b/code/core/fileutils.cpp
@@ -0,0 +1,37 @@
+#include <string.h>
+#include <stdlib.h>
+
+#include "fileutils.h"
+
+// direction is either false=input or true=output and used to overload parameter '-'
+FILE *fd_magic(char *str, bool direction) {
+ static bool usedDir[2] = { false, false };
+
+ // check if stdin/-out is wanted
+ if (strcmp(str, "-") == 0) {
+ // check if we already used this
+ if (usedDir[direction]) {
+ fprintf(stderr, "stdin/-out cannot be used twice\n");
+ exit(-1);
+ }
+
+    // mark that we used it and return it
+ usedDir[direction] = true;
+ return direction ? stdout : stdin;
+ }
+
+  // check if /dev/null is wanted
+ if (strcmp(str, "0") == 0) {
+ // replace filename and proceed
+ str = "/dev/null";
+ }
+
+ // open file traditionally
+ FILE *fd = fopen(str, direction ? "w" : "r" );
+ if (fd == NULL) {
+ fprintf(stderr, "Failed to open %s\n", str);
+ exit(-1);
+ }
+
+ return fd;
+}
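A hypothetical caller (not in this commit) showing how fd_magic's '-' and '0' conventions can be used; it simply copies its input stream to its output stream and would be compiled together with fileutils.cpp:

#include <stdio.h>
#include "fileutils.h"

int main(int argc, char **argv) {
  if (argc < 3) { fprintf(stderr, "usage: copy <in> <out>\n"); return 1; }
  FILE *in  = fd_magic(argv[1], false);  // "-" selects stdin,  "0" selects /dev/null
  FILE *out = fd_magic(argv[2], true);   // "-" selects stdout, "0" selects /dev/null
  int c;
  while ((c = fgetc(in)) != EOF)
    fputc(c, out);
  return 0;
}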
diff --git a/code/core/fileutils.h b/code/core/fileutils.h
new file mode 100644
index 0000000..324dab7
--- /dev/null
+++ b/code/core/fileutils.h
@@ -0,0 +1,10 @@
+#ifndef FILEUTILS_H
+#define FILEUTILS_H
+
+#include <stdio.h>
+
+// direction is either false=input or true=output and used to overload parameter '-'
+FILE *fd_magic(char *str, bool direction);
+
+
+#endif // FILEUTILS_H
diff --git a/code/core/global.cpp b/code/core/global.cpp
new file mode 100644
index 0000000..83ec666
--- /dev/null
+++ b/code/core/global.cpp
@@ -0,0 +1,115 @@
+#include <math.h>
+
+#include "interface.h"
+
+#include "global.h"
+
+Global::Global() :
+ // neuron related
+ voltage_tau(0.05), // [s]
+ voltage_base(-0.065), // [V] rest potential
+ threshold(-0.05), // [V] above which a spike is emitted
+
+ Wmax(0.004), // [V]
+ Wmin(0.0), // [V]
+ sumWeight(0.16), // [V]
+  absoluteRefractoryTime(0.001), // [s]
+
+ // dopamin
+ dopamin_level(0.0), // [?]
+ dopamin_tau(0.005), // [s]
+
+ // STDP
+ stdp_tau_plus(0.014), // [s]
+ stdp_tau_minus(0.034), // [s]
+ stdp_lambda_plus(0.066), // [1]
+ stdp_lambda_minus(0.066), // [1]
+ stdp_et_tau(0.1), // [s] eligibility trace tau
+
+ // IP
+ ip_sliding_avg_tau(), // TODO
+ ip_lambda_R(),
+ ip_lambda_C(),
+ ip_dst_mom1(),
+ ip_dst_mom2(),
+
+ // external noise
+  en_current(0.1), // [V]
+  en_freq(1.0), // [Hz/Neuron]
+
+ // trainer
+ trainer_eval_delay(0.001), // [s]
+ trainer_numSymbols(2), // [1]
+ trainer_refractoryTime(0.2), // [s]
+ trainer_rewardAmount(0.1), // [?]
+ trainer_rd_c1(0.05), // readout delay with
+ trainer_rd_c2(0.0001), // delay = c1 + t*c2 + r*c3 + t*r'*c4
+ trainer_rd_c3(0.0), // where
+  trainer_rd_c4(0.0001) // r and r' are two distinct random numbers (equ. dist in [0,1))
+{}
+
+void Global::evolve(double td) {
+ dopamin_level = decay_dopamin(dopamin_level, td);
+}
+
+double Global::decay_dopamin(double level, double td) {
+ return level * exp( - td / dopamin_tau );
+}
+
+template<class Action>
+bool Global::reflect(Action &a) {
+  return
+    // neuron related
+    a(voltage_tau, "voltage_tau")
+    && a(voltage_base, "voltage_base")
+    && a(threshold, "threshold")
+
+    && a(Wmin, "Wmin")
+    && a(Wmax, "Wmax")
+    && a(sumWeight, "sumWeight")
+    && a(absoluteRefractoryTime, "absoluteRefractoryTime")
+
+    // dopamin
+    && a(dopamin_level, "dopamin_level")
+    && a(dopamin_tau, "dopamin_tau")
+
+    // STDP
+    && a(stdp_tau_plus, "stdp_tau_plus")
+    && a(stdp_tau_minus, "stdp_tau_minus")
+    && a(stdp_lambda_plus, "stdp_lambda_plus")
+    && a(stdp_lambda_minus, "stdp_lambda_minus")
+    && a(stdp_et_tau, "stdp_et_tau")
+
+    // IP
+    && a(ip_sliding_avg_tau, "ip_sliding_avg_tau")
+    && a(ip_lambda_R, "ip_lambda_R")
+    && a(ip_lambda_C, "ip_lambda_C")
+    && a(ip_dst_mom1, "ip_dst_mom1")
+    && a(ip_dst_mom2, "ip_dst_mom2")
+
+    // external noise
+    && a(en_current, "en_current")
+    && a(en_freq, "en_freq")
+
+    // trainer
+    && a(trainer_eval_delay, "trainer_eval_delay")
+    && a(trainer_numSymbols, "trainer_numSymbols")
+    && a(trainer_refractoryTime, "trainer_refractoryTime")
+    && a(trainer_rewardAmount, "trainer_rewardAmount")
+    && a(trainer_rd_c1, "trainer_rd_c1")
+    && a(trainer_rd_c2, "trainer_rd_c2")
+    && a(trainer_rd_c3, "trainer_rd_c3")
+    && a(trainer_rd_c4, "trainer_rd_c4");
+}
+
+Global::id_type Global::numElements() {
+  return 1;
+}
+
+Global * Global::singleton(int num) {
+  if (num==0) {
+    return &global;
+  }else{
+    DIE("requested global object with id != 0");
+  }
+}
diff --git a/code/core/global.h b/code/core/global.h
new file mode 100644
index 0000000..8fec3d4
--- /dev/null
+++ b/code/core/global.h
@@ -0,0 +1,63 @@
+#ifndef GLOBAL_H
+#define GLOBAL_H
+
+class Global {
+public:
+ Global();
+
+ // methods
+ void evolve(double td);
+ double decay_dopamin(double level, double td); // return how much the level _would_ have been decayed
+
+ // variables (and quasi constants)
+ // * neuron related
+ double voltage_tau; // [s]
+ double voltage_base; // [V] rest potential
+ double threshold; // [V] above which a spike is emitted
+
+ double Wmax, Wmin; // [V]
+ double sumWeight; // [V]
+  double absoluteRefractoryTime; // [s]
+
+ // * dopamin
+ double dopamin_level; // [?]
+ double dopamin_tau; // [s]
+
+ // * STDP
+ double stdp_tau_plus; // [s]
+ double stdp_tau_minus; // [s]
+ double stdp_lambda_plus; // [1]
+ double stdp_lambda_minus; // [1]
+ double stdp_et_tau; // [s] eligibility trace tau
+
+ // * IP
+ double ip_sliding_avg_tau;
+ double ip_lambda_R;
+ double ip_lambda_C;
+ double ip_dst_mom1;
+ double ip_dst_mom2;
+
+ // * external noise
+ double en_current; // [V]
+ double en_freq; // [Hz/Neuron]
+
+ // * trainer
+ double trainer_eval_delay; // [s]
+ int trainer_numSymbols; // [1]
+ double trainer_refractoryTime; // [s]
+ double trainer_rewardAmount; // [?]
+ double trainer_rd_c1; // readout delay with
+ double trainer_rd_c2; // delay = c1 + t*c2 + r*c3 + t*r'*c4
+ double trainer_rd_c3; // where
+ double trainer_rd_c4; // r and r' are two distinct random numbers (equ. dist in [0,1))
+
+ // reflection
+ template<class Action> bool reflect(Action &a);
+ typedef int id_type;
+ static id_type numElements();
+  static Global * singleton(int num); // return the single global object
+} global;
+
+Global &g = global;
+
+#endif // GLOBAL_H
diff --git a/code/core/interface.cpp b/code/core/interface.cpp
new file mode 100644
index 0000000..c2d3a3b
--- /dev/null
+++ b/code/core/interface.cpp
@@ -0,0 +1,262 @@
+#include <unistd.h>
+#include <errno.h>
+#include <boost/lexical_cast.hpp>
+
+#include "type2name.h"
+#include "log.h"
+#include "min.h"
+
+#include "interface.h"
+
+/* action definitions (the stuff implementing the different reflection primitives) */
+
+// (abstract) action (to implement filtering stuff)
+template<class interface>
+class Action {
+public:
+ Action<interface>(interface *ifc) : ifc(ifc) {}
+
+ bool isFiltered(char *desc) { // true -> don't use this element
+    return ! (ifc->ife->empty() || ifc->ife->count(desc));
+ }
+
+ interface *ifc;
+};
+
+// count the length of a record assuming binary encoding
+template<class interface>
+class ActionCountRecordSize : public Action<interface> {
+public:
+  ActionCountRecordSize<interface>(interface *ifc) : Action<interface>(ifc), result(0) {}
+
+ template<class T>
+ bool operator() (T &val, char *desc) {
+ if (this->isFiltered(desc)) return true;
+ result += sizeof(val);
+ return true;
+ }
+
+ int result;
+};
+
+// put the chosen titles in a string
+template<class interface>
+class ActionWriteTitle : public Action<interface> {
+public:
+  ActionWriteTitle<interface>(interface *ifc) : Action<interface>(ifc) {}
+
+ template<class T>
+ bool operator() (T &val, char * desc) {
+ if (this->isFiltered(desc)) return true;
+ this->ifc->bufWrite(desc);
+ this->ifc->bufWrite(" ");
+ return true;
+ }
+};
+
+// put the chosen types in a string
+template<class interface>
+class ActionWriteTypes : public Action<interface> {
+public:
+  ActionWriteTypes<interface>(interface *ifc) : Action<interface>(ifc) {}
+
+ template<class T>
+ bool operator() (T &val, char * desc) {
+ if (this->isFiltered(desc)) return true;
+ this->ifc->buf.append(type2name<T>());
+ this->ifc->buf.append(" ");
+ return true;
+ }
+};
+
+// put the serialized values to the string (space-delimited)
+// put the binary values to the string (no delimiter)
+template<class interface>
+class ActionWriteValues : public Action<interface> {
+public:
+  ActionWriteValues<interface>(interface *ifc) : Action<interface>(ifc) {}
+
+ template<class T>
+ bool operator() (T &val, char *desc) {
+ if (this->isFiltered(desc)) return true;
+ return this->ifc->bufWrite(val);
+ }
+};
+
+// read serialized values
+template<class interface>
+class ActionRead : public Action<interface> {
+public:
+  ActionRead<interface>(interface *ifc) : Action<interface>(ifc) {}
+
+ template<class T>
+ bool operator() (T &val, char * desc) {
+ if (this->isFiltered(desc)) return true;
+ return this->ifc->bufRead(val);
+ }
+};
+
+/* implementation of InputInterface */
+
+template<class T>
+InputInterface<T>::InputInterface(int fd, bool binary, IfObjectFilter *ifo, IfElementFilter *ife)
+  : binary(binary), fd(fd), ifo(ifo), ife(ife), buf(), rpos(0), npos(0), wpos(0), eof(false) {
+ buf.reserve(buf_size);
+}
+
+template<class T>
+bool InputInterface<T>::garantueeBuf(size_t size) {
+ // no chance?
+ if (eof) return (wpos - rpos >= size);
+ if (size >= buf_size/2) return false;
+
+ // relocate buffer?
+ if (rpos > buf_size/2) {
+ buf = buf.substr(rpos);
+ buf.reserve(buf_size);
+ npos -= rpos;
+ wpos -= rpos;
+ rpos = 0;
+ }
+
+ // read more?
+ do {
+ if (wpos < buf_size) {
+ ssize_t res = read(fd, buf.data(), buf_size - wpos);
+ switch (res) {
+ case 0: // EOF
+ eof = true;
+ break;
+
+ case EAGAIN:
+ // TO DECIDE: wait?
+ // read again (if neccessary)
+ break;
+
+ case EIO: case EBADF: case EINVAL: case EINTR: case EFAULT:
+ return false;
+
+ default:
+ // read res bytes
+ wpos += res;
+ }
+ }
+  } while ((wpos - rpos < size) && !eof); // loop in case of EAGAIN
+
+ return wpos - rpos >= size;
+}
+
+// make sure that a "\n"-terminated, non-empty string is in the buffer (starting at rpos)
+template<class T>
+bool InputInterface<T>::garantueeLine() {
+ if (npos <= rpos) npos = rpos + 1;
+ while (true) {
+ if ((npos > wpos) && !garantueeBuf(npos - rpos)) return false; // increase amount of data in buffer
+ if (buf[npos] == '\n') return true;
+ npos++;
+ }
+}
+
+template<class T>
+double InputInterface<T>::peekNextTime() {
+ double res;
+ if (!bufRead(res, false)) {
+ if (eof) {
+ return INFINITY;
+ }else{
+ DIE("peeking next time failed");
+ }
+ }
+ return res;
+}
+
+
+template<class Tif> template<class Tval>
+bool InputInterface<Tif>::bufRead(Tval &val, bool proceedRPos) {
+ if (binary) {
+ if (!garantueeBuf(sizeof(Tval))) return false;
+    val = *((Tval*) &(buf[rpos]));
+ if (proceedRPos) rpos += sizeof(Tval);
+ }else{
+ if (!garantueeLine()) return false;
+
+ // find next delimiter
+ size_t p = buf.find_first_of(" \n", rpos);
+ if (!p || (p == buf.npos)) return false;
+
+ // parse substring
+ try {
+      val = boost::lexical_cast<Tval>(buf.substr(rpos, p - rpos));
+ } catch(...) {
+ return false;
+ }
+
+ if (proceedRPos) rpos = p + 1;
+ }
+ return true;
+}
+
+template<class T>
+bool InputInterface<T>::readEntireFile() {
+ return readFileUntil(INFINITY);
+}
+
+template<typename T>
+bool InputInterface<T>::readFileUntil(double time) {
+ // iterate over events
+ while (peekNextTime() <= min(time, INFINITY)) {
+ // read (and forget) time to proceed rpos
+ double _foo; bufRead(_foo);
+
+ // get element id
+ typename T::id_type id;
+ if (!bufRead(id)) return false;
+ // TODO: check if the id refers to a valid object
+
+ // ignore it?
+ if (!ifo->empty() && !ifo->count(id)) return false;
+
+ // construct interface action and apply it
+ ActionRead<InputInterface<T> > ia(this);
+    if (!(T::singleton(id)->reflect(ia))) return false;
+ }
+ return true;
+}
+
+template<class T>
+OutputInterface<T>::OutputInterface(int fd, bool binary, IfObjectFilter *ifo, IfElementFilter *ife)
+ : fd(fd), binary(binary), ifo(ifo), ife(ife), buf() {}
+
+template<class T>
+bool OutputInterface<T>::pushObject(T *o) {
+ ActionWriteValues<OutputInterface<T> > ia(this);
+  return o->reflect(ia);
+}
+
+template<class T>
+bool OutputInterface<T>::pushClass() {
+ if (ifo->empty()) {
+ // for every object of the class
+ for (int i=0; i<T::numElements(); i++)
+ if (!pushObject(T::singleton(i))) return false;
+ }else{
+ // for all objects specified by ifo
+ for (IfObjectFilter::iterator i = ifo->begin(); i != ifo->end(); i++)
+ if (!pushObject(T::singleton(*i))) return false;
+  }
+  return true;
+}
+
+template<class T>
+bool OutputInterface<T>::isContained(typename T::id_type id) {
+ return ifo->empty() || ifo->count(id);
+}
+
+template<class Tif> template<class Tval>
+bool OutputInterface<Tif>::bufWrite(Tval &val) {
+ try {
+ buf.append(boost::lexical_cast<std::string>(val));
+ }catch (...) {
+ return false;
+ }
+ return true;
+}
diff --git a/code/core/interface.h b/code/core/interface.h
new file mode 100644
index 0000000..b53b741
--- /dev/null
+++ b/code/core/interface.h
@@ -0,0 +1,62 @@
+#ifndef INTERFACE_H
+#define INTERFACE_H
+
+#include <string>
+#include <set>
+#include <boost/lexical_cast.hpp>
+
+#include "math.h"
+
+// filters to select which objects/variables (not) to select
+typedef std::set<int> IfObjectFilter;
+typedef std::set<std::string> IfElementFilter;
+
+// interface class: holds long-lived state for reflection, on a per-class (template), per-file (instance) and per-direction (class) basis
+template<class T>
+class InputInterface {
+public:
+ // A. the methods you want to call
+ // creation
+  InputInterface<T>(int fd, bool binary, IfObjectFilter *ifo, IfElementFilter *ife);
+ bool readEntireFile();
+ bool readFileUntil(double time);
+ double peekNextTime(); // read a line ahead (if possible) and check at what time it occurs
+
+ // B. internal state
+ bool binary;
+ int fd;
+ std::string buf;
+ size_t rpos, npos, wpos;
+ static const size_t buf_size = 32768;
+ bool eof;
+  IfObjectFilter *ifo;
+  IfElementFilter *ife;
+
+ // C. internal functions
+ bool garantueeBuf(size_t size); // returns only after wpos - rpos >= size, but reads as much as possible without blocking
+ bool garantueeLine();
+
+  template<class Tval> bool bufRead(Tval &val, bool proceedRPos = true);
+};
+
+template<class T>
+class OutputInterface {
+public:
+ OutputInterface<T>(int fd, bool binary, IfObjectFilter *ifo, IfElementFilter *ife);
+
+ bool pushObject(T *o); // serialize one object
+ bool pushClass(); // serialize the entire class (selective by filter)
+ bool isContained(typename T::id_type id);
+
+ // internal state
+ int fd;
+ std::string buf;
+ bool binary;
+ IfObjectFilter *ifo;
+ IfElementFilter *ife;
+
+ // internal functions
+ template<class Tval> bool bufWrite(Tval &val);
+};
+
+#endif // INTERFACE_H
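The reflection idiom both interface classes build on is small enough to show in isolation: a reflectable class exposes reflect(Action&), which hands every (value, "name") pair to an action functor and stops at the first false return. A minimal standalone sketch with made-up members, not one of the simulator's classes:

#include <cstdio>

struct PrintAction {
  template<class T>
  bool operator()(T &val, const char *desc) {
    printf("%s = %g\n", desc, (double) val);   // a real action would filter or serialize here
    return true;
  }
};

struct Demo {
  double voltage, threshold;
  Demo() : voltage(-0.065), threshold(-0.05) {}

  template<class Action>
  bool reflect(Action &a) {
    return a(voltage, "voltage")
        && a(threshold, "threshold");
  }
};

int main() {
  Demo d;
  PrintAction p;
  d.reflect(p);    // prints both members together with their names
  return 0;
}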
diff --git a/code/core/log.h b/code/core/log.h
new file mode 100644
index 0000000..57fb21b
--- /dev/null
+++ b/code/core/log.h
@@ -0,0 +1,7 @@
+#ifndef LOG_H
+#define LOG_H
+
+#define WARN(str) fprintf(stderr, "WARN at " __FILE__ ":%d: " str "\n", __LINE__);
+#define DIE(str) { fprintf(stderr, "FATAL at " __FILE__ ":%d: " str "\n", __LINE__); exit(-1); }
+
+#endif
diff --git a/code/core/max.h b/code/core/max.h
new file mode 100644
index 0000000..490b338
--- /dev/null
+++ b/code/core/max.h
@@ -0,0 +1,13 @@
+#ifndef MAX_H
+#define MAX_H
+
+template<class L, class R>
+inline L max(L l, R r) {
+ if (l > r) {
+ return l;
+ }else{
+ return r;
+ }
+}
+
+#endif // MAX_H
diff --git a/code/core/min.h b/code/core/min.h
new file mode 100644
index 0000000..78472ab
--- /dev/null
+++ b/code/core/min.h
@@ -0,0 +1,13 @@
+#ifndef MIN_H
+#define MIN_H
+
+template<class L, class R>
+inline L min(L l, R r) {
+ if (l < r) {
+ return l;
+ }else{
+ return r;
+ }
+}
+
+#endif // MIN_H
diff --git a/code/core/neuron.cpp b/code/core/neuron.cpp
new file mode 100644
index 0000000..c35afcc
--- /dev/null
+++ b/code/core/neuron.cpp
@@ -0,0 +1,164 @@
+//#include "model_switch.h"
+
+#include <stdio.h>
+
+#include "neuron.h"
+
+Neuron::Neuron() {
+  // neurons are initialized before the Global object is; therefore this call to init() is preliminary
+ // and has to be repeated after the Global object has been loaded
+ init();
+}
+
+void Neuron::init() {
+ // general neuron params
+  voltage = global.voltage_base;
+ refractoryTime = 0.0;
+
+ // IP
+ fac_voltage_tau = 1.0;
+  fac_current = 1.0;
+
+ ip_R = 1.0;
+ ip_C = 1.0;
+
+ ip_est_mom1 = global.ip_dst_mom1;
+ ip_est_mom2 = global.ip_dst_mom2;
+ ip_dst_mom1 = global.ip_dst_mom1;
+ ip_dst_mom2 = global.ip_dst_mom2;
+
+ // clock
+ lastEvent = 0.0;
+ lastSpike = - INFINITY;
+
+ // WARN: synapse lists not initialized (!) (well .. they have a constructor)
+}
+
+// evolve the neuron's state to time [seconds]
+// RETURN: the time difference between this and the last update
+double Neuron::evolve(double time) {
+ // update internal clock
+ double dt = time - lastEvent;
+ lastEvent = time;
+
+ // voltage decay
+  voltage -= (voltage - global.voltage_base) * (1.0 - exp( - dt / (global.voltage_tau * fac_voltage_tau)));
+
+ return dt;
+}
+
+// apply an incoming spike of current to this neurons state giving it the current time [s] of the simulation; returns the time of the next spike to occur
+double Neuron::processCurrent(double time, double current) {
+ // process the model until the current time
+ evolve(time);
+
+ // add spike
+ if (time > refractoryTime)
+ voltage += fac_current * current;
+
+ // return when the neuron should fire the next time (0.0 is an option, too)
+ return predictSpike(time);
+}
+
+// generate a spike which has been predicted earlier to occur at time [s]
+// RETURN when the next spike is expected to occur and the current of the spike
+double Neuron::generateSpike(double time, bool &doesOccur) {
+ // update the model (to the after-firing-state)
+ evolve(time);
+
+ // check if a spike occurs (the spike date might be outdated)
+  doesOccur = (voltage >= global.threshold) && (time >= refractoryTime);
+
+ if (doesOccur) {
+ // 0. update internal state (instantaneously)
+ voltage = global.voltage_base;
+ refractoryTime = time + global.absoluteRefractoryTime;
+
+ // 1. do intrinsic plasticity
+ intrinsicPlasticity(time - lastSpike);
+
+ // 2. do stdp related accounting in the incoming synapses
+ // (check each synapse for event(s))
+ for (SynapseSrcList::iterator i = sin.begin(); i != sin.end(); i++) {
+ // do not touch constant synapses (calc'ing the values below is a waste of time)
+ if (i->constant)
+ continue;
+
+ // check if pre-neuron has fired since we have fired the last time
+ // this info is stored in the synapse between pre- and this neuron
+ if ((lastSpike != -INFINITY) && (i->firstSpike != -INFINITY)) {
+ // depression
+ i->evolve(i->firstSpike);
+ i->stdp(lastSpike - i->firstSpike);
+
+ // facilitation
+ i->evolve(time);
+ i->stdp(time - i->lastSpike);
+
+ // reset firstFired to allow the next max calculation
+ i->firstSpike = -INFINITY;
+ }
+ }
+
+ // 3. update the last time this neuron has fired
+ lastSpike = time;
+ }
+
+ return predictSpike(time);
+}
+
+double Neuron::predictSpike(double time) {
+  if (voltage >= global.threshold) {
+ return fmax(time, refractoryTime);
+ }else{
+ return INFINITY;
+ }
+}
+
+void Neuron::intrinsicPlasticity(double td) {
+ if (!ip)
+ return;
+
+ // update rate estimation
+  double edt = exp( - td / g.ip_sliding_avg_tau );
+ double freq = 1.0 / td; // HINT: td=0 is not possible (because of an absolute refractory period)
+ ip_est_mom1 = edt * ip_est_mom1 + (1 - edt) * freq;
+ ip_est_mom2 = edt * ip_est_mom2 + (1 - edt) * freq * freq;
+
+ // modify internal representation
+ ip_R += g.ip_lambda_R * (ip_est_mom1 - ip_dst_mom1) * td;
+
+ double m2d = g.ip_lambda_C * (ip_est_mom2 - ip_dst_mom2);
+ ip_C += pow(ip_C, 2) * m2d * td
+ / ( 1.0 + ip_C * m2d * td );
+
+ // update coefficients
+ fac_voltage_tau = ip_R * ip_C;
+ fac_current = ip_R;
+}
+
+template<class Action>
+bool Neuron::reflect(Action &a) {
+ return
+ a(voltage, "voltage")
+ && a(refractoryTime, "refractoryTime")
+ && a(ip, "ip")
+ && a(ip_est_mom1, "ip_est_mom1")
+ && a(ip_est_mom2, "ip_est_mom2")
+ && a(ip_dst_mom1, "ip_dst_mom1")
+ && a(ip_dst_mom2, "ip_dst_mom2")
+ && a(ip_R, "ip_R")
+ && a(ip_C, "ip_C")
+ && a(fac_voltage_tau, "fac_voltage_tau")
+ && a(fac_current, "fac_current")
+ && a(lastEvent, "lastEvent")
+ && a(lastSpike, "lastSpike");
+}
+
+Neuron::id_type Neuron::numElements() {
+ return s.numNeurons;
+}
+
+Neuron * Neuron::singleton(id_type id) {
+ return &(s.neurons[id]);
+}
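The decay step in Neuron::evolve(), voltage -= (voltage - voltage_base) * (1 - exp(-dt/tau)), is the exact integral of the leak equation dV/dt = -(V - V_base)/tau over the step dt, so the result does not depend on how often evolve() is called. A standalone check against explicit Euler integration (parameter values are illustrative, not taken from the model files):

#include <cmath>
#include <cstdio>

int main() {
  const double tau = 0.05, v_base = -0.065;   // stand-ins for voltage_tau, voltage_base
  const double dt = 0.01, h = 1e-7;
  double v_exact = -0.055, v_euler = -0.055;  // arbitrary start above rest potential

  // update as used in Neuron::evolve()
  v_exact -= (v_exact - v_base) * (1.0 - exp(-dt / tau));

  // explicit Euler integration of dV/dt = -(V - v_base)/tau
  for (double t = 0.0; t < dt; t += h)
    v_euler -= (v_euler - v_base) / tau * h;

  printf("exact: %.9f   euler: %.9f\n", v_exact, v_euler);
  return 0;
}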
diff --git a/code/core/neuron.h b/code/core/neuron.h
new file mode 100644
index 0000000..03c0dbb
--- /dev/null
+++ b/code/core/neuron.h
@@ -0,0 +1,64 @@
+/*************************************************************
+neuron.h
+ implementation of the axon hillock
+
+ STORES
+ * current tuning of neuron body (parameters)
+ * list of synapses (-> connections to other neurons)
+**************************************************************/
+
+#ifndef NEURON_H
+#define NEURON_H
+
+#include <boost/intrusive/slist.hpp>
+
+#include "synapse.h"
+
+using namespace boost::intrusive;
+
+typedef slist<Synapse, member_hook<Synapse, slist_member_hook<>, &Synapse::hook_dst> > SynapseDstList;
+typedef slist<Synapse, member_hook<Synapse, slist_member_hook<>, &Synapse::hook_src> > SynapseSrcList;
+
+class Neuron {
+public:
+ Neuron();
+ void init();
+
+ // functions operating on neuronal state
+ double evolve(double time);
+ double processCurrent(double time, double current);
+ double generateSpike(double time, bool &doesOccur);
+ double predictSpike(double time);
+ void intrinsicPlasticity(double td);
+
+ // reflection
+ template<class Action> bool reflect(Action &a);
+ typedef int id_type;
+ static id_type numElements();
+ static Neuron * singleton(int num); // return neuron # num
+
+ // list of outgoing and incoming synapses
+ SynapseSrcList sin;
+ SynapseDstList sout;
+ //Synapse **sin, *sout;// list of _in_coming and _out_going _s_ynapses; HINT: sin is a list of pointers to synapses in the corresponding outgoing neuron
+
+ // basic neuron properties
+ double voltage; // [V]
+ double refractoryTime; // [s]
+
+ // IP related
+ bool ip; // if this neuron has IP at all
+ double ip_est_mom1, ip_est_mom2; // estimated moments (using exponential sliding average (exact integration))
+ double ip_dst_mom1, ip_dst_mom2; // targeted moments
+  double ip_R, ip_C; // internal correspondences to Christina's model
+ double fac_voltage_tau, fac_current; // resulting coefficients for neuron behaviour
+
+ // local clocks
+  double lastEvent; // timepoint when the last event occurred
+ double lastSpike;
+};
+
+const int numNeurons = 1000;
+Neuron n[numNeurons];
+
+#endif // NEURON_H
diff --git a/code/core/print_defaults.cpp b/code/core/print_defaults.cpp
new file mode 100644
index 0000000..7fbc7c2
--- /dev/null
+++ b/code/core/print_defaults.cpp
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+
+
+int main () {
+  // TO BE IMPLEMENTED AGAIN
+  // using the reflection interface
+
+  return 0;
+}
diff --git a/code/core/regex.cpp b/code/core/regex.cpp
new file mode 100644
index 0000000..ef98209
--- /dev/null
+++ b/code/core/regex.cpp
@@ -0,0 +1,134 @@
+// file access
+#include <fcntl.h>
+#include <errno.h>
+#include <list>
+#include <boost/xpressive/xpressive_static.hpp>
+#include <boost/xpressive/regex_actions.hpp>
+#include <boost/type_traits/is_same.hpp>
+#include <boost/foreach.hpp>
+
+#include "interface.h"
+
+#include "log.h"
+#include "simulate.h"
+#include "tracepoints.h"
+#include "global.h"
+#include "neuron.h"
+#include "synapse.h"
+#include "spike.h"
+
+#include "regex.h"
+
+// populate a separate namespace with the regex expressions
+namespace regex {
+
+ using namespace boost::xpressive;
+
+ /* parser result variables */
+ std::set<int> intSet;
+ std::set<std::string> stringSet;
+ double targetTimeOffset = 0.0;
+ std::string filename;
+  bool ioDirection; // true -> write; false -> read
+ bool ioBinary;
+
+ /* parser callback implementations */
+ struct push_impl
+ {
+ typedef void result_type; // Result type, needed for tr1::result_of
+
+ template<typename Set, class Value>
+ void operator()(Set &s, Value const &val) const { s.insert(val); }
+ };
+ function<push_impl>::type const push = {{}};
+
+ /* I/O interface launcher */
+ template<class T>
+ struct start_interface_impl
+ {
+ typedef void result_type; // Result type, needed for tr1::result_of
+
+ void operator() () const {
+ // open src/dst file
+      int fd = open(filename.c_str(), ioDirection ? (O_WRONLY | O_APPEND | O_CREAT) : (O_RDONLY | O_NONBLOCK), 0644);
+ if (fd == -1) DIE("could not open file");
+
+ // execute task
+ if (ioDirection) { // write
+ if (boost::is_same<T, SpikeMUX>::value) {
+ // create a persistent interface to write out spikes as they occur
+          s.spikeOIfList.push_back(new OutputInterface<SpikeMUX>(fd, ioBinary, &intSet, &stringSet));
+ }else{
+ // create a temporary interface to write out any state
+          OutputInterface<T> oif(fd, ioBinary, &intSet, &stringSet);
+ oif.pushClass();
+ close(fd);
+ }
+ }else{ // read
+ // create long living input interface (terminates itself when EOF is discovered)
+        FileInputEvent<T>::createInputStream(new InputInterface<T>(fd, ioBinary, &intSet, &stringSet));
+ }
+
+ // clean up vars for next command
+ ioBinary = false;
+      stringSet.clear();
+      intSet.clear();
+ }
+ };
+
+ function<start_interface_impl<Global> >::type const start_interface_global = {{}};
+ function<start_interface_impl<Neuron> >::type const start_interface_neuron = {{}};
+ function<start_interface_impl<Synapse> >::type const start_interface_synapse = {{}};
+ function<start_interface_impl<Spike> >::type const start_interface_spike = {{}};
+
+ /* regex pattern definitions */
+
+ // character sets
+ cregex Delim = as_xpr(';');
+ cregex VarNameChar = alnum | as_xpr('_');
+
+ // basic elements
+  cregex Time = (+_d >> !('.' >> +_d))[ ref(targetTimeOffset)=as<double>(_) ];
+
+  cregex Index = (+_d)[ push(ref(intSet), as<int>(_)) ];
+
+  cregex Element = (+VarNameChar)[ push(ref(stringSet), as<std::string>(_)) ];
+
+  cregex Filename = (+VarNameChar)[ ref(filename)= as<std::string>(_) ];
+
+ cregex IODirWrite = as_xpr('>') [ ref(ioDirection)=true ];
+  cregex IODirRead = as_xpr('<') [ ref(ioDirection)=false ];
+ cregex IODirection = IODirWrite | IODirRead;
+
+ cregex IOBinaryTrue = as_xpr('#') [ ref(ioBinary)=true ];
+ cregex IOBinary = !IOBinaryTrue;
+
+ cregex IOCmd = IOBinary >> IODirection >> Filename;
+
+ // lists
+  cregex ObjectListInner = *space >> (s1= Index) >> *space;
+  cregex ObjectList = *space >> '(' >> ObjectListInner >> *( Delim >> ObjectListInner ) >> ')';
+
+  cregex ElementListInner = *space >> (s1= Element) >> *space;
+  cregex ElementList = *space >> '{' >> ElementListInner >> *( Delim >> ElementListInner ) >> '}';
+
+ // trace commands
+ cregex TraceCmdTail = *space >> !ElementList >> *space >> !ObjectList >> *space >> IOCmd;
+
+  cregex TraceGlobal  = (icase( "global" )  >> TraceCmdTail) [ start_interface_global() ];
+  cregex TraceNeuron  = (icase( "neuron" )  >> TraceCmdTail) [ start_interface_neuron() ];
+  cregex TraceSynapse = (icase( "synapse" ) >> TraceCmdTail) [ start_interface_synapse() ];
+  cregex TraceSpike   = (icase( "spike" )   >> TraceCmdTail) [ start_interface_spike() ];
+
+ // whole line
+ // target format what
+ // cregex channel = *space >> +delim >> *space >> ( neuron | synapse | global | spikes );
+ // cregex traceline = time >> *channel >> *( delim | space );
+ cregex traceLine = !icase("proceed") >> +space >> time;
+  cregex line = *space >> *( (TraceGlobal | TraceNeuron | TraceSynapse | TraceSpike | traceLine) >> *( Delim | space ) );
+
+ bool parseRequest(char *str) {
+ return regex_match(str, line);
+ }
+
+};
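Allowing for the unfinished parts of this grammar (the final rule still refers to the undefined inputElem/outputElem/traceElem), a trace command appears intended to consist of a class name, an optional element filter in braces, an optional object-id list in parentheses, and an I/O target, e.g. "neuron {voltage} (0;1;2) >neuron_trace", with "proceed <seconds>" advancing the simulation; this is a reading of the intent, not a tested description of what the regexes currently accept.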
diff --git a/code/core/regex.h b/code/core/regex.h
new file mode 100644
index 0000000..0d1f3d3
--- /dev/null
+++ b/code/core/regex.h
@@ -0,0 +1,8 @@
+#ifndef REGEX_H
+#define REGEX_H
+
+namespace regex {
+  extern double targetTimeOffset; // set by a parsed "proceed <time>" command
+  bool parseRequest(char *str);
+};
+
+#endif // REGEX_H
diff --git a/code/core/reward.cpp b/code/core/reward.cpp
new file mode 100644
index 0000000..0738c19
--- /dev/null
+++ b/code/core/reward.cpp
@@ -0,0 +1,105 @@
+#include <vector>
+
+#include "simulate.h"
+#include "bin.h"
+
+#include "reward.h"
+
+// only called once to init the da-reward system; later the more specific events are called
+void Reward::vexecute() {
+  // for ease of viewing, select 50 neurons per symbol with ascending ids
+ inputNeurons = new IOPop(g.trainer_numSymbols);
+ outputNeurons = new IOPop(g.trainer_numSymbols);
+ for (int i = 0; i<g.trainer_numSymbols; i++)
+ for (int j=i*50; j<(i+1)*50; j++) {
+ (*inputNeurons)[i].insert(j);
+ (*outputNeurons)[i].insert(799 - j);
+ }
+
+
+  // present the first symbol after a startup delay
+ s.addEvent(new Reward_Input(time + 10, inputNeurons, outputNeurons));
+}
+
+// choose and present a random input, then wait a random time
+void Reward_Input::vexecute() {
+ int symbol = rand() % g.trainer_numSymbols;
+ fprintf(stderr, "Pin: %d \n", symbol);
+
+  // excite the symbol-specific input neurons
+ for (std::set<int>::iterator i = (*inputNeurons)[symbol].begin(); i != (*inputNeurons)[symbol].end(); i++)
+ s.addEvent(new ExternalSpike(time + drand48() * 0.001, *i, 0.15));
+
+ // wait a (not yet) random time until evaluation
+ s.addEvent(new Reward_EnableBinning(time + g.trainer_eval_delay, inputNeurons, outputNeurons, symbol));
+}
+
+// start the binning of relevant neurons
+void Reward_EnableBinning::vexecute() {
+ // add bins
+ vector<Bin*> *bins = new vector<Bin*>(g.trainer_numSymbols);
+ for (int i=0; i < (*outputNeurons).size(); i++) {
+ Bin *b = new Bin(&((*outputNeurons)[i]));
+    s.binSets.insert(b);
+ (*bins)[i] = b;
+ }
+
+ // wait a small amount until reading the result of this binning
+ double delay = g.trainer_rd_c1
+ + g.trainer_rd_c2 * time
+ + g.trainer_rd_c3 * drand48()
+ + g.trainer_rd_c4 * time * drand48();
+ s.addEvent(new Reward_Readout(time + delay, symbol, inputNeurons, outputNeurons, bins));
+}
+
+// read the output frequencies and give reward
+void Reward_Readout::vexecute() {
+ if (estimatePerformance() > 1.0) {
+ deployReward(g.trainer_rewardAmount);
+ }else{
+ deployReward(-g.trainer_rewardAmount);
+ }
+
+ // delete bin trace commands
+ for (int i=0; i<bins->size(); i++) {
+    s.binSets.erase((*bins)[i]);
+ delete (*bins)[i];
+ }
+ delete bins;
+
+ // wait a refractory time
+ s.addEvent(new Reward_Input(time + g.trainer_refractoryTime, inputNeurons, outputNeurons));
+}
+
+double Reward_Readout::estimatePerformance() {
+ int max_freq = 0, // max. freq of all populations _except_ target population
+ target_freq;
+
+ for (int i=0; i<bins->size(); i++) {
+ fprintf(stderr, "Pout: %d -> %d \n", i, (*bins)[i]->count);
+ if (i == symbol) {
+ target_freq = (*bins)[i]->count;
+ }else{
+ max_freq = max(max_freq, (*bins)[i]->count);
+ }
+ }
+
+ double res;
+ if (max_freq == 0) {
+ if (target_freq == 0) {
+ res = 0.0;
+ }else{
+ res = ((double) target_freq) * INFINITY;
+ }
+ }else{
+ res = ((double) target_freq) / max_freq;
+ }
+
+ fprintf(stderr, "PERF: %f\n", res);
+ return res;
+}
+
+void Reward_Readout::deployReward(double reward) {
+ g.dopamin_level += reward;
+ s.da_history.push_front(pair<double,double>(time, g.dopamin_level));
+}
diff --git a/code/core/reward.h b/code/core/reward.h
new file mode 100644
index 0000000..1bc13f4
--- /dev/null
+++ b/code/core/reward.h
@@ -0,0 +1,53 @@
+#ifndef REWARD_H
+#define REWARD_H
+
+#include <vector>
+#include <set>
+
+#include <queue>
+
+#include "event.h"
+#include "bin.h"
+
+using namespace std;
+
+class Reward : public VirtualEvent {
+public:
+ typedef vector< std::set<int> > IOPop;
+
+ Reward(double time) : VirtualEvent(time) {}
+ Reward(double time, IOPop *inputNeurons, IOPop *outputNeurons) : VirtualEvent(time), inputNeurons(inputNeurons), outputNeurons(outputNeurons) {}
+ virtual void vexecute();
+
+ // state
+ IOPop *inputNeurons, *outputNeurons;
+};
+
+class Reward_Input : public Reward {
+public:
+ Reward_Input(double time, IOPop *inputNeurons, IOPop *outputNeurons) : Reward(time, inputNeurons, outputNeurons) {}
+ virtual void vexecute();
+};
+
+class Reward_EnableBinning : public Reward {
+public:
+ Reward_EnableBinning(double time, IOPop *inputNeurons, IOPop *outputNeurons, int symbol) : Reward(time, inputNeurons, outputNeurons), symbol(symbol) {}
+ virtual void vexecute();
+
+ int symbol;
+};
+
+class Reward_Readout : public Reward {
+public:
+ Reward_Readout(double time, int symbol, IOPop *inputNeurons, IOPop *outputNeurons, vector<Bin*> *bins) : Reward(time, inputNeurons, outputNeurons), symbol(symbol), bins(bins) {}
+ virtual void vexecute();
+
+ double estimatePerformance();
+ void deployReward(double reward);
+
+ int symbol;
+ vector<Bin*> * bins;
+};
+
+
+#endif // REWARD_H
diff --git a/code/core/simulate.cpp b/code/core/simulate.cpp
new file mode 100644
index 0000000..69252bb
--- /dev/null
+++ b/code/core/simulate.cpp
@@ -0,0 +1,134 @@
+#include <utility>
+#include <map>
+#include <signal.h>
+
+#include "model_switch.h"
+#include "synapse.h"
+#include "neuron.h"
+#include "topology.h"
+#include "reward.h"
+#include "tracepoints.h"
+#include "fileutils.h"
+#include "log.h"
+
+#include "simulate.h"
+
+// include cpp files to allow all optimizations to take place across all files
+#include "global.cpp"
+#include "synapse.cpp"
+#include "neuron.cpp"
+#include "topology.cpp"
+#include "fileutils.cpp"
+#include "event.cpp"
+#include "reward.cpp"
+#include "bin.cpp"
+#include "tracepoints.cpp" // should be excluded to speed up compilation
+
+
+Simulation::Simulation() :
+ // debug related stuff
+#ifdef DEBUG_STATUSLINE
+ numSpikes(0),
+ charCount(0),
+#endif
+
+ // init globals
+ currentTime(0.0),
+{
+ // set the initial dopamin level
+ da_history.push_front(pair<double, double>(0.0, g.dopamin_level));
+}
+
+bool Simulation::Step() {
+ // check if there are pending events
+ if (pendingSpikes.empty())
+ return false;
+
+ // retrieve and check next event
+ Event *e = pendingSpikes.top();
+ pendingSpikes.pop();
+
+ // proceed to the new time
+ if (currentTime > e->time)
+ DIE("tried to execute event of the past");
+ currentTime = e->time;
+
+#ifdef DEBUG_STATUSLINE
+ if (numSpikes % 16384 == 0) {
+ // remove old line
+ while (charCount-- > 0)
+ fprintf(stderr, " ");
+ fprintf(stderr, "\r");
+
+ // print new line
+ charCount += fprintf(stderr, "%f s, %d, %lld events (%d\tpending:", e->type, currentTime, numSpikes, pendingSpikes.size());
+ for (map<int, int>::iterator i = eventCount.begin(); i != eventCount.end(); i++) {
+ charCount += fprintf(stderr, "\t%d: %d", i->first, i->second);
+ charCount += 7; // tab
+ }
+ charCount += fprintf(stderr, ")\r");
+ charCount += 7; // tab
+ }
+ fflush(stderr);
+
+ numSpikes++;
+ eventCount[e->type]--;
+#endif
+
+ // execute event
+ e->execute();
+ e->free();
+
+ return true;
+}
+
+bool Simulation::proceedTime(double targetTime) {
+ // insert an intrinsic event for each neuron to have it computed up to exactly targetTime
+ for (int i=0; i < numNeurons; i++) {
+ addEvent(new IntrinsicNeuron(targetTime, i));
+ }
+
+ // proceed until no event is left before the time-point to be reached
+ // then also currentTime == targetTime holds
+ while (!pendingSpikes.empty() && (pendingSpikes.top()-> time <= targetTime)) {
+ Step();
+ }
+
+ // check if there are events left ... with the new event system this should always be the case
+ // and does not anymore indicate that the network is alive
+ return !pendingSpikes.empty();
+}
+
+void Simulation::addEvent(Event *e) {
+ pendingSpikes.push(e);
+#ifdef DEBUG_STATUSLINE
+ eventCount[e->type]++;
+#endif
+}
+
+void initStaticVars() {
+ Synapse::numSynapses = 0;
+}
+
+int main(int argc, char **argv) {
+ // ignore broken pipe signals (they should be handled by stdio file handlers and our usual error functions)
+ signal(SIGPIPE, SIG_IGN);
+
+ initStaticVars();
+
+ // load the network
+ // tp_load(s.fd_isynapse);
+ // neuron_load(s.fd_ineuron);
+
+ // init services implemented via events by adding one of them to the event population
+ s.addEvent(new ExternalNoise(0.0));
+ s.addEvent(new Reward(0.0));
+
+ // check for autoexciting events (and init neurons btw)
+ s.proceedTime(0.0);
+
+ // pass control to the tracepoint parser
+ // this allows to run the simulation interactively
+ executeTracepoints(s.fd_trace, s.fd_ispike, s.fd_iglobal);
+
+ return 0;
+}
diff --git a/code/core/simulate.h b/code/core/simulate.h
new file mode 100644
index 0000000..3d84b6e
--- /dev/null
+++ b/code/core/simulate.h
@@ -0,0 +1,70 @@
+#ifndef SIMULATE_H
+#define SIMULATE_H
+
+#include <queue>
+#include <list>
+
+#include "neuron.h"
+#include "synapse.h"
+#include "tracepoints.h"
+#include "event.h"
+#include "interface.h"
+#include "spike.h"
+
+using namespace std;
+
+class Simulation {
+public:
+ Simulation();
+
+ // ----- FUNCTIONS -----
+
+ // controlling functions
+ bool Step(); // do a single step (fire one neuron); return if the net is dead
+ bool proceedTime(double td);
+ bool proceedSpikes(long count);
+
+ void addEvent(Event*);
+
+ // ----- SIMULATION STATE -----
+
+ // simulation global variables
+ double currentTime;
+
+ // list of spikes and events
+  priority_queue<Event*, vector<Event*>, PEventGreater> pendingSpikes;
+
+  // list of discontinuous dopamin changes (time, amount), sorted by time (newest is front)
+ std::list<pair<double, double> > da_history;
+
+ // ----- HELPER VARS -----
+
+ // reflection & traces
+ std::list<OutputInterface<SpikeMUX> *> spikeOIfList;
+
+ std::set<Bin*> binSets;
+
+ // file descriptors
+ FILE *fd_ineuron,
+ *fd_isynapse,
+ *fd_ispike,
+ *fd_iglobal,
+ *fd_oneuron,
+ *fd_osynapse,
+ *fd_ospike,
+ *fd_oglobal,
+ *fd_trace; // command file
+
+ // debug vars
+#ifdef DEBUG_STATUSLINE
+ long long numSpikes; // number of spikes (counted after axon hillock) processed so far
+ map<int, int> eventCount;
+ int charCount;
+#endif
+};
+
+// init neural network
+Simulation s;
+
+
+#endif // SIMULATE_H
diff --git a/code/core/spike.cpp b/code/core/spike.cpp
new file mode 100644
index 0000000..7b12c18
--- /dev/null
+++ b/code/core/spike.cpp
@@ -0,0 +1,28 @@
+#include "spike.h"
+
+SpikeMUX * SpikeMUX::singleton(id_type num) {
+ staticSpikeMUX.dst = num;
+ return &staticSpikeMUX;
+}
+
+// special case for reading a spike from a file
+template<>
+bool SpikeMUX::reflect<ActionRead>(ActionRead &a) {
+ bool res = reflect(dst, "dst")
+ && reflect(time, "time")
+ && reflect(current, "current")
+ && (time > s.currentTime);
+
+ if (res)
+ s.addEvent(new ExternalSpike(time, dst, current));
+
+ return res;
+}
+
+// general reflection case
+template<class Action>
+bool SpikeMUX::reflect(Action &a) {
+  return a(dst, "dst")
+    && a(time, "time")
+    && a(current, "current");
+}
diff --git a/code/core/spike.h b/code/core/spike.h
new file mode 100644
index 0000000..2b2e4da
--- /dev/null
+++ b/code/core/spike.h
@@ -0,0 +1,28 @@
+#ifndef SPIKE_H
+#define SPIKE_H
+
+#include <math.h>
+
+#include "neuron.h"
+#include "log.h"
+
+class SpikeMUX {
+public:
+  typedef Neuron::id_type id_type; // WARN: this id refers to the spiking neuron, not the spike itself which cannot be addressed directly
+ SpikeMUX(id_type dst) : dst(dst), time(-INFINITY), current(0.0) {}
+ SpikeMUX(id_type dst, double time) : dst(dst), time(time), current(0.0) {}
+
+ // reflection
+ template<class Action> bool reflect(Action &a);
+ static id_type numElements() { DIE("Spike::numElements() called"); }
+ static SpikeMUX * singleton(id_type num); // do some reflection magic to multiplex between IntrinsicNeuron and ExternalNoise
+
+ // properties
+ id_type dst;
+ double time;
+ double current;
+};
+
+SpikeMUX staticSpikeMUX(-1); // single copy to use for reading spikes
+
+#endif // SPIKE_H
diff --git a/code/core/synapse.cpp b/code/core/synapse.cpp
new file mode 100644
index 0000000..cc7399a
--- /dev/null
+++ b/code/core/synapse.cpp
@@ -0,0 +1,118 @@
+#include <math.h>
+
+#include "synapse.h"
+
+Synapse::Synapse() :
+ src(-1), dst(-1), // seeing this value indicates an unused synapse
+ weight(0.0),
+ eligibility(0.0),
+ delay(0.0),
+ constant(false),
+ firstSpike(- INFINITY),
+ lastSpike(- INFINITY),
+ evolvedUntil(0.0)
+{}
+
+// given an input time and current calculate their output values
+void Synapse::computePostsynapticPulse(double &time, double &current) {
+ time += delay;
+ current = weight;
+}
+
+void Synapse::evolve(double time) {
+ if (time < evolvedUntil)
+ return;
+
+  // get the dopamin history starting from a dopamin event earlier than the synapse's last event
+ std::list<pair<double, double> >::iterator i = s.da_history.begin();
+ while (i->first > evolvedUntil)
+ i++;
+
+ // evolve synapse state for each interval given by the dopamin history
+ while (evolvedUntil < time) {
+ double da_level = i->second;
+ if (i->first < evolvedUntil) // the first dopamin event partially lies outside our time window
+ da_level = g.decay_dopamin(da_level, evolvedUntil - i->first);
+
+ double td = - evolvedUntil;
+ if (i == s.da_history.begin()) {
+ // this is the last (=newest) element of the history
+ td += time;
+ evolvedUntil = time;
+ }else{
+ i--;
+ if (i->first > time) {
+        // dopamin history goes further (in time) than we have to simulate
+ td += time;
+ evolvedUntil = time;
+ }else{
+ td += i->first;
+ evolvedUntil += td;
+ }
+ }
+
+ /* below is the exact solution of the equation system
+ dc/dt = -Cc c - eligibility trace, C - cf. tau
+ dd/dt = -Dd d - dopamin level, D - cf. tau
+ dw/dt = cd w - weight
+ */
+
+ if (!constant) {
+ double tau = g.dopamin_tau * g.stdp_et_tau / (g.dopamin_tau + g.stdp_et_tau);
+ double dw = da_level * eligibility * tau * (1 - exp( -td / tau ));
+ weight += dw;
+      weight = fmax( g.Wmin, fmin( g.Wmax, weight ));
+ }
+ eligibility *= exp( -td / g.stdp_et_tau );
+ }
+}
+
+// modify eligibility trace according to a single STDP event given by
+// a pre-post time difference (= t_post - t_pre)
+void Synapse::stdp(double td) {
+ if (td >= 0.0) {// WARN: this doesn't work with negative zero
+ eligibility = g.stdp_lambda_plus * exp( - td / g.stdp_tau_plus );
+ }else{
+ eligibility = - g.stdp_lambda_minus * exp( td / g.stdp_tau_minus );
+ }
+}
+
+void Synapse::updateTopology() {
+ // clear the sin/sout lists of all neurons
+ for (int i=0; i<numNeurons; i++) {
+ n[i].sout.clear();
+ n[i].sin.clear();
+ }
+
+  // iterate over all synapses which have a valid source and destination (neuron) and add them to sin/sout lists
+ int i;
+ for (i=0; (i<maxSynapses) && (syn[i].src != -1) && (syn[i].dst != -1); i++) {
+ n[syn[i].src].sout.push_front(syn[i]);
+ n[syn[i].dst].sin.push_front(syn[i]);
+ }
+
+ // store number of (used) synapses
+ numSynapses = i;
+}
+
+template<class Action>
+bool Synapse::reflect(Action &a) {
+ return
+ a(src, "src")
+ && a(dst, "dst")
+ && a(weight, "weight")
+ && a(eligibility, "eligibility")
+ && a(delay, "delay")
+ && a(constant, "constant")
+ && a(firstSpike, "firstSpike")
+ && a(lastSpike, "lastSpike")
+ && a(evolvedUntil, "evolvedUntil");
+}
+
+Synapse::id_type Synapse::numElements() {
+  return numSynapses;
+}
+
+Synapse * Synapse::singleton(id_type id) {
+  return &(syn[id]);
+}
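The closed form used in Synapse::evolve() follows from the ODE system quoted in the comment: with c(t) = c0*exp(-t/tau_c) and d(t) = d0*exp(-t/tau_d), dw/dt = c*d integrates over an interval T to c0*d0*tau*(1 - exp(-T/tau)) with 1/tau = 1/tau_c + 1/tau_d, which is exactly the dw expression above. A standalone numerical check against Euler integration (parameter values are illustrative, not taken from the model files):

#include <cmath>
#include <cstdio>

int main() {
  const double tau_c = 0.1, tau_d = 0.005;   // stand-ins for stdp_et_tau, dopamin_tau
  const double c0 = 0.05, d0 = 0.2;          // eligibility and dopamine at interval start
  const double T = 0.02, h = 1e-7;

  // closed form as used in Synapse::evolve()
  const double tau = tau_c * tau_d / (tau_c + tau_d);
  const double dw_exact = c0 * d0 * tau * (1.0 - exp(-T / tau));

  // explicit Euler integration of dc/dt = -c/tau_c, dd/dt = -d/tau_d, dw/dt = c*d
  double c = c0, d = d0, dw = 0.0;
  for (double t = 0.0; t < T; t += h) {
    dw += c * d * h;
    c  -= c / tau_c * h;
    d  -= d / tau_d * h;
  }
  printf("closed form: %.8g   euler: %.8g\n", dw_exact, dw);
  return 0;
}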
diff --git a/code/core/synapse.h b/code/core/synapse.h
new file mode 100644
index 0000000..3eef12d
--- /dev/null
+++ b/code/core/synapse.h
@@ -0,0 +1,48 @@
+#ifndef SYNAPSE_H
+#define SYNAPSE_H
+
+#include <boost/intrusive/slist.hpp>
+
+using namespace boost::intrusive;
+
+class Neuron;
+
+class Synapse {
+public:
+ Synapse();
+
+ // functions
+ void computePostsynapticPulse(double &time, double &current); // given an input time and current calculate their output value
+ void evolve(double time);
+ void stdp(double dt);
+ static void updateTopology(); // recreate all sin/sout lists in the neurons
+
+ // reflection
+ template<class Action> bool reflect(Action &a);
+ typedef int id_type;
+ static id_type numElements();
+ static Synapse * singleton(id_type num); // return neuron # num
+ static id_type numSynapses;
+
+ // model specifics
+ int src, dst; // pre- and postsynaptic neuron
+ double weight; // [V]
+ double eligibility; // [?]
+ double delay; // total delay from presynaptic axon hillock to postsynaptic axon hillock
+  bool constant; // whether the weight is fixed (HINT: weight < 0 implies constness, too)
+
+ // sim helper vars
+  double firstSpike, // time the first/last spike arrived at its dst neuron in a given interval
+         lastSpike;  // HINT: firstSpike is set to -INFINITY every time after it is processed
+                     // by the STDP rule
+  double evolvedUntil; // up to which time the model has been evolved
+
+  // hook to use intrusive lists (defined in neuron.h)
+ slist_member_hook<> hook_src, hook_dst;
+};
+
+const int maxSynapses = 200000;
+Synapse syn[maxSynapses];
+
+
+#endif // SYNAPSE_H
diff --git a/code/core/tracepoints.cpp b/code/core/tracepoints.cpp
new file mode 100644
index 0000000..7243f1c
--- /dev/null
+++ b/code/core/tracepoints.cpp
@@ -0,0 +1,57 @@
+#include "event.h"
+#include "regex.h"
+
+#include "simulate.h"
+
+/* input event streams */
+
+template<class T>
+void FileInputEvent<T>::createInputStream(InputInterface<T> *iface) {
+ double time = iface->peekNextTime();
+ if (time == INFINITY) {
+ delete iface;
+ return;
+ }
+ if (time < s.currentTime) {
+ DIE("tried to include a file with events of the past");
+ }
+ s.addEvent(new FileInputEvent(iface, time));
+}
+
+template<class T>
+void FileInputEvent<T>::vexecute() {
+ iface->readFileUntil(time);
+  createInputStream(iface);
+}
+
+/* command loop */
+
+bool executeTracepoints() {
+ // the main loop of the program
+ char *str = NULL;
+ size_t _foo;
+
+ // loop over trace commands
+ while (getline(&str, &_foo, stdin) > 0) {
+ // parse request
+ if (!regex::parseRequest(str))
+ DIE("Invalid tracepoint format");
+
+ // proceed time if a run command was given
+ if (regex::targetTimeOffset != 0.0) {
+      if (!s.proceedTime(s.currentTime + regex::targetTimeOffset))
+	fprintf(stderr, "Warning: network is dead\n");
+      // reset target time
+      regex::targetTimeOffset = 0.0;
+      // reset spike output list
+      BOOST_FOREACH(SpikeMUX *i, s.spikeOIfList) { delete i; }
+      s.spikeOIfList.clear();
+ }
+
+
+ free(str);
+ str = NULL;
+ }
+
+ return true;
+}
diff --git a/code/core/tracepoints.h b/code/core/tracepoints.h
new file mode 100644
index 0000000..d0ea9fe
--- /dev/null
+++ b/code/core/tracepoints.h
@@ -0,0 +1,25 @@
+#ifndef TRACEPOINTS_H
+#define TRACEPOINTS_H
+
+#include <list>
+#include <set>
+
+#include "interface.h"
+#include "bin.h"
+#include "event.h"
+
+// read tracepoints from fd and execute them
+bool executeTracepoints(FILE *fd_trace, FILE *fd_spike, FILE *fd_global);
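+// HINT (cf. regex.cpp and the trainers): a trace command line looks like, e.g.,
+//   "1.0; spikes (0; 1); global; neuron (0; 1); synapse (0; 1)"
+// i.e. a time offset followed by the objects to dump; an empty line repeats the previous command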
+
+template<class T>
+class FileInputEvent : public VirtualEvent {
+public:
+ FileInputEvent<T>(InputInterface<T> *iface, double time) : VirtualEvent(time), iface(iface) {}
+ static void createInputStream(InputInterface<T> *iface);
+ void vexecute();
+
+ InputInterface<T> *iface;
+};
+
+
+#endif // TRACEPOINTS_H
diff --git a/code/core/type2name.h b/code/core/type2name.h
new file mode 100644
index 0000000..1917a6a
--- /dev/null
+++ b/code/core/type2name.h
@@ -0,0 +1,20 @@
+#ifndef TYPE2NAME_H
+#define TYPE2NAME_H
+
+// if the type is not known
+template<class T>
+char * type2name() { return "type_not_stringified"; }
+
+template<>
+char * type2name<bool>() { return "bool"; }
+
+template<>
+char * type2name<char>() { return "char"; }
+
+template<>
+char * type2name<int>() { return "int"; }
+
+template<>
+char * type2name<double>() { return "double"; }
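+
+// further types can be stringified analogously, e.g. (illustrative):
+//   template<> char * type2name<float>() { return "float"; }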
+
+#endif // TYPE2NAME_H
diff --git a/code/glue/Makefile b/code/glue/Makefile
new file mode 100644
index 0000000..35bd16b
--- /dev/null
+++ b/code/glue/Makefile
@@ -0,0 +1,8 @@
+.PHONY: all clean
+all: repeat-trace-cmd
+
+clean:
+ rm *~ repeat-trace-cmd
+
+repeat-trace-cmd: repeat-trace-cmd.c
+	gcc repeat-trace-cmd.c -o repeat-trace-cmd -O3 \ No newline at end of file
diff --git a/code/glue/da-controlled-sim-wrapper b/code/glue/da-controlled-sim-wrapper
new file mode 100755
index 0000000..918de4a
--- /dev/null
+++ b/code/glue/da-controlled-sim-wrapper
@@ -0,0 +1,62 @@
+#!/bin/sh
+
+# check param count
+if [ ! $# -eq 7 ]; then
+ echo 'wrong parameter count (see the source for parameter order)' >&2
+ # 1. model name (e.g. the "if" from "sim-if")
+ # 2. controller name (relative to trainer-dir)
+ # 3. input_neuron_file
+ # 4. input_synapse_file
+ # 5. output_neuron_file
+ # 6. output_synapse_file
+ # 7. performance output
+ exit 1
+fi
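+
+# example invocation (illustrative file names):
+#   ./da-controlled-sim-wrapper dalif reinforce_synapse neurons.in synapses.in \
+#     neurons.out synapses.out performance.out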
+
+# determine the path of the simulation program
+SIM=`dirname $0`/../core/sim-$1
+if [ ! -x $SIM ]; then
+ echo "executable ($SIM) does not exist" >&2
+ exit 1
+fi
+
+# determine the path of the controller program
+CTL=`dirname $0`/../trainer/$2-$1
+if [ ! -x $CTL ]; then
+ echo "executable ($CTL) does not exist" >&2
+ exit 1
+fi
+
+
+# create tmp dir
+FIFODIR=`mktemp -td fasimu.XXXXXXXXXX`
+
+# create fifos
+mkfifo $FIFODIR/spike_in
+mkfifo $FIFODIR/spike_out
+mkfifo $FIFODIR/trace_in
+mkfifo $FIFODIR/global_in
+mkfifo $FIFODIR/global_out
+
+# TODO: check if an additional i/o file is an executable
+
+# launch controller and simulator
+#echo $CTL - $FIFODIR/trace_in $FIFODIR/global_in $FIFODIR/global_out $FIFODIR/spike_in $FIFODIR/spike_out "2> trainer.err &"
+$CTL $7 $FIFODIR/trace_in $FIFODIR/global_in $FIFODIR/global_out $FIFODIR/spike_in $FIFODIR/spike_out 2> trainer.err &
+
+#echo $SIM "2> sim.err" $3 $4 $FIFODIR/spike_in $FIFODIR/global_in $5 $6 $FIFODIR/spike_out $FIFODIR/global_out $FIFODIR/trace_in
+$SIM 2> sim.err $3 $4 $FIFODIR/spike_in $FIFODIR/global_in $5 $6 $FIFODIR/spike_out $FIFODIR/global_out $FIFODIR/trace_in
+
+# hint: simulator params are
+ # input_neuron_file
+ # input_synapse_file
+ # input_spike_file
+ # input_global_file
+ # output_neuron_file
+ # output_synapse_file
+ # output_spike_file
+ # output_global_file
+ # trace_command_file
+
+# delete tmp dir
+rm -R $FIFODIR
diff --git a/code/glue/distill-performance b/code/glue/distill-performance
new file mode 100755
index 0000000..af90f2f
--- /dev/null
+++ b/code/glue/distill-performance
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+if [ -f performance.out.raw ]; then
+ cat performance.out.raw | tr "\r" "\n" | grep ^PERF | cut -d" " -f2 > performance.out
+fi
diff --git a/code/glue/exec-matlab b/code/glue/exec-matlab
new file mode 100755
index 0000000..ccb0d23
--- /dev/null
+++ b/code/glue/exec-matlab
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+if [ -f $0.m ]; then
+ env - bash -c "(echo \"$1\"; cat $0.m) | matlab -nodesktop"
+# env - bash -c "cat $0.m | octave"
+else
+ echo file $0.m not found
+fi
diff --git a/code/glue/extract-matlab-matrix b/code/glue/extract-matlab-matrix
new file mode 100755
index 0000000..b5021a4
--- /dev/null
+++ b/code/glue/extract-matlab-matrix
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+# TODO: use head/tail instead of cat where only part of the file is needed
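+# expected input is a matlab-printed matrix, e.g. (illustrative):
+#   ans =
+#      1.0e+03 *
+#      0.0010    1.2000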
+
+# extract the multiplier of the matrix
+MUL=`cat $1 \
+| egrep -o 'e\+[0-9]* \*' \
+| tr -d 'e+ *'`
+
+if [ -z "$MUL" ]; then
+ MUL=1
+fi
+
+# get the number of cols
+COLS=`cat $1 \
+| egrep '([[:space:]]*([01](\.[0-9]*){0,1})[[:space:]]*)+$' \
+| tail -n1 \
+| wc -w`
+
+# read the matrix, multiply to correct value and put it out
+cat $1 \
+| egrep '([[:space:]]*([01](\.[0-9]*){0,1})[[:space:]]*)+$' \
+| tr " " "\n" \
+| egrep -v '^$' \
+| while read; do
+ echo $MUL '*' $REPLY
+done \
+| bc \
+| sed 's/\([0-9]\+\)\.0*/\1/' \
+| ( I=1
+ while read; do
+ if [ $I -eq $COLS ]; then
+ echo "$REPLY"
+ I=1
+ else
+ echo -n "$REPLY,"
+ I=$(( $I + 1 ))
+ fi
+ done )
+
+
+# old debug stuff
+#echo $MUL $COLS \ No newline at end of file
diff --git a/code/glue/plot_sliding_perf b/code/glue/plot_sliding_perf
new file mode 100755
index 0000000..c71d42b
--- /dev/null
+++ b/code/glue/plot_sliding_perf
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+if [ -f "performance.out" ]; then
+ echo "x=load('performance.out');
+ x=(x > 1);
+ y=length(x);
+ p=zeros(y-100,1);
+ for i=1:(y-100)
+ p(i)=mean(x(i:(i+100),1));
+ end
+ p" \
+ | octave -q \
+ | tr -dc "0123456789.\n" \
+ |grep -v "^$" \
+ > performance.out.sliding-avg
+
+ PWD=`pwd`
+
+ echo "set title 'performance (sliding avg, window size 100) $PWD'
+ set terminal postscript
+ set output 'performance.out.ps'
+ plot 'performance.out.sliding-avg' using 1 with lines, 0.5" \
+ | gnuplot
+fi
diff --git a/code/glue/plot_spike_time_hist b/code/glue/plot_spike_time_hist
new file mode 100755
index 0000000..ad45305
--- /dev/null
+++ b/code/glue/plot_spike_time_hist
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+if [ -f "spikes.out" ]; then
+ cat spikes.out | cut -d, -f1 > spikes.out.timing
+
+ echo "x = load('spikes.out.timing');
+ y = hist(x, ceil(max(x)/10));
+ y'" \
+ | octave -q \
+ | tr -dc "0123456789.\n" \
+ | grep -v "^$" \
+ > spikes.out.binned-timing
+
+ echo "set title 'population frequency $PWD'
+ set terminal postscript
+ set output 'spikes.out.binned-timing.ps'
+ plot 'spikes.out.binned-timing' using 1 with lines" \
+ | gnuplot
+fi \ No newline at end of file
diff --git a/code/glue/print-params b/code/glue/print-params
new file mode 100755
index 0000000..3df1e19
--- /dev/null
+++ b/code/glue/print-params
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo $@
diff --git a/code/glue/repeat-trace-cmd b/code/glue/repeat-trace-cmd
new file mode 100755
index 0000000..b889e0b
--- /dev/null
+++ b/code/glue/repeat-trace-cmd
Binary files differ
diff --git a/code/glue/repeat-trace-cmd.c b/code/glue/repeat-trace-cmd.c
new file mode 100644
index 0000000..ac35733
--- /dev/null
+++ b/code/glue/repeat-trace-cmd.c
@@ -0,0 +1,27 @@
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ double t, dt;
+ long n;
+
+ if (argc != 4) {
+ fprintf(stderr, "ERROR: wrong argument count\nUse %s total_time time_per_trace \"trace command(s) \"\n", argv[0]);
+ return -1;
+ }
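+
+  // example (illustrative): ./repeat-trace-cmd 10 0.5 "0.5; spikes (0; 1)"
+  // prints the trace command once followed by 19 empty lines (= 20 repetitions in total)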
+
+ if ((sscanf(argv[1], "%lf", &t) != 1) ||
+ (sscanf(argv[2], "%lf", &dt) != 1)) {
+ fprintf(stderr, "failed to read arg 1/2\n");
+ return -1;
+ }
+  // print the full command once
+  printf("%s\n", argv[3]);
+
+  // now print enough newlines (= command repetitions)
+  // TODO: be faster than lame-duck-speed
+  n = (long) (t / dt); // one step is already covered by the printf statement above
+ while (n>1) {
+ printf("\n");
+ n--;
+ }
+}
diff --git a/code/glue/sim-wrapper b/code/glue/sim-wrapper
new file mode 100755
index 0000000..9825409
--- /dev/null
+++ b/code/glue/sim-wrapper
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+if [ ! $# -eq 10 ]; then
+ echo 'wrong parameter count (see ./sim-current for parameter order and add the model (current/if/...) as the first param)' >&2
+ exit 1
+fi
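+
+# example invocation (illustrative file names):
+#   ./sim-wrapper if neurons.in synapses.in spikes.in global.in \
+#     neurons.out synapses.out spikes.out global.out trace.cmd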
+
+# determine the path of the simulation program
+SIM=`dirname $0`/../core/sim-$1
+if [ ! -x $SIM ]; then
+ echo "executable ($SIM) does not exist" >&2
+ exit 1
+fi
+
+# check if one of the input files is executable
+if [ -x $2 -o -x $3 -o -x $4 -o -x $5 -o -x $6 -o -x $7 -o -x $8 -o -x $9 ]; then
+ # yes -> interactive simulation
+ # create the FIFOs to communicate
+ echo Interactive spike program is not NOT IMPLEMENTED
+ exit 1
+else
+ # no -> static simulation
+  $SIM $2 $3 $4 $5 $6 $7 $8 $9 ${10}
+fi
diff --git a/code/matlab/Makefile b/code/matlab/Makefile
new file mode 100644
index 0000000..3546e44
--- /dev/null
+++ b/code/matlab/Makefile
@@ -0,0 +1,10 @@
+.PHONY: all symlinks clean
+
+all: symlinks
+
+symlinks:
+ ls | grep '.m$$' | sed 's/\.m$$//' | xargs -n1 ln -f -s ../glue/exec-matlab
+
+clean:
+ ls | grep '.m$$' | sed 's/\.m$$//' | xargs rm || true
+ rm *~ || true \ No newline at end of file
diff --git a/code/matlab/analye-perfomance.m b/code/matlab/analye-perfomance.m
new file mode 100644
index 0000000..75b399b
--- /dev/null
+++ b/code/matlab/analye-perfomance.m
@@ -0,0 +1,14 @@
+%% load data
+perf = load('performance.out');
+perf = perf > 1.0;
+s = length(perf);
+
+%% compute sliding average
+ws = 250; % window size
+perf_avg = zeros(s-ws,1);
+for i = 1:(s-ws)
+ perf_avg(i,1) = mean(perf(i:(i+ws),1));
+end
+
+%% plot
+plot(perf_avg); \ No newline at end of file
diff --git a/code/matlab/analye-stdp-freq-dep.m b/code/matlab/analye-stdp-freq-dep.m
new file mode 100644
index 0000000..85c4030
--- /dev/null
+++ b/code/matlab/analye-stdp-freq-dep.m
@@ -0,0 +1,12 @@
+
+raw = load('synapse.destilled');
+
+res = [];
+for i=1:(floor(length(raw) / 200)),
+ row = raw(((i*200)-199):((i*200)-99),2)';
+ res(i,:) = [mean(row), var(row)];
+end
+
+for i=1:length(res),
+ fprintf(2,'%d, %f, %f\n', int32(i), res(i,1), res(i,2))
+end \ No newline at end of file
diff --git a/code/matlab/analyze_weight_development.m b/code/matlab/analyze_weight_development.m
new file mode 100644
index 0000000..da34cd4
--- /dev/null
+++ b/code/matlab/analyze_weight_development.m
@@ -0,0 +1,30 @@
+%% -- load the synapse file
+
+syn_raw = load('synapse.out');
+
+%% -- generate mean weights (and some config stuff)
+
+num_syn = 96227;
+num_steps = length(syn_raw) / num_syn;
+types = [ 1*ones(100,1); 2*ones(600,1); 3*ones(100,1); 4*ones(200,1) ];
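+% synapse classes are indexed as 4*type(src) + type(dst) - 4, e.g. a synapse from a type-2 ('B')
+% to a type-3 ('O') neuron is accumulated in column 4*2+3-4 = 7 ('BO', cf. the legend below)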
+
+syn_mean = zeros(num_steps, 16);
+syn_count = zeros(num_steps, 16);
+for i = 1:num_steps
+ ba = (i-1)*num_syn+1;
+ for j = ba:(ba+num_syn-1)
+ l = syn_raw(j,:);
+ src = l(1,2) + 1;
+ dst = l(1,3) + 1;
+ w = l(1,5);
+
+ syn_mean(i, 4*types(src)+types(dst)-4) = syn_mean(i, 4*types(src)+types(dst)-4) + w;
+ syn_count(i, 4*types(src)+types(dst)-4) = syn_count(i, 4*types(src)+types(dst)-4) + 1;
+ end
+end
+syn_mean = syn_mean ./ syn_count;
+
+%% plot it
+
+plot(syn_mean);
+legend('II', 'IB', 'IO', 'IX', 'BI', 'BB', 'BO', 'BX', 'OI', 'OB', 'OO', 'OX', 'XI', 'XB', 'XO', 'XX'); \ No newline at end of file
diff --git a/code/matlab/plot_stdp_param_scout.m b/code/matlab/plot_stdp_param_scout.m
new file mode 100644
index 0000000..2342fd7
--- /dev/null
+++ b/code/matlab/plot_stdp_param_scout.m
@@ -0,0 +1,51 @@
+%% load the raw data from synapses output file
+raw = load('synapse.out');
+
+
+%% get mean and variance from the 1000 neurons
+ns = 999;
+l = floor(length(raw)/ns);
+raw2 = zeros(l,1);
+raw2_var = zeros(l,1);
+for i=1:l,
+ % hint: adapted to read only the first 100 out of 1000 neurons
+ raw2(i,1) = mean(raw((i*ns-ns+1):(i*ns-900), 4), 1);
+ raw2_var(i,1) = var(raw((i*ns-ns+1):(i*ns-900), 4), 1);
+end
+
+%% erase duplicate lines (the silence period of simulation)
+l2 = floor(l/2) - 1;
+raw3 = zeros(l2,1);
+raw3_var = zeros(l2,1);
+for i=0:l2,
+ raw3(i+1,1) = raw2((2*i+1),1);
+ raw3_var(i+1,1) = raw2_var((2*i+1),1);
+end
+
+%% display graphs
+nx = 4;
+nt = 100;
+res = zeros(nx,nx);
+k =1;
+for i=0:(nx-1),
+ for j=0:(nx-1),
+ cur = raw3( ((i*nx + j)*nt + 1):((i*nx + j)*nt + nt - 1), 1);
+ cur_var = raw3_var( ((i*nx + j)*nt + 1):((i*nx + j)*nt + nt - 1), 1);
+ tr = (cur(nt - 1) > 0) + 2 * (sum(cur < 0) > 0);
+ res(i+1,j+1) = tr;
+
+ %if ((mod(i,5) == 0) && (mod(j,5) == 0)),
+ %if (tr == 2),
+ %if (i<=10 && j>10),
+ subplot(nx, nx, k);
+ k = k + 1;
+ %if (tr == 3),
+ plot( [ 1:0.5:(nt/2) ]', [ cur, zeros(nt-1, 1), cur-cur_var, cur+cur_var ]);
+ title( sprintf('i=%d, j=%d', i, j));
+ %end;
+ %axis([-1 (nt+1) -1 1])
+ set(gca,'xtick',0:10:(nt/2))
+ %set(gca,'ytick',[])
+ %end;
+ end;
+end; \ No newline at end of file
diff --git a/code/matlab/random_spikes.m b/code/matlab/random_spikes.m
new file mode 100644
index 0000000..89baf02
--- /dev/null
+++ b/code/matlab/random_spikes.m
@@ -0,0 +1,15 @@
+% the values below should be set externally
+%num_neurons = 2; % number of neurons which should receive spikes
+%spike_freq = 1;  % [Hz] per neuron
+%duration = 2;    % [s]
+
+current = 100; % this should drive my neurons crazy
+
+format long
+
+num = num_neurons * spike_freq * duration;
+res = [ sort(rand(num, 1) .* duration), floor(rand(num, 1) .* num_neurons), ones(num, 1) * current ];
+
+for i=1:length(res),
+ fprintf(2,'%f,%d,%f\n', res(i,1), int32(floor(res(i,2))), res(i,3))
+end
diff --git a/code/matlab/random_topo.m b/code/matlab/random_topo.m
new file mode 100644
index 0000000..73d94ac
--- /dev/null
+++ b/code/matlab/random_topo.m
@@ -0,0 +1,60 @@
+%% config
+% the values below should be set externally
+% num_neurons = 10;
+% connection_density = 0.5;
+% inhibitory_fraction = 0.2;
+
+min_weight = 0;
+max_weight = 0.004;
+fie = - 1;
+fei = 15 / 20;
+fii = 0;
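+% fie, fei and fii scale inhibitory->excitatory, excitatory->inhibitory and
+% inhibitory->inhibitory weights, respectively (rows = source, columns = destination)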
+
+min_delay = 0.001; % [s]
+max_delay = 0.005; % [s]
+
+
+%% init weights
+
+weights = min_weight + (max_weight - min_weight) * rand(num_neurons);
+weights = weights .* (rand(num_neurons) < connection_density);
+
+l = num_neurons - inhibitory_fraction * num_neurons + 1;
+h = num_neurons;
+
+% make incoming and outgoing weight of each inhibitory neuron proportional
+weights(1:(l-1), l:h) = weights(l:h, 1:(l-1))';
+
+% scaling weights for inhibitory connections
+weights(l:h, 1:(l-1)) = fie * weights(l:h, 1:(l-1));
+weights(l:h, l:h) = fii * weights(l:h, l:h);
+weights(1:(l-1), l:h) = fei * weights(1:(l-1), l:h);
+
+
+%% init delays
+
+delay = min_delay + (max_delay - min_delay) * rand(num_neurons);
+
+% make inhibitory delays shorter
+delay(1:(l-1), l:h) = 0.1 * delay(1:(l-1), l:h);
+delay(l:h, 1:(l-1)) = 0.1 * delay(l:h, 1:(l-1));
+
+
+%% print resulting topology config
+
+[ a, b ] = find(weights ~= 0);
+index = find(weights ~= 0);
+
+format long
+
+res = [ a, b, delay(index), weights(index) ];
+
+for i=1:length(res),
+  constness = (max(res(i,1:2)) / num_neurons >= 1 - inhibitory_fraction); % is src or dst an inhibitory neuron?
+ if (constness)
+ fprintf(2,'%d,%d,1,%f,%f\n', int32(res(i,1))-1, int32(res(i,2))-1, res(i,3), res(i,4));
+ else
+ fprintf(2,'%d,%d,0,%f,%f\n', int32(res(i,1))-1, int32(res(i,2))-1, res(i,3), res(i,4));
+ end
+end
+
diff --git a/code/trainer/Makefile b/code/trainer/Makefile
new file mode 100644
index 0000000..1fc48d1
--- /dev/null
+++ b/code/trainer/Makefile
@@ -0,0 +1,29 @@
+# HINT: the paradigm is not to create object files, so that all optimizations can
+# take place even though the code is scattered across many files
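+# e.g. "make reinforce_synapse-dalif" builds the reinforcement trainer against
+# ../core/models/dalif.h (the pattern rule below defines -DMODEL_DALIF)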
+
+CC=g++
+CCFLAGS=-O3 -ggdb
+LDFLAGS=-lpthread -lm
+INCLUDE=-I/home/huwald/src/boost -I../core
+
+BASE_SRC_FILES=../core/model_switch.h ../core/fileutils.h ../core/fileutils.cpp
+
+.PHONY: all clean wordcount
+
+all: reinforce_synapse check_stdp_freq-dep
+
+clean:
+ rm -f *~ trainer massif.*.* reinforce_synapse-* check_stdp_freq-dep-*
+
+.PHONY: reinforce_synapse
+reinforce_synapse: reinforce_synapse-dalif
+reinforce_synapse-%: reinforce_synapse.cpp reinforce_synapse.h ../core/models/%.h $(BASE_SRC_FILES)
+ $(CC) -o $@ $(CCFLAGS) reinforce_synapse.cpp $(INCLUDE) $(LDFLAGS) -DMODEL_`echo $* | tr '[:lower:]' '[:upper:]'`
+
+.PHONY: check_stdp_freq-dep
+check_stdp_freq-dep: check_stdp_freq-dep-dalif
+check_stdp_freq-dep-%: check_stdp_freq-dep.cpp check_stdp_freq-dep.h ../core/models/%.h $(BASE_SRC_FILES)
+ $(CC) -o $@ $(CCFLAGS) check_stdp_freq-dep.cpp $(INCLUDE) $(LDFLAGS) -DMODEL_`echo $* | tr '[:lower:]' '[:upper:]'`
+
+wordcount:
+ wc *h *cpp Makefile
diff --git a/code/trainer/check_stdp_freq-dep.cpp b/code/trainer/check_stdp_freq-dep.cpp
new file mode 100644
index 0000000..f606d81
--- /dev/null
+++ b/code/trainer/check_stdp_freq-dep.cpp
@@ -0,0 +1,160 @@
+#include <stdlib.h>
+#include "fileutils.h"
+#include "math.h"
+#include "unistd.h"
+
+#include "check_stdp_freq-dep.h"
+#include "fileutils.cpp"
+#include "model_switch.h"
+
+using namespace std;
+
+int main(int argc, char **argv) {
+ // check cmd line sanity
+ if (argc != 5) {
+ fprintf(stderr, "Wrong argument count\n\n"
+ "Call format:\n"
+ "%s\n\t"
+ "performance out\n\t"
+ "trace cmd out\n\t"
+ "global out\n\t"
+ "spike out\n\t"
+ "\n"
+	    "Special names allowed:\n\t- (standard input)\n\t0 (/dev/null)\n", argv[0]);
+ return -1;
+ }
+
+ Trainer *t = new Trainer(argc, argv);
+ t->run();
+
+ pthread_join(t->thread_write, NULL);
+}
+
+Trainer::Trainer(int argc, char** argv) {
+ // init vars
+ currentEpoch = 0;
+ epochDuration = 10.0; // [s]
+ neurons = 1000; // number of neurons to send noise to
+ voltage = 0.1; // [V]
+ md = 1.2; // 10.0;
+ mss = 1.1; //1.2;
+ fs = 100; // number of frequencies to try
+  frd = 1.0; // relative difference between two frequencies (f_i+1 = frd * f_i)
+  fad = 0.5; // absolute difference between two frequencies (f_i+1 = fad + f_i)
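+  // e.g. with the defaults above (freq starts at 1.0 Hz in write_spikes, frd=1.0, fad=0.5)
+  // the examined frequencies are 1.0, 1.5, 2.0, ... up to 1.0 + (fs-1)*0.5 = 50.5 Hz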
+
+  // open all file descriptors in an order complementary to the simulator's one
+ // to avoid deadlocks
+ fd_spike_out = fd_magic(argv[4], true);
+ fd_global_out = fd_magic(argv[3], true);
+ fd_performance_out = fd_magic(argv[1], true);
+ fd_trace_out = fd_magic(argv[2], true);
+
+ // create read and write threads
+ pthread_create(&thread_write, NULL, (void* (*)(void*)) &write_spikes, this);
+}
+
+void Trainer::run() {
+  const char *str_trace = "%f; synapse\n";
+
+ // init global sim variables
+ MS_Global msg;
+ msg_init(msg);
+ msg.dopamin_level = 0.0;
+
+ double ta = 0.009821, //0.0088541,
+ la = 0.140249; // 0.126445;
+
+ /*
+ // loop over both vars to examine
+ for (msg.stdp_tau_plus = msg.stdp_tau_minus / md;
+ msg.stdp_tau_plus <= msg.stdp_tau_minus * md;
+ msg.stdp_tau_plus *= mss) {
+ for (msg.stdp_lambda_plus = msg.stdp_lambda_minus / md;
+ msg.stdp_lambda_plus <= msg.stdp_lambda_minus * md;
+ msg.stdp_lambda_plus *= mss) {*/
+ // loop over both vars to examine
+ for (msg.stdp_tau_plus = ta / md;
+ msg.stdp_tau_plus <= ta * md;
+ msg.stdp_tau_plus *= mss) {
+ for (msg.stdp_lambda_plus = la / md;
+ msg.stdp_lambda_plus <= la * md;
+ msg.stdp_lambda_plus *= mss) {
+
+ // print the parameters to the performance output
+ msg_print(msg, fd_performance_out);
+ fprintf(fd_performance_out, "\n");
+
+ // print the global params
+ fprintf(fd_global_out, "%f, ", currentEpoch * epochDuration);
+ msg_print(msg, fd_global_out);
+ fprintf(fd_global_out, "\n");
+
+ // let the simulation proceed
+ fprintf(fd_trace_out, str_trace, epochDuration);
+ currentEpoch++;
+
+ // repeat this 2*n-1 times (n=number of different frequency trials)
+ for (int i=0; i < 2*fs-1; i++) {
+ fprintf(fd_trace_out, "\n");
+ currentEpoch++;
+ }
+ }
+ }
+
+ fclose(fd_trace_out);
+ fclose(fd_global_out);
+}
+
+// ---- send independent Poisson noise w/ increasing frequency ----
+void *write_spikes(Trainer *t) {
+ // calculate how often we have to try all frequencies (=outer loop)
+ // WARN: ignore minor numerical instabilities
+ int max = (int) floor(2.0 * log(t->md) / log(t->mss) ) + 1;
+ max *= max; // there are two nested loops of the same size
+
+  double time = 0.0; // global time (the one sent to the simulator)
+
+  // for each parameter config (set in the main routine)
+ for (int i=0; i<max; i++) {
+
+ double freq = 1.0;
+
+ // examine a set of frequencies
+ for (int j=0; j < t->fs; j++) {
+ // send out the spikes
+ double localtime = 0.0;
+ double nextRefSpike = 0.0;
+ double refFreq = 10.0; // [Hz]
+ int dst = -1;
+ while (localtime < t->epochDuration) {
+ // starting with the second call ...
+ if (dst != -1) {
+ // check if we have to send a spike to the ref neuron
+ if (localtime > nextRefSpike) {
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time + nextRefSpike, 0, t->voltage);
+ nextRefSpike += 1.0 / refFreq;
+ }
+
+ // send spike to the simulator
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time + localtime, dst, t->voltage);
+ }else{
+ }
+
+      localtime -= log(1.0 - drand48()) / (freq * t->neurons); // Poisson distributed spike timing
+ dst = 1 + rand() % (t->neurons - 1); // random neuron (except reference neuron 0)
+ }
+
+ // increase time (twice because of the silence period after each noise period)
+ time = (i * t->fs + j) * 2.0 * t->epochDuration;
+
+ // increase frequency
+ freq *= t->frd;
+ freq += t->fad;
+ }
+ }
+
+ // close fd because fscanf sucks
+ fclose(t->fd_spike_out);
+
+ return NULL;
+}
diff --git a/code/trainer/check_stdp_freq-dep.h b/code/trainer/check_stdp_freq-dep.h
new file mode 100644
index 0000000..cf429b6
--- /dev/null
+++ b/code/trainer/check_stdp_freq-dep.h
@@ -0,0 +1,46 @@
+#ifndef TRAINER_H
+#define TRAINER_H
+
+#include <stdio.h>
+#include <pthread.h>
+#include <map>
+#include <queue>
+#include "boost/tuple/tuple.hpp"
+
+
+using namespace std;
+
+class Trainer {
+ public:
+ FILE *fd_spike_out,
+ *fd_global_out,
+ *fd_trace_out,
+ *fd_performance_out;
+
+ // init stuff
+ Trainer(int argc, char** argv);
+
+ // main routine
+ void run();
+
+ // state vars
+ long currentEpoch;
+
+ // thread related
+ pthread_t thread_write;
+
+ // configuration
+ double md; // multiplicative difference (>1)
+ double mss; // multiplicative step size (>1)
+  double fs, frd, fad; // number of frequency steps, relative and absolute step size
+ double epochDuration;
+ double voltage; // per outgoing (random) spike
+ long neurons;
+};
+
+// separate threads to read/write spikes are necessary because reading and
+// writing to these descriptors could block and thus cause a deadlock
+void *read_spikes(Trainer *t);
+void *write_spikes(Trainer *t);
+
+#endif // TRAINER_H
diff --git a/code/trainer/mem1.cpp b/code/trainer/mem1.cpp
new file mode 100644
index 0000000..3b522b4
--- /dev/null
+++ b/code/trainer/mem1.cpp
@@ -0,0 +1,412 @@
+#include <stdlib.h>
+#include "fileutils.h"
+#include "math.h"
+
+#include "mem1.h"
+#include "fileutils.cpp"
+#include "model_switch.h"
+
+using namespace std;
+
+int main(int argc, char **argv) {
+ // check cmd line sanity
+ if (argc != 7) {
+ fprintf(stderr, "Wrong argument count\n\n"
+ "Call format:\n"
+ "%s\n\t"
+ "performance out\n\t"
+ "trace cmd out\n\t"
+ "global out\n\t"
+ "global in\n\t"
+ "spike out\n\t"
+ "spike in\n\t"
+ "\n"
+	    "Special names allowed:\n\t- (standard input)\n\t0 (/dev/null)\n", argv[0]);
+ return -1;
+ }
+
+ Trainer *t = new Trainer(argc, argv);
+ t->run();
+}
+
+//===== Initialisation =====================================
+
+Trainer::Trainer(int argc, char** argv) {
+ initConfig();
+ initState();
+ initGroups(); // determine input and output neurons
+
+  initFiles(argv);
+ initThreads();
+}
+
+void Trainer::initConfig() {
+ neurons = 1000;
+ neuronsPerSymbol = 200;
+ noiseFreq = 10.0; // [Hz]
+ noiseVoltage = 0.03; // [V]
+ reward = 0.1;
+
+ epochDuration = 1.0; // [s]
+ numTrials = 1000;
+ numSymbols = 2;
+ //readoutDelay = 1;
+ refractoryPeriods = 3;
+}
+
+void Trainer::initState() {
+ dopamin_level = 0.0;
+  currentTrial = currentEpoch = 0;
+ state = 0;
+
+ msg_init(msg);
+ msg.dopamin_level = dopamin_level;
+
+ groupFreq.resize(numSymbols);
+ for (int i=0; i<numSymbols; i++) {
+ groupFreq[i] = 0;
+ }
+}
+
+void Trainer::initGroups() {
+  ioNeurons.resize(numSymbols);
+  for (int i=0; i<numSymbols; i++) {
+    ioNeurons[i] = new set<int>();
+    for (int j=0; j<neuronsPerSymbol;) {
+      int n = rand() % neurons;
+      if (!ioNeurons[i]->count(n)) {
+        ioNeurons[i]->insert(n);
+        j++;
+      }
+    }
+  }
+}
+
+void Trainer::initFiles(char **argv) {
+  // open all file descriptors in an order complementary to the simulator's one
+  // to avoid deadlocks
+ fd_spike_in = fd_magic(argv[6], false);
+ fd_global_in = fd_magic(argv[4], false);
+ fd_spike_out = fd_magic(argv[5], true);
+ fd_global_out = fd_magic(argv[3], true);
+ fd_performance_out = fd_magic(argv[1], true);
+ fd_trace_out = fd_magic(argv[2], true);
+}
+
+void Trainer::initThreads() {
+ // init locks
+ pthread_mutex_init(&incomingSpikeLock, NULL);
+  pthread_mutex_init(&writerLock, NULL);
+  pthread_mutex_init(&outgoingSpikeLock, NULL);
+  pthread_cond_init(&outgoingSpikeCond, NULL);
+
+ // create read and write threads
+ pthread_create(&thread_read, NULL, (void* (*)(void*)) &read_spikes, this);
+ pthread_create(&thread_write, NULL, (void* (*)(void*)) &write_spikes, this);
+}
+
+//===== Core trainer ====================================
+
+void Trainer::pushGlobal(double time) {
+ fprintf(fd_global_out, "%f, ", time);
+ msg_print(msg, fd_global_out);
+ fprintf(fd_global_out, "\n");
+ fflush(fd_global_out);
+}
+
+// HINT: time is a delta (relative) time!
+void Trainer::pushTrace(double time) {
+ const char *str_trace = "%f; spikes (0; 1); global; neuron (0; 1); synapse (0; 1)\n";
+ fprintf(fd_trace_out, str_trace, time);
+ fflush(fd_trace_out);
+}
+
+bool Trainer::readGlobal() {
+ double _foo_dbl;
+ char str_raw[128],
+ str_msg[128];
+ str_raw[0] = 0;
+
+ // read a single line
+ if (fgets((char*) str_raw, 128, fd_global_in) == NULL) {
+ fprintf(stderr, "ERROR: global status file descriptor from simulator closed unexpectedly\n");
+ return false;
+ }
+
+ // parse it
+ if ((sscanf((char*) str_raw, "%lf, %[^\n]\n", &_foo_dbl, (char*) str_msg) != 2)
+ || (!msg_parse(msg, (char*) str_msg))) {
+ fprintf(stderr, "ERROR: reading global status from simulator failed\n\t\"%s\"\n", (char*) str_raw);
+ return false;
+ }
+
+ return true;
+}
+
+void Trainer::binIncomingSpikes() {
+ // reset bins
+ for (int i=0; i<groupFreq.size(); i++)
+ groupFreq[i] = 0;
+
+ // lock spike queue
+  pthread_yield(); // give the spike reading thread a chance to finish ... this is no more than an ugly half-fix for the broken parallelism!
+ pthread_mutex_lock(&incomingSpikeLock);
+
+ // read all spikes in the correct time window
+ while ((!incomingSpikes.empty()) && (incomingSpikes.front().get<0>() <= currentEpoch * epochDuration)) {
+ // drop event out of queue
+ SpikeEvent se = incomingSpikes.front();
+ double time = se.get<0>();
+ int neuron = se.get<1>();
+ incomingSpikes.pop();
+
+ // check if it belongs to the previous bin (and ignore it if this is the case)
+ if (time < (currentEpoch - 1) * epochDuration) {
+      fprintf(stderr, "WARN: spike reading thread too slow; unprocessed spike of the past discovered\n%f\t%f\t%ld\t%f\n",
+	      time, (double) (currentEpoch - 1) * epochDuration, currentEpoch, epochDuration);
+ continue;
+ }
+
+ // check membership in each group and increase group frequency
+ for (int i=0; i < ioNeurons.size(); i++)
+      if (ioNeurons[i]->count(neuron))
+ groupFreq[i]++;
+ }
+
+ pthread_mutex_unlock(&incomingSpikeLock);
+}
+
+void Trainer::addBaselineSpikes() {
+}
+
+void Trainer::addSymbolSpikes() {
+
+}
+
+
+double Trainer::calcSignalStrength() {
+ if (symbolHist.empty()) {
+ fprintf(stderr, "Writer thread is too slow; missed the current symbol\n");
+ exit(-1);
+ }
+
+  int fs = 0, fn = 0; // freq signal, freq noise
+  for (int i=0; i<numSymbols; i++) {
+    if (i == symbolHist.front()) {
+      fs = groupFreq[i];
+    }else{
+      fn = (groupFreq[i] > fn) ? groupFreq[i] : fn;
+ }
+ }
+
+ if (fn == 0) {
+ return fs * INFINITY;
+ }else{
+ return ((double) fs) / fn;
+ }
+}
+
+void Trainer::run() {
+ // rough description of this function
+ // . start an epoch
+  // . wait for its end
+  // . process incoming spikes (binning)
+ // . select if a reward takes place
+ // . print reward value
+ // . send out the reward signal
+
+ // send out the full trace command once (later it will be repeated by sending newline)
+ pushTrace(epochDuration);
+
+  // send the first two global states (at t=0 and t=1.5 [bintime]) to allow the simulation to
+  // be initialized (before the causality of the loop below is met)
+ pushGlobal(0.0);
+ msg_process(msg, 1.5 * epochDuration);
+ dopamin_level = msg.dopamin_level;
+ pushGlobal(1.5 * epochDuration);
+
+ // loop until the experiment is done
+ for (; currentEpoch * epochDuration < entireDuration; currentEpoch++) {
+
+ // send a new trace command (do it as early as possible although it is
+    // only executed after the new global is sent out at the bottom of this loop)
+ if ((currentEpoch + 2) * epochDuration < entireDuration) {
+ // repeat the previous trace command
+ fprintf(fd_trace_out, "\n");
+ fflush(fd_trace_out);
+ }else{
+ pushTrace(entireDuration - (currentEpoch + 1) * epochDuration);
+ }
+
+ // send new spikes
+ pthread_mutex_lock(&outgoingSpikeLock);
+ addBaselineSpikes();
+ if (state == 0) addSymbolSpikes();
+ pthread_cond_signal(&outgoingSpikeCond);
+ pthread_mutex_unlock(&outgoingSpikeLock);
+
+ // wait for the end of the epoch (by reading the global state resulting from it)
+ if (!readGlobal())
+ break;
+
+    // process incoming spikes (binning) of the previous epoch
+ if (currentEpoch > 0)
+ binIncomingSpikes();
+
+ // proceed the global state to keep it in sync with the simulator's global state
+    // the local dopamine level is kept separately and aged only one epochDuration to
+    // avoid oscillation effects in the dopamine level
+ msg_process(msg, 1.5 * epochDuration);
+ dopamin_level *= exp( - epochDuration / msg.dopamin_tau );
+
+    // do various actions depending on state (thus lock mutex of the writer thread)
+
+
+    switch (state) {
+    case 0: // a signal is sent
+      state++;
+      // fall through: evaluate the response right away
+
+    case 1: { // we are waiting for the signal to be reproduced
+      // get fraction of the current symbol's freq compared to the strongest wrong symbol
+      double ss = calcSignalStrength();
+
+      // check if the reward condition is met
+      if (ss > 1) {
+	dopamin_level += reward;
+      }else{
+	state++; // lost signal -> next state (and finally a new trial)
+	currentSymbol = rand() % numSymbols; // determine new symbol to display
+      }
+
+      break;
+    }
+
+    default: // the signal has been lost (in the last round); refractory time
+      ++state %= refractoryPeriods;
+    }
+
+ /*if ((currentEpoch > 1) && ((*neuronFreq[0])[0] > 0) && ((*neuronFreq[1])[1] > 0)) {
+ dopamin_level += da_single_reward;
+ fprintf(fd_performance_out, "+");
+ }else{
+ fprintf(fd_performance_out, "-");
+ }*/
+
+ // performance and "debug" output
+ if (currentEpoch > 1) {
+ //fprintf(fd_performance_out, "\n");
+      fprintf(fd_performance_out, "\t%f\t%d\t%d\n", dopamin_level, groupFreq[0], groupFreq[1]);
+ }else{
+      // fake output as actual data is not available, yet
+ fprintf(fd_performance_out, "\t%f\t%d\t%d\n", dopamin_level, (int) 0, (int) 0);
+ }
+
+ // set the new DA level
+ msg.dopamin_level = dopamin_level;
+
+ // print new global state
+ // (do this even if there has been no evaluation of the performance yet,
+ // because it is neccessary for the simulator to proceed)
+ pushGlobal(((double) currentEpoch + 2.5) * epochDuration);
+ }
+
+ fclose(fd_trace_out);
+
+ // terminate child threads
+ pthread_cancel(thread_read);
+ pthread_cancel(thread_write);
+}
+
+void *read_spikes(Trainer *t) {
+ double lastSpike = -INFINITY; // used to check if the spikes are coming in order
+
+ // read spikes until eternity
+ while (!feof(t->fd_spike_in)) {
+ // read one line from stdin (blocking)
+ char buf[128];
+ if (fgets((char*) buf, 128, t->fd_spike_in) == NULL) continue; // this should stop the loop because of EOF
+
+ // parse the input
+ double time, current;
+ int neuron;
+ switch (sscanf((char*) buf, "%lf, %d, %lf\n", &time, &neuron, &current)) {
+ case 3:
+ // format is ok, continue
+ break;
+ default:
+ // format is wrong, stop
+      fprintf(stderr, "ERROR: malformed incoming spike:\n\t%s\n", buf);
+ return NULL;
+ }
+
+ if (lastSpike > time) {
+ fprintf(stderr, "WARN: out of order spike detected (coming from simulator)\n\t%f\t%d\n", time, neuron);
+ continue;
+ }
+
+ lastSpike = time;
+
+ // add the spike to the queue of spikes
+ pthread_mutex_lock(&(t->incomingSpikeLock));
+ t->incomingSpikes.push(boost::make_tuple(time, neuron, current));
+ pthread_mutex_unlock(&(t->incomingSpikeLock));
+ }
+
+ // we shouldn't reach this point in a non-error case
+ fprintf(stderr, "ERROR: EOF in incoming spike stream\n");
+  // TODO: kill entire program
+ return NULL;
+}
+
+void *write_spikes(Trainer *t) {
+ // at the moment: generate noise until the file descriptor blocks
+ double time = 0.0;
+
+ // PAR HINT:
+  // loop until exactly one spike after the entire duration is sent out
+ // this will block on full buffer on the file descriptor and thus keep
+ // the thread busy early enough
+
+
+ /* // ---- send 100% dependent spike train ---
+ time = 0.005;
+ while (time <= t->entireDuration) {
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, 0, 1.0);
+ time += 0.012;
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, 1, 1.0);
+ time += 1.0;
+ }*/
+
+
+ /* // ---- send indepenent poisson noise ----
+ while (time <= t->entireDuration) {
+ // calc timing, intensity and destination of the spike
+ // HINT:
+ // * log(...) is negative
+ // * drand48() returns something in [0,1), to avoid log(0) we transform it to (0,1]
+ time -= log(1.0 - drand48()) / (t->freq * t->neurons);
+ int dst = rand() % t->neurons;
+ double current = t->voltage;
+
+ // send it to the simulator
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, dst, current);
+ }*/
+
+  // ---- send independent Poisson noise w/ increasing frequency ----
+ double blafoo = 0;
+ t->freq = 1.0;
+ while (time <= t->entireDuration) {
+ if (time - blafoo > 100.0) {
+ blafoo += 200.0;
+ t->freq += 1.0;
+ time += 100.0; // time jump to let ET recover to zero
+ }
+ // calc timing, intensity and destination of the spike
+ // HINT:
+ // * log(...) is negative
+ // * drand48() returns something in [0,1), to avoid log(0) we transform it to (0,1]
+ time -= log(1.0 - drand48()) / (t->freq * t->neurons);
+ int dst = rand() % t->neurons;
+ double current = t->voltage;
+
+ // send it to the simulator
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, dst, current);
+ }
+
+ // close fd because fscanf sucks
+  fclose(t->fd_spike_out);
+
+  return NULL;
+}
diff --git a/code/trainer/mem1.h b/code/trainer/mem1.h
new file mode 100644
index 0000000..31fc2b0
--- /dev/null
+++ b/code/trainer/mem1.h
@@ -0,0 +1,72 @@
+#ifndef TRAINER_H
+#define TRAINER_H
+
+#include <stdio.h>
+#include <pthread.h>
+#include <map>
+#include <queue>
+#include <set>
+#include <vector>
+#include "boost/tuple/tuple.hpp"
+
+
+using namespace std;
+
+class Trainer {
+ public:
+ FILE *fd_spike_in,
+ *fd_spike_out,
+ *fd_global_out,
+ *fd_global_in,
+ *fd_trace_out,
+ *fd_performance_out;
+
+ // init stuff
+ Trainer(int argc, char** argv);
+ void initConfig();
+ void initState();
+ void initGroups();
+  void initFiles(char **argv);
+ void initThreads();
+
+ // main routine
+  void run();
+  void pushGlobal(double time);
+  void pushTrace(double time);
+  bool readGlobal();
+  void binIncomingSpikes();
+  void addBaselineSpikes();
+  void addSymbolSpikes();
+  double calcSignalStrength();
+
+ // state vars
+  long currentTrial, currentEpoch;
+ vector<int> groupFreq; // spikes fired during epoch for each symbol (=group of neurons)
+  vector< set<int>* > ioNeurons; // a set of neurons for each symbol to which the symbol is written and from which it is read later
+  queue< int > symbolHist; // stores the symbols displayed; written by thread_write, read by main thread
+ int currentSymbol;
+ double dopamin_level;
+ int state;
+ MS_Global msg;
+
+ // synchronisation vars
+  typedef boost::tuple<double, int, double> SpikeEvent; // <what time, which neuron, current>
+ queue<SpikeEvent> incomingSpikes;
+ // TODO: outgoingSpikes;
+  pthread_mutex_t incomingSpikeLock, writerLock, outgoingSpikeLock;
+  pthread_cond_t outgoingSpikeCond; // wakes the writer up when new outgoing spikes are due
+ pthread_t thread_read, thread_write;
+
+ // configuration
+ // network
+ long neurons; // total number of neurons
+ long neuronsPerSymbol;
+ double noiseFreq; // of poisson noise (per neuron)
+ double noiseVoltage; // per noise spike
+ double reward; // per succesful trial
+
+ // learning task
+  double epochDuration; // a trial consists of several epochs
+  double entireDuration; // total simulated time [s]
+ long numTrials;
+ int numSymbols; // number of different things to remember
+ //int readoutDelay; // number of epochs between symbol-write and symbol-read-epoch
+ int refractoryPeriods; // how many epochs to wait after a trial finished until we start with a new trial
+};
+
+// separate threads to read/write spikes are necessary because reading and
+// writing to these descriptors could block and thus cause a deadlock
+void *read_spikes(Trainer *t);
+void *write_spikes(Trainer *t);
+
+#endif // TRAINER_H
diff --git a/code/trainer/reinforce_synapse.cpp b/code/trainer/reinforce_synapse.cpp
new file mode 100644
index 0000000..bf6fc7f
--- /dev/null
+++ b/code/trainer/reinforce_synapse.cpp
@@ -0,0 +1,302 @@
+#include <stdlib.h>
+#include "fileutils.h"
+#include "math.h"
+
+#include "reinforce_synapse.h"
+#include "fileutils.cpp"
+#include "model_switch.h"
+
+using namespace std;
+
+int main(int argc, char **argv) {
+ // check cmd line sanity
+ if (argc != 7) {
+ fprintf(stderr, "Wrong argument count\n\n"
+ "Call format:\n"
+ "%s\n\t"
+ "performance out\n\t"
+ "trace cmd out\n\t"
+ "global out\n\t"
+ "global in\n\t"
+ "spike out\n\t"
+ "spike in\n\t"
+ "\n"
+	    "Special names allowed:\n\t- (standard input)\n\t0 (/dev/null)\n", argv[0]);
+ return -1;
+ }
+
+ Trainer *t = new Trainer(argc, argv);
+ t->run();
+ // TODO: finalize
+}
+
+Trainer::Trainer(int argc, char** argv) {
+ // init vars
+ currentEpoch = 0;
+ dopamin_level = 0.0;
+
+ epochDuration = 0.01; // [s]
+ //epochDuration = 1.0; // [s]
+ entireDuration = 20000.0; // [s]
+ neurons = 2; // number of neurons to send noise to
+ freq = 1.0; // [Hz] per Neuron
+ voltage = 0.1; // [V]
+ da_single_reward = 0.01;
+
+ neuronFreq[0] = (map<int, int>*) NULL;
+ neuronFreq[1] = (map<int, int>*) NULL;
+
+  // open all file descriptors in an order complementary to the simulator's one
+ // to avoid deadlocks
+ fd_spike_in = fd_magic(argv[6], false);
+ fd_global_in = fd_magic(argv[4], false);
+ fd_spike_out = fd_magic(argv[5], true);
+ fd_global_out = fd_magic(argv[3], true);
+ fd_performance_out = fd_magic(argv[1], true);
+ fd_trace_out = fd_magic(argv[2], true);
+
+ // init locks
+ pthread_mutex_init(&incomingSpikeLock, NULL);
+
+ // create read and write threads
+ pthread_create(&thread_read, NULL, (void* (*)(void*)) &read_spikes, this);
+ pthread_create(&thread_write, NULL, (void* (*)(void*)) &write_spikes, this);
+}
+
+void Trainer::run() {
+ // start an epoch
+  // wait for its end
+  // process incoming spikes (binning)
+  // select if a reward takes place
+  // print reward value (TODO: into a separate, externally given file descriptor)
+ // send out the reward signal
+
+  const char *str_trace = "%f; spikes (0; 1); global; neuron (0; 1); synapse (0; 1)\n";
+
+  // send out the full trace command once (later it will be repeated by sending a newline)
+ fprintf(fd_trace_out, str_trace, epochDuration);
+ fflush(fd_trace_out);
+
+  // send the first two global states (at t=0 and t=1.5 [bintime]) to allow the simulation to
+  // be initialized (before the causality of the loop below is met)
+ MS_Global msg;
+ msg_init(msg);
+ msg.dopamin_level = dopamin_level;
+
+ // set the tau-levels like in Izhi's network
+ msg.stdp_tau_minus = 1.5 * msg.stdp_tau_plus;
+ msg.stdp_lambda_plus = msg.stdp_lambda_minus;
+
+ fprintf(fd_global_out, "0.0, ");
+ msg_print(msg, fd_global_out);
+ fprintf(fd_global_out, "\n");
+
+ msg_process(msg, 1.5 * epochDuration);
+ dopamin_level = msg.dopamin_level;
+
+
+ fprintf(fd_global_out, "%f, ", 1.5 * epochDuration);
+ msg_print(msg, fd_global_out);
+ fprintf(fd_global_out, "\n");
+
+ fflush(fd_global_out);
+
+ // loop until the experiment is done
+ for (; currentEpoch * epochDuration < entireDuration; currentEpoch++) {
+ // send a new trace command (do it as early as possible although it is
+ // only executed after the new global is send out at the bottom of this loop)
+ if ((currentEpoch + 2) * epochDuration < entireDuration) {
+ // repeat the previous trace command
+ fprintf(fd_trace_out, "\n");
+ }else{
+ fprintf(fd_trace_out, str_trace, entireDuration - (currentEpoch + 1) * epochDuration);
+ }
+ fflush(fd_trace_out);
+
+ // wait for the end of the epoch (by reading the global state resulting from it)
+ char str_raw[128], str_msg[128]; str_raw[0] = 0;
+ double _foo_dbl;
+ if (fgets((char*) str_raw, 128, fd_global_in) == NULL) {
+ fprintf(stderr, "ERROR: global status file descriptor from simulator closed unexpectedly\n");
+ break;
+ }
+ if ((sscanf((char*) str_raw, "%lf, %[^\n]\n", &_foo_dbl, (char*) str_msg) != 2)
+ || (!msg_parse(msg, (char*) str_msg))) {
+ fprintf(stderr, "ERROR: reading global status from simulator failed\n\t\"%s\"\n", (char*) str_raw);
+ break;
+ }
+
+    // process incoming spikes (binning) of the previous epoch
+ if (currentEpoch > 0) {
+ // shift the bins
+ if (neuronFreq[0]) {
+ delete neuronFreq[0];
+ neuronFreq[0] = neuronFreq[1];
+ }else{
+ neuronFreq[0] = new map<int, int>();
+ }
+ neuronFreq[1] = new map<int, int>();
+
+ // read all spikes in the correct time window
+ pthread_mutex_lock(&incomingSpikeLock);
+ while ((!incomingSpikes.empty()) && (incomingSpikes.front().get<0>() <= currentEpoch * epochDuration)) {
+ // drop event out of queue
+ SpikeEvent se = incomingSpikes.front();
+ double time = se.get<0>();
+ int neuron = se.get<1>();
+ incomingSpikes.pop();
+
+ // check if it belongs to the previous bin (and ignore it if this is the case)
+ if (time < (currentEpoch - 1) * epochDuration) {
+	  fprintf(stderr, "WARN: spike reading thread too slow; unprocessed spike of the past discovered\n%f\t%f\t%ld\t%f\n", time, (double) (currentEpoch - 1) * epochDuration, currentEpoch, epochDuration);
+ continue;
+ }
+
+ // increment the frequency counter (relies on int being default constructable to value 0)
+ (*neuronFreq[1])[neuron]++;
+ }
+
+ pthread_mutex_unlock(&incomingSpikeLock);
+ }
+
+ // proceed the global state to keep it in sync with the simulator's global state
+    // the local dopamine level is kept separately and aged only one epochDuration to
+    // avoid oscillation effects in the dopamine level
+ msg_process(msg, 1.5 * epochDuration);
+ dopamin_level *= exp( - epochDuration / msg.dopamin_tau );
+
+ // select if the reward takes place
+ if ((currentEpoch > 1) && ((*neuronFreq[0])[0] > 0) && ((*neuronFreq[1])[1] > 0)) {
+ dopamin_level += da_single_reward;
+ fprintf(fd_performance_out, "+");
+ }else{
+ fprintf(fd_performance_out, "-");
+ }
+
+ if (currentEpoch > 1) {
+ //fprintf(fd_performance_out, "\n");
+ fprintf(fd_performance_out, "\t%f\t%d\t%d\n", dopamin_level, (*neuronFreq[0])[0], (*neuronFreq[1])[1]);
+ }else{
+      // fake output as actual data is not available, yet
+ fprintf(fd_performance_out, "\t%f\t%d\t%d\n", dopamin_level, (int) 0, (int) 0);
+ }
+
+ // set the new DA level
+ msg.dopamin_level = dopamin_level;
+
+ // print new global state
+ // (do this even if there has been no evaluation of the performance yet,
+ // because it is neccessary for the simulator to proceed)
+
+ fprintf(fd_global_out, "%f, ", ((double) currentEpoch + 2.5) * epochDuration);
+ msg_print(msg, fd_global_out);
+ fprintf(fd_global_out, "\n");
+ fflush(fd_global_out);
+ }
+
+ fclose(fd_trace_out);
+
+ // terminate child threads
+ pthread_cancel(thread_read);
+ pthread_cancel(thread_write);
+}
+
+void *read_spikes(Trainer *t) {
+ double lastSpike = -INFINITY; // used to check if the spikes are coming in order
+
+ // read spikes until eternity
+ while (!feof(t->fd_spike_in)) {
+ // read one line from stdin (blocking)
+ char buf[128];
+ if (fgets((char*) buf, 128, t->fd_spike_in) == NULL) continue; // this should stop the loop because of EOF
+
+ // parse the input
+ double time, current;
+ int neuron;
+ switch (sscanf((char*) buf, "%lf, %d, %lf\n", &time, &neuron, &current)) {
+ case 3:
+ // format is ok, continue
+ break;
+ default:
+ // format is wrong, stop
+      fprintf(stderr, "ERROR: malformed incoming spike:\n\t%s\n", buf);
+ return NULL;
+ }
+
+ if (lastSpike > time) {
+ fprintf(stderr, "WARN: out of order spike detected (coming from simulator)\n\t%f\t%d\n", time, neuron);
+ continue;
+ }
+
+ lastSpike = time;
+
+ // add the spike to the queue of spikes
+ pthread_mutex_lock(&(t->incomingSpikeLock));
+ t->incomingSpikes.push(boost::make_tuple(time, neuron, current));
+ pthread_mutex_unlock(&(t->incomingSpikeLock));
+ }
+
+ // we shouldn't reach this point in a non-error case
+ fprintf(stderr, "ERROR: EOF in incoming spike stream\n");
+  // TODO: kill entire program
+ return NULL;
+}
+
+void *write_spikes(Trainer *t) {
+ // at the moment: generate noise until the file descriptor blocks
+ double time = 0.0;
+
+ // PAR HINT:
+  // loop until exactly one spike after the entire duration is sent out
+ // this will block on full buffer on the file descriptor and thus keep
+ // the thread busy early enough
+
+
+ /* // ---- send 100% dependent spike train ---
+ time = 0.005;
+ while (time <= t->entireDuration) {
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, 0, 1.0);
+ time += 0.012;
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, 1, 1.0);
+ time += 1.0;
+ }*/
+
+
+  // ---- send independent Poisson noise ----
+ while (time <= t->entireDuration) {
+ // calc timing, intensity and destination of the spike
+ // HINT:
+ // * log(...) is negative
+ // * drand48() returns something in [0,1), to avoid log(0) we transform it to (0,1]
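+    //  * the resulting inter-spike intervals are exponentially distributed with mean
+    //    1/(freq * neurons), i.e. the whole population fires at freq*neurons Hz on average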
+ time -= log(1.0 - drand48()) / (t->freq * t->neurons);
+ int dst = rand() % t->neurons;
+ double current = t->voltage;
+
+ // send it to the simulator
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, dst, current);
+ }
+
+  /*// ---- send independent Poisson noise w/ increasing frequency ----
+ double blafoo = 0;
+ t->freq = 1.0;
+ while (time <= t->entireDuration) {
+ if (time - blafoo > 100.0) {
+ blafoo += 200.0;
+ t->freq += 1.0;
+ time += 100.0; // time jump to let ET recover to zero
+ }
+ // calc timing, intensity and destination of the spike
+ // HINT:
+ // * log(...) is negative
+ // * drand48() returns something in [0,1), to avoid log(0) we transform it to (0,1]
+ time -= log(1.0 - drand48()) / (t->freq * t->neurons);
+ int dst = rand() % t->neurons;
+ double current = t->voltage;
+
+ // send it to the simulator
+ fprintf(t->fd_spike_out, "%f, %d, %f\n", time, dst, current);
+ }*/
+
+ // close fd because fscanf sucks
+ fclose(t->fd_spike_out);
+}
diff --git a/code/trainer/reinforce_synapse.h b/code/trainer/reinforce_synapse.h
new file mode 100644
index 0000000..46b0083
--- /dev/null
+++ b/code/trainer/reinforce_synapse.h
@@ -0,0 +1,55 @@
+#ifndef TRAINER_H
+#define TRAINER_H
+
+#include <stdio.h>
+#include <pthread.h>
+#include <map>
+#include <queue>
+#include "boost/tuple/tuple.hpp"
+
+
+using namespace std;
+
+class Trainer {
+ public:
+ FILE *fd_spike_in,
+ *fd_spike_out,
+ *fd_global_out,
+ *fd_global_in,
+ *fd_trace_out,
+ *fd_performance_out;
+
+ // init stuff
+ Trainer(int argc, char** argv);
+
+ // main routine
+ void run();
+
+ // state vars
+ long currentEpoch;
+ map<int, int> *neuronFreq[2]; // stores if a surveilled neuron fired during the current or last epoch
+ double dopamin_level;
+
+ // synchronisation vars
+  typedef boost::tuple<double, int, double> SpikeEvent; // <what time, which neuron, current>
+ queue<SpikeEvent> incomingSpikes;
+ // TODO: outgoingSpikes;
+ pthread_mutex_t incomingSpikeLock;
+  // TODO: , outgoingSpikeLock; (including a condition for the writer to wake up on if a previously empty queue has been filled)
+ pthread_t thread_read, thread_write;
+
+ // configuration
+ double epochDuration;
+ double entireDuration;
+ double freq; // of outgoing noise per neuron
+ double voltage; // per outgoing (random) spike
+ long neurons;
+ double da_single_reward;
+};
+
+// separate threads to read/write spikes are necessary because reading and
+// writing to these descriptors could block and thus cause a deadlock
+void *read_spikes(Trainer *t);
+void *write_spikes(Trainer *t);
+
+#endif // TRAINER_H
diff --git a/code/trainer/test.cpp b/code/trainer/test.cpp
new file mode 100644
index 0000000..ceb2484
--- /dev/null
+++ b/code/trainer/test.cpp
@@ -0,0 +1,13 @@
+#include <stdio.h>
+#include <math.h>
+
+int main() {
+  printf("aaa\n");
+ while (true);/*
+ double d=0;
+ printf("aa %lf\n", d);
+ d = 0;
+ scanf("%lf\n", &d);
+ printf("%lf\n", d);*/
+ return 0;
+}
diff --git a/model_input/Makefile b/model_input/Makefile
new file mode 100644
index 0000000..11358d6
--- /dev/null
+++ b/model_input/Makefile
@@ -0,0 +1,5 @@
+.PHONY: all
+all:
+ cd topology && make
+ cd neurons && make
+ cd spikes && make
diff --git a/model_input/global/if/default b/model_input/global/if/default
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/model_input/global/if/default
diff --git a/model_input/neurons/Makefile b/model_input/neurons/Makefile
new file mode 100644
index 0000000..8c205d2
--- /dev/null
+++ b/model_input/neurons/Makefile
@@ -0,0 +1,11 @@
+.PHONY: all
+all: if/1000_nocharge
+
+if/%_nocharge :
+ rm -f $@
+ I=0; while [ $* -gt $$I ]; do echo 0 >> $@; I=$$(( $$I + 1 )); done
+
+dalif/%_nocharge :
+ rm -f $@
+ I=0; while [ $* -gt $$I ]; do echo "-0.1" >> $@; I=$$(( $$I + 1 )); done
+
diff --git a/model_input/neurons/dalif/1000_randomcharge b/model_input/neurons/dalif/1000_randomcharge
new file mode 100644
index 0000000..5434321
--- /dev/null
+++ b/model_input/neurons/dalif/1000_randomcharge
@@ -0,0 +1,1000 @@
+-0.112337475052004
+-0.106984275736483
+-0.120614725321125
+-0.0945724712156905
+-0.117552402339211
+-0.0940349780804368
+-0.0908202467042814
+-0.0763767327390514
+-0.081482628943065
+-0.118019736786309
+-0.0905595591615099
+-0.129948740321092
+-0.0954988516322373
+-0.118117335986964
+-0.112298646727192
+-0.0924335678061119
+-0.0826642166012049
+-0.120933742600636
+-0.115080064522262
+-0.0878504804584805
+-0.110208965012337
+-0.0828085450605758
+-0.0912691185549528
+-0.0820284859413467
+-0.105977253005089
+-0.102859851550885
+-0.126839745539549
+-0.103041568525622
+-0.0765605714449842
+-0.104672883875329
+-0.0922791138389488
+-0.0790237603337049
+-0.0925488935068297
+-0.0915890786661748
+-0.127752942402851
+-0.0844286746230896
+-0.123303999994824
+-0.0937403000611721
+-0.0975396443939137
+-0.104901793173932
+-0.108408704950057
+-0.0770512464833097
+-0.0802011817984507
+-0.114421939964397
+-0.0928540954083121
+-0.116269716583547
+-0.0908240262096214
+-0.0860206190607903
+-0.117471814928627
+-0.113498748709815
+-0.121975066456992
+-0.0793840845820554
+-0.117358616591246
+-0.0823141302771159
+-0.0736201661091393
+-0.129689895588451
+-0.125716146028101
+-0.0836284236130641
+-0.106056879400539
+-0.0852789401419077
+-0.0922841709547836
+-0.122752834237476
+-0.103072121368505
+-0.120161973453213
+-0.0752215759279078
+-0.0830331745170861
+-0.122423708591514
+-0.0891065712720663
+-0.111030477694797
+-0.086407319611629
+-0.0842545683487826
+-0.127626520259351
+-0.0705894491056332
+-0.11364344422788
+-0.0737746555610239
+-0.105120908344511
+-0.0724056205426786
+-0.0726591031012176
+-0.122073342198108
+-0.0940425393548443
+-0.109002958621633
+-0.102459000042007
+-0.0716331407184101
+-0.121980874942281
+-0.123048129745462
+-0.115982687597334
+-0.108337939783352
+-0.106144400773067
+-0.112272663609342
+-0.11906377310114
+-0.113610439886668
+-0.126769262722255
+-0.108811216688223
+-0.0999944411415152
+-0.116116185622591
+-0.120567070905757
+-0.0980439516815432
+-0.115831897151264
+-0.109642309746075
+-0.0707456077006713
+-0.0730374207725772
+-0.0844108252081709
+-0.112790242745648
+-0.124603067775788
+-0.113302157832432
+-0.123547247789548
+-0.0745909844862787
+-0.106194813700981
+-0.101797112193085
+-0.115485968700187
+-0.121970964783134
+-0.114026425264163
+-0.092803244470832
+-0.114192506459893
+-0.0884419655908183
+-0.120847902731818
+-0.104883025105982
+-0.112714783823557
+-0.0995922315949143
+-0.0773316163406786
+-0.0989700174205315
+-0.0909323429194833
+-0.0892626910540305
+-0.117029766448355
+-0.122894956278899
+-0.100960171561037
+-0.0719915889343564
+-0.0709867627024691
+-0.0929060907909054
+-0.120821535784501
+-0.0824337870787229
+-0.0992756290214535
+-0.103472777937713
+-0.117911993050678
+-0.0818332626011749
+-0.0873409880201055
+-0.0990262908475928
+-0.120988677031442
+-0.124245388084451
+-0.0747595413122286
+-0.0861205425591157
+-0.122800446859669
+-0.084942134180266
+-0.120094124032015
+-0.0853078168925406
+-0.109724147584309
+-0.0732914105240602
+-0.0917248656965841
+-0.116094984354072
+-0.128642236235646
+-0.109161352884048
+-0.0725650670401817
+-0.0956193743337485
+-0.10207537237963
+-0.100953349279885
+-0.118899650673697
+-0.0771386905986203
+-0.125639164755055
+-0.122735286955128
+-0.124526946493961
+-0.100732143960774
+-0.084109959635346
+-0.0900960039505838
+-0.0787233958199887
+-0.115413993011966
+-0.107033258939405
+-0.107105096853406
+-0.104796247899584
+-0.10964452047225
+-0.099395111610178
+-0.0768018674773899
+-0.0980892932394726
+-0.0808399914095622
+-0.11854055198479
+-0.106551016335265
+-0.105102824027251
+-0.12718338779232
+-0.0761950212025882
+-0.0873677159893311
+-0.129404657415045
+-0.126179869271641
+-0.113221454569563
+-0.102835730588886
+-0.10394847437747
+-0.0852197063198419
+-0.0918376163093729
+-0.108854197934067
+-0.11008900521048
+-0.129880772589105
+-0.0855538628875469
+-0.0825701603979837
+-0.123596271877775
+-0.11539264902203
+-0.0831284618643281
+-0.0957869422357495
+-0.106627311773872
+-0.0774404797375397
+-0.0908414563428079
+-0.0839350020103265
+-0.0794638956217918
+-0.114106198144752
+-0.117963671577651
+-0.116667652844573
+-0.0843865558773472
+-0.0921867290703558
+-0.0828487660567458
+-0.12012477174011
+-0.0771774962014296
+-0.112114563594699
+-0.102270383914685
+-0.0850753984533645
+-0.119754576546951
+-0.11579217089876
+-0.112747653161271
+-0.0918840413773615
+-0.101653481224382
+-0.107767658339367
+-0.115371680848166
+-0.10866245383857
+-0.126240846188281
+-0.0766678314315546
+-0.121897272374426
+-0.0984974150164648
+-0.0914281452982445
+-0.0871721860613949
+-0.0719389819225876
+-0.0799354511332321
+-0.0963359517603178
+-0.122266855532733
+-0.125507086714784
+-0.103208546880613
+-0.10319817495649
+-0.117392361090074
+-0.116854496284594
+-0.101054789664269
+-0.0739649193717121
+-0.0837912046809685
+-0.0924455800092308
+-0.0756125472224463
+-0.0913717595359073
+-0.110244470720031
+-0.0770525674882188
+-0.0929990034660285
+-0.0710659000003601
+-0.0873966175163631
+-0.100108540362248
+-0.0749568272835378
+-0.120883788654165
+-0.11724429172661
+-0.088668810800601
+-0.0756568475818548
+-0.0981360671110982
+-0.113306594878999
+-0.116924800087453
+-0.10710079466543
+-0.126369539859477
+-0.0741580169726738
+-0.0887366830599664
+-0.129208338351325
+-0.0988850724075703
+-0.118917338366181
+-0.100517359675869
+-0.128285614926097
+-0.0870896737805273
+-0.0860476936187978
+-0.0812794457189526
+-0.116235777959047
+-0.0731372257598567
+-0.0873996030443684
+-0.12552844127238
+-0.0947216385329133
+-0.0876509453410478
+-0.0887642344287705
+-0.106905841130436
+-0.110805827633509
+-0.0937261558004585
+-0.085602193416535
+-0.0711168358723207
+-0.104763089252675
+-0.11945608913659
+-0.0894310870632265
+-0.126407879688413
+-0.0737557746950864
+-0.117897175320459
+-0.0818592780968876
+-0.0973785591574807
+-0.0839022929237954
+-0.0951427800407294
+-0.119300520892719
+-0.12605270576192
+-0.0913199735349036
+-0.109448222667963
+-0.0907126689488679
+-0.122105649964593
+-0.0828222809929332
+-0.112297504473977
+-0.123346848082161
+-0.125712729843143
+-0.120109157113096
+-0.123167226357942
+-0.0951003370197619
+-0.111478079017429
+-0.103250217091716
+-0.0900511639513796
+-0.111587367559748
+-0.0933794623462101
+-0.120538380707766
+-0.117677544563087
+-0.0706343944010276
+-0.107268855295618
+-0.0767979832052259
+-0.0789247186327462
+-0.0790457781766374
+-0.122245026533462
+-0.0933989534261362
+-0.0878633760767765
+-0.0906908018022831
+-0.100841467264541
+-0.100172251428635
+-0.0903142817892087
+-0.071813802649809
+-0.116838037571578
+-0.112145023892922
+-0.0902109709946147
+-0.0753202856722741
+-0.0875820940427757
+-0.0867088557009711
+-0.126640980890214
+-0.088640642940938
+-0.118502287131557
+-0.0843687463055228
+-0.107528590980128
+-0.0736990779421314
+-0.0911957145966442
+-0.0782368529614095
+-0.125420763789632
+-0.12933938545739
+-0.12017287378548
+-0.0804895802078902
+-0.100186708501295
+-0.106847061424918
+-0.100359815539569
+-0.119583991124215
+-0.0763418699392418
+-0.102171104700827
+-0.0955058867204512
+-0.101545569665972
+-0.0771097162662009
+-0.128553996287694
+-0.114326723758894
+-0.0904422127140999
+-0.106420349835547
+-0.0858502741693166
+-0.082428048955942
+-0.104301337690936
+-0.111290304203917
+-0.101741128024611
+-0.0914043165808551
+-0.110889142849598
+-0.0820959537432668
+-0.0725042903829141
+-0.12610184711068
+-0.117592053487115
+-0.080566677054181
+-0.107095137587346
+-0.127210387000814
+-0.106014133073336
+-0.108108972262648
+-0.116534208476223
+-0.0776962239288946
+-0.124960829199295
+-0.0980229297789562
+-0.0770198096870537
+-0.118688173792221
+-0.10853185533431
+-0.125438261755513
+-0.0943914033134064
+-0.0845400177075764
+-0.110037010482464
+-0.0778373521797278
+-0.0773873337252819
+-0.102848459842178
+-0.112156859213199
+-0.0832483139292765
+-0.070866370239027
+-0.0924093595396824
+-0.103895269222794
+-0.0870930218531548
+-0.127364747160431
+-0.112551872927935
+-0.0962957758128375
+-0.0872724508076657
+-0.0778240173356004
+-0.0947825271177149
+-0.0724361868822178
+-0.077723577155544
+-0.0844807768605658
+-0.0944444118778777
+-0.0831195644322792
+-0.0769633704566486
+-0.0831284941588751
+-0.0807588012639369
+-0.0955349825069481
+-0.097784404171025
+-0.0970444490407278
+-0.104879704437443
+-0.0794997405517066
+-0.119614495429086
+-0.0959782769812161
+-0.076877448312007
+-0.0940461789601963
+-0.0713106030451383
+-0.113524189476698
+-0.103578525924452
+-0.122082201616233
+-0.0973615356499286
+-0.0796019886432915
+-0.0888000877574125
+-0.0972275381525572
+-0.0901389602105666
+-0.0997134211884
+-0.102816153125452
+-0.0869523632406452
+-0.108546880786483
+-0.0815068062713708
+-0.126139673857489
+-0.0815246896318841
+-0.115872722330667
+-0.0721627190285232
+-0.09183719656221
+-0.124129623600578
+-0.116377173063737
+-0.0925291491188879
+-0.101171877347146
+-0.100393079891467
+-0.0758297200002573
+-0.120771948842593
+-0.114223945299691
+-0.121963699749534
+-0.119013873766529
+-0.117981177153547
+-0.0744288311592909
+-0.0727191111611754
+-0.0860694512816889
+-0.074719499660474
+-0.129017839126642
+-0.088571949398771
+-0.127569606126199
+-0.114905699499113
+-0.104294166987977
+-0.0972801279148263
+-0.122879598699279
+-0.102970910843466
+-0.105344142519765
+-0.113045759815581
+-0.0781418184694029
+-0.082704766292684
+-0.11170502976595
+-0.117998795453391
+-0.110917316878118
+-0.0920298405733191
+-0.110284423830787
+-0.129340065992575
+-0.0922050556315245
+-0.107538554885051
+-0.102181225465187
+-0.102959306865801
+-0.0964519729021223
+-0.0968299721072224
+-0.113462978655976
+-0.0940363698514841
+-0.111228483429118
+-0.090754625772964
+-0.0858461404916951
+-0.109458652157755
+-0.0847605288440167
+-0.0891335515956725
+-0.115634380847689
+-0.0705239748667604
+-0.103369200198829
+-0.114777013802443
+-0.0713479645786335
+-0.100850532226473
+-0.104233224103411
+-0.129157445133665
+-0.119930527996306
+-0.0756975086569936
+-0.117052777660907
+-0.109724606972095
+-0.121547183124027
+-0.0798496810212809
+-0.123246613649783
+-0.126213267875214
+-0.119929167775655
+-0.123547914070804
+-0.113595055938129
+-0.0830518198410617
+-0.0858321370001964
+-0.116422254946408
+-0.099752667615425
+-0.0872863018441525
+-0.0730821046249574
+-0.123160505947651
+-0.0985288651831796
+-0.0898447580125239
+-0.0774244814524663
+-0.125133283568995
+-0.0962509003273536
+-0.0726408671440191
+-0.128938596037505
+-0.0716673328971084
+-0.128062772338695
+-0.0872581975850877
+-0.0900513087486532
+-0.102312716728351
+-0.0820394193677765
+-0.122210997988773
+-0.108515724585003
+-0.127183949401498
+-0.083462156366747
+-0.113576961974527
+-0.0997436118558208
+-0.0763329423020579
+-0.0927461897000383
+-0.0780724589013714
+-0.0832180860492197
+-0.102334573102623
+-0.129842171359989
+-0.119180743050309
+-0.0922347365759711
+-0.0910531214829274
+-0.126970050671618
+-0.0950453762925688
+-0.078341050123114
+-0.0821209960452292
+-0.122416267441226
+-0.0877249371572204
+-0.0981433168149862
+-0.12017959220886
+-0.0945259023152371
+-0.0778621901908864
+-0.0981652485679502
+-0.0977459118967532
+-0.0935655514722181
+-0.0802018161503293
+-0.0998303183852635
+-0.112827320580514
+-0.0850262805668818
+-0.125713500648524
+-0.109048933080264
+-0.0833510998825059
+-0.0831258834064249
+-0.105780177368046
+-0.0814713871927748
+-0.109424117774691
+-0.0776214742578359
+-0.097531752865062
+-0.0933877214748639
+-0.0766429582505335
+-0.0887576502912472
+-0.113515777519414
+-0.10935277218501
+-0.10017856168744
+-0.0748101325365992
+-0.070382995170836
+-0.117015057331739
+-0.106569654748986
+-0.102282917940405
+-0.0931386954227465
+-0.100500614924197
+-0.119494410716579
+-0.0783144886977244
+-0.106855276645318
+-0.11890413026656
+-0.080433709538469
+-0.0712391737652495
+-0.0975287728823954
+-0.0929458748005074
+-0.0883023700357642
+-0.125757773756368
+-0.0832165369135143
+-0.125186493514436
+-0.1185107429706
+-0.093809108900004
+-0.0994613133925134
+-0.100388261411067
+-0.0749012138073581
+-0.110331913097344
+-0.110896772222334
+-0.0785105028324405
+-0.0786214079377104
+-0.0879697591459212
+-0.117260458501505
+-0.07923592923411
+-0.0830360469325937
+-0.120624139019915
+-0.0976538376521634
+-0.10133734983403
+-0.10142254149406
+-0.127530084121361
+-0.0738979739404546
+-0.0812150214966528
+-0.0711962028140946
+-0.126469415772593
+-0.0743201600622294
+-0.0892070472300632
+-0.113382830395118
+-0.113370377036033
+-0.121312980164833
+-0.114593222920386
+-0.0830013094104609
+-0.100472263965824
+-0.0873396654268786
+-0.105332213446081
+-0.0886085658990342
+-0.0979230813755045
+-0.0899213655681241
+-0.106944764072537
+-0.0929390799853345
+-0.10124229475896
+-0.121397792647385
+-0.0863237843185821
+-0.108609428521794
+-0.0811115304462529
+-0.0732401210183594
+-0.0729778103065798
+-0.0899587861818666
+-0.0723696244482221
+-0.111275948336445
+-0.121807010961637
+-0.110375381979108
+-0.124745372057262
+-0.123362901107968
+-0.0859209744539237
+-0.0889429692596398
+-0.10717754121162
+-0.0903560979563369
+-0.0751718014386014
+-0.127774687643268
+-0.074020716646082
+-0.12004329071097
+-0.0879346967953781
+-0.128412508059497
+-0.106901325040016
+-0.116937882556913
+-0.0888832024405944
+-0.119904989006128
+-0.124269257179505
+-0.12075820422472
+-0.0822173993362563
+-0.0843810476006183
+-0.109452301067599
+-0.0918817125369959
+-0.0756882503374932
+-0.127846725051624
+-0.0998722457710819
+-0.118603112414018
+-0.121211555505679
+-0.0902760049263736
+-0.0996541160178821
+-0.117399385961439
+-0.102803405885128
+-0.0796736084914214
+-0.0775791000382325
+-0.103610443681384
+-0.118449806256675
+-0.0754920968293344
+-0.0764126795388288
+-0.123868339948918
+-0.0801168987792444
+-0.110781087789261
+-0.0991316059181691
+-0.112381377500495
+-0.128035659224445
+-0.116118360101059
+-0.106833108178232
+-0.104149656896086
+-0.111942780434851
+-0.112351099064881
+-0.119676834274748
+-0.0862696746586673
+-0.112359334394086
+-0.0853056102807944
+-0.125742697593873
+-0.0915947759890453
+-0.0794610850585041
+-0.109674824886667
+-0.0864324368855547
+-0.112622677732168
+-0.0895124680445707
+-0.114095753846018
+-0.118479229931007
+-0.114757724650658
+-0.0820942447920439
+-0.126502380614743
+-0.0744899714297622
+-0.07345509482345
+-0.0788744193086035
+-0.0977829623216285
+-0.107281565626279
+-0.0716772763628724
+-0.122813112164861
+-0.0834916040367468
+-0.082310615872778
+-0.100918160309667
+-0.127343060322024
+-0.0963407993696273
+-0.0764247317568617
+-0.115848772078882
+-0.0753441647869081
+-0.0856910704773298
+-0.125844461475186
+-0.12241222477494
+-0.0920631568947063
+-0.117492153087797
+-0.0840014433738665
+-0.09279246262877
+-0.0718293295999066
+-0.0738670737083399
+-0.0711476011405265
+-0.0881357923296408
+-0.0929551736303684
+-0.126900754780361
+-0.122316584615063
+-0.124951025104822
+-0.0927006417695947
+-0.127196928433106
+-0.0870583876669895
+-0.109411008382551
+-0.113265689655542
+-0.101284427796572
+-0.125198200689561
+-0.073149912333834
+-0.122898041227989
+-0.0987088308121338
+-0.0924447556442422
+-0.11499962544712
+-0.0944287658828298
+-0.0952461253553366
+-0.087507896184762
+-0.117253378936103
+-0.0776279144149769
+-0.114466103694386
+-0.113479372833583
+-0.104049092852043
+-0.0780908653503729
+-0.102903467391361
+-0.121067930033723
+-0.0707401652706679
+-0.0812567659176116
+-0.101338103553943
+-0.0918294548895963
+-0.114965752611098
+-0.0968706264109603
+-0.116770734621814
+-0.0812556758658293
+-0.118379345161265
+-0.0717712202901515
+-0.090795122926931
+-0.0917937408680765
+-0.0788806988305339
+-0.110168799290897
+-0.113409284691992
+-0.127808806024303
+-0.10434669553761
+-0.0838616094512553
+-0.110068135428011
+-0.0956690976774057
+-0.121475978086062
+-0.0818265724346752
+-0.126893439853437
+-0.125505538532657
+-0.0821224811009988
+-0.100609085980928
+-0.0990793449702476
+-0.11594116562613
+-0.0986777293130807
+-0.128333400582072
+-0.0984961278454259
+-0.124977677477816
+-0.0743227066903223
+-0.123508194724563
+-0.101033696669339
+-0.129703292465674
+-0.110457390002112
+-0.100780851601346
+-0.0998234756856668
+-0.0894549907598176
+-0.119890783256132
+-0.129008778441058
+-0.127290345072258
+-0.110802409463439
+-0.105513801984543
+-0.116436155549623
+-0.110803038410935
+-0.0832770283201714
+-0.106272630742424
+-0.0928969068436315
+-0.113905534492352
+-0.0729257033627567
+-0.114398359008076
+-0.0737086803906988
+-0.0711531388722315
+-0.104919611964129
+-0.0848689750984531
+-0.0895014003092287
+-0.0943472531933829
+-0.088413211231123
+-0.104245310699999
+-0.0723926484100945
+-0.093138947472015
+-0.0871291688826013
+-0.0781632594125209
+-0.116148375939419
+-0.0857401096489887
+-0.0844509234096718
+-0.0810613993237004
+-0.103562243028202
+-0.107629668438763
+-0.11038715638292
+-0.0838580164631769
+-0.124673156271872
+-0.0736470142887894
+-0.0782187050208206
+-0.117545929889083
+-0.0975220415457424
+-0.074909807589329
+-0.0987422770024291
+-0.0712961386971223
+-0.0986688711931119
+-0.116974995356274
+-0.080517347274745
+-0.103126157822922
+-0.083841284624303
+-0.0983548957322269
+-0.0890738043487282
+-0.10152106919503
+-0.108793346704274
+-0.0942775534519644
+-0.0789789883139975
+-0.0779339804987626
+-0.0884906955277953
+-0.0785089927465585
+-0.0963100370444371
+-0.0717924923566302
+-0.0859597052816077
+-0.0928723864203495
+-0.104721234207771
+-0.0920057739743604
+-0.0889834627532403
+-0.0921531281794816
+-0.0821519940475746
+-0.0866704115525274
+-0.107010873306943
+-0.0749117356244347
+-0.118877200351556
+-0.10725652508426
+-0.079925820336638
+-0.109707216792571
+-0.093653713527788
+-0.128778791024645
+-0.100322541482991
+-0.0736144976665038
+-0.0934614087904473
+-0.0957142439741706
+-0.10690124159096
+-0.117490889394522
+-0.0925523323520339
+-0.120792571959233
+-0.110617366324737
+-0.0860775146193978
+-0.124490184338347
+-0.099466799961177
+-0.115781203848335
+-0.0787076710720679
+-0.105738342556323
+-0.123670853579895
+-0.126648524976844
+-0.0789115195433683
+-0.0807622364850075
+-0.109140966818917
+-0.111561415672372
+-0.0831479294992931
+-0.071579500360297
+-0.107763718993857
+-0.100881939925328
+-0.0815851528624287
+-0.102645075041487
+-0.110045971297001
+-0.124397504786619
+-0.0757073029556648
+-0.122409288417008
+-0.122507096476066
+-0.0902190406090633
+-0.0793858433743169
+-0.117542215082417
+-0.116933218924984
+-0.115475226475788
+-0.120687155267098
+-0.0963448931968005
+-0.116467979986649
+-0.0937825939636887
+-0.121760657892358
+-0.0978136707102099
+-0.0966866687456369
+-0.127776001790478
+-0.0759120373196783
+-0.103937252458471
+-0.129271627660495
+-0.0898176205022131
+-0.0710327291630368
+-0.10304648391716
+-0.106940820355162
+-0.118298427659292
+-0.0732652730767316
+-0.0744002130986012
+-0.08386442630479
+-0.124698139326954
+-0.0845256838666177
+-0.0928622630617651
+-0.107966142078328
+-0.111206677543767
+-0.128260986173332
+-0.0992282617012238
+-0.116342080611878
+-0.0831267122248933
+-0.0842487501113248
+-0.105938860392069
+-0.0993949957713823
+-0.101374805107801
+-0.092209669302257
+-0.122042638889274
+-0.123657886322315
+-0.0888910397198381
+-0.0858202258424366
+-0.0859059516296902
+-0.0920711700859685
+-0.11249937585178
+-0.0894587828152762
+-0.121678896523893
+-0.0861806934552878
+-0.123647588377568
+-0.072558281240252
+-0.109053167764123
+-0.124366666034282
+-0.0706110135201849
+-0.0997094566841345
+-0.103453681870859
+-0.124637296019628
+-0.116014722076209
+-0.0930865214039057
+-0.119621793879908
+-0.0838816878915162
+-0.0898446414551091
+-0.0775466569072117
+-0.0963418089741626
+-0.125876259760086
+-0.110049002175876
+-0.0803754107612044
+-0.122403052473954
+-0.123713983971605
+-0.0717926735881853
+-0.109599089345992
+-0.0700743672831054
+-0.0901517451941319
+-0.0809328052090456
+-0.107968296367421
+-0.08519617710922
+-0.0893776190025845
+-0.0735550256813157
+-0.0710071995784754
+-0.0884440185580705
+-0.10805873273571
+-0.10612096731221
+-0.108085957324373
+-0.0732624660259004
+-0.119230462508351
+-0.123174416505767
+-0.115666833335076
+-0.128221134276269
+-0.107758613777848
+-0.0885993211071855
+-0.112370799452119
+-0.107406256810847
+-0.0975720269286813
+-0.0938452609686938
+-0.120011541402118
+-0.0979110562489119
+-0.0984598522629132
+-0.0885790489297826
+-0.0790071387287236
+-0.080463499783313
+-0.108115034056398
+-0.125134205103061
+-0.0826136348684255
+-0.0984168952945464
+-0.128793154313083
+-0.0959341736234081
+-0.113642947912499
+-0.0933681256031773
diff --git a/model_input/neurons/if/1_smallcharge b/model_input/neurons/if/1_smallcharge
new file mode 100644
index 0000000..209e3ef
--- /dev/null
+++ b/model_input/neurons/if/1_smallcharge
@@ -0,0 +1 @@
+20
diff --git a/model_input/neurons/if/3_supercharge b/model_input/neurons/if/3_supercharge
new file mode 100644
index 0000000..f6f7853
--- /dev/null
+++ b/model_input/neurons/if/3_supercharge
@@ -0,0 +1,3 @@
+10000
+10000
+10000
diff --git a/model_input/spikes/Makefile b/model_input/spikes/Makefile
new file mode 100644
index 0000000..3a4f512
--- /dev/null
+++ b/model_input/spikes/Makefile
@@ -0,0 +1,7 @@
+.PHONY: all
+all: 1000N_10s_10Hz_random 1N_10s_10Hz_random
+
+%_random : ../../code/matlab/random_spikes.m
+ ../../code/matlab/random_spikes `echo $* | sed 's/^/num_neurons=/' | sed 's/N\_/;duration=/' | sed 's/s\_/;spike_freq=/' | sed 's/Hz$$/;/'` >/dev/null 2> $@
+
+
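The %_random rule above derives the Octave parameter string purely from the target stem. A rough trace of its sed pipeline, assuming a stem that follows the <count>N_<seconds>s_<freq>Hz convention of the default targets:

    $ echo 1000N_10s_10Hz \
        | sed 's/^/num_neurons=/' \
        | sed 's/N_/;duration=/' \
        | sed 's/s_/;spike_freq=/' \
        | sed 's/Hz$/;/'
    num_neurons=1000;duration=10;spike_freq=10;

The resulting string is passed to random_spikes as its parameter argument; the recipe discards stdout and captures stderr into the target file.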
diff --git a/model_input/topology/Makefile b/model_input/topology/Makefile
new file mode 100644
index 0000000..22424b0
--- /dev/null
+++ b/model_input/topology/Makefile
@@ -0,0 +1,12 @@
+.PHONY: all
+all: if/1000_random
+
+if/%_random : ../../code/matlab/random_topo.m
+ ../../code/matlab/random_topo "num_neurons=$*; connection_density=0.1; inhibitory_fraction=0.2;" 1>/dev/null 2> $@
+
+if/1_to_%:
+ rm -f $@
+ for I in $$(seq 1 $$(echo "$@" | egrep -o '[0-9]+$$')); do echo "0, $$I, 0.01, 0.001" >> $@; done
+
+dalif/%: if/%
+ cat $< | sed 's/^.*$$/0.0/' | paste -d"," $< - > $@
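The two pattern rules above can be checked by hand. A small sketch, using the hypothetical targets if/1_to_3 and dalif/1_to_3 (any trailing count works the same way; the Makefile's default target is if/1000_random):

    $ make if/1_to_3        # neuron 0 feeds neurons 1..3
    $ cat if/1_to_3
    0, 1, 0.01, 0.001
    0, 2, 0.01, 0.001
    0, 3, 0.01, 0.001
    $ make dalif/1_to_3     # same rows with an extra 0.0 column pasted on
    $ cat dalif/1_to_3
    0, 1, 0.01, 0.001,0.0
    0, 2, 0.01, 0.001,0.0
    0, 3, 0.01, 0.001,0.0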
diff --git a/model_input/topology/README b/model_input/topology/README
new file mode 100644
index 0000000..b3d3685
--- /dev/null
+++ b/model_input/topology/README
@@ -0,0 +1 @@
+The first number in a topology file name denotes the number of neurons used.
diff --git a/model_input/topology/if/2circle b/model_input/topology/if/2circle
new file mode 100644
index 0000000..446d25c
--- /dev/null
+++ b/model_input/topology/if/2circle
@@ -0,0 +1,2 @@
+0, 1, 0.01, 0.001
+1, 0, 0.01, 0.001
diff --git a/model_input/topology/if/3circle2 b/model_input/topology/if/3circle2
new file mode 100644
index 0000000..35bbc5d
--- /dev/null
+++ b/model_input/topology/if/3circle2
@@ -0,0 +1,6 @@
+0, 1, 0.333, 0.9
+0, 2, 0.4, 0.9
+1, 2, 0.334, 0.9
+1, 0, 0.39, 0.9
+2, 0, 0.333, 0.9
+2, 1, 0.3, 0.9
diff --git a/model_input/trace/10s_10ms_all b/model_input/trace/10s_10ms_all
new file mode 100644
index 0000000..a3ee5a3
--- /dev/null
+++ b/model_input/trace/10s_10ms_all
@@ -0,0 +1,1000 @@
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
+0.01; neuron; synapse; global; spikes
diff --git a/model_input/trace/10s_10s_all b/model_input/trace/10s_10s_all
new file mode 100644
index 0000000..71eabd3
--- /dev/null
+++ b/model_input/trace/10s_10s_all
@@ -0,0 +1 @@
+10; neuron; synapse; global; spikes
diff --git a/model_input/trace/README b/model_input/trace/README
new file mode 100644
index 0000000..1f15812
--- /dev/null
+++ b/model_input/trace/README
@@ -0,0 +1,3 @@
+To repeat a trace command every x ms, use:
+
+code/glue/repeat-trace-cmd duration trace_interval "trace commands"
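For instance, the checked-in trace 10s_10ms_all could plausibly be reproduced as follows (hypothetical invocation, assuming the duration and interval arguments are given in seconds and that the tool prints one "interval; commands" line per step to stdout, which is the format of that file):

    # hypothetical: 10 s duration, 0.01 s (10 ms) interval, tracing everything
    code/glue/repeat-trace-cmd 10 0.01 "neuron; synapse; global; spikes" > model_input/trace/10s_10ms_all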