
Commit 0d03755

Implement a variant of the Pearson detection engine
1 parent 7aa0e45 commit 0d03755

File tree

5 files changed

+170
-22
lines changed


.env

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ DEVICE=dummy # EEG device
 EPOCH=0.250 # Epoch length, in seconds
 LATENCY=0.08 # If the signal has a constant latency, set it here
 PIPELINE=riemann # Classification pipeline (riemann, eegnet)
-CALIBRATION_LAYOUT=single # Calibration layout (single, simple, grid, keyboard)
+CALIBRATION_LAYOUT=simple # Calibration layout (single, simple, grid, keyboard)
 TASK_LAYOUT=simple # Task layout (simple, grid, keyboard)
 DYNAMIC_CODES=0 # 1 to generate random codes or 0 to use static codes
 # SEED=42 # Optional random seed for reproducible dynamic codes

README.md

Lines changed: 22 additions & 2 deletions
@@ -155,19 +155,39 @@ To create a new stimulus type, simply add a new image in [this folder](https://g

 The application classifies single flashes. Epochs are triggered at each frame on 250ms windows. The classification pipeline computes xdawn covariances projected on the tangent space followed by a linear discriminant analysis. The resulting probabilities are [accumulated](https://github.com/timeflux/burst/blob/main/nodes/predict.py) in a circular buffer on which correlation analysis is performed. When enough confidence is reached for a specific target, a final prediction is made.
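For illustration only, here is a minimal sketch of the kind of pipeline described in the paragraph above, built with pyriemann and scikit-learn. The epoch shapes, channel count, and `nfilter` value are placeholders for this example, not the application's actual settings, and the real pipeline is defined elsewhere in the repository.

```python
# Sketch of an xDAWN covariances -> tangent space -> LDA pipeline (illustrative values only).
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from pyriemann.estimation import XdawnCovariances
from pyriemann.tangentspace import TangentSpace

rng = np.random.default_rng(42)
X = rng.standard_normal((100, 8, 64))   # 100 epochs, 8 channels, ~250 ms of samples (made up)
y = rng.integers(0, 2, 100)             # flash / no-flash labels (made up)

clf = make_pipeline(XdawnCovariances(nfilter=4), TangentSpace(), LinearDiscriminantAnalysis())
clf.fit(X, y)
proba = clf.predict_proba(X[:5])        # per-epoch probabilities, later accumulated for correlation analysis
print(proba.shape)
```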

-The accumulation engine is [configurable](https://github.com/timeflux/burst/blob/main/graphs/classification.yaml).
+Several accumulation engines are available. They can be configured from the [classification graph](https://github.com/timeflux/burst/blob/main/graphs/classification.yaml) or adjusted in real time from the contextual menu (press the `s` key).
+
+The current default decision engine is _Steady_.
+
+#### Parameters available for all decision engines

 | Setting | Description | Default |
 |---------|--------------|---------|
 | codes | The list of burst codes, one for each target | |
 | min_buffer_size | Minimum number of predictions to accumulate before emitting a prediction | 30 |
 | max_buffer_size | Maximum number of predictions to accumulate for each class | 200 |
+| recovery | Minimum duration in ms required between two consecutive epochs after a prediction | 300 |
+
+#### _Pearson_ decision engine
+
+This method computes the Pearson correlation for each frame and code. The final prediction is made when both the `threshold` and `delta` limits are reached.
+
+| Setting | Description | Default |
+|---------|--------------|---------|
 | threshold | Minimum value to reach according to the Pearson correlation coefficient | .75 |
 | delta | Minimum difference percentage to reach between the p-values of the two best candidates | .5 |
-| recovery | Minimum duration in ms required between two consecutive epochs after a prediction | 300 |

 Please note that the default values are reasonably suitable for random data. For real EEG data, the threshold should probably be raised.
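As a rough illustration of that rule (not the exact code from `nodes/predict.py`, and `pearson_decision` is a hypothetical helper), a decision step could look like the sketch below. In particular, the relative comparison of the two best p-values is an assumed interpretation of `delta`.

```python
# Rough sketch of a threshold/delta decision over Pearson correlations.
# The real computation lives in AccumulatePearson.decide() in nodes/predict.py.
import numpy as np
from scipy.stats import pearsonr

def pearson_decision(probas, indices, codes, threshold=0.75, delta=0.5):
    correlations, pvalues = [], []
    for code in codes:
        r, p = pearsonr(probas, [code[i] for i in indices])
        correlations.append(r)
        pvalues.append(p)
    best, second = np.flip(np.argsort(correlations))[:2]
    if correlations[best] < threshold:
        return None                                              # best candidate not correlated enough
    gap = abs(pvalues[second] - pvalues[best]) / max(pvalues[second], 1e-12)
    if gap < delta:
        return None                                              # two best candidates too close to call
    return int(best)                                             # index of the predicted target
```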

+#### _Steady_ decision engine
+
+Based on the _Pearson_ engine, this method uses a different decision process: a prediction is emitted once the same candidate has remained the best match for enough consecutive frames, or, failing that, the most frequently detected candidate is chosen after a maximum number of frames (see the sketch below the table).
+
+| Setting | Description | Default |
+|---------|--------------|---------|
+| min_frames_pred | Minimum number of times the current candidate must have been detected to emit a prediction | 50 |
+| max_frames_pred | Maximum number of frames after which the best performing candidate is chosen | 200 |
+
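The sketch below is a simplified paraphrase of that decision loop (`steady_decide` is a hypothetical helper operating on a stream of per-frame best candidates; the actual engine, `AccumulateSteady` in `nodes/predict.py` further down, derives the candidates from Pearson correlations over the accumulated buffer).

```python
# Simplified restatement of the Steady decision loop: emit the current candidate once it has
# been the best match for more than min_frames_pred consecutive frames, or fall back to the
# most frequently seen candidate once max_frames_pred frames have elapsed.
def steady_decide(picks, min_frames_pred=3, max_frames_pred=10):
    preds, current, streak = {}, None, 0
    for frame, target in enumerate(picks, start=1):
        if target == current:
            streak += 1
        else:
            current, streak = target, 1
        preds[current] = preds.get(current, 0) + 1
        if streak > min_frames_pred:
            return current                     # stable candidate: predict early
        if frame >= max_frames_pred:
            return max(preds, key=preds.get)   # timed out: best performer overall
    return None                                # not enough evidence yet

print(steady_decide([1, 1, 2, 2, 2, 2, 2]))    # -> 2 (streak of 4 exceeds min_frames_pred=3)
```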

 ## Running

 Run the following:

graphs/classification.yaml

Lines changed: 18 additions & 8 deletions
@@ -52,20 +52,30 @@ graphs:
     - id: predict
       module: nodes.predict
       class: Accumulate
+      # params:
+      #   method: AccumulateRandom
+      #   n_targets: 5
+      #   min_buffer_size: 50
+      # params:
+      #   method: AccumulatePearson
+      #   codes:
+      #     {% for CODE in TASK_CODES.split() %}
+      #     - "{{CODE}}"
+      #     {% endfor %}
+      #   threshold: .2
+      #   delta: .5
+      #   min_buffer_size: 20
+      #   max_buffer_size: 80
       params:
-        method: AccumulatePearson
+        method: AccumulateSteady
         codes:
           {% for CODE in TASK_CODES.split() %}
           - "{{CODE}}"
           {% endfor %}
-        threshold: .2
-        delta: .5
-        min_buffer_size: 20
+        min_buffer_size: 50
         max_buffer_size: 80
-      # params:
-      #   method: AccumulateRandom
-      #   n_targets: 5
-      #   min_buffer_size: 50
+        min_frames_pred: 50
+        max_frames_pred: 200
     - id: pub
       module: timeflux.nodes.zmq
       class: Pub
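The `{% for %}` block above is Jinja templating: the graph file is rendered with the environment (here `TASK_CODES` from `.env`) in scope, so the space-separated code string expands into a YAML list under `codes`. A minimal rendering sketch, assuming standard Jinja2 and PyYAML and using made-up code values:

```python
# Minimal illustration of how the {% for %} loop expands when the graph template is rendered.
from jinja2 import Template
import yaml

snippet = """
codes:
{% for CODE in TASK_CODES.split() %}
  - "{{CODE}}"
{% endfor %}
"""

rendered = Template(snippet).render(TASK_CODES="110010 001101 101100")
print(yaml.safe_load(rendered))   # {'codes': ['110010', '001101', '101100']}
```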

nodes/predict.py

Lines changed: 86 additions & 10 deletions
@@ -147,11 +147,39 @@ def decide(self):
         return False


+class AccumulateRandom(AccumulateAbstract):
+    """ Random decision
+
+    This node accumulates the probabilities of single-trial classifications from a ML node.
+    When the buffer size reaches `min_buffer_size`, a random target between 0 and `n_targets` is predicted.
+    This node has no practical use except for demonstrating how to extend the base `AccumulateAbstract` class.
+
+    Args:
+        n_targets (int): The number of targets.
+        min_buffer_size (int): Minimum number of predictions to accumulate before emitting a prediction (default: 30).
+        max_buffer_size (int): Maximum number of predictions to accumulate for each class (default: 200).
+        recovery (int): Minimum duration in ms required between two consecutive epochs after a prediction (default: 300).
+
+    Attributes:
+        i (Port): Default input, expects DataFrame.
+        o (Port): Default output, provides DataFrame
+    """
+
+    def __init__(self, n_targets, min_buffer_size=30, max_buffer_size=200, recovery=300):
+        self.n_targets = n_targets
+        super().__init__(min_buffer_size, max_buffer_size, recovery)
+
+    def decide(self):
+        return {"target": random.randint(0, self.n_targets - 1), "score": 42}
+
+
 class AccumulatePearson(AccumulateAbstract):
     """ Accumulation of probabilities

-    This node accumulates the probabilities of single-trial classifications from a ML node.
+    This node accumulates the probabilities of single-trial classifications from a ML node,
+    and computes the Pearson correlation for each code.
     When enough confidence is reached for a specific class, a final prediction is made.
+    The decision is based on the `threshold` and `delta` parameters.

     Args:
         codes (list): The list of burst codes, one for each target.
@@ -208,28 +236,76 @@ def decide(self):
         return {"target": target, "score": correlation}


-class AccumulateRandom(AccumulateAbstract):
-    """ Random decision
+class AccumulateSteady(AccumulateAbstract):
+    """ Accumulation of probabilities

     This node accumulates the probabilities of single-trial classifications from a ML node.
-    When the buffer size reaches `min_buffer_size`, a random target between 0 and `n_targets` is predicted.
-    This node has no practical use except for demonstrating how to extend the base `AccumulateAbstract` class.
+    Based on the Pearson correlation method above, it uses a different decision process.

     Args:
-        n_targets (int): The number of targets.
+        codes (list): The list of burst codes, one for each target.
         min_buffer_size (int): Minimum number of predictions to accumulate before emitting a prediction (default: 30).
         max_buffer_size (int): Maximum number of predictions to accumulate for each class (default: 200).
+        min_frames_pred (int): Minimum number of times the current candidate must have been detected to emit a prediction (default: 50).
+        max_frames_pred (int): Maximum number of frames after which the best performing candidate is chosen (default: 200).
         recovery (int): Minimum duration in ms required between two consecutive epochs after a prediction (default: 300).

     Attributes:
         i (Port): Default input, expects DataFrame.
         o (Port): Default output, provides DataFrame
     """

-    def __init__(self, n_targets, min_buffer_size=30, max_buffer_size=200, recovery=300):
-        self.n_targets = n_targets
-        super().__init__(min_buffer_size, max_buffer_size, recovery)
+    def __init__(self, codes, min_buffer_size=30, max_buffer_size=200, min_frames_pred=50, max_frames_pred=200, recovery=300):
+        self.codes = [[int(bit) for bit in code] for code in codes]
+        self.min_buffer_size = min_buffer_size
+        self.max_buffer_size = max_buffer_size
+        self.recovery = recovery
+        self.min_frames_pred = min_frames_pred
+        self.max_frames_pred = max_frames_pred
+        self.reset()

     def decide(self):
-        return {"target": random.randint(0, self.n_targets - 1), "score": 42}

+        # Compute the Pearson correlation coefficient
+        correlations = []
+        pvalues = []
+        x = self._probas
+        for code in self.codes:
+            y = [code[i] for i in self._indices]
+            try:
+                correlation, pvalue = pearsonr(x, y)
+            except:
+                # If one input is constant, the standard deviation will be 0, the correlation will not be computed,
+                # and NaN will be returned. In this case, we force the correlation value to 0.
+                correlation = 0
+                pvalue = 1e-8
+            correlations.append(correlation)
+            pvalues.append(pvalue)
+
+        # Make a decision
+        indices = np.flip(np.argsort(correlations))
+        target = int(indices[0])
+        diff_corr = correlations[indices[0]] - correlations[indices[1]]
+        if target == self._current_target and correlations[indices[0]] > 0.0 and diff_corr > 0.0:
+            self._target_acc += 1
+        else:
+            self._current_target = target
+            self._target_acc = 1
+
+        self._preds.update({self._current_target: self._preds[self._current_target] + 1})
+
+        if self._target_acc > self.min_frames_pred:
+            target = self._current_target
+        elif self._frames >= self.max_frames_pred:
+            target = max(self._preds, key=self._preds.get)
+        else:
+            return False
+
+        # Return target and score
+        return {"target": target, "score": self._target_acc}
+
+    def reset(self):
+        super().reset()
+        self._preds = {c:0 for c in range(len(self.codes))}
+        self._current_target = -1
+        self._target_acc = 0
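For a quick feel of the new engine outside a running Timeflux graph, the illustrative snippet below pokes at it directly. The `_probas`, `_indices`, and `_frames` buffers are normally filled by the `AccumulateAbstract` base class during operation; setting them by hand here is purely for demonstration, and the snippet assumes the repository's Python dependencies are installed and that it is run from the repository root.

```python
# Illustrative-only exercise of AccumulateSteady.decide(): the toy buffer matches the first
# code perfectly, so the streak counter reaches min_frames_pred after a few calls.
from nodes.predict import AccumulateSteady

codes = ["110010", "001101"]
acc = AccumulateSteady(codes, min_buffer_size=5, min_frames_pred=3, max_frames_pred=10)
acc._probas = [1, 1, 0, 0, 1, 0]   # accumulated single-trial outputs (toy values)
acc._indices = [0, 1, 2, 3, 4, 5]  # position in the code sequence for each accumulated epoch
acc._frames = 6

for _ in range(4):                 # repeated calls stand in for successive frames
    result = acc.decide()
print(result)                      # {'target': 0, 'score': 4} once the streak exceeds min_frames_pred
```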

www/assets/js/schema.js

Lines changed: 43 additions & 1 deletion
@@ -33,13 +33,17 @@ const schema = {
       "labelPosition": "left-left",
       "widget": "html5",
       "placeholder": " ",
-      "defaultValue": "AccumulatePearson",
+      "defaultValue": "AccumulateSteady",
       "data": {
         "values": [
           {
             "label": "Pearson",
             "value": "AccumulatePearson"
           },
+          {
+            "label": "Steady",
+            "value": "AccumulateSteady"
+          },
           {
             "label": "Random",
             "value": "AccumulateRandom"
@@ -91,6 +95,44 @@ const schema = {
         },
       ]
     },
+    {
+      "label": "Steady",
+      "conditional": {
+        "show": true,
+        "when": "method",
+        "eq": "AccumulateSteady"
+      },
+      "type": "well",
+      "input": false,
+      "components": [
+        {
+          "label": "Minimum prediction score",
+          "labelPosition": "left-left",
+          "defaultValue": 50,
+          "validate": {
+            "required": true,
+            "min": 1,
+            "max": 1e9
+          },
+          "clearOnHide": false,
+          "key": "steady.min_frames_pred",
+          "type": "number"
+        },
+        {
+          "label": "Maximum predictions",
+          "labelPosition": "left-left",
+          "defaultValue": 200,
+          "validate": {
+            "required": true,
+            "min": 1,
+            "max": 1e9
+          },
+          "clearOnHide": false,
+          "key": "steady.max_frames_pred",
+          "type": "number",
+        },
+      ]
+    },
     {
       "label": "Random",
       "conditional": {
