Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
d1fb1c5
Added new Visual Pattern Reversal VEP experiment implementation, impr…
pellet Jul 16, 2025
91311dd
Merge branch 'master' into dev/prvep_experiment
pellet Jul 22, 2025
5f7cbbd
cleanup
pellet Aug 1, 2025
a4f2e0b
set up params and trial dataframe
pellet Aug 1, 2025
cbb3899
created BlockExperiment.py
pellet Aug 3, 2025
39147a5
fixed psychxr on 3.9
pellet Aug 3, 2025
f758b26
refactored experiment instructions
pellet Aug 3, 2025
43f6e58
fix
pellet Aug 3, 2025
7654574
fixed blocks and event markers
pellet Aug 4, 2025
d786e47
only 4 blocks
pellet Aug 6, 2025
4202631
block example
pellet Aug 13, 2025
456555f
fixed vr display
pellet Aug 15, 2025
4bcf3cf
don't bother with specifying python version in yml
pellet Aug 17, 2025
c1ac2fd
fixed numpy pin
pellet Aug 17, 2025
669a24e
remove the name field so it doesn't hardcode the environment name
pellet Aug 17, 2025
5dea45d
try individual eyes
pellet Aug 17, 2025
1619e3d
try drawing instructions for both eyes
pellet Aug 17, 2025
f698d67
draw block instructions correctly on monitor
pellet Aug 17, 2025
63cdfe2
draw iti for monitor and vr eyes
pellet Aug 17, 2025
ba485ef
try simplifying instructions per block for vr
pellet Aug 18, 2025
70febad
added refresh frame rate check
pellet Aug 18, 2025
342c683
fixed display on monitor which was crashing
pellet Aug 18, 2025
37d3252
fixed iti presentation to not occur mid-experiment
pellet Aug 18, 2025
f63569b
showing double vision
pellet Aug 18, 2025
fe79d11
fixed stereoscopic positioning
pellet Aug 19, 2025
f8cc28d
fixed monitor positioning
pellet Aug 19, 2025
647be89
fixed instructions
pellet Aug 19, 2025
2915061
try drawing block instructions to single eye
pellet Aug 20, 2025
1fede30
fix for globbing multiple sessions/subjects
pellet Aug 20, 2025
755be8c
try using consistent luminance in headset
pellet Aug 20, 2025
78f965d
use black background for other eye during instructions
pellet Aug 20, 2025
2e8522c
refactored to improve performance
pellet Aug 28, 2025
19349a9
allow early exit from instructions
pellet Aug 28, 2025
74a587c
clean up
pellet Aug 28, 2025
7e085c7
made more performant, no loading animation now
pellet Aug 28, 2025
3b2fedc
optimize again
pellet Aug 28, 2025
464f3a5
improved focus
pellet Aug 31, 2025
5227ced
fixed instructions
pellet Aug 31, 2025
d2e47c9
use conda
pellet Sep 18, 2025
fc9d1e3
revert change
pellet Oct 2, 2025
47c7311
Merge branch 'master' into dev/prvep_experiment
pellet Dec 4, 2025
9c0ff61
revert some unneeded changes
pellet Dec 4, 2025
49914d3
clean up
pellet Dec 4, 2025
08047d1
revert non-essential changes
pellet Dec 4, 2025
11a8dc5
removed unneeded members
pellet Dec 4, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
135 changes: 135 additions & 0 deletions eegnb/experiments/BlockExperiment.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
"""
BlockExperiment Class - Extends BaseExperiment with block-based functionality

This class provides block-based experiment capabilities by inheriting from BaseExperiment
and overriding the run method to handle multiple blocks. It loads stimulus only once
and reuses it across blocks, while allowing block-specific instructions.

Experiments that need block-based execution should inherit from this class instead of BaseExperiment.
"""
from abc import ABC
from time import time

from .Experiment import BaseExperiment


class BlockExperiment(BaseExperiment, ABC):
    """
    Inherits from BaseExperiment to provide block-based functionality.

    This class is designed for experiments that need to run as multiple blocks.
    Each block has its own instructions and duration. It loads all stimuli at once, then re/uses it across blocks.
    """

    def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_blocks, iti: float, soa: float, jitter: float,
                 use_vr=False, use_fullscr=True, stereoscopic=False):
        """ Initializer for the BlockExperiment Class

        Args:
            exp_name (str): Name of the experiment
            block_duration (float): Duration of each block in seconds
            eeg: EEG device object for recording
            save_fn (str): Save filename for data
            block_trial_size (int): Number of trials per block
            n_blocks (int): Number of blocks to run
            iti (float): Inter-trial interval, in seconds
            soa (float): Stimulus onset asynchrony (time between successive stimulus onsets), in seconds
            jitter (float): Maximum random delay added between stimuli, in seconds
            use_vr (bool): Use VR for displaying stimulus
            use_fullscr (bool): Use fullscreen mode
            stereoscopic (bool): Whether to render to separate left/right eye buffers
        """
        # Calculate total trials for the base class
        total_trials = block_trial_size * n_blocks

        # Initialize BaseExperiment with total trials
        # Pass None for duration if block_duration is None to ignore time spent in instructions
        super().__init__(exp_name, block_duration, eeg, save_fn, total_trials, iti, soa, jitter, use_vr, use_fullscr, stereoscopic)

        # Block-specific parameters
        self.block_duration = block_duration
        self.block_trial_size = block_trial_size
        self.n_blocks = n_blocks

        # Index (0-based) of the block currently being presented; updated by run()
        self.current_block_index = 0

    def present_block_instructions(self, current_block):
        """
        Display instructions for the current block to the user.

        This method is meant to be overridden by child classes to provide
        experiment-specific instructions before each block. The base implementation
        simply flips the window without adding any text.

        This method is called by _show_block_instructions in a loop until the user
        provides input to continue or cancel the experiment.

        Args:
            current_block (int): The current block number (0-indexed), used to customize
                                 instructions for specific blocks if needed.
        """
        self.window.flip()

    def _show_block_instructions(self, block_number):
        """
        Show instructions for a specific block and wait for user input.

        Args:
            block_number (int): Current block number (0-indexed)

        Returns:
            bool: True if the user chose to continue the experiment,
                  False if the user cancelled.
        """

        # Clear any previous input
        self._clear_user_input()

        # Wait for user input to continue, redrawing instructions each iteration
        while True:
            # Display the instruction text
            super()._draw(lambda: self.present_block_instructions(block_number))

            if self._user_input('start'):
                return True
            elif self._user_input('cancel'):
                return False

    def run(self, instructions=True):
        """
        Run the experiment as a series of blocks

        This method overrides BaseExperiment.run() to handle multiple blocks:
        the EEG stream is started once, then each block shows its own
        instructions and runs its trial loop. Cancelling a block's
        instructions ends the experiment early.

        Args:
            instructions (bool): Whether to show the initial experiment instructions

        Returns:
            bool: False if setup failed, True otherwise.
        """
        # Setup the experiment (creates window, loads stimulus once)
        if not self.setup(instructions):
            return False

        # Start EEG Stream once for all blocks
        if self.eeg:
            print("Wait for the EEG-stream to start...")
            self.eeg.start(self.save_fn)
            print("EEG Stream started")

        # Run each block
        for block_index in range(self.n_blocks):
            self.current_block_index = block_index
            print(f"Starting block {block_index + 1} of {self.n_blocks}")

            # Show block-specific instructions
            if not self._show_block_instructions(block_index):
                break

            # Run this block
            if not self._run_trial_loop(start_time=time(), duration=self.block_duration):
                break

        # Stop EEG Stream after all blocks
        if self.eeg:
            self.eeg.stop()

        # Close window at the end of all blocks
        self.window.close()
        return True
5 changes: 4 additions & 1 deletion eegnb/experiments/Experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,12 +316,15 @@ def run(self, instructions=True):
# Setup the experiment
self.setup(instructions)

print("Wait for the EEG-stream to start...")

# Start EEG Stream, wait for signal to settle, and then pull timestamp for start point
if self.eeg:
if self.eeg.backend not in ['serialport']:
print("Wait for the EEG-stream to start...")
self.eeg.start(self.save_fn, duration=self.record_duration + 5)
print("EEG Stream started")

print("EEG Stream started")

# Record experiment until a key is pressed or duration has expired.
record_start_time = time()
Expand Down
215 changes: 215 additions & 0 deletions eegnb/experiments/visual_vep/pattern_reversal_vep.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,215 @@
from time import time
import numpy as np

from psychopy import visual
from typing import Optional, Dict, Any
from eegnb.devices.eeg import EEG
from eegnb.experiments.BlockExperiment import BlockExperiment
from stimupy.stimuli.checkerboards import contrast_contrast

QUEST_PPD = 20

class VisualPatternReversalVEP(BlockExperiment):
    """
    Visual Pattern Reversal VEP experiment.

    Presents a contrast-reversing checkerboard with a central red fixation dot.
    Blocks alternate between stimulating the left and right eye: in VR the
    unstimulated eye's buffer is blacked out, on a monitor the participant is
    instructed to close the other eye. A marker is pushed to the EEG stream on
    every pattern reversal (when an EEG device is attached).
    """

    def __init__(self, display_refresh_rate: int, eeg: Optional[EEG] = None, save_fn=None,
                 block_duration_seconds=50, block_trial_size: int=100, n_blocks: int=4, use_vr=False, use_fullscr=True):
        """Initialize the pattern reversal VEP experiment.

        Args:
            display_refresh_rate (int): Expected display refresh rate in Hz;
                validated against the measured rate in load_stimulus().
            eeg: Optional EEG device used for recording and event markers.
            save_fn (str): Save filename for the recording.
            block_duration_seconds (float): Duration of each block in seconds.
            block_trial_size (int): Number of reversals (trials) per block.
            n_blocks (int): Number of blocks; stimulated eye alternates per block.
            use_vr (bool): Present in a VR headset instead of on a monitor.
            use_fullscr (bool): Use fullscreen mode.
        """
        self.display_refresh_rate = display_refresh_rate
        # Fixed timing: one reversal every 500 ms, no inter-trial interval or jitter.
        soa=0.5
        iti=0
        jitter=0

        super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr, stereoscopic=True)

        self.instruction_text = f"""Welcome to the Visual Pattern Reversal VEP experiment!

This experiment will run for {n_blocks} blocks of {block_duration_seconds} seconds each.

Press spacebar or controller to continue.
"""

        # Setting up the trial and parameter list
        left_eye = 0
        right_eye = 1
        # Alternate between left and right eye blocks: even blocks stimulate the
        # left eye (label 0), odd blocks the right eye (label 1).
        block_eyes = []
        for block_num in range(n_blocks):
            eye = left_eye if block_num % 2 == 0 else right_eye
            block_eyes.extend([eye] * block_trial_size)
        self.parameter = np.array(block_eyes)

    @staticmethod
    def create_monitor_checkerboard(intensity_checks):
        """Create the checkerboard image for monitor-based presentation.

        Standard parameters for monitor-based pattern reversal VEP:
        1 degree check size (0.5 cpd) rendered at 72 pixels per degree.
        """
        return contrast_contrast(
            visual_size=(16, 16), # aspect ratio in degrees
            ppd=72, # pixels per degree
            frequency=(0.5, 0.5), # spatial frequency of the checkerboard (0.5 cpd = 1 degree check size)
            intensity_checks=intensity_checks,
            target_shape=(0, 0),
            alpha=0,
            tau=0
        )

    @staticmethod
    def create_vr_checkerboard(intensity_checks):
        """Create the checkerboard image optimized for the Quest 2 headset.

        Quest 2 has approximately 20 pixels per degree and a ~90° FOV;
        uses the standard 1 degree check size (0.5 cpd).
        """
        return contrast_contrast(
            visual_size=(20, 20), # size in degrees - covers a good portion of the FOV
            ppd=QUEST_PPD, # pixels per degree for Quest 2
            frequency=(0.5, 0.5), # spatial frequency (0.5 cpd = 1 degree check size)
            intensity_checks=intensity_checks,
            target_shape=(0, 0),
            alpha=0,
            tau=0
        )

    def load_stimulus(self) -> Dict[str, Any]:
        """Validate the frame rate and build all stimuli once.

        Returns:
            Dict[str, Any]: Per-eye stimulus dicts keyed 'left'/'right' in VR,
            or a single 'monoscopic' entry on a monitor. Each entry holds the
            two phase-inverted checkerboards, a fixation stim, and VR block
            instruction text.
        """
        # Frame rate, in Hz
        # TODO: Fix - Rift.GetActualFrameRate() crashes in psychxr due to 'EndFrame called before BeginFrame'
        actual_frame_rate = np.round(self.window.displayRefreshRate if self.use_vr else self.window.getActualFrameRate())

        # Ensure the expected frame rate matches and is divisible by the stimulus rate (soa)
        # NOTE(review): every integer frame rate satisfies `rate % 0.5 == 0`, so this
        # first check is vacuous for soa=0.5 — confirm the intended constraint
        # (possibly that soa * frame_rate is an integer number of frames).
        assert actual_frame_rate % self.soa == 0, f"Expected frame rate divisible by stimulus rate: {self.soa}, but got {actual_frame_rate} Hz"
        assert self.display_refresh_rate == actual_frame_rate, f"Expected frame rate {self.display_refresh_rate} Hz, but got {actual_frame_rate} Hz"

        if self.use_vr:
            # Create the VR checkerboard
            create_checkerboard = self.create_vr_checkerboard
            # the window is large over the eye, checkerboard should only cover the central vision
            size = self.window.size / 1.5
        else:
            # Create the Monitor checkerboard; square stimulus sized to window height.
            # NOTE(review): uses self.window_size here but self.window.size elsewhere —
            # presumably an attribute set by BaseExperiment; confirm.
            create_checkerboard = self.create_monitor_checkerboard
            size = (self.window_size[1], self.window_size[1])

        # The surrounding / periphery needs to be dark when not using vr.
        # Also used for covering eye which is not being stimulated.
        self.black_background = visual.Rect(self.window,
                                            width=self.window.size[0],
                                            height=self.window.size[1],
                                            fillColor='black')

        # A grey background behind the checkerboard must be used in vr to maintain luminance.
        self.grey_background = visual.Rect(self.window,
                                           width=self.window.size[0],
                                           height=self.window.size[1],
                                           fillColor=[-0.22, -0.22, -0.22])

        # Create checkerboard stimuli
        def create_checkerboard_stim(intensity_checks, pos):
            return visual.ImageStim(self.window,
                                    image=create_checkerboard(intensity_checks)['img'],
                                    units='pix', size=size, color='white', pos=pos)

        # Create fixation stimuli (small red dot; parameters differ for VR vs monitor units)
        def create_fixation_stim(pos):
            fixation = visual.GratingStim(
                win=self.window,
                pos=pos,
                sf=400 if self.use_vr else 0.2,
                color=[1, 0, 0]
            )
            fixation.size = 0.02 if self.use_vr else 0.4
            return fixation

        # Create VR block instruction stimuli
        def create_vr_block_instruction(pos):
            return visual.TextStim(win=self.window, text="Focus on the red dot, and try not to blink whilst the squares are flashing, press the spacebar or pull the controller trigger when ready to commence.", color=[-1, -1, -1],
                                   pos=pos, height=0.1)

        # Create and position all stimuli for a single eye
        def create_eye_stimuli(eye_x_pos, pix_x_pos):
            return {
                'checkerboards': [
                    create_checkerboard_stim((1, -1), pos=(pix_x_pos, 0)),
                    create_checkerboard_stim((-1, 1), pos=(pix_x_pos, 0))
                ],
                'fixation': create_fixation_stim([eye_x_pos, 0]),
                'vr_block_instructions': create_vr_block_instruction((eye_x_pos, 0))
            }

        # Structure all stimuli in organized dictionary
        if self.use_vr:
            # Calculate pixel positions for stereoscopic presentation
            window_width = self.window.size[0]
            left_pix_x_pos = self.left_eye_x_pos * (window_width / 2)
            right_pix_x_pos = self.right_eye_x_pos * (window_width / 2)

            return {
                'left': create_eye_stimuli(self.left_eye_x_pos, left_pix_x_pos),
                'right': create_eye_stimuli(self.right_eye_x_pos, right_pix_x_pos)
            }
        else:
            return {
                'monoscopic': create_eye_stimuli(0, 0)
            }

    def _present_vr_block_instructions(self, open_eye, closed_eye):
        """Draw instructions and fixation to the open eye; black out the closed eye."""
        self.window.setBuffer(open_eye)
        self.stim[open_eye]['vr_block_instructions'].draw()
        self.stim[open_eye]['fixation'].draw()
        self.window.setBuffer(closed_eye)
        self.black_background.draw()

    def present_block_instructions(self, current_block: int) -> None:
        """Show per-block instructions; even blocks stimulate the left eye, odd the right."""
        if self.use_vr:
            if current_block % 2 == 0:
                self._present_vr_block_instructions(open_eye="left", closed_eye="right")
            else:
                self._present_vr_block_instructions(open_eye="right", closed_eye="left")
        else:
            if current_block % 2 == 0:
                instruction_text = (
                    "Close your right eye, then focus on the red dot with your left eye. "
                    "Press spacebar or controller when ready."
                )
            else:
                instruction_text = (
                    "Close your left eye, then focus on the red dot with your right eye. "
                    "Press spacebar or controller when ready."
                )
            text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1])
            text.draw()
            self.stim['monoscopic']['fixation'].draw()
        self.window.flip()

    def present_stimulus(self, idx: int):
        """Present one pattern reversal and push its marker to the EEG stream.

        Args:
            idx (int): Trial index within the current block; its parity selects
                which of the two phase-inverted checkerboards is drawn.
        """
        # Get the label of the trial (0 = left eye, 1 = right eye)
        trial_idx = self.current_block_index * self.block_trial_size + idx
        label = self.parameter[trial_idx]

        open_eye = 'left' if label == 0 else 'right'
        closed_eye = 'left' if label == 1 else 'right'

        # draw checkerboard and fixation to the stimulated eye
        if self.use_vr:
            self.window.setBuffer(open_eye)
            self.grey_background.draw()
            display = self.stim['left' if label == 0 else 'right']
        else:
            self.black_background.draw()
            display = self.stim['monoscopic']

        # Alternate between the two phase-inverted checkerboards every trial
        checkerboard_frame = idx % 2
        display['checkerboards'][checkerboard_frame].draw()
        display['fixation'].draw()

        if self.use_vr:
            self.window.setBuffer(closed_eye)
            self.black_background.draw()
        self.window.flip()

        # Pushing the sample to the EEG.
        # Guard: eeg is Optional — without the check this raised AttributeError
        # when the experiment was run without a recording device.
        if self.eeg:
            marker = self.markernames[label]
            self.eeg.push_sample(marker=marker, timestamp=time())

    def present_iti(self):
        """Blank both VR eye buffers during the inter-trial interval (no-op on monitor)."""
        if self.use_vr:
            for eye in ['left', 'right']:
                self.window.setBuffer(eye)
                self.black_background.draw()
            self.window.flip()
9 changes: 6 additions & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@

scikit-learn>=0.23.2
pandas>=1.1.4
numpy>=1.26.0; python_version >= "3.9"
# psychxr build pinned to this version of numpy.
numpy>=1.26,<1.27; python_version >= "3.9"
numpy<=1.24.4; python_version == "3.8"
mne>=0.20.8
seaborn>=0.11.0
Expand Down Expand Up @@ -60,7 +61,8 @@ ffpyplayer==4.5.2 # 4.5.3 fails to build as wheel.
psychtoolbox
scikit-learn>=0.23.2
pandas>=1.1.4
numpy>=1.26.0; python_version >= "3.9"
# psychxr build pinned to this version of numpy.
numpy>=1.26,<1.27; python_version >= "3.9"
numpy==1.24.4; python_version == "3.8"
mne>=0.20.8
seaborn>=0.11.0
Expand All @@ -87,7 +89,8 @@ pyglet==1.4.11 ; platform_system == "Windows"
psychxr>=0.2.4rc2; platform_system == "Windows" and python_version <= "3.9"



# Used for generating checkerboard in pattern reversal experiment
stimupy

## ~~ Docsbuild Requirements ~~
recommonmark
Expand Down
Loading