Skip to content

Commit

Permalink
add ci
Browse files Browse the repository at this point in the history
  • Loading branch information
Helveg committed Feb 13, 2024
1 parent 8fd86bf commit 240da7f
Show file tree
Hide file tree
Showing 7 changed files with 347 additions and 1 deletion.
34 changes: 34 additions & 0 deletions .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
[bumpversion]
current_version = 0.0.0b1
files = bsb_nest/__init__.py
commit = True
tag = True
parse = ^
(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
((?P<prekind>a|alpha|b|beta|d|dev|rc)
(?P<pre>\d+) # pre-release version num
)?
(\.(?P<postkind>post)(?P<post>\d+))? # post-release
serialize =
{major}.{minor}.{patch}{prekind}{pre}.{postkind}{post}
{major}.{minor}.{patch}{prekind}{pre}
{major}.{minor}.{patch}{postkind}{post}
{major}.{minor}.{patch}

[bumpversion:part:prekind]
optional_value = _
values =
_
dev
d
alpha
a
beta
b
rc

[bumpversion:part:postkind]
optional_value = _
values =
_
post
15 changes: 15 additions & 0 deletions .github/workflows/black.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
name: Black

on: [push, pull_request]

jobs:
black:
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: "--check --verbose"
version: "24.1.1"
20 changes: 20 additions & 0 deletions .github/workflows/isort.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: Run isort
on:
- push

jobs:
isort:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: Install apt dependencies
# Install `libopenmpi` for mpi4py
run: |
sudo apt update
sudo apt install openmpi-bin libopenmpi-dev
# Install dependencies for proper 1st/2nd/3rd party import sorting
- run: pip install -e .[parallel]
- uses: isort/isort-action@master
33 changes: 33 additions & 0 deletions .github/workflows/main.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: Test BSB NEST adapter

on: [push, pull_request]

jobs:
build:
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

- name: Install apt dependencies
run: |
sudo apt update
sudo apt install openmpi-bin libopenmpi-dev
- name: Install dependencies & self
run: |
pip install --upgrade pip
# Install self, with test dependencies
pip install .[test,parallel]
- name: Run tests & coverage
run: |
coverage run -p -m unittest discover -v -s ./tests
mpiexec -n 2 coverage run -p -m unittest discover -v -s ./tests
10 changes: 10 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
repos:
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.1.1
hooks:
- id: black
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
name: isort (python)
15 changes: 14 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,17 @@ dependencies = ["bsb-core>=4.0.0b4,<=4.0.0b9999"]
name = "bsb_nest"

[project.entry-points."bsb.simulation_backends"]
nest = "bsb_nest"
nest = "bsb_nest"

[project.optional-dependencies]
parallel = ["bsb-core[parallel]"]
test = ["bsb-test>=0.0.0b0,<=0.0.0b9999", "coverage~=7.0", "bsb-hdf5>=1.0.0b0"]
dev = [
"pre-commit~=3.5",
"black~=24.1.1",
"isort~=5.12",
"bump2version~=1.0"
]

[tool.isort]
profile = "black"
221 changes: 221 additions & 0 deletions tests/test_nest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,221 @@
from bsb.config import from_file, Configuration

[CI annotation on tests/test_nest.py line 1 — GitHub Actions / isort: "Imports are incorrectly sorted and/or formatted."]
from bsb.core import Scaffold
from bsb.services import MPI
from bsb_test import RandomStorageFixture, get_test_config, NumpyTestCase
import numpy as np
import unittest
import nest


@unittest.skipIf(MPI.get_size() > 1, "Skipped during parallel testing.")
class TestNest(
    RandomStorageFixture, NumpyTestCase, unittest.TestCase, engine_name="hdf5"
):
    """
    Integration tests for the BSB NEST simulation adapter.

    Each test builds a small scaffold network from a test configuration,
    runs it through the NEST backend, and checks the recorded activity
    (and, where applicable, that BSB config values were transferred to the
    NEST nodes/synapses). Skipped under MPI because the assertions assume
    a single-process NEST kernel.
    """

    def test_gif_pop_psc_exp(self):
        """Mimics test_gif_pop_psc_exp of NEST's test suite to validate the adapter."""
        pop_size = 500

        # Load the shared test config and tweak it for this scenario.
        cfg = get_test_config("gif_pop_psc_exp")
        sim_cfg = cfg.simulations.test_nest
        sim_cfg.resolution = 0.5
        sim_cfg.cell_models.gif_pop_psc_exp.constants["N"] = pop_size

        network = Scaffold(cfg, self.storage)
        network.compile()

        # Captured from inside the probe/spy callbacks below via `nonlocal`.
        simulation = None
        vm = None
        nspike = None

        def probe(_, sim, data):
            # Probe and steal some local refs to data that's otherwise encapsulated :)
            nonlocal vm, simulation
            simulation = sim

            # Get the important information out of the sim/data
            cell_m = sim.cell_models.gif_pop_psc_exp
            conn_m = sim.connection_models.gif_pop_psc_exp
            pop = data.populations[cell_m]
            syn = data.connections[conn_m]

            # Add a voltmeter
            vm = nest.Create(
                "voltmeter",
                params={"record_from": ["n_events"], "interval": sim.resolution},
            )
            nest.Connect(vm, pop)

            # Add a spying recorder
            def spy(_):
                nonlocal nspike

                # Discard the first 1000 ms as warm-up before analyzing rates.
                start_time = 1000
                start_step = int(start_time / simulation.resolution)
                nspike = vm.events["n_events"][start_step:]

            data.result.create_recorder(spy)

            # Test node parameter transfer
            for param, value in {
                "V_reset": 0.0,
                "V_T_star": 10.0,
                "E_L": 0.0,
                "Delta_V": 2.0,
                "C_m": 250.0,
                "tau_m": 20.0,
                "t_ref": 4.0,
                "I_e": 500.0,
                "lambda_0": 10.0,
                "tau_syn_in": 2.0,
                "tau_sfa": (500.0,),
                "q_sfa": (1.0,),
            }.items():
                with self.subTest(param=param, value=value):
                    self.assertEqual(value, pop.get(param))

            # Test synapse parameter transfer
            for param, value in (("weight", -6.25), ("delay", 1)):
                with self.subTest(param=param, value=value):
                    self.assertEqual(value, syn.get(param))

        # Hook the probe in after NEST prepare so the populations exist.
        network.simulations.test_nest.post_prepare.append(probe)
        network.run_simulation("test_nest")

        # Convert mean event count per bin to a population firing rate in Hz.
        mean_nspike = np.mean(nspike)
        mean_rate = mean_nspike / pop_size / simulation.resolution * 1000.0

        # NOTE(review): the count variance is divided twice by
        # (pop_size * resolution / 1000), squaring the conversion factor —
        # presumably intentional, since variance scales with the square of a
        # linear rescaling. Confirm against NEST's reference test.
        var_nspike = np.var(nspike)
        var_nspike = var_nspike / pop_size / simulation.resolution * 1000.0
        var_rate = var_nspike / pop_size / simulation.resolution * 1000.0

        # Tolerances and expected statistics for the mesoscopic population rate.
        err_mean = 1.0
        err_var = 6.0
        expected_rate = 22.0
        expected_var = 102.0

        self.assertGreaterEqual(err_mean, abs(mean_rate - expected_rate))
        # NOTE(review): unlike the mean check, this is one-sided (no abs()),
        # so it only bounds how far the variance exceeds the expectation —
        # verify whether that asymmetry is intended.
        self.assertGreaterEqual(err_var, var_rate - expected_var)

    def test_brunel(self):
        """
        Run the Brunel network from a JSON test config and check that both
        the excitatory and inhibitory populations fire at ~50 Hz.
        """
        cfg = get_test_config("brunel.json")
        simcfg = cfg.simulations.test_nest

        network = Scaffold(cfg, self.storage)
        network.compile()
        result = network.run_simulation("test_nest")

        # Locate the spike recorders for each population by device annotation.
        spiketrains = result.block.segments[0].spiketrains
        sr_exc, sr_inh = None, None
        for st in spiketrains:
            if st.annotations["device"] == "sr_exc":
                sr_exc = st
            elif st.annotations["device"] == "sr_inh":
                sr_inh = st

        self.assertIsNotNone(sr_exc)
        self.assertIsNotNone(sr_inh)

        # Mean single-cell rate in Hz: spikes / duration(ms) * 1000 / cells.
        rate_ex = (
            len(sr_exc) / simcfg.duration * 1000.0 / sr_exc.annotations["pop_size"]
        )
        rate_in = (
            len(sr_inh) / simcfg.duration * 1000.0 / sr_inh.annotations["pop_size"]
        )

        self.assertAlmostEqual(rate_in, 50, delta=1)
        self.assertAlmostEqual(rate_ex, 50, delta=1)

    def test_brunel_with_conn(self):
        """
        Same firing-rate check as ``test_brunel``, but with the network
        connectivity built by the BSB ("brunel_wbsb" config) instead of NEST.
        """
        cfg = get_test_config("brunel_wbsb")
        simcfg = cfg.simulations.test_nest

        network = Scaffold(cfg, self.storage)
        network.compile()
        result = network.run_simulation("test_nest")

        # Locate the spike recorders for each population by device annotation.
        spiketrains = result.block.segments[0].spiketrains
        sr_exc, sr_inh = None, None
        for st in spiketrains:
            if st.annotations["device"] == "sr_exc":
                sr_exc = st
            elif st.annotations["device"] == "sr_inh":
                sr_inh = st

        self.assertIsNotNone(sr_exc)
        self.assertIsNotNone(sr_inh)

        # Mean single-cell rate in Hz: spikes / duration(ms) * 1000 / cells.
        rate_ex = (
            len(sr_exc) / simcfg.duration * 1000.0 / sr_exc.annotations["pop_size"]
        )
        rate_in = (
            len(sr_inh) / simcfg.duration * 1000.0 / sr_inh.annotations["pop_size"]
        )

        self.assertAlmostEqual(rate_in, 50, delta=1)
        self.assertAlmostEqual(rate_ex, 50, delta=1)

    def test_iaf_cond_alpha(self):
        """
        Create an iaf_cond_alpha in NEST, and with the BSB, with a base current, and check
        spike times.
        """
        # Local import shadows the module-level `nest` import; kept for
        # emphasis that this test drives NEST directly as the reference.
        import nest

        # Reference run: a single neuron driven by a 260 pA base current.
        nest.ResetKernel()
        nest.resolution = 0.1
        A = nest.Create("iaf_cond_alpha", 1, params={"I_e": 260.0})
        spikeA = nest.Create("spike_recorder")
        nest.Connect(A, spikeA)
        nest.Simulate(1000.0)

        spike_times_nest = spikeA.get("events")["times"]

        # Equivalent BSB network: one fixed-position cell of the same model
        # with the same constants, recorded by a spike_recorder device.
        cfg = Configuration(
            {
                "name": "test",
                "storage": {"engine": "hdf5"},
                "network": {"x": 1, "y": 1, "z": 1},
                "partitions": {"B": {"type": "layer", "thickness": 1}},
                "cell_types": {"A": {"spatial": {"radius": 1, "count": 1}}},
                "placement": {
                    "placement_A": {
                        "strategy": "bsb.placement.strategy.FixedPositions",
                        "cell_types": ["A"],
                        "partitions": ["B"],
                        "positions": [[1, 1, 1]],
                    }
                },
                "connectivity": {},
                "after_connectivity": {},
                "simulations": {
                    "test": {
                        "simulator": "nest",
                        "duration": 1000,
                        "resolution": 0.1,
                        "cell_models": {
                            "A": {
                                "model": "iaf_cond_alpha",
                                "constants": {"I_e": 260.0},
                            }
                        },
                        "connection_models": {},
                        "devices": {
                            "record_A_spikes": {
                                "device": "spike_recorder",
                                "delay": 0.5,
                                # "targetting" is the spelling the BSB config
                                # schema expects; do not "fix" it.
                                "targetting": {
                                    "strategy": "cell_model",
                                    "cell_models": ["A"],
                                },
                            }
                        },
                    }
                },
            }
        )

        netw = Scaffold(cfg, self.storage)
        netw.compile()
        results = netw.run_simulation("test")
        spike_times_bsb = results.spiketrains[0]
        # The adapter run must reproduce the reference spike times exactly
        # (within NumpyTestCase's closeness tolerance).
        self.assertClose(np.array(spike_times_nest), np.array(spike_times_bsb))

0 comments on commit 240da7f

Please sign in to comment.