From 240da7fe8fa5cf9cc5e653352664fdaf20712eb0 Mon Sep 17 00:00:00 2001 From: Robin De Schepper Date: Tue, 13 Feb 2024 16:38:01 +0100 Subject: [PATCH] add ci --- .bumpversion.cfg | 34 ++++++ .github/workflows/black.yml | 15 +++ .github/workflows/isort.yml | 20 ++++ .github/workflows/main.yaml | 33 ++++++ .pre-commit-config.yaml | 10 ++ pyproject.toml | 15 ++- tests/test_nest.py | 221 ++++++++++++++++++++++++++++++++++++ 7 files changed, 347 insertions(+), 1 deletion(-) create mode 100644 .bumpversion.cfg create mode 100644 .github/workflows/black.yml create mode 100644 .github/workflows/isort.yml create mode 100644 .github/workflows/main.yaml create mode 100644 .pre-commit-config.yaml create mode 100644 tests/test_nest.py diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 0000000..bfecd3a --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,34 @@ +[bumpversion] +current_version = 0.0.0b1 +files = bsb_nest/__init__.py +commit = True +tag = True +parse = ^ + (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+) + ((?P<prekind>a|alpha|b|beta|d|dev|rc) + (?P<pre>
\d+)  # pre-release version num
+	)?
+	(\.(?P<postkind>post)(?P<post>\d+))?  # post-release
+serialize = 
+	{major}.{minor}.{patch}{prekind}{pre}.{postkind}{post}
+	{major}.{minor}.{patch}{prekind}{pre}
+	{major}.{minor}.{patch}{postkind}{post}
+	{major}.{minor}.{patch}
+
+[bumpversion:part:prekind]
+optional_value = _
+values = 
+	_
+	dev
+	d
+	alpha
+	a
+	beta
+	b
+	rc
+
+[bumpversion:part:postkind]
+optional_value = _
+values = 
+	_
+	post
diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
new file mode 100644
index 0000000..b3489f2
--- /dev/null
+++ b/.github/workflows/black.yml
@@ -0,0 +1,15 @@
+name: Black
+
+on: [push, pull_request]
+
+jobs:
+  black:
+    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v4
+    - uses: psf/black@stable
+      with:
+        options: "--check --verbose"
+        version: "24.1.1"
diff --git a/.github/workflows/isort.yml b/.github/workflows/isort.yml
new file mode 100644
index 0000000..ed77430
--- /dev/null
+++ b/.github/workflows/isort.yml
@@ -0,0 +1,20 @@
+name: Run isort
+on:
+  - push
+
+jobs:
+  isort:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.9
+      - name: Install apt dependencies
+        # Install `libopenmpi` for mpi4py
+        run: |
+          sudo apt update
+          sudo apt install openmpi-bin libopenmpi-dev
+      # Install dependencies for proper 1st/2nd/3rd party import sorting
+      - run: pip install -e .[parallel]
+      - uses: isort/isort-action@master
\ No newline at end of file
diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
new file mode 100644
index 0000000..f7c6d67
--- /dev/null
+++ b/.github/workflows/main.yaml
@@ -0,0 +1,33 @@
+name: Test BSB NEST adapter
+
+on: [push, pull_request]
+
+jobs:
+  build:
+    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.9", "3.10", "3.11", "3.12"]
+    steps:
+    - uses: actions/checkout@v4
+
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ matrix.python-version }}
+
+    - name: Install apt dependencies
+      run: |
+        sudo apt update
+        sudo apt install openmpi-bin libopenmpi-dev
+
+    - name: Install dependencies & self
+      run: |
+        pip install --upgrade pip
+        # Install self, with test dependencies
+        pip install .[test,parallel]
+    - name: Run tests & coverage
+      run: |
+        coverage run -p -m unittest discover -v -s ./tests
+        mpiexec -n 2 coverage run -p -m unittest discover -v -s ./tests
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..748ece2
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,10 @@
+repos:
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 24.1.1
+    hooks:
+      - id: black
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+        name: isort (python)
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index ff77bf6..086b6c3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,4 +15,17 @@ dependencies = ["bsb-core>=4.0.0b4,<=4.0.0b9999"]
 name = "bsb_nest"
 
 [project.entry-points."bsb.simulation_backends"]
-nest = "bsb_nest"
\ No newline at end of file
+nest = "bsb_nest"
+
+[project.optional-dependencies]
+parallel = ["bsb-core[parallel]"]
+test = ["bsb-test>=0.0.0b0,<=0.0.0b9999", "coverage~=7.0", "bsb-hdf5>=1.0.0b0"]
+dev = [
+    "pre-commit~=3.5",
+    "black~=24.1.1",
+    "isort~=5.12",
+    "bump2version~=1.0"
+]
+
+[tool.isort]
+profile = "black"
\ No newline at end of file
diff --git a/tests/test_nest.py b/tests/test_nest.py
new file mode 100644
index 0000000..238116b
--- /dev/null
+++ b/tests/test_nest.py
@@ -0,0 +1,221 @@
+from bsb.config import from_file, Configuration
+from bsb.core import Scaffold
+from bsb.services import MPI
+from bsb_test import RandomStorageFixture, get_test_config, NumpyTestCase
+import numpy as np
+import unittest
+import nest
+
+
+@unittest.skipIf(MPI.get_size() > 1, "Skipped during parallel testing.")
+class TestNest(
+    RandomStorageFixture, NumpyTestCase, unittest.TestCase, engine_name="hdf5"
+):
+    def test_gif_pop_psc_exp(self):
+        """Mimics test_gif_pop_psc_exp of NEST's test suite to validate the adapter."""
+        pop_size = 500
+
+        cfg = get_test_config("gif_pop_psc_exp")
+        sim_cfg = cfg.simulations.test_nest
+        sim_cfg.resolution = 0.5
+        sim_cfg.cell_models.gif_pop_psc_exp.constants["N"] = pop_size
+
+        network = Scaffold(cfg, self.storage)
+        network.compile()
+
+        simulation = None
+        vm = None
+        nspike = None
+
+        def probe(_, sim, data):
+            # Probe and steal some local refs to data that's otherwise encapsulated :)
+            nonlocal vm, simulation
+            simulation = sim
+
+            # Get the important information out of the sim/data
+            cell_m = sim.cell_models.gif_pop_psc_exp
+            conn_m = sim.connection_models.gif_pop_psc_exp
+            pop = data.populations[cell_m]
+            syn = data.connections[conn_m]
+
+            # Add a voltmeter
+            vm = nest.Create(
+                "voltmeter",
+                params={"record_from": ["n_events"], "interval": sim.resolution},
+            )
+            nest.Connect(vm, pop)
+
+            # Add a spying recorder
+            def spy(_):
+                nonlocal nspike
+
+                start_time = 1000
+                start_step = int(start_time / simulation.resolution)
+                nspike = vm.events["n_events"][start_step:]
+
+            data.result.create_recorder(spy)
+
+            # Test node parameter transfer
+            for param, value in {
+                "V_reset": 0.0,
+                "V_T_star": 10.0,
+                "E_L": 0.0,
+                "Delta_V": 2.0,
+                "C_m": 250.0,
+                "tau_m": 20.0,
+                "t_ref": 4.0,
+                "I_e": 500.0,
+                "lambda_0": 10.0,
+                "tau_syn_in": 2.0,
+                "tau_sfa": (500.0,),
+                "q_sfa": (1.0,),
+            }.items():
+                with self.subTest(param=param, value=value):
+                    self.assertEqual(value, pop.get(param))
+
+            # Test synapse parameter transfer
+            for param, value in (("weight", -6.25), ("delay", 1)):
+                with self.subTest(param=param, value=value):
+                    self.assertEqual(value, syn.get(param))
+
+        network.simulations.test_nest.post_prepare.append(probe)
+        network.run_simulation("test_nest")
+
+        mean_nspike = np.mean(nspike)
+        mean_rate = mean_nspike / pop_size / simulation.resolution * 1000.0
+
+        var_nspike = np.var(nspike)
+        var_nspike = var_nspike / pop_size / simulation.resolution * 1000.0
+        var_rate = var_nspike / pop_size / simulation.resolution * 1000.0
+
+        err_mean = 1.0
+        err_var = 6.0
+        expected_rate = 22.0
+        expected_var = 102.0
+
+        self.assertGreaterEqual(err_mean, abs(mean_rate - expected_rate))
+        self.assertGreaterEqual(err_var, var_rate - expected_var)
+
+    def test_brunel(self):
+        cfg = get_test_config("brunel.json")
+        simcfg = cfg.simulations.test_nest
+
+        network = Scaffold(cfg, self.storage)
+        network.compile()
+        result = network.run_simulation("test_nest")
+
+        spiketrains = result.block.segments[0].spiketrains
+        sr_exc, sr_inh = None, None
+        for st in spiketrains:
+            if st.annotations["device"] == "sr_exc":
+                sr_exc = st
+            elif st.annotations["device"] == "sr_inh":
+                sr_inh = st
+
+        self.assertIsNotNone(sr_exc)
+        self.assertIsNotNone(sr_inh)
+
+        rate_ex = (
+            len(sr_exc) / simcfg.duration * 1000.0 / sr_exc.annotations["pop_size"]
+        )
+        rate_in = (
+            len(sr_inh) / simcfg.duration * 1000.0 / sr_inh.annotations["pop_size"]
+        )
+
+        self.assertAlmostEqual(rate_in, 50, delta=1)
+        self.assertAlmostEqual(rate_ex, 50, delta=1)
+
+    def test_brunel_with_conn(self):
+        cfg = get_test_config("brunel_wbsb")
+        simcfg = cfg.simulations.test_nest
+
+        network = Scaffold(cfg, self.storage)
+        network.compile()
+        result = network.run_simulation("test_nest")
+
+        spiketrains = result.block.segments[0].spiketrains
+        sr_exc, sr_inh = None, None
+        for st in spiketrains:
+            if st.annotations["device"] == "sr_exc":
+                sr_exc = st
+            elif st.annotations["device"] == "sr_inh":
+                sr_inh = st
+
+        self.assertIsNotNone(sr_exc)
+        self.assertIsNotNone(sr_inh)
+
+        rate_ex = (
+            len(sr_exc) / simcfg.duration * 1000.0 / sr_exc.annotations["pop_size"]
+        )
+        rate_in = (
+            len(sr_inh) / simcfg.duration * 1000.0 / sr_inh.annotations["pop_size"]
+        )
+
+        self.assertAlmostEqual(rate_in, 50, delta=1)
+        self.assertAlmostEqual(rate_ex, 50, delta=1)
+
+    def test_iaf_cond_alpha(self):
+        """
+        Create an iaf_cond_alpha in NEST, and with the BSB, with a base current, and check
+        spike times.
+        """
+        import nest
+
+        nest.ResetKernel()
+        nest.resolution = 0.1
+        A = nest.Create("iaf_cond_alpha", 1, params={"I_e": 260.0})
+        spikeA = nest.Create("spike_recorder")
+        nest.Connect(A, spikeA)
+        nest.Simulate(1000.0)
+
+        spike_times_nest = spikeA.get("events")["times"]
+
+        cfg = Configuration(
+            {
+                "name": "test",
+                "storage": {"engine": "hdf5"},
+                "network": {"x": 1, "y": 1, "z": 1},
+                "partitions": {"B": {"type": "layer", "thickness": 1}},
+                "cell_types": {"A": {"spatial": {"radius": 1, "count": 1}}},
+                "placement": {
+                    "placement_A": {
+                        "strategy": "bsb.placement.strategy.FixedPositions",
+                        "cell_types": ["A"],
+                        "partitions": ["B"],
+                        "positions": [[1, 1, 1]],
+                    }
+                },
+                "connectivity": {},
+                "after_connectivity": {},
+                "simulations": {
+                    "test": {
+                        "simulator": "nest",
+                        "duration": 1000,
+                        "resolution": 0.1,
+                        "cell_models": {
+                            "A": {
+                                "model": "iaf_cond_alpha",
+                                "constants": {"I_e": 260.0},
+                            }
+                        },
+                        "connection_models": {},
+                        "devices": {
+                            "record_A_spikes": {
+                                "device": "spike_recorder",
+                                "delay": 0.5,
+                                "targetting": {
+                                    "strategy": "cell_model",
+                                    "cell_models": ["A"],
+                                },
+                            }
+                        },
+                    }
+                },
+            }
+        )
+
+        netw = Scaffold(cfg, self.storage)
+        netw.compile()
+        results = netw.run_simulation("test")
+        spike_times_bsb = results.spiketrains[0]
+        self.assertClose(np.array(spike_times_nest), np.array(spike_times_bsb))