Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enhance unit test coverage #2031

Open
wants to merge 32 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
7842987
Add missing tests for scratch
oruebel Feb 6, 2025
da1cbce
Add missing tests for core
oruebel Feb 6, 2025
a69307b
Fix ruff and some test failures
oruebel Feb 6, 2025
cf6ed3c
Fix test failures
oruebel Feb 6, 2025
cfc36c0
Fix test error
oruebel Feb 6, 2025
8b0ae2a
Add missing docstrings in core
oruebel Feb 6, 2025
31e30c0
Add missing tests for IndexSeries constructor
oruebel Feb 6, 2025
1cc20d1
Exclude ImageMaskSeries constructor from coverage because it is only …
oruebel Feb 6, 2025
b84e8e0
Fix failing test
oruebel Feb 6, 2025
3b89708
Add missing tests for NWBLinkSpec
oruebel Feb 6, 2025
40afed3
Add missing NWBDtypeSpec test and fix NWBLinkSpecTest
oruebel Feb 6, 2025
41178ba
Remove bad unit test for LinkSpec
oruebel Feb 6, 2025
d3798fc
Add missing unit tests for NWBGroupSpec
oruebel Feb 6, 2025
5c75be0
Fix broken GroupSpec tests
oruebel Feb 6, 2025
65aac18
Fix broken GroupSpec tests
oruebel Feb 6, 2025
2f2dde8
Add missing unit testo for ImageSegmentation.add_segmentation
oruebel Feb 6, 2025
0b813e4
Add missing test for PlaneSegementation.__init__ without a name
oruebel Feb 6, 2025
dff1ccd
Add missing unit test for PlaneSegementation.add_roi
oruebel Feb 6, 2025
c8878a0
Update names of PlaneSegmentation tests
oruebel Feb 6, 2025
1fed185
Add missing tests for IntracellularRecordingsTables.__init__ with cus…
oruebel Feb 6, 2025
65147be
Fix bad import
oruebel Feb 6, 2025
6c56743
Fix bug in icephys tests
oruebel Feb 6, 2025
e1231e6
Attempt to fix test error
oruebel Feb 6, 2025
e5e4254
Fix ruff
oruebel Feb 6, 2025
461af3c
Fix bug in IntracellularRecordingsTable
oruebel Feb 7, 2025
e14d1d9
Fix broken icephys test
oruebel Feb 7, 2025
ba90342
Add misssing unit tests for TimeIntervals.add_interval and __calculat…
oruebel Feb 7, 2025
22b3025
Fix validation check in SpikeEventSeries
oruebel Feb 7, 2025
f09ad42
Update error check in SpikeEventSeries to handle DataChunkIterator
oruebel Feb 7, 2025
eba14a8
Add missing unit tests for ImageSeries.bits_per_pixel
oruebel Feb 7, 2025
2feb09f
Add pragma nocover for line that is hard to test
oruebel Feb 7, 2025
a06e9d4
Update changelog
oruebel Feb 7, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,12 @@
- Updated `SpikeEventSeries`, `DecompositionSeries`, and `FilteredEphys` examples. @stephprince [#2012](https://github.com/NeurodataWithoutBorders/pynwb/pull/2012)
- Replaced deprecated `scipy.misc.face` dataset in the images tutorial with another example. @stephprince [#2016](https://github.com/NeurodataWithoutBorders/pynwb/pull/2016)

### Bug fixes
- Fixed bug in `IntracellularRecordingsTable.__init__` where `IntracellularResponsesTable` wasn't created correctly when custom category tables were provided. @oruebel [#2031](https://github.com/NeurodataWithoutBorders/pynwb/pull/2031)
- Fixed shape check in `SpikeEventSeries.__init__` to support `AbstractDataChunkIterator` for timestamps/data. @oruebel [#2031](https://github.com/NeurodataWithoutBorders/pynwb/pull/2031)
- Added unit tests to enhance coverage of `core.py`, `image.py`, `spec.py`, `icephys.py`, `epoch.py` and others. @oruebel [#2031](https://github.com/NeurodataWithoutBorders/pynwb/pull/2031)


## PyNWB 2.8.3 (November 19, 2024)

### Enhancements and minor changes
Expand Down
25 changes: 22 additions & 3 deletions src/pynwb/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,9 +85,11 @@ def __init__(self, **kwargs):

@property
def data(self):
    """The data managed by this object. Use ``append``/``extend`` to modify it in place."""
    return self.__data

def __len__(self):
    """Size of the data. Same as len(self.data).

    Raises TypeError if the underlying data is a scalar with no length.
    """
    return len(self.__data)

def __getitem__(self, args):
Expand All @@ -96,6 +98,14 @@ def __getitem__(self, args):
return self.data[args]

def append(self, arg):
"""
Append a single element to the data

Note: The arg to append should be 1 dimension less than the data.
For example, if the data is a 2D array, arg should be a 1D array.
Appending to scalar data is not supported. To append multiple
elements, use extend.
"""
if isinstance(self.data, list):
self.data.append(arg)
elif isinstance(self.data, np.ndarray):
Expand All @@ -105,6 +115,9 @@ def append(self, arg):
raise ValueError(msg)

def extend(self, arg):
"""
Extend the data with multiple elements.
"""
if isinstance(self.data, list):
self.data.extend(arg)
elif isinstance(self.data, np.ndarray):
Expand All @@ -122,15 +135,15 @@ class ScratchData(NWBData):
@docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'data', 'type': ('scalar_data', 'array_data', 'data', Data), 'doc': 'the source of the data'},
{'name': 'notes', 'type': str,
'doc': 'notes about the data. This argument will be deprecated. Use description instead', 'default': ''},
'doc': 'notes about the data. This argument will be deprecated. Use description instead', 'default': None},
{'name': 'description', 'type': str, 'doc': 'notes about the data', 'default': None})
def __init__(self, **kwargs):
notes, description = popargs('notes', 'description', kwargs)
super().__init__(**kwargs)
if notes != '':
if notes is not None:
warn('The `notes` argument of ScratchData.__init__ will be deprecated. Use description instead.',
PendingDeprecationWarning)
if notes != '' and description != '':
if notes is not None and description is not None:
raise ValueError('Cannot provide both notes and description to ScratchData.__init__. The description '
'argument is recommended.')
description = notes
Expand All @@ -141,12 +154,18 @@ def __init__(self, **kwargs):

@property
def notes(self):
    """
    Get the notes attribute.

    .. deprecated::
        Use of ``ScratchData.notes`` will be deprecated; use
        ``ScratchData.description`` instead.
    """
    # Warn on every read so callers migrate to the equivalent
    # `description` attribute before `notes` is removed.
    warn('Use of ScratchData.notes will be deprecated. Use ScratchData.description instead.',
         PendingDeprecationWarning)
    return self.description

@notes.setter
def notes(self, value):
    """
    Set the notes attribute.

    .. deprecated::
        Use of ``ScratchData.notes`` will be deprecated; use
        ``ScratchData.description`` instead.
    """
    # Warn on every write so callers migrate to the equivalent
    # `description` attribute before `notes` is removed.
    warn('Use of ScratchData.notes will be deprecated. Use ScratchData.description instead.',
         PendingDeprecationWarning)
    self.description = value
Expand Down
20 changes: 13 additions & 7 deletions src/pynwb/ecephys.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from collections.abc import Iterable

from hdmf.common import DynamicTableRegion
from hdmf.data_utils import DataChunkIterator, assertEqualShape
from hdmf.data_utils import assertEqualShape
from hdmf.utils import docval, popargs, get_docval, popargs_to_dict, get_data_shape

from . import register_class, CORE_NAMESPACE
Expand Down Expand Up @@ -138,12 +138,18 @@ def __init__(self, **kwargs):
data = kwargs['data']
timestamps = kwargs['timestamps']
if not (isinstance(data, TimeSeries) or isinstance(timestamps, TimeSeries)):
if not (isinstance(data, DataChunkIterator) or isinstance(timestamps, DataChunkIterator)):
if len(data) != len(timestamps):
# Validate the shape of the inputs. Use get_data_shape to also handle the
# case where the data is a AbstractDataChunkIterator
data_shape = get_data_shape(kwargs['data'], strict_no_data_load=True)
timestamps_shape = get_data_shape(kwargs['timestamps'], strict_no_data_load=True)
if (data_shape is not None and
timestamps_shape is not None and
len(data_shape) > 0 and
len(timestamps_shape) > 0):
if (data_shape[0] != timestamps_shape[0] and
data_shape[0] is not None and
timestamps_shape[0] is not None):
raise ValueError('Must provide the same number of timestamps and spike events')
else:
# TODO: add check when we have DataChunkIterators
pass
super().__init__(**kwargs)


Expand Down Expand Up @@ -194,7 +200,7 @@ class EventWaveform(MultiContainerInterface):
}

def __init__(self, **kwargs):
if not self._in_construct_mode:
if not self._in_construct_mode: # pragma: no cover
raise ValueError(
"The EventWaveform neurodata type is deprecated. If you are interested in using it, "
"please create an issue on https://github.com/NeurodataWithoutBorders/nwb-schema/issues."
Expand Down
4 changes: 2 additions & 2 deletions src/pynwb/epoch.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,10 +67,10 @@ def __calculate_idx_count(self, start_time, stop_time, ts_data):
ts_timestamps = ts.timestamps
ts_starting_time = ts.starting_time
ts_rate = ts.rate
if ts_starting_time is not None and ts_rate:
if ts_starting_time is not None and ts_rate is not None:
start_idx = int((start_time - ts_starting_time)*ts_rate)
stop_idx = int((stop_time - ts_starting_time)*ts_rate)
elif len(ts_timestamps) > 0:
elif ts_timestamps is not None and len(ts_timestamps) > 0:
timestamps = ts_timestamps
start_idx = bisect_left(timestamps, start_time)
stop_idx = bisect_left(timestamps, stop_time)
Expand Down
5 changes: 4 additions & 1 deletion src/pynwb/icephys.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,9 +495,12 @@ def __init__(self, **kwargs):
# Compile the complete list of tables
dynamic_table_arg = copy(in_category_tables)
categories_arg = [] if getargs('categories', kwargs) is None else copy(getargs('categories', kwargs))
# Add the required tables if they are missing. We can do this here since we already
# confirmed that any other category tables are empty so we can create the missing tables
# on behalf of the user here
if required_dynamic_table_missing:
if required_dynamic_table_given[2] < 0:
dynamic_table_arg.append(IntracellularResponsesTable)
dynamic_table_arg.append(IntracellularResponsesTable())
if dynamic_table_arg[-1].name not in categories_arg:
categories_arg.insert(0, dynamic_table_arg[-1].name)
if required_dynamic_table_given[1] < 0:
Expand Down
6 changes: 3 additions & 3 deletions src/pynwb/image.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,9 +305,9 @@ def __init__(self, **kwargs):
"The ImageMaskSeries neurodata type is deprecated. If you are interested in using it, "
"please create an issue on https://github.com/NeurodataWithoutBorders/nwb-schema/issues."
)
masked_imageseries = popargs('masked_imageseries', kwargs)
super().__init__(**kwargs)
self.masked_imageseries = masked_imageseries
masked_imageseries = popargs('masked_imageseries', kwargs) # pragma: no cover
super().__init__(**kwargs) # pragma: no cover
self.masked_imageseries = masked_imageseries # pragma: no cover


@register_class('OpticalSeries', CORE_NAMESPACE)
Expand Down
2 changes: 1 addition & 1 deletion src/pynwb/ophys.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,7 +338,7 @@ def add_roi(self, **kwargs):
"""Add a Region Of Interest (ROI) data to this"""
pixel_mask, voxel_mask, image_mask = popargs('pixel_mask', 'voxel_mask', 'image_mask', kwargs)
if image_mask is None and pixel_mask is None and voxel_mask is None:
raise ValueError("Must provide 'image_mask' and/or 'pixel_mask'")
raise ValueError("Must provide at least on of 'image_mask', 'pixel_mask', or 'voxel_mask'")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
raise ValueError("Must provide at least on of 'image_mask', 'pixel_mask', or 'voxel_mask'")
raise ValueError("Must provide at least one of 'image_mask', 'pixel_mask', or 'voxel_mask'")

rkwargs = dict(kwargs)
if image_mask is not None:
rkwargs['image_mask'] = image_mask
Expand Down
23 changes: 18 additions & 5 deletions tests/unit/test_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,6 @@ def test_constructor(self):
self.assertEqual(obj.name, "obj1")

def test_append_list(self):

obj = MyNWBData("obj1", data=[[1, 2, 3], [1, 2, 3]])
obj.append([4, 5, 6])
np.testing.assert_array_equal(obj.data, [[1, 2, 3], [1, 2, 3], [4, 5, 6]])
Expand All @@ -71,10 +70,15 @@ def test_append_ndarray_2d(self):
obj.append([4, 5, 6])
np.testing.assert_array_equal(obj.data, [[1, 2, 3], [1, 2, 3], [4, 5, 6]])

def test_append_ndarray_1d(self):
obj = MyNWBData("obj1", data=np.array([1, 2, 3]))
obj.append([4])
np.testing.assert_array_equal(obj.data, [1, 2, 3, 4])
def test_append_ndarray_1d(self):
obj = MyNWBData("obj1", data=np.array([1, 2, 3]))
obj.append(4)
np.testing.assert_array_equal(obj.data, [1, 2, 3, 4])

def test_append_scalar(self):
    """Appending to scalar data is unsupported and must raise ValueError."""
    obj = NWBData("obj1", data=1)
    with self.assertRaises(ValueError):
        obj.append(2)

def test_extend_list(self):
obj = MyNWBData("obj1", data=[[1, 2, 3], [1, 2, 3]])
Expand All @@ -91,6 +95,15 @@ def test_extend_ndarray_2d(self):
obj.extend([[4, 5, 6]])
np.testing.assert_array_equal(obj.data, [[1, 2, 3], [1, 2, 3], [4, 5, 6]])

def test_extend_scalar(self):
    """Extending scalar data is unsupported and must raise ValueError."""
    obj = NWBData("obj1", data=1)
    with self.assertRaises(ValueError):
        obj.extend(2)

def test_slicing_list_with_list(self):
    """Indexing list-backed data with a list of row indices returns those rows."""
    obj = MyNWBData("obj1", data=[[1, 2, 3], [4, 5, 6]])
    self.assertEqual(obj[[1,]], [[4, 5, 6]])


class TestPrint(TestCase):
def test_print_file(self):
Expand Down
18 changes: 16 additions & 2 deletions tests/unit/test_ecephys.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from pynwb.testing.mock.ecephys import mock_ElectricalSeries

from hdmf.common import DynamicTableRegion
from hdmf.data_utils import DataChunkIterator


def make_electrode_table():
Expand Down Expand Up @@ -161,18 +162,31 @@ def test_init(self):
np.testing.assert_array_equal(sES.data, data)
np.testing.assert_array_equal(sES.timestamps, timestamps)

def test_no_rate(self):
def test_init_no_rate(self):
    """Constructing SpikeEventSeries with rate instead of timestamps raises TypeError."""
    table, region = self._create_table_and_region()
    data = ((1, 1, 1), (2, 2, 2))
    with self.assertRaises(TypeError):
        SpikeEventSeries(name='test_sES', data=data, electrodes=region, rate=1.)

def test_incorrect_timestamps(self):
def test_init_incorrect_timestamps(self):
    """Mismatched data/timestamps lengths raise ValueError with a specific message."""
    table, region = self._create_table_and_region()
    data = ((1, 1, 1), (2, 2, 2))
    with self.assertRaisesWith(ValueError, "Must provide the same number of timestamps and spike events"):
        SpikeEventSeries(name='test_sES', data=data, electrodes=region, timestamps=[1.0, 2.0, 3.0])

def test_init_datachunkiterators(self):
    """Shape validation also works when data/timestamps are DataChunkIterators."""
    # test data
    table, region = self._create_table_and_region()
    data = DataChunkIterator(np.ones((10, 2)))
    good_timestamps = DataChunkIterator(np.arange(10))
    bad_timestamps = DataChunkIterator(np.arange(2))  # too short
    # check creation with good DataChunkIterators passes
    sES = SpikeEventSeries(name='test_sES', data=data, timestamps=good_timestamps, electrodes=region)
    self.assertEqual(sES.name, 'test_sES')
    # check creation with bad DataChunkIterators fails
    with self.assertRaisesWith(ValueError, "Must provide the same number of timestamps and spike events"):
        SpikeEventSeries(name='test_sES', data=data, electrodes=region, timestamps=bad_timestamps)


class ElectrodeGroupConstructor(TestCase):

Expand Down
113 changes: 113 additions & 0 deletions tests/unit/test_epoch.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
from pynwb.base import TimeSeriesReference, TimeSeriesReferenceVectorData
from pynwb.testing import TestCase

from hdmf.backends.hdf5 import H5DataIO


class TimeIntervalsTest(TestCase):

Expand Down Expand Up @@ -66,6 +68,117 @@ def test_dataframe_roundtrip_drop_ts(self):
self.assertNotIn('timeseries', obtained.columns)
self.assertEqual(obtained.loc[2, 'foo'], df.loc[2, 'foo'])

def test_add_interval_basic(self):
    """Test adding interval with just start/stop times"""
    intervals = TimeIntervals(name='test_epochs')
    intervals.add_interval(start_time=10.0, stop_time=20.0)
    first_row = intervals[0].loc[0]
    self.assertEqual(first_row['start_time'], 10.0)
    self.assertEqual(first_row['stop_time'], 20.0)

def test_add_interval_tags_string(self):
"""Test adding interval with tags as comma-separated string"""
epochs = TimeIntervals(name='test_epochs')
epochs.add_interval(start_time=10.0, stop_time=20.0, tags='tag1, tag2, tag3')
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should tags be allowed to be supplied this way? It feels like this approach could be prone to misunderstandings on how the data gets written.

Looking at the line of code that this is testing and the tests that fail if you remove it:

tags = [s.strip() for s in tags.split(",") if not s.isspace()]

it seems this line is currently mostly important for converting the tags argument to a list if a single string is provided. (This issue might be also related to hdmf-dev/hdmf#1211, where we discussed deprecating the ability to provide a single string for a ragged column.)

row = epochs[0]
self.assertEqual(row.loc[0]['tags'], ['tag1', 'tag2', 'tag3'])

def test_add_interval_tags_list(self):
    """Test adding interval with tags as list"""
    intervals = TimeIntervals(name='test_epochs')
    expected_tags = ['tag1', 'tag2', 'tag3']
    intervals.add_interval(start_time=10.0, stop_time=20.0, tags=list(expected_tags))
    self.assertEqual(intervals[0].loc[0]['tags'], expected_tags)

def test_add_interval_single_timeseries_timestamps(self):
"""Test adding interval with single TimeSeries using timestamps"""
epochs = TimeIntervals(name='test_epochs')
ts = TimeSeries(
name='test_ts',
data=list(range(100)),
unit='units',
timestamps=np.linspace(0, 10, 100)
)
epochs.add_interval(start_time=2.0, stop_time=4.0, timeseries=ts)
row = epochs[0]
self.assertEqual(len(row.loc[0]['timeseries']), 1)
ts_ref = row.loc[0]['timeseries'][0]
self.assertEqual(ts_ref.idx_start, 20) # at t=2.0
self.assertEqual(ts_ref.count, 20) # from t=2.0 to t=4.0

def test_add_interval_single_timeseries_timestamps_with_dataio(self):
    """Test adding interval with a single TimeSeries whose timestamps are wrapped in H5DataIO"""
    epochs = TimeIntervals(name='test_epochs')
    ts = TimeSeries(
        name='test_ts',
        data=list(range(100)),
        unit='units',
        timestamps=H5DataIO(np.linspace(0, 10, 100))
    )
    epochs.add_interval(start_time=2.0, stop_time=4.0, timeseries=ts)
    row = epochs[0]
    self.assertEqual(len(row.loc[0]['timeseries']), 1)
    ts_ref = row.loc[0]['timeseries'][0]
    self.assertEqual(ts_ref.idx_start, 20)  # at t=2.0
    self.assertEqual(ts_ref.count, 20)  # from t=2.0 to t=4.0

def test_add_interval_single_timeseries_rate(self):
"""Test adding interval with single TimeSeries using starting_time and rate"""
epochs = TimeIntervals(name='test_epochs')
ts = TimeSeries(
name='test_ts',
data=list(range(100)),
unit='units',
starting_time=0.0,
rate=10.0 # 10 samples per second
)
epochs.add_interval(start_time=2.0, stop_time=4.0, timeseries=ts)
row = epochs[0]
self.assertEqual(len(row.loc[0]['timeseries']), 1)
ts_ref = row.loc[0]['timeseries'][0]
self.assertEqual(ts_ref.idx_start, 20) # at t=2.0
self.assertEqual(ts_ref.count, 20) # from t=2.0 to t=4.0

def test_add_interval_multiple_timeseries(self):
"""Test adding interval with multiple TimeSeries"""
epochs = TimeIntervals(name='test_epochs')
ts1 = TimeSeries(
name='test_ts1',
data=list(range(100)),
unit='units',
timestamps=np.linspace(0, 10, 100)
)
ts2 = TimeSeries(
name='test_ts2',
data=list(range(50)),
unit='units',
starting_time=0.0,
rate=5.0
)
epochs.add_interval(start_time=2.0, stop_time=4.0, timeseries=[ts1, ts2])
row = epochs[0]
self.assertEqual(len(row.loc[0]['timeseries']), 2)
ts1_ref = row.loc[0]['timeseries'][0]
ts2_ref = row.loc[0]['timeseries'][1]
self.assertEqual(ts1_ref.idx_start, 20)
self.assertEqual(ts1_ref.count, 20)
self.assertEqual(ts2_ref.idx_start, 10)
self.assertEqual(ts2_ref.count, 10)

def test_add_interval_timeseries_missing_timing(self):
"""Test error when TimeSeries has neither timestamps nor starting_time/rate"""
epochs = TimeIntervals(name='test_epochs')
ts = TimeSeries(
name='test_ts',
data=list(range(100)),
unit='units',
timestamps=np.linspace(0, 10, 100)
)
ts.fields['timestamps'] = None # remove timestamps to trigger error
msg = "TimeSeries object must have timestamps or starting_time and rate"
with self.assertRaisesWith(ValueError, msg):
epochs.add_interval(start_time=2.0, stop_time=4.0, timeseries=ts)

def test_no_tags(self):
nwbfile = NWBFile("a file with header data", "NB123A", datetime(1970, 1, 1, tzinfo=tz.tzutc()))
df = self.get_dataframe()
Expand Down
Loading
Loading