11"""
2- This module implement the "new" binary OpenEphys format.
2+ This module implements the "new" binary OpenEphys format.
33In this format channels are interleaved in one file.
44
55
2424
2525class OpenEphysBinaryRawIO (BaseRawIO ):
2626 """
27- Handle several Block and several Segment .
27+ Handle several Blocks and several Segments.
2828
2929
3030 # Correspondencies
3131 Neo OpenEphys
3232 block[n-1] experiment[n] New device start/stop
3333 segment[s-1] recording[s] New recording start/stop
3434
35- handle several signals stream
36- handle events special npy data are respresented as array_annotations
37- do not handle spikes at the moment can be implemented if user demand
35+ This IO handles several signal streams.
36+ Special event (npy) data are represented as array_annotations.
37+ The current implementation does not handle spiking data; this will be added upon user request.
3838
3939 """
4040 extensions = []
@@ -206,7 +206,7 @@ def _parse_header(self):
206206 sig_ann ['__array_annotations__' ][k ] = values
207207
208208 # array annotations for event channels
209- # use other possible datat in _possible_event_stream_names
209+ # use other possible data in _possible_event_stream_names
210210 for stream_index , stream_name in enumerate (event_stream_names ):
211211 ev_ann = seg_ann ['events' ][stream_index ]
212212 d = self ._evt_streams [0 ][0 ][stream_index ]
@@ -319,11 +319,11 @@ def explore_folder(dirname):
319319
320320 Returns
321321 -------
322- nested dictionaries containing structure and stream information:
322+ nested dictionaries containing structure and stream information
323323 """
324324 nb_block = 0
325325 nb_segment_per_block = []
326- # nested node_name / seg_index
326+ # nested dictionary: block_index > seg_index > data_type > stream_name
327327 all_streams = {}
328328 for root , dirs , files in os .walk (dirname ):
329329 for file in files :
@@ -360,7 +360,7 @@ def explore_folder(dirname):
360360
361361 if (root / 'continuous' ).exists () and len (structure ['continuous' ]) > 0 :
362362 for d in structure ['continuous' ]:
363- # when multi Record Node the stream name also contain
363+ # when multi Record Node the stream name also contains
364364 # the node name to make it unique
365365 stream_name = node_name + '#' + d ['folder_name' ]
366366
@@ -371,12 +371,6 @@ def explore_folder(dirname):
371371 timestamp0 = timestamps [0 ]
372372 t_start = timestamp0 / d ['sample_rate' ]
373373
374- # sync_timestamp is -1 for all elements in our dataset
375- # sync_timestamp_file = root / 'continuous' /
376- # d['folder_name'] / 'synchronized_timestamps.npy'
377- # sync_timestamps = np.load(str(sync_timestamp_file), mmap_mode='r')
378- # t_start = sync_timestamps[0]
379-
380374 # TODO for later : gap checking
381375 signal_stream = d .copy ()
382376 signal_stream ['raw_filename' ] = str (raw_filename )
0 commit comments