don't load spike times greater than simtime + fixes
Fixes #84
espenhgn committed Feb 8, 2024
1 parent 479c2c4 · commit d3c6644
Showing 4 changed files with 33 additions and 15 deletions.
8 changes: 8 additions & 0 deletions .gitignore
```diff
@@ -60,6 +60,14 @@ documentation/
 # Simulation output
 simulation_output_*/
 *.tar
+*.h5
+*.pdf
+*.gdf
+*.eps
+examples/benchmarks
+exmaples/Hagen_et_al_2016_cercor
+jobs
+logs
 
 # compiled NMODL files and folders
 x86_64/
```
22 changes: 16 additions & 6 deletions hybridLFPy/gdf.py
```diff
@@ -127,7 +127,7 @@ def _blockread(self, fname, skiprows):
                 raise StopIteration
             yield a
 
-    def create(self, re='brunel-py-ex-*.gdf', index=True, skiprows=0):
+    def create(self, re='brunel-py-ex-*.gdf', index=True, skiprows=0, simtime=None):
         """
         Create db from list of gdf file glob
 
@@ -140,7 +140,9 @@ def create(self, re='brunel-py-ex-*.gdf', index=True, skiprows=0):
             Create index on neurons for speed.
         skiprows : int
            Number of skipped first lines
+        simtime : float/int or None
+            if not None, stop reading of gdf files at simtime,
+            assuming that recorded spike times is monotonously increasing
 
         Returns
         -------
@@ -159,10 +161,18 @@ def create(self, re='brunel-py-ex-*.gdf', index=True, skiprows=0):
             print(f)
             while True:
                 try:
-                    for data in self._blockread(f, skiprows):
-                        self.cursor.executemany(
-                            'INSERT INTO spikes VALUES (?, ?)', data)
-                        self.conn.commit()
+                    if simtime is None:
+                        for data in self._blockread(f, skiprows):
+                            self.cursor.executemany(
+                                'INSERT INTO spikes VALUES (?, ?)', data)
+                            self.conn.commit()
+                    else:
+                        for data in self._blockread(f, skiprows):
+                            if data[1] > simtime:
+                                break
+                            self.cursor.executemany(
+                                'INSERT INTO spikes VALUES (?, ?)', data)
+                            self.conn.commit()
                 except RuntimeError:
                     break
```
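The change to create() short-circuits file reading once spike times pass the end of the simulation: since the recorded .gdf files list spikes in ascending time order, the loop can stop at the first block that starts beyond simtime instead of parsing the whole file. The surrounding except RuntimeError reflects PEP 479, under which a StopIteration raised inside a generator (as in _blockread above) resurfaces as RuntimeError on Python 3.7+. Below is a minimal standalone sketch of the same idea, not the library's exact internals; blockread, create_db, and the two-column gid/time file layout are assumptions for illustration:

```python
import sqlite3
from itertools import islice

def blockread(fname, bsize=8192, skiprows=0):
    """Yield blocks of (gid, time) rows from a two-column .gdf-style file."""
    with open(fname) as f:
        for _ in range(skiprows):
            next(f)
        while True:
            lines = list(islice(f, bsize))
            if not lines:
                return  # plain return: raising StopIteration here would
                        # surface as RuntimeError in the caller (PEP 479)
            yield [(int(gid), float(t))
                   for gid, t in (line.split()
                                  for line in lines if line.strip())]

def create_db(dbname, fname, simtime=None):
    """Load spikes from fname into an SQLite table, optionally cut at simtime."""
    conn = sqlite3.connect(dbname)
    cur = conn.cursor()
    cur.execute('CREATE TABLE IF NOT EXISTS spikes (neuron INT, time REAL)')
    for block in blockread(fname):
        # spike times are assumed monotonically increasing, so once a
        # block starts past simtime no later row can lie within it
        if simtime is not None and block and block[0][1] > simtime:
            break
        cur.executemany('INSERT INTO spikes VALUES (?, ?)', block)
    conn.commit()
    conn.close()
```

Called as create_db('spikes.db', 'brunel-py-ex-0.gdf', simtime=1000.0) (the file name is hypothetical), this skips any spikes logged after t = 1000 ms, for example leftovers written by a longer earlier run into the same output directory.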
16 changes: 8 additions & 8 deletions hybridLFPy/helpers.py
```diff
@@ -247,7 +247,7 @@ def setup_file_dest(params, clearDestination=True):
     """
     if RANK == 0:
         if not os.path.isdir(params.savefolder):
-            os.mkdir(params.savefolder)
+            os.makedirs(params.savefolder, exist_ok=True)
             assert(os.path.isdir(params.savefolder))
         else:
             if clearDestination:
@@ -257,35 +257,35 @@ def setup_file_dest(params, clearDestination=True):
                    os.system('find %s -delete' % params.savefolder)
                except BaseException:
                    shutil.rmtree(params.savefolder)
-                os.mkdir(params.savefolder)
+                os.makedirs(params.savefolder, exist_ok=True)
            assert(os.path.isdir(params.savefolder))
 
        if not os.path.isdir(params.sim_scripts_path):
            print('creating %s' % params.sim_scripts_path)
-           os.mkdir(params.sim_scripts_path)
+           os.makedirs(params.sim_scripts_path, exist_ok=True)
 
        if not os.path.isdir(params.cells_path):
            print('creating %s' % params.cells_path)
-           os.mkdir(params.cells_path)
+           os.makedirs(params.cells_path, exist_ok=True)
 
        if not os.path.isdir(params.figures_path):
            print('creating %s' % params.figures_path)
-           os.mkdir(params.figures_path)
+           os.makedirs(params.figures_path, exist_ok=True)
 
        if not os.path.isdir(params.populations_path):
            print('creating %s' % params.populations_path)
-           os.mkdir(params.populations_path)
+           os.makedirs(params.populations_path, exist_ok=True)
 
        try:
            if not os.path.isdir(params.raw_nest_output_path):
                print('creating %s' % params.raw_nest_output_path)
-               os.mkdir(params.raw_nest_output_path)
+               os.makedirs(params.raw_nest_output_path, exist_ok=True)
        except BaseException:
            pass
 
        if not os.path.isdir(params.spike_output_path):
            print('creating %s' % params.spike_output_path)
-           os.mkdir(params.spike_output_path)
+           os.makedirs(params.spike_output_path, exist_ok=True)
 
        for f in ['cellsim16popsParams.py',
                  'cellsim16pops.py',
```
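The helpers.py changes replace every os.mkdir call with os.makedirs(..., exist_ok=True), which also creates missing parent directories and no longer raises if the directory already exists, e.g. left over from an earlier run. A small illustration (the path is hypothetical):

```python
import os

path = 'simulation_output_example/populations'  # hypothetical nested path

# os.mkdir(path) would raise FileNotFoundError here (missing parent
# directory) and FileExistsError if called twice; os.makedirs with
# exist_ok=True avoids both failure modes
os.makedirs(path, exist_ok=True)
os.makedirs(path, exist_ok=True)  # idempotent: safe to repeat
```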
2 changes: 1 addition & 1 deletion hybridLFPy/version.py
```diff
@@ -1 +1 @@
-version = "0.2"
+version = "0.2.1"
```
