
Commit

Save and load more than just the main arrays of data in the human population in the HDF5 file. Tested with EULAgized populations. The ri code should also allow loading populations where accessibilities are not set: they are only assigned at birth, and an initial population can be saved before any births have occurred, hence before any accessibilities have been set.
Jonathan Bloedow committed Aug 9, 2024
1 parent 616d6ec commit 222d166
Showing 2 changed files with 25 additions and 6 deletions.
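
For orientation, a minimal usage sketch of the round trip this commit enables, using the save and load_hdf5 entry points that appear in the diffs below. The file name is illustrative, the single-argument constructor mirrors the Population(0) call in the diff, and invoking load_hdf5 as a static method is an assumption.

from idmlaser.numpynumba.population import Population

# Build a population and persist it before any births have occurred,
# i.e. before any accessibility values have been assigned.
pop = Population(100_000)
pop.save("initial_population.h5")   # arrays plus count/capacity/node_count attributes

# Restore it later; scalar attributes and per-agent arrays come back
# from the HDF5 file, with nodeid loaded first.
restored = Population.load_hdf5("initial_population.h5")
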
2 changes: 2 additions & 0 deletions nnmm/ri.py
@@ -52,10 +52,12 @@ def add_with_ips(model, count_births, istart, iend):
mask_0 = (accessibility == 0)
mask_1 = (accessibility == 1)
mask_2 = (accessibility == 2) # for validation
"""
if np.count_nonzero( mask_1 ) == 0:
raise ValueError( "Didn't find anyone with accessibility set to 1 (medium)." )
if np.count_nonzero( mask_2 ) == 0:
raise ValueError( "Didn't find anyone with accessibility set to 2 (medium)." )
"""

# mask_2 is unnecessary since we don't apply any timer for it

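The hunk above fences the two validation checks off with a docstring string so that a population saved before any births (every accessibility value still 0) loads without tripping them. An alternative, sketched here within add_with_ips (where np, accessibility, and the masks are already defined) purely as an illustration rather than what the commit does, is to keep the checks but skip them while no accessibility has been assigned yet:

# Sketch only: run the sanity checks only once some accessibility values exist.
if np.count_nonzero(accessibility) > 0:
    if np.count_nonzero(mask_1) == 0:
        raise ValueError("Didn't find anyone with accessibility set to 1 (medium).")
    if np.count_nonzero(mask_2) == 0:
        raise ValueError("Didn't find anyone with accessibility set to 2.")
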
29 changes: 23 additions & 6 deletions src/idmlaser/numpynumba/population.py
@@ -90,6 +90,10 @@ def save_pd(self, filename: str, tail_number=0 ) -> None:
def save(self, filename: str, tail_number=0 ) -> None:
"""Save the population properties to an HDF5 file"""
with h5py.File(filename, 'w') as hdf:
hdf.attrs['count'] = self._count
hdf.attrs['capacity'] = self._capacity
hdf.attrs['node_count'] = self.node_count

for key, value in self.__dict__.items():
if isinstance(value, np.ndarray):
if tail_number > 0:
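
A self-contained sketch of the save pattern this hunk extends: scalar bookkeeping goes into HDF5 file attributes and every NumPy array attribute becomes a dataset. The Toy class and file name are illustrative, not part of the repo.

import h5py
import numpy as np

class Toy:
    def __init__(self):
        self._count = 3
        self._capacity = 10
        self.node_count = 2
        self.nodeid = np.array([0, 0, 1], dtype=np.int32)
        self.age = np.array([10.0, 20.0, 30.0])

toy = Toy()
with h5py.File("toy.h5", "w") as hdf:
    # Scalars as attributes, mirroring count/capacity/node_count in the diff.
    hdf.attrs["count"] = toy._count
    hdf.attrs["capacity"] = toy._capacity
    hdf.attrs["node_count"] = toy.node_count
    # Every ndarray attribute becomes a dataset of the same name.
    for key, value in toy.__dict__.items():
        if isinstance(value, np.ndarray):
            hdf.create_dataset(key, data=value)
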
@@ -122,6 +126,9 @@ def load_hdf5( filename ):
population = Population(0) # We'll do capacity automatically
"""Load the population properties from an HDF5 file"""
with h5py.File(filename, 'r') as hdf:
population._count = hdf.attrs['count']
population._capacity = hdf.attrs['capacity']
population.node_count = hdf.attrs['node_count']
# Ensure nodeid is loaded first
population.__dict__['nodeid'] = np.array(hdf['nodeid'])
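
And the matching load side, as a sketch of the behaviour the commit message calls out: scalar attributes come back from hdf.attrs, nodeid is read before any other array, and a dataset that was never written (accessibility, which is only assigned at birth) falls back to zeros. Only the attribute reads and the nodeid-first ordering are visible in the diff; the explicit fallback below is an assumption about how missing datasets can be tolerated, and the helper name is made up.

import h5py
import numpy as np

def load_toy(filename, expected=("nodeid", "age", "accessibility")):
    with h5py.File(filename, "r") as hdf:
        count = int(hdf.attrs["count"])
        capacity = int(hdf.attrs["capacity"])
        node_count = int(hdf.attrs["node_count"])

        arrays = {"nodeid": np.array(hdf["nodeid"])}   # nodeid first
        for key in expected:
            if key in arrays:
                continue
            if key in hdf:
                arrays[key] = np.array(hdf[key])
            else:
                # Never saved, e.g. accessibility before any births: default to zeros.
                arrays[key] = np.zeros(count, dtype=np.uint8)
    return count, capacity, node_count, arrays
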

@@ -151,20 +158,27 @@ def load_hdf5( filename ):
return population

def set_capacity( self, new_capacity ):

self._capacity = new_capacity
for key, value in self.__dict__.items():
if isinstance(value, np.ndarray):
# Ignore 2D arrays
if value.ndim == 2:
print( "Ignoring key {key} while expanding capacity." )
continue

old_size = len(value)
if old_size < new_capacity:
# Create a new array of the required size, filled with zeros (or a suitable default)
new_array = np.zeros(new_capacity, dtype=value.dtype)
try:
# Copy the old data into the new array
new_array[:old_size] = value

# Replace the old array with the new array
self.__dict__[key] = new_array
except Exception as ex:
print( str( ex ) )
pdb.set_trace()

return
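
The growth rule in set_capacity, restated as a standalone sketch: 1-D per-agent arrays shorter than the new capacity are replaced by a zero-filled array of the new length with the old contents copied into its head, while 2-D arrays are skipped. The helper name is illustrative.

import numpy as np

def grow(array, new_capacity):
    """Zero-pad a 1-D array to new_capacity; leave 2-D or already-large arrays alone."""
    if array.ndim != 1 or len(array) >= new_capacity:
        return array
    grown = np.zeros(new_capacity, dtype=array.dtype)
    grown[: len(array)] = array
    return grown

ages = np.array([10.0, 20.0, 30.0])
print(grow(ages, 6))   # [10. 20. 30.  0.  0.  0.]
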

@@ -173,10 +187,13 @@ def current( self ):
return 0, self.count # not sure this is useful outside of original case

def current_populations( self ):
print( "NOTE: current_populations fn actually implemented as initial_populations." );
# TBD: maybe initial is all we actually need?
nodeid_array = self.__dict__['nodeid']

# Use np.unique to get the counts directly
_, counts = np.unique(nodeid_array[:self.count], return_counts=True)
counts += self.total_population_per_year[:,0] # why 0? Need year param

# Store counts in node_populations array
node_populations = counts
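The counting idiom in current_populations, plus an alternative worth noting as a sketch: np.unique with return_counts only reports node ids that actually occur, whereas np.bincount with minlength also yields zeros for empty nodes. The bincount variant is an illustration, not what the diff does; the sample data is made up.

import numpy as np

nodeid = np.array([0, 0, 1, 1, 1, 3], dtype=np.int32)
node_count = 4

# Idiom used in the diff: counts only for node ids that are present.
ids, counts = np.unique(nodeid, return_counts=True)
print(ids, counts)        # [0 1 3] [2 3 1]

# Sketch of an alternative: fixed-length counts per node, including empty nodes.
per_node = np.bincount(nodeid, minlength=node_count)
print(per_node)           # [2 3 0 1]
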
