Skip to content

Commit

Permalink
Patch to read_velocity_cells to make return types consistent and to raise errors instead of returning None
Browse files Browse the repository at this point in the history
  • Loading branch information
alhom committed Aug 28, 2024
1 parent 6110b8d commit 9db64a3
Showing 1 changed file with 7 additions and 9 deletions.
16 changes: 7 additions & 9 deletions pyVlsv/vlsvreader.py
Original file line number Diff line number Diff line change
Expand Up @@ -3017,8 +3017,8 @@ def read_velocity_cells(self, cellid, pop="proton"):
self.__set_cell_offset_and_blocks(pop)
# Check that cells has vspace
if not cellid in self.__fileindex_for_cellid_blocks[pop]:
print("Cell does not have velocity distribution")
return []
warnings.warn("Cell(s) does not have velocity distribution")
return {}
# Navigate to the correct position:
offset = self.__fileindex_for_cellid_blocks[pop][cellid][0]
num_of_blocks = self.__fileindex_for_cellid_blocks[pop][cellid][1]
Expand All @@ -3030,8 +3030,8 @@ def read_velocity_cells(self, cellid, pop="proton"):
try:
cells_with_blocks_index = self.__order_for_cellid_blocks[pop][cellid]
except:
print("Cell does not have velocity distribution")
return []
warnings.warn("Cell(s) does not have velocity distribution")
return {}
# Navigate to the correct position:
offset = self.__blocks_per_cell_offsets[pop][cells_with_blocks_index]
num_of_blocks = self.__blocks_per_cell[pop][cells_with_blocks_index]
Expand Down Expand Up @@ -3074,8 +3074,7 @@ def read_velocity_cells(self, cellid, pop="proton"):
elif datatype == "uint" and element_size == 8:
data_block_ids = np.fromfile(fptr, dtype = np.uint64, count = vector_size*num_of_blocks)
else:
print("Error! Bad data type in blocks!")
return
raise TypeError("Error! Bad data type in blocks! datatype found was "+datatype)

if (pop=="avgs") and (child.tag == "BLOCKIDS"): # Old avgs files did not have the name set for BLOCKIDS
vector_size = ast.literal_eval(child.attrib["vectorsize"])
Expand All @@ -3091,16 +3090,15 @@ def read_velocity_cells(self, cellid, pop="proton"):
elif datatype == "uint" and element_size == 8:
data_block_ids = np.fromfile(fptr, dtype = np.uint64, count = vector_size*num_of_blocks)
else:
print("Error! Bad data type in blocks!")
return
raise TypeError("Error! Bad data type in blocks! datatype found was "+datatype)

data_block_ids = data_block_ids.reshape(num_of_blocks, vector_size)

fptr.close()

# Check to make sure the sizes match (just some extra debugging)
if len(data_avgs) != len(data_block_ids):
print("BAD DATA SIZES")
raise ValueError("BAD DATA SIZES")
# Make a dictionary (hash map) out of velocity cell ids and avgs:
velocity_cells = {}
array_size = len(data_avgs)
Expand Down

0 comments on commit 9db64a3

Please sign in to comment.