diff --git a/atlas_densities/densities/inhibitory_neuron_densities_optimization.py b/atlas_densities/densities/inhibitory_neuron_densities_optimization.py index 450a3f9..5065985 100644 --- a/atlas_densities/densities/inhibitory_neuron_densities_optimization.py +++ b/atlas_densities/densities/inhibitory_neuron_densities_optimization.py @@ -130,6 +130,7 @@ def _zero_descendants(id_, cell_type, x_result, deltas, hierarchy_info): deltas.loc[desc_names, cell_type] = 0.0 L.info("Preparing variables layout ...") + # Create 2 dataframes filled with np.nan; next, we will update them based on the assumptions below x_result = pd.DataFrame( {cell_type: np.full(len(id_counts), KEEP) for cell_type in cell_types}, index=id_counts.index, # indexed by integer identifiers @@ -157,6 +158,7 @@ def _zero_descendants(id_, cell_type, x_result, deltas, hierarchy_info): region_counts.at[region_name, "gad67+_standard_deviation"], 0.0, atol=stddev_tolerance ) and np.isclose(region_counts.at[region_name, "gad67+"], 0.0, atol=stddev_tolerance): for cell_type in cell_types: + # (If the gad67+ count of a region is certainly 0, then every inhibitory neuron subtype count should also be 0.) _zero_descendants(id_, cell_type, x_result, deltas, hierarchy_info) # Set the (possibly non-zero) cell count estimates which are given with certainty. @@ -178,6 +180,18 @@ def _zero_descendants(id_, cell_type, x_result, deltas, hierarchy_info): # are used as definitive estimates. x_result.at[id_, cell_type] = id_counts.at[id_, cell_type] deltas.at[region_name, cell_type] = 0.0 + # IF the region has at least one leaf region + # AND all its leaf region estimates are np.nan + # AND the region's cell count is 0: + # THEN revert the standard deviation to np.inf, i.e. the term is omitted + # Since the cell count, i.e. the 
literature value was changed to 0, we change it to np.nan + if ( + not x_result.loc[desc_only, cell_type].empty and + x_result.loc[desc_only, cell_type].isnull().all() and + x_result.loc[id_, cell_type] == 0 + ): + deltas.at[region_name, cell_type] = SKIP + x_result.at[id_, cell_type] = np.nan return x_result, deltas @@ -481,9 +495,11 @@ def _check_variables_consistency( AtlasDensitiesError if on the the following assumptions is violated: - if cell count estimate of a region is known with certainty for a given cell type, then the cell count of every descendant region is also known with certainty. - - a cell count estimate which is given for certain does not + - a neuron subtype count estimate which is given for certain does not exceed + its total neuron count estimate counterpart. """ cell_count_tolerance = 1e-2 # absolute tolerance to rule out round-off errors + # pylint: disable=too-many-nested-blocks for region_name, id_, id_set in zip( deltas.index, hierarchy_info.index, hierarchy_info["descendant_id_set"] ): @@ -492,7 +508,7 @@ def _check_variables_consistency( for desc_id in id_set: if np.isnan(x_result.loc[desc_id, cell_type]): raise AtlasDensitiesError( - f"Cell count estimate of region named '{region_name}' for cell type " + f"Cell count estimate of region '{region_name}' for cell type " f"{cell_type} was given for certain whereas the cell count of " f"descendant id {desc_id} is not certain." ) diff --git a/setup.py b/setup.py index 9fbd57d..a9e69bf 100644 --- a/setup.py +++ b/setup.py @@ -20,9 +20,9 @@ "click>=7.0,<=8.1.3", "cgal-pybind>=0.1.1", "joblib>=1.3.0", - "numpy>=1.15.0", + "numpy>=1.15.0,<=1.24.0", # to reduce errors when pandas uses new numpy "openpyxl>=3.0.3", - "pandas>=1.0.3", + "pandas>=1.0.3,<=2.0.0", # because https://github.com/pandas-dev/pandas/pull/54954 broke CSV parsing "PyYAML>=5.3.1", # Since version 1.6.0, scipy.optimize.linprog has fast, new methods for large, sparse problems # from the HiGHS library. 
We use the "highs" method in the densities module. diff --git a/tests/densities/test_excitatory_inhibitory_splitting.py b/tests/densities/test_excitatory_inhibitory_splitting.py index b212da5..c5f220f 100644 --- a/tests/densities/test_excitatory_inhibitory_splitting.py +++ b/tests/densities/test_excitatory_inhibitory_splitting.py @@ -133,7 +133,7 @@ def test_make_excitatory_density(): res = tested.make_excitatory_density(neuron_density, inhibitory_density) assert res.shape == neuron_density.shape - assert np.sum(res.raw) == np.product(neuron_density.shape) + assert np.sum(res.raw) == np.prod(neuron_density.shape) # this would create negative densities; make sure they are clipped to zero res = tested.make_excitatory_density(inhibitory_density, neuron_density)