diff --git a/caiman/components_evaluation.py b/caiman/components_evaluation.py
index 30896ad7c..fee1655bc 100644
--- a/caiman/components_evaluation.py
+++ b/caiman/components_evaluation.py
@@ -284,7 +284,7 @@ def evaluate_components_CNN(A, dims, gSig, model_name:str=os.path.join(caiman_da
 
             loaded_model = model_from_json(loaded_model_json)
             loaded_model.load_weights(model_name + '.h5')
-            loaded_model.compile('sgd', 'mse')
+            #loaded_model.compile('sgd', 'mse')
         else:
             if os.path.isfile(os.path.join(caiman_datadir(), model_name + ".h5.pb")):
                 model_file = os.path.join(caiman_datadir(), model_name + ".h5.pb")
diff --git a/caiman/source_extraction/cnmf/estimates.py b/caiman/source_extraction/cnmf/estimates.py
index 9c4bf4a0e..9351a6de1 100644
--- a/caiman/source_extraction/cnmf/estimates.py
+++ b/caiman/source_extraction/cnmf/estimates.py
@@ -721,7 +721,7 @@ def normalize_components(self):
 
         nA = np.sqrt(np.ravel(self.A.power(2).sum(axis=0)))
         nA_mat = scipy.sparse.spdiags(nA, 0, nA.shape[0], nA.shape[0])
-        nA_inv_mat = scipy.sparse.spdiags(1. / nA, 0, nA.shape[0], nA.shape[0])
+        nA_inv_mat = scipy.sparse.spdiags(1. / (nA + np.finfo(np.float32).eps), 0, nA.shape[0], nA.shape[0])
         self.A = self.A * nA_inv_mat
         self.C = nA_mat * self.C
         if self.YrA is not None:
@@ -1310,9 +1310,12 @@ def remove_duplicates(self, predictions=None, r_values=None, dist_thr=0.1,
         else:
             components_to_keep = np.arange(self.A.shape[-1])
-
-
-        self.select_components(idx_components=components_to_keep)
+        if self.idx_components is None:
+            self.idx_components = np.arange(self.A.shape[-1])
+        self.idx_components = np.intersect1d(self.idx_components, components_to_keep)
+        self.idx_components_bad = np.setdiff1d(np.arange(self.A.shape[-1]), self.idx_components)
+        if select_comp:
+            self.select_components(use_object=True)
 
         return components_to_keep
diff --git a/caiman/source_extraction/cnmf/map_reduce.py b/caiman/source_extraction/cnmf/map_reduce.py
index 83209e77e..63844a62a 100644
--- a/caiman/source_extraction/cnmf/map_reduce.py
+++ b/caiman/source_extraction/cnmf/map_reduce.py
@@ -408,7 +408,7 @@ def run_CNMF_patches(file_name, shape, params, gnb=1, dview=None,
     logging.info("Constructing background")
 
     Im = scipy.sparse.csr_matrix(
-        (1. / mask, (np.arange(d), np.arange(d))), dtype=np.float32)
+        (1. / (mask + np.finfo(np.float32).eps), (np.arange(d), np.arange(d))), dtype=np.float32)
 
     if not del_duplicates:
         A_tot = Im.dot(A_tot)
diff --git a/caiman/source_extraction/cnmf/params.py b/caiman/source_extraction/cnmf/params.py
index e4bf208bf..1afb9f0bd 100644
--- a/caiman/source_extraction/cnmf/params.py
+++ b/caiman/source_extraction/cnmf/params.py
@@ -575,7 +575,7 @@ def __init__(self, fnames=None, dims=None, dxy=(1, 1),
             'decay_time': decay_time,
             'dxy': dxy,
             'var_name_hdf5': var_name_hdf5,
-            'caiman_version': '1.6',
+            'caiman_version': '1.6.1',
             'last_commit': None,
             'mmap_F': None,
             'mmap_C': None
@@ -812,7 +812,7 @@ def __init__(self, fnames=None, dims=None, dxy=(1, 1),
         if self.init['gSig'] is None:
             self.init['gSig'] = [-1, -1]
         if self.init['gSiz'] is None:
-            self.init['gSiz'] = [2*gs + 3 for gs in self.init['gSig']]
+            self.init['gSiz'] = [2*gs + 1 for gs in self.init['gSig']]
             self.init['gSiz'] = [gz if gz % 2 else gz + 1 for gz in self.init['gSiz']]
 
         if gnb <= 0:
diff --git a/caiman/source_extraction/cnmf/temporal.py b/caiman/source_extraction/cnmf/temporal.py
index dafc83306..a2d41aaf9 100644
--- a/caiman/source_extraction/cnmf/temporal.py
+++ b/caiman/source_extraction/cnmf/temporal.py
@@ -195,7 +195,7 @@ def update_temporal_components(Y, A, b, Cin, fin, bl=None, c1=None, g=None, sn=N
     A = scipy.sparse.hstack((A, b)).tocsc()
     S = np.zeros(np.shape(Cin))
     Cin = np.vstack((Cin, fin))
-    #C = Cin.copy()
+    C = Cin.copy()
     nA = np.ravel(A.power(2).sum(axis=0))
 
     logging.info('Generating residuals')
@@ -213,7 +213,7 @@ def update_temporal_components(Y, A, b, Cin, fin, bl=None, c1=None, g=None, sn=N
     # creating the patch of components to be computed in parrallel
     parrllcomp, len_parrllcomp = update_order_greedy(AA[:nr, :][:, :nr])
     logging.info("entering the deconvolution ")
-    C, S, bl, YrA, c1, sn, g, lam = update_iteration(parrllcomp, len_parrllcomp, nb, Cin, S, bl, nr,
+    C, S, bl, YrA, c1, sn, g, lam = update_iteration(parrllcomp, len_parrllcomp, nb, C, S, bl, nr,
                                                      ITER, YrA, c1, sn, g, Cin, T, nA, dview, debug, AA, kwargs)
     ff = np.where(np.sum(C, axis=1) == 0)  # remove empty components
     if np.size(ff) > 0:  # Eliminating empty temporal components
diff --git a/caiman/tests/comparison_humans_online.py b/caiman/tests/comparison_humans_online.py
index 7fad4a378..631308ca9 100644
--- a/caiman/tests/comparison_humans_online.py
+++ b/caiman/tests/comparison_humans_online.py
@@ -82,8 +82,8 @@
     'p': 1,  # order of indicator dynamics
     'gnb': 2,  # number of background components
     'epochs': 2,  # number of passes over the data
-    'rval_thr': 0.75,  # spatial correlation threshold
-    'max_thr': 0.15,  # parameter for thresholding components when cleaning up shapes
+    'rval_thr': 0.80,  # spatial correlation threshold
+    'max_thr': 0.25,  # parameter for thresholding components when cleaning up shapes
     'mot_corr': False,  # flag for motion correction (set to False to compare directly on the same FOV)
     'min_num_trial': 10,  # maximum number of candidate components per frame
     'use_peak_max': True,
@@ -129,7 +129,7 @@
 # % YST
 params_movie[3] = {
     'folder_name': 'YST/',
-    'epochs': 2,
+    'epochs': 3,
     'ds_factor': 1,
     'fr': 10,
     'decay_time': .75,
@@ -140,7 +140,7 @@
 # % neurofinder.04.00.test
 params_movie[4] = {
     'folder_name': 'N.04.00.t/',
-    'epochs': 2,
+    'epochs': 3,
     'ds_factor': 1,
     'fr': 8,
     'gSig': [7, 7],  # expected half size of neurons