Change qm_params to metric_params
chrishalcrow committed Dec 4, 2024
1 parent 37746e0 · commit c14b5d7

Showing 4 changed files with 13 additions and 8 deletions.
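In short, the per-metric settings dictionary accepted by compute("quality_metrics", ...) is renamed from `qm_params` to `metric_params`. A minimal before/after sketch of the user-facing change, assuming `analyzer` is an existing SortingAnalyzer (the setup itself is not part of this commit):

    # Before this commit, per-metric settings were passed as `qm_params`:
    # analyzer.compute(
    #     "quality_metrics",
    #     metric_names=["num_spikes", "snr"],
    #     qm_params={"snr": {"peak_mode": "peak_to_peak"}},
    # )

    # After this commit, the same settings are passed as `metric_params`:
    analyzer.compute(
        "quality_metrics",
        metric_names=["num_spikes", "snr"],
        metric_params={"snr": {"peak_mode": "peak_to_peak"}},
    )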
File 1 of 4 (file name not shown in this view):

@@ -162,7 +162,7 @@ def test_exception_raised_when_metricparams_not_equal(sorting_analyzer_for_curation):
     be raised depending on the `enforce_metric_params` kwarg. This behaviour is tested here."""
 
     sorting_analyzer_for_curation.compute(
-        "quality_metrics", metric_names=["num_spikes", "snr"], qm_params={"snr": {"peak_mode": "peak_to_peak"}}
+        "quality_metrics", metric_names=["num_spikes", "snr"], metric_params={"snr": {"peak_mode": "peak_to_peak"}}
     )
     sorting_analyzer_for_curation.compute("template_metrics", metric_names=["half_width"])
 
@@ -180,7 +180,7 @@ def test_exception_raised_when_metricparams_not_equal(sorting_analyzer_for_curation):
     model_based_classification._check_params_for_classification(enforce_metric_params=False, model_info=model_info)
 
     # Now test the positive case. Recompute using the default parameters
-    sorting_analyzer_for_curation.compute("quality_metrics", metric_names=["num_spikes", "snr"], qm_params={})
+    sorting_analyzer_for_curation.compute("quality_metrics", metric_names=["num_spikes", "snr"], metric_params={})
     sorting_analyzer_for_curation.compute("template_metrics", metric_names=["half_width"])
 
     model, model_info = load_model(model_folder=model_folder, trusted=["numpy.dtype"])
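The test above relies on _check_params_for_classification comparing the analyzer's stored parameters against those recorded in model_info. A hedged sketch of that flow, using only names that appear in the hunks (the failure behaviour is paraphrased from the test's docstring, not shown in the diff):

    # Load a trained model plus its metadata, then validate parameters.
    model, model_info = load_model(model_folder=model_folder, trusted=["numpy.dtype"])

    # Per the docstring, a mismatch between the analyzer's metric_params and
    # those stored at training time raises only when enforcement is requested.
    model_based_classification._check_params_for_classification(
        enforce_metric_params=True, model_info=model_info
    )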
File 2 of 4 (file name not shown in this view):

@@ -262,7 +262,12 @@ def test_train_using_two_sorting_analyzers():
     # Now check that there is an error raised if we demand the same metric params, but don't have them
 
     sorting_analyzer_2.compute(
-        {"quality_metrics": {"metric_names": ["num_spikes", "snr"], "qm_params": {"snr": {"peak_mode": "at_index"}}}}
+        {
+            "quality_metrics": {
+                "metric_names": ["num_spikes", "snr"],
+                "metric_params": {"snr": {"peak_mode": "at_index"}},
+            }
+        }
     )
 
     with pytest.raises(Exception):
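This hunk also shows the dict form of compute, where extension names map to their keyword arguments, so `metric_params` becomes a key inside each extension's entry. A sketch under that assumption (the "template_metrics" entry is an added illustration, not part of the diff):

    sorting_analyzer_2.compute(
        {
            "quality_metrics": {
                "metric_names": ["num_spikes", "snr"],
                "metric_params": {"snr": {"peak_mode": "at_index"}},
            },
            "template_metrics": {"metric_names": ["half_width"]},
        }
    )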
File 3 of 4 (file name not shown in this view; a JSON parameters file):

@@ -7,7 +7,7 @@
   ],
   "peak_sign": null,
   "seed": null,
-  "qm_params": {
+  "metric_params": {
     "num_spikes": {},
     "snr": {
       "peak_sign": "neg",
File 4 of 4: src/spikeinterface/curation/train_manual_curation.py (4 additions, 4 deletions):

@@ -244,10 +244,10 @@ def load_and_preprocess_analyzers(self, analyzers, enforce_metric_params):
         qm_names = self.metrics_params["quality_metric_params"]["metric_names"]
         consistent_metrics = list(set(qm_names).difference(set(conflicting_metrics)))
         consistent_metric_params = {
-            metric: analyzers[0].extensions["quality_metrics"].params["qm_params"][metric]
+            metric: analyzers[0].extensions["quality_metrics"].params["metric_params"][metric]
             for metric in consistent_metrics
         }
-        self.metrics_params["quality_metric_params"]["qm_params"] = consistent_metric_params
+        self.metrics_params["quality_metric_params"]["metric_params"] = consistent_metric_params
 
         if analyzers[0].has_extension("template_metrics") is True:
             self.metrics_params["template_metric_params"] = deepcopy(analyzers[0].extensions["template_metrics"].params)
@@ -273,9 +273,9 @@ def _check_metrics_parameters(self, analyzers, enforce_metric_params):
         tm_params_2 = {}
 
         if analyzer_1.has_extension("quality_metrics") is True:
-            qm_params_1 = analyzer_1.extensions["quality_metrics"].params["qm_params"]
+            qm_params_1 = analyzer_1.extensions["quality_metrics"].params["metric_params"]
         if analyzer_2.has_extension("quality_metrics") is True:
-            qm_params_2 = analyzer_2.extensions["quality_metrics"].params["qm_params"]
+            qm_params_2 = analyzer_2.extensions["quality_metrics"].params["metric_params"]
         if analyzer_1.has_extension("template_metrics") is True:
             tm_params_1 = analyzer_1.extensions["template_metrics"].params["metrics_kwargs"]
         if analyzer_2.has_extension("template_metrics") is True:
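_check_metrics_parameters reads the renamed key when comparing two analyzers. A simplified, hedged sketch of the comparison it feeds (only the key names and variables come from the hunks above; the raise is a paraphrase of the enforcement behaviour):

    # Fetch per-metric parameters from both analyzers under the new key.
    qm_params_1 = analyzer_1.extensions["quality_metrics"].params["metric_params"]
    qm_params_2 = analyzer_2.extensions["quality_metrics"].params["metric_params"]

    # When enforcement is on, differing parameters are treated as an error.
    if enforce_metric_params and qm_params_1 != qm_params_2:
        raise Exception("Quality metric parameters differ between analyzers")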
