Skip to content

Commit

Permalink
Use complete heads by default when using a decomposable loss function…
Browse files Browse the repository at this point in the history
… and dense statistics.
  • Loading branch information
michael-rapp committed Aug 9, 2024
1 parent d753288 commit cab5f66
Show file tree
Hide file tree
Showing 98 changed files with 19,843 additions and 19,793 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,9 @@ namespace boosting {
AutomaticHeadConfig::createClassificationStatisticsProviderFactory(
  const IFeatureMatrix& featureMatrix, const IRowWiseLabelMatrix& labelMatrix,
  const IDecomposableClassificationLossConfig& lossConfig) const {
    // When a decomposable loss function and dense statistics are used, complete
    // heads are the default regardless of the number of outputs. (Previously a
    // SingleOutputHeadConfig was chosen whenever more than one output was
    // present; that branch was removed by this commit.)
    CompleteHeadConfig headConfig(labelBinningConfig_, multiThreadingConfig_, l1RegularizationConfig_,
                                  l2RegularizationConfig_);
    return headConfig.createClassificationStatisticsProviderFactory(featureMatrix, labelMatrix, lossConfig);
}

std::unique_ptr<IClassificationStatisticsProviderFactory>
Expand Down Expand Up @@ -73,11 +67,11 @@ namespace boosting {
}

bool AutomaticHeadConfig::isPartial() const {
    // Partial heads are only the automatic default when the configured loss is
    // both decomposable AND sparse; a dense decomposable loss now defaults to
    // complete heads instead (the old `isDecomposable()`-only check was removed).
    return lossConfig_.get().isDecomposable() && lossConfig_.get().isSparse();
}

bool AutomaticHeadConfig::isSingleOutput() const {
    // Mirrors isPartial(): single-output heads are only the automatic default
    // when the loss is decomposable AND sparse, keeping the two predicates
    // consistent after the removal of the dense-decomposable special case.
    return lossConfig_.get().isDecomposable() && lossConfig_.get().isSparse();
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,21 @@ DEBUG A sparse matrix is used to store the predicted labels
INFO Successfully predicted in <duration>
INFO Evaluation result for test data:

Example-wise F1 43.07
Example-wise Jaccard 31.28
Example-wise Precision 61.51
Example-wise Recall 36.12
Hamming Accuracy 94.68
Hamming Loss 5.32
Macro F1 10.58
Macro Jaccard 7.81
Macro Precision 88.88
Macro Recall 9.27
Micro F1 47.2
Micro Jaccard 30.89
Micro Precision 63.9
Micro Recall 37.42
Subset 0/1 Loss 96.98
Subset Accuracy 3.02
Example-wise F1 57.44
Example-wise Jaccard 46.65
Example-wise Precision 75.41
Example-wise Recall 53.46
Hamming Accuracy 95.54
Hamming Loss 4.46
Macro F1 22.37
Macro Jaccard 16.81
Macro Precision 61.04
Macro Recall 18.92
Micro F1 58.78
Micro Jaccard 41.63
Micro Precision 71.2
Micro Recall 50.05
Subset 0/1 Loss 84.88
Subset Accuracy 15.12

INFO Successfully finished after <duration>
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,21 @@ DEBUG A sparse matrix is used to store the predicted labels
INFO Successfully predicted in <duration>
INFO Evaluation result for test data:

Example-wise F1 43.07
Example-wise Jaccard 31.28
Example-wise Precision 61.51
Example-wise Recall 36.12
Hamming Accuracy 94.68
Hamming Loss 5.32
Macro F1 10.58
Macro Jaccard 7.81
Macro Precision 88.88
Macro Recall 9.27
Micro F1 47.2
Micro Jaccard 30.89
Micro Precision 63.9
Micro Recall 37.42
Subset 0/1 Loss 96.98
Subset Accuracy 3.02
Example-wise F1 57.44
Example-wise Jaccard 46.65
Example-wise Precision 75.41
Example-wise Recall 53.46
Hamming Accuracy 95.54
Hamming Loss 4.46
Macro F1 22.37
Macro Jaccard 16.81
Macro Precision 61.04
Macro Recall 18.92
Micro F1 58.78
Micro Jaccard 41.63
Micro Precision 71.2
Micro Recall 50.05
Subset 0/1 Loss 84.88
Subset Accuracy 15.12

INFO Successfully finished after <duration>
Loading

0 comments on commit cab5f66

Please sign in to comment.