it-no-bio-20251014-t14 / reports.json
{
"overall": {
"at_0.5": {
"precision_macro": 0.8369736324281779,
"recall_macro": 0.9184995112414467,
"f1_macro": 0.8676160051978992,
"precision_weighted": 0.9277481209996546,
"recall_weighted": 0.9079754601226994,
"f1_weighted": 0.9129082823912861,
"accuracy": 0.9079754601226994,
"confusion_matrix": [
[
119,
13
],
[
2,
29
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9835 0.9015 0.9407 132\n recl (1) 0.6905 0.9355 0.7945 31\n\n accuracy 0.9080 163\n macro avg 0.8370 0.9185 0.8676 163\nweighted avg 0.9277 0.9080 0.9129 163\n",
"threshold": 0.5
},
"at_best_global": {
"precision_macro": 0.88828125,
"recall_macro": 0.9250977517106549,
"f1_macro": 0.905011655011655,
"precision_weighted": 0.9429831288343559,
"recall_weighted": 0.9386503067484663,
"f1_weighted": 0.9400374676448295,
"accuracy": 0.9386503067484663,
"confusion_matrix": [
[
125,
7
],
[
3,
28
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9766 0.9470 0.9615 132\n recl (1) 0.8000 0.9032 0.8485 31\n\n accuracy 0.9387 163\n macro avg 0.8883 0.9251 0.9050 163\nweighted avg 0.9430 0.9387 0.9400 163\n",
"threshold": 0.7000000000000001
},
"at_best_by_lang": {
"precision_macro": 0.88828125,
"recall_macro": 0.9250977517106549,
"f1_macro": 0.905011655011655,
"precision_weighted": 0.9429831288343559,
"recall_weighted": 0.9386503067484663,
"f1_weighted": 0.9400374676448295,
"accuracy": 0.9386503067484663,
"confusion_matrix": [
[
125,
7
],
[
3,
28
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9766 0.9470 0.9615 132\n recl (1) 0.8000 0.9032 0.8485 31\n\n accuracy 0.9387 163\n macro avg 0.8883 0.9251 0.9050 163\nweighted avg 0.9430 0.9387 0.9400 163\n",
"thresholds_by_lang": {
"it": 0.7000000000000001
}
}
},
"thresholds": {
"global_best": {
"threshold": 0.7000000000000001,
"f1_macro": 0.905011655011655,
"precision_macro": 0.88828125,
"recall_macro": 0.9250977517106549
},
"by_lang_best": {
"it": {
"threshold": 0.7000000000000001,
"f1_macro": 0.905011655011655,
"precision_macro": 0.88828125,
"recall_macro": 0.9250977517106549
}
},
"default": 0.5
},
"per_lang": {
"at_0.5": [
{
"lang": "it",
"n": 163,
"accuracy": 0.9079754601226994,
"f1_macro": 0.8676160051978992,
"precision_macro": 0.8369736324281779,
"recall_macro": 0.9184995112414467,
"f1_weighted": 0.9129082823912861,
"precision_weighted": 0.9277481209996546,
"recall_weighted": 0.9079754601226994
}
],
"at_best_global": [
{
"lang": "it",
"n": 163,
"accuracy": 0.9386503067484663,
"f1_macro": 0.905011655011655,
"precision_macro": 0.88828125,
"recall_macro": 0.9250977517106549,
"f1_weighted": 0.9400374676448295,
"precision_weighted": 0.9429831288343559,
"recall_weighted": 0.9386503067484663
}
],
"at_best_by_lang": [
{
"lang": "it",
"n": 163,
"accuracy": 0.9386503067484663,
"f1_macro": 0.905011655011655,
"precision_macro": 0.88828125,
"recall_macro": 0.9250977517106549,
"f1_weighted": 0.9400374676448295,
"precision_weighted": 0.9429831288343559,
"recall_weighted": 0.9386503067484663
}
]
}
}
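
The report above is plain JSON, so its headline numbers can be cross-checked directly. Below is a minimal Python sketch (not part of the uploaded pipeline) that recomputes the threshold-0.5 metrics from the stored confusion matrix, assuming the file is available locally as reports.json and that the matrix follows the usual sklearn layout (rows = true labels, columns = predictions, class order: no-recl (0), then recl (1)).

import json

# Load the report and pull the 2x2 confusion matrix at the default 0.5 threshold.
with open("reports.json") as fh:
    report = json.load(fh)

(tn, fp), (fn, tp) = report["overall"]["at_0.5"]["confusion_matrix"]  # [[119, 13], [2, 29]]

# Per-class precision/recall, then F1 for each class.
prec0, rec0 = tn / (tn + fn), tn / (tn + fp)   # no-recl: ~0.9835, ~0.9015
prec1, rec1 = tp / (tp + fp), tp / (tp + fn)   # recl:    ~0.6905, ~0.9355
f1_0 = 2 * prec0 * rec0 / (prec0 + rec0)       # ~0.9407
f1_1 = 2 * prec1 * rec1 / (prec1 + rec1)       # ~0.7945

# Aggregate metrics, matching the "at_0.5" block of the report.
accuracy = (tn + tp) / (tn + fp + fn + tp)                 # ~0.9080 -> "accuracy"
f1_macro = (f1_0 + f1_1) / 2                               # ~0.8676 -> "f1_macro"
support0, support1 = tn + fp, fn + tp                      # 132, 31
f1_weighted = (f1_0 * support0 + f1_1 * support1) / (support0 + support1)  # ~0.9129 -> "f1_weighted"

print(accuracy, f1_macro, f1_weighted)

The same arithmetic applied to the [[125, 7], [3, 28]] matrix reproduces the "at_best_global" block, which uses the swept threshold of 0.7 reported under "thresholds.global_best"; since Italian is the only language present (n = 163), the per-language best threshold coincides with the global one.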