{
"overall": {
"at_0.5": {
"precision_macro": 0.6978962818003913,
"recall_macro": 0.7462459415584415,
"f1_macro": 0.7168468468468467,
"precision_weighted": 0.8649096273955169,
"recall_weighted": 0.844097995545657,
"f1_weighted": 0.8525532213728205,
"accuracy": 0.844097995545657,
"confusion_matrix": [
[
340,
45
],
[
25,
39
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9315 0.8831 0.9067 385\n recl (1) 0.4643 0.6094 0.5270 64\n\n accuracy 0.8441 449\n macro avg 0.6979 0.7462 0.7168 449\nweighted avg 0.8649 0.8441 0.8526 449\n",
"threshold": 0.5
},
"at_best_global": {
"precision_macro": 0.7530962210225468,
"recall_macro": 0.6940746753246754,
"f1_macro": 0.7173094314136246,
"precision_weighted": 0.8655903065348989,
"recall_weighted": 0.8775055679287305,
"f1_weighted": 0.8694483657963139,
"accuracy": 0.8775055679287305,
"confusion_matrix": [
[
366,
19
],
[
36,
28
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9104 0.9506 0.9301 385\n recl (1) 0.5957 0.4375 0.5045 64\n\n accuracy 0.8775 449\n macro avg 0.7531 0.6941 0.7173 449\nweighted avg 0.8656 0.8775 0.8694 449\n",
"threshold": 0.6
},
"at_best_by_lang": {
"precision_macro": 0.6860063203634209,
"recall_macro": 0.7293222402597402,
"f1_macro": 0.703098816091017,
"precision_weighted": 0.857747174025981,
"recall_weighted": 0.8374164810690423,
"f1_weighted": 0.8458669301901763,
"accuracy": 0.8374164810690423,
"confusion_matrix": [
[
339,
46
],
[
27,
37
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9262 0.8805 0.9028 385\n recl (1) 0.4458 0.5781 0.5034 64\n\n accuracy 0.8374 449\n macro avg 0.6860 0.7293 0.7031 449\nweighted avg 0.8577 0.8374 0.8459 449\n",
"thresholds_by_lang": {
"en": 0.4,
"it": 0.65,
"es": 0.5
}
}
},
"thresholds": {
"global_best": {
"threshold": 0.6,
"f1_macro": 0.7173094314136246,
"precision_macro": 0.7530962210225468,
"recall_macro": 0.6940746753246754
},
"by_lang_best": {
"en": {
"threshold": 0.4,
"f1_macro": 0.5399933620975772,
"precision_macro": 0.5426997245179064,
"recall_macro": 0.5930169121658484
},
"it": {
"threshold": 0.65,
"f1_macro": 0.8398371701291409,
"precision_macro": 0.9101274312541918,
"recall_macro": 0.7988758553274682
},
"es": {
"threshold": 0.5,
"f1_macro": 0.7118177503559564,
"precision_macro": 0.6901573485102109,
"recall_macro": 0.7535714285714286
}
},
"default": 0.5
},
"per_lang": {
"at_0.5": [
{
"lang": "en",
"n": 154,
"accuracy": 0.8636363636363636,
"f1_macro": 0.5066361556064073,
"precision_macro": 0.5083333333333333,
"recall_macro": 0.5065466448445172,
"f1_weighted": 0.8554607863532349,
"precision_weighted": 0.8477272727272728,
"recall_weighted": 0.8636363636363636
},
{
"lang": "it",
"n": 163,
"accuracy": 0.8404907975460123,
"f1_macro": 0.7769473684210526,
"precision_macro": 0.7523540489642184,
"recall_macro": 0.8274682306940371,
"f1_weighted": 0.8507161769454311,
"precision_weighted": 0.8742966737143716,
"recall_weighted": 0.8404907975460123
},
{
"lang": "es",
"n": 132,
"accuracy": 0.8257575757575758,
"f1_macro": 0.7118177503559564,
"precision_macro": 0.6901573485102109,
"recall_macro": 0.7535714285714286,
"f1_weighted": 0.8381124965842573,
"precision_weighted": 0.8587414148177456,
"recall_weighted": 0.8257575757575758
}
],
"at_best_global": [
{
"lang": "en",
"n": 154,
"accuracy": 0.8896103896103896,
"f1_macro": 0.47079037800687284,
"precision_macro": 0.45666666666666667,
"recall_macro": 0.4858156028368794,
"f1_weighted": 0.8620966662203776,
"precision_weighted": 0.8362337662337662,
"recall_weighted": 0.8896103896103896
},
{
"lang": "it",
"n": 163,
"accuracy": 0.901840490797546,
"f1_macro": 0.8322182192485847,
"precision_macro": 0.852124183006536,
"recall_macro": 0.815982404692082,
"f1_weighted": 0.899188213786157,
"precision_weighted": 0.8981915874734351,
"recall_weighted": 0.901840490797546
},
{
"lang": "es",
"n": 132,
"accuracy": 0.8333333333333334,
"f1_macro": 0.6461988304093568,
"precision_macro": 0.6627155172413793,
"recall_macro": 0.6348214285714285,
"f1_weighted": 0.8255360623781677,
"precision_weighted": 0.819683908045977,
"recall_weighted": 0.8333333333333334
}
],
"at_best_by_lang": [
{
"lang": "en",
"n": 154,
"accuracy": 0.7662337662337663,
"f1_macro": 0.5399933620975772,
"precision_macro": 0.5426997245179064,
"recall_macro": 0.5930169121658484,
"f1_weighted": 0.8081301373700975,
"precision_weighted": 0.8678401488318844,
"recall_weighted": 0.7662337662337663
},
{
"lang": "it",
"n": 163,
"accuracy": 0.9141104294478528,
"f1_macro": 0.8398371701291409,
"precision_macro": 0.9101274312541918,
"recall_macro": 0.7988758553274682,
"f1_weighted": 0.9074191448245453,
"precision_weighted": 0.9134520826389833,
"recall_weighted": 0.9141104294478528
},
{
"lang": "es",
"n": 132,
"accuracy": 0.8257575757575758,
"f1_macro": 0.7118177503559564,
"precision_macro": 0.6901573485102109,
"recall_macro": 0.7535714285714286,
"f1_weighted": 0.8381124965842573,
"precision_weighted": 0.8587414148177456,
"recall_weighted": 0.8257575757575758
}
]
}
}