{
  "overall": {
    "at_0.5": {
      "precision_macro": 0.8369736324281779,
      "recall_macro": 0.9184995112414467,
      "f1_macro": 0.8676160051978992,
      "precision_weighted": 0.9277481209996546,
      "recall_weighted": 0.9079754601226994,
      "f1_weighted": 0.9129082823912861,
      "accuracy": 0.9079754601226994,
      "confusion_matrix": [
        [
          119,
          13
        ],
        [
          2,
          29
        ]
      ],
      "classification_report": "              precision    recall  f1-score   support\n\n no-recl (0)     0.9835    0.9015    0.9407       132\n    recl (1)     0.6905    0.9355    0.7945        31\n\n    accuracy                         0.9080       163\n   macro avg     0.8370    0.9185    0.8676       163\nweighted avg     0.9277    0.9080    0.9129       163\n",
      "threshold": 0.5
    },
    "at_best_global": {
      "precision_macro": 0.88828125,
      "recall_macro": 0.9250977517106549,
      "f1_macro": 0.905011655011655,
      "precision_weighted": 0.9429831288343559,
      "recall_weighted": 0.9386503067484663,
      "f1_weighted": 0.9400374676448295,
      "accuracy": 0.9386503067484663,
      "confusion_matrix": [
        [
          125,
          7
        ],
        [
          3,
          28
        ]
      ],
      "classification_report": "              precision    recall  f1-score   support\n\n no-recl (0)     0.9766    0.9470    0.9615       132\n    recl (1)     0.8000    0.9032    0.8485        31\n\n    accuracy                         0.9387       163\n   macro avg     0.8883    0.9251    0.9050       163\nweighted avg     0.9430    0.9387    0.9400       163\n",
      "threshold": 0.7000000000000001
    },
    "at_best_by_lang": {
      "precision_macro": 0.88828125,
      "recall_macro": 0.9250977517106549,
      "f1_macro": 0.905011655011655,
      "precision_weighted": 0.9429831288343559,
      "recall_weighted": 0.9386503067484663,
      "f1_weighted": 0.9400374676448295,
      "accuracy": 0.9386503067484663,
      "confusion_matrix": [
        [
          125,
          7
        ],
        [
          3,
          28
        ]
      ],
      "classification_report": "              precision    recall  f1-score   support\n\n no-recl (0)     0.9766    0.9470    0.9615       132\n    recl (1)     0.8000    0.9032    0.8485        31\n\n    accuracy                         0.9387       163\n   macro avg     0.8883    0.9251    0.9050       163\nweighted avg     0.9430    0.9387    0.9400       163\n",
      "thresholds_by_lang": {
        "it": 0.7000000000000001
      }
    }
  },
  "thresholds": {
    "global_best": {
      "threshold": 0.7000000000000001,
      "f1_macro": 0.905011655011655,
      "precision_macro": 0.88828125,
      "recall_macro": 0.9250977517106549
    },
    "by_lang_best": {
      "it": {
        "threshold": 0.7000000000000001,
        "f1_macro": 0.905011655011655,
        "precision_macro": 0.88828125,
        "recall_macro": 0.9250977517106549
      }
    },
    "default": 0.5
  },
  "per_lang": {
    "at_0.5": [
      {
        "lang": "it",
        "n": 163,
        "accuracy": 0.9079754601226994,
        "f1_macro": 0.8676160051978992,
        "precision_macro": 0.8369736324281779,
        "recall_macro": 0.9184995112414467,
        "f1_weighted": 0.9129082823912861,
        "precision_weighted": 0.9277481209996546,
        "recall_weighted": 0.9079754601226994
      }
    ],
    "at_best_global": [
      {
        "lang": "it",
        "n": 163,
        "accuracy": 0.9386503067484663,
        "f1_macro": 0.905011655011655,
        "precision_macro": 0.88828125,
        "recall_macro": 0.9250977517106549,
        "f1_weighted": 0.9400374676448295,
        "precision_weighted": 0.9429831288343559,
        "recall_weighted": 0.9386503067484663
      }
    ],
    "at_best_by_lang": [
      {
        "lang": "it",
        "n": 163,
        "accuracy": 0.9386503067484663,
        "f1_macro": 0.905011655011655,
        "precision_macro": 0.88828125,
        "recall_macro": 0.9250977517106549,
        "f1_weighted": 0.9400374676448295,
        "precision_weighted": 0.9429831288343559,
        "recall_weighted": 0.9386503067484663
      }
    ]
  }
}