eduagarcia committed
Commit 659ec6d · verified · 1 Parent(s): 007b2b4

Updating model Magpie-Align/MagpieLM-8B-Chat-v0.1

Magpie-Align/MagpieLM-8B-Chat-v0.1/results_2025-05-07T15-22-16.111876.json ADDED
@@ -0,0 +1,208 @@
+ {
+   "config_general": {
+     "start_date": "2025-05-07T15-22-16.111876",
+     "start_time": 1746631337.9216282,
+     "end_time": 1746633054.876184,
+     "total_evaluation_time_seconds": 1716.9545557498932,
+     "n_gpus": 1,
+     "model_dtype": "bfloat16",
+     "model_is_loaded_in_4bit": null,
+     "model_is_loaded_in_8bit": null,
+     "model_is_quantized": false,
+     "model_quantization": null,
+     "model_sha": "60cf460249e1f34524ece37a17df3a1d63415e72",
+     "batch_size": "auto",
+     "max_length": 4098,
+     "max_gen_toks": 2048,
+     "until": null,
+     "gen_kwargs": {},
+     "effective_batch_size": 7075.0,
+     "model_name": "Magpie-Align/MagpieLM-8B-Chat-v0.1",
+     "job_id": 1624,
+     "model_id": "Magpie-Align/MagpieLM-8B-Chat-v0.1_eval_request_False_bfloat16_Original",
+     "model_base_model": "",
+     "model_weight_type": "Original",
+     "model_revision": "main",
+     "model_private": false,
+     "model_type": "💬 : chat (RLHF, DPO, IFT, ...)",
+     "model_architectures": "LlamaForCausalLM",
+     "submitted_time": "2024-09-19T04:37:59Z",
+     "lm_eval_model_type": "vllm",
+     "eval_version": "1.1.0"
+   },
+   "results": {
+     "all_grouped_average": 0.5415242677971377,
+     "all_grouped_npm": 0.3219943312294076,
+     "all_grouped": {
+       "enem_challenge": 0.5787263820853744,
+       "bluex": 0.5118219749652295,
+       "oab_exams": 0.4123006833712984,
+       "assin2_rte": 0.888146118500855,
+       "assin2_sts": 0.3012536440181936,
+       "faquad_nli": 0.5342091520965496,
+       "hatebr_offensive": 0.7705044730987287,
+       "portuguese_hate_speech": 0.41389777668389527,
+       "tweetsentbr": 0.46285820535411437
+     },
+     "all": {
+       "harness|enem_challenge|enem_challenge|None|3": 0.5787263820853744,
+       "harness|bluex|bluex|None|3": 0.5118219749652295,
+       "harness|oab_exams|oab_exams|None|3": 0.4123006833712984,
+       "harness|assin2_rte|assin2_rte|None|15": 0.888146118500855,
+       "harness|assin2_sts|assin2_sts|None|15": 0.3012536440181936,
+       "harness|faquad_nli|faquad_nli|None|15": 0.5342091520965496,
+       "harness|hatebr_offensive|hatebr_offensive|None|25": 0.7705044730987287,
+       "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.41389777668389527,
+       "harness|tweetsentbr|tweetsentbr|None|25": 0.46285820535411437
+     },
+     "harness|enem_challenge|enem_challenge|None|3": {
+       "acc,all": 0.5787263820853744,
+       "acc,exam_id__2010": 0.5811965811965812,
+       "acc,exam_id__2015": 0.5966386554621849,
+       "acc,exam_id__2012": 0.6120689655172413,
+       "acc,exam_id__2009": 0.6,
+       "acc,exam_id__2016_2": 0.5609756097560976,
+       "acc,exam_id__2016": 0.6198347107438017,
+       "acc,exam_id__2017": 0.5862068965517241,
+       "acc,exam_id__2014": 0.5045871559633027,
+       "acc,exam_id__2022": 0.5413533834586466,
+       "acc,exam_id__2023": 0.562962962962963,
+       "acc,exam_id__2011": 0.6068376068376068,
+       "acc,exam_id__2013": 0.5740740740740741,
+       "main_score": 0.5787263820853744
+     },
+     "harness|bluex|bluex|None|3": {
+       "acc,all": 0.5118219749652295,
+       "acc,exam_id__UNICAMP_2021_2": 0.5294117647058824,
+       "acc,exam_id__UNICAMP_2018": 0.5555555555555556,
+       "acc,exam_id__USP_2024": 0.5609756097560976,
+       "acc,exam_id__UNICAMP_2021_1": 0.5217391304347826,
+       "acc,exam_id__UNICAMP_2023": 0.5348837209302325,
+       "acc,exam_id__USP_2020": 0.5,
+       "acc,exam_id__UNICAMP_2022": 0.6153846153846154,
+       "acc,exam_id__UNICAMP_2024": 0.4666666666666667,
+       "acc,exam_id__USP_2021": 0.5192307692307693,
+       "acc,exam_id__USP_2018": 0.4074074074074074,
+       "acc,exam_id__UNICAMP_2020": 0.5454545454545454,
+       "acc,exam_id__USP_2022": 0.40816326530612246,
+       "acc,exam_id__USP_2023": 0.5681818181818182,
+       "acc,exam_id__USP_2019": 0.425,
+       "acc,exam_id__UNICAMP_2019": 0.54,
+       "main_score": 0.5118219749652295
+     },
+     "harness|oab_exams|oab_exams|None|3": {
+       "acc,all": 0.4123006833712984,
+       "acc,exam_id__2012-08": 0.4375,
+       "acc,exam_id__2015-16": 0.375,
+       "acc,exam_id__2014-13": 0.3875,
+       "acc,exam_id__2011-03": 0.3333333333333333,
+       "acc,exam_id__2016-19": 0.44871794871794873,
+       "acc,exam_id__2012-07": 0.4625,
+       "acc,exam_id__2011-05": 0.425,
+       "acc,exam_id__2013-12": 0.4875,
+       "acc,exam_id__2017-23": 0.4,
+       "acc,exam_id__2016-21": 0.3875,
+       "acc,exam_id__2016-20": 0.4125,
+       "acc,exam_id__2013-10": 0.3625,
+       "acc,exam_id__2010-01": 0.32941176470588235,
+       "acc,exam_id__2011-04": 0.4,
+       "acc,exam_id__2015-17": 0.4230769230769231,
+       "acc,exam_id__2017-22": 0.475,
+       "acc,exam_id__2016-20a": 0.4125,
+       "acc,exam_id__2017-24": 0.4125,
+       "acc,exam_id__2015-18": 0.4125,
+       "acc,exam_id__2014-14": 0.425,
+       "acc,exam_id__2013-11": 0.4125,
+       "acc,exam_id__2012-06a": 0.4625,
+       "acc,exam_id__2010-02": 0.33,
+       "acc,exam_id__2012-06": 0.5,
+       "acc,exam_id__2014-15": 0.5256410256410257,
+       "acc,exam_id__2018-25": 0.425,
+       "acc,exam_id__2012-09": 0.3116883116883117,
+       "main_score": 0.4123006833712984
+     },
+     "harness|assin2_rte|assin2_rte|None|15": {
+       "f1_macro,all": 0.888146118500855,
+       "acc,all": 0.8770424836601307,
+       "main_score": 0.888146118500855
+     },
+     "harness|assin2_sts|assin2_sts|None|15": {
+       "pearson,all": 0.3012536440181936,
+       "mse,all": 1.8617602030056133,
+       "main_score": 0.3012536440181936
+     },
+     "harness|faquad_nli|faquad_nli|None|15": {
+       "f1_macro,all": 0.5342091520965496,
+       "acc,all": 0.5415384615384615,
+       "main_score": 0.5342091520965496
+     },
+     "harness|hatebr_offensive|hatebr_offensive|None|25": {
+       "f1_macro,all": 0.7705044730987287,
+       "acc,all": 0.7642857142857142,
+       "main_score": 0.7705044730987287
+     },
+     "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": {
+       "f1_macro,all": 0.41389777668389527,
+       "acc,all": 0.42420681551116335,
+       "main_score": 0.41389777668389527
+     },
+     "harness|tweetsentbr|tweetsentbr|None|25": {
+       "f1_macro,all": 0.46285820535411437,
+       "acc,all": 0.5049751243781094,
+       "main_score": 0.46285820535411437
+     }
+   },
+   "config_tasks": {
+     "harness|enem_challenge|enem_challenge": "LM Harness task",
+     "harness|bluex|bluex": "LM Harness task",
+     "harness|oab_exams|oab_exams": "LM Harness task",
+     "harness|assin2_rte|assin2_rte": "LM Harness task",
+     "harness|assin2_sts|assin2_sts": "LM Harness task",
+     "harness|faquad_nli|faquad_nli": "LM Harness task",
+     "harness|hatebr_offensive|hatebr_offensive": "LM Harness task",
+     "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task",
+     "harness|tweetsentbr|tweetsentbr": "LM Harness task"
+   },
+   "versions": {
+     "all": 0,
+     "harness|enem_challenge|enem_challenge": 1.1,
+     "harness|bluex|bluex": 1.1,
+     "harness|oab_exams|oab_exams": 1.5,
+     "harness|assin2_rte|assin2_rte": 1.1,
+     "harness|assin2_sts|assin2_sts": 1.1,
+     "harness|faquad_nli|faquad_nli": 1.1,
+     "harness|hatebr_offensive|hatebr_offensive": 1.0,
+     "harness|portuguese_hate_speech|portuguese_hate_speech": 1.0,
+     "harness|tweetsentbr|tweetsentbr": 1.0
+   },
+   "summary_tasks": {
+     "harness|enem_challenge|enem_challenge|None|3": {
+       "sample_size": 1429
+     },
+     "harness|bluex|bluex|None|3": {
+       "sample_size": 719
+     },
+     "harness|oab_exams|oab_exams|None|3": {
+       "sample_size": 2195
+     },
+     "harness|assin2_rte|assin2_rte|None|15": {
+       "sample_size": 2448
+     },
+     "harness|assin2_sts|assin2_sts|None|15": {
+       "sample_size": 2448
+     },
+     "harness|faquad_nli|faquad_nli|None|15": {
+       "sample_size": 650
+     },
+     "harness|hatebr_offensive|hatebr_offensive|None|25": {
+       "sample_size": 1400
+     },
+     "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": {
+       "sample_size": 851
+     },
+     "harness|tweetsentbr|tweetsentbr|None|25": {
+       "sample_size": 2010
+     }
+   },
+   "summary_general": {}
+ }