{
  "results": {
    "truthfulqa_mc": {
      "mc1": 0.20563035495716034,
      "mc1_stderr": 0.014148482219460972,
      "mc2": 0.3302260090274828,
      "mc2_stderr": 0.013011547125529084
    }
  },
  "versions": {
    "truthfulqa_mc": 1
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=workdir_7b/ckpt_351",
    "num_fewshot": 0,
    "batch_size": "32",
    "batch_sizes": [],
    "device": null,
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
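The layout of this file (top-level `results`, `versions`, and `config` keys, with `model: "hf-causal"` and the `truthfulqa_mc` task) matches the output of EleutherAI's lm-evaluation-harness, whose legacy `simple_evaluate` API takes keyword arguments mirroring the `config` block above. As a minimal sketch, assuming that harness (v0.3.x) is what produced this file, a run like the following would regenerate a results dict in the same shape; the checkpoint path `workdir_7b/ckpt_351` is taken from the config and is specific to this run.

```python
# Sketch: reproduce a TruthfulQA-MC results file like the one above,
# assuming EleutherAI's lm-evaluation-harness v0.3.x is installed and the
# checkpoint path from the config exists locally.
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal",                              # HuggingFace causal LM wrapper
    model_args="pretrained=workdir_7b/ckpt_351",    # checkpoint path from the config above
    tasks=["truthfulqa_mc"],
    num_fewshot=0,
    batch_size=32,
    no_cache=True,
)

# The returned dict contains "results", "versions", and "config" keys,
# matching the structure of the JSON shown above.
print(json.dumps(results, indent=2, default=str))
```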