```json
{
  "results": {
    "truthfulqa_mc": {
      "mc1": 0.20563035495716034,
      "mc1_stderr": 0.01414848221946097,
      "mc2": 0.3427721103825635,
      "mc2_stderr": 0.013140833170280896
    }
  },
  "versions": {
    "truthfulqa_mc": 1
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=./workdir_7b_16mix/ckpt_358",
    "num_fewshot": 0,
    "batch_size": "1",
    "batch_sizes": [],
    "device": null,
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
```
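For reference, a minimal sketch of how a results file like the one above could be loaded and summarized; the filename `results_truthfulqa.json` is a hypothetical placeholder for wherever the JSON output is saved.

```python
import json

# Load the evaluation-harness results file (hypothetical path).
with open("results_truthfulqa.json") as f:
    report = json.load(f)

scores = report["results"]["truthfulqa_mc"]

# Print each multiple-choice metric with its bootstrap standard error.
for metric in ("mc1", "mc2"):
    value = scores[metric]
    stderr = scores[f"{metric}_stderr"]
    print(f"{metric}: {value:.4f} +/- {stderr:.4f}")
```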