{
"results": {
"truthfulqa_mc": {
"mc1": 0.20563035495716034,
"mc1_stderr": 0.01414848221946097,
"mc2": 0.3427721103825635,
"mc2_stderr": 0.013140833170280896
}
},
"versions": {
"truthfulqa_mc": 1
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=./workdir_7b_16mix/ckpt_358",
"num_fewshot": 0,
"batch_size": "1",
"batch_sizes": [],
"device": null,
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}