open-r1-eval-leaderboard
/
eval_results
/HuggingFaceH4
/mistral-7b-odpo
/v3.1
/arc
/results_2024-04-02T14-12-15.550471.json
edbeeching
HF Staff
Upload eval_results/HuggingFaceH4/mistral-7b-odpo/v3.1/arc/results_2024-04-02T14-12-15.550471.json with huggingface_hub
f2020cc
verified
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 269620.753063767,
    "end_time": 269744.228074929,
    "total_evaluation_time_secondes": "123.47501116199419",
    "model_name": "HuggingFaceH4/mistral-7b-odpo",
    "model_sha": "86720fec9e71465a02f39ce2126f1c14cbb0fb11",
    "model_dtype": "torch.bfloat16",
    "model_size": "13.99 GB",
    "config": null
  },
  "results": {
    "leaderboard|arc:challenge|25": {
      "acc": 0.5802047781569966,
      "acc_stderr": 0.014422181226303026,
      "acc_norm": 0.5930034129692833,
      "acc_norm_stderr": 0.01435639941800912
    }
  },
  "versions": {
    "leaderboard|arc:challenge|25": 0
  },
  "config_tasks": {
    "leaderboard|arc:challenge": {
      "name": "arc:challenge",
      "prompt_function": "arc",
      "hf_repo": "ai2_arc",
      "hf_subset": "ARC-Challenge",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "train",
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling_from_train",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "leaderboard",
        "arc"
      ],
      "original_num_docs": 1172,
      "effective_num_docs": 1172,
      "trust_dataset": true
    }
  },
  "summary_tasks": {
    "leaderboard|arc:challenge|25": {
      "hashes": {
        "hash_examples": "17b0cae357c0259e",
        "hash_full_prompts": "f42cdfcbcc9b6fa4",
        "hash_input_tokens": "bedd26d7d7ad2546",
        "hash_cont_tokens": "8b6f018bc80a3417"
      },
      "truncated": 0,
      "non_truncated": 1172,
      "padded": 4660,
      "non_padded": 27,
      "effective_few_shots": 25.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "aaa6929c6d3771fb",
      "hash_full_prompts": "099f2d4c459a2c1d",
      "hash_input_tokens": "7eee0cf412c3eaf0",
      "hash_cont_tokens": "6857c42b72c1a62a"
    },
    "truncated": 0,
    "non_truncated": 1172,
    "padded": 4660,
    "non_padded": 27,
    "num_truncated_few_shots": 0
  }
}