open-r1-eval-leaderboard
/
eval_results
/HuggingFaceH4
/mistral-7b-odpo
/v2.7
/arc
/results_2024-05-02T17-30-11.390947.json
edbeeching
HF Staff
Upload eval_results/HuggingFaceH4/mistral-7b-odpo/v2.7/arc/results_2024-05-02T17-30-11.390947.json with huggingface_hub
21fac4b
verified
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 7081210.992901049,
    "end_time": 7081329.089847066,
    "total_evaluation_time_secondes": "118.09694601781666",
    "model_name": "HuggingFaceH4/mistral-7b-odpo",
    "model_sha": "8cf4bb34cb0b7dc646459ec70adda2e192e81c50",
    "model_dtype": "torch.bfloat16",
    "model_size": "13.99 GB",
    "config": null
  },
  "results": {
    "leaderboard|arc:challenge|25": {
      "acc": 0.5656996587030717,
      "acc_stderr": 0.01448470304885736,
      "acc_norm": 0.5921501706484642,
      "acc_norm_stderr": 0.014361097288449701
    }
  },
  "versions": {
    "leaderboard|arc:challenge|25": 0
  },
  "config_tasks": {
    "leaderboard|arc:challenge": {
      "name": "arc:challenge",
      "prompt_function": "arc",
      "hf_repo": "ai2_arc",
      "hf_subset": "ARC-Challenge",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "train",
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling_from_train",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "leaderboard",
        "arc"
      ],
      "original_num_docs": 1172,
      "effective_num_docs": 1172,
      "trust_dataset": true
    }
  },
  "summary_tasks": {
    "leaderboard|arc:challenge|25": {
      "hashes": {
        "hash_examples": "17b0cae357c0259e",
        "hash_full_prompts": "f42cdfcbcc9b6fa4",
        "hash_input_tokens": "bedd26d7d7ad2546",
        "hash_cont_tokens": "8b6f018bc80a3417"
      },
      "truncated": 0,
      "non_truncated": 1172,
      "padded": 4660,
      "non_padded": 27,
      "effective_few_shots": 25.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "aaa6929c6d3771fb",
      "hash_full_prompts": "099f2d4c459a2c1d",
      "hash_input_tokens": "7eee0cf412c3eaf0",
      "hash_cont_tokens": "6857c42b72c1a62a"
    },
    "truncated": 0,
    "non_truncated": 1172,
    "padded": 4660,
    "non_padded": 27,
    "num_truncated_few_shots": 0
  }
}