{
"path": "meta-llama/Llama-3.1-70B-Instruct",
"brainstorm": 0.437,
"open_qa": 0.77,
"closed_qa": 0.408,
"extract": 0.369,
"generation": 0.431,
"rewrite": 0.424,
"summarize": 0.443,
"classify": 0.532,
"reasoning_over_numerical_data": 0.458,
"multi-document_synthesis": 0.481,
"fact_checking_or_attributed_qa": 0.498,
"average": 0.456,
"brainstorm_rank": 5,
"open_qa_rank": 1,
"closed_qa_rank": 1,
"extract_rank": 1,
"generation_rank": 3,
"rewrite_rank": 3,
"summarize_rank": 1,
"classify_rank": 1,
"reasoning_over_numerical_data_rank": 1,
"multi-document_synthesis_rank": 4,
"fact_checking_or_attributed_qa_rank": 1,
"average_rank": 1,
"brainstorm_confi": "+3.9 / -3.9",
"open_qa_confi": "+7.8 / -8.3",
"closed_qa_confi": "+5.9 / -5.9",
"extract_confi": "+5.7 / -5.7",
"generation_confi": "+3.8 / -3.9",
"rewrite_confi": "+3.6 / -3.4",
"summarize_confi": "+6.9 / -6.4",
"classify_confi": "+5.7 / -6.0",
"reasoning_over_numerical_data_confi": "+3.8 / -3.6",
"multi-document_synthesis_confi": "+4.6 / -4.6",
"fact_checking_or_attributed_qa_confi": "+4.5 / -4.6",
"average_confi": "+1.48 / -1.41"
}