{
"path": "mistralai/Mistral-7B-Instruct-v0.3",
"brainstorm": 0.241,
"open_qa": 0.608,
"closed_qa": 0.213,
"extract": 0.141,
"generation": 0.191,
"rewrite": 0.185,
"summarize": 0.168,
"classify": 0.271,
"reasoning_over_numerical_data": 0.173,
"multi-document_synthesis": 0.255,
"fact_checking_or_attributed_qa": 0.465,
"average": 0.2427,
"brainstorm_rank": 16,
"open_qa_rank": 21,
"closed_qa_rank": 14,
"extract_rank": 14,
"generation_rank": 16,
"rewrite_rank": 16,
"summarize_rank": 15,
"classify_rank": 20,
"reasoning_over_numerical_data_rank": 16,
"multi-document_synthesis_rank": 14,
"fact_checking_or_attributed_qa_rank": 8,
"average_rank": 17,
"brainstorm_confi": "+3.4 / -3.3",
"open_qa_confi": "+9.8 / -9.8",
"closed_qa_confi": "+5.7 / -5.2",
"extract_confi": "+4.5 / -4.2",
"generation_confi": "+3.2 / -3.2",
"rewrite_confi": "+2.9 / -2.8",
"summarize_confi": "+5.2 / -5.0",
"classify_confi": "+6.0 / -5.7",
"reasoning_over_numerical_data_confi": "+3.0 / -3.0",
"multi-document_synthesis_confi": "+4.0 / -4.0",
"fact_checking_or_attributed_qa_confi": "+4.3 / -4.3",
"average_confi": "+1.27 / -1.24"
}