FIDLE Evaluator


FIDLE LLM-FR Leaderboard 🏆

This is a leaderboard exclusively in French. We do not intend to become a reference for LLM evaluations. This is for informational and educational purposes only. Please cross-reference with other, more official leaderboards.

Note: The assessments have been adapted to the Reasoning Language Model: all tasks are in generative mode, with no limit on token generation.

  • IFEval-Fr : French Translation of IFEval
  • Pr-Fouras : "Père Fouras"'s Riddles (ex : fan site)
  • Sornette : Classification of texts (GORAFI, wikipedia, le saviez-vous, ...) into 4 categories - burlesque et fantaisiste, ludique et didactique, insidieux et mensonger, moral et accablant
  • Kangourou-TO : MATH Quizzes Kangourou. Text Only : Only questions without figures.

Model Types:

  • 🪨 - Base, Pretrained, Foundation Model
  • 💬 - Chat Model (Instruct, RLHF, DPO, ...)
  • 💅🏻 - Fine-tuned Model
  • 🤔 - Reasoning Model
{
  "headers": [
    "R",
    "T",
    "Model",
    "Average ⬆️",
    "IFEval-Fr",
    "GPQA-Fr",
    "Bac-Fr",
    "Pr-Fouras",
    "Sornette",
    "Kangourou-TO",
    "#Params (B)",
    "Precision",
    "Hub License",
    "Hub ❤️"
  ],
  "data": [
    [
      "1 🥇",
      "🤔",
      "<a target=\"_blank\" href=\"https://huggingface.co/deepseek-ai/DeepSeek-R1\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">deepseek-ai/DeepSeek-R1</a>",
      64.62,
      69.42,
      37.97,
      53.67,
      72.99,
      64.67,
      88.98,
      684.53,
      "bfloat16",
      "mit",
      11562
    ],
    [
      "2 🥈",
      "🤔",
      "<a target=\"_blank\" href=\"https://huggingface.co/Qwen/QwQ-32B\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">Qwen/QwQ-32B</a>",
      59.36,
      67.62,
      20.63,
      55.93,
      55.96,
      73.33,
      82.7,
      32.76,
      "bfloat16",
      "apache-2.0",
      2303
    ],
    [
      "3 🥉",
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/deepseek-ai/DeepSeek-V3\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">deepseek-ai/DeepSeek-V3</a>",
      54.8,
      72.25,
      32.36,
      47.74,
      59.85,
      58,
      58.62,
      684.53,
      "bfloat16",
      null,
      3660
    ],
    [
      4,
      "🤔",
      "<a target=\"_blank\" href=\"https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-70B\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">deepseek-ai/DeepSeek-R1-Distill-Llama-70B</a>",
      54.56,
      66.01,
      42.12,
      50.85,
      40.39,
      68,
      59.97,
      70.55,
      "bfloat16",
      "mit",
      634
    ],
    [
      5,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/mistralai/Mistral-Large-Instruct-2411\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">mistralai/Mistral-Large-Instruct-2411</a>",
      53.01,
      66.26,
      21.12,
      50.14,
      58.39,
      60.67,
      61.5,
      122.61,
      "bfloat16",
      "other",
      210
    ],
    [
      6,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/Qwen/Qwen2.5-72B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">Qwen/Qwen2.5-72B-Instruct</a>",
      52.49,
      70.67,
      26.74,
      46.47,
      49.88,
      68,
      53.19,
      72.71,
      "bfloat16",
      "other",
      771
    ],
    [
      7,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/Qwen/Qwen2.5-32B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">Qwen/Qwen2.5-32B-Instruct</a>",
      48.9,
      66.08,
      21.12,
      47.6,
      38.44,
      67.33,
      52.85,
      32.76,
      "bfloat16",
      "apache-2.0",
      240
    ],
    [
      8,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">meta-llama/Llama-3.3-70B-Instruct</a>",
      47.39,
      75.36,
      30.65,
      46.89,
      48.91,
      56,
      26.56,
      70.55,
      "bfloat16",
      "llama3.3",
      1744
    ],
    [
      9,
      "💅🏻",
      "<a target=\"_blank\" href=\"https://huggingface.co/MaziyarPanahi/calme-3.2-instruct-78b\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">MaziyarPanahi/calme-3.2-instruct-78b</a>",
      47.37,
      65.04,
      22.1,
      42.09,
      53.53,
      70.67,
      30.8,
      77.96,
      "bfloat16",
      "other",
      107
    ],
    [
      10,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/meta-llama/Llama-3.1-405B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">meta-llama/Llama-3.1-405B-Instruct</a>",
      46.85,
      68.59,
      21.61,
      48.45,
      62.29,
      37.33,
      42.84,
      405.85,
      "bfloat16",
      "llama3.1",
      568
    ],
    [
      11,
      "💅🏻",
      "<a target=\"_blank\" href=\"https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0.3\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">jpacifico/Chocolatine-2-14B-Instruct-v2.0.3</a>",
      43.17,
      64.71,
      14.29,
      42.51,
      31.87,
      66.67,
      38.94,
      14.77,
      "bfloat16",
      "apache-2.0",
      11
    ],
    [
      12,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/mistralai/Mistral-Small-24B-Instruct-2501\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">mistralai/Mistral-Small-24B-Instruct-2501</a>",
      39.95,
      55.33,
      13.8,
      41.38,
      34.31,
      48.67,
      46.23,
      23.57,
      "bfloat16",
      "apache-2.0",
      878
    ],
    [
      13,
      "🤔",
      "<a target=\"_blank\" href=\"https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      39.74,
      61.33,
      35.78,
      45.9,
      30.9,
      58.67,
      5.87,
      32.76,
      "bfloat16",
      "mit",
      1284
    ],
    [
      14,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/tiiuae/Falcon3-10B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">tiiuae/Falcon3-10B-Instruct</a>",
      34.68,
      66.48,
      10.62,
      38.7,
      27.49,
      53.33,
      11.47,
      10.31,
      "bfloat16",
      "other",
      97
    ],
    [
      15,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/Qwen/Qwen2.5-7B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">Qwen/Qwen2.5-7B-Instruct</a>",
      34.25,
      58.48,
      10.62,
      38.28,
      23.84,
      53.33,
      20.96,
      7.62,
      "bfloat16",
      "apache-2.0",
      581
    ],
    [
      16,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/Qwen/Qwen2.5-14B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">Qwen/Qwen2.5-14B-Instruct</a>",
      33.91,
      65.71,
      9.16,
      40.96,
      39.9,
      21.33,
      26.39,
      14.77,
      "bfloat16",
      "apache-2.0",
      208
    ],
    [
      17,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/tiiuae/Falcon3-7B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">tiiuae/Falcon3-7B-Instruct</a>",
      28.77,
      62.03,
      7.94,
      34.6,
      23.6,
      35.33,
      9.09,
      7.46,
      "bfloat16",
      "other",
      64
    ],
    [
      18,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/Qwen/Qwen2.5-3B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">Qwen/Qwen2.5-3B-Instruct</a>",
      28.36,
      47.35,
      10.87,
      30.65,
      15.57,
      40.67,
      25.03,
      3.09,
      "bfloat16",
      "other",
      221
    ],
    [
      19,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/google/txgemma-27b-chat\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">google/txgemma-27b-chat</a>",
      24.28,
      61.94,
      0,
      37.99,
      45.74,
      0,
      0,
      27.23,
      "bfloat16",
      "other",
      13
    ],
    [
      20,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/utter-project/EuroLLM-9B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">utter-project/EuroLLM-9B-Instruct</a>",
      19.74,
      47.35,
      1.83,
      19.49,
      12.41,
      37.33,
      0,
      9.15,
      "bfloat16",
      "apache-2.0",
      158
    ],
    [
      21,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/internlm/internlm3-8b-instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">internlm/internlm3-8b-instruct</a>",
      19.69,
      45.58,
      0,
      25,
      7.06,
      27.33,
      13.16,
      8.8,
      "bfloat16",
      "apache-2.0",
      208
    ],
    [
      22,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">meta-llama/Llama-3.2-3B-Instruct</a>",
      14.86,
      34.76,
      0,
      15.54,
      7.54,
      31.33,
      0,
      3.22,
      "bfloat16",
      "llama3.2",
      954
    ],
    [
      23,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/OpenLLM-France/Lucie-7B-Instruct-v1.1\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">OpenLLM-France/Lucie-7B-Instruct-v1.1</a>",
      11.7,
      24.98,
      2.56,
      14.83,
      8.03,
      18,
      1.8,
      6.71,
      "bfloat16",
      "apache-2.0",
      8
    ],
    [
      24,
      "🤔",
      "<a target=\"_blank\" href=\"https://huggingface.co/open-r1/OpenR1-Qwen-7B\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">open-r1/OpenR1-Qwen-7B</a>",
      10.07,
      17.99,
      0,
      27.4,
      0.49,
      0,
      14.52,
      7.62,
      "bfloat16",
      "apache-2.0",
      40
    ],
    [
      25,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">meta-llama/Llama-3.2-1B-Instruct</a>",
      6.94,
      28.3,
      0,
      8.33,
      1.7,
      3.33,
      0,
      1.24,
      "bfloat16",
      "llama3.2",
      842
    ],
    [
      26,
      "💬",
      "<a target=\"_blank\" href=\"https://huggingface.co/utter-project/EuroLLM-1.7B-Instruct\" style=\"color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;\">utter-project/EuroLLM-1.7B-Instruct</a>",
      5.19,
      18.01,
      0,
      8.05,
      5.11,
      0,
      0,
      1.66,
      "bfloat16",
      "apache-2.0",
      70
    ]
  ],
  "metadata": null
}