FIDLE Evaluator


LLM-FR Leaderboard 🏆

This leaderboard evaluates intelligence modeling in the French language. It is not intended to serve as a reference for LLM evaluations. It is provided for informational and educational purposes only. Please consult other, more official leaderboards for authoritative assessments.

Note: The assessments have been adapted for Reasoning Language Models: all tasks are in generative mode, with no limit on token generation.

  • Pr-Fouras : "Père Fouras"'s Riddles (e.g. fan site)
  • Kangourou-TO : Kangourou MATH Quizzes. Text Only: only questions without figures.
  • Sornette : Classification of texts (GORAFI, Wikipedia, le saviez-vous, ...) into 4 categories - burlesque et fantaisiste, ludique et didactique, insidieux et mensonger, moral et accablant
  • Mix-Fr : 🍲 Mixture of public datasets translated into French

Model Types:

  • 🪨 - Base, Pretrained, Foundation Model
  • 💬 - Chat Model (Instruct, RLHF, DPO, ...)
  • πŸ’…πŸ» - Fine-tuned Model
  • 🤔 - Reasoning Model
{
  • "headers": [
    • "R",
    • "T",
    • "Model",
    • "Average ⬆️",
    • "Pr-Fouras",
    • "Kangourou-TO",
    • "Sornette",
    • "Mix-Fr",
    • "#Params (B)",
    • "Precision",
    • "Hub License",
    • "Hub ❀️"
    ],
  • "data": [
    • [
      • "1 🥇",
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1</a>",
      • 72.05,
      • 72.99,
      • 88.98,
      • 64.67,
      • 61.55,
      • 684.53,
      • "bfloat16",
      • "mit",
      • 11562
      ],
    • [
      • "2 🥈",
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8</a>",
      • 69.8,
      • 58.64,
      • 85.41,
      • 65.33,
      • 69.81,
      • 401.65,
      • "bfloat16",
      • "other",
      • 111
      ],
    • [
      • "3 🥉",
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/Qwen/QwQ-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/QwQ-32B</a>",
      • 68.44,
      • 55.96,
      • 82.7,
      • 73.33,
      • 61.78,
      • 32.76,
      • "bfloat16",
      • "apache-2.0",
      • 2303
      ],
    • [
      • 4,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-0528" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-0528</a>",
      • 66.96,
      • 72.51,
      • 71,
      • 66.67,
      • 57.66,
      • 684.53,
      • "bfloat16",
      • "mit",
      • 1640
      ],
    • [
      • 5,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-235B-A22B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-235B-A22B</a>",
      • 64.33,
      • 61.56,
      • 85.41,
      • 45.33,
      • 65.02,
      • 235.09,
      • "bfloat16",
      • "apache-2.0",
      • 775
      ],
    • [
      • 6,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B</a>",
      • 62.95,
      • 45.5,
      • 92.03,
      • 46,
      • 68.27,
      • 30.53,
      • "bfloat16",
      • "apache-2.0",
      • 520
      ],
    • [
      • 7,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-32B</a>",
      • 60.01,
      • 44.04,
      • 93.55,
      • 36.67,
      • 65.78,
      • 32.76,
      • "bfloat16",
      • "apache-2.0",
      • 293
      ],
    • [
      • 8,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Large-Instruct-2411" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Large-Instruct-2411</a>",
      • 59.69,
      • 58.39,
      • 61.5,
      • 60.67,
      • 58.2,
      • 122.61,
      • "bfloat16",
      • "other",
      • 210
      ],
    • [
      • 9,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-V3" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-V3</a>",
      • 59.12,
      • 59.85,
      • 58.62,
      • 58,
      • 60,
      • 684.53,
      • "bfloat16",
      • null,
      • 3660
      ],
    • [
      • 10,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-8B</a>",
      • 58.63,
      • 41.12,
      • 88.98,
      • 38.67,
      • 65.78,
      • 8.19,
      • "bfloat16",
      • "apache-2.0",
      • 357
      ],
    • [
      • 11,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-72B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-72B-Instruct</a>",
      • 57.41,
      • 49.88,
      • 53.19,
      • 68,
      • 58.57,
      • 72.71,
      • "bfloat16",
      • "other",
      • 771
      ],
    • [
      • 12,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-70B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Llama-70B</a>",
      • 56.7,
      • 40.39,
      • 59.97,
      • 68,
      • 58.43,
      • 70.55,
      • "bfloat16",
      • "mit",
      • 634
      ],
    • [
      • 13,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Small-3.2-24B-Instruct-2506" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Small-3.2-24B-Instruct-2506</a>",
      • 54.38,
      • 50.36,
      • 60.99,
      • 52.67,
      • 53.48,
      • 24.01,
      • "bfloat16",
      • "apache-2.0",
      • 320
      ],
    • [
      • 14,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 53.87,
      • 38.44,
      • 52.85,
      • 67.33,
      • 56.84,
      • 32.76,
      • "bfloat16",
      • "apache-2.0",
      • 240
      ],
    • [
      • 15,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Magistral-Small-2506" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Magistral-Small-2506</a>",
      • 53.61,
      • 47.69,
      • 75.75,
      • 46.67,
      • 44.34,
      • 23.57,
      • "bfloat16",
      • "apache-2.0",
      • 472
      ],
    • [
      • 16,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon-H1-7B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon-H1-7B-Instruct</a>",
      • 53.05,
      • 36.5,
      • 58.45,
      • 60.67,
      • 56.59,
      • 7.59,
      • "bfloat16",
      • "apache-2.0",
      • 13
      ],
    • [
      • 17,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-4-Scout-17B-16E-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-4-Scout-17B-16E-Instruct</a>",
      • 52.34,
      • 51.34,
      • 66.93,
      • 24,
      • 67.09,
      • 108.64,
      • "bfloat16",
      • "other",
      • 913
      ],
    • [
      • 18,
      • "πŸ’…πŸ»",
      • "<a target="_blank" href="https://huggingface.co/MaziyarPanahi/calme-3.2-instruct-78b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MaziyarPanahi/calme-3.2-instruct-78b</a>",
      • 52.14,
      • 53.53,
      • 30.8,
      • 70.67,
      • 53.56,
      • 77.96,
      • "bfloat16",
      • "other",
      • 107
      ],
    • [
      • 19,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.1-405B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.1-405B-Instruct</a>",
      • 50.25,
      • 62.29,
      • 42.84,
      • 37.33,
      • 58.52,
      • 405.85,
      • "bfloat16",
      • "llama3.1",
      • 568
      ],
    • [
      • 20,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 48.58,
      • 54.5,
      • 29.44,
      • 50.67,
      • 59.69,
      • 27.43,
      • "bfloat16",
      • "gemma",
      • 1338
      ],
    • [
      • 21,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.3-70B-Instruct</a>",
      • 48.15,
      • 48.91,
      • 26.56,
      • 56,
      • 61.13,
      • 70.55,
      • "bfloat16",
      • "llama3.3",
      • 1744
      ],
    • [
      • 22,
      • "πŸ’…πŸ»",
      • "<a target="_blank" href="https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0.3" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">jpacifico/Chocolatine-2-14B-Instruct-v2.0.3</a>",
      • 47.77,
      • 31.87,
      • 38.94,
      • 66.67,
      • 53.61,
      • 14.77,
      • "bfloat16",
      • "apache-2.0",
      • 11
      ],
    • [
      • 23,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-0528-Qwen3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-0528-Qwen3-8B</a>",
      • 46.81,
      • 27.25,
      • 62.35,
      • 43.33,
      • 54.29,
      • 8.19,
      • "bfloat16",
      • "mit",
      • 592
      ],
    • [
      • 24,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Small-3.1-24B-Instruct-2503" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Small-3.1-24B-Instruct-2503</a>",
      • 46.4,
      • 42.82,
      • 51.83,
      • 36.67,
      • 54.27,
      • 24.01,
      • "bfloat16",
      • "apache-2.0",
      • 1275
      ],
    • [
      • 25,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Small-24B-Instruct-2501" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Small-24B-Instruct-2501</a>",
      • 44.39,
      • 34.31,
      • 46.23,
      • 48.67,
      • 48.36,
      • 23.57,
      • "bfloat16",
      • "apache-2.0",
      • 878
      ],
    • [
      • 26,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 37.26,
      • 30.9,
      • 5.87,
      • 58.67,
      • 53.62,
      • 32.76,
      • "bfloat16",
      • "mit",
      • 1284
      ],
    • [
      • 27,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-7B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-7B-Instruct</a>",
      • 36.63,
      • 23.84,
      • 20.96,
      • 53.33,
      • 48.38,
      • 7.62,
      • "bfloat16",
      • "apache-2.0",
      • 581
      ],
    • [
      • 28,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-12b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-12b-it</a>",
      • 36.49,
      • 43.31,
      • 21.81,
      • 22,
      • 58.83,
      • 12.19,
      • "bfloat16",
      • "gemma",
      • 354
      ],
    • [
      • 29,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 36.22,
      • 27.49,
      • 11.47,
      • 53.33,
      • 52.59,
      • 10.31,
      • "bfloat16",
      • "other",
      • 97
      ],
    • [
      • 30,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct</a>",
      • 35.24,
      • 39.9,
      • 26.39,
      • 21.33,
      • 53.34,
      • 14.77,
      • "bfloat16",
      • "apache-2.0",
      • 208
      ],
    • [
      • 31,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-3B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-3B-Instruct</a>",
      • 30.07,
      • 15.57,
      • 25.03,
      • 40.67,
      • 39,
      • 3.09,
      • "bfloat16",
      • "other",
      • 221
      ],
    • [
      • 32,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-7B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-7B-Instruct</a>",
      • 29.09,
      • 23.6,
      • 9.09,
      • 35.33,
      • 48.32,
      • 7.46,
      • "bfloat16",
      • "other",
      • 64
      ],
    • [
      • 33,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/K-intelligence/Midm-2.0-Base-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">K-intelligence/Midm-2.0-Base-Instruct</a>",
      • 26.15,
      • 9.49,
      • 24.36,
      • 25.33,
      • 45.41,
      • 11.55,
      • "bfloat16",
      • "mit",
      • 69
      ],
    • [
      • 34,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/google/txgemma-27b-chat" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/txgemma-27b-chat</a>",
      • 23.93,
      • 45.74,
      • 0,
      • 0,
      • 49.97,
      • 27.23,
      • "bfloat16",
      • "other",
      • 13
      ],
    • [
      • 35,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/utter-project/EuroLLM-9B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">utter-project/EuroLLM-9B-Instruct</a>",
      • 20.79,
      • 12.41,
      • 0,
      • 37.33,
      • 33.42,
      • 9.15,
      • "bfloat16",
      • "apache-2.0",
      • 158
      ],
    • [
      • 36,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/internlm/internlm3-8b-instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">internlm/internlm3-8b-instruct</a>",
      • 20.71,
      • 7.06,
      • 13.16,
      • 27.33,
      • 35.29,
      • 8.8,
      • "bfloat16",
      • "apache-2.0",
      • 208
      ],
    • [
      • 37,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.2-3B-Instruct</a>",
      • 16.01,
      • 7.54,
      • 0,
      • 31.33,
      • 25.15,
      • 3.22,
      • "bfloat16",
      • "llama3.2",
      • 954
      ],
    • [
      • 38,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/OpenLLM-France/Lucie-7B-Instruct-v1.1" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">OpenLLM-France/Lucie-7B-Instruct-v1.1</a>",
      • 11.93,
      • 8.03,
      • 1.8,
      • 18,
      • 19.91,
      • 6.71,
      • "bfloat16",
      • "apache-2.0",
      • 8
      ],
    • [
      • 39,
      • "🤔",
      • "<a target="_blank" href="https://huggingface.co/open-r1/OpenR1-Qwen-7B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">open-r1/OpenR1-Qwen-7B</a>",
      • 9.43,
      • 0.49,
      • 14.52,
      • 0,
      • 22.7,
      • 7.62,
      • "bfloat16",
      • "apache-2.0",
      • 40
      ],
    • [
      • 40,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.2-1B-Instruct</a>",
      • 5.84,
      • 1.7,
      • 0,
      • 3.33,
      • 18.32,
      • 1.24,
      • "bfloat16",
      • "llama3.2",
      • 842
      ],
    • [
      • 41,
      • "💬",
      • "<a target="_blank" href="https://huggingface.co/utter-project/EuroLLM-1.7B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">utter-project/EuroLLM-1.7B-Instruct</a>",
      • 4.53,
      • 5.11,
      • 0,
      • 0,
      • 13.03,
      • 1.66,
      • "bfloat16",
      • "apache-2.0",
      • 70
      ]
    ],
  • "metadata": null
}