MELABench Leaderboard

A Maltese Evaluation Language Benchmark 🇲🇹

{
  "headers": [
    "T",
    "Model",
    "N-Shot",
    "Version",
    "Average (All) ⬆️",
    "Average (NLU) 🧠",
    "Average (NLG) ✍️",
    "Sentiment Analysis (F1)",
    "SIB200 (F1)",
    "Taxi1500 (F1)",
    "Maltese News Categories (F1)",
    "MultiEURLEX (F1)",
    "Belebele (Accuracy)",
    "OPUS-100 EN→MT (BLEU)",
    "OPUS-100 EN→MT (ChrF)",
    "Flores-200 EN→MT (BLEU)",
    "Flores-200 EN→MT (ChrF)",
    "WebNLG (ChrF)",
    "WebNLG (Rouge-L)",
    "EUR-Lex-Sum (ChrF)",
    "EUR-Lex-Sum (Rouge-L)",
    "Maltese News Headlines (ChrF)",
    "Maltese News Headlines (Rouge-L)",
    "Type",
    "Maltese Training",
    "#Languages",
    "Architecture",
    "Precision",
    "Hub License",
    "#Params (B)",
    "Hub ❤️",
    "Available on the hub",
    "Model SHA"
  ],
  "data": [
    ["IT/IT", "CohereForAI/aya-101", 0, "1.0_english", 36.05, 47.39, 24.71, 78.13, 50.2, 29.52, 31.46, 18.48, 76.56, null, null, 19.51, 52.26, 36.04, 32.27, 8.21, 7.68, 30.58, 27.59, "instruction-tuned", "instruction-tuning", 101, "T5ForConditionalGeneration", "?", "apache-2.0", 12.92, 662, true, "main"],
    ["IT/PT", "bigscience/mt0-xxl", 0, "1.0_english", 35.1, 48.41, 21.79, 78.55, 54.18, 21.67, 36.07, 18.31, 81.67, 7.77, 28.28, 3.28, 25.01, 25.68, 26.17, 3.79, 3.16, 27.35, 26.8, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 13.95, 60, true, "main"],
    ["IT/PT", "bigscience/mt0-xxl", 0, "1.0_maltese", 34.83, 46.3, 23.35, 77.63, 50.82, 13.94, 35.37, 18.16, 81.89, 12.48, 35.85, 6.09, 33.27, 17.3, 22.5, 3.31, 2.7, 27.75, 27.64, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 13.95, 60, true, "main"],
    ["IT/IT", "CohereForAI/aya-101", 0, "1.0_maltese", 29.33, 33.56, 25.09, 55.26, 36.15, 4.2, 18.81, 14.25, 72.67, null, null, 19.7, 53.12, 38.12, 33.61, 5.09, 5.48, 32.81, 28.75, "instruction-tuned", "instruction-tuning", 101, "T5ForConditionalGeneration", "?", "apache-2.0", 12.92, 662, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-7b-instruct", 0, "1.0_english", 29.1, 41.56, 16.64, 73.4, 53.24, 15.07, 26.92, 11.74, 69, 9.11, 39.35, 5.52, 41.61, 2.12, 2.59, 0.2, 0.1, 0.03, 0.02, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 7.77, 72, true, "main"],
    ["IT/PT", "bigscience/mt0-xl", 0, "1.0_maltese", 26.78, 42.26, 11.29, 77.82, 47.95, 14.18, 32.15, 16.24, 65.22, null, null, 0.58, 10.71, 20.09, 23.15, 3.7, 2.95, 23.84, 22.72, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 3.74, 32, true, "main"],
    ["IT/PT", "bigscience/mt0-xl", 0, "1.0_english", 25.71, 40.77, 10.65, 72.33, 47.49, 9.64, 32.47, 16.82, 65.89, null, null, 0.45, 8.61, 30.29, 29.64, 4.7, 1.14, 9.71, 13.2, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 3.74, 32, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-7b-instruct", 0, "1.0_maltese", 24.53, 31.98, 17.09, 38.85, 40.9, 10.36, 23.87, 9.54, 68.33, 4.8, 34.74, 7.89, 46.7, 3.86, 3.8, 0.19, 0.13, 0.04, 0.02, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 7.77, 72, true, "main"],
    ["PT/?", "meta-llama/Meta-Llama-3-8B-Instruct", 0, "1.0_english", 23.13, 30.71, 15.54, 37.17, 43.61, 16.49, 21.99, 13.18, 51.85, null, null, 8.8, 39.85, 12.59, 6.52, 1.01, 1.08, 31.68, 24.16, "pre-trained", "unknown", "?", "LlamaForCausalLM", "?", "llama3", 8.03, 4315, true, "main"],
    ["PT/?", "google/gemma-2-9b-it", 0, "1.0_english", 23.1, 44.6, 1.6, 72.13, 60.93, 34.22, null, 16.43, 83.89, 0, 0, 0, 0, 8.01, 6.34, 0, 0, 0, 0, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 9.24, 748, true, "main"],
    ["IT/PT", "bigscience/mt0-large", 0, "1.0_english", 22.11, 36.07, 8.14, 75.88, 45.8, 13.62, 29.83, 17.65, 33.67, null, null, 0.19, 6.58, 24.84, 24.46, 5.37, 2.39, 6.33, 6.9, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 1.23, 49, true, "main"],
    ["IT/PT", "bigscience/mt0-large", 0, "1.0_maltese", 21.63, 35.14, 8.12, 76.2, 49.47, 5.04, 28.82, 16.95, 34.33, null, null, 0.5, 5.61, 21.92, 22.18, 4.88, 2.49, 8.52, 10.6, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 1.23, 49, true, "main"],
    ["PT/?", "google/gemma-2-9b-it", 0, "1.0_maltese", 20.82, 40.39, 1.25, 80.71, 31.18, 11.89, 21.46, 11.89, 85.22, 0, 0, 0, 0, 6.27, 4.95, 0, 0, 0, 0, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 9.24, 748, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-2b-instruct", 0, "1.0_english", 20.37, 28.74, 12, 75.39, 20.04, 10.73, 31.03, 10.33, 24.89, 2.72, 23.41, 3.51, 34.09, 1.95, 1.83, 0, 0, 1.08, 0.56, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 2.25, 25, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-1.7B-Instruct", 0, "1.0_english", 20.12, 18.39, 21.85, 27.97, 13.99, 4.23, 26.32, 13.73, 24.11, 9.2, 37.04, 15.73, 51.08, 11.71, 5.74, 0, 0, 16.42, 9.44, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 1.66, 94, true, "main"],
    ["PT/NO", "bigscience/bloomz-3b", 0, "1.0_english", 19.79, 32.86, 6.73, 62.79, 42.89, 16.26, 27.53, 14.77, 32.89, null, null, 0.47, 10.31, 11.47, 10.2, 0.39, 0.46, 17.46, 11.39, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 3, 81, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-9B-Instruct", 0, "1.0_english", 19.63, 37.03, 2.23, 68.14, 41.24, 23.05, 12.56, 7.65, 69.56, 0, 0, 0, 0, 11.17, 4.87, 0, 0, 0, 0, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 9.15, 190, true, "main"],
    ["PT/NO", "bigscience/bloomz-7b1", 0, "1.0_english", 19.48, 32.81, 6.15, 56.39, 48.77, 21.22, 27.4, 14.22, 28.89, null, null, 0.97, 13.68, 5.34, 4.55, 0.81, 1.09, 16.78, 10.66, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 7.07, 146, true, "main"],
    ["IT/PT", "bigscience/mt0-base", 0, "1.0_english", 19.12, 31.9, 6.34, 66.26, 43.09, 5.9, 33.82, 13.44, 28.89, null, null, 0.2, 5.33, 21.1, 21.33, 2.63, 0.38, 4.17, 4.89, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 0.58, 32, true, "main"],
    ["PT/?", "meta-llama/Meta-Llama-3-8B", 0, "1.0_english", 18.84, 26.31, 11.36, 25.97, 29.47, 15.75, 26.79, 16.22, 43.67, 3.3, 12.49, 5.34, 32.76, 11.27, 6, 0, 0, 0.81, 0.3, "pre-trained", "unknown", "?", "LlamaForCausalLM", "?", "llama3", 8.03, 6400, true, "main"],
    ["IT/PT", "openGPT-X/Teuken-7B-instruct-research-v0.4", 0, "1.0_english", 18.78, 19.98, 17.58, 32.1, 5.69, 15.23, 15.65, 6.17, 45.04, 0.85, 6.42, 12.04, 46.4, 9.67, 5.13, 2.53, 2.49, 31.16, 22.94, "instruction-tuned", "pre-training", 24, "LlamaForCausalLM", "?", "other", 7.45, 88, true, "main"],
    ["PT/?", "meta-llama/Meta-Llama-3-8B-Instruct", 0, "1.0_maltese", 18.75, 23.05, 14.46, 35.88, 24.43, 9.37, 16.26, 13.8, 38.56, null, null, 5.07, 35.97, 12.4, 4.67, 0.42, 0.35, 29.92, 23.57, "pre-trained", "unknown", "?", "LlamaForCausalLM", "?", "llama3", 8.03, 4315, true, "main"],
    ["IT/PT", "bigscience/mt0-base", 0, "1.0_maltese", 18.02, 28.95, 7.08, 56.95, 41.51, 4.2, 28.65, 13.95, 28.44, null, null, 0.79, 11.2, 13.9, 12.94, 3.48, 1.61, 6.94, 8.7, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 0.58, 32, true, "main"],
    ["PT/?", "google/gemma-2-9b", 0, "1.0_english", 17.52, 33.05, 1.98, 25.97, 59.16, 25.47, null, 13.15, 74.56, 0, 0, 0, 0, 9.82, 3.88, 0.21, 0.09, 0.01, 0, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 9.24, 680, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-7b", 0, "1.0_english", 17.17, 23.79, 10.55, 26.98, 26.43, 15.44, 27.74, 10.46, 35.67, 1.99, 17.12, 7.91, 33.04, 2.4, 1.62, 0, 0, 0.26, 0.18, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 7.77, 29, true, "main"],
    ["PT/NO", "bigscience/bloomz-1b7", 0, "1.0_english", 17.1, 28.95, 5.25, 51.89, 41.53, 14.67, 24.01, 13.95, 27.67, null, null, 0.59, 10.99, 8.16, 5.82, 0.36, 0.4, 10.81, 6.71, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.72, 23, true, "main"],
    ["IT/PT", "bigscience/mt0-small", 0, "1.0_english", 16.97, 26.22, 7.71, 55.48, 33.6, 4.29, 25.76, 13.81, 24.39, null, null, 2.16, 19.99, 14.32, 15.26, 3.89, 1.33, 3.29, 2.91, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 0.3, 31, true, "main"],
    ["IT/?", "mistralai/Ministral-8B-Instruct-2410", 0, "1.0_english", 16.85, 26.82, 6.88, 47.41, 34.42, 6.22, 18.9, 13.42, 40.56, 1.2, 6.74, 1.25, 16.93, 10.54, 5.04, 0.17, 0.18, 0.09, 0.03, "instruction-tuned", "unknown", "?", "MistralForCausalLM", "?", "other", 8.02, 564, true, "main"],
    ["PT/?", "google/gemma-2-2b-it", 0, "1.0_english", 16.25, 30.17, 2.32, 26.98, 39.37, 18.15, 34.57, 14.42, 47.56, 0, 0, 0, 0, 11.6, 6.53, 0, 0, 0, 0, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 2.61, 1240, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-2b", 0, "1.0_english", 16.06, 21.79, 10.33, 58.99, 9.77, 13.58, 18.31, 8.55, 21.56, 1.42, 17.3, 3.67, 25.71, 2.87, 2.5, 0, 0, 10.94, 5.79, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 2.25, 24, true, "main"],
    ["IT/PT", "openGPT-X/Teuken-7B-instruct-research-v0.4", 0, "1.0_maltese", 15.97, 19.12, 12.82, 40.33, 7.15, 10.69, 5.65, 8.06, 42.85, 0.83, 9.42, 1.77, 19.59, 11.54, 4.86, 3.16, 3.43, 25.78, 20.11, "instruction-tuned", "pre-training", 24, "LlamaForCausalLM", "?", "other", 7.45, 88, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-9B", 0, "1.0_english", 15.96, 29.77, 2.16, 58.54, 38.66, 18.14, 6.11, 5.94, 51.22, 0, 0, 0, 0, 10.81, 4.49, 0, 0, 0, 0, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 9.15, 155, true, "main"],
    ["PT/?", "google/gemma-2-9b", 0, "1.0_maltese", 15.78, 29.44, 2.13, 26.98, 31.34, 8.67, 21.15, 15.37, 73.11, 0, 0, 0, 0, 10.13, 4.3, 0.42, 0.5, 0.04, 0.04, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 9.24, 680, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-2b-instruct", 0, "1.0_maltese", 15.58, 20.91, 10.26, 49.25, 14.43, 4.2, 23.21, 8.78, 25.56, 2.14, 19.12, 3.17, 28.13, 3.69, 2.34, 0, 0, 0.61, 0.36, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 2.25, 25, true, "main"],
    ["PT/NO", "bigscience/bloomz-1b1", 0, "1.0_english", 15.31, 26, 4.62, 50.36, 36.03, 10.14, 23.34, 15.13, 21, null, null, 0.19, 7.69, 4.64, 3.39, 2.02, 2.88, 13.8, 7.9, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.06, 33, true, "main"],
    ["PT/NO", "bigscience/bloomz-560m", 0, "1.0_english", 14.9, 25.66, 4.14, 49.6, 32.67, 8.69, 20.77, 14.92, 27.28, null, null, 0.24, 6.77, 7.09, 4.93, 0.75, 1.01, 11.89, 5.86, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 0.56, 131, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-7b", 0, "1.0_maltese", 14.66, 19.37, 9.96, 25.97, 10.1, 12.24, 16.98, 6.46, 44.44, 1.56, 17.32, 6.29, 28.83, 3.41, 3.17, 0.06, 0.06, 0.31, 0.18, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 7.77, 29, true, "main"],
    ["IT/?", "mistralai/Ministral-8B-Instruct-2410", 0, "1.0_maltese", 14.31, 19.63, 8.99, 46.11, 4.05, 12.17, 12.21, 13.56, 29.67, 0.8, 4.24, 1.36, 17.39, 10.64, 4.51, 0.11, 0.07, 19.36, 12.6, "instruction-tuned", "unknown", "?", "MistralForCausalLM", "?", "other", 8.02, 564, true, "main"],
    ["IT/PT", "bigscience/mt0-small", 0, "1.0_maltese", 13.89, 22.93, 4.84, 42.33, 34.33, 4.2, 20.26, 13.7, 22.78, null, null, 0.38, 9.39, 11.08, 10.55, 4.14, 1.72, 2.88, 2, "instruction-tuned", "pre-training", 120, "MT5ForConditionalGeneration", "?", "apache-2.0", 0.3, 31, true, "main"],
    ["PT/NO", "ai-forever/mGPT-13B", 0, "1.0_maltese", 13.68, 20.67, 6.69, 49.12, 27.74, 4.2, 8.32, 7.18, 27.44, 0.16, 12.29, 0.33, 12.23, 8.69, 4.1, 0.05, 0.11, 0.61, 0.12, "pre-trained", "none", 61, "GPT2LMHeadModel", "?", "mit", 0, 49, true, "main"],
    ["PT/NO", "bigscience/bloomz-7b1", 0, "1.0_maltese", 13.65, 25.26, 2.04, 50.2, 39.29, 7.85, 18.49, 12.75, 23, null, null, 0.72, 7.75, 2.28, 1.83, 0, 0, 0.3, 0.17, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 7.07, 146, true, "main"],
    ["PT/?", "meta-llama/Llama-2-7b-hf", 0, "1.0_english", 13.27, 24.35, 2.19, 43.63, 25.77, 18.06, 20.97, 10.75, 26.93, null, null, 0, 0, 10.96, 4.54, 0, 0, 0, 0, "pre-trained", "unknown", "?", "LlamaForCausalLM", "?", "llama2", 6.74, 2213, true, "main"],
    ["IT/NO", "CohereForAI/aya-23-8B", 0, "1.0_english", 13.17, 22.79, 3.56, 53.09, 16.91, 10.79, 4.45, 12.21, 39.26, null, null, 0, 0.5, 11.16, 4.91, 0, 0, 11.54, 6.15, "instruction-tuned", "none", 23, "CohereForCausalLM", "?", "cc-by-nc-4.0", 8.03, 425, true, "main"],
    ["IT/?", "meta-llama/Llama-2-7b-chat-hf", 0, "1.0_english", 12.86, 23.26, 2.46, 31.79, 34.02, 11.86, 19.93, 10.34, 31.59, null, null, 0, 0.04, 12.26, 5.9, 0, 0, 0, 0, "instruction-tuned", "unknown", "?", "LlamaForCausalLM", "?", "llama2", 6.74, 4670, true, "main"],
    ["IT/?", "meta-llama/Llama-2-13b-chat-hf", 0, "1.0_english", 12.85, 23.7, 2, 27.97, 30.87, 19.64, 13.06, 16.63, 34, null, null, 0, 0, 10, 6.77, 0, 0, 0, 0, "instruction-tuned", "unknown", "?", "LlamaForCausalLM", "?", "llama2", 13.02, 1105, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-9B-Instruct", 0, "1.0_maltese", 12.82, 23.13, 2.51, 25.97, 23.79, 4.2, 11.25, 10.31, 63.22, 0, 0.04, 0, 0.59, 11.84, 6.25, 0, 0, 0.16, 0.1, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 9.15, 190, true, "main"],
    ["PT/?", "meta-llama/Meta-Llama-3-8B", 0, "1.0_maltese", 12.78, 14.39, 11.17, 36.29, 24.29, 4.2, 7.29, 14.3, null, 1.56, 6.3, 2.88, 23.5, 10.94, 5.04, 0.15, 0.12, 20.5, 15.02, "pre-trained", "unknown", "?", "LlamaForCausalLM", "?", "llama3", 8.03, 6400, true, "main"],
    ["PT/NO", "ai-forever/mGPT-13B", 0, "1.0_english", 12.7, 19.22, 6.18, 38.5, 15.65, 11.98, 16.02, 7.51, 25.67, 0.28, 8.62, 0.22, 11, 9.28, 4.32, 0.06, 0.07, 4.99, 1.94, "pre-trained", "none", 61, "GPT2LMHeadModel", "?", "mit", 0, 49, true, "main"],
    ["PT/NO", "facebook/xglm-564M", 0, "1.0_maltese", 12.52, 19.72, 5.32, 46.35, 24.35, 4.2, 5.98, 14.87, 22.56, null, null, 0.31, 12.06, 9.64, 4.24, 2.71, 3.08, 7.48, 1.83, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 0, 53, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-1.7B-Instruct", 0, "1.0_maltese", 12.32, 14.88, 9.76, 25.97, 3.12, 4.2, 18.58, 11.98, 25.44, 0, 2.61, 10.45, 35.23, 10.75, 5.23, 0, 0, 0.4, 0.19, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 1.66, 94, true, "main"],
    ["PT/NO", "bigscience/bloom-7b1", 0, "1.0_english", 12.21, 22.24, 2.17, 25.97, 36.63, 13.88, 20.04, 12.84, 24.11, null, null, 0, 0.22, 10.63, 5.32, 0, 0, 0, 0, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 7.07, 203, true, "main"],
    ["PT/NO", "facebook/xglm-7.5B", 0, "1.0_maltese", 12.01, 19.1, 4.92, 55.16, 12.16, 4.23, 6.7, 13.46, 22.89, null, null, 1.28, 11.4, 10.21, 4.3, 0.84, 1.16, 5.84, 1.82, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 0, 59, true, "main"],
    ["PT/NO", "facebook/xglm-2.9B", 0, "1.0_maltese", 11.87, 17.64, 6.1, 48.6, 8.7, 4.2, 6.45, 14.74, 23.17, null, null, 1.21, 16.24, 10.22, 4.34, 1.4, 1.85, 6.26, 2.2, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 0, 10, true, "main"],
    ["PT/PT", "BSC-LT/salamandra-2b", 0, "1.0_maltese", 11.86, 14.81, 8.92, 39.15, 7.06, 4.2, 7.98, 6.44, 24, 0.49, 10.97, 3.51, 22.43, 6.93, 4.86, 0, 0, 7.67, 4.25, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 2.25, 24, true, "main"],
    ["PT/NO", "bigscience/bloom-1b1", 0, "1.0_english", 11.76, 21.42, 2.1, 56.14, 15.85, 9.98, 10.69, 13.66, 22.19, null, null, 0, 0.06, 10.45, 5.37, 0, 0, 0.02, 0, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.06, 64, true, "main"],
    ["PT/NO", "facebook/xglm-1.7B", 0, "1.0_maltese", 11.71, 17.04, 6.38, 42.07, 14.23, 4.2, 6.48, 12.47, 22.78, null, null, 1.43, 16.95, 10.24, 4.51, 2.35, 2.59, 7.47, 2.12, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 0, 20, true, "main"],
    ["PT/NO", "bigscience/bloomz-3b", 0, "1.0_maltese", 11.46, 22.32, 0.61, 39.36, 26.37, 6.31, 20.73, 15.12, 26, null, null, 0, 0.19, 2.77, 2.2, 0, 0, 0.12, 0.07, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 3, 81, true, "main"],
    ["PT/NO", "bigscience/bloom-3b", 0, "1.0_english", 11.37, 20.61, 2.13, 32.86, 30.61, 5.4, 15.63, 14.34, 24.83, null, null, 0, 0.26, 10.36, 5.37, 0, 0, 0.01, 0.01, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 3, 93, true, "main"],
    ["PT/NO", "facebook/xglm-4.5B", 0, "1.0_maltese", 11.26, 16.3, 6.23, 32.95, 13.28, 4.27, 8.97, 14.43, 23.89, null, null, 1.87, 18.76, 10.57, 4.65, 0.45, 0.42, 4.09, 1.39, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 5.08, 20, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-9B", 0, "1.0_maltese", 11.26, 20.03, 2.49, 25.97, 22.61, 4.2, 3.75, 11.43, 52.22, 0, 0, 0, 0.78, 11.66, 6.48, 0, 0, 0, 0, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 9.15, 155, true, "main"],
    ["PT/NO", "bigscience/bloomz-1b7", 0, "1.0_maltese", 11.19, 21.64, 0.74, 39.36, 30.3, 7.07, 16.2, 15.36, 21.56, null, null, 0, 0.04, 3.58, 2.44, 0, 0, 0.09, 0.07, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.72, 23, true, "main"],
    ["PT/NO", "facebook/xglm-1.7B", 0, "1.0_english", 10.81, 19.57, 2.04, 40.87, 16.58, 5.46, 18.89, 12.66, 22.94, null, null, 0, 0.04, 10.13, 4.93, 0.04, 0.03, 0, 0, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 0, 20, true, "main"],
    ["PT/NO", "bigscience/bloom-1b7", 0, "1.0_english", 10.71, 19.22, 2.21, 50.14, 5.32, 9.05, 11.66, 12.77, 26.37, null, null, 0, 0, 10.94, 5.83, 0, 0, 0.17, 0.09, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.72, 122, true, "main"],
    ["IT/?", "meta-llama/Llama-2-7b-chat-hf", 0, "1.0_maltese", 10.59, 17.37, 3.82, 42.32, 2.79, 3.73, 15.8, 11.77, 27.78, null, null, 0, 1.93, 12.59, 5.82, 0, 0, 9.55, 4.57, "instruction-tuned", "unknown", "?", "LlamaForCausalLM", "?", "llama2", 6.74, 4670, true, "main"],
    ["PT/PT", "utter-project/EuroLLM-1.7B", 0, "1.0_english", 10.48, 18.78, 2.18, 28.6, 27.52, 4.2, 17.36, 11.87, 23.11, 0, 0, 0, 0, 10.9, 5.5, 0, 0, 0, 0, "pre-trained", "pre-training", 35, "LlamaForCausalLM", "?", "apache-2.0", 0, 104, true, "main"],
    ["PT/?", "google/gemma-2-2b-it", 0, "1.0_maltese", 10.44, 18.57, 2.31, 25.97, 4.68, 11.27, 16.17, 9.99, 43.33, 0, 0, 0, 0, 11.55, 6.15, 0, 0, 0.04, 0.02, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 2.61, 1240, true, "main"],
    ["IT/NO", "MBZUAI/bactrian-x-bloom-7b1-lora", 0, "1.0_maltese", 10.44, 18.13, 2.74, 48.1, 14.9, 4.2, 7.52, 9.95, 24.11, null, null, 0.01, 1.12, 7.98, 2.59, 0, 0, 9.81, 4.59, "instruction-tuned", "none", 77, "BloomForCausalLM", "?", "mit", 0, "?", true, "main"],
    ["PT/NO", "facebook/xglm-2.9B", 0, "1.0_english", 10.34, 18.35, 2.33, 32.68, 8.94, 9.72, 21.01, 14.35, 23.41, null, null, 0, 0.93, 10.62, 4.92, 0, 0, 0.39, 0.1, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 0, 10, true, "main"],
    ["PT/?", "meta-llama/Llama-2-13b-hf", 0, "1.0_english", 10.28, 18.3, 2.26, 32.86, 5.18, 8.67, 14.97, 16.37, 31.78, null, null, 0, 0, 11.29, 4.74, 0, 0, 0, 0, "pre-trained", "unknown", "?", "LlamaForCausalLM", "?", "llama2", 13.02, 618, true, "main"],
    ["PT/PT", "google/mt5-xxl", 0, "1.0_maltese", 10.27, 15.45, 5.1, 25.97, 15.16, 4.87, 4.16, 14.3, 28.22, null, null, 0.08, 9.87, 8.64, 2.99, 4.24, 4.44, 7.27, 2.53, "pre-trained", "pre-training", 101, "T5ForConditionalGeneration", "?", "apache-2.0", 0, 70, true, "main"],
    ["PT/NO", "bigscience/bloomz-560m", 0, "1.0_maltese", 10.23, 19.86, 0.6, 48.25, 17.81, 3.73, 10.64, 15.07, 23.67, null, null, 0, 0.73, 1.96, 1.35, 0, 0, 0.66, 0.31, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 0.56, 131, true, "main"],
    ["PT/NO", "ai-forever/mGPT", 0, "1.0_english", 10.22, 16.38, 4.06, 32.99, 10.14, 12.53, 12.04, 7.9, 22.67, null, null, 0.27, 11.9, 7.74, 3.03, 0.16, 0.55, 0.42, 0.13, "pre-trained", "none", 61, "GPT2LMHeadModel", "?", "apache-2.0", 0, 270, true, "main"],
    ["PT/PT", "google/mt5-xxl", 0, "1.0_english", 10.18, 15.46, 4.9, 25.97, 14.59, 4.87, 4.05, 13.96, 29.33, null, null, 0.11, 9.42, 8.41, 3.05, 4.28, 4.34, 6.79, 2.34, "pre-trained", "pre-training", 101, "T5ForConditionalGeneration", "?", "apache-2.0", 0, 70, true, "main"],
    ["PT/NO", "facebook/xglm-4.5B", 0, "1.0_english", 10.17, 18.21, 2.14, 26.65, 5.83, 11.69, 22.11, 18.24, 24.72, null, null, 0, 0.05, 10.61, 4.84, 0, 0, 0.06, 0.03, "pre-trained", "none", 30, "XGLMForCausalLM", "?", "mit", 5.08, 20, true, "main"],
    ["PT/?", "google/gemma-2-2b", 0, "1.0_maltese", 10.13, 18.08, 2.19, 25.97, 21.84, 7.33, 9.14, 14.11, 30.11, 0, 0.06, 0, 0.01, 10.18, 4.66, 0.24, 0.35, 0.79, 0.33, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 2.61, 610, true, "main"],
    ["PT/NO", "DAMO-NLP-MT/polylm-1.7b", 0, "1.0_english", 10.11, 17.03, 3.18, 46.29, 3.45, 11.97, 5.38, 12.08, 23, null, null, 0.1, 7.74, 8.19, 3.74, 0, 0, 0.01, 0, "pre-trained", "none", 18, "GPT2LMHeadModel", "?", "apache-2.0", 0, 11, true, "main"],
    ["PT/NO", "DAMO-NLP-MT/polylm-13b", 0, "1.0_maltese", 10.06, 17.76, 2.36, 50.43, 5.8, 9.36, 6.18, 11.71, 23.11, null, null, 0, 0.72, 11.07, 5.63, 0, 0, 0.18, 0.03, "pre-trained", "none", 18, "PolyLMHeadModel", "?", "apache-2.0", 0, 53, true, "main"],
    ["PT/?", "google/gemma-2-2b", 0, "1.0_english", 9.96, 17.85, 2.06, 25.97, 27.04, 11.24, null, 10.2, 32.67, 0, 0.04, 0, 0.02, 10.01, 4.26, 0.17, 0.21, 0.01, 0, "pre-trained", "unknown", "?", "Gemma2ForCausalLM", "?", "gemma", 2.61, 610, true, "main"],
    ["PT/NO", "bigscience/bloom-1b7", 0, "1.0_maltese", 9.91, 15.56, 4.26, 39.36, 6.13, 4.2, 5.11, 14.2, 24.33, null, null, 0.38, 11.51, 9.63, 4.62, 0, 0, 0.81, 0.16, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.72, 122, true, "main"],
    ["PT/NO", "bigscience/bloom-560m", 0, "1.0_maltese", 9.86, 16.27, 3.45, 45.2, 5.62, 4.52, 2.99, 16.69, 22.61, null, null, 0.22, 9.01, 7.97, 3.56, 0, 0, 1.49, 0.26, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 0.56, 369, true, "main"],
    ["PT/NO", "bigscience/bloomz-1b1", 0, "1.0_maltese", 9.85, 18.4, 1.3, 39.36, 14.44, 4.2, 16.68, 14, 21.72, null, null, 0.31, 2.48, 3.15, 2.31, 0, 0, 2.55, 0.87, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 1.06, 33, true, "main"],
    ["PT/NO", "bigscience/bloom-3b", 0, "1.0_maltese", 9.77, 17.24, 2.3, 39.36, 19, 4.2, 3.74, 14.62, 22.5, null, null, 0.04, 1.6, 9.75, 4.64, 0, 0, 0.79, 0.14, "pre-trained", "none", 46, "BloomForCausalLM", "?", "bigscience-bloom-rail-1.0", 3, 93, true, "main"],
    ["IT/NO", "MBZUAI/bactrian-x-bloom-7b1-lora", 0, "1.0_english", 9.67, 16.81, 2.53, 25.97, 14.83, 12.34, 15.21, 10.84, 21.67, null, null,
      • 0,
      • 0.17,
      • 10.36,
      • 5.12,
      • 0,
      • 0,
      • 5.87,
      • 2.13,
      • "instruction-tuned",
      • "none",
      • 77,
      • "BloomForCausalLM",
      • "?",
      • "mit",
      • 0,
      • "?",
      • true,
      • "main"
      ],
    • [
      • "IT/NO",
      • "<a target="_blank" href="https://huggingface.co/MBZUAI/bactrian-x-llama-13b-lora" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MBZUAI/bactrian-x-llama-13b-lora</a>",
      • 0,
      • "1.0_maltese",
      • 9.67,
      • 16.11,
      • 3.22,
      • 45.66,
      • 2.79,
      • 8.93,
      • 5.46,
      • 10.53,
      • 23.28,
      • null,
      • null,
      • 0.63,
      • 6.1,
      • 8.01,
      • 4.06,
      • 0.19,
      • 0.24,
      • 4.83,
      • 1.78,
      • "instruction-tuned",
      • "none",
      • 52,
      • "LlamaForCausalLM",
      • "?",
      • "mit",
      • 0,
      • 3,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/facebook/xglm-7.5B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">facebook/xglm-7.5B</a>",
      • 0,
      • "1.0_english",
      • 9.66,
      • 17.26,
      • 2.07,
      • 29.92,
      • 10.19,
      • 6.36,
      • 19.31,
      • 14.89,
      • 22.89,
      • null,
      • null,
      • 0,
      • 0.01,
      • 10.23,
      • 4.78,
      • 0,
      • 0,
      • 0.22,
      • 0.11,
      • "pre-trained",
      • "none",
      • 30,
      • "XGLMForCausalLM",
      • "?",
      • "mit",
      • 0,
      • 59,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/ai-forever/mGPT" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">ai-forever/mGPT</a>",
      • 0,
      • "1.0_maltese",
      • 9.53,
      • 15.18,
      • 3.88,
      • 42.6,
      • 8.59,
      • 4.2,
      • 5.54,
      • 7.73,
      • 22.44,
      • null,
      • null,
      • 0.5,
      • 12.05,
      • 6.84,
      • 2.96,
      • 0.15,
      • 0.27,
      • 0.7,
      • 0.24,
      • "pre-trained",
      • "none",
      • 61,
      • "GPT2LMHeadModel",
      • "?",
      • "apache-2.0",
      • 0,
      • 270,
      • true,
      • "main"
      ],
    • [
      • "IT/NO",
      • "<a target="_blank" href="https://huggingface.co/CohereForAI/aya-23-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">CohereForAI/aya-23-8B</a>",
      • 0,
      • "1.0_maltese",
      • 9.51,
      • 16.01,
      • 3.02,
      • 45.74,
      • 4.78,
      • 4.2,
      • 7.37,
      • 9.86,
      • 24.11,
      • null,
      • null,
      • 0.13,
      • 4.04,
      • 10.87,
      • 5.01,
      • 0.01,
      • 0.01,
      • 0.41,
      • 0.17,
      • "instruction-tuned",
      • "none",
      • 23,
      • "CohereForCausalLM",
      • "?",
      • "cc-by-nc-4.0",
      • 8.03,
      • 425,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/bigscience/bloom-1b1" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">bigscience/bloom-1b1</a>",
      • 0,
      • "1.0_maltese",
      • 9.42,
      • 15.67,
      • 3.18,
      • 39.15,
      • 9.53,
      • 4.2,
      • 3.59,
      • 15.19,
      • 22.33,
      • null,
      • null,
      • 0.15,
      • 6.65,
      • 9.09,
      • 4.07,
      • 0,
      • 0,
      • 0.63,
      • 0.14,
      • "pre-trained",
      • "none",
      • 46,
      • "BloomForCausalLM",
      • "?",
      • "bigscience-bloom-rail-1.0",
      • 1.06,
      • 64,
      • true,
      • "main"
      ],
    • [
      • "PT/?",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-2-7b-hf" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-2-7b-hf</a>",
      • 0,
      • "1.0_maltese",
      • 9.35,
      • 15.9,
      • 2.8,
      • 49.25,
      • 2.78,
      • 3.79,
      • 7.73,
      • 7.46,
      • 24.39,
      • null,
      • null,
      • 0,
      • 1.17,
      • 11.2,
      • 4.8,
      • 0,
      • 0,
      • 6.35,
      • 1.65,
      • "pre-trained",
      • "unknown",
      • "?",
      • "LlamaForCausalLM",
      • "?",
      • "llama2",
      • 6.74,
      • 2213,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/bigscience/bloom-560m" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">bigscience/bloom-560m</a>",
      • 0,
      • "1.0_english",
      • 9.29,
      • 15.97,
      • 2.6,
      • 26.65,
      • 11.08,
      • 9.6,
      • 8.93,
      • 16.44,
      • 23.15,
      • null,
      • null,
      • 0.28,
      • 3.86,
      • 9.14,
      • 4.56,
      • 0,
      • 0,
      • 0,
      • 0,
      • "pre-trained",
      • "none",
      • 46,
      • "BloomForCausalLM",
      • "?",
      • "bigscience-bloom-rail-1.0",
      • 0.56,
      • 369,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/facebook/xglm-564M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">facebook/xglm-564M</a>",
      • 0,
      • "1.0_english",
      • 9.24,
      • 16.57,
      • 1.91,
      • 29.44,
      • 17.15,
      • 7.52,
      • 7.09,
      • 15.14,
      • 23.11,
      • null,
      • null,
      • 0,
      • 0,
      • 9.54,
      • 4.29,
      • 0.02,
      • 0.01,
      • 0.01,
      • 0.01,
      • "pre-trained",
      • "none",
      • 30,
      • "XGLMForCausalLM",
      • "?",
      • "mit",
      • 0,
      • 53,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-large</a>",
      • 0,
      • "1.0_english",
      • 9.2,
      • 12.64,
      • 5.76,
      • 28.25,
      • 6.68,
      • 4.87,
      • 2.11,
      • 11.01,
      • 22.89,
      • null,
      • null,
      • 0.08,
      • 9.93,
      • 11.07,
      • 4.33,
      • 3.18,
      • 4.4,
      • 8.79,
      • 3.42,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 101,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/DAMO-NLP-MT/polylm-1.7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DAMO-NLP-MT/polylm-1.7b</a>",
      • 0,
      • "1.0_maltese",
      • 9.15,
      • 15.12,
      • 3.18,
      • 39.36,
      • 11.44,
      • 3.73,
      • 3.82,
      • 9.5,
      • 22.89,
      • null,
      • null,
      • 0.1,
      • 8.18,
      • 7.5,
      • 3.31,
      • 0,
      • 0,
      • 1.6,
      • 0.21,
      • "pre-trained",
      • "none",
      • 18,
      • "GPT2LMHeadModel",
      • "?",
      • "apache-2.0",
      • 0,
      • 11,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/MaLA-LM/mala-500-10b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MaLA-LM/mala-500-10b</a>",
      • 0,
      • "1.0_english",
      • 9.08,
      • 14.76,
      • 3.39,
      • 25.97,
      • 21.37,
      • 7.45,
      • 2.27,
      • 3.6,
      • 27.89,
      • null,
      • null,
      • 0,
      • 7.28,
      • 9.68,
      • 3.75,
      • 0,
      • 0,
      • 0,
      • 0,
      • "pre-trained",
      • "pre-training",
      • 511,
      • "LlamaForCausalLM",
      • "?",
      • "llama2",
      • 0,
      • 6,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/DAMO-NLP-MT/polylm-13b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DAMO-NLP-MT/polylm-13b</a>",
      • 0,
      • "1.0_english",
      • 9.04,
      • 15.06,
      • 3.03,
      • 25.97,
      • 3.84,
      • 5.47,
      • 20.35,
      • 12.54,
      • 22.17,
      • null,
      • null,
      • 0.14,
      • 3.67,
      • 11.49,
      • 6.03,
      • 0,
      • 0,
      • 0,
      • 0,
      • "pre-trained",
      • "none",
      • 18,
      • "PolyLMHeadModel",
      • "?",
      • "apache-2.0",
      • 0,
      • 53,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/MaLA-LM/mala-500-10b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MaLA-LM/mala-500-10b</a>",
      • 0,
      • "1.0_maltese",
      • 9.04,
      • 13.46,
      • 4.62,
      • 25.97,
      • 15.78,
      • 4.2,
      • 1.49,
      • 5.45,
      • 27.89,
      • null,
      • null,
      • 0.01,
      • 12.06,
      • 9.81,
      • 4.54,
      • 0,
      • 0,
      • 3.65,
      • 1.2,
      • "pre-trained",
      • "pre-training",
      • 511,
      • "LlamaForCausalLM",
      • "?",
      • "llama2",
      • 0,
      • 6,
      • true,
      • "main"
      ],
    • [
      • "PT/NO",
      • "<a target="_blank" href="https://huggingface.co/bigscience/bloom-7b1" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">bigscience/bloom-7b1</a>",
      • 0,
      • "1.0_maltese",
      • 8.97,
      • 15.97,
      • 1.96,
      • 32.95,
      • 15.26,
      • 4.2,
      • 7.45,
      • 13.74,
      • 22.22,
      • null,
      • null,
      • 0,
      • 0.11,
      • 9.61,
      • 4.41,
      • 0,
      • 0,
      • 0.24,
      • 0.1,
      • "pre-trained",
      • "none",
      • 46,
      • "BloomForCausalLM",
      • "?",
      • "bigscience-bloom-rail-1.0",
      • 7.07,
      • 203,
      • true,
      • "main"
      ],
    • [
      • "IT/NO",
      • "<a target="_blank" href="https://huggingface.co/MBZUAI/bactrian-x-llama-13b-lora" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MBZUAI/bactrian-x-llama-13b-lora</a>",
      • 0,
      • "1.0_english",
      • 8.89,
      • 15.44,
      • 2.33,
      • 36.75,
      • 9.09,
      • 5.73,
      • 7.03,
      • 11.29,
      • 22.78,
      • null,
      • null,
      • 0,
      • 0.59,
      • 10.86,
      • 5.54,
      • 0.07,
      • 0.07,
      • 0.83,
      • 0.15,
      • "instruction-tuned",
      • "none",
      • 52,
      • "LlamaForCausalLM",
      • "?",
      • "mit",
      • 0,
      • 3,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-large</a>",
      • 0,
      • "1.0_maltese",
      • 8.79,
      • 11.52,
      • 6.05,
      • 25.97,
      • 2.43,
      • 4.87,
      • 2.12,
      • 10.85,
      • 22.89,
      • null,
      • null,
      • 0.09,
      • 11.02,
      • 11.45,
      • 4.86,
      • 3.11,
      • 4.39,
      • 8.81,
      • 3.39,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 101,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/utter-project/EuroLLM-1.7B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">utter-project/EuroLLM-1.7B</a>",
      • 0,
      • "1.0_maltese",
      • 8.77,
      • 13.51,
      • 4.03,
      • 26.98,
      • 3.12,
      • 4.2,
      • 13.72,
      • 10.17,
      • 22.89,
      • 0,
      • 0.15,
      • 1.12,
      • 9.31,
      • 10.67,
      • 5.23,
      • 0,
      • 0,
      • 0,
      • 0,
      • "pre-trained",
      • "pre-training",
      • 35,
      • "LlamaForCausalLM",
      • "?",
      • "apache-2.0",
      • 0,
      • 104,
      • true,
      • "main"
      ],
    • [
      • "PT/?",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-2-13b-hf" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-2-13b-hf</a>",
      • 0,
      • "1.0_maltese",
      • 8.63,
      • 14.1,
      • 3.17,
      • 25.97,
      • 11.12,
      • 4.2,
      • 9.61,
      • 9.7,
      • 24,
      • null,
      • null,
      • 0,
      • 0.5,
      • 11.6,
      • 5.06,
      • 0,
      • 0,
      • 9.21,
      • 3.74,
      • "pre-trained",
      • "unknown",
      • "?",
      • "LlamaForCausalLM",
      • "?",
      • "llama2",
      • 13.02,
      • 618,
      • true,
      • "main"
      ],
    • [
      • "IT/?",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-2-13b-chat-hf" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-2-13b-chat-hf</a>",
      • 0,
      • "1.0_maltese",
      • 8.26,
      • 13.41,
      • 3.1,
      • 25.97,
      • 4.59,
      • 4.2,
      • 10.32,
      • 12.03,
      • 23.33,
      • null,
      • null,
      • 0,
      • 0.01,
      • 11.86,
      • 6.67,
      • 0,
      • 0,
      • 6.1,
      • 3.65,
      • "instruction-tuned",
      • "unknown",
      • "?",
      • "LlamaForCausalLM",
      • "?",
      • "llama2",
      • 13.02,
      • 1105,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-base" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-base</a>",
      • 0,
      • "1.0_english",
      • 8.17,
      • 12.9,
      • 3.44,
      • 31.58,
      • 2.78,
      • 4.95,
      • 2.89,
      • 7.29,
      • 27.89,
      • null,
      • null,
      • 0.04,
      • 4.15,
      • 4.76,
      • 2.93,
      • 0.33,
      • 0.86,
      • 8.71,
      • 7.43,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 253,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-xl" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-xl</a>",
      • 0,
      • "1.0_english",
      • 7.97,
      • 11.48,
      • 4.46,
      • 26.98,
      • 4.31,
      • 3.73,
      • 2.08,
      • 9.88,
      • 21.89,
      • null,
      • null,
      • 0.04,
      • 9.01,
      • 6.26,
      • 2.4,
      • 4.29,
      • 4.46,
      • 7.39,
      • 2.58,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 25,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-xl" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-xl</a>",
      • 0,
      • "1.0_maltese",
      • 7.95,
      • 11.12,
      • 4.77,
      • 25.97,
      • 2.78,
      • 3.73,
      • 2.19,
      • 10.17,
      • 21.89,
      • null,
      • null,
      • 0.03,
      • 7.95,
      • 9.39,
      • 4.14,
      • 4.19,
      • 4.34,
      • 6.63,
      • 2.19,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 25,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-base" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-base</a>",
      • 0,
      • "1.0_maltese",
      • 7.66,
      • 12.1,
      • 3.22,
      • 26.98,
      • 2.78,
      • 4.87,
      • 2.98,
      • 7.13,
      • 27.89,
      • null,
      • null,
      • 0.05,
      • 4.43,
      • 3.57,
      • 2.22,
      • 0.31,
      • 0.82,
      • 8.55,
      • 7.3,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 253,
      • true,
      • "main"
      ],
    • [
      • "IT/NO",
      • "<a target="_blank" href="https://huggingface.co/MBZUAI/bactrian-x-llama-7b-lora" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MBZUAI/bactrian-x-llama-7b-lora</a>",
      • 0,
      • "1.0_english",
      • 7.62,
      • 14.22,
      • 1.02,
      • 25.97,
      • 7.97,
      • 11.26,
      • 6.93,
      • 7.43,
      • 25.74,
      • null,
      • null,
      • 0.01,
      • 2.38,
      • 2.64,
      • 0.8,
      • 1.2,
      • 0.01,
      • 1.36,
      • 0.09,
      • "instruction-tuned",
      • "none",
      • 52,
      • "LlamaForCausalLM",
      • "?",
      • "mit",
      • 0,
      • 4,
      • true,
      • "main"
      ],
    • [
      • "IT/NO",
      • "<a target="_blank" href="https://huggingface.co/MBZUAI/bactrian-x-llama-7b-lora" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MBZUAI/bactrian-x-llama-7b-lora</a>",
      • 0,
      • "1.0_maltese",
      • 7.47,
      • 14.25,
      • 0.69,
      • 25.65,
      • 6.21,
      • 12.11,
      • 6.28,
      • 7.54,
      • 27.72,
      • null,
      • null,
      • 0.01,
      • 1.87,
      • 1.45,
      • 0.81,
      • 1.17,
      • 0.03,
      • 1.22,
      • 0.1,
      • "instruction-tuned",
      • "none",
      • 52,
      • "LlamaForCausalLM",
      • "?",
      • "mit",
      • 0,
      • 4,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-small" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-small</a>",
      • 0,
      • "1.0_english",
      • 6.74,
      • 11.5,
      • 1.98,
      • 25.97,
      • 2.43,
      • 3.73,
      • 6.23,
      • 8.72,
      • 21.89,
      • 0.12,
      • 1.53,
      • 0,
      • 1.48,
      • 1.43,
      • 0.64,
      • 0.21,
      • 0.52,
      • 6.29,
      • 4.92,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 173,
      • true,
      • "main"
      ],
    • [
      • "PT/PT",
      • "<a target="_blank" href="https://huggingface.co/google/mt5-small" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/mt5-small</a>",
      • 0,
      • "1.0_maltese",
      • 6.68,
      • 11.48,
      • 1.88,
      • 25.97,
      • 2.43,
      • 3.73,
      • 6.09,
      • 8.77,
      • 21.89,
      • 0.01,
      • 1.31,
      • 0.01,
      • 1.47,
      • 1.43,
      • 0.64,
      • 0.2,
      • 0.5,
      • 6.06,
      • 4.68,
      • "pre-trained",
      • "pre-training",
      • 101,
      • "MT5ForConditionalGeneration",
      • "?",
      • "apache-2.0",
      • 0,
      • 173,
      • true,
      • "main"
      ]
    ],
  • "metadata": null
}
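
The payload above is the raw table behind the leaderboard: `headers` names each column and every entry of `data` is one row, with `null` marking metrics that were not run for that model (e.g. the OPUS-100 columns for most decoder-only rows) and `"?"` marking unknown metadata. As a minimal sketch of how one might work with it, the snippet below re-ranks the rows by the overall average. It assumes the payload has been saved as valid JSON (with the inner quotes of the HTML anchors escaped) to a local file; the filename `melabench_leaderboard.json` is hypothetical, not part of the leaderboard itself.

```python
import json
import re

# Hypothetical local copy of the leaderboard payload shown above,
# saved as valid JSON (inner quotes in the HTML anchors escaped).
with open("melabench_leaderboard.json", encoding="utf-8") as f:
    payload = json.load(f)

headers = payload["headers"]
# Pair each row's values with the column names.
rows = [dict(zip(headers, row)) for row in payload["data"]]

# Header names carry emoji suffixes, so match by prefix rather than exact text.
avg_col = next(h for h in headers if h.startswith("Average (All)"))

# Rank by overall average, treating missing (null/None) scores as 0.
for row in sorted(rows, key=lambda r: r[avg_col] or 0.0, reverse=True)[:5]:
    # "Model" cells hold raw HTML anchors; strip the markup for display.
    name = re.sub(r"<[^>]+>", "", row["Model"])
    print(f"{row[avg_col]:6.2f}  {name}  ({row['Version']})")
```

Matching columns by prefix (`startswith`) rather than exact name is a deliberate hedge: the rendered headers include decorative suffixes ("⬆️", "🧠", "✍️") that may not survive copy-paste or future renames, while the leading text is stable.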