{
  "slug": "bigscience-bloom",
  "name": "BigScience BLOOM",
  "description": "BLOOM is a 176-billion parameter open-access multilingual large language model. Developed by the BigScience research collective and coordinated by Hugging Face, it was trained on the Jean Zay supercomputer to promote open science in AI research.",
  "url": "https://optimly.ai/brand/bigscience-bloom",
  "logoUrl": "",
  "baiScore": 82,
  "archetype": "Challenger",
  "category": "Artificial Intelligence",
  "categorySlug": null,
  "keyFacts": [],
  "aiReadiness": [],
  "competitors": [
    {
      "slug": "eleutherai-gpt-neojneox",
      "name": "Eleutherai Gpt Neojneox"
    },
    {
      "slug": "hugging-face-platform",
      "name": "Hugging Face Platform"
    },
    {
      "slug": "meta-llama-series",
      "name": "Meta Llama Series"
    },
    {
      "slug": "mistral-ai-mixtral",
      "name": "Mistral Ai Mixtral"
    }
  ],
  "inboundCompetitors": [
    {
      "slug": "eleutherai",
      "name": "EleutherAI"
    }
  ],
  "aiAlternatives": [],
  "parentBrand": {
    "slug": "hugging-face",
    "name": "Hugging Face"
  },
  "subBrands": [],
  "updatedAt": "2026-04-10T19:44:37.165+00:00",
  "verifiedVitals": {
    "website": "huggingface.co/bigscience/bloom",
    "founded": "2021 (Project Start)",
    "headquarters": "Distributed / Paris, France (Hugging Face / CNRS)",
    "pricing_model": "Free (Open Access via Hugging Face)",
    "core_products": "BLOOM Language Model, BLOOMZ (instruction-tuned version)",
    "key_differentiator": "One of the first massive-scale language models developed through a transparent global collaborative effort rather than a private corporation.",
    "target_markets": "Academic researchers, NLP developers, organizations requiring multilingual broad-spectrum data processing.",
    "employee_count": "Not publicly available",
    "funding_stage": "Not publicly available",
    "subcategory": "Large Language Models (LLM)"
  },
  "timestamp": 1776019794560
}