{
  "slug": "mistral-ai-mixtral",
  "name": "Mistral AI Mixtral",
  "description": "Mixtral is a family of sparse Mixture-of-Experts (MoE) large language models developed by the French company Mistral AI. It is designed for high efficiency, using only a fraction of its total parameters for each token during inference, allowing it to match or exceed the performance of much larger dense models.",
  "url": "https://optimly.ai/brand/mistral-ai-mixtral",
  "logoUrl": "",
  "baiScore": 76,
  "archetype": "Challenger",
  "category": "Artificial Intelligence",
  "categorySlug": null,
  "keyFacts": [],
  "aiReadiness": [],
  "competitors": [],
  "inboundCompetitors": [
    {
      "slug": "bigscience-bloom",
      "name": "BigScience BLOOM"
    }
  ],
  "aiAlternatives": [],
  "parentBrand": {
    "slug": "mistral-ai",
    "name": "Mistral AI"
  },
  "subBrands": [],
  "updatedAt": "2026-04-11T14:37:44.099+00:00",
  "verifiedVitals": {
    "website": "https://mistral.ai",
    "founded": "2023",
    "headquarters": "Paris, France",
    "pricing_model": "Usage-based (API) and Free (Open weights download)",
    "core_products": "Mixtral 8x7B, Mixtral 8x22B, API access via La Plateforme.",
    "key_differentiator": "Leading the market in sparse Mixture-of-Experts (MoE) architecture, providing GPT-4 class efficiency in an open-weights format.",
    "target_markets": "Developers, Enterprise AI, Research Institutions, Local Hosting Enthusiasts",
    "employee_count": "50-100",
    "funding_stage": "Series B / Venture Backed",
    "subcategory": "Large Language Models (LLM)"
  },
  "timestamp": 1776026110380
}