{
    "chat": {
        "open-mistral-7b": {
            "description": "Our very first. A 7B transformer model, fast-deployed and easily customisable. Small, yet very powerful for a variety of use cases. English and code.",
            "inputTokens": 32000,
            "inputTokenPrice": 0.25,
            "outputTokens": 8191,
            "outputTokenPrice": 0.25
        },
        "open-mixtral-8x7b": {
            "description": "A 7B sparse Mixture-of-Experts (SMoE). Uses 12B active parameters out of 45B total. Fluent in English, French, Italian, German, Spanish, and strong in code.",
            "inputTokens": 32000,
            "inputTokenPrice": 0.7,
            "outputTokens": 8191,
            "outputTokenPrice": 0.7
        },
        "open-mixtral-8x22b": {
            "description": "A 22B sparse Mixture-of-Experts (SMoE). Uses only 39B active parameters out of 141B. Fluent in English, French, Italian, German, Spanish, and strong in code.",
            "inputTokens": 64000,
            "inputTokenPrice": 2,
            "outputTokens": 8191,
            "outputTokenPrice": 6
        },
        "mistral-small-latest": {
            "description": "Cost-efficient reasoning for low-latency workloads. Fluent in English, French, Italian, German, Spanish, and strong in code.",
            "inputTokens": 32000,
            "inputTokenPrice": 1,
            "outputTokens": 8191,
            "outputTokenPrice": 3
        },
        "mistral-medium-latest": {
            "description": "Balanced reasoning for a wide range of tasks. Fluent in English, French, Italian, German, Spanish, and strong in code.",
            "inputTokens": 32000,
            "inputTokenPrice": 2.7,
            "outputTokens": 8191,
            "outputTokenPrice": 8.1
        },
        "mistral-large-latest": {
            "description": "Top-tier reasoning for high-complexity tasks. Fluent in English, French, Italian, German, Spanish, and strong in code.",
            "inputTokens": 128000,
            "inputTokenPrice": 3,
            "outputTokens": 128000,
            "outputTokenPrice": 9
        },
        "mistral-tiny": {
            "description": "",
            "inputTokens": 32000,
            "inputTokenPrice": 0.25,
            "outputTokens": 8191,
            "outputTokenPrice": 0.25
        },
        "mistral-small": {
            "description": "",
            "inputTokens": 32000,
            "inputTokenPrice": 1,
            "outputTokens": 8191,
            "outputTokenPrice": 3
        },
        "mistral-medium": {
            "description": "",
            "inputTokens": 32000,
            "inputTokenPrice": 2.7,
            "outputTokens": 8191,
            "outputTokenPrice": 8.1
        },
        "codestral-latest": {
            "description": "Mistral's code generation model, trained on more than 80 programming languages and designed for code completion and generation tasks.",
            "inputTokens": 32000,
            "inputTokenPrice": 1,
            "outputTokens": 8191,
            "outputTokenPrice": 3
        },
        "open-mistral-nemo": {
            "description": "A 12B model built in partnership with NVIDIA, with a 128k-token context window and strong multilingual capabilities. See https:\/\/mistral.ai\/technology\/.",
            "inputTokens": 128000,
            "inputTokenPrice": 0.3,
            "outputTokens": 128000,
            "outputTokenPrice": 0.3
        },
        "open-codestral-mamba": {
            "description": "A Mamba2 language model designed for code generation, with a context window of up to 256k tokens. See https:\/\/mistral.ai\/technology\/.",
            "inputTokens": 256000,
            "inputTokenPrice": 0.25,
            "outputTokens": 256000,
            "outputTokenPrice": 0.25
        },
        "codestral-mamba-latest": {
            "description": "A Mamba2 language model designed for code generation, with a context window of up to 256k tokens. See https:\/\/mistral.ai\/technology\/.",
            "inputTokens": 256000,
            "inputTokenPrice": 0.25,
            "outputTokens": 256000,
            "outputTokenPrice": 0.25
        }
    },
    "embedding": {
        "mistral-embed": {
            "description": "State-of-the-art semantic embeddings for extracting representations of text extracts. English only for now.",
            "inputTokens": 8192,
            "inputTokenPrice": 0.1,
            "dimensions": 1024
        }
    }
}