id | name | contextWindow | inputCostPer1kTokens | outputCostPer1kTokens | quality | medianPrice | outputTokensPerSecond | latencyToFirstChunk | averagePercentage | multiChoiceQsPercentage | reasoningPercentage | pythonCodingPercentage | futureCapabilitiesPercentage | gradeSchoolMathPercentage | mathProblemsPercentage | arenaScore | confidenceInterval | votes | organization | license | knowledgeCutoff |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1 | ChatGPT-4o-latest (2024-08-08) | 128000 | 0.00500 | 0.01500 | 77 | 0.00750 | 96.1 | 0.43 | 85.45 | 88.7 | 90.2 | 90.2 | 85.45 | 76.60 | 76.60 | 1317 | +5/-4 | 20599 | OpenAI | Proprietary | 2023/10 |
2 | GPT-4o-2024-08-06 | 128000 | 0.00250 | 0.01000 | 77 | 0.00438 | 103.6 | 0.43 | 85.45 | 88.7 | 90.2 | 90.2 | 85.45 | 76.60 | null | 1262 | +5/-5 | 13603 | OpenAI | Proprietary | 2023/10 |
3 | Gemini-1.5-Pro-Exp-0801 | 128000 | 0.00700 | 0.02100 | 75 | null | null | null | null | null | null | null | null | null | null | 1298 | +5/-4 | 23124 | Google | Proprietary | 2023/11 |
4 | Meta-Llama-3.1-405b-Instruct | 128000 | 0.00150 | 0.00300 | 72 | 0.00450 | 29.5 | 0.68 | 78.45 | 80 | 85 | 85 | 78.45 | 75 | null | 1265 | +4/-5 | 22215 | Meta | Llama 3.1 Community | 2023/12 |
5 | GPT-4o-mini-2024-07-18 | 128000 | 0.00015 | 0.00060 | 71 | 0.00026 | 114.9 | 0.42 | 80.5 | 82 | null | 87.00 | 80.5 | null | 70.20 | 1275 | +4/-4 | 21547 | OpenAI | Proprietary | 2023/10 |
6 | Command-R+ | 128000 | 0.00500 | 0.01500 | 46 | 0.00600 | 65.3 | 0.46 | 60 | 60 | 60 | 60 | 60 | 60 | null | null | null | null | Cohere | Open | null |
7 | Claude 3.5 Sonnet (Alt) | 200000 | 0.00300 | 0.01500 | 57 | 0.00600 | 58.5 | 0.88 | 70 | 70 | 70 | 70 | 70 | 70 | null | null | null | null | Anthropic | Proprietary | null |
8 | Gemini 1.5 Pro | 128000 | 0.00350 | 0.01050 | 72 | 0.00525 | 64.4 | 0.95 | 80.08 | 81.90 | 92.50 | 71.90 | 84 | 91.70 | 58.50 | null | null | null | Google | Proprietary | null |
9 | Gemini 1.5 Flash | 1000000 | 0.00007 | 0.00030 | 60 | 0.00013 | 208.4 | 0.79 | null | 78.90 | null | 89.20 | null | 67.70 | null | null | null | null | Google | Proprietary | null |
10 | GPT-4o-2024-05-13 | 128000 | 0.00500 | 0.01500 | null | null | null | 0.43 | 85.45 | 88.7 | 90.2 | 90.2 | 85.45 | 76.60 | null | 1286 | +3/-3 | 80639 | OpenAI | Proprietary | 2023/10 |
11 | GPT-4 | 8000 | 0.03000 | 0.06000 | null | 0.03750 | 29.3 | 0.59 | 79.45 | 86.40 | 95.30 | 67 | 83.10 | 92 | 52.90 | null | null | null | OpenAI | Proprietary | 2023/03 |
12 | GPT-4-Turbo-2024-04-09 | 128000 | 0.01000 | 0.03000 | 74 | 0.01500 | 35.4 | 0.60 | 85.05 | 86.5 | null | 87.60 | 85.05 | null | 72.20 | 1257 | +3/-2 | 86593 | OpenAI | Proprietary | 2023/12 |
13 | Claude 2.0 | 100000 | 0.01000 | 0.03000 | null | 0.01200 | 40.2 | 1.10 | 75 | 75 | 75 | 75 | 75 | 75 | null | null | null | null | Anthropic | Proprietary | 2023/03 |
14 | Claude 2.1 | 200000 | 0.01000 | 0.03000 | null | 0.01200 | 37.3 | 1.41 | 75 | 75 | 75 | 75 | 75 | 75 | null | null | null | null | Anthropic | Proprietary | 2023/05 |
15 | Claude 3 Opus | 200000 | 0.01500 | 0.07500 | 70 | 0.03000 | 28.0 | 1.77 | 84.83 | 86.80 | 95.40 | 84.90 | 86.80 | 95.00 | 60.10 | 1248 | +2/-2 | 158026 | Anthropic | Proprietary | 2023/08 |
16 | Claude 3.5 Sonnet | 200000 | 0.00300 | 0.01500 | 77 | 0.00600 | 75.6 | 1.36 | 88.38 | 88.70 | 89.00 | 92.00 | 93.10 | 96.40 | 71.10 | 1271 | +3/-3 | 51008 | Anthropic | Proprietary | 2024/04 |
17 | GPT-3.5 Turbo | 16000 | 0.00050 | 0.00150 | 53 | 0.00075 | 90.4 | 0.38 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | OpenAI | Proprietary | 2023/03 |
18 | GPT-3.5 Turbo Instruct | 4000 | 0.00150 | 0.00200 | 55 | 0.00163 | 113.8 | 0.55 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | OpenAI | Proprietary | 2023/03 |
19 | GPT-4-32k | 32000 | 0.06000 | 0.12000 | 78 | 0.09000 | null | null | null | null | null | null | null | null | null | null | null | null | OpenAI | Proprietary | 2023/03 |
20 | Llama 3.1 70B | 128000 | 0.00150 | 0.00300 | 65 | 0.00089 | 55.1 | 0.42 | 75 | 75 | 75 | 75 | 75 | 75 | null | null | null | null | Meta | Open | null |
21 | Llama 3.1 8B | 128000 | 0.00150 | 0.00300 | 53 | 0.00019 | 167.8 | 0.28 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | Meta | Open | null |
22 | Mixtral 8x7B | 32000 | 0.00150 | 0.00300 | null | null | null | null | 59.79 | 70.60 | 84.40 | 40.20 | 60.76 | 74.40 | 28.40 | null | null | null | null | null | null |
23 | Mixtral 8x22B | 65000 | 0.00150 | 0.00300 | 61 | 0.00120 | 59.1 | 0.35 | 70 | 70 | 70 | 70 | 70 | 70 | null | null | null | null | Mistral | Open | null |
24 | Gemini Pro | 32000 | 0.00013 | 0.00038 | null | null | null | null | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | Google | Proprietary | 2023/03 |
25 | Google Palm | 8192 | 0.00150 | 0.00300 | null | null | null | null | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | Google | null | 2022/04 |
26 | GPT-3.5 Turbo 16k | 16000 | 0.00200 | 0.00400 | 55 | null | null | null | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | OpenAI | Proprietary | 2023/03 |
27 | GPT-4-0125-preview | 8000 | 0.03000 | 0.06000 | null | null | null | null | null | null | null | null | null | null | null | 1245 | +3/-3 | 86878 | OpenAI | Proprietary | 2023/12 |
28 | GPT-4-1106-preview | 8000 | 0.03000 | 0.06000 | null | null | null | null | null | null | null | null | null | null | null | 1251 | +3/-2 | 93502 | OpenAI | Proprietary | 2023/04 |
29 | DeepSeek-V2 | 128000 | 0.00150 | 0.00300 | 66 | 0.00017 | 18.4 | 1.16 | 70 | 70 | 70 | 70 | 70 | 70 | null | null | null | null | DeepSeek | Open | 2024/06 |
30 | gpt-3.5-turbo | 16000 | 0.00050 | 0.00150 | 53 | 0.00075 | 90.4 | 0.38 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | OpenAI | Proprietary | 2023/03 |
31 | gpt-3.5-turbo-instruct | 4000 | 0.00150 | 0.00200 | 55 | 0.00163 | 113.8 | 0.55 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | OpenAI | Proprietary | 2023/03 |
32 | gpt-4 | 8000 | 0.03000 | 0.06000 | null | 0.03750 | 29.3 | 0.59 | 79.45 | 86.40 | 95.30 | 67 | 83.10 | 92 | 52.90 | null | null | null | OpenAI | Proprietary | 2023/03 |
33 | gpt-4-turbo | 128000 | 0.01000 | 0.03000 | 74 | 0.01500 | 35.4 | 0.60 | null | null | null | null | null | null | null | null | null | null | OpenAI | Proprietary | 2023/12 |
34 | gpt-4-32k | 32000 | 0.06000 | 0.12000 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | OpenAI | Proprietary | 2023/03 |
35 | gpt-4o | 128000 | 0.00500 | 0.01500 | 77 | 0.00750 | 96.1 | 0.43 | null | 88.7 | null | 90.2 | null | 76.60 | null | null | null | null | OpenAI | Proprietary | 2023/10 |
36 | gpt-4o-mini | 128000 | 0.00015 | 0.00060 | 71 | 0.00026 | 114.9 | 0.42 | null | 82 | null | 87.00 | null | 70.20 | null | null | null | null | OpenAI | Proprietary | 2023/10 |
37 | claude-3-opus | 200000 | 0.01500 | 0.07500 | 70 | 0.03000 | 28.0 | 1.77 | 84.83 | 86.80 | 95.40 | 84.90 | 86.80 | 95.00 | 60.10 | 1248 | +2/-2 | 158026 | Anthropic | Proprietary | 2023/08 |
38 | claude-3-sonnet | 200000 | 0.00300 | 0.01500 | 77 | 0.00600 | 75.6 | 1.36 | 88.38 | 88.70 | 89.00 | 92.00 | 93.10 | 96.40 | 71.10 | 1271 | +3/-3 | 51008 | Anthropic | Proprietary | 2024/04 |
39 | claude-3-haiku | 200000 | 0.00025 | 0.00125 | 54 | 0.00050 | 128.4 | 0.51 | 73.08 | 75.20 | 85.90 | 75.90 | 73.70 | 88.90 | 38.90 | null | null | null | Anthropic | Proprietary | 2023/08 |
40 | Llama 3 Instruct - 8B | 8000 | 0.00150 | 0.00300 | 53 | 0.00019 | 167.8 | 0.28 | 68.40 | 68.40 | 62 | 61 | 68.40 | 79.60 | 30 | null | null | null | Meta | Open | 2023/12 |
41 | Llama 3 Instruct - 70B | 8000 | 0.00150 | 0.00300 | 65 | 0.00089 | 55.1 | 0.42 | 79.23 | 82 | 87 | 81.7 | 81.3 | 93 | 50.40 | null | null | null | Meta | Open | 2023/12 |
42 | Meta-Llama-3.1-405b-Instruct-2 | 8192 | 0.00150 | 0.00300 | 72 | 0.00450 | 29.5 | 0.68 | null | null | null | null | null | null | null | 1265 | +4/-5 | 22215 | Meta | Llama 3.1 Community | 2023/12 |
43 | Mistral 7B | 33000 | 0.00150 | 0.00300 | 24 | 0.00018 | 94.6 | 0.29 | 40 | 40 | 40 | 40 | 40 | 40 | null | null | null | null | Mistral | null | null |
44 | Mixtral 8x7B-2 | 32000 | 0.00150 | 0.00300 | 42 | 0.00050 | 88.5 | 0.32 | 59.79 | 70.60 | 84.40 | 40.20 | 60.76 | 74.40 | 28.40 | null | null | null | Mistral | null | null |
45 | Mixtral 8x22B-2 | 65000 | 0.00150 | 0.00300 | 61 | 0.00120 | 59.1 | 0.35 | null | null | null | null | null | null | null | null | null | null | Mistral | null | null |
46 | command-r | 128000 | 0.00500 | 0.01500 | 36 | 0.00075 | 114.3 | 0.42 | 50 | 50 | 50 | 50 | 50 | 50 | null | null | null | null | Cohere | null | null |
47 | command-r-plus | 128000 | 0.00500 | 0.01500 | 46 | 0.00600 | 65.3 | 0.46 | 60 | 60 | 60 | 60 | 60 | 60 | null | null | null | null | Cohere | null | null |
48 | gemini-pro | 32000 | 0.00013 | 0.00038 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | Google | Proprietary | 2023/03 |
49 | Mistral Large 2 | 128000 | 0.00150 | 0.00300 | 73 | 0.00450 | 41.2 | 0.48 | 75 | 75 | 75 | 75 | 75 | 75 | null | null | null | null | Mistral | null | null |
50 | Mistral NeMo | 128000 | 0.00150 | 0.00300 | 52 | 0.00030 | 139.7 | 0.35 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | Mistral | null | null |
51 | ChatGPT-4o-latest (2024-08-08)-2 | 128000 | 0.00500 | 0.01500 | 77 | 0.00750 | 96.1 | 0.43 | null | 88.7 | null | 90.2 | null | 76.60 | null | 1317 | +5/-4 | 20599 | OpenAI | Proprietary | 2023/10 |
52 | GPT-4o-2024-08-06-2 | 128000 | 0.00250 | 0.01000 | 77 | 0.00438 | 103.6 | 0.43 | null | null | null | null | null | null | null | 1262 | +5/-5 | 13603 | OpenAI | Proprietary | 2023/10 |
53 | command-r-plus-online | 128000 | 0.00500 | 0.01500 | 46 | 0.00600 | 65.3 | 0.46 | 60 | 60 | 60 | 60 | 60 | 60 | null | null | null | null | Cohere | null | null |
54 | glm-4 | 32000 | 0.00150 | 0.00300 | null | 0.00225 | 50.0 | 0.60 | 65 | 65 | 65 | 65 | 65 | 65 | null | null | null | null | THUDM | Apache 2.0 | 2023/12 |
55 | glm-3-turbo | 8192 | 0.00150 | 0.00300 | null | 0.00225 | 50.0 | 0.60 | 60 | 60 | 60 | 60 | 60 | 60 | null | null | null | null | THUDM | Apache 2.0 | 2023/10 |
56 | deepseek-chat | 128000 | 0.00150 | 0.00300 | null | null | null | null | 70 | 70 | 70 | 70 | 70 | 70 | null | null | null | null | DeepSeek | Open | 2024/08 |
57 | claude-3-5-sonnet | 200000 | 0.00300 | 0.01500 | null | null | null | null | null | null | null | null | null | null | null | null | null | null | Anthropic | Proprietary | 2024/04 |
58 | Qwen1.5-0.5B-Chat | 4096 | 0.00010 | 0.00020 | 50 | 0.00015 | 100.0 | 0.50 | 55 | 55 | 55 | 55 | 55 | 55 | null | null | null | null | Alibaba Cloud | Apache 2.0 | 2023/12 |
59 | Qwen1.5-7B-Chat | 8192 | 0.00050 | 0.00100 | null | 0.00075 | 75.0 | 0.60 | 60 | 60 | 60 | 60 | 60 | 60 | null | null | null | null | Alibaba Cloud | Apache 2.0 | 2023/12 |
60 | Yi-34B-Chat | 8192 | 0.00150 | 0.00300 | null | 0.00225 | 50.0 | 0.60 | 70 | 70 | 70 | 70 | 70 | 70 | null | null | null | null | 01.AI | Apache 2.0 | 2023/12 |
61 | Qwen1.5-32B-Chat | 8192 | 0.00150 | 0.00300 | null | 0.00225 | 50.0 | 0.60 | 75 | 75 | 75 | 75 | 75 | 75 | null | null | null | null | Alibaba Cloud | Apache 2.0 | 2023/12 |
62 | Qwen2-72B-Instruct | 128000 | 0.00090 | 0.00090 | null | 0.00090 | 47.0 | 0.39 | 75 | 75 | 75 | 75 | 75 | 75 | null | null | null | null | Alibaba Cloud | Apache 2.0 | 2024/08 |
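
Each row above is a flat record keyed by the header columns (costs are USD per 1k tokens, benchmark columns are percentages, `null` marks missing data). Below is a minimal Python sketch of how the table could be loaded and queried; the file name `models.md` and the price-per-quality ranking are illustrative assumptions, not part of the dataset itself.

```python
# Minimal sketch: parse the markdown table into dicts and rank models by
# blended (median) price per quality point. Assumes the table is saved as
# "models.md" (hypothetical file name); keys come from the header row.

def parse_markdown_table(path):
    with open(path, encoding="utf-8") as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    header = [c.strip() for c in lines[0].strip("|").split("|")]
    rows = []
    for line in lines[2:]:  # skip the header and the ---|--- separator
        cells = [c.strip() for c in line.strip("|").split("|")]
        rows.append(dict(zip(header, cells)))
    return rows

def to_float(value):
    # "null" and empty cells become None; "76.60" -> 76.6
    if value in ("", "null"):
        return None
    return float(value.rstrip("%"))

if __name__ == "__main__":
    models = parse_markdown_table("models.md")
    priced = [
        m for m in models
        if to_float(m["medianPrice"]) and to_float(m["quality"])
    ]
    # Lower is better: blended $ per 1k tokens divided by the quality score.
    priced.sort(key=lambda m: to_float(m["medianPrice"]) / to_float(m["quality"]))
    for m in priced[:5]:
        print(m["name"], m["medianPrice"], m["quality"])
```

Rows where `medianPrice` or `quality` is `null` are skipped by the filter, so the ranking only covers models with both fields populated.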