Lines matching "billion":

"description": "Meta's Llama 3.2 model with 3 billion parameters.",
"description": "Meta's Llama 3.2 model with 1 billion parameters.",
"description": "Llama 3.1 is a new state-of-the-art model from Meta. 405 billion parameters.",
"description": "Llama 3.1 is a new state-of-the-art model from Meta. 8 billion parameters.",
"description": "Google Gemma 2 is a high-performing and efficient model. 27 billion parameters.",
"description": "Google Gemma 2 is a high-performing and efficient model. 9 billion parameters.",
"description": "Google Gemma 2 is a high-performing and efficient model. 2 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 72 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 32 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 14 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 7 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 3 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 1.5 billion parameters.",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset. 0.5 billion parameters.",