Mistral-Small-Instruct-2409
17.1K
393
10 languages
—
by
mistralai
Language Model
OTHER
Fair
17K downloads
Community-tested
Edge AI:
Mobile
Laptop
Server
Unknown
Mobile
Laptop
Server
Quick Summary
Mistral-Small-Instruct-2409 is an instruct fine-tuned version with the following characteristics: - 22B parameters - Vocabulary size of 32768 - Supports function calling
Code Examples
pip install --upgrade vllm
pip install --upgrade mistral_common
pip install mistral_inference --upgrade
from huggingface_hub import snapshot_download
from pathlib import Path
mistral_models_path = Path.home().joinpath('mistral_models', '22B-Instruct-Small')
mistral_models_path.mkdir(parents=True, exist_ok=True)
snapshot_download(repo_id="mistralai/Mistral-Small-Instruct-2409", allow_patterns=["params.json", "consolidated.safetensors", "tokenizer.model.v3"], local_dir=mistral_models_path)
Chat
mistral-chat $HOME/mistral_models/22B-Instruct-Small --instruct --max_tokens 256
Deploy This Model
Production-ready deployment in minutes
Together.ai
Instant API access to this model
Production-ready inference API. Start free, scale to millions.
Try Free API
Replicate
One-click model deployment
Run models in the cloud with simple API. No DevOps required.
Deploy Now
Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.