calme-3.2-instruct-3b
by MaziyarPanahi

Language Model · OTHER · 3.0B params · 2 languages
520 downloads · New · Early-stage

Edge AI: Mobile · Laptop · Server (7GB+ RAM)
Quick Summary

calme-3.2-instruct-3b is a 3.0B-parameter instruction-tuned language model with support for English and French.
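
Because the model is bilingual, the same pipeline call works for prompts in either language. A minimal sketch (the French prompt below is illustrative, not taken from the model card):

from transformers import pipeline

# Chat-style messages work in either supported language.
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")

# Illustrative French prompt.
messages = [
    {"role": "user", "content": "Explique la photosynthèse en une phrase."},
]
print(pipe(messages))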

Device Compatibility

Mobile:  4-6GB RAM
Laptop:  16GB RAM
Server:  GPU
Minimum recommended: 3GB+ RAM
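
To stay within the laptop and mobile RAM budgets above, the weights can be loaded in reduced precision. A minimal sketch, assuming PyTorch is installed and that half precision roughly halves the ~7GB full-precision footprint (exact savings depend on your setup; device_map="auto" additionally requires the accelerate package):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "MaziyarPanahi/calme-3.2-instruct-3b"

tokenizer = AutoTokenizer.from_pretrained(model_id)

# Load weights in float16 to cut memory use; device_map="auto" places
# layers on a GPU when one is available and falls back to CPU otherwise.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    device_map="auto",
)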

Code Examples

Prompt Template (python, transformers)
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
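
The direct-load snippet stops after loading the tokenizer and model; to get a reply you still need to apply the chat template and call generate. A hedged continuation of the snippet above (max_new_tokens is an illustrative choice, not a documented default):

# Continuing with the messages list and the tokenizer/model loaded above:
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,  # append the assistant-turn marker
    return_tensors="pt",
)

outputs = model.generate(inputs, max_new_tokens=128)

# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))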
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
Prompt Templatepythontransformers
# Use a pipeline as a high-level helper

from transformers import pipeline

messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe = pipeline("text-generation", model="MaziyarPanahi/calme-3.2-instruct-3b")
pipe(messages)


# Load model directly

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")
model = AutoModelForCausalLM.from_pretrained("MaziyarPanahi/calme-3.2-instruct-3b")

Deploy This Model

Production-ready deployment in minutes.

Together.ai — instant API access to this model. A production-ready inference API: start free and scale up as traffic grows. Together exposes an OpenAI-compatible endpoint, so the standard openai client can call it, as the hedged sketch below shows.
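This sketch assumes the model is actually listed in Together's catalog under its Hugging Face ID, which is not confirmed by this page; verify the exact model name before relying on it. The API key shown is a placeholder.

# Hedged sketch: calling Together's OpenAI-compatible endpoint
from openai import OpenAI

client = OpenAI(
    base_url="https://api.together.xyz/v1",
    api_key="YOUR_TOGETHER_API_KEY",  # placeholder; read from an env var in practice
)

response = client.chat.completions.create(
    model="MaziyarPanahi/calme-3.2-instruct-3b",  # assumed ID; check Together's catalog
    messages=[{"role": "user", "content": "Who are you?"}],
    max_tokens=128,
)
print(response.choices[0].message.content)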

Replicate — one-click model deployment. Run models in the cloud through a simple API, with no DevOps required.

Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.