PULI-Trio-Q
9
3
3 languages
license:apache-2.0
by
NYTK
Other
OTHER
New
9 downloads
Early-stage
Edge AI:
Mobile
Laptop
Server
Unknown
Mobile
Laptop
Server
Quick Summary
- Trained with LLaMA-Factory (see its GitHub repository). Based on the Qwen2 architecture.
Code Examples
Limitations

Python (transformers) example:
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])

Limitations

Python (transformers) example:
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])Limitationspythontransformers
from transformers import pipeline, Qwen2ForCausalLM, AutoTokenizer
model = Qwen2ForCausalLM.from_pretrained("NYTK/PULI-Trio-Q")
tokenizer = AutoTokenizer.from_pretrained("NYTK/PULI-Trio-Q")
prompt = "Elmesélek egy történetet a nyelvtechnológiáról."
generator = pipeline(task="text-generation", model=model, tokenizer=tokenizer, device=0)
print(generator(prompt, max_new_tokens=30)[0]["generated_text"])

Citation

BibTeX:
@inproceedings{yang-llumix-llama,
  title = {PULI Chat: Our First Hungarian Conversational Model},
  booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
  year = {2025},
  publisher = {Eszterházy Károly Catholic University},
  address = {Eger, Hungary},
  author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
  pages = {1--3},
  pubstate = {accepted abstract},
  url = {https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation

BibTeX:
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}Citationtext
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Citation (BibTeX):
@inproceedings {yang-llumix-llama,
title = {PULI Chat: Our First Hungarian Conversational Model},
booktitle = {International Conference on Formal Methods and Foundations of Artificial Intelligence},
year = {2025},
publisher = {Eszterházy Károly Catholic University},
address = {Eger, Hungary},
author = {Yang, Zijian Győző and Bánfi, Ágnes and Dodé, Réka and Ferenczi, Gergő and Földesi, Flóra and Hatvani, Péter and Héja, Enikő and Lengyel, Mariann and Madarász, Gábor and Osváth, Mátyás and Sárossy, Bence and Varga, Kristóf and Váradi, Tamás and Prószéky, Gábor and Ligeti-Nagy, Noémi},
pages = {1--3},
pubstate={accepted abstract},
url ={https://uni-eszterhazy.hu/api/media/file/7f9158bd443acc29dbd2a211971fe8677768257c}
}

Deploy This Model
Production-ready deployment in minutes
Together.ai
Instant API access to this model
Production-ready inference API. Start free, scale to millions.
Try Free APIReplicate
One-click model deployment
Run models in the cloud with simple API. No DevOps required.
Deploy NowDisclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.