coptic-megalaa-finetuned
23
license:agpl-3.0
by
Norelad
Language Model
OTHER
New
23 downloads
Early-stage
Edge AI:
Mobile
Laptop
Server
Unknown
Mobile
Laptop
Server
Quick Summary
AI model with specialized capabilities.
Code Examples
Usage (Python, transformers)
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Greekification preprocessing (required for megalaa models).
# Maps each lowercase Coptic letter to its Greek (or ASCII) transcription;
# any character without a mapping passes through unchanged (after lowercasing).
COPTIC_TO_GREEK = {
    "ⲁ": "α", "ⲃ": "β", "ⲅ": "γ", "ⲇ": "δ", "ⲉ": "ε", "ⲋ": "ϛ",
    "ⲍ": "ζ", "ⲏ": "η", "ⲑ": "θ", "ⲓ": "ι", "ⲕ": "κ", "ⲗ": "λ",
    "ⲙ": "μ", "ⲛ": "ν", "ⲝ": "ξ", "ⲟ": "ο", "ⲡ": "π", "ⲣ": "ρ",
    "ⲥ": "σ", "ⲧ": "τ", "ⲩ": "υ", "ⲫ": "φ", "ⲭ": "χ", "ⲯ": "ψ",
    "ⲱ": "ω", "ϣ": "s", "ϥ": "f", "ϧ": "k", "ϩ": "h", "ϫ": "j",
    "ϭ": "c", "ϯ": "t",
}

# Precompiled translation table: str.translate maps all characters in a
# single C-level pass instead of a per-character Python dict lookup.
_COPTIC_TO_GREEK_TABLE = str.maketrans(COPTIC_TO_GREEK)


def greekify(coptic_text: str) -> str:
    """Convert Coptic Unicode text to its Greek transcription.

    Lowercases the input (so uppercase Coptic letters are handled too),
    then replaces each mapped Coptic letter via COPTIC_TO_GREEK.
    Characters not in the mapping (spaces, punctuation, Latin/Greek)
    are left unchanged.
    """
    return coptic_text.lower().translate(_COPTIC_TO_GREEK_TABLE)
# ---- Load the fine-tuned Coptic-to-English model from the Hub ----
MODEL_ID = "Norelad/coptic-megalaa-finetuned"
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_ID)

# ---- Translate a sample sentence ----
# The model was trained on greekified text, so preprocess before tokenizing.
coptic_text = "ⲁⲛⲟⲕ ⲡⲉ ⲡⲉⲭⲣⲓⲥⲧⲟⲥ"
encoded = tokenizer(greekify(coptic_text), return_tensors="pt", padding=True)
generated = model.generate(**encoded, max_length=256, num_beams=5)
translation = tokenizer.decode(generated[0], skip_special_tokens=True)

print(f"Coptic: {coptic_text}")
print(f"English: {translation}")
# Output: "I am Christ."

Citation (BibTeX)
@misc{coptic-megalaa-finetuned,
author = {Linden, André},
title = {Coptic-English Translation Model (Fine-tuned)},
year = {2025},
publisher = {HuggingFace},
howpublished = {\url{https://huggingface.co/Norelad/coptic-megalaa-finetuned}},
note = {Fine-tuned from megalaa/coptic-english-translator}
}
@misc{megalaa-coptic-translator,
author = {Megalaa},
title = {Coptic-English Translator},
year = {2024},
publisher = {HuggingFace},
howpublished = {\url{https://huggingface.co/megalaa/coptic-english-translator}}
}

Deploy This Model
Production-ready deployment in minutes
Together.ai
Instant API access to this model
Production-ready inference API. Start free, scale to millions.
Try Free API

Replicate
One-click model deployment
Run models in the cloud with simple API. No DevOps required.
Deploy Now

Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.