articles-pairs-event-detection-v2
1
license:apache-2.0
by
Juanillaberia
Embedding Model
OTHER
New
0 downloads
Early-stage
Edge AI:
Mobile
Laptop
Server
Unknown
Mobile
Laptop
Server
Quick Summary
AI model with specialized capabilities.
Code Examples
How to Use (Python, transformers)
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModelForSequenceClassification
# Hugging Face Hub identifier of the fine-tuned headline-pair classifier.
# NOTE(review): the page title reads "articles-pairs-event-detection-v2" —
# confirm this repo id is the intended revision.
MODEL_NAME = "Juanillaberia/articles-pairs-event-detection"
# Downloads (or loads from the local cache) the tokenizer and the
# sequence-classification model for the repo above.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
# Switch to inference mode (disables dropout and similar training-only layers).
model.eval()
def predict_same_event(headline_a: str, headline_b: str) -> dict:
    """
    Predicts whether two article headlines refer to the same real-world event.

    Args:
        headline_a: Headline of the first article.
        headline_b: Headline of the second article.

    Returns:
        Dictionary with predicted label and probabilities.
    """
    # Encode the two headlines as a single cross-encoder pair input.
    encoded = tokenizer(
        text=headline_a,
        text_pair=headline_b,
        return_tensors="pt",
        truncation=True,
        max_length=128,
    )

    # Inference only — no gradient tracking needed.
    with torch.no_grad():
        class_logits = model(**encoded).logits

    class_probs = F.softmax(class_logits, dim=-1)
    winner = int(torch.argmax(class_probs, dim=-1).item())

    label_names = {0: "Different Event", 1: "Same Event"}
    return {
        "label": label_names[winner],
        "score": class_probs[0][winner].item(),
        "probabilities": {
            "Different Event": class_probs[0][0].item(),
            "Same Event": class_probs[0][1].item(),
        },
    }
# Example usage
# Two headlines describing the same policy development from different angles.
headline_a = "Government announces new climate policy targeting carbon emissions"
headline_b = "New climate bill signed into law by administration"
result = predict_same_event(headline_a, headline_b)
print(result)
# {'label': 'Same Event', 'score': 0.93, 'probabilities': {'Different Event': 0.07, 'Same Event': 0.93}}

Optional: Apply Temporal Adjustment (Python)
from datetime import datetime
def predict_with_date_adjustment(
    headline_a: str,
    headline_b: str,
    date_a: str,
    date_b: str,
    lambda_: float = 0.20,
    threshold: float = 0.45,
) -> dict:
    """
    Predicts same-event with temporal adjustment based on publication date difference.

    The raw "Same Event" probability is decayed exponentially by the number of
    days separating the two publication dates:

        adjusted = p_same * exp(-lambda_ * |days_apart|)

    Args:
        headline_a: Headline of the first article.
        headline_b: Headline of the second article.
        date_a: Publication date of article A (format: 'YYYY-MM-DD').
        date_b: Publication date of article B (format: 'YYYY-MM-DD').
        lambda_: Decay factor for temporal adjustment (default: 0.20).
        threshold: Classification threshold after adjustment (default: 0.45).

    Returns:
        Dictionary with the adjusted label, adjusted and raw scores, and the
        absolute day gap between the two articles.

    Raises:
        ValueError: If either date string is not a valid 'YYYY-MM-DD' date.
    """
    # Scalar exponential — clearer and cheaper than wrapping the scalar in a
    # tensor just to call torch.exp on it.
    import math

    inputs = tokenizer(
        text=headline_a,
        text_pair=headline_b,
        return_tensors="pt",
        truncation=True,
        max_length=128,
    )

    with torch.no_grad():
        outputs = model(**inputs)

    # Probability that the pair describes the same event (class index 1).
    prob_same_event = F.softmax(outputs.logits, dim=-1)[0][1]

    # fromisoformat matches the documented 'YYYY-MM-DD' contract directly,
    # with no format string to keep in sync.
    diff_days = abs(
        (datetime.fromisoformat(date_a) - datetime.fromisoformat(date_b)).days
    )

    # Exponential decay: far-apart publication dates make "Same Event" less likely.
    adjusted_prob = prob_same_event.item() * math.exp(-lambda_ * diff_days)

    predicted = int(adjusted_prob >= threshold)
    labels = {0: "Different Event", 1: "Same Event"}
    return {
        "label": labels[predicted],
        "adjusted_score": adjusted_prob,
        "raw_score": prob_same_event.item(),
        "diff_days": diff_days,
    }
# Example usage
# The articles are one day apart, so the raw "Same Event" probability is
# decayed by exp(-0.20 * 1) before thresholding.
result = predict_with_date_adjustment(
    headline_a="Government announces new climate policy",
    headline_b="New climate bill signed into law",
    date_a="2024-03-01",
    date_b="2024-03-02"
)
print(result)

Deploy This Model
Production-ready deployment in minutes
Together.ai
Instant API access to this model
Production-ready inference API. Start free, scale to millions.
Try Free API

Replicate
One-click model deployment
Run models in the cloud with simple API. No DevOps required.
Deploy Now

Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.