sentiment-model-imdb
47
2
1 language
license:mit
by
Daksh0505
Other
OTHER
New
47 downloads
Early-stage
Edge AI:
Mobile
Laptop
Server
Unknown
Mobile
Laptop
Server
Quick Summary
This repository contains two deep learning models for sentiment classification of IMDB movie reviews, each trained with a different vocabulary size and number of parameters.
Code Examples
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Load Models & Tokenizers (from Hugging Face Hub) — Python
from huggingface_hub import hf_hub_download
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
import json
# === Model A ===
model_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_6.6M.keras")
tokenizer_path_a = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_50k.json")
with open(tokenizer_path_a, "r") as f:
tokenizer_a = tokenizer_from_json(json.load(f))
model_a = load_model(model_path_a)
# === Model B ===
model_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="sentiment_model_imdb_34M.keras")
tokenizer_path_b = hf_hub_download(repo_id="Daksh0505/sentiment-model-imdb", filename="tokenizer_256k.json")
with open(tokenizer_path_b, "r") as f:
tokenizer_b = tokenizer_from_json(json.load(f))
model_b = load_model(model_path_b)
Deploy This Model
Production-ready deployment in minutes
Together.ai
Instant API access to this model
Production-ready inference API. Start free, scale to millions.
Try Free API
Replicate
One-click model deployment
Run models in the cloud with a simple API. No DevOps required.
Deploy Now
Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.