efficientnet_b3

40
by
litert-community
Image Model
OTHER
New
40 downloads
Early-stage
Edge AI:
Mobile
Laptop
Server
Unknown
Mobile
Laptop
Server
Quick Summary

EfficientNet-B3 image-classification model packaged for on-device inference with LiteRT (TensorFlow Lite).

Code Examples

Usage (Python)
#!/usr/bin/env python3
import argparse, json
import numpy as np
from PIL import Image
from huggingface_hub import hf_hub_download
from ai_edge_litert.compiled_model import CompiledModel


def preprocess(img: Image.Image) -> np.ndarray:
    """Convert a PIL image into a normalized float32 CHW tensor.

    Pipeline: force RGB, scale the shorter side to 320 px (bicubic),
    center-crop a 300x300 patch, map pixels to [0, 1], normalize with
    the ImageNet mean/std, and transpose HWC -> CHW.
    """
    rgb = img.convert("RGB")
    width, height = rgb.size
    short_side = 320
    crop = 300

    # Resize so the shorter dimension becomes `short_side`, keeping aspect.
    if width < height:
        new_size = (short_side, int(round(height * short_side / width)))
    else:
        new_size = (int(round(width * short_side / height)), short_side)
    rgb = rgb.resize(new_size, Image.BICUBIC)

    # Central 300x300 crop.
    x0 = (rgb.size[0] - crop) // 2
    y0 = (rgb.size[1] - crop) // 2
    rgb = rgb.crop((x0, y0, x0 + crop, y0 + crop))

    mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)
    std = np.array([0.229, 0.224, 0.225], dtype=np.float32)
    arr = (np.asarray(rgb, dtype=np.float32) / 255.0 - mean) / std
    # NOTE(review): output is CHW with no batch axis; many TFLite image
    # models expect NHWC — confirm against the model's input signature.
    return np.transpose(arr, (2, 0, 1))


def main():
    """CLI entry point: classify one image with the LiteRT model.

    Downloads the compiled model and the ImageNet label map from the
    Hugging Face Hub, runs a single inference, and prints the top-1
    class index and label.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--image", required=True)
    opts = parser.parse_args()

    # Fetch model weights and the id -> label mapping from the Hub.
    model_file = hf_hub_download("litert-community/efficientnet_b3", "efficientnet_b3.tflite")
    labels_file = hf_hub_download(
        "huggingface/label-files", "imagenet-1k-id2label.json", repo_type="dataset"
    )
    with open(labels_file, "r", encoding="utf-8") as fh:
        id2label = {int(idx): name for idx, name in json.load(fh).items()}

    tensor = preprocess(Image.open(opts.image))

    # Compile the model, bind I/O buffers for signature 0, and run once.
    compiled = CompiledModel.from_file(model_file)
    inputs = compiled.create_input_buffers(0)
    outputs = compiled.create_output_buffers(0)
    inputs[0].write(tensor)
    compiled.run_by_index(0, inputs, outputs)

    # Read back the raw output vector as float32 (size derived from the
    # reported buffer byte size).
    req = compiled.get_output_buffer_requirements(0, 0)
    n_floats = req["buffer_size"] // np.dtype(np.float32).itemsize
    scores = outputs[0].read(n_floats, np.float32)

    best = int(np.argmax(scores))
    best_name = id2label.get(best, f"class_{best}")

    print(f"Top-1 class index: {best}")
    print(f"Top-1 label: {best_name}")


if __name__ == "__main__":
    main()

Deploy This Model

Production-ready deployment in minutes

Together.ai

Instant API access to this model

Fastest API

Production-ready inference API. Start free, scale to millions.

Try Free API

Replicate

One-click model deployment

Easiest Setup

Run models in the cloud with simple API. No DevOps required.

Deploy Now

Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.