Spaces:
Runtime error
Runtime error
File size: 421 Bytes
2217d55 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 |
import gradio as gr
import numpy as np
from PIL import Image
from sentence_transformers import SentenceTransformer
# Load the CLIP SentenceTransformer once at import time so every request
# reuses the same model instance; this checkpoint embeds images and text
# into a shared vector space.
model = SentenceTransformer('clip-ViT-B-32')
def image_to_embedding(img: np.ndarray):
    """Encode one image into its CLIP embedding.

    Args:
        img: The image as a numpy array, as delivered by the gradio
            ``"image"`` input component. Gradio passes ``None`` when the
            user submits without selecting an image.

    Returns:
        The embedding produced by ``model.encode`` for the single input
        image (a numpy array with one row), or an empty string when no
        image was provided, so the textbox output simply stays blank.
    """
    # Guard: Image.fromarray(None) raises, so bail out early on an empty
    # submission instead of crashing the request.
    if img is None:
        return ""
    # The CLIP model expects PIL images (or strings), not raw arrays, so
    # wrap the array first. batch_size is irrelevant for a single item.
    embedding = model.encode([Image.fromarray(img)])
    return embedding
# Build and launch the demo UI. The original passed cache_examples=True
# without supplying any `examples`, which makes gradio raise at startup
# (the likely cause of the Space's "Runtime error") — there is nothing to
# cache, so the flag is dropped.
iface = gr.Interface(
    fn=image_to_embedding,
    inputs="image",
    outputs="textbox",
)
iface.launch()
|