Bert-base_uncased / stream.py
import streamlit as st
from transformers import BertTokenizer, BertForSequenceClassification
import torch
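
# Streamlit app: classifies text as human-written or AI-generated using a fine-tuned BERT model.
# Run locally with:  streamlit run stream.py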
# Load the model and tokenizer from Hugging Face
@st.cache_resource  # cache so the model and tokenizer are loaded only once per session
def load_model():
    model = BertForSequenceClassification.from_pretrained("your-huggingface-username/your-model-repo")
    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
    return model, tokenizer
model, tokenizer = load_model()
# Define Streamlit app layout
st.title("AI vs Human Text Classifier")
user_input = st.text_area("Enter the text to classify:")
if st.button("Classify"):
    # Preprocess the input text
    inputs = tokenizer(user_input, return_tensors="pt", max_length=256, padding="max_length", truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    # Get prediction
    prediction = torch.argmax(outputs.logits, dim=1).item()
    # Convert prediction to human-readable label
    label_mapping = {0: "Human-written", 1: "AI-generated"}
    st.write(f"The text is classified as: {label_mapping[prediction]}")