import gradio as gr
import pandas as pd
from transformers import pipeline

# Load the NYT Connections dataset
url = "https://huggingface.co/datasets/tm21cy/NYT-Connections/resolve/main/ConnectionsFinalDataset.json"
df = pd.read_json(url)
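# Each row is one daily puzzle; its "words" column (used in solve_puzzle below) holds the 16 shuffled words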

# Load a small, fully open text-generation model (no Hugging Face login needed)
pipe = pipeline(
    "text-generation",
    model="tiiuae/falcon-rw-1b",  # ✅ ~1B-parameter model, openly licensed, not gated
)
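# Note: pipeline() downloads the model weights on first start-up; on a CPU-only Space,
# generation with even a ~1B-parameter model can be noticeably slow.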

# Generate the puzzle prompt for the model
def generate_prompt(words):
    return f"""
You are solving an NYT Connections puzzle.
Words: {', '.join(words)}
Format:
Group 1 (Highest Confidence): words
Reasoning:
Group 2: words
Reasoning:
Group 3: words
Reasoning:
Group 4 (Lowest Confidence): words
Reasoning:
"""

# Run the model on a randomly sampled puzzle and return its answer
def solve_puzzle():
    sample_puzzle = df.sample(1).iloc[0]
    words = sample_puzzle["words"]
    prompt = generate_prompt(words)
    response = pipe(prompt, max_new_tokens=200, do_sample=False)
    # The pipeline echoes the prompt in generated_text, so keep only the new completion
    completion = response[0]["generated_text"][len(prompt):].strip()
    # Trim to the eight lines requested by the prompt (four groups, two lines each)
    return "\n".join(completion.split("\n")[:8])

# Create a simple Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("# 🧩 NYT Connections Experiment (Powered by AI)")
    with gr.Row():
        solve_button = gr.Button("🔍 Solve a Random Puzzle")
        output = gr.Textbox(label="AI Response", interactive=False)
    solve_button.click(solve_puzzle, outputs=output)

# Launch the app
demo.launch()
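
# On a Hugging Face Space this file is executed automatically; locally, the app can be
# started with `python app.py` once gradio, transformers, and pandas are installed.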