import gradio as gr
import pandas as pd
from transformers import pipeline


# Load the NYT Connections dataset
url = "https://huggingface.co/datasets/tm21cy/NYT-Connections/resolve/main/ConnectionsFinalDataset.json"
df = pd.read_json(url)
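# Each row holds one puzzle; its "words" field (the 16 words to group) is what solve_puzzle() samples below.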

pipe = pipeline(
    "text-generation",
    model="tiiuae/falcon-rw-1b",  # ✅ Falcon-RW-1B: openly available ~1B-parameter model, no gated access or login needed
)

# Generate a prompt for AI
def generate_prompt(words):
    return f"""
You are solving an NYT Connections puzzle.
Words: {', '.join(words)}

Format:
Group 1 (Highest Confidence): words
Reasoning:

Group 2: words
Reasoning:

Group 3: words
Reasoning:

Group 4 (Lowest Confidence): words
Reasoning:
"""

# Run the model on a randomly sampled puzzle
def solve_puzzle():
    sample_puzzle = df.sample(1).iloc[0]
    words = sample_puzzle["words"]
    prompt = generate_prompt(words)
    # return_full_text=False returns only the model's continuation instead of echoing the prompt;
    # max_new_tokens caps the length of the generated answer itself.
    response = pipe(prompt, max_new_tokens=300, do_sample=False, return_full_text=False)
    return response[0]["generated_text"].strip()

# Create a simple Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("# 🧩 NYT Connections Experiment (Powered by AI)")
    with gr.Row():
        solve_button = gr.Button("🔍 Solve a Random Puzzle")
    output = gr.Textbox(label="AI Response", interactive=False)
    
    solve_button.click(solve_puzzle, outputs=output)

# Launch the app
demo.launch()
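# Optional: demo.launch(share=True) also prints a temporary public URL for sharing the demo.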