Razzaqi3143 committed
Commit ac77498 · verified · 1 Parent(s): 9f01bc9

Create app.py

Files changed (1)
app.py  +106 -0
app.py ADDED
@@ -0,0 +1,106 @@
+ import os
+ import gradio as gr
+ import docx2txt
+ import PyPDF2
+ import csv
+ from groq import Groq
+
+ # Initialize Groq API client
+ client = Groq(api_key=os.getenv("GROQ_API_KEY"))
+
+ def extract_text_from_file(file_path):
+     """
+     Extracts text from a PDF or Word (.docx) document given its file path.
+     """
+     text = ""
+     if file_path.endswith(".pdf"):
+         with open(file_path, "rb") as f:
+             pdf_reader = PyPDF2.PdfReader(f)
+             for page in pdf_reader.pages:
+                 text += page.extract_text() or ""
+     elif file_path.endswith(".docx"):
+         text = docx2txt.process(file_path)
+     return text
+
+ def classify_and_generate_questions(text_chunk, question_type="MCQ"):
+     """
+     Uses Groq's Llama model to classify content and generate specific types of questions.
+     """
+     # Define prompt templates for each question type
+     question_prompts = {
+         "MCQ": f"Based on the following content, generate a multiple-choice question (MCQ) with four answer options. Specify the correct answer.\n\nText:\n{text_chunk}\n\nResponse:",
+         "Fill-in-the-Blank": f"Based on the following content, create a fill-in-the-blank question by leaving out an important word or concept.\n\nText:\n{text_chunk}\n\nResponse:",
+         "Short Answer": f"Based on the following content, generate a short answer question.\n\nText:\n{text_chunk}\n\nResponse:",
+         "Concept Explanation": f"Based on the following content, create a question that requires explaining the concept in detail.\n\nText:\n{text_chunk}\n\nResponse:",
+         "Numerical": f"Based on the following content, create a numerical question. Provide the correct answer if possible.\n\nText:\n{text_chunk}\n\nResponse:",
+     }
+
+     # Select the prompt based on the question type
+     prompt = question_prompts.get(question_type, question_prompts["MCQ"])
+
+     # Perform chat completion using Groq's Llama model
+     chat_completion = client.chat.completions.create(
+         messages=[
+             {
+                 "role": "user",
+                 "content": prompt
+             }
+         ],
+         model="llama3-8b-8192"  # Replace with the model name you have access to
+     )
+
+     # Extract the response content
+     return chat_completion.choices[0].message.content
+
+ def generate_questions(file, question_type, save_format):
+     """
+     Main function to process the file, generate questions, and optionally save them.
+     """
+     if file is None:
+         return "Please upload a PDF or Word document.", None
+
+     # Extract text
+     text = extract_text_from_file(file)
+     if not text:
+         return "Could not extract text from the document.", None
+
+     # Split text into chunks to stay within model limits
+     chunk_size = 500
+     text_chunks = [text[i:i + chunk_size] for i in range(0, len(text), chunk_size)]
+
+     # Generate questions for each chunk
+     questions = []
+     for chunk in text_chunks:
+         question = classify_and_generate_questions(chunk, question_type=question_type)
+         questions.append(question)
+
+     # Format questions for display
+     questions_display = "\n\n".join(questions)
+
+     # Save questions if selected
+     if save_format == "Text File":
+         with open("generated_questions.txt", "w", encoding="utf-8") as f:
+             for question in questions:
+                 f.write(question + "\n\n")
+         return questions_display, "generated_questions.txt"
+
+     elif save_format == "CSV File":
+         with open("generated_questions.csv", "w", newline="", encoding="utf-8") as f:
+             writer = csv.writer(f)
+             writer.writerow(["Question"])
+             for question in questions:
+                 writer.writerow([question])
+         return questions_display, "generated_questions.csv"
+
+     return questions_display, None
+
+ # Gradio interface
+ file_input = gr.File(label="Upload PDF or Word Document", type="filepath")
+ question_type = gr.Dropdown(["MCQ", "Fill-in-the-Blank", "Short Answer", "Concept Explanation", "Numerical"], label="Question Type", value="MCQ")
+ save_format = gr.Dropdown(["None", "Text File", "CSV File"], label="Save Questions As", value="None")
+ output_text = gr.Textbox(label="Generated Questions")
+ output_file = gr.File(label="Download File")
+
+ gr.Interface(
+     fn=generate_questions,
+     inputs=[file_input, question_type, save_format],
+     outputs=[output_text, output_file],
+     title="Engineering Quiz Question Generator",
+     description="Upload a PDF or Word document, choose the type of quiz question, and save the generated questions to a file if desired."
+ ).launch()
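
For reference, a minimal local smoke test of the Groq call used above, run outside the Gradio UI. This is only a sketch: it assumes the groq package is installed, GROQ_API_KEY is set in the environment, and the llama3-8b-8192 model is available to your key; the sample chunk is made-up text for illustration.

import os
from groq import Groq

# Assumes GROQ_API_KEY is exported in the environment (same as app.py)
client = Groq(api_key=os.getenv("GROQ_API_KEY"))

# Illustrative stand-in for one 500-character chunk extracted from a document
sample_chunk = "Ohm's law states that the current through a conductor is proportional to the applied voltage."

# Same MCQ prompt template used in classify_and_generate_questions
prompt = (
    "Based on the following content, generate a multiple-choice question (MCQ) "
    "with four answer options. Specify the correct answer."
    f"\n\nText:\n{sample_chunk}\n\nResponse:"
)

completion = client.chat.completions.create(
    messages=[{"role": "user", "content": prompt}],
    model="llama3-8b-8192",  # same model name used in app.py
)
print(completion.choices[0].message.content)

If this prints a well-formed MCQ, the same prompt path should work inside generate_questions once the Space has the GROQ_API_KEY secret configured.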