Spaces: Runtime error
Nikolay Angelov committed
Commit: ca0f48e
Parent(s): 5ff4011
move from Gradio_UI to web fe
Browse files
- .dockerignore +14 -0
- .gitignore +13 -2
- Dockerfile +27 -9
- Gradio_UI.py +0 -99
- app.py +54 -24
- docker-compose.yml +7 -5
- frontend/package.json +42 -0
- frontend/public/index.html +19 -0
- frontend/public/manifest.json +15 -0
- frontend/src/App.css +6 -0
- frontend/src/App.tsx +13 -0
- frontend/src/components/ChatBot.tsx +97 -0
- frontend/src/components/ChatHeader.tsx +56 -0
- frontend/src/components/ChatInput.tsx +69 -0
- frontend/src/components/ChatMessage.tsx +37 -0
- frontend/src/components/ChatWindow.tsx +80 -0
- frontend/src/index.css +14 -0
- frontend/src/index.tsx +13 -0
- frontend/src/types.ts +12 -0
- frontend/tsconfig.json +26 -0
- main.py +3 -3
- prompts.yaml +35 -21
- requirements.txt +2 -2
.dockerignore
ADDED
@@ -0,0 +1,14 @@
+.git
+.gitignore
+.env
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.Python
+env/
+venv/
+.venv/
+node_modules/
+npm-debug.log
+logs/
.gitignore
CHANGED
@@ -1,3 +1,14 @@
+.git
+.gitignore
+.env
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.Python
+env/
+venv/
 .venv/
-
-.
+node_modules/
+npm-debug.log
+logs/
Dockerfile
CHANGED
@@ -1,24 +1,42 @@
 FROM python:3.12-slim
 
-RUN useradd -m -u 1000 user
-ENV PATH="/home/user/.local/bin:$PATH"
-
 WORKDIR /app
 
 # Install system packages as root
 RUN apt-get update && \
-    apt-get install -y --no-install-recommends wget git && \
+    apt-get install -y --no-install-recommends wget git curl && \
+    curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
+    apt-get install -y nodejs && \
     rm -rf /var/lib/apt/lists/*
 
-#
+# Create logs directory as root and set permissions
+RUN mkdir -p logs
+
+# Create a non-root user and set permissions
+RUN useradd -m -u 1000 user && \
+    chown -R user:user /app
+
+# Switch to non-root user
 USER user
+ENV PATH="/home/user/.local/bin:$PATH"
 
-
-
+# Copy Python requirements and install
+COPY --chown=user requirements.txt .
+RUN pip install --user --no-cache-dir -r requirements.txt
 
-
+# Copy the entire project
+COPY --chown=user . .
+
+# Set up React frontend
+WORKDIR /app/frontend
+
+# Install dependencies and build
+RUN npm install && npm run build
+
+# Back to main app directory
+WORKDIR /app
 
-# Expose
+# Expose port (used by FastAPI)
 EXPOSE 7860
 
 CMD ["python", "main.py"]
Gradio_UI.py
DELETED
@@ -1,99 +0,0 @@
-import gradio as gr
-from typing import Optional
-import os
-from fastapi.responses import RedirectResponse
-
-class GradioUI:
-    """A Gradio interface that serves as a frontend for the agent API"""
-
-    def __init__(self, agent=None, file_upload_folder: str = None):
-        self.agent = agent
-        self.file_upload_folder = file_upload_folder
-
-        if self.file_upload_folder is not None:
-            if not os.path.exists(file_upload_folder):
-                os.makedirs(file_upload_folder, exist_ok=True)
-
-    def create_interface(self):
-        with gr.Blocks(css="""
-            #main-container {
-                display: flex;
-                height: 100%;
-            }
-            #menu-column {
-                min-height: 600px;
-                border-right: 1px solid #ddd;
-            }
-            #chat-column {
-                min-height: 600px;
-                flex-grow: 1;
-            }
-            .button-container {
-                padding: 10px;
-            }
-        """) as interface:
-            with gr.Row(elem_id="main-container"):
-                # Left menu strip (1/4 width)
-                with gr.Column(scale=1, elem_id="menu-column"):
-                    gr.Markdown("# Menu")
-                    with gr.Group(elem_classes="button-container"):
-                        new_chat_btn = gr.Button("New Chat", variant="primary")
-
-                    # Output area for button actions
-                    menu_output = gr.HTML(label="Action Result")
-
-                # Right chat console (3/4 width)
-                with gr.Column(scale=3, elem_id="chat-column"):
-                    gr.Markdown("# AI Assistant")
-                    chatbot = gr.Chatbot(
-                        height=500,
-                        type='messages'
-                    )
-                    msg = gr.Textbox(
-                        placeholder="Ask me anything...",
-                        show_label=False,
-                        container=False
-                    )
-                    clear = gr.Button("Clear")
-
-            def handle_new_chat():
-                return None, "Started new conversation"
-
-            def user_input(message, history):
-                # Show user message immediately
-                history = history + [{"role": "user", "content": message}]
-                yield "", history  # This updates the UI right away
-
-                try:
-                    response = self.agent.invoke({
-                        "input": message
-                    })
-                    final_answer = response.get("output", "No response generated")
-                    thought_process = response.get("full_thought_process") or response.get("intermediate_steps") or ""
-                    if thought_process and thought_process != final_answer:
-                        assistant_message = f"**Final Answer:** {final_answer}\n\n**Thought Process:**\n{thought_process}"
-                    else:
-                        assistant_message = final_answer
-                    history = history + [{"role": "assistant", "content": assistant_message}]
-                    yield "", history  # This updates the UI with the assistant's response
-                except Exception as e:
-                    history = history + [{"role": "assistant", "content": f"Error: {str(e)}"}]
-                    yield "", history
-
-            # Set up event handlers
-            msg.submit(user_input, [msg, chatbot], [msg, chatbot])
-            clear.click(lambda: None, None, chatbot, queue=False)
-            new_chat_btn.click(
-                fn=handle_new_chat,
-                inputs=[],
-                outputs=[chatbot, menu_output]
-            )
-
-        return interface
-
-    def launch(self, **kwargs):
-        """Launch the Gradio interface standalone (for development)"""
-        interface = self.create_interface()
-        interface.launch(**kwargs)
-
-__all__ = ["GradioUI"]
app.py
CHANGED
@@ -16,9 +16,8 @@ from pydantic import BaseModel, Field
 
 from fastapi import FastAPI, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
-
-
-from Gradio_UI import GradioUI
+from fastapi.staticfiles import StaticFiles
+from fastapi.responses import FileResponse
 
 # Initialize FastAPI app
 app = FastAPI(title="AI Assistant", description="AI Assistant with LangChain and Gradio")
@@ -35,21 +34,18 @@ app.add_middleware(
 if not os.getenv('HUGGINGFACEHUB_API_TOKEN'):
     raise ValueError("Please set HUGGINGFACEHUB_API_TOKEN environment variable")
 
-# Initialize the HuggingFace pipeline
+# Initialize the HuggingFace pipeline with more strict parameters
 llm = HuggingFaceEndpoint(
     repo_id="meta-llama/Llama-3.3-70B-Instruct",
     huggingfacehub_api_token=os.getenv('HUGGINGFACEHUB_API_TOKEN'),
     provider="hf-inference",
     task="text-generation",
-    temperature=0.1,
-    max_new_tokens=
+    temperature=0.1,  # Keep low for more deterministic responses
+    max_new_tokens=2048,  # Increase token limit
    top_p=0.95,
-    repetition_penalty=1.
+    repetition_penalty=1.2,  # Slightly increase to prevent repetition
     do_sample=True,
     return_full_text=False,
-    model_kwargs={
-        "stop": ["Human:", "Assistant:", "Observation:"]
-    }
 )
 
 # Load system prompt and template
@@ -62,29 +58,28 @@ prompt = PromptTemplate.from_template(
     partial_variables={"system_prompt": prompt_templates["system_prompt"]}
 )
 
-# Create the agent with stop sequences
 tools = [visit_webpage, wikipedia_search, run_python_code, internet_search]
+# Create the agent with more explicit instructions
 agent = create_react_agent(
     llm=llm,
     tools=tools,
     prompt=prompt
 )
 
-# Set up
-memory = ConversationBufferMemory(return_messages=True)
+# Set up agent executor with better error handling
 agent_executor = AgentExecutor(
     agent=agent,
     tools=tools,
-    memory=memory,
     verbose=True,
     handle_parsing_errors=True,
-    #
-
+    max_iterations=5,  # Increased to give more chances
+    return_intermediate_steps=True,  # Return thought process
+    early_stopping_method="force",  # Force stop after max iterations
+    stop=["Human:", "Assistant:"]
 )
-
 # API Models
 class QueryRequest(BaseModel):
-    query: str
+    query: str  # This matches the "query" field sent by Gradio
     thread_id: Optional[str] = None
     context: Dict[str, Any] = Field(default_factory=dict)
 
@@ -94,19 +89,54 @@ async def query_agent(request: QueryRequest):
     print("Received query:", request.query)
     try:
         thread_id = request.thread_id or str(uuid.uuid4())
+
+        # Preprocess the query to help guide the agent
+        query = request.query
+
+        # Check if this is likely a factual question
+        factual_indicators = ["who is", "what is", "when did", "where is", "why did", "how does"]
+        is_likely_factual = any(query.lower().startswith(indicator) for indicator in factual_indicators)
+
+        # Add a hint for factual questions
+        if is_likely_factual:
+            query = f"{query} (Please use your tools to find accurate information about this.)"
+
         response = agent_executor.invoke({
-            "input":
+            "input": query
         })
+
+        print("full_thought_process: ", response.get("intermediate_steps", "No thought process generated"))
+
+        # Clean up the response if needed
+        output = response.get("output", "No response generated")
+        if "<|eot_id|>" in output:
+            output = output.split("<|eot_id|>")[0]
+
         return {
             "status": "success",
            "thread_id": thread_id,
-            "response":
-            "full_thought_process": response.get("
+            "response": output,
+            "full_thought_process": str(response.get("intermediate_steps", "No thought process generated"))
         }
     except Exception as e:
        print(e)
        raise HTTPException(status_code=500, detail=str(e))
 
-#
-
-
+# Check if the frontend build directory exists
+if os.path.exists("frontend/build"):
+    # Serve static files from React build
+    app.mount("/static", StaticFiles(directory="frontend/build/static"), name="static")
+
+    # Serve other static files from the build directory
+    if os.path.exists("frontend/build/assets"):
+        app.mount("/assets", StaticFiles(directory="frontend/build/assets"), name="assets")
+
+    # Serve React app for all other routes that don't match API endpoints
+    @app.get("/{full_path:path}")
+    async def serve_react_app(full_path: str):
+        # Skip API routes
+        if full_path.startswith("agent/"):
+            raise HTTPException(status_code=404, detail="Not found")
+
+        # For all other routes, serve the React app
+        return FileResponse("frontend/build/index.html")
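For reference, a minimal sketch of how the reworked /agent/query endpoint can be exercised once python main.py is serving on port 7860. The request and response fields mirror QueryRequest and the dict returned by query_agent in the diff above; the use of the requests library, the timeout value, and the example question are illustrative assumptions, not part of this commit.

# Minimal sketch, not part of this commit: assumes the FastAPI app is listening on localhost:7860.
import requests

payload = {
    "query": "Who is Charlie Chaplin?",  # maps to QueryRequest.query
    "thread_id": None,                   # the server generates a UUID when this is omitted
    "context": {},
}

resp = requests.post("http://localhost:7860/agent/query", json=payload, timeout=120)
resp.raise_for_status()
data = resp.json()

print(data["status"])                # "success"
print(data["thread_id"])             # thread id assigned by the server
print(data["response"])              # agent output with any trailing <|eot_id|> stripped
print(data["full_thought_process"])  # stringified intermediate_steps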
docker-compose.yml
CHANGED
@@ -1,9 +1,11 @@
-# docker-compose.yml
-version: '3'
 services:
   app:
-    build:
+    build:
+      context: .
+      dockerfile: Dockerfile
     ports:
       - "7860:7860"
-
-      -
+    environment:
+      - HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
+    # Remove the volumes section that's causing issues
+    restart: unless-stopped
frontend/package.json
ADDED
@@ -0,0 +1,42 @@
+{
+  "name": "careercoach-ui",
+  "version": "0.1.0",
+  "private": true,
+  "dependencies": {
+    "@types/node": "^16.18.0",
+    "@types/react": "^18.2.0",
+    "@types/react-dom": "^18.2.0",
+    "@types/styled-components": "^5.1.26",
+    "axios": "^1.6.0",
+    "react": "^18.2.0",
+    "react-dom": "^18.2.0",
+    "react-scripts": "5.0.1",
+    "styled-components": "^6.0.0",
+    "typescript": "^4.9.5"
+  },
+  "scripts": {
+    "start": "react-scripts start",
+    "build": "react-scripts build",
+    "test": "react-scripts test",
+    "eject": "react-scripts eject"
+  },
+  "eslintConfig": {
+    "extends": [
+      "react-app",
+      "react-app/jest"
+    ]
+  },
+  "browserslist": {
+    "production": [
+      ">0.2%",
+      "not dead",
+      "not op_mini all"
+    ],
+    "development": [
+      "last 1 chrome version",
+      "last 1 firefox version",
+      "last 1 safari version"
+    ]
+  },
+  "proxy": "http://localhost:7860"
+}
frontend/public/index.html
ADDED
@@ -0,0 +1,19 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8" />
+    <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
+    <meta name="viewport" content="width=device-width, initial-scale=1" />
+    <meta name="theme-color" content="#000000" />
+    <meta
+      name="description"
+      content="CareerCoach AI - Your personal career development assistant"
+    />
+    <link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
+    <title>CareerCoach AI</title>
+  </head>
+  <body>
+    <noscript>You need to enable JavaScript to run this app.</noscript>
+    <div id="root"></div>
+  </body>
+</html>
frontend/public/manifest.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "short_name": "CareerCoach AI",
+  "name": "CareerCoach AI - Your personal career development assistant",
+  "icons": [
+    {
+      "src": "favicon.ico",
+      "sizes": "64x64 32x32 24x24 16x16",
+      "type": "image/x-icon"
+    }
+  ],
+  "start_url": ".",
+  "display": "standalone",
+  "theme_color": "#000000",
+  "background_color": "#ffffff"
+}
frontend/src/App.css
ADDED
@@ -0,0 +1,6 @@
+.App {
+  text-align: center;
+  height: 100vh;
+  display: flex;
+  flex-direction: column;
+}
frontend/src/App.tsx
ADDED
@@ -0,0 +1,13 @@
+import React from 'react';
+import ChatBot from './components/ChatBot';
+import './App.css';
+
+function App() {
+  return (
+    <div className="App">
+      <ChatBot />
+    </div>
+  );
+}
+
+export default App;
frontend/src/components/ChatBot.tsx
ADDED
@@ -0,0 +1,97 @@
+import React, { useState, useEffect } from 'react';
+import styled from 'styled-components';
+import axios from 'axios';
+import ChatHeader from './ChatHeader';
+import ChatWindow from './ChatWindow';
+import ChatInput from './ChatInput';
+import { Message, ChatResponse } from '../types';
+
+const ChatContainer = styled.div`
+  display: flex;
+  flex-direction: column;
+  height: 100vh;
+  max-width: 800px;
+  margin: 0 auto;
+  border: 1px solid #ddd;
+  box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
+`;
+
+const ChatBot: React.FC = () => {
+  const [messages, setMessages] = useState<Message[]>([]);
+  const [threadId, setThreadId] = useState<string | null>(null);
+  const [isLoading, setIsLoading] = useState(false);
+
+  // Add welcome message when component mounts
+  useEffect(() => {
+    setMessages([
+      {
+        role: 'assistant',
+        content: 'Hello! I\'m CareerCoach AI, your personal career development assistant. How can I help you today?'
+      }
+    ]);
+  }, []);
+
+  const sendMessage = async (content: string) => {
+    // Add user message to chat
+    const userMessage: Message = { role: 'user', content };
+    setMessages(prev => [...prev, userMessage]);
+
+    setIsLoading(true);
+
+    try {
+      // Send message to API
+      const response = await axios.post<ChatResponse>('/agent/query', {
+        query: content,
+        thread_id: threadId,
+        context: {}
+      });
+
+      // Update thread ID
+      if (response.data.thread_id) {
+        setThreadId(response.data.thread_id);
+      }
+
+      // Add assistant response to chat
+      const assistantMessage: Message = {
+        role: 'assistant',
+        content: response.data.response,
+        thread_id: response.data.thread_id
+      };
+
+      setMessages(prev => [...prev, assistantMessage]);
+    } catch (error) {
+      console.error('Error sending message:', error);
+
+      // Add error message
+      setMessages(prev => [
+        ...prev,
+        {
+          role: 'assistant',
+          content: 'Sorry, I encountered an error. Please try again later.'
+        }
+      ]);
+    } finally {
+      setIsLoading(false);
+    }
+  };
+
+  const clearChat = () => {
+    setMessages([
+      {
+        role: 'assistant',
+        content: 'Hello! I\'m CareerCoach AI, your personal career development assistant. How can I help you today?'
+      }
+    ]);
+    setThreadId(null);
+  };
+
+  return (
+    <ChatContainer>
+      <ChatHeader onClearChat={clearChat} />
+      <ChatWindow messages={messages} isLoading={isLoading} />
+      <ChatInput onSendMessage={sendMessage} isLoading={isLoading} />
+    </ChatContainer>
+  );
+};
+
+export default ChatBot;
frontend/src/components/ChatHeader.tsx
ADDED
@@ -0,0 +1,56 @@
+import React from 'react';
+import styled from 'styled-components';
+
+const HeaderContainer = styled.div`
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  padding: 15px 20px;
+  background-color: #0084ff;
+  color: white;
+  font-weight: bold;
+  font-size: 18px;
+  box-shadow: 0 2px 5px rgba(0, 0, 0, 0.1);
+`;
+
+const Title = styled.div`
+  display: flex;
+  align-items: center;
+`;
+
+const Logo = styled.div`
+  margin-right: 10px;
+  font-size: 24px;
+`;
+
+const ClearButton = styled.button`
+  background-color: transparent;
+  color: white;
+  border: 1px solid white;
+  border-radius: 4px;
+  padding: 5px 10px;
+  cursor: pointer;
+  font-size: 14px;
+
+  &:hover {
+    background-color: rgba(255, 255, 255, 0.1);
+  }
+`;
+
+interface ChatHeaderProps {
+  onClearChat: () => void;
+}
+
+const ChatHeader: React.FC<ChatHeaderProps> = ({ onClearChat }) => {
+  return (
+    <HeaderContainer>
+      <Title>
+        <Logo>👨💼</Logo>
+        CareerCoach AI
+      </Title>
+      <ClearButton onClick={onClearChat}>New Chat</ClearButton>
+    </HeaderContainer>
+  );
+};
+
+export default ChatHeader;
frontend/src/components/ChatInput.tsx
ADDED
@@ -0,0 +1,69 @@
+import React, { useState } from 'react';
+import styled from 'styled-components';
+
+const InputContainer = styled.div`
+  display: flex;
+  padding: 10px;
+  background-color: #f5f5f5;
+  border-top: 1px solid #ddd;
+`;
+
+const Input = styled.input`
+  flex: 1;
+  padding: 10px 15px;
+  border: 1px solid #ddd;
+  border-radius: 20px;
+  font-size: 16px;
+  outline: none;
+`;
+
+const SendButton = styled.button`
+  margin-left: 10px;
+  padding: 10px 15px;
+  background-color: #0084ff;
+  color: white;
+  border: none;
+  border-radius: 20px;
+  cursor: pointer;
+  font-size: 16px;
+
+  &:hover {
+    background-color: #0073e6;
+  }
+`;
+
+interface ChatInputProps {
+  onSendMessage: (message: string) => void;
+  isLoading: boolean;
+}
+
+const ChatInput: React.FC<ChatInputProps> = ({ onSendMessage, isLoading }) => {
+  const [message, setMessage] = useState('');
+
+  const handleSubmit = (e: React.FormEvent) => {
+    e.preventDefault();
+    if (message.trim() && !isLoading) {
+      onSendMessage(message);
+      setMessage('');
+    }
+  };
+
+  return (
+    <InputContainer>
+      <form onSubmit={handleSubmit} style={{ display: 'flex', width: '100%' }}>
+        <Input
+          type="text"
+          value={message}
+          onChange={(e) => setMessage(e.target.value)}
+          placeholder="Type your message..."
+          disabled={isLoading}
+        />
+        <SendButton type="submit" disabled={isLoading}>
+          {isLoading ? 'Sending...' : 'Send'}
+        </SendButton>
+      </form>
+    </InputContainer>
+  );
+};
+
+export default ChatInput;
frontend/src/components/ChatMessage.tsx
ADDED
@@ -0,0 +1,37 @@
+import React from 'react';
+import styled from 'styled-components';
+import { Message } from '../types';
+
+const MessageContainer = styled.div<{ isUser: boolean }>`
+  display: flex;
+  margin-bottom: 10px;
+  justify-content: ${props => props.isUser ? 'flex-end' : 'flex-start'};
+`;
+
+const MessageBubble = styled.div<{ isUser: boolean }>`
+  max-width: 70%;
+  padding: 10px 15px;
+  border-radius: 18px;
+  background-color: ${props => props.isUser ? '#0084ff' : '#f0f0f0'};
+  color: ${props => props.isUser ? 'white' : 'black'};
+  text-align: left;
+  box-shadow: 0 1px 2px rgba(0, 0, 0, 0.1);
+`;
+
+interface ChatMessageProps {
+  message: Message;
+}
+
+const ChatMessage: React.FC<ChatMessageProps> = ({ message }) => {
+  const isUser = message.role === 'user';
+
+  return (
+    <MessageContainer isUser={isUser}>
+      <MessageBubble isUser={isUser}>
+        {message.content}
+      </MessageBubble>
+    </MessageContainer>
+  );
+};
+
+export default ChatMessage;
frontend/src/components/ChatWindow.tsx
ADDED
@@ -0,0 +1,80 @@
+import React, { useEffect, useRef } from 'react';
+import styled from 'styled-components';
+import ChatMessage from './ChatMessage';
+import { Message } from '../types';
+
+const WindowContainer = styled.div`
+  flex: 1;
+  overflow-y: auto;
+  padding: 20px;
+  background-color: white;
+`;
+
+const LoadingIndicator = styled.div`
+  display: flex;
+  justify-content: flex-start;
+  margin-bottom: 10px;
+
+  .dot {
+    width: 8px;
+    height: 8px;
+    margin: 0 4px;
+    background-color: #ccc;
+    border-radius: 50%;
+    animation: pulse 1.5s infinite;
+  }
+
+  .dot:nth-child(2) {
+    animation-delay: 0.3s;
+  }
+
+  .dot:nth-child(3) {
+    animation-delay: 0.6s;
+  }
+
+  @keyframes pulse {
+    0%, 100% {
+      opacity: 0.4;
+    }
+    50% {
+      opacity: 1;
+    }
+  }
+`;
+
+interface ChatWindowProps {
+  messages: Message[];
+  isLoading: boolean;
+}
+
+const ChatWindow: React.FC<ChatWindowProps> = ({ messages, isLoading }) => {
+  const messagesEndRef = useRef<HTMLDivElement>(null);
+
+  const scrollToBottom = () => {
+    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
+  };
+
+  useEffect(() => {
+    scrollToBottom();
+  }, [messages, isLoading]);
+
+  return (
+    <WindowContainer>
+      {messages.map((message, index) => (
+        <ChatMessage key={index} message={message} />
+      ))}
+
+      {isLoading && (
+        <LoadingIndicator>
+          <div className="dot"></div>
+          <div className="dot"></div>
+          <div className="dot"></div>
+        </LoadingIndicator>
+      )}
+
+      <div ref={messagesEndRef} />
+    </WindowContainer>
+  );
+};
+
+export default ChatWindow;
frontend/src/index.css
ADDED
@@ -0,0 +1,14 @@
+body {
+  margin: 0;
+  font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
+    'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
+    sans-serif;
+  -webkit-font-smoothing: antialiased;
+  -moz-osx-font-smoothing: grayscale;
+  background-color: #f5f5f5;
+}
+
+code {
+  font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
+    monospace;
+}
frontend/src/index.tsx
ADDED
@@ -0,0 +1,13 @@
+import React from 'react';
+import ReactDOM from 'react-dom/client';
+import './index.css';
+import App from './App';
+
+const root = ReactDOM.createRoot(
+  document.getElementById('root') as HTMLElement
+);
+root.render(
+  <React.StrictMode>
+    <App />
+  </React.StrictMode>
+);
frontend/src/types.ts
ADDED
@@ -0,0 +1,12 @@
+export interface Message {
+  role: 'user' | 'assistant';
+  content: string;
+  thread_id?: string;
+}
+
+export interface ChatResponse {
+  status: string;
+  thread_id: string;
+  response: string;
+  full_thought_process?: string;
+}
frontend/tsconfig.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "compilerOptions": {
+    "target": "es5",
+    "lib": [
+      "dom",
+      "dom.iterable",
+      "esnext"
+    ],
+    "allowJs": true,
+    "skipLibCheck": true,
+    "esModuleInterop": true,
+    "allowSyntheticDefaultImports": true,
+    "strict": true,
+    "forceConsistentCasingInFileNames": true,
+    "noFallthroughCasesInSwitch": true,
+    "module": "esnext",
+    "moduleResolution": "node",
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "noEmit": true,
+    "jsx": "react-jsx"
+  },
+  "include": [
+    "src"
+  ]
+}
main.py
CHANGED
@@ -3,13 +3,13 @@ from app import app
 
 def main():
     """
-    Run the FastAPI server with integrated
+    Run the FastAPI server with integrated React UI
     """
     port = 7860
     host = "0.0.0.0"
-
+
     print(f"Starting server on {host}:{port}...")
     uvicorn.run(app, host=host, port=port)
 
 if __name__ == "__main__":
-    main()
+    main()
prompts.yaml
CHANGED
@@ -1,22 +1,14 @@
 system_prompt: |-
-  You are
+  You are CareerCoach AI, a helpful assistant powered by a large language model. While your primary expertise is in career development, you can answer questions on a wide range of topics using your tools.
+
+  Your primary areas of expertise include:
   - Career planning and goal setting
-  - Career coaching and mentoring
   - Professional development advice
   - Job search strategies
   - Skill development recommendations
   - Industry insights and trends
-
-
-  IDENTITY:
-  - If the user greets you (e.g., "Hi", "Hello"), respond with a simple, friendly greeting as "CareerCoach AI" and do not use tools.
-  - Always introduce yourself as "CareerCoach AI, your personal career development assistant"
-  - Maintain a professional, supportive, and encouraging tone
-  - Always use the name "CareerCoach AI"
-  - Always answer only if you have relevant information to provide, otherwise say "I'm sorry, I don't have information on that topic."
-  - DO NOT make up information, if you don't know the answer, say "I'm sorry, I don't have information on that topic."
-  - DO NOT HALLUCINATE
-  - Focus on career-related guidance and development
+
+  IMPORTANT: For ANY factual or knowledge-based questions (like "Who is Charlie Chaplin?" or "Who is Donald Trump?"), ALWAYS use your tools (wikipedia_search or internet_search) to find accurate information.
 
 template: |-
   {system_prompt}
@@ -24,15 +16,37 @@ template: |-
   You have access to the following tools:
   {tools}
 
-
+  ALWAYS follow this exact format:
 
   Question: {input}
-  Thought:
-
-
-
-
-
-
+  Thought: I need to determine if this requires using a tool or if I can answer directly.
+
+  For ANY factual questions or requests for information about people, places, events, or concepts:
+  Thought: I should use a tool to find accurate information about this.
+  Action: the action to take, should be one of [{tool_names}]
+  Action Input: [search term]
+  Observation: [Result from tool]
+  ... (continue if needed)
+  Thought: Now I can provide an accurate answer based on the information.
+  Final Answer: [Your comprehensive answer]
+
+  For career advice or questions within your knowledge:
+  Thought: I can answer this directly based on my knowledge.
+  Final Answer: [Your helpful response]
+
+  For coaching or mentoring approach:
+  Thought: I can search information in wikipedia or internet with my tools
+  Action: the action to take, should be one of [{tool_names}]
+  Action Input: [search term]
+  Observation: [Result from tool]
+  ... (continue if needed)
+  Thought: Now I can provide an accurate answer based on the information.
+  Final Answer: [Your comprehensive answer]
+
+  For greetings or simple questions:
+  Thought: This is a simple greeting or question.
+  Final Answer: [Your friendly response]
+
+  REMEMBER: ALWAYS use wikipedia_search or internet_search for factual questions about people, places, or events.
 
   {agent_scratchpad}
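As a rough illustration of how this template is consumed, the sketch below assumes prompts.yaml is parsed into a dict with "system_prompt" and "template" keys and wired into PromptTemplate via the partial_variables={"system_prompt": ...} line visible in the app.py diff above; the actual loading code is not part of this commit, so treat the file handling here as an assumption.

# Illustrative sketch only; app.py's real loading code is not shown in this diff.
import yaml
from langchain_core.prompts import PromptTemplate

with open("prompts.yaml") as f:
    prompt_templates = yaml.safe_load(f)

prompt = PromptTemplate.from_template(
    prompt_templates["template"],
    partial_variables={"system_prompt": prompt_templates["system_prompt"]},
)

# The ReAct agent fills in the remaining placeholders at run time.
print(sorted(prompt.input_variables))  # ['agent_scratchpad', 'input', 'tool_names', 'tools']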
requirements.txt
CHANGED
@@ -2,7 +2,6 @@ markdownify
 requests>=2.31.0
 fastapi>=0.104.1
 uvicorn[standard]>=0.24.0
-gradio
 langchain>=0.1.0
 langchain-core>=0.1.0
 langchain-community>=0.0.13
@@ -17,4 +16,5 @@ python-multipart>=0.0.6
 wikipedia
 duckduckgo-search
 huggingface_hub
-hf_xet
+hf_xet
+aiofiles