boyinfuture committed on
Commit 6ffbf76 · 1 Parent(s): 683938d

more efficient backend
Dockerfile CHANGED
@@ -1,28 +1,19 @@
-# This is the single Dockerfile for our entire backend on Hugging Face Spaces
 FROM python:3.11-slim
 
-# Set a single working directory
 WORKDIR /app
 
-# Install system dependencies
 RUN apt-get update && apt-get install -y git redis-server
 
-# Copy all requirements and model files first for better caching
 COPY backend/requirements.txt .
 COPY ml_models ./ml_models
 
-# Install Python packages
 RUN pip install --no-cache-dir -r requirements.txt
 
-# Copy the entire backend source code
 COPY backend .
 
-# Create a startup script
 COPY startup.sh .
 RUN chmod +x startup.sh
 
-# Expose the port FastAPI will run on
 EXPOSE 7860
 
-# The command to run our startup script
 CMD ["./startup.sh"]

backend/Dockerfile CHANGED
@@ -14,43 +14,3 @@ WORKDIR /code/app
 
 
 
-# FROM python:3.11-slim
-
-# WORKDIR /code
-
-# RUN apt-get update && apt-get install -y git
-
-# COPY ./backend/requirements.txt .
-# RUN pip install --no-cache-dir --upgrade -r requirements.txt
-
-# COPY ./ml_models /code/sentiment_model
-
-# WORKDIR /code/app
-
-# # This is the default command for our web server
-# CMD ["python", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "10000"]
-
-
-
-
-
-
-# FROM python:3.11-slim
-
-# WORKDIR /code
-
-# RUN apt-get update && apt-get install -y git
-
-# COPY ./backend/requirements.txt .
-# # Install Gunicorn for a production-ready server
-# RUN pip install gunicorn
-# RUN pip install --no-cache-dir -r requirements.txt
-
-# COPY ./ml_models /code/sentiment_model
-
-# # Copy the application code last. All code will live in /code now.
-# COPY ./backend .
-
-# # The default command is to start the web server.
-# # Render's free web services require port 10000.
-# CMD ["gunicorn", "-w", "2", "-k", "uvicorn.workers.UvicornWorker", "main:app", "--bind", "0.0.0.0:10000"]

backend/celery_worker.py CHANGED
@@ -5,7 +5,6 @@ celery = Celery(
     "quantitative_analysis_platform",
     broker=settings.CELERY_BROKER_URL,
     backend=settings.CELERY_RESULT_BACKEND,
-    # This is the corrected list. We only have one task file now.
     include=[
         "tasks.main_task"
    ]

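For context, a minimal sketch of how the single task listed in include is declared and enqueued elsewhere in the backend (assuming the Celery app above is importable as celery_worker.celery; the job id and ticker below are illustrative):

    # tasks/main_task.py -- the one task module referenced in include
    from celery_worker import celery

    @celery.task
    def run_full_analysis(job_id: str, ticker: str):
        """Orchestrate the full analysis pipeline for one job."""
        ...

    # In the API layer, the POST /jobs handler enqueues it asynchronously:
    from tasks.main_task import run_full_analysis

    run_full_analysis.delay("00000000-0000-0000-0000-000000000000", "RELIANCE.NS")
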
backend/core/config.py CHANGED
@@ -1,31 +1,16 @@
-# from pydantic_settings import BaseSettings, SettingsConfigDict
-
-# class Settings(BaseSettings):
-#     DATABASE_URL: str
-#     CELERY_BROKER_URL: str
-#     CELERY_RESULT_BACKEND: str
-#     GOOGLE_API_KEY: str
-
-#     model_config = SettingsConfigDict(env_file=".env")
-
-# settings = Settings()
 
 
 
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 class Settings(BaseSettings):
-    # These variables will now be loaded from the Hugging Face secrets UI
     DATABASE_URL: str
     GOOGLE_API_KEY: str
-
-    # These variables are hardcoded for the Hugging Face environment
-    # because Redis is running in the same container.
+
     CELERY_BROKER_URL: str = "redis://localhost:6379/0"
     CELERY_RESULT_BACKEND: str = "redis://localhost:6379/0"
 
-    # This tells Pydantic to first look for system environment variables,
-    # and then fall back to a .env file if one exists.
+
     model_config = SettingsConfigDict(env_file=".env", env_file_encoding='utf-8', extra='ignore')
 
 settings = Settings()

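A short sketch of how these settings resolve with pydantic-settings v2 (assuming DATABASE_URL and GOOGLE_API_KEY are already present as environment variables or in .env): real environment variables win, then .env entries, then the hardcoded defaults on the model.

    import os
    from core.config import Settings  # the class shown in this diff

    # A Space secret (environment variable) overrides the hardcoded default:
    os.environ["CELERY_BROKER_URL"] = "redis://example-host:6379/1"
    print(Settings().CELERY_BROKER_URL)   # redis://example-host:6379/1

    # Without it, the default for the same-container Redis applies:
    del os.environ["CELERY_BROKER_URL"]
    print(Settings().CELERY_BROKER_URL)   # redis://localhost:6379/0
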
backend/main.py CHANGED
@@ -1,71 +1,3 @@
-# from fastapi import FastAPI, Depends, HTTPException
-# from fastapi.middleware.cors import CORSMiddleware
-# from sqlalchemy.orm import Session
-# from sqlalchemy import desc # Import desc for ordering
-# from uuid import UUID
-# from typing import List # Import List for the history endpoint
-# import models.analysis_job as model
-# import schemas
-# from core.database import SessionLocal, engine
-# from tasks.main_task import run_full_analysis
-
-# model.Base.metadata.create_all(bind=engine)
-
-# app = FastAPI(
-#     title="Quantitative Analysis Platform API",
-#     version="0.1.0",
-# )
-
-# app.add_middleware(
-#     CORSMiddleware,
-#     allow_origins=["*"],
-#     allow_credentials=True,
-#     allow_methods=["*"],
-#     allow_headers=["*"],
-# )
-
-# def get_db():
-#     db = SessionLocal()
-#     try:
-#         yield db
-#     finally:
-#         db.close()
-
-# @app.post("/jobs", response_model=schemas.Job, status_code=201)
-# def create_analysis_job(job_request: schemas.JobCreate, db: Session = Depends(get_db)):
-#     db_job = model.AnalysisJob(ticker=job_request.ticker.upper())
-#     db.add(db_job)
-#     db.commit()
-#     db.refresh(db_job)
-
-#     run_full_analysis.delay(str(db_job.id), db_job.ticker)
-
-#     return db_job
-
-# @app.get("/jobs/{job_id}", response_model=schemas.Job)
-# def get_job_status(job_id: UUID, db: Session = Depends(get_db)):
-#     db_job = db.query(model.AnalysisJob).filter(model.AnalysisJob.id == job_id).first()
-#     if db_job is None:
-#         raise HTTPException(status_code=404, detail="Job not found")
-#     return db_job
-
-# # --- NEW ENDPOINT FOR HISTORY PANEL ---
-# @app.get("/jobs", response_model=List[schemas.Job])
-# def get_jobs_history(db: Session = Depends(get_db)):
-#     db_jobs = db.query(model.AnalysisJob).order_by(desc(model.AnalysisJob.created_at)).limit(20).all()
-#     return db_jobs
-
-
-
-
-
-
-
-
-
-
-
-
 from fastapi import FastAPI, Depends, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from sqlalchemy.orm import Session
@@ -84,9 +16,6 @@ app = FastAPI(
     version="0.1.0",
 )
 
-# --- THIS IS THE FINAL FIX ---
-# This configuration allows your Vercel app and all its preview deployments
-# to communicate with the backend, as well as your local development server.
 app.add_middleware(
     CORSMiddleware,
     allow_origin_regex=r"https?://.*\.vercel\.app|http://localhost:5173",
@@ -94,7 +23,6 @@ app.add_middleware(
     allow_methods=["*"],
     allow_headers=["*"],
 )
-# --- END OF FIX ---
 
 def get_db():
     db = SessionLocal()

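The allow_origin_regex above is what admits every Vercel preview deployment plus the local Vite dev server. A quick sanity check of the pattern (Starlette matches the Origin header against it as a full match; the origins below are illustrative):

    import re

    pattern = re.compile(r"https?://.*\.vercel\.app|http://localhost:5173")

    for origin in [
        "https://my-app.vercel.app",               # production deployment
        "https://my-app-git-fix-cors.vercel.app",  # preview deployment
        "http://localhost:5173",                   # local Vite dev server
        "https://untrusted.example.com",           # rejected
    ]:
        print(origin, "->", bool(pattern.fullmatch(origin)))
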
backend/models/analysis_job.py CHANGED
@@ -1,18 +1,3 @@
-# from sqlalchemy import Column, String, JSON
-# from sqlalchemy.dialects.postgresql import UUID
-# import uuid
-# from core.database import Base
-
-# class AnalysisJob(Base):
-#     __tablename__ = "analysis_jobs"
-
-#     id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
-#     ticker = Column(String, nullable=False, index=True)
-#     status = Column(String, default="PENDING", nullable=False)
-#     result = Column(JSON, nullable=True)
-
-
-
 from sqlalchemy import Column, String, JSON, DateTime
 from sqlalchemy.dialects.postgresql import UUID
 import uuid

backend/requirements.txt CHANGED
@@ -1,35 +1,27 @@
-# FastAPI and Server
 fastapi
 uvicorn[standard]
 gunicorn
 pydantic-settings
 
-# Database
 sqlalchemy
 psycopg2-binary
 alembic
 
-# Task Queue
 celery
 redis
 
-# --- CORE DATA SCIENCE LIBS (PINNING NUMPY) ---
-numpy<2.0 # CRITICAL FIX: Pin numpy to a version compatible with pandas-ta
-pandas
+numpy<2.0
 pandas-ta
 matplotlib
 
-# Data Agent & Prediction
 yfinance
 
-# Intelligence Agent
 newspaper3k
 lxml_html_clean
 snscrape@git+https://github.com/JustAnotherArchivist/snscrape.git@master
 requests
 beautifulsoup4
 
-# AI / ML / LLM
 torch
 transformers
 sentence-transformers

backend/tasks/analyst_tasks.py CHANGED
@@ -1,110 +1,3 @@
1
- # from celery_worker import celery
2
- # from core.database import SessionLocal
3
- # from models.analysis_job import AnalysisJob
4
- # from tools.analyst_tools import get_llm_analysis
5
- # from uuid import UUID
6
-
7
- # @celery.task
8
- # def run_llm_analysis(job_id: str):
9
- # db = SessionLocal()
10
- # job = None
11
- # try:
12
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
13
- # if not job or not job.result:
14
- # raise ValueError("Job not found or has no initial data.")
15
-
16
- # job.status = "ANALYZING" # New status for the frontend
17
- # db.commit()
18
-
19
- # current_data = job.result
20
- # ticker = current_data.get("ticker")
21
- # company_name = current_data.get("company_name")
22
- # intelligence_briefing = current_data.get("intelligence_briefing", {})
23
-
24
- # llm_report_data = get_llm_analysis(ticker, company_name, intelligence_briefing)
25
-
26
- # new_result = current_data.copy()
27
- # new_result['llm_analysis'] = llm_report_data
28
- # job.result = new_result
29
-
30
- # job.status = "SUCCESS"
31
- # db.commit()
32
-
33
- # print(f"LLM analysis for job {job_id} completed successfully.")
34
-
35
- # except Exception as e:
36
- # print(f"Error during LLM analysis for job {job_id}: {e}")
37
- # if job:
38
- # job.status = "FAILED"
39
- # error_data = job.result if job.result else {}
40
- # error_data['error'] = f"LLM analysis failed: {str(e)}"
41
- # job.result = error_data
42
- # db.commit()
43
- # finally:
44
- # db.close()
45
-
46
-
47
-
48
-
49
-
50
-
51
-
52
-
53
-
54
-
55
- # from celery_worker import celery
56
- # from core.database import SessionLocal
57
- # from models.analysis_job import AnalysisJob
58
- # from tools.analyst_tools import get_llm_analysis
59
- # from uuid import UUID
60
-
61
- # @celery.task
62
- # def run_llm_analysis(job_id: str):
63
- # with SessionLocal() as db:
64
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
65
- # if not job or not job.result:
66
- # print(f"Job {job_id} not found or has no data for analyst.")
67
- # return
68
-
69
- # try:
70
- # job.status = "ANALYZING"
71
- # db.commit()
72
-
73
- # current_data = job.result
74
- # ticker = current_data.get("ticker")
75
- # company_name = current_data.get("company_name")
76
- # intelligence_briefing = current_data.get("intelligence_briefing", {})
77
-
78
- # llm_report_data = get_llm_analysis(ticker, company_name, intelligence_briefing)
79
-
80
- # new_result = dict(current_data)
81
- # new_result['llm_analysis'] = llm_report_data
82
- # job.result = new_result
83
-
84
- # job.status = "SUCCESS"
85
- # db.commit()
86
-
87
- # print(f"LLM analysis for job {job_id} completed successfully.")
88
- # return "LLM analysis successful."
89
- # except Exception as e:
90
- # print(f"Error during LLM analysis for job {job_id}: {e}")
91
- # job.status = "FAILED"
92
- # error_data = job.result if job.result else {}
93
- # error_data['error'] = f"LLM analysis failed: {str(e)}"
94
- # job.result = error_data
95
- # db.commit()
96
- # return f"LLM analysis failed: {e}"
97
-
98
-
99
-
100
-
101
-
102
-
103
-
104
-
105
-
106
-
107
-
108
  from celery_worker import celery
109
  from tools.analyst_tools import get_llm_analysis
110
 
backend/tasks/data_tasks.py CHANGED
@@ -1,93 +1,3 @@
1
- # from celery_worker import celery
2
- # from core.database import SessionLocal
3
- # from models.analysis_job import AnalysisJob
4
- # from tools.data_tools import get_stock_data
5
- # from uuid import UUID
6
-
7
- # @celery.task
8
- # def run_data_analysis(job_id: str, ticker: str):
9
- # db = SessionLocal()
10
- # job = None
11
- # final_result = ""
12
- # try:
13
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
14
- # if not job:
15
- # raise ValueError(f"Job {job_id} not found in database.")
16
-
17
- # print(f"Status - DATA_FETCHING for job {job_id}...")
18
- # job.status = "DATA_FETCHING"
19
- # db.commit()
20
-
21
- # data = get_stock_data(ticker)
22
-
23
- # if "error" in data:
24
- # raise ValueError(data["error"])
25
-
26
- # job.result = data
27
- # db.commit()
28
- # print(f"Data analysis for job {job_id} completed successfully.")
29
-
30
- # final_result = str(job.result)
31
-
32
- # except Exception as e:
33
- # print(f"Error during data analysis for job {job_id}: {e}")
34
- # if job:
35
- # job.status = "FAILED"
36
- # job.result = {"error": f"Data analysis failed: {str(e)}"}
37
- # db.commit()
38
- # final_result = f"Error: {e}"
39
- # finally:
40
- # db.close()
41
-
42
- # return final_result
43
-
44
-
45
-
46
-
47
-
48
-
49
-
50
- # from celery_worker import celery
51
- # from core.database import SessionLocal
52
- # from models.analysis_job import AnalysisJob
53
- # from tools.data_tools import get_stock_data
54
- # from uuid import UUID
55
-
56
- # @celery.task
57
- # def run_data_analysis(job_id: str, ticker: str):
58
- # with SessionLocal() as db:
59
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
60
- # if not job:
61
- # print(f"Job {job_id} not found.")
62
- # return
63
-
64
- # try:
65
- # job.status = "DATA_FETCHING"
66
- # db.commit()
67
-
68
- # data = get_stock_data(ticker)
69
- # if "error" in data:
70
- # raise ValueError(data["error"])
71
-
72
- # job.result = data
73
- # db.commit()
74
- # print(f"Data analysis for job {job_id} completed successfully.")
75
- # return "Data fetching successful."
76
- # except Exception as e:
77
- # print(f"Error during data analysis for job {job_id}: {e}")
78
- # job.status = "FAILED"
79
- # job.result = {"error": f"Data analysis failed: {str(e)}"}
80
- # db.commit()
81
- # return f"Data fetching failed: {e}"
82
-
83
-
84
-
85
-
86
-
87
-
88
-
89
-
90
-
91
  from celery_worker import celery
92
  from tools.data_tools import get_stock_data
93
 
backend/tasks/main_task.py CHANGED
@@ -1,113 +1,3 @@
1
- # from celery_worker import celery
2
- # from core.database import SessionLocal
3
- # from models.analysis_job import AnalysisJob
4
- # from tools.data_tools import get_stock_data
5
- # from tools.news_tools import get_combined_news_and_sentiment
6
- # from tools.analyst_tools import get_llm_analysis
7
- # from uuid import UUID
8
- # import json
9
-
10
- # @celery.task
11
- # def run_full_analysis(job_id: str, ticker: str):
12
- # print(f"\n--- [START] Full Analysis for Job ID: {job_id} ---")
13
-
14
- # # --- Stage 1: Data Fetching ---
15
- # try:
16
- # with SessionLocal() as db:
17
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
18
- # if not job: raise ValueError("Job not found")
19
- # job.status = "DATA_FETCHING"
20
- # db.commit()
21
- # print("[LOG] STATUS UPDATE: DATA_FETCHING")
22
-
23
- # data_result = get_stock_data(ticker)
24
- # if "error" in data_result: raise ValueError(data_result['error'])
25
- # company_name = data_result.get("company_name", ticker)
26
-
27
- # with SessionLocal() as db:
28
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
29
- # job.result = data_result
30
- # db.commit()
31
- # db.refresh(job) # Force reload from DB
32
- # print(f"[LOG] DB SAVE 1 (Data): Result keys are now: {list(job.result.keys())}")
33
-
34
- # except Exception as e:
35
- # print(f"!!! [FAILURE] Stage 1 (Data): {e}")
36
- # # ... error handling ...
37
- # return
38
-
39
- # # --- Stage 2: Intelligence Gathering ---
40
- # try:
41
- # with SessionLocal() as db:
42
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
43
- # job.status = "INTELLIGENCE_GATHERING"
44
- # db.commit()
45
- # print("[LOG] STATUS UPDATE: INTELLIGENCE_GATHERING")
46
-
47
- # intelligence_result = get_combined_news_and_sentiment(ticker, company_name)
48
-
49
- # with SessionLocal() as db:
50
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
51
- # current_result = dict(job.result)
52
- # current_result['intelligence_briefing'] = intelligence_result
53
- # job.result = current_result
54
- # db.commit()
55
- # db.refresh(job) # Force reload
56
- # print(f"[LOG] DB SAVE 2 (Intelligence): Result keys are now: {list(job.result.keys())}")
57
- # except Exception as e:
58
- # print(f"!!! [FAILURE] Stage 2 (Intelligence): {e}")
59
- # # ... error handling ...
60
- # return
61
-
62
- # # --- Stage 3: LLM Analysis ---
63
- # try:
64
- # with SessionLocal() as db:
65
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
66
- # job.status = "ANALYZING"
67
- # db.commit()
68
- # print("[LOG] STATUS UPDATE: ANALYZING")
69
-
70
- # with SessionLocal() as db:
71
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
72
- # data_for_llm = job.result
73
-
74
- # llm_result = get_llm_analysis(ticker, company_name, data_for_llm.get("intelligence_briefing", {}))
75
- # if "error" in llm_result: raise ValueError(llm_result['error'])
76
-
77
- # # --- Final Assembly and Save ---
78
- # print("[LOG] Finalizing results...")
79
- # with SessionLocal() as db:
80
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
81
- # final_result_data = dict(job.result)
82
- # final_result_data['llm_analysis'] = llm_result
83
- # job.result = final_result_data
84
- # job.status = "SUCCESS"
85
- # db.commit()
86
- # db.refresh(job)
87
- # print(f"[LOG] DB SAVE 3 (Final): Result keys are now: {list(job.result.keys())}")
88
-
89
- # print(f"--- [SUCCESS] Full analysis for {job_id} complete. ---")
90
-
91
- # except Exception as e:
92
- # print(f"!!! [FAILURE] Stage 3 (LLM): {e}")
93
- # with SessionLocal() as db:
94
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
95
- # if job:
96
- # job.status = "FAILED"
97
- # error_data = job.result if job.result else {}
98
- # error_data['error'] = str(e)
99
- # job.result = error_data
100
- # db.commit()
101
-
102
-
103
-
104
-
105
-
106
-
107
-
108
-
109
-
110
-
111
  from celery_worker import celery
112
  from core.database import SessionLocal
113
  from models.analysis_job import AnalysisJob
@@ -123,9 +13,7 @@ def run_full_analysis(job_id: str, ticker: str):
     The single, main task that orchestrates the entire analysis pipeline.
     """
     print(f"\n--- [START] Full Analysis for Job ID: {job_id} ---")
-
-    # We will use one job object throughout and update it, committing as we go.
-    # This requires careful session management.
+
     db = SessionLocal()
     job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
 

backend/tasks/news_tasks.py CHANGED
@@ -1,348 +1,7 @@
1
- # # tasks/news_tasks.py - SIMPLIFIED VERSION THAT ALWAYS WORKS
2
-
3
- # from celery_worker import celery
4
- # from core.database import SessionLocal
5
- # from models.analysis_job import AnalysisJob
6
- # from uuid import UUID
7
- # import logging
8
- # from datetime import datetime
9
- # import yfinance as yf
10
-
11
- # logger = logging.getLogger(__name__)
12
-
13
- # def get_stock_basic_info(ticker: str):
14
- # """Get basic stock information to create realistic content"""
15
- # try:
16
- # stock = yf.Ticker(ticker)
17
- # info = stock.info
18
- # return {
19
- # 'name': info.get('longName', ticker.replace('.NS', '')),
20
- # 'sector': info.get('sector', 'Unknown'),
21
- # 'industry': info.get('industry', 'Unknown'),
22
- # 'current_price': info.get('currentPrice', 0),
23
- # 'previous_close': info.get('previousClose', 0)
24
- # }
25
- # except Exception as e:
26
- # logger.warning(f"Could not get stock info for {ticker}: {e}")
27
- # return {
28
- # 'name': ticker.replace('.NS', ''),
29
- # 'sector': 'Unknown',
30
- # 'industry': 'Unknown',
31
- # 'current_price': 0,
32
- # 'previous_close': 0
33
- # }
34
-
35
- # def create_realistic_articles(ticker: str, company_name: str, stock_info: dict):
36
- # """Create realistic articles based on stock information"""
37
-
38
- # # Calculate price movement for realistic sentiment
39
- # current_price = stock_info.get('current_price', 0)
40
- # previous_close = stock_info.get('previous_close', 0)
41
-
42
- # price_change = 0
43
- # if current_price and previous_close:
44
- # price_change = ((current_price - previous_close) / previous_close) * 100
45
-
46
- # # Generate articles based on actual stock performance
47
- # articles = []
48
-
49
- # if price_change > 2:
50
- # articles.extend([
51
- # {
52
- # "title": f"{company_name} Shares Rally {price_change:.1f}% on Strong Market Sentiment",
53
- # "url": f"https://finance.yahoo.com/quote/{ticker}",
54
- # "source": "Market Analysis",
55
- # "sentiment": "Positive",
56
- # "sentiment_score": 0.8
57
- # },
58
- # {
59
- # "title": f"Investors Show Confidence in {company_name} as Stock Gains Momentum",
60
- # "url": f"https://www.moneycontrol.com/india/stockpricequote/{ticker}",
61
- # "source": "Financial Express",
62
- # "sentiment": "Positive",
63
- # "sentiment_score": 0.7
64
- # }
65
- # ])
66
- # elif price_change < -2:
67
- # articles.extend([
68
- # {
69
- # "title": f"{company_name} Stock Declines {abs(price_change):.1f}% Amid Market Volatility",
70
- # "url": f"https://finance.yahoo.com/quote/{ticker}",
71
- # "source": "Market Watch",
72
- # "sentiment": "Negative",
73
- # "sentiment_score": 0.8
74
- # },
75
- # {
76
- # "title": f"Market Correction Impacts {company_name} Share Price",
77
- # "url": f"https://www.moneycontrol.com/india/stockpricequote/{ticker}",
78
- # "source": "Economic Times",
79
- # "sentiment": "Negative",
80
- # "sentiment_score": 0.6
81
- # }
82
- # ])
83
- # else:
84
- # articles.extend([
85
- # {
86
- # "title": f"{company_name} Stock Shows Steady Performance in Current Market",
87
- # "url": f"https://finance.yahoo.com/quote/{ticker}",
88
- # "source": "Yahoo Finance",
89
- # "sentiment": "Neutral",
90
- # "sentiment_score": 0.5
91
- # },
92
- # {
93
- # "title": f"Technical Analysis: {company_name} Maintains Stable Trading Range",
94
- # "url": f"https://www.moneycontrol.com/india/stockpricequote/{ticker}",
95
- # "source": "Market Analysis",
96
- # "sentiment": "Neutral",
97
- # "sentiment_score": 0.5
98
- # }
99
- # ])
100
-
101
- # # Add sector-specific articles
102
- # sector = stock_info.get('sector', 'Unknown')
103
- # if sector != 'Unknown':
104
- # articles.extend([
105
- # {
106
- # "title": f"{sector} Sector Update: Key Players Including {company_name} in Focus",
107
- # "url": "https://example.com/sector-analysis",
108
- # "source": "Sector Reports",
109
- # "sentiment": "Neutral",
110
- # "sentiment_score": 0.6
111
- # },
112
- # {
113
- # "title": f"Industry Outlook: {stock_info.get('industry', 'Market')} Trends Affecting {company_name}",
114
- # "url": "https://example.com/industry-outlook",
115
- # "source": "Industry Analysis",
116
- # "sentiment": "Positive",
117
- # "sentiment_score": 0.6
118
- # }
119
- # ])
120
-
121
- # # Add general market articles
122
- # articles.extend([
123
- # {
124
- # "title": f"Quarterly Performance Review: {company_name} Financials and Market Position",
125
- # "url": f"https://finance.yahoo.com/quote/{ticker}/financials",
126
- # "source": "Financial Reports",
127
- # "sentiment": "Neutral",
128
- # "sentiment_score": 0.5
129
- # },
130
- # {
131
- # "title": f"Analyst Coverage: Investment Recommendations for {company_name} Stock",
132
- # "url": "https://example.com/analyst-coverage",
133
- # "source": "Research Reports",
134
- # "sentiment": "Positive",
135
- # "sentiment_score": 0.7
136
- # },
137
- # {
138
- # "title": f"Market Sentiment Analysis: Retail vs Institutional Interest in {company_name}",
139
- # "url": "https://example.com/market-sentiment",
140
- # "source": "Market Research",
141
- # "sentiment": "Neutral",
142
- # "sentiment_score": 0.5
143
- # }
144
- # ])
145
-
146
- # return articles[:8] # Return top 8 articles
147
-
148
- # def try_real_news_sources(ticker: str, company_name: str):
149
- # """Attempt to get real news, but don't fail if it doesn't work"""
150
- # real_articles = []
151
-
152
- # try:
153
- # # Try Yahoo Finance news (most reliable)
154
- # logger.info(f"Attempting to fetch real Yahoo Finance news for {ticker}")
155
- # stock = yf.Ticker(ticker)
156
- # news = stock.news
157
-
158
- # if news:
159
- # logger.info(f"Found {len(news)} Yahoo Finance articles")
160
- # for article in news[:5]: # Take first 5
161
- # if article.get('title'):
162
- # # Simple sentiment analysis
163
- # title_lower = article['title'].lower()
164
- # if any(word in title_lower for word in ['gain', 'rise', 'growth', 'profit', 'strong']):
165
- # sentiment = 'Positive'
166
- # score = 0.7
167
- # elif any(word in title_lower for word in ['fall', 'decline', 'loss', 'weak', 'drop']):
168
- # sentiment = 'Negative'
169
- # score = 0.7
170
- # else:
171
- # sentiment = 'Neutral'
172
- # score = 0.5
173
-
174
- # real_articles.append({
175
- # "title": article['title'].strip(),
176
- # "url": article.get('link', ''),
177
- # "source": article.get('publisher', 'Yahoo Finance'),
178
- # "sentiment": sentiment,
179
- # "sentiment_score": score,
180
- # "is_real": True
181
- # })
182
-
183
- # logger.info(f"Successfully retrieved {len(real_articles)} real articles")
184
-
185
- # except Exception as e:
186
- # logger.warning(f"Could not fetch real news: {e}")
187
-
188
- # return real_articles
189
-
190
- # @celery.task
191
- # def run_intelligence_analysis(job_id: str):
192
- # """Simplified intelligence analysis that always provides results"""
193
- # db = SessionLocal()
194
- # job = None
195
-
196
- # try:
197
- # logger.info(f"Starting intelligence analysis for job {job_id}")
198
-
199
- # # Get job
200
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
201
- # if not job or not job.result:
202
- # raise ValueError(f"Job {job_id} not found or has no initial data.")
203
-
204
- # job.status = "INTELLIGENCE_GATHERING"
205
- # db.commit()
206
-
207
- # current_data = job.result
208
- # ticker = current_data.get("ticker")
209
- # company_name = current_data.get("company_name", ticker.replace('.NS', ''))
210
-
211
- # logger.info(f"Analyzing {company_name} ({ticker})")
212
-
213
- # # Get basic stock information
214
- # stock_info = get_stock_basic_info(ticker)
215
- # logger.info(f"Stock info: {stock_info['name']} - {stock_info['sector']}")
216
-
217
- # # Try to get real news first
218
- # real_articles = try_real_news_sources(ticker, company_name)
219
-
220
- # # Create realistic articles
221
- # realistic_articles = create_realistic_articles(ticker, company_name, stock_info)
222
-
223
- # # Combine real and realistic articles
224
- # all_articles = real_articles + realistic_articles
225
-
226
- # # Remove duplicates and limit to 10 articles
227
- # seen_titles = set()
228
- # unique_articles = []
229
- # for article in all_articles:
230
- # if article['title'] not in seen_titles:
231
- # seen_titles.add(article['title'])
232
- # unique_articles.append(article)
233
-
234
- # final_articles = unique_articles[:10]
235
-
236
- # # Count sentiments
237
- # sentiment_counts = {'Positive': 0, 'Negative': 0, 'Neutral': 0}
238
- # for article in final_articles:
239
- # sentiment_counts[article['sentiment']] += 1
240
-
241
- # # Create intelligence briefing
242
- # intelligence_briefing = {
243
- # "articles": final_articles,
244
- # "sentiment_summary": {
245
- # "total_items": len(final_articles),
246
- # "positive": sentiment_counts['Positive'],
247
- # "negative": sentiment_counts['Negative'],
248
- # "neutral": sentiment_counts['Neutral'],
249
- # "real_articles": len(real_articles),
250
- # "generated_articles": len(realistic_articles),
251
- # "analysis_timestamp": datetime.now().isoformat()
252
- # }
253
- # }
254
-
255
- # # Update job result
256
- # new_result = current_data.copy()
257
- # new_result['intelligence_briefing'] = intelligence_briefing
258
- # job.result = new_result
259
- # job.status = "INTELLIGENCE_COMPLETE"
260
-
261
- # db.commit()
262
-
263
- # logger.info(f"Intelligence analysis completed successfully:")
264
- # logger.info(f"- Total articles: {len(final_articles)}")
265
- # logger.info(f"- Real articles: {len(real_articles)}")
266
- # logger.info(f"- Generated articles: {len(realistic_articles)}")
267
- # logger.info(f"- Sentiment: {sentiment_counts}")
268
-
269
- # return str(job.result)
270
-
271
- # except Exception as e:
272
- # logger.error(f"Intelligence analysis failed for job {job_id}: {e}")
273
-
274
- # if job:
275
- # job.status = "FAILED"
276
- # error_data = job.result if job.result else {}
277
- # error_data['error'] = f"Intelligence analysis failed: {str(e)}"
278
- # job.result = error_data
279
- # db.commit()
280
-
281
- # return f"Error: {e}"
282
-
283
- # finally:
284
- # db.close()
285
-
286
-
287
-
288
-
289
-
290
-
291
-
292
-
293
-
294
- # from celery_worker import celery
295
- # from core.database import SessionLocal
296
- # from models.analysis_job import AnalysisJob
297
- # from tools.news_tools import get_combined_news_and_sentiment
298
- # from uuid import UUID
299
-
300
- # @celery.task
301
- # def run_intelligence_analysis(job_id: str):
302
- # with SessionLocal() as db:
303
- # job = db.query(AnalysisJob).filter(AnalysisJob.id == UUID(job_id)).first()
304
- # if not job or not job.result:
305
- # print(f"Job {job_id} not found or has no data for intelligence.")
306
- # return
307
-
308
- # try:
309
- # job.status = "INTELLIGENCE_GATHERING"
310
- # db.commit()
311
-
312
- # current_data = job.result
313
- # ticker = current_data.get("ticker")
314
- # company_name = current_data.get("company_name")
315
-
316
- # intelligence_briefing = get_combined_news_and_sentiment(ticker, company_name)
317
-
318
- # new_result = dict(current_data)
319
- # new_result['intelligence_briefing'] = intelligence_briefing
320
- # job.result = new_result
321
-
322
- # db.commit()
323
- # print(f"Intelligence analysis for job {job_id} completed successfully.")
324
- # return "Intelligence gathering successful."
325
- # except Exception as e:
326
- # print(f"Error during intelligence analysis for job {job_id}: {e}")
327
- # job.status = "FAILED"
328
- # error_data = job.result if job.result else {}
329
- # error_data['error'] = f"Intelligence analysis failed: {str(e)}"
330
- # job.result = error_data
331
- # db.commit()
332
- # return f"Intelligence gathering failed: {e}"
333
-
334
-
335
-
336
-
337
-
338
-
339
-
340
-
341
  from celery_worker import celery
342
  from tools.news_tools import get_combined_news_and_sentiment
343
 
344
  @celery.task
345
  def get_intelligence_task(ticker: str, company_name: str):
346
  print(f"Executing get_intelligence_task for {company_name}...")
347
- # This task now depends on the company_name from the first task's result
348
  return get_combined_news_and_sentiment(ticker, company_name)
backend/tmp.py CHANGED
@@ -1,8 +1,6 @@
-# test_news.py - Run this to test the news functionality
 from tools.news_tools import get_combined_news_and_sentiment_debug
 
 def test_news():
-    # Test with a popular Indian stock
     ticker = "RELIANCE.NS"
     company_name = "Reliance Industries"
 

  from tools.news_tools import get_combined_news_and_sentiment_debug
2
 
3
  def test_news():
 
4
  ticker = "RELIANCE.NS"
5
  company_name = "Reliance Industries"
6
 
backend/tools/prediction_tools.py CHANGED
@@ -11,14 +11,10 @@ def generate_forecast(ticker: str) -> Dict[str, Any]:
     if stock_data.empty:
         return {"error": f"Could not download historical data for {ticker}."}
 
-    # --- THE FINAL, MOST ROBUST FIX FOR THE DATAFRAME ---
-    # 1. Create a new DataFrame with only the columns we need.
     df_prophet = stock_data[['Close']].copy()
-    # 2. Reset the index to turn the 'Date' index into a column.
     df_prophet.reset_index(inplace=True)
     # 3. Rename the columns to what Prophet expects.
     df_prophet.columns = ['ds', 'y']
-    # --- END OF FIX ---
 
     model = Prophet(
         daily_seasonality=False,

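Prophet expects exactly two columns named 'ds' (datetime) and 'y' (value), which is what the reset_index plus rename above produces from the 'Close' series. A self-contained sketch of that shape, using toy prices instead of the real yfinance download and assuming the prophet package provides the Prophet class imported in this file:

    import pandas as pd
    from prophet import Prophet

    # Toy stand-in for the downloaded history: a Date index and a Close column.
    stock_data = pd.DataFrame(
        {"Close": [100.0, 101.5, 99.8, 102.3, 103.1, 102.7]},
        index=pd.date_range("2024-01-01", periods=6, name="Date"),
    )

    df_prophet = stock_data[["Close"]].copy()
    df_prophet.reset_index(inplace=True)   # the Date index becomes a column
    df_prophet.columns = ["ds", "y"]       # the names Prophet requires

    model = Prophet(daily_seasonality=False)
    model.fit(df_prophet)
    future = model.make_future_dataframe(periods=7)
    forecast = model.predict(future)       # yhat, yhat_lower, yhat_upper
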
docker-compose.yml CHANGED
@@ -1,5 +1,4 @@
 services:
-  # --- Application Services ---
   redis:
     image: redis:7-alpine
     ports:
@@ -53,53 +52,3 @@ services:
 
 
 
-
-
-# services:
-#   redis:
-#     image: redis:7-alpine
-#     ports:
-#       - "6379:6379"
-#     restart: always
-
-#   backend:
-#     build:
-#       context: .
-#       dockerfile: ./backend/Dockerfile
-#     ports:
-#       - "8000:8000"
-#     volumes:
-#       - ./backend:/code/app
-#     env_file:
-#       - .env
-#     command: python -m uvicorn main:app --host 0.0.0.0 --port 8000 --reload
-#     restart: always
-#     depends_on:
-#       - redis
-
-#   worker:
-#     build:
-#       context: .
-#       dockerfile: ./backend/Dockerfile
-#     volumes:
-#       - ./backend:/code/app
-#     env_file:
-#       - .env
-#     command: python -m celery -A celery_worker.celery worker --loglevel=info
-#     restart: always
-#     depends_on:
-#       - redis
-#       - backend
-
-#   frontend:
-#     build:
-#       context: .
-#       dockerfile: ./frontend/Dockerfile
-#     ports:
-#       - "5173:5173"
-#     volumes:
-#       - ./frontend:/app
-#       - /app/node_modules
-#     restart: always
-#     depends_on:
-#       - backend

frontend/src/App.jsx CHANGED
@@ -1,125 +1,3 @@
1
- // import React, { useState, useEffect } from 'react';
2
- // import Header from './components/Header';
3
- // import JobForm from './components/JobForm';
4
- // import JobStatusCard from './components/JobStatusCard';
5
- // import ResultsDisplay from './components/ResultsDisplay';
6
- // import LoadingSkeleton from './components/LoadingSkeleton';
7
- // import HistoryPanel from './components/HistoryPanel';
8
- // import { createJob, getJob } from './services/api';
9
- // import { XCircle } from 'lucide-react';
10
-
11
- // function App() {
12
- // const [job, setJob] = useState(null);
13
- // const [isLoading, setIsLoading] = useState(false);
14
- // const [isPolling, setIsPolling] = useState(false);
15
- // const [error, setError] = useState(null);
16
-
17
- // const handleAnalysisRequest = async (ticker) => {
18
- // setIsLoading(true);
19
- // setIsPolling(true);
20
- // setError(null);
21
- // setJob(null);
22
- // try {
23
- // const response = await createJob(ticker);
24
- // setJob(response.data);
25
- // } catch (err) {
26
- // setError('Failed to create job. Please check the API server and try again.');
27
- // setIsLoading(false);
28
- // setIsPolling(false);
29
- // }
30
- // };
31
-
32
- // const handleSelectHistoryJob = (historyJob) => {
33
- // setIsLoading(false);
34
- // setIsPolling(false);
35
- // setError(null);
36
- // setJob(historyJob);
37
- // }
38
-
39
- // useEffect(() => {
40
- // if (!job?.id || !isPolling) return;
41
-
42
- // if (job.status !== 'PENDING') {
43
- // setIsLoading(false);
44
- // }
45
-
46
- // const intervalId = setInterval(async () => {
47
- // try {
48
- // const response = await getJob(job.id);
49
- // const updatedJob = response.data;
50
- // setJob(updatedJob);
51
-
52
- // if (updatedJob.status === 'SUCCESS' || updatedJob.status === 'FAILED') {
53
- // clearInterval(intervalId);
54
- // setIsPolling(false);
55
- // }
56
- // } catch (err) {
57
- // setError('Failed to poll job status.');
58
- // clearInterval(intervalId);
59
- // setIsPolling(false);
60
- // }
61
- // }, 3000);
62
-
63
- // return () => clearInterval(intervalId);
64
- // }, [job, isPolling]);
65
-
66
- // return (
67
- // <div className="min-h-screen bg-gray-900 text-white font-sans">
68
- // <Header />
69
- // <HistoryPanel onSelectJob={handleSelectHistoryJob} />
70
-
71
- // <main className="container mx-auto p-4 md:p-8">
72
- // <div className="max-w-4xl mx-auto">
73
- // <p className="text-lg text-gray-400 mb-8 text-center">
74
- // Enter an Indian stock ticker to receive a comprehensive, AI-powered analysis.
75
- // </p>
76
-
77
- // <JobForm onAnalyze={handleAnalysisRequest} isLoading={isLoading || isPolling} />
78
-
79
- // {error && <div className="my-6 p-4 bg-red-900/50 rounded-lg text-red-300 text-center">{error}</div>}
80
-
81
- // {isLoading && !job && <LoadingSkeleton />}
82
-
83
- // {job && !isLoading && <JobStatusCard job={job} />}
84
-
85
- // {job?.status === 'SUCCESS' && job.result && (
86
- // <ResultsDisplay result={job.result} />
87
- // )}
88
-
89
- // {job?.status === 'FAILED' && job.result?.error && (
90
- // <div className="mt-8 p-6 bg-gray-800/30 border border-red-500/30 rounded-lg text-center animate-fade-in">
91
- // <XCircle className="w-16 h-16 text-red-400 mx-auto mb-4" />
92
- // <h2 className="text-2xl font-bold text-red-300 mb-2">Analysis Failed</h2>
93
- // <p className="text-gray-400 max-w-lg mx-auto">
94
- // We couldn't complete the analysis for <strong className="text-white">{job.ticker}</strong>.
95
- // This usually means the stock symbol is incorrect or not listed.
96
- // </p>
97
- // <p className="text-xs text-gray-500 mt-4">Please double-check the ticker (e.g., RELIANCE.NS) and try again.</p>
98
-
99
- // <details className="mt-6 text-left w-full max-w-lg mx-auto">
100
- // <summary className="cursor-pointer text-xs text-gray-500 hover:text-gray-400 focus:outline-none">Show technical details</summary>
101
- // <pre className="mt-2 bg-gray-900 p-4 rounded-md text-gray-400 text-xs whitespace-pre-wrap font-mono">
102
- // {job.result.error}
103
- // </pre>
104
- // </details>
105
- // </div>
106
- // )}
107
- // </div>
108
- // </main>
109
- // </div>
110
- // );
111
- // }
112
-
113
- // export default App;
114
-
115
-
116
-
117
-
118
-
119
-
120
-
121
-
122
-
123
  import React, { useState, useEffect } from 'react';
124
  import Header from './components/Header';
125
  import JobForm from './components/JobForm';
frontend/src/components/Header.jsx CHANGED
@@ -1,5 +1,5 @@
 import React from 'react';
-import { Bot } from 'lucide-react'; // Using a cool icon
+import { Bot } from 'lucide-react';
 
 function Header() {
   return (

frontend/src/components/HistoricalChart.jsx CHANGED
@@ -2,7 +2,6 @@ import React, { useState, useEffect } from 'react';
 import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer } from 'recharts';
 import axios from 'axios';
 
-// A more reliable public CORS proxy
 const PROXY_URL = 'https://api.allorigins.win/raw?url=';
 
 const fetchHistoricalData = async (ticker) => {
@@ -15,7 +14,6 @@ const fetchHistoricalData = async (ticker) => {
   const timestamps = data.timestamp;
   const prices = data.indicators.quote[0].close;
 
-  // Filter out any null price points which can crash the chart
   return timestamps
     .map((ts, i) => ({
       date: new Date(ts * 1000).toLocaleDateString('en-IN', {day: 'numeric', month: 'short'}),

frontend/src/components/HistoryPanel.jsx CHANGED
@@ -11,14 +11,12 @@ function HistoryPanel({ onSelectJob }) {
     setIsLoading(true);
     getJobsHistory()
       .then(response => {
-        // Filter for only completed jobs to make the list cleaner
         setHistory(response.data.filter(job => job.status === 'SUCCESS' || job.status === 'FAILED'));
       })
       .catch(error => console.error("Failed to fetch history:", error))
       .finally(() => setIsLoading(false));
   };
 
-  // When the panel opens, fetch the history
   const togglePanel = () => {
     const newIsOpen = !isOpen;
     setIsOpen(newIsOpen);
@@ -42,7 +40,6 @@ function HistoryPanel({ onSelectJob }) {
         <History className="w-8 h-8" />
       </button>
 
-      {/* Overlay to close panel when clicking outside */}
      {isOpen && <div onClick={() => setIsOpen(false)} className="fixed inset-0 bg-black/50 z-30 transition-opacity"></div>}
 
      <div className={`fixed top-0 right-0 h-full bg-gray-900 border-l border-gray-700 shadow-2xl z-40 transition-transform duration-500 ease-in-out ${isOpen ? 'translate-x-0' : 'translate-x-full'} w-full md:w-96`}>

frontend/src/components/JobForm.jsx CHANGED
@@ -1,14 +1,13 @@
 import React, { useState } from 'react';
 import { Search, LoaderCircle } from 'lucide-react';
 
-// The component now receives 'onAnalyze' and 'isLoading' as props
 function JobForm({ onAnalyze, isLoading }) {
   const [ticker, setTicker] = useState('');
 
   const handleSubmit = (e) => {
     e.preventDefault();
     if (!ticker.trim() || isLoading) return;
-    onAnalyze(ticker); // Call the function passed down from App.jsx
+    onAnalyze(ticker);
   };
 
   return (

frontend/src/components/JobStatusCard.jsx CHANGED
@@ -3,7 +3,6 @@ import { LoaderCircle, CheckCircle2, XCircle, FileClock, Database, Search, Bot }
 
 function JobStatusCard({ job }) {
   const getStatusInfo = (status) => {
-    // This map now perfectly matches the statuses set in your main_task.py
    const statusMap = {
      'PENDING': {
        icon: <FileClock className="w-8 h-8 text-yellow-400" />,

frontend/src/components/LoadingSkeleton.jsx CHANGED
@@ -1,6 +1,5 @@
 import React from 'react';
 
-// A simple helper for the pulsing effect
 const SkeletonBlock = ({ className }) => (
   <div className={`bg-gray-700 rounded-md animate-pulse ${className}`} />
 );

frontend/src/services/api.js CHANGED
@@ -1,28 +1,3 @@
-// import axios from 'axios';
-
-// const apiClient = axios.create({
-//   baseURL: 'http://localhost:8000',
-//   headers: {
-//     'Content-Type': 'application/json',
-//   },
-// });
-
-// export const createJob = (ticker) => {
-//   return apiClient.post('/jobs', { ticker });
-// };
-
-// export const getJob = (jobId) => {
-//   return apiClient.get(`/jobs/${jobId}`);
-// };
-
-
-// export const getJobsHistory = () => {
-//   return apiClient.get('/jobs');
-// };
-
-
-
-
 import axios from 'axios';
 
 const API_URL = import.meta.env.VITE_API_URL || 'http://localhost:8000';

tmp_down.py CHANGED
@@ -1,6 +1,5 @@
 from transformers import pipeline
 
 print("Downloading NEW model 'ProsusAI/finbert'...")
-# Using the pipeline API is the easiest way to download all necessary files
 classifier = pipeline('text-classification', model='ProsusAI/finbert')
 print("Download complete!")
