karenwky committed on
Commit
4fe90b3
·
verified ·
1 Parent(s): 0b77e68

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -4
app.py CHANGED
@@ -7,7 +7,7 @@ from llama_index.indices.managed.llama_cloud import (
7
  LlamaCloudCompositeRetriever,
8
  )
9
  from llama_index.core import Settings
10
- from llama_index.llms.anthropic import Anthropic
11
  from llama_cloud.types import CompositeRetrievalMode
12
  from llama_index.core.memory import ChatMemoryBuffer
13
  from llama_index.core.chat_engine import CondensePlusContextChatEngine
@@ -26,9 +26,12 @@ from openinference.instrumentation.llama_index import LlamaIndexInstrumentor
26
  # Replace with your actual LlamaCloud Project Name
27
  LLAMA_CLOUD_PROJECT_NAME = "CustomerSupportProject"
28
 
29
- # Configure Anthropic LLM
30
- # Ensure ANTHROPIC_API_KEY is set in your environment variables
31
- Settings.llm = Anthropic(model="claude-sonnet-4-0", temperature=0)
 
 
 
32
  print(f"[INFO] Configured LLM: {Settings.llm.model}")
33
 
34
  # Configure LlamaTrace (Arize Phoenix)
 
7
  LlamaCloudCompositeRetriever,
8
  )
9
  from llama_index.core import Settings
10
+ from llama_index.llms.nebius import NebiusLLM
11
  from llama_cloud.types import CompositeRetrievalMode
12
  from llama_index.core.memory import ChatMemoryBuffer
13
  from llama_index.core.chat_engine import CondensePlusContextChatEngine
 
26
  # Replace with your actual LlamaCloud Project Name
27
  LLAMA_CLOUD_PROJECT_NAME = "CustomerSupportProject"
28
 
29
+ # Configure NebiusLLM
30
+ # Ensure NEBIUS_API_KEY is set in your environment variables
31
+ Settings.llm = NebiusLLM(
32
+ model="meta-llama/Meta-Llama-3.1-405B-Instruct",
33
+ temperature=0
34
+ )
35
  print(f"[INFO] Configured LLM: {Settings.llm.model}")
36
 
37
  # Configure LlamaTrace (Arize Phoenix)