jeonghin committed · commit 0c0bb1b
1 parent: c819284

Rollback to current version

Files changed: app_function.py (+7, -23)
app_function.py
CHANGED
@@ -39,17 +39,9 @@ def get_vectorstore(text_chunks):
     Returns:
     - FAISS: A FAISS vector store containing the embeddings of the text chunks.
     """
-    try:
-        embeddings = OpenAIEmbeddings(
-            openai_api_base="https://openai.vocareum.com/v1",
-            openai_api_key=OPENAI_API_KEY,
-        )
-    except:
-        embeddings = OpenAIEmbeddings(
-            openai_api_base="https://openai.vocareum.com/v1",
-            openai_api_key=OPENAI_API_KEY2,
-        )
-
+    embeddings = OpenAIEmbeddings(
+        openai_api_base="https://openai.vocareum.com/v1",
+    )
     vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)
     return vectorstore

@@ -65,18 +57,10 @@ def get_conversation_chain(vectorstore):
     Returns:
     - ConversationalRetrievalChain: An initialized conversational chain object.
     """
-    try:
-        llm = ChatOpenAI(
-            model_name="gpt-4-1106-preview",
-            openai_api_base="https://openai.vocareum.com/v1",
-            openai_api_key=OPENAI_API_KEY,
-        )
-    except:
-        llm = ChatOpenAI(
-            model_name="gpt-4-1106-preview",
-            openai_api_base="https://openai.vocareum.com/v1",
-            openai_api_key=OPENAI_API_KEY2,
-        )
+    llm = ChatOpenAI(
+        model_name="gpt-4-1106-preview",
+        openai_api_base="https://openai.vocareum.com/v1",
+    )

     memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
     conversation_chain = ConversationalRetrievalChain.from_llm(