Gregor Betz
committed on
warnings
app.py CHANGED
@@ -211,23 +211,20 @@ with gr.Blocks() as demo:
     conversation_id = gr.State(str(uuid.uuid4()))
     tos_approved = gr.State(False)
 
-    # set up client and guide
     if not client_kwargs["inference_server_url"]:
-        gr.Warning(
-            "⚠️ Please set the client model inference endpoint in the config.yaml file.",
-            duration=0
+        gr.Markdown(
+            "⚠️ Please set the client model inference endpoint in the config.yaml file."
         )
     if not guide_kwargs["inference_server_url"]:
-        gr.Warning(
-            "⚠️ Please set the expert model inference endpoint in the config.yaml file.",
-            duration=0
+        gr.Markdown(
+            "⚠️ Please set the expert model inference endpoint in the config.yaml file."
         )
     if not guide_kwargs["classifier_kwargs"]["inference_server_url"]:
-        gr.Warning(
-            "⚠️ Please set the classifier model inference endpoint in the config.yaml file.",
-            duration=0
+        gr.Markdown(
+            "⚠️ Please set the classifier model inference endpoint in the config.yaml file."
         )
 
+    # set up client and guide
     client_llm = setup_client_llm(**client_kwargs)
     guide_config = RecursiveBalancingGuideConfig(**guide_kwargs)
     guide = RecursiveBalancingGuide(tourist_llm=client_llm, config=guide_config)
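The removed calls take a duration=0 argument, which fits Gradio's toast-style gr.Warning API, while the added gr.Markdown components render the notice statically when the Blocks layout is built. Below is a minimal sketch of the two notice styles, assuming Gradio 4.x; the client_endpoint variable and the echo handler are illustrative stand-ins, not code from app.py.

import gradio as gr

client_endpoint = ""  # stand-in for client_kwargs["inference_server_url"] read from config.yaml

with gr.Blocks() as demo:
    # Build-time notice (the added lines): a static Markdown banner placed in the page layout.
    if not client_endpoint:
        gr.Markdown(
            "⚠️ Please set the client model inference endpoint in the config.yaml file."
        )

    prompt = gr.Textbox(label="Prompt")
    reply = gr.Textbox(label="Reply")

    def echo(text):
        # Event-time notice (the removed lines): gr.Warning shows a toast while an event
        # is being processed; the duration argument controls how long it stays on screen,
        # and the removed lines passed duration=0.
        if not client_endpoint:
            gr.Warning(
                "⚠️ Please set the client model inference endpoint in the config.yaml file.",
                duration=0,
            )
        return text

    prompt.submit(echo, prompt, reply)

demo.launch()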