cfahlgren1 committed
Commit 9fa0d19 · verified · 1 parent: 4e801bc

Fix 500 error on Warm Models Endpoint


Since we no longer use the warm param in the same way and only support HF Inference in this playground, the model listing needs to switch to the inference_provider=hf-inference query shown in the diff below.
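
For reference, here is a minimal standalone sketch of the updated query (assumes a runtime with a global fetch and an HF_TOKEN environment variable; apart from the URL, the names are illustrative and not part of this commit):

// Minimal sketch (not part of this commit): list text-generation models served
// by the hf-inference provider. HF_TOKEN mirrors the env var used in
// +page.server.ts; everything else here is illustrative.
const apiUrl =
	"https://huggingface.co/api/models?pipeline_tag=text-generation&inference_provider=hf-inference&filter=conversational";

const res = await fetch(apiUrl, {
	headers: { Authorization: `Bearer ${process.env.HF_TOKEN}` },
});
if (!res.ok) {
	throw new Error(`Model listing failed with status ${res.status}`);
}

// The Hub returns a JSON array of model objects; `id` is the model repo id.
const models: Array<{ id: string }> = await res.json();
console.log(models.map((m) => m.id));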

Files changed (1)
  1. src/routes/+page.server.ts +1 -1
src/routes/+page.server.ts CHANGED
@@ -4,7 +4,7 @@ import type { PageServerLoad } from "./$types";
 import { env } from "$env/dynamic/private";
 
 export const load: PageServerLoad = async ({ fetch }) => {
-	const apiUrl = "https://huggingface.co/api/models?pipeline_tag=text-generation&inference=warm&filter=conversational";
+	const apiUrl = "https://huggingface.co/api/models?pipeline_tag=text-generation&inference_provider=hf-inference&filter=conversational";
 	const HF_TOKEN = env.HF_TOKEN;
 
 	const res = await fetch(apiUrl, {