Thomas G. Lopes committed on
Commit
4e7f4fc
·
1 Parent(s): 7260a80

fix tokenizer

Browse files
Files changed (1) hide show
  1. src/routes/api/models/+server.ts +13 -9
src/routes/api/models/+server.ts CHANGED
@@ -1,4 +1,3 @@
1
- import { env } from "$env/dynamic/private";
2
  import type { Model, ModelWithTokenizer } from "$lib/types";
3
  import { json } from "@sveltejs/kit";
4
  import type { RequestHandler } from "./$types";
@@ -7,7 +6,7 @@ import { dev } from "$app/environment";
7
  let cache: ModelWithTokenizer[] | undefined;
8
 
9
  export const GET: RequestHandler = async ({ fetch }) => {
10
- if (cache && dev) {
11
  console.log("Skipping load, using in memory cache");
12
  return json(cache);
13
  }
@@ -15,14 +14,9 @@ export const GET: RequestHandler = async ({ fetch }) => {
15
  const apiUrl =
16
  "https://huggingface.co/api/models?pipeline_tag=text-generation&filter=conversational&inference_provider=all&limit=100&expand[]=inferenceProviderMapping&expand[]=config&expand[]=library_name&expand[]=pipeline_tag&expand[]=tags&expand[]=mask_token&expand[]=trendingScore";
17
 
18
- const HF_TOKEN = env.HF_TOKEN;
19
-
20
  const res = await fetch(apiUrl, {
21
  credentials: "include",
22
  headers: {
23
- "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:135.0) Gecko/20100101 Firefox/135.0",
24
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
25
- "Accept-Language": "en-US,en;q=0.5",
26
  "Upgrade-Insecure-Requests": "1",
27
  "Sec-Fetch-Dest": "document",
28
  "Sec-Fetch-Mode": "navigate",
@@ -47,13 +41,23 @@ export const GET: RequestHandler = async ({ fetch }) => {
47
  const promises = compatibleModels.map(async model => {
48
  const configUrl = `https://huggingface.co/${model.id}/raw/main/tokenizer_config.json`;
49
  const res = await fetch(configUrl, {
 
50
  headers: {
51
- Authorization: `Bearer ${HF_TOKEN}`,
 
 
 
 
 
 
 
52
  },
 
 
53
  });
54
 
55
  if (!res.ok) {
56
- console.error(`Error fetching tokenizer file for ${model.id}`, res.status, res.statusText);
57
  return null; // Ignore failed requests by returning null
58
  }
59
 
 
 
1
  import type { Model, ModelWithTokenizer } from "$lib/types";
2
  import { json } from "@sveltejs/kit";
3
  import type { RequestHandler } from "./$types";
 
6
  let cache: ModelWithTokenizer[] | undefined;
7
 
8
  export const GET: RequestHandler = async ({ fetch }) => {
9
+ if (cache?.length && dev) {
10
  console.log("Skipping load, using in memory cache");
11
  return json(cache);
12
  }
 
14
  const apiUrl =
15
  "https://huggingface.co/api/models?pipeline_tag=text-generation&filter=conversational&inference_provider=all&limit=100&expand[]=inferenceProviderMapping&expand[]=config&expand[]=library_name&expand[]=pipeline_tag&expand[]=tags&expand[]=mask_token&expand[]=trendingScore";
16
 
 
 
17
  const res = await fetch(apiUrl, {
18
  credentials: "include",
19
  headers: {
 
 
 
20
  "Upgrade-Insecure-Requests": "1",
21
  "Sec-Fetch-Dest": "document",
22
  "Sec-Fetch-Mode": "navigate",
 
41
  const promises = compatibleModels.map(async model => {
42
  const configUrl = `https://huggingface.co/${model.id}/raw/main/tokenizer_config.json`;
43
  const res = await fetch(configUrl, {
44
+ credentials: "include",
45
  headers: {
46
+ "Upgrade-Insecure-Requests": "1",
47
+ "Sec-Fetch-Dest": "document",
48
+ "Sec-Fetch-Mode": "navigate",
49
+ "Sec-Fetch-Site": "none",
50
+ "Sec-Fetch-User": "?1",
51
+ "Priority": "u=0, i",
52
+ "Pragma": "no-cache",
53
+ "Cache-Control": "no-cache",
54
  },
55
+ method: "GET",
56
+ mode: "cors",
57
  });
58
 
59
  if (!res.ok) {
60
+ // console.error(`Error fetching tokenizer file for ${model.id}`, res.status, res.statusText);
61
  return null; // Ignore failed requests by returning null
62
  }
63