Thomas G. Lopes committed
Commit: 5acf3a4
Parent(s): 17c3edf

move to csr

Files changed:
- src/app.html +9 -1
- src/lib/components/Avatar.svelte +0 -3
- src/lib/components/InferencePlayground/InferencePlaygroundHFTokenModal.svelte +0 -3
- src/lib/components/InferencePlayground/InferencePlaygroundProviderSelect.svelte +0 -2
- src/lib/stores/session.ts +12 -19
- src/lib/stores/token.ts +3 -6
- src/lib/utils/store.ts +1 -5
- src/routes/+layout.ts +1 -0
- src/routes/+page.ts +8 -0
- src/routes/{+page.server.ts → api/models/+server.ts} +15 -8
src/app.html
CHANGED
@@ -7,7 +7,15 @@
 		%sveltekit.head%
 	</head>
 
-	<body data-sveltekit-preload-data="hover"
+	<body data-sveltekit-preload-data="hover">
+		<style>
+			body {
+				background: white;
+			}
+			body.dark {
+				background: #101828;
+			}
+		</style>
 		<script>
 			(function () {
 				const urlParams = new URLSearchParams(window.location.search);
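The inline <style> added above gives the <body> a background color before the client bundle hydrates, avoiding a flash of the wrong background in dark mode. The inline script that follows it is only partially visible in this diff, so as a rough illustration only, here is the kind of pre-hydration logic such a script typically contains; the "theme" storage key and the media-query fallback are assumptions, not the repository's actual code:

// Hypothetical sketch, not the script from this commit: decide the theme
// before hydration so the inline <style> above shows the matching background.
const storedTheme = localStorage.getItem("theme"); // "theme" key is assumed
const prefersDark = window.matchMedia("(prefers-color-scheme: dark)").matches;
if (storedTheme === "dark" || (storedTheme === null && prefersDark)) {
	document.body.classList.add("dark");
}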
src/lib/components/Avatar.svelte
CHANGED
@@ -1,6 +1,4 @@
 <script lang="ts">
-	import { browser } from "$app/environment";
-
 	export let orgName: string | undefined;
 	export let size: "sm" | "md" = "md";
 
@@ -9,7 +7,6 @@
 	async function getAvatarUrl(orgName?: string) {
 		if (!orgName) return;
 		const url = `https://huggingface.co/api/organizations/${orgName}/avatar`;
-		if (!browser) return;
 		const res = await fetch(url);
 		if (!res.ok) {
 			console.error(`Error getting avatar url for org: ${orgName}`, res.status, res.statusText);
src/lib/components/InferencePlayground/InferencePlaygroundHFTokenModal.svelte
CHANGED
@@ -1,6 +1,4 @@
 <script lang="ts">
-	import { browser } from "$app/environment";
-
 	import { createEventDispatcher, onDestroy, onMount } from "svelte";
 
 	import IconCross from "../Icons/IconCross.svelte";
@@ -35,7 +33,6 @@
 	});
 
 	onDestroy(() => {
-		if (!browser) return;
 		// remove inert attribute if this is the last modal
 		if (document.querySelectorAll('[role="dialog"]:not(#app *)').length === 1) {
 			document.getElementById("app")?.removeAttribute("inert");
src/lib/components/InferencePlayground/InferencePlaygroundProviderSelect.svelte
CHANGED
@@ -1,7 +1,6 @@
 <script lang="ts">
 	import type { Conversation } from "$lib/types";
 
-	import { browser } from "$app/environment";
 	import { randomPick } from "$lib/utils/array";
 	import { cn } from "$lib/utils/cn";
 	import { createSelect, createSync } from "@melt-ui/svelte";
@@ -13,7 +12,6 @@
 	export { classes as class };
 
 	function reset(providers: typeof conversation.model.inferenceProviderMapping) {
-		if (!browser) return;
 		const validProvider = providers.find(p => p.provider === conversation.provider);
 		if (validProvider) return;
 		conversation.provider = randomPick(providers)?.provider;
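The three component diffs above are the same mechanical change: once the whole app is client-rendered (see src/routes/+layout.ts further down), component code never executes on the server, so the `if (!browser) return;` early-exits and the `$app/environment` imports become dead code. A minimal sketch of that pattern with a hypothetical helper, not code from this repository:

import { browser } from "$app/environment";

// Hypothetical helper, SSR-era version: guarded so it is a no-op on the server.
function readDraft(): string | null {
	if (!browser) return null;
	return localStorage.getItem("draft");
}

// Hypothetical helper, CSR-only version: with ssr = false the guard is
// redundant, because this module only ever runs in the browser.
function readDraftCsr(): string | null {
	return localStorage.getItem("draft");
}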
src/lib/stores/session.ts
CHANGED
@@ -1,4 +1,3 @@
-import { browser } from "$app/environment";
 import { goto } from "$app/navigation";
 import { defaultGenerationConfig } from "$lib/components/InferencePlayground/generationConfigSettings";
 import { models } from "$lib/stores/models";
@@ -44,7 +43,7 @@ function createSessionStore() {
 	const defaultModel = featured[0] ?? $models[0] ?? emptyModel;
 
 	// Parse URL query parameters
-	const searchParams = new URLSearchParams(
+	const searchParams = new URLSearchParams(window.location.search);
 	const searchProviders = searchParams.getAll("provider");
 	const searchModelIds = searchParams.getAll("modelId");
 	const modelsFromSearch = searchModelIds.map(id => $models.find(model => model.id === id)).filter(Boolean);
@@ -62,14 +61,12 @@ function createSessionStore() {
 		conversations: [defaultConversation],
 	};
 
-
-
-
-
-
-
-			else localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(savedSession));
-		}
+	const savedData = localStorage.getItem(LOCAL_STORAGE_KEY);
+	if (savedData) {
+		const parsed = safeParse(savedData);
+		const res = typia.validate<Session>(parsed);
+		if (res.success) savedSession = parsed;
+		else localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(savedSession));
 	}
 
 	// Merge query params with savedSession.
@@ -100,12 +97,10 @@ function createSessionStore() {
 		const s = cb($s);
 
 		// Save to localStorage
-
-
-
-
-			console.error("Failed to save session to localStorage:", e);
-		}
+		try {
+			localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(s));
+		} catch (e) {
+			console.error("Failed to save session to localStorage:", e);
 		}
 
 		// Update URL query parameters
@@ -137,9 +132,7 @@ function createSessionStore() {
 
 	// Add a method to clear localStorage
 	const clearSavedSession = () => {
-
-		localStorage.removeItem(LOCAL_STORAGE_KEY);
-	}
+		localStorage.removeItem(LOCAL_STORAGE_KEY);
 	};
 
 	return { ...store, set, update, clearSavedSession };
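The restore logic above depends on a `safeParse` helper and on `typia.validate<Session>`, neither of which is defined in this diff. A minimal sketch of what such a `safeParse` typically looks like, offered as an assumption rather than the repository's actual implementation:

// Assumed helper shape: parse JSON without throwing, so malformed localStorage
// data falls through to typia validation (which will then reject it).
export function safeParse(value: string): unknown {
	try {
		return JSON.parse(value);
	} catch {
		return undefined;
	}
}

Under that assumption, a corrupted entry fails validation and the `else` branch above overwrites it with the freshly built default session.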
src/lib/stores/token.ts
CHANGED
@@ -1,4 +1,3 @@
-import { browser } from "$app/environment";
 import { writable } from "svelte/store";
 
 const key = "hf_token";
@@ -13,11 +12,9 @@ function createTokenStore() {
 		});
 	}
 
-
-
-
-			setValue(JSON.parse(storedHfToken));
-		}
+	const storedHfToken = localStorage.getItem(key);
+	if (storedHfToken !== null) {
+		setValue(JSON.parse(storedHfToken));
 	}
 
 	return {
src/lib/utils/store.ts
CHANGED
@@ -1,9 +1,5 @@
-import {
-import { page } from "$app/stores";
-import { readable, type Writable } from "svelte/store";
+import { type Writable } from "svelte/store";
 
 export function partialSet<T extends Record<string, unknown>>(store: Writable<T>, partial: Partial<T>) {
 	store.update(s => ({ ...s, ...partial }));
 }
-
-export const safePage = browser ? page : readable(undefined);
src/routes/+layout.ts
ADDED
@@ -0,0 +1 @@
+export const ssr = false;
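This one-liner is what the rest of the commit hinges on: setting `ssr = false` in the root layout disables server-side rendering for every route, so load functions and component code run only in the browser. A minimal sketch, under that assumption, of a client-only load that would previously have needed a `browser` guard (hypothetical file, not part of this commit):

import type { PageLoad } from "./$types";

// Hypothetical example: with ssr = false inherited from the root layout,
// browser-only APIs are safe to use directly inside load.
export const load: PageLoad = () => {
	const draft = localStorage.getItem("draft"); // never executed on the server
	return { draft };
};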
src/routes/+page.ts
ADDED
@@ -0,0 +1,8 @@
+import type { ModelWithTokenizer } from "$lib/types";
+import type { PageLoad } from "./$types";
+
+export const load: PageLoad = async ({ fetch }) => {
+	const res = await fetch("/api/models");
+	const models: ModelWithTokenizer[] = await res.json();
+	return { models };
+};
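With the model list now fetched in a load that only ever runs client-side, the page component receives it through its `data` prop. A brief sketch of that consumer (the actual +page.svelte is unchanged by this commit and not shown here):

// Hypothetical sketch of the <script lang="ts"> block in +page.svelte.
import type { PageData } from "./$types";

export let data: PageData; // data.models: ModelWithTokenizer[], fetched in the browser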
src/routes/{+page.server.ts → api/models/+server.ts}
RENAMED
@@ -1,17 +1,20 @@
 import { env } from "$env/dynamic/private";
 import type { Model, ModelWithTokenizer } from "$lib/types";
-import
+import { json } from "@sveltejs/kit";
+import type { RequestHandler } from "./$types";
+import { dev } from "$app/environment";
 
-let cache:
+let cache: ModelWithTokenizer[] | undefined;
 
-export const load: PageServerLoad = async ({ fetch }) => {
-	if (cache) {
+export const GET: RequestHandler = async ({ fetch }) => {
+	if (cache && dev) {
 		console.log("Skipping load, using in memory cache");
-		return cache;
+		return json(cache);
 	}
 
 	const apiUrl =
 		"https://huggingface.co/api/models?pipeline_tag=text-generation&filter=conversational&inference_provider=all&limit=100&expand[]=inferenceProviderMapping&expand[]=config&expand[]=library_name&expand[]=pipeline_tag&expand[]=tags&expand[]=mask_token&expand[]=trendingScore";
+
 	const HF_TOKEN = env.HF_TOKEN;
 
 	const res = await fetch(apiUrl, {
@@ -19,10 +22,12 @@ export const load: PageServerLoad = async ({ fetch }) => {
 			Authorization: `Bearer ${HF_TOKEN}`,
 		},
 	});
+
 	if (!res.ok) {
 		console.error(`Error fetching warm models`, res.status, res.statusText);
-		return { models: [] };
+		return json({ models: [] });
 	}
+
 	const compatibleModels: Model[] = await res.json();
 	compatibleModels.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
 
@@ -33,16 +38,18 @@ export const load: PageServerLoad = async ({ fetch }) => {
 				Authorization: `Bearer ${HF_TOKEN}`,
 			},
 		});
+
 		if (!res.ok) {
 			console.error(`Error fetching tokenizer file for ${model.id}`, res.status, res.statusText);
 			return null; // Ignore failed requests by returning null
 		}
+
 		const tokenizerConfig = await res.json();
 		return { ...model, tokenizerConfig } satisfies ModelWithTokenizer;
 	});
 
 	const models: ModelWithTokenizer[] = (await Promise.all(promises)).filter(model => model !== null);
+	cache = models;
 
-	cache
-	return { models };
+	return json(cache);
 };