ngxson's picture
ngxson HF staff
inference provider ok
1538aa3
raw
history blame contribute delete
720 Bytes
// Variables optionally injected into the page by the Hugging Face host.
// @ts-expect-error this is custom
const hfVariables = window.huggingface?.variables;

// Fallbacks used when the host injects nothing (e.g. local dev).
const DEFAULT_LLM_ENDPOINT =
  'https://router.huggingface.co/hf-inference/v1/chat/completions';
const DEFAULT_TTS_SPACE_ID = 'ngxson/kokoro-podcast-backend';

/** App-wide configuration, resolved once at module load. */
export const CONFIG = {
  llmEndpoint: hfVariables?.LLM_ENDPOINT || DEFAULT_LLM_ENDPOINT,
  ttsSpaceId: hfVariables?.TTS_SPACE_ID || DEFAULT_TTS_SPACE_ID,
  // Models offered via the inference provider. Previously considered:
  //'deepseek-ai/DeepSeek-R1-Distill-Qwen-14B',
  //'deepseek-ai/DeepSeek-R1-Distill-Llama-8B',
  //'deepseek-ai/DeepSeek-R1-Distill-Qwen-7B',
  inferenceProviderModels: [
    'deepseek-ai/DeepSeek-R1',
    'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B',
  ],
  inferenceProvider: 'together',
};

// Startup trace so the resolved config is visible in the console.
console.log({ CONFIG });