# Configuration version (required)
version: 1.2.4
# Cache settings: Set to true to enable caching
cache: true
fileStrategy: "firebase"
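# Note: the "firebase" file strategy stores uploaded files in Firebase CDN storage; it assumes
# Firebase credentials are configured in .env (variable names such as FIREBASE_API_KEY and
# FIREBASE_STORAGE_BUCKET per the LibreChat file-storage docs; verify against your deployment).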
# Definition of custom endpoints
endpoints:
  custom:
    # DeepSeek Example (DeepSeek models served here via the SiliconFlow API)
    # DeepSeek API keys: https://platform.deepseek.com/api_keys
    # Model list: https://platform.deepseek.com/api-docs/pricing
    - name: "Deepseek"
      apiKey: "${DEEPSEEK_API_KEY}"
      baseURL: "https://api.siliconflow.cn/v1"
      models:
        default: ["deepseek-ai/DeepSeek-V3", "deepseek-ai/DeepSeek-R1"]
        fetch: false
      titleConvo: true
      titleModel: "Qwen/Qwen2.5-7B-Instruct"
      modelDisplayLabel: "Deepseek"
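    # "${DEEPSEEK_API_KEY}" is resolved from the server environment (e.g. a line in .env); with
    # this baseURL it should be a SiliconFlow key. "fetch: false" means the model list is not
    # queried from the provider, so only the defaults listed above are offered in the UI.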
    # OpenRouter Example
    - name: 'OpenRouter'
      # For `apiKey` and `baseURL`, you can use environment variables that you define
      # (see the commented .env sketch after this endpoint).
      apiKey: '${OPENROUTER_KEY}'
      baseURL: 'https://openrouter.ai/api/v1'
      models:
        default: ["openrouter/quasar-alpha", "openrouter/optimus-alpha"]
        fetch: false
      titleConvo: true
      titleModel: "google/gemma-3-27b-it:free"
      # Recommended: Drop the `stop` parameter from the request, as OpenRouter models use a variety of stop tokens.
      dropParams: ['stop']
      modelDisplayLabel: 'OpenRouter'
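    # A minimal .env sketch for the variable referenced above (placeholder value, set your own):
    # OPENROUTER_KEY=sk-or-v1-your-openrouter-key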
    # SiliconFlow Example
- name: "Siliconflow"
apiKey: "user_provided"
baseURL: "https://api.siliconflow.cn/v1"
models:
default: ["nvidia/Llama-3.1-Nemotron-70B-Instruct"]
fetch: true
titleConvo: true
titleModel: "Qwen/Qwen2.5-7B-Instruct"
summarize: false
summaryModel: "Qwen/Qwen2.5-7B-Instruct"
forcePrompt: false
modelDisplayLabel: "siliconflow"
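    # With apiKey set to "user_provided", each user enters their own SiliconFlow key in the
    # LibreChat client instead of the server supplying one from .env; "fetch: true" also pulls
    # the available model list from the provider at runtime.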
    # ephone.chat Example (OpenAI-compatible proxy)
    - name: "URL1"
      # For `apiKey` and `baseURL`, you can use environment variables that you define,
      # or set them to "user_provided" so each user supplies their own values
      # (an env-var alternative is sketched after this endpoint).
      apiKey: "sk-xum3k3At9s7JOyGrTllyjI6HbTKlWa0XRFnfHsxEOqsqH1vD"
      baseURL: "https://api.ephone.chat/v1"
      models:
        default: ["gpt-4.1-2025-04-14", "gpt-4.1-mini-2025-04-14", "gpt-4.1-nano-2025-04-14"]
        fetch: false
      titleConvo: true
      titleModel: "gpt-4.1-nano-2025-04-14"
      summarize: false
      summaryModel: "gpt-4.1-nano-2025-04-14"
      forcePrompt: false
      modelDisplayLabel: "URL1"
      iconURL: "https://cdn-icons-png.flaticon.com/128/1240/1240946.png"
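    # Instead of hard-coding the key above, it can reference a variable you define in .env,
    # for example (URL1_API_KEY is an illustrative name, not defined elsewhere in this file):
    # apiKey: "${URL1_API_KEY}"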
    # doi9.top Example (OpenAI-compatible proxy)
    - name: "URL2"
      apiKey: "sk-3AEY28Or8ORvWbwgYJo7DXftapQPk7GyMI7pDaQyi4XOpKyP"
      baseURL: "https://doi9.top/v1"
      models:
        default: ["claude-sonnet-4-20250514",
                  "gemini-2.0-flash-exp",
                  "gemini-2.5-flash-preview-05-20",
                  "gemini-2.5-pro-preview-03-25",
                  "gemini-2.5-pro-preview-05-06",
                  "gemini-2.5-pro-preview-06-05"]
        fetch: false
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "URL2"
      iconURL: "https://cdn-icons-png.flaticon.com/128/1240/1240979.png"
# See the Custom Configuration Guide for more information:
# https://docs.librechat.ai/install/configuration/custom_config.html