---
# LibreChat configuration — defines a single custom OpenAI-compatible endpoint
# proxied through LiteLLM.
# Config schema version; quoted so it always parses as a string,
# never as a number.
version: "1.0.1"
# Enable server-side caching of endpoint/model metadata.
cache: true

endpoints:
  custom:
    # LiteLLM proxy endpoint (in-cluster service).
    - name: "LiteLLM"
      # NOTE(review): icon is fetched from ollama.ai although the endpoint is
      # LiteLLM — possibly a copy-paste leftover; confirm intended icon.
      iconURL: https://ollama.ai/public/icon.png
      # Resolved from the environment at load time — keep the secret out of VCS.
      apiKey: "${LITELLm_MASTER_KEY}"
      baseURL: "http://librechat-litellm.llm.svc:8000"
      models:
        # Models offered by default; `fetch: true` also queries the proxy
        # for its live model list.
        default: ["openhermes", "solar"]
        fetch: true
      # Generate conversation titles using the "solar" model.
      titleConvo: true
      titleModel: "solar"
      # Summarization disabled; summaryModel is inert while summarize is false.
      summarize: false
      summaryModel: "solar"
      forcePrompt: false
      modelDisplayLabel: "LiteLLM"
      # Parameters stripped from requests before forwarding to the proxy.
      dropParams: ["stop", "frequency_penalty", "presence_penalty"]