# Configuration version (required)
version: 1.0.0

# Cache settings: Set to true to enable caching
cache: true

# See the Custom Configuration Guide for more information:
# https://docs.librechat.ai/install/configuration/custom_config.html
endpoints:
  custom:
    - name: "together.ai"
      apiKey: "${TOGETHERAI_API_KEY}"
      baseURL: "https://api.together.xyz"
      models:
        default: ["deepseek-ai/DeepSeek-R1"]
        fetch: false # fetching the list of models is not supported
      titleConvo: true
      titleModel: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
      summarize: false
      summaryModel: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
      forcePrompt: false
      modelDisplayLabel: "together.ai"
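# A minimal sketch of listing more than one model under "default", kept commented
# out; the second model ID is an assumption (it is only used as titleModel above)
# and should be replaced with a model available to your together.ai account:
#
#      models:
#        default:
#          - "deepseek-ai/DeepSeek-R1"
#          - "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
#        fetch: false # fetching the list of models is not supported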