#!/bin/bash
# Download the split Command-R-Plus GGUF model and launch the llama.cpp server.
#
# Env vars (all optional):
#   N_GPU_LAYERS - layers to offload to GPU (default 10; flag currently disabled below)
#   CONTEXT      - context size (default 32768; flag currently disabled below)
#   PORT         - server listen port (default 7860)
#   MODEL_PATH   - model file to serve (defaults to the first split part downloaded below)
set -euo pipefail

N_GPU_LAYERS="${N_GPU_LAYERS:-10}"
PORT="${PORT:-7860}"
CONTEXT="${CONTEXT:-32768}"
MODEL_DIR="/data/models"
# BUGFIX: the original script downloaded the Q5_K_M split parts but launched the
# server with a Q6_K filename that was never downloaded. Point MODEL_PATH at the
# file we actually fetch (llama.cpp loads the remaining split parts automatically
# when given the first part).
MODEL_PATH="${MODEL_PATH:-${MODEL_DIR}/command-r-plus-Q5_K_M-00001-of-00002.gguf}"
# NOTE(review): dropped the unused Mixtral MODEL_PATH/MODEL_URL_ defaults — they
# referenced a different model and were never read.

# Fetch both parts of the split GGUF into the local model directory.
huggingface-cli download "pmysl/c4ai-command-r-plus-GGUF" \
  command-r-plus-Q5_K_M-00001-of-00002.gguf --local-dir "$MODEL_DIR"
huggingface-cli download "pmysl/c4ai-command-r-plus-GGUF" \
  command-r-plus-Q5_K_M-00002-of-00002.gguf --local-dir "$MODEL_DIR"

cd /app || exit 1
# GPU offload and context flags were deliberately commented out in the original;
# re-enable by appending: --n-gpu-layers "$N_GPU_LAYERS" -c "$CONTEXT"
#--n-gpu-layers $N_GPU_LAYERS
#-c $CONTEXT
./server -m "$MODEL_PATH" --port "$PORT" --host 0.0.0.0 --path "/app/public"