Update README.md

ChatGPT4-trained model without constraints, no filters.

Run the model in Colab:
<a href="https://colab.research.google.com/github/steinhaug/stable-diffusion/blob/main/llm/ggml_alpaca_7b_q4_bin.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>

```
# Clone and build the inference repository (llama.cpp)
%cd /content
!git clone https://github.com/ggerganov/llama.cpp
%cd llama.cpp
!make

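# Assumption, not in the original notebook: the model file sits on Google Drive,
# so mount the drive first so the path used in the copy step below resolves.
from google.colab import drive
drive.mount('/content/drive')
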
# copy the quantized model from Google Drive into the working directory
!cp /content/drive/MyDrive/alpaca-models/ggml-alpaca-7b-q4.bin ggml-alpaca-7b-q4.bin

#@title 3 ways of launching chat from the terminal after the code above has finished
cd /content/llama.cpp
./main -m ./ggml-alpaca-7b-q4.bin -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat-with-el-diablo.txt
./main -m ./ggml-alpaca-7b-q4.bin -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/dan.txt
./main --color --instruct --threads 4 --model ./ggml-alpaca-7b-q4.bin --file ./prompts/alpaca.txt --batch_size 8 --ctx_size 2048 -n -1 --repeat_last_n 64 --repeat_penalty 1.3 --n_predict 128 --temp 0.1 --top_k 40 --top_p 0.95
```
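
A quick way to smoke-test the build from a notebook cell, rather than an interactive terminal session, is to wrap the same binary in a subprocess call. This is a minimal sketch that is not part of the original notebook: the prompt string is made up, and the flags are taken from the commands above.

```
# Minimal sketch (assumption: run from a Colab Python cell after the build steps above).
# Runs one non-interactive prompt against the quantized model and prints the output.
import subprocess

result = subprocess.run(
    [
        "./main",
        "--model", "./ggml-alpaca-7b-q4.bin",
        "--ctx_size", "2048",
        "--n_predict", "128",
        "--temp", "0.1",
        "--top_k", "40",
        "--top_p", "0.95",
        "--prompt", "User: say hello in one short sentence.",
    ],
    cwd="/content/llama.cpp",   # directory where ./main was built
    capture_output=True,
    text=True,
)
print(result.stdout)
```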