aviol committed
Commit a71052a · verified · 1 Parent(s): eaf718b

Update README.md

Files changed (1)
  1. README.md +11 -6
README.md CHANGED
@@ -5,30 +5,35 @@ license: apache-2.0
 
 # Download tiny llama llamafile
 - Go to https://huggingface.co/aviol/TinyLlama1.1-llamafile-bootstraped/blob/main/TinyLlama-1.1B.llamafile
-- Download using the download button
+- Download this file using the download button.
 
 # Run the server
 ```shell
-# Make executable after downloading
 chmod +x TinyLlama-1.1B.llamafile
 
-# Run
 ./TinyLlama-1.1B.llamafile --server --host 0.0.0.0 --port 1234
 ```
 
-
-# Use the LLM using OpenAI SDK:
+# Use the LLM with OpenAI SDK:
 ```python
 from openai import OpenAI
 
+
 client = OpenAI(base_url="http://127.0.0.1:1234/v1", api_key="test")
 
+# Prompt
+prompt = "Hi, tell me something new about AppSec"
+
+# Send API request to llamafile server
 stream = client.chat.completions.create(
-    model="chatgpt-4",
+    model="avi-llmsky",
     messages=[{"role": "user", "content": prompt}],
     stream=True,
 )
+
+# Print the responses
 for chunk in stream:
     if chunk.choices[0].delta.content is not None:
         print(chunk.choices[0].delta.content, end="")
+
 ```
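For the download step above, a command-line fetch is also possible. This is a sketch only: the `resolve/main` path below is an assumption about Hugging Face's direct-download URL for the linked file, so double-check it against the repo page.

```shell
# Assumed direct-download URL (resolve/ instead of blob/); verify against the repo page
curl -L -o TinyLlama-1.1B.llamafile \
  https://huggingface.co/aviol/TinyLlama1.1-llamafile-bootstraped/resolve/main/TinyLlama-1.1B.llamafile

# Make it executable, as in the README
chmod +x TinyLlama-1.1B.llamafile
```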
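Once the server command above is running, a plain HTTP request is a quick way to confirm the endpoint responds. This is a minimal sketch assuming the same host/port and the `/v1/chat/completions` route implied by the SDK's `base_url` in the README.

```shell
# Smoke test against the OpenAI-compatible endpoint the SDK example points at
curl http://127.0.0.1:1234/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "avi-llmsky", "messages": [{"role": "user", "content": "Hello"}]}'
```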
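The Python example above assumes the OpenAI Python SDK (v1 interface) is installed:

```shell
# The README's Python snippet uses the OpenAI Python SDK
pip install openai
```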