Divyasreepat committed
Commit: 28745a1
1 Parent(s): 929e14b
Update README.md with new model card content

README.md CHANGED
````diff
@@ -55,7 +55,7 @@ import numpy as np
 
 Use `generate()` to do text generation.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en"
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en")
 mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500)
 
 # Generate with batched prompts.
````
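The fixed line is the model-construction call at the top of the README's generation example. A minimal sketch of how that example reads once the closing parenthesis is restored, assuming `keras_hub` is installed with a configured Keras backend and the ~7B-parameter preset weights can be downloaded; the second batched prompt is illustrative, not taken from the diff:

```python
import keras_hub

# Load the instruction-tuned Mistral 7B preset (downloads weights on first use).
mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en")

# Single prompt, using Mistral's [INST] ... [/INST] instruction format.
mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500)

# Batched prompts: pass a list of strings (mirrors the README's batched example).
mistral_lm.generate(
    ["[INST] What is Keras? [/INST]", "[INST] What is JAX? [/INST]"],
    max_length=500,
)
```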
````diff
@@ -64,7 +64,7 @@ mistral_lm.generate(["[INST] What is Keras? [/INST]", "[INST] Give me your best
 
 Compile the `generate()` function with a custom sampler.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en"
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en")
 mistral_lm.compile(sampler="greedy")
 mistral_lm.generate("I want to say", max_length=30)
 
````
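The same one-character fix applies to the sampler example. A short sketch of the corrected snippet, again assuming the preset is available; the `TopKSampler` lines are an illustrative extra showing that `compile()` also accepts sampler instances, not only string names:

```python
import keras_hub

mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en")

# Select the sampling strategy by name...
mistral_lm.compile(sampler="greedy")
mistral_lm.generate("I want to say", max_length=30)

# ...or pass a sampler object for finer control (illustrative, not from the diff).
mistral_lm.compile(sampler=keras_hub.samplers.TopKSampler(k=5))
mistral_lm.generate("I want to say", max_length=30)
```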
````diff
@@ -92,7 +92,7 @@ mistral_lm.generate(prompt)
 Call `fit()` on a single batch.
 ```python
 features = ["The quick brown fox jumped.", "I forgot my homework."]
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en"
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en")
 mistral_lm.fit(x=features, batch_size=2)
 ```
 
````
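The `fit()` hunk gets the same correction. A sketch of the fine-tuning call as it reads after the fix; calling `fit()` on raw strings works because `from_preset()` attaches a preprocessor that tokenizes the features, and a real fine-tuning run would of course use far more than two sentences:

```python
import keras_hub

features = ["The quick brown fox jumped.", "I forgot my homework."]

mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_instruct_7b_en")

# The attached preprocessor tokenizes the raw strings before the train step.
mistral_lm.fit(x=features, batch_size=2)
```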
````diff
@@ -123,7 +123,7 @@ import numpy as np
 
 Use `generate()` to do text generation.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en"
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en")
 mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500)
 
 # Generate with batched prompts.
````
````diff
@@ -132,7 +132,7 @@ mistral_lm.generate(["[INST] What is Keras? [/INST]", "[INST] Give me your best
 
 Compile the `generate()` function with a custom sampler.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en"
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en")
 mistral_lm.compile(sampler="greedy")
 mistral_lm.generate("I want to say", max_length=30)
 
````
````diff
@@ -160,7 +160,7 @@ mistral_lm.generate(prompt)
 Call `fit()` on a single batch.
 ```python
 features = ["The quick brown fox jumped.", "I forgot my homework."]
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en"
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en")
 mistral_lm.fit(x=features, batch_size=2)
 ```
 
````
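The last three hunks repeat the same three snippets for the Hugging Face Hub copy of the preset; only the handle changes. A minimal sketch, assuming network access to the `keras/mistral_instruct_7b_en` repo on the Hub:

```python
import keras_hub

# An "hf://" handle resolves the preset from the Hugging Face Hub
# rather than the default preset source.
mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_instruct_7b_en")
mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500)
```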