Initial commit
README.md
CHANGED
@@ -1,6 +1,6 @@
 ---
 title: Verbalized Rebus Solver
-emoji:
+emoji: 🧩
 colorFrom: blue
 colorTo: red
 sdk: gradio
@@ -8,6 +8,17 @@ sdk_version: 4.40.0
 app_file: app.py
 pinned: false
 license: apache-2.0
+fullWidth: true
+models:
+- gsarti/phi3-mini-rebus-solver-fp16
+datasets:
+- gsarti/eureka-rebus
+tags:
+- word-game
+- rebus
+- italian
+- word-puzzle
+- crossword
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
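The new `models:` and `datasets:` fields link the Space to the fine-tuned solver checkpoint and its training data on the Hub. As a rough sketch (assuming the dataset exposes at least one standard configuration; the config chosen below is not specified in the commit), the linked dataset can be inspected locally with the `datasets` library:

from datasets import get_dataset_config_names, load_dataset

# List the available subsets of the linked dataset, then load the first one.
# Which subset is relevant here is an assumption, not stated in this commit.
configs = get_dataset_config_names("gsarti/eureka-rebus")
print(configs)
ds = load_dataset("gsarti/eureka-rebus", configs[0])
print(ds)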
app.py
ADDED
@@ -0,0 +1,23 @@
import spaces
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Prompt template in the Phi-3 chat format. The Italian instruction reads:
# "Solve the clues in brackets to obtain a first reading, and use the reading
# key to obtain the solution of the rebus."
template = """<s><|user|>
Risolvi gli indizi tra parentesi per ottenere una prima lettura, e usa la chiave di lettura per ottenere la soluzione del rebus.

{input}<|end|>
<|assistant|>"""

# Load the fine-tuned rebus solver and its tokenizer from the Hub.
tokenizer = AutoTokenizer.from_pretrained("gsarti/phi3-mini-rebus-solver-fp16")
model = AutoModelForCausalLM.from_pretrained("gsarti/phi3-mini-rebus-solver-fp16")


@spaces.GPU
def solve_verbalized_rebus(message, history):
    # gr.ChatInterface passes the user message and the chat history; the
    # history is unused, since each rebus is solved independently.
    prompt = template.format(input=message)
    input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"]
    outputs = model.generate(input_ids=input_ids, max_new_tokens=500, use_cache=True)
    # Return the full decoded sequence (prompt plus generated solution).
    return tokenizer.batch_decode(outputs)[0]


# Example input: a verbalized rebus with Italian clues in brackets, followed by
# a solution key giving the lengths of the words in the final answer.
demo = gr.ChatInterface(
    fn=solve_verbalized_rebus,
    examples=[
        "Rebus: [Materiale espulso dai vulcani] R O [Strumento del calzolaio] [Si trovano ai lati del bacino] C I [Si ingrassano con la polenta] E I N [Contiene scorte di cibi] B [Isola in francese]\nChiave risolutiva: 1 ' 5 6 5 3 3 1 14",
    ],
    title="Verbalized Rebus Solver",
)
demo.launch()
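Once the Space is running, the chat endpoint can also be queried programmatically. Below is a minimal sketch using gradio_client; the Space ID is a placeholder (the actual "username/space-name" is not stated in this commit), and the "/chat" endpoint name assumes the default one exposed by gr.ChatInterface in Gradio 4.x.

from gradio_client import Client

# Placeholder Space ID: replace with the deployed Space's "username/space-name".
client = Client("username/verbalized-rebus-solver")

rebus = (
    "Rebus: [Materiale espulso dai vulcani] R O [Strumento del calzolaio] "
    "[Si trovano ai lati del bacino] C I [Si ingrassano con la polenta] E I N "
    "[Contiene scorte di cibi] B [Isola in francese]\n"
    "Chiave risolutiva: 1 ' 5 6 5 3 3 1 14"
)
# ChatInterface exposes its prediction function under api_name="/chat".
print(client.predict(rebus, api_name="/chat"))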