Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -5,7 +5,7 @@ from dotenv import load_dotenv
 from httpx import Client
 from huggingface_hub import HfApi
 import pandas as pd
-
+from gradio_huggingfacehub_search import HuggingfaceHubSearch
 import spaces
 from llama_cpp import Llama
 
@@ -22,7 +22,6 @@ headers = {
 }
 client = Client(headers=headers)
 api = HfApi(token=HF_TOKEN)
-#pipe = pipeline("text-generation", model="motherduckdb/DuckDB-NSQL-7B-v0.1", device="cuda")
 llama = Llama(
     model_path="DuckDB-NSQL-7B-v0.1-q8_0.gguf",
     n_ctx=2048,
@@ -102,7 +101,12 @@ def text2sql(dataset_name, query_input):
 
 with gr.Blocks() as demo:
     gr.Markdown("# 💫 Generate SQL queries based on a given text for your Hugging Face Dataset 💫")
-    dataset_name = gr.Textbox("jamescalam/world-cities-geo", label="Dataset Name")
+    dataset_name = HuggingfaceHubSearch(
+        label="Hub Dataset ID",
+        placeholder="Search for dataset id on Huggingface",
+        search_type="dataset",
+    )
+    # dataset_name = gr.Textbox("jamescalam/world-cities-geo", label="Dataset Name")
     query_input = gr.Textbox("Cities from Albania country", label="Ask something about your data")
     examples = [
         ["Cities from Albania country"],
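
The commit swaps the hard-coded dataset textbox for the HuggingfaceHubSearch component from gradio_huggingfacehub_search. Below is a minimal, self-contained sketch of how that component can be wired to the existing text2sql callback; the button, the gr.Code output widget, and the stubbed text2sql body are assumptions, since the rest of app.py does not appear in this diff.

import gradio as gr
from gradio_huggingfacehub_search import HuggingfaceHubSearch


def text2sql(dataset_name, query_input):
    # Stub standing in for the Space's real text2sql(); it only echoes a query.
    return f"SELECT * FROM '{dataset_name}' LIMIT 10; -- {query_input}"


with gr.Blocks() as demo:
    # Same component and arguments as in the diff above.
    dataset_name = HuggingfaceHubSearch(
        label="Hub Dataset ID",
        placeholder="Search for dataset id on Huggingface",
        search_type="dataset",
    )
    query_input = gr.Textbox("Cities from Albania country", label="Ask something about your data")
    sql_output = gr.Code(language="sql", label="Generated SQL")  # assumed output widget
    generate_btn = gr.Button("Generate SQL")                     # assumed trigger
    generate_btn.click(text2sql, inputs=[dataset_name, query_input], outputs=sql_output)

demo.launch()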
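
The diff also keeps the llama-cpp-python setup that loads the quantized DuckDB-NSQL-7B GGUF checkpoint (and drops the commented-out transformers pipeline). For orientation, here is a minimal sketch of how such a model can be prompted for SQL generation; the prompt template, table schema, and sampling parameters are illustrative assumptions, not code taken from this Space.

from llama_cpp import Llama

# Load the same quantized checkpoint referenced in the diff.
llama = Llama(
    model_path="DuckDB-NSQL-7B-v0.1-q8_0.gguf",
    n_ctx=2048,
)

# Assumed instruction-style prompt; the real app builds its own prompt
# from the dataset schema and the user question.
prompt = (
    "### Instruction:\n"
    "Generate valid DuckDB SQL that answers the question below.\n\n"
    "### Input:\n"
    "CREATE TABLE cities (city VARCHAR, country VARCHAR, region VARCHAR);\n\n"
    "### Question:\n"
    "Cities from Albania country\n\n"
    "### Response:\n"
)

# llama(...) is shorthand for create_completion(); the result is a dict
# with the generated text under choices[0]["text"].
result = llama(prompt, max_tokens=256, temperature=0.1, stop=["###"])
print(result["choices"][0]["text"].strip())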