rukaiyaaaah committed
Commit 3db7762
1 Parent(s): c213e21

Create app.py

Files changed (1)
  1. app.py +61 -0
app.py ADDED
@@ -0,0 +1,61 @@
+ import os
+ import torch
+ import gradio as gr
+ from transformers import (
+     AutoModelForCausalLM,
+     AutoTokenizer,
+     BitsAndBytesConfig,
+     pipeline,
+ )
+
+ # Load the model in 4-bit NF4 precision so it fits comfortably on a single GPU.
+ bnb_config = BitsAndBytesConfig(
+     load_in_4bit=True,
+     bnb_4bit_quant_type="nf4",
+     bnb_4bit_compute_dtype=torch.float16,
+     bnb_4bit_use_double_quant=False,
+ )
+ device_map = {"": 0}
+
+ model_name = "rukaiyah-indika-ai/rv-chatbot-2"
+
+ model = AutoModelForCausalLM.from_pretrained(
+     model_name,
+     quantization_config=bnb_config,
+     device_map=device_map,
+ )
+ tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+
+ # Build the text-generation pipeline once and reuse it for every request.
+ pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer, temperature=0.2, max_new_tokens=256)
+
+ # Quick smoke test of the pipeline with a simple prompt.
+ inst = "You are a very helpful assistant providing solutions to road-related queries. Ensure you provide correct and relevant answers. If you don't know the answer to a question, please don't share false information. "
+ prompt = "Hello"
+ ranked_results = pipe(f"<s>[INST] {inst}{prompt} [/INST]")
+ for result in ranked_results:
+     response = result['generated_text']
+     response = response.split("[/INST]", 1)[-1]
+     response = response.replace("<s>", "")
+
+ def generate_response(prompt):
+     inst = "You are a very helpful assistant providing solutions to road-related queries. Ensure you provide correct and relevant answers according to the IRC guidelines. If you don't know the answer to a question, please don't share false information."
+     # Reuse the module-level pipeline rather than rebuilding it on every call.
+     ranked_results = pipe(f"<s>[INST] {inst}{prompt} [/INST]")
+     for result in ranked_results:
+         response = result['generated_text']
+         # Keep only the text after the instruction block and strip special tokens.
+         response = response.split("[/INST]", 1)[-1]
+         response = response.replace("<s>", "")
+         response = response.replace("</s>", "")
+     return response
+
+ iface = gr.Interface(
+     fn=generate_response,
+     inputs="text",
+     outputs="text",
+     title="Road-GPT",
+     description="Enter your query related to road management and get a response generated by an AI model."
+ )
+
+ iface.launch(share=True)
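
For reference, a minimal sketch of how the deployed demo could be queried from Python once iface.launch(share=True) prints its public link. This is not part of the commit: the URL below is a placeholder, gradio_client is assumed to be installed, and "/predict" relies on Gradio's default endpoint name for a single-function Interface.

# Hypothetical client-side usage; replace the placeholder URL with the link
# printed by iface.launch(share=True).
from gradio_client import Client

client = Client("https://xxxx.gradio.live")  # placeholder share URL
answer = client.predict(
    "What surface treatment is recommended for a rural bituminous road?",  # example query
    api_name="/predict",  # Gradio's default endpoint for a single Interface function
)
print(answer)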