FlawedLLM committed
Commit 925ce28
1 Parent(s): fe1b079

Update app.py

Files changed (1)
  1. app.py +11 -3
app.py CHANGED
@@ -4,11 +4,19 @@ import gradio as gr
 from peft import PeftModel, PeftConfig
 from peft import AutoPeftModelForCausalLM
 from transformers import AutoTokenizer
-model = AutoPeftModelForCausalLM.from_pretrained(
-    "FlawedLLM/BhashiniLLM", # YOUR MODEL YOU USED FOR TRAINING
+# model = AutoPeftModelForCausalLM.from_pretrained(
+#     "FlawedLLM/BhashiniLLM", # YOUR MODEL YOU USED FOR TRAINING
+#     load_in_4bit = True,
+# )
+# tokenizer = AutoTokenizer.from_pretrained("FlawedLLM/BhashiniLLM")
+from unsloth import FastLanguageModel
+model, tokenizer = FastLanguageModel.from_pretrained(
+    model_name = "FlawedLLM/BhashiniLLM", # YOUR MODEL YOU USED FOR TRAINING
+    max_seq_length = 2048,
+    dtype = None,
     load_in_4bit = True,
 )
-tokenizer = AutoTokenizer.from_pretrained("FlawedLLM/BhashiniLLM")
+FastLanguageModel.for_inference(model)
 @spaces.GPU(duration=300)
 def chunk_it(input_command):
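After this change, `model` and `tokenizer` come from unsloth's FastLanguageModel (loaded in 4-bit) and the model is switched into inference mode before `chunk_it` runs. The body of `chunk_it` is not part of this hunk; the snippet below is only a minimal sketch of how the unsloth-loaded model might be called from it, assuming the module-level `model`/`tokenizer` defined above, a CUDA device inside the @spaces.GPU-decorated call, and illustrative generation settings (prompt format, max_new_tokens) that are not taken from this repository.

import torch

def chunk_it_sketch(input_command):
    # Sketch only: tokenize the raw command and move the tensors to the GPU
    # that @spaces.GPU allocates for the call.
    inputs = tokenizer([input_command], return_tensors="pt").to("cuda")
    with torch.no_grad():
        # use_cache=True keeps KV caching on, matching FastLanguageModel.for_inference(model)
        outputs = model.generate(**inputs, max_new_tokens=256, use_cache=True)
    # Decode the generated ids back to text, dropping special tokens.
    return tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]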