FlawedLLM committed
Commit 03d49a3
1 Parent(s): 7d60ecc

Update app.py

Files changed (1):
  1. app.py +5 -2
app.py CHANGED
@@ -65,7 +65,7 @@ tokenizer = AutoTokenizer.from_pretrained("FlawedLLM/Bhashini_gemma16")
  model = AutoModelForCausalLM.from_pretrained("FlawedLLM/Bhashini_gemma16", load_in_4bit=True)
 
  @spaces.GPU(duration=300)
- def chunk_it(input_command):
+ def chunk_it(input_command, item_list):
  alpaca_prompt = """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.
 
  ### Instruction:
@@ -76,6 +76,8 @@ def chunk_it(input_command):
 
  ### Response:
  {}"""
+ if item_list is not None:
+     item_list="The ItemName should be STRICTLY chosen from the given list of ItemNames : " + item_list
  inputs = tokenizer(
  [
  alpaca_prompt.format(
@@ -118,7 +120,7 @@ def chunk_it(input_command):
  Duration (integer: number of days, default: 6)
  ReportType (string: "profit", "revenue", "inventory", or Null for all reports)
 
- ALWAYS provide output in a JSON format.''', # instruction
+ ALWAYS provide output in a JSON format.''' + item_list, # instruction
  input_command, # input
  "", # output - leave this blank for generation!
  )
@@ -139,6 +141,7 @@ def chunk_it(input_command):
 
  iface=gr.Interface(fn=chunk_it,
  inputs="text",
+ inputs="text"
  outputs="text",
  title="Formatter_Pro",
  )
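
For readers following the change, the sketch below shows one way the new item_list argument could thread through the prompt and the Gradio wiring. It is a minimal sketch, not the repository's app.py: the model and tokenizer calls are omitted, the instruction text is abridged to the fragment visible in the diff, the three-placeholder Alpaca layout is assumed from the alpaca_prompt.format(instruction, input, output) call, and names such as ALPACA_PROMPT and item_hint are illustrative. It also assumes the repeated inputs="text" line (which, as committed, lacks a trailing comma and duplicates a keyword argument) is meant to expose item_list as a second text box, and guards the concatenation so a missing item_list does not raise a TypeError.

# Minimal sketch under the assumptions stated above; not the Space's actual code.
import gradio as gr

# Assumed three-placeholder Alpaca layout, implied by alpaca_prompt.format(instruction, input, output).
ALPACA_PROMPT = """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.

### Instruction:
{}

### Input:
{}

### Response:
{}"""

def chunk_it(input_command, item_list):
    # Fold the optional item list into the instruction only when one is supplied;
    # defaulting to "" keeps the later concatenation from failing on None.
    item_hint = ""
    if item_list:
        item_hint = "The ItemName should be STRICTLY chosen from the given list of ItemNames : " + item_list
    instruction = "ALWAYS provide output in a JSON format." + item_hint  # abridged instruction
    # The real Space tokenizes this prompt and calls model.generate(); here we just return it.
    return ALPACA_PROMPT.format(instruction, input_command, "")

iface = gr.Interface(
    fn=chunk_it,
    inputs=["text", "text"],  # one text box per chunk_it argument, instead of repeating inputs="text"
    outputs="text",
    title="Formatter_Pro",
)

if __name__ == "__main__":
    iface.launch()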