Upload app.py
app.py
CHANGED
@@ -60,23 +60,23 @@ def AlbertUntrained_fn(text1, text2):
 
 
 # Handle calls to Deberta--------------------------------------------
-
-
-
-
-
+base_model2 = BertForSequenceClassification.from_pretrained("dslim/bert-base-NER")
+peft_model_id2 = "rajevan123/STS-Lora-Fine-Tuning-Capstone-bert-testing-42-with-lower-r-mid"
+model2 = PeftModel.from_pretrained(model=base_model2, model_id=peft_model_id2)
+sa_merged_model2 = model2.merge_and_unload()
+bbu_tokenizer2 = AutoTokenizer.from_pretrained("dslim/bert-base-NER")
 
 DebertaUntrained_pipe = pipeline("text-classification", model="microsoft/deberta-v3-xsmall")
 DebertanoLORA_pipe = pipeline(model="rajevan123/STS-Conventional-Fine-Tuning")
-
+DebertawithLORA_pipe = pipeline("text-classification",model=sa_merged_model2, tokenizer=bbu_tokenizer2)
 
 #STS models
 def DebertanoLORA_fn(text1, text2):
     return DebertanoLORA_pipe({'text': text1, 'text_pair': text2})
 
 def DebertawithLORA_fn(text1, text2):
-
-    return ("working2")
+    return DebertawithLORA_pipe({'text': text1, 'text_pair': text2})
+    #return ("working2")
 
 def DebertaUntrained_fn(text1, text2):
     return DebertaUntrained_pipe({'text': text1, 'text_pair': text2})
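The added block follows the usual PEFT serving pattern: load a base classifier, attach the LoRA adapter, fold the adapter weights into the base model with merge_and_unload(), then hand the merged model to a transformers pipeline. A minimal, self-contained sketch of that pattern, reusing the checkpoint IDs from the diff (the example input texts are illustrative):

# Load base model -> attach LoRA adapter -> merge -> serve via pipeline.
from transformers import AutoTokenizer, BertForSequenceClassification, pipeline
from peft import PeftModel

base_model = BertForSequenceClassification.from_pretrained("dslim/bert-base-NER")
adapter_id = "rajevan123/STS-Lora-Fine-Tuning-Capstone-bert-testing-42-with-lower-r-mid"

# Wrap the base model with the adapter, then merge the LoRA weights back in
# so the result behaves like a plain transformers model.
peft_model = PeftModel.from_pretrained(model=base_model, model_id=adapter_id)
merged_model = peft_model.merge_and_unload()

tokenizer = AutoTokenizer.from_pretrained("dslim/bert-base-NER")
sts_pipe = pipeline("text-classification", model=merged_model, tokenizer=tokenizer)

# Sentence pairs are passed as a dict with 'text' and 'text_pair' keys,
# matching the DebertawithLORA_fn call in the hunk above.
print(sts_pipe({"text": "A man is playing a guitar.", "text_pair": "Someone is playing an instrument."}))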
@@ -199,7 +199,7 @@ def displayMetricStatsTextNLILora():
 
 def displayMetricStatsTextSTSLora():
     #file_name = 'events.out.tfevents.STS-Lora.2'
-    file_name = hf_hub_download(repo_id="rajevan123/STS-Lora-Fine-Tuning-Capstone-
+    file_name = hf_hub_download(repo_id="rajevan123/STS-Lora-Fine-Tuning-Capstone-bert-testing-42-with-lower-r-mid", filename="runs/Mar25_00-56-35_e29d85799d45/events.out.tfevents.1711328197.e29d85799d45.483.4")
     event_acc = event_accumulator.EventAccumulator(file_name,
         size_guidance={
             event_accumulator.COMPRESSED_HISTOGRAMS: 500,
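hf_hub_download() here pulls a single raw TensorBoard event file out of the model repository and returns the local cache path, which is what EventAccumulator expects. A short sketch of just that step, using the repo and run file named in the new line:

# Fetch one TensorBoard event file from the Hub; returns a local cache path.
from huggingface_hub import hf_hub_download

event_file = hf_hub_download(
    repo_id="rajevan123/STS-Lora-Fine-Tuning-Capstone-bert-testing-42-with-lower-r-mid",
    filename="runs/Mar25_00-56-35_e29d85799d45/events.out.tfevents.1711328197.e29d85799d45.483.4",
)
print(event_file)  # path inside the local Hugging Face cache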
@@ -212,7 +212,7 @@ def displayMetricStatsTextSTSLora():
     event_acc.Reload()
     accuracy_data = event_acc.Scalars('eval/accuracy')
     loss_data = event_acc.Scalars('eval/loss')
-    metrics = "Active Training Time:
+    metrics = "Active Training Time: 41.07 mins \n\n"
     for i in range(0, len(loss_data)):
         metrics = metrics + 'Epoch Number: ' + str(i) + '\n'
         metrics = metrics + 'Accuracy (%): ' + str(round(accuracy_data[i].value * 100, 3)) + '\n'
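Reading the metrics back relies on TensorBoard's EventAccumulator: Reload() parses the event file, and Scalars(tag) returns (wall_time, step, value) records for a logged tag. The "41.07 mins" figure in the new line is a hard-coded total, not something read from the file. A compact sketch of the read loop, assuming the same 'eval/accuracy' and 'eval/loss' tags used above:

# Summarize eval metrics from a downloaded TensorBoard event file.
from tensorboard.backend.event_processing import event_accumulator

def summarize_run(event_file: str) -> str:
    acc = event_accumulator.EventAccumulator(
        event_file,
        size_guidance={event_accumulator.SCALARS: 0},  # 0 = keep every scalar event
    )
    acc.Reload()  # must be called before Scalars()

    accuracy = acc.Scalars("eval/accuracy")
    loss = acc.Scalars("eval/loss")

    lines = []
    for i, (a, l) in enumerate(zip(accuracy, loss)):
        lines.append(f"Epoch Number: {i}")
        lines.append(f"Accuracy (%): {round(a.value * 100, 3)}")
        lines.append(f"Loss: {round(l.value, 4)}")
    return "\n".join(lines)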
@@ -336,11 +336,11 @@ with gr.Blocks(
 
     with gr.Row(variant="panel"):
         TextClassOut1 = gr.Textbox(label= "Conventionaly Trained Model")
-        TextClassNoLoraStats = gr.Textbox(label = "Training Informaiton")
+        TextClassNoLoraStats = gr.Textbox(label = "Training Informaiton - Active Training Time: 27.95 mins")
 
     with gr.Row(variant="panel"):
         TextClassOut2 = gr.Textbox(label= "LoRA Fine Tuned Model")
-        TextClassLoraStats = gr.Textbox(label = "Training Informaiton")
+        TextClassLoraStats = gr.Textbox(label = "Training Informaiton - Active Training Time: 15.58 mins")
 
     btn.click(fn=distilBERTUntrained_fn, inputs=inp, outputs=TextClassOut)
     btn.click(fn=distilBERTnoLORA_fn, inputs=inp, outputs=TextClassOut1)
@@ -394,11 +394,11 @@ with gr.Blocks(
 
     with gr.Row(variant="panel"):
         NLIOut1 = gr.Textbox(label= "Conventionaly Trained Model")
-        NLINoLoraStats = gr.Textbox(label = "Training Informaiton")
+        NLINoLoraStats = gr.Textbox(label = "Training Informaiton - Active Training Time: 6.74 mins")
 
     with gr.Row(variant="panel"):
         NLIOut2 = gr.Textbox(label= "LoRA Fine Tuned Model")
-        NLILoraStats = gr.Textbox(label = "Training Informaiton")
+        NLILoraStats = gr.Textbox(label = "Training Informaiton - Active Training Time: 15.04 mins")
 
     nli_btn.click(fn=AlbertUntrained_fn, inputs=[nli_p1,nli_p2], outputs=NLIOut)
     nli_btn.click(fn=AlbertnoLORA_fn, inputs=[nli_p1,nli_p2], outputs=NLIOut1)
@@ -453,11 +453,11 @@ with gr.Blocks(
 
     with gr.Row(variant="panel"):
         sts_out1 = gr.Textbox(label= "Conventionally Trained Model")
-        STSNoLoraStats = gr.Textbox(label = "Training Informaiton")
+        STSNoLoraStats = gr.Textbox(label = "Training Informaiton - Active Training Time: 23.96 mins")
 
     with gr.Row(variant="panel"):
         sts_out2 = gr.Textbox(label= "LoRA Fine Tuned Model")
-        STSLoraStats = gr.Textbox(label = "Training Informaiton")
+        STSLoraStats = gr.Textbox(label = "Training Informaiton - Active Training Time: 41.07 mins")
 
     sts_btn.click(fn=DebertaUntrained_fn, inputs=[sts_p1,sts_p2], outputs=sts_out)
     sts_btn.click(fn=DebertanoLORA_fn, inputs=[sts_p1,sts_p2], outputs=sts_out1)
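All three UI hunks (text classification, NLI, and STS) repeat the same Gradio idiom: a Row per model variant containing an output Textbox plus a static training-info Textbox, and one Button.click binding per model so a single click fans the same inputs out to every variant. A stripped-down sketch of that wiring (component names and the placeholder functions are illustrative, not the app's):

# Minimal Blocks layout mirroring the Row/Textbox/click pattern in the hunks above.
import gradio as gr

def conventional_fn(text1, text2):
    return f"conventional model output for: {text1} / {text2}"  # placeholder

def lora_fn(text1, text2):
    return f"LoRA model output for: {text1} / {text2}"  # placeholder

with gr.Blocks() as demo:
    p1 = gr.Textbox(label="Sentence 1")
    p2 = gr.Textbox(label="Sentence 2")
    btn = gr.Button("Run")

    with gr.Row(variant="panel"):
        out1 = gr.Textbox(label="Conventionally Trained Model")
        stats1 = gr.Textbox(label="Training Information - Active Training Time: 23.96 mins")

    with gr.Row(variant="panel"):
        out2 = gr.Textbox(label="LoRA Fine Tuned Model")
        stats2 = gr.Textbox(label="Training Information - Active Training Time: 41.07 mins")

    # One click event per model; both receive the same pair of inputs.
    btn.click(fn=conventional_fn, inputs=[p1, p2], outputs=out1)
    btn.click(fn=lora_fn, inputs=[p1, p2], outputs=out2)

demo.launch()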