owaiskha9654 committed
Commit 63afc40 • 1 Parent(s): b344400

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -41,10 +41,10 @@ def Personality_Detection_from_reviews_submitted (model_input: str) -> Dict[str,
   "Conscientiousness": float(pred_label[0][3]),
   "Openness": float(pred_label[0][4]),}
   return ret
-model_input = gr.Textbox("Input text here (Note: This model is trained to classify Essays(Still in Progress phase))", show_label=False)
-model_output = gr.Label(" Big-Five personality traits Result", num_top_classes=6, show_label=True, label="Big-Five personality traits Labels assigned to this text")
+model_input = gr.Textbox("Input text here (Note: This model is trained to classify Big Five Personality Traits From Expository text features)", show_label=False)
+model_output = gr.Label(" Big-Five personality traits Result", num_top_classes=6, show_label=True, label="Big-Five personality traits Labels assigned to this text based on its features")
 examples = [
-  ( "Well, here we go with the stream of consciousness essay. I used to do things like this in high school sometimes.",
+  ( "Well, here we go with the stream-of-consciousness essay. I used to do things like this in high school sometimes.",
   "They were pretty interesting, but I often find myself with a lack of things to say. ",
   "I normally consider myself someone who gets straight to the point. I wonder if I should hit enter any time to send this back to the front",
   "Maybe I'll fix it later. My friend is playing guitar in my room now. Sort of playing anyway.",
@@ -81,7 +81,7 @@ examples = [
 ]

 title = "Big Five Personality Traits Detection From Expository text features"
-description = ("In traditional machine learning, it can be challenging to train an accurate model if there is a lack of labeled data specific to the task or domain of interest. Transfer learning offers a way to address this issue by utilizing the pre-existing labeled data from a similar task or domain to improve model performance. By transferring knowledge learned from one task to another, transfer learning enables us to overcome the limitations posed by a shortage of labeled data, and to train more effective models even in data-scarce scenarios. We try to store this knowledge gained in solving the source task in the source domain and applying it to our problem of interest. In this work, I have utilized Transfer Learning utilizing BERT BASE UNCASED model to fine-tune on Big-Five Personality traits Dataset.")
+description = ("<center><img src='https://thoucentric.com/wp-content/themes/cevian-child/assets/img/Thoucentric-Logo.png' alt='Thoucentric-Logo'></center><br><br>In traditional machine learning, it can be challenging to train an accurate model if there is a lack of labeled data specific to the task or domain of interest. Transfer learning offers a way to address this issue by utilizing the pre-existing labeled data from a similar task or domain to improve model performance. By transferring knowledge learned from one task to another, transfer learning enables us to overcome the limitations posed by a shortage of labeled data, and to train more effective models even in data-scarce scenarios. We try to store this knowledge gained in solving the source task in the source domain and applying it to our problem of interest. In this work, I have utilized Transfer Learning utilizing BERT BASE UNCASED model to fine-tune on Big-Five Personality traits Dataset.")

 Fotter = (
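
The hunks above only touch UI strings, so for orientation here is a minimal, self-contained sketch of how the edited pieces plausibly fit together in app.py. The stubbed predictor, the first three trait labels, and the gr.Interface call (including passing the Fotter string as the article) are assumptions for illustration; only the components shown in the diff are taken from the commit itself.

import gradio as gr
from typing import Dict

# Stand-in predictor so this sketch runs on its own. The real
# Personality_Detection_from_reviews_submitted builds this dict from the
# fine-tuned BERT-base-uncased scores; only Conscientiousness and Openness
# are visible in the diff, the other three labels are assumed Big Five names.
def Personality_Detection_from_reviews_submitted(model_input: str) -> Dict[str, float]:
    return {
        "Extroversion": 0.5,
        "Neuroticism": 0.5,
        "Agreeableness": 0.5,
        "Conscientiousness": 0.5,
        "Openness": 0.5,
    }

# Components as edited in this commit.
model_input = gr.Textbox(
    "Input text here (Note: This model is trained to classify Big Five "
    "Personality Traits From Expository text features)",
    show_label=False,
)
model_output = gr.Label(
    num_top_classes=6,
    show_label=True,
    label="Big-Five personality traits Labels assigned to this text based on its features",
)

# Placeholders standing in for the longer strings defined in app.py.
title = "Big Five Personality Traits Detection From Expository text features"
description = "BERT-base-uncased fine-tuned on a Big-Five personality traits dataset (abridged)."
Fotter = "..."  # footer string; its content is not shown in this diff
examples = ["Well, here we go with the stream-of-consciousness essay. I used to do things like this in high school sometimes."]

# Assumption: app.py presumably wires everything together roughly like this;
# the Interface call itself is not part of this commit.
demo = gr.Interface(
    fn=Personality_Detection_from_reviews_submitted,
    inputs=model_input,
    outputs=model_output,
    examples=examples,
    title=title,
    description=description,
    article=Fotter,
)

if __name__ == "__main__":
    demo.launch()

Under these assumptions, the strings changed in this commit surface directly in the rendered Space: the Textbox placeholder, the Label caption, the first example essay, and the description (now prefixed with the Thoucentric logo HTML).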