Yhhxhfh committed
Commit
1e9c143
1 Parent(s): 8d842ab

Update app.py

Files changed (1)
  1. app.py +14 -10
app.py CHANGED
@@ -1,6 +1,7 @@
 import nltk
 nltk.download('punkt')
 nltk.download('wordnet')
+nltk.download('omw-1.4')  # This is important for WordNetLemmatizer
 from nltk.stem import WordNetLemmatizer
 
 import json
@@ -11,6 +12,7 @@ from tensorflow.keras import Sequential
 from tensorflow.keras.layers import Dense, Dropout
 from tensorflow.keras.optimizers import SGD
 
+import redis
 import os
 from dotenv import load_dotenv
 import uvicorn
@@ -22,7 +24,7 @@ app = FastAPI()
 
 @app.on_event("startup")
 async def startup_event():
-    global lemmatizer, words, classes, documents, ignore_words, intents, model, training, train_x, train_y, sgd, hist
+    global lemmatizer, words, classes, documents, ignore_words, intents, model, r, training, train_x, train_y, sgd, hist
 
     lemmatizer = WordNetLemmatizer()
     words = []
@@ -32,6 +34,9 @@ async def startup_event():
 
     with open('intents.json') as file:
         intents = json.load(file)
+
+    redis_password = os.getenv("REDIS_PASSWORD")
+    r = redis.Redis(host=os.getenv("REDIS_HOST"), port=int(os.getenv("REDIS_PORT")), db=int(os.getenv("REDIS_DB")), password=redis_password)
 
     while True:
         for intent in intents['intents']:
@@ -74,16 +79,15 @@ async def startup_event():
 
         hist = model.fit(np.array(train_x), np.array(train_y), epochs=300, batch_size=5, verbose=1)
 
-        # Save data and model locally
-        with open('words.pkl', 'wb') as f:
-            pickle.dump(words, f)
-        with open('classes.pkl', 'wb') as f:
-            pickle.dump(classes, f)
-        with open('training.pkl', 'wb') as f:  # Save training data
-            pickle.dump(training, f)
-        model.save('chatbot_model.h5')
+        r.set('words', pickle.dumps(words))
+        r.set('classes', pickle.dumps(classes))
+        r.set('train_x', pickle.dumps(train_x))
+        r.set('train_y', pickle.dumps(train_y))
+        r.set('model', pickle.dumps(model))
+
+        print("Data and model saved to Redis. Re-training...")
+
 
-        print("Data and model saved locally. Re-training...")
 
 if __name__ == "__main__":
     uvicorn.run(app, host="0.0.0.0", port=7860)
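For reference, here is a minimal sketch (hypothetical, not part of this commit) of how another process could restore the artifacts this startup hook writes to Redis. It assumes the same REDIS_* environment variables (for example from a .env file, matching the load_dotenv import above), that all five keys have already been written, and that the pickled tf.keras model round-trips on the installed TensorFlow version, which recent versions support but older ones may not:

# Hypothetical companion sketch: restore the artifacts that
# startup_event() stores in Redis. Assumes the same REDIS_* environment
# variables and that pickle.loads() can rebuild the tf.keras model on
# the TensorFlow version in use.
import os
import pickle

import redis
from dotenv import load_dotenv

load_dotenv()  # populate REDIS_* variables from a .env file, if present

r = redis.Redis(
    host=os.getenv("REDIS_HOST"),
    port=int(os.getenv("REDIS_PORT")),
    db=int(os.getenv("REDIS_DB")),
    password=os.getenv("REDIS_PASSWORD"),
)

# r.get() returns None for a missing key, so fail loudly before unpickling.
payloads = {key: r.get(key) for key in ("words", "classes", "train_x", "train_y", "model")}
missing = [key for key, value in payloads.items() if value is None]
if missing:
    raise RuntimeError(f"Redis is missing keys: {missing}")

words = pickle.loads(payloads["words"])
classes = pickle.loads(payloads["classes"])
train_x = pickle.loads(payloads["train_x"])
train_y = pickle.loads(payloads["train_y"])
model = pickle.loads(payloads["model"])

print(f"Restored {len(words)} words and {len(classes)} classes from Redis")

If pickle.dumps(model) fails on the deployed TensorFlow version, a fallback is to keep the removed model.save('chatbot_model.h5') path and store that file's bytes under the 'model' key instead.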