Yhhxhfh committed
Commit 8d842ab
Parent: 0832ddb

Update app.py

Files changed (1):
  app.py (+11, -15)
app.py CHANGED
@@ -11,7 +11,6 @@ from tensorflow.keras import Sequential
 from tensorflow.keras.layers import Dense, Dropout
 from tensorflow.keras.optimizers import SGD
 
-import redis
 import os
 from dotenv import load_dotenv
 import uvicorn
@@ -23,7 +22,7 @@ app = FastAPI()
 
 @app.on_event("startup")
 async def startup_event():
-    global lemmatizer, words, classes, documents, ignore_words, intents, model, r, training, train_x, train_y, sgd, hist
+    global lemmatizer, words, classes, documents, ignore_words, intents, model, training, train_x, train_y, sgd, hist
 
     lemmatizer = WordNetLemmatizer()
     words = []
@@ -33,10 +32,8 @@ async def startup_event():
 
     with open('intents.json') as file:
         intents = json.load(file)
-
-    r = redis.Redis(host=os.getenv("REDIS_HOST"), port=int(os.getenv("REDIS_PORT")), db=int(os.getenv("REDIS_DB")))
 
-    while True: # Infinite training loop
+    while True:
         for intent in intents['intents']:
             for pattern in intent['patterns']:
                 w = nltk.word_tokenize(pattern)
@@ -77,17 +74,16 @@ async def startup_event():
 
         hist = model.fit(np.array(train_x), np.array(train_y), epochs=300, batch_size=5, verbose=1)
 
+        # Save data and model locally
+        with open('words.pkl', 'wb') as f:
+            pickle.dump(words, f)
+        with open('classes.pkl', 'wb') as f:
+            pickle.dump(classes, f)
+        with open('training.pkl', 'wb') as f:  # Save training data
+            pickle.dump(training, f)
+        model.save('chatbot_model.h5')
 
-        # Save objects to Redis
-        r.set('words', pickle.dumps(words))
-        r.set('classes', pickle.dumps(classes))
-        r.set('train_x', pickle.dumps(train_x))
-        r.set('train_y', pickle.dumps(train_y))
-        r.set('model', pickle.dumps(model))
-
-        print("Data and model saved to Redis. Re-training...")
-
-
+        print("Data and model saved locally. Re-training...")
 
 if __name__ == "__main__":
     uvicorn.run(app, host="0.0.0.0", port=7860)
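
For context, the pickled artifacts and the HDF5 model written by the loop above can be read back by a separate process. Below is a minimal, hypothetical loader sketch: the file names (words.pkl, classes.pkl, training.pkl, chatbot_model.h5) come from the diff, but the loader script itself is an assumption and not part of this commit.

# load_artifacts.py -- hypothetical companion script, not part of this commit.
# It relies only on the file names written by the save block in the diff above.
import pickle

from tensorflow.keras.models import load_model

with open('words.pkl', 'rb') as f:
    words = pickle.load(f)        # tokenized vocabulary pickled each training pass
with open('classes.pkl', 'rb') as f:
    classes = pickle.load(f)      # intent class labels
with open('training.pkl', 'rb') as f:
    training = pickle.load(f)     # combined training rows (features plus one-hot labels)

model = load_model('chatbot_model.h5')  # Keras model written by model.save(...)
print(f"Loaded {len(words)} words and {len(classes)} classes.")

Reading from local files avoids the external Redis dependency the commit removes; the trade-off is that the artifacts are only visible on the machine (or volume) where the app runs.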