import random
import json
import pickle

import nltk
from nltk.stem.lancaster import LancasterStemmer
import numpy as np
import tflearn
import tensorflow
from flask import Flask, render_template, request

stemmer = LancasterStemmer()
app = Flask(__name__)
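
# Minimal sketch of the expected intents.json structure (hypothetical example entry):
# {"intents": [{"tag": "greeting",
#               "patterns": ["Hi", "How are you"],
#               "responses": ["Hello!", "Hi there, how can I help?"]}]}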
with open("intents.json") as file:
    data = json.load(file)

try:
    # Reuse the preprocessed data if it has already been pickled
    with open("other/data1.pickle", "rb") as f:
        words, labels, training, output = pickle.load(f)
except FileNotFoundError:
    words = []
    labels = []
    docs_x = []
    docs_y = []

    for intent in data["intents"]:
        for pattern in intent["patterns"]:
            wrds = nltk.word_tokenize(pattern)  # split each pattern into its words
            words.extend(wrds)
            docs_x.append(wrds)
            docs_y.append(intent["tag"])

        if intent["tag"] not in labels:
            labels.append(intent["tag"])

    # Data preprocessing: stem every word to its root form and remove duplicates
    words = [stemmer.stem(w.lower()) for w in words if w != "?"]  # main word list
    words = sorted(list(set(words)))
    labels = sorted(labels)

    ''' Using one-hot encoding to convert strings into numerals:
    for each pattern we check whether a word from the main word list (words) occurs in it;
    if it does, a 1 is appended to the bag, otherwise a 0.
    These vectors are used as the input to the neural network. '''
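    # Illustrative sketch (hypothetical values, not taken from intents.json):
    # if words = ["hi", "how", "you"] and a tokenized, stemmed pattern is ["how", "you"],
    # then its bag vector is [0, 1, 1] and, for labels = ["goodbye", "greeting"],
    # a "greeting" pattern gets the output row [0, 1].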
    training = []  # one bag-of-words vector per pattern
    output = []    # one one-hot tag vector per pattern
    out_empty = [0 for _ in range(len(labels))]  # every tag initialised to 0

    for x, doc in enumerate(docs_x):
        bag = []  # bag of one-hot encoded words
        wrds = [stemmer.stem(w) for w in doc]
        for w in words:
            if w in wrds:
                bag.append(1)
            else:
                bag.append(0)
        output_row = out_empty[:]
        output_row[labels.index(docs_y[x])] = 1  # mark the tag this pattern belongs to
        training.append(bag)
        output.append(output_row)

    training = np.array(training)
    output = np.array(output)

    with open("other/data1.pickle", "wb") as f:
        pickle.dump((words, labels, training, output), f)
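
# Delete other/data1.pickle after editing intents.json so the training data is rebuilt
# (assumption about the intended workflow; the cached pickle is reused otherwise).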

# Creating the model
''' The model predicts which tag we should take a response from to answer the user.
The softmax function assigns a probability to each tag, and a response from the tag
with the highest probability is given as the output to the user. '''
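# Illustrative sketch (hypothetical values): with labels = ["goodbye", "greeting", "thanks"],
# a softmax output of [0.05, 0.90, 0.05] selects the "greeting" tag.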
tensorflow.compat.v1.reset_default_graph()

net = tflearn.input_data(shape=[None, len(training[0])])  # input layer: one neuron per word in the vocabulary
net = tflearn.fully_connected(net, 8)  # two hidden layers of 8 neurons
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")  # output layer: softmax gives a probability for each tag
net = tflearn.regression(net)
model = tflearn.DNN(net)

try:
    # Reuse the trained model if it has already been saved
    model.load("other/model1.tflearn")
except Exception:
    model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
    model.save("other/model1.tflearn")

def bag_of_words(sentence, words):
    ''' One-hot encode a user sentence against the main word list. '''
    bag = [0 for _ in range(len(words))]
    s_words = nltk.word_tokenize(sentence)
    s_words = [stemmer.stem(word.lower()) for word in s_words]
    for se in s_words:
        for i, w in enumerate(words):
            if w == se:
                bag[i] = 1
    return np.array(bag)
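# Example usage (hypothetical input): bag_of_words("hello there", words) returns a
# NumPy array of 0s and 1s with one entry per word in the main word list.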

@app.route("/", methods=['GET', 'POST'])
def chat():
    inp = request.form.get('text_input')
    output_text = ""
    if request.method == "POST":
        # Predict a probability for every tag and pick the most likely one
        result = model.predict([bag_of_words(inp, words)])[0]
        result_index = np.argmax(result)
        tag = labels[result_index]
        if result[result_index] > 0.7:
            # Reply with a random response from the predicted tag
            for tg in data['intents']:
                if tg['tag'] == tag:
                    responses = tg['responses']
            output_text = random.choice(responses)
        else:
            output_text = "I didn't understand that. Please ask another question..."
    return render_template("index.html", content=output_text)

if __name__ == "__main__":
    app.run(debug=True)
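# To run locally: `python main.py` starts Flask's development server (by default at
# http://127.0.0.1:5000/) and serves the chat form from templates/index.html.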