Anjana-S committed on
Commit
529c0f4
·
1 Parent(s): 097096a

Upload 2 files

Browse files
Files changed (2) hide show
  1. __init__.py +0 -53
  2. dnn_main.py +38 -0
__init__.py CHANGED
@@ -1,53 +0,0 @@
1
- import numpy as np
2
- from tqdm import tqdm
3
-
4
-
5
class BackPropogation:
    """Single-layer perceptron classifier trained with the perceptron learning rule.

    Despite the name, this implements simple error-driven weight updates
    (target - prediction), not multi-layer backpropagation. All activations
    are thresholded to produce a hard 0/1 class label.
    """

    def __init__(self, learning_rate=0.01, epochs=100, activation_function='step'):
        """Configure the classifier.

        Parameters
        ----------
        learning_rate : float
            Step size applied to each weight/bias update.
        epochs : int
            Number of full passes over the training data.
        activation_function : str
            One of 'step', 'sigmoid', or 'relu'; each is thresholded to 0/1.
        """
        self.bias = 0
        self.learning_rate = learning_rate
        self.max_epochs = epochs
        self.activation_function = activation_function

    def activate(self, x):
        """Return a hard 0/1 label for pre-activation value ``x``.

        Raises
        ------
        ValueError
            If the configured activation function name is not recognized.
            (Previously an unknown name silently returned ``None``, which
            surfaced later as an opaque TypeError inside ``fit``.)
        """
        if self.activation_function == 'step':
            return 1 if x >= 0 else 0
        if self.activation_function == 'sigmoid':
            # Sigmoid output thresholded at 0.5, i.e. equivalent to x >= 0.
            return 1 if (1 / (1 + np.exp(-x))) >= 0.5 else 0
        if self.activation_function == 'relu':
            return 1 if max(0, x) >= 0.5 else 0
        raise ValueError(
            f"Unknown activation function: {self.activation_function!r}"
        )

    def fit(self, X, y):
        """Train weights and bias on samples ``X`` with 0/1 targets ``y``.

        Weights are initialized to zero; each misclassified sample nudges
        the weights by ``learning_rate * error * inputs``.
        """
        # tqdm is an optional nicety (progress bar); fall back to a plain
        # iterator so training works without it installed.
        try:
            from tqdm import tqdm
        except ImportError:
            def tqdm(iterable):
                return iterable

        n_features = X.shape[1]
        self.weights = np.zeros(n_features)
        for epoch in tqdm(range(self.max_epochs)):
            for i in range(len(X)):
                inputs = X[i]
                target = y[i]
                weighted_sum = np.dot(inputs, self.weights) + self.bias
                prediction = self.activate(weighted_sum)

                # Calculating loss and updating weights.
                error = target - prediction
                self.weights += self.learning_rate * error * inputs
                self.bias += self.learning_rate * error

            print(f"Updated Weights after epoch {epoch} with {self.weights}")
        print("Training Completed")

    def predict(self, X):
        """Return a list of 0/1 predictions, one per row of ``X``."""
        return [
            self.activate(np.dot(X[i], self.weights) + self.bias)
            for i in range(len(X))
        ]
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
dnn_main.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Train a small dense network for IMDB sentiment classification.

Loads the Keras IMDB dataset (top 5000 words), pads reviews to a fixed
length, trains an Embedding + Dense stack with a sigmoid output for
binary sentiment, reports test accuracy, and saves the model.
"""
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Embedding, Flatten
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences

# Keep only the 5000 most frequent words; rarer words are dropped.
top_words = 5000
(X_train, y_train), (X_test, y_test) = imdb.load_data(num_words=top_words)

# Pad/truncate every review to a fixed length so the Embedding input is uniform.
max_review_length = 500
X_train = pad_sequences(X_train, maxlen=max_review_length)
X_test = pad_sequences(X_test, maxlen=max_review_length)

# Modelling a sample DNN: embedding -> flatten -> shrinking dense stack.
model = Sequential()
model.add(Embedding(input_dim=top_words, output_dim=24, input_length=max_review_length))
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(16, activation='relu'))
# Single sigmoid unit: binary sentiment probability.
model.add(Dense(1, activation='sigmoid'))

model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()

print("Training Started.")
# NOTE(review): 100 epochs with no early stopping will heavily overfit a
# dense IMDB model; consider EarlyStopping on val_loss.
history = model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=100, batch_size=20)

loss, acc = model.evaluate(X_test, y_test)
print("Training Finished.")

print(f'Test Accuracy: {round(acc * 100)}')

# Save next to the working directory instead of a machine-specific
# absolute Windows path, so the script is portable.
model.save('DP.keras')