
Commit

Merge pull request #1 from anubhav2901/patch-1
Patch 1
anubhav2901 authored May 10, 2022
2 parents 77a28ac + c76bddb commit 6fcd1fd
Showing 9 changed files with 438 additions and 0 deletions.
18 changes: 18 additions & 0 deletions .github/workflows/main.yml
@@ -0,0 +1,18 @@
name: Run demo

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3

      - name: Install dependencies
        run: pip install -r requirements.txt

      - name: Run demo.py
        run: python demo.py
86 changes: 86 additions & 0 deletions demo.py
@@ -0,0 +1,86 @@
import numpy as np
import tensorflow as tf
from keras.layers import Dense

import matplotlib.pyplot as plt

def flood_categorical_crossentropy(y_true, y_pred):
    loss = tf.keras.losses.categorical_crossentropy(y_true, y_pred)
    loss = tf.math.abs(loss - b) + b  #flooding: b is the flood level set below
    return loss

#testing flooding for MNIST

#import the dataset

print("Testing benefits of Flooding for MLP trained on MNIST dataset\n")

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

print("Shape of x_train: {} y_train: {}".format(x_train.shape, y_train.shape))
print("Shape of x_test: {} y_test: {}".format(x_test.shape, y_test.shape))


#data preprocessing
x_train = x_train.reshape(60000, 784) #reshape 28 x 28 image to 784-length vectors.
x_test = x_test.reshape(10000, 784) #reshape 28 x 28 image to 784-length vectors.

x_train = x_train.astype("float32") #change int to float
x_test = x_test.astype("float32")

x_train /= 255 #normalizing
x_test /= 255


num_classes = 10 #number of classes in MNIST
y_train = tf.one_hot(y_train, num_classes) #one hot encoding
y_test = tf.one_hot(y_test, num_classes)

print("Training matrix shape", x_train.shape)
print("Testing matrix shape", x_test.shape)


#define the MLP model trained with the flooded loss
num_nodes = 1000 #number of nodes in hidden layers
batch_size = 200

model = tf.keras.Sequential()
model.add(Dense(num_nodes, input_shape=(784,), activation="relu"))
model.add(Dense(num_nodes, activation="relu"))
model.add(Dense(num_classes, activation="softmax"))


#set flood value
b = 0.01

SGD = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9,)
model.compile(loss=flood_categorical_crossentropy, optimizer=SGD, metrics=["mse", "acc"])
history = model.fit(x_train, y_train, batch_size=batch_size, epochs=10, validation_data=(x_test, y_test), verbose=0)
model.evaluate(x_test, y_test, verbose=2)

#plot figure
fig = plt.figure()
plt.plot(history.history['loss'], label="Train loss")
plt.plot(history.history['val_loss'], label="Test loss")
plt.legend()
plt.show()

#define the baseline MLP (model1), trained with standard categorical cross-entropy
model1 = tf.keras.Sequential()
model1.add(Dense(num_nodes, input_shape=(784,), activation="relu"))
model1.add(Dense(num_nodes, activation="relu"))
model1.add(Dense(num_classes, activation="softmax"))

SGD = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9,)
model1.compile(loss="categorical_crossentropy", optimizer=SGD, metrics=["mse", "acc"])
history1 = model1.fit(x_train, y_train, batch_size=batch_size, epochs=10, validation_data=(x_test, y_test), verbose=0)
model1.evaluate(x_test, y_test, verbose=2)

fig = plt.figure()
plt.plot(history1.history['loss'], label="Train loss")
plt.plot(history1.history['val_loss'], label="Test loss")
plt.legend()
plt.show()

print("Testing Loss: ", history1.history['val_loss'][-1])
print("Testing Loss w/ Flooding: ", history.history['val_loss'][-1])
1 change: 1 addition & 0 deletions figures/.gitkeep
@@ -0,0 +1 @@

Binary file added figures/accuracy.png
Binary file added figures/loss.png
106 changes: 106 additions & 0 deletions models/mlp_fmnist.py
@@ -0,0 +1,106 @@
import numpy as np
import tensorflow as tf
from keras.layers import Dense

import matplotlib.pyplot as plt

#testing flooding for Fashion-MNIST

#import the dataset

print("Testing benefits of Flooding for MLP trained on Fashion-MNIST dataset\n")

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()

print("Shape of x_train: {} y_train: {}".format(x_train.shape, y_train.shape))
print("Shape of x_test: {} y_test: {}".format(x_test.shape, y_test.shape))


#data preprocessing
x_train = x_train.reshape(60000, 784) #reshape 28 x 28 image to 784-length vectors.
x_test = x_test.reshape(10000, 784) #reshape 28 x 28 image to 784-length vectors.

x_train = x_train.astype("float32") #change int to float
x_test = x_test.astype("float32")

x_train /= 255 #normalizing
x_test /= 255


num_classes = 10 #number of classes in Fashion-MNIST
y_train = tf.one_hot(y_train, num_classes) #one hot encoding
y_test = tf.one_hot(y_test, num_classes)

print("Training matrix shape", x_train.shape)
print("Testing matrix shape", x_test.shape)


#define the MLP model trained with the flooded loss
num_nodes = 1000 #number of nodes in hidden layers

model = tf.keras.Sequential()
model.add(Dense(num_nodes, input_shape=(784,), activation="relu"))
model.add(Dense(num_nodes, activation="relu"))
model.add(Dense(num_classes, activation="softmax"))


#set flood value
b = 0.09 #selecting value of b from {0.01 .. 0.1}

#for categorical crossentropy
def flood_categorical_crossentropy(y_true, y_pred):
    loss = tf.keras.losses.categorical_crossentropy(y_true, y_pred)
    loss = tf.math.abs(loss - b) + b
    return loss

SGD = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9,)
model.compile(loss=flood_categorical_crossentropy, optimizer=SGD, metrics=["mse", "acc"])
history = model.fit(x_train, y_train, epochs=100, validation_data=(x_test, y_test))
model.evaluate(x_test, y_test, verbose=2)

#plot loss and accuracy
'''
fig = plt.figure()
plt.plot(history.history['loss'], label="Train loss")
plt.plot(history.history['val_loss'], label="Test loss")
plt.legend()
plt.show()
'''

#define the baseline MLP (model1), trained with standard categorical cross-entropy
model1 = tf.keras.Sequential()
model1.add(Dense(num_nodes, input_shape=(784,), activation="relu"))
model1.add(Dense(num_nodes, activation="relu"))
model1.add(Dense(num_classes, activation="softmax"))

SGD = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9,)
model1.compile(loss="categorical_crossentropy", optimizer=SGD, metrics=["mse", "acc"]) #using categorical_crossentropy loss
history1 = model1.fit(x_train, y_train, epochs=100, validation_data=(x_test, y_test))
model1.evaluate(x_test, y_test, verbose=2)

#plot loss and accuracy
'''
fig = plt.figure()
plt.plot(history1.history['loss'], label="Train loss")
plt.plot(history1.history['val_loss'], label="Test loss")
plt.legend()
plt.show()
'''

#comparing accuracies
fig = plt.figure()
plt.plot(history1.history['acc'], label="Training acc")
plt.plot(history.history['acc'], label="Training acc w/ flooding")
plt.plot(history1.history['val_acc'], label="Testing acc")
plt.plot(history.history['val_acc'], label='Testing acc w/ flooding')
plt.legend()
plt.show()

#comparing losses
fig = plt.figure()
plt.plot(history1.history['loss'], label="Training loss")
plt.plot(history.history['loss'], label="Training loss w/ flooding")
plt.plot(history1.history['val_loss'], label="Testing loss")
plt.plot(history.history['val_loss'], label='Testing loss w/ flooding')
plt.legend()
plt.show()
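The flood level above is fixed at b = 0.09 out of {0.01, ..., 0.1}; a hedged sketch of how that selection could be automated is given below. The loop bounds, the 10-epoch budget, and picking b by the final validation loss are assumptions, and the snippet reuses x_train, y_train, x_test, y_test, num_nodes, num_classes and flood_categorical_crossentropy from the script above.

best_b, best_val_loss = None, float("inf")
for candidate in np.round(np.linspace(0.01, 0.1, 10), 2):
    b = candidate  #flood level read globally by flood_categorical_crossentropy
    m = tf.keras.Sequential([
        Dense(num_nodes, input_shape=(784,), activation="relu"),
        Dense(num_nodes, activation="relu"),
        Dense(num_classes, activation="softmax"),
    ])
    m.compile(loss=flood_categorical_crossentropy,
              optimizer=tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9),
              metrics=["acc"])
    h = m.fit(x_train, y_train, epochs=10, validation_data=(x_test, y_test), verbose=0)
    if h.history["val_loss"][-1] < best_val_loss:
        best_val_loss, best_b = h.history["val_loss"][-1], candidate
print("Best flood level b:", best_b, "with test loss:", best_val_loss)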
117 changes: 117 additions & 0 deletions models/mlp_kmnist.py
@@ -0,0 +1,117 @@
import numpy as np
import tensorflow as tf
from keras.layers import Dense

import tensorflow_datasets as tfds

import matplotlib.pyplot as plt

#testing flooding for KMNIST

#import the dataset

print("Testing benefits of Flooding for MLP trained on KMNIST dataset\n")

dstrain = tfds.as_numpy(tfds.load('kmnist', split='train', batch_size=-1))
x_train = dstrain['image']
y_train = dstrain['label']


dstest = tfds.as_numpy(tfds.load('kmnist', split='test', batch_size=-1))
x_test = dstest['image']
y_test = dstest['label']


print("Shape of x_train: {} y_train: {}".format(x_train.shape, y_train.shape))
print("Shape of x_test: {} y_test: {}".format(x_test.shape, y_test.shape))


#data preprocessing
x_train = x_train.reshape(60000, 784) #reshape 28 x 28 image to 784-length vectors.
x_test = x_test.reshape(10000, 784) #reshape 28 x 28 image to 784-length vectors.

x_train = x_train.astype("float32") #change int to float
x_test = x_test.astype("float32")

x_train /= 255 #normalizing
x_test /= 255


num_classes = 10 #number of classes in KMNIST
y_train = tf.one_hot(y_train, num_classes) #one hot encoding
y_test = tf.one_hot(y_test, num_classes)

print("Training matrix shape", x_train.shape)
print("Testing matrix shape", x_test.shape)


#define the MLP model trained with the flooded loss
num_nodes = 1000 #number of nodes in hidden layers

model = tf.keras.Sequential()
model.add(Dense(num_nodes, input_shape=(784,), activation="relu"))
model.add(Dense(num_nodes, activation="relu"))
model.add(Dense(num_classes, activation="softmax"))


#set flood value
b = 0.01 #selecting value of b from {0.01 .. 0.1}

#for categorical crossentropy
def flood_categorical_crossentropy(y_true, y_pred):
    loss = tf.keras.losses.categorical_crossentropy(y_true, y_pred)
    loss = tf.math.abs(loss - b) + b
    return loss

SGD = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9,)
model.compile(loss=flood_categorical_crossentropy, optimizer=SGD, metrics=["mse", "acc"])
history = model.fit(x_train, y_train, epochs=100, validation_data=(x_test, y_test))
model.evaluate(x_test, y_test, verbose=2)

#plot loss
'''
fig = plt.figure()
plt.plot(history.history['loss'], label="Train loss")
plt.plot(history.history['val_loss'], label="Test loss")
plt.legend()
plt.show()
'''

#define the baseline MLP (model1), trained with standard categorical cross-entropy
model1 = tf.keras.Sequential()
model1.add(Dense(num_nodes, input_shape=(784,), activation="relu"))
model1.add(Dense(num_nodes, activation="relu"))
model1.add(Dense(num_classes, activation="softmax"))

SGD = tf.keras.optimizers.SGD(learning_rate=0.1, momentum=0.9,)
model1.compile(loss="categorical_crossentropy", optimizer=SGD, metrics=["mse", "acc"]) #using categorical_crossentropy loss
history1 = model1.fit(x_train, y_train, epochs=100, validation_data=(x_test, y_test))
model1.evaluate(x_test, y_test, verbose=2)

#plot loss
'''
fig = plt.figure()
plt.plot(history1.history['loss'], label="Train loss")
plt.plot(history1.history['val_loss'], label="Test loss")
plt.legend()
plt.show()
'''

#comparing accuracies
fig = plt.figure()
plt.plot(history1.history['acc'], label="Training acc")
plt.plot(history.history['acc'], label="Training acc w/ flooding")
plt.plot(history1.history['val_acc'], label="Testing acc")
plt.plot(history.history['val_acc'], label='Testing acc w/ flooding')
plt.legend()
plt.show()

#comparing losses
fig = plt.figure()
plt.plot(history1.history['loss'], label="Training loss")
plt.plot(history.history['loss'], label="Training loss w/ flooding")
plt.plot(history1.history['val_loss'], label="Testing loss")
plt.plot(history.history['val_loss'], label='Testing loss w/ flooding')
plt.legend()
plt.show()
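Unlike demo.py, this script does not print a final comparison; a short optional addition (hypothetical, not part of this commit) mirroring the last two prints of demo.py would be:

print("Testing Loss: ", history1.history['val_loss'][-1])
print("Testing Loss w/ Flooding: ", history.history['val_loss'][-1])
print("Testing Acc: ", history1.history['val_acc'][-1])
print("Testing Acc w/ Flooding: ", history.history['val_acc'][-1])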
