fixed loss function
anas-awadalla committed Aug 4, 2020
1 parent 00c028b commit 6f67c53
Showing 3 changed files with 32 additions and 31 deletions.
28 changes: 14 additions & 14 deletions libra/modeling/prediction_model_creation.py
@@ -5,7 +5,7 @@
from tensorflow.python.keras.layers import Dense, Input
import numpy as np
from keras.models import model_from_json
-import os
+import os

# Creates a regression neural network

@@ -67,7 +67,7 @@ def get_keras_model_class(dataset, i, num_classes, add_layer):
while len(key)>0:
model.add(key[0])
del key[0]

if (i < 5):
model.add(
Dense(
@@ -81,18 +81,18 @@ def get_keras_model_class(dataset, i, num_classes, add_layer):
kernel_initializer="normal",
activation="relu"))

-if num_classes == 2:
-model.add(Dense(1, activation="sigmoid"))
-model.compile(
-loss='binary_crossentropy',
-optimizer='adam',
-metrics=['accuracy'])
-elif num_classes > 2:
-model.add(Dense(num_classes, activation="softmax"))
-model.compile(
-loss='categorical_crossentropy',
-optimizer='adam',
-metrics=['accuracy'])
+# if num_classes == 2:
+# model.add(Dense(1, activation="sigmoid"))
+# model.compile(
+# loss='binary_crossentropy',
+# optimizer='adam',
+# metrics=['accuracy'])
+# elif num_classes > 2:
+model.add(Dense(num_classes, activation="softmax"))
+model.compile(
+loss='categorical_crossentropy',
+optimizer='adam',
+metrics=['accuracy'])

return model

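Note on the diff above: with the two-class branch commented out, get_keras_model_class always ends in a softmax layer of width num_classes and compiles with categorical_crossentropy, which requires one-hot encoded targets even for binary problems. A minimal standalone sketch of that pairing, using hypothetical data and layer sizes rather than code from this repository:

# Sketch (hypothetical data, not from this repo): a softmax head of width
# num_classes trained with categorical_crossentropy also covers the binary
# case, as long as the targets are one-hot encoded first.
import numpy as np
from sklearn.preprocessing import OneHotEncoder
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

num_classes = 2                                   # hypothetical binary problem
X = np.random.rand(100, 8)                        # hypothetical features
y = np.random.randint(0, num_classes, (100, 1))   # integer labels 0/1

# categorical_crossentropy expects one column per class, not integer labels
y_onehot = OneHotEncoder().fit_transform(y).toarray()

model = Sequential([
    Dense(16, activation="relu", input_shape=(8,)),
    Dense(num_classes, activation="softmax"),
])
model.compile(loss="categorical_crossentropy",
              optimizer="adam",
              metrics=["accuracy"])
model.fit(X, y_onehot, epochs=3, verbose=0)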
33 changes: 17 additions & 16 deletions libra/query/feedforward_nn.py
@@ -24,7 +24,6 @@
from sklearn.preprocessing import StandardScaler
import numpy as np


os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

counter = 0
@@ -77,14 +76,16 @@ def logger(instruction, found=""):


def get_folder_dir(self):
-dir_path= tkFileDialog.askdirectory()
+dir_path = tkFileDialog.askdirectory()
return dir_path


def get_file():
filename = tkFileDialog.askopenfilename()
if os.path.isfile(filename):
return filename
-else: print ('No file chosen')
+else:
+print('No file chosen')


def regression_ann(
@@ -153,7 +154,7 @@ def regression_ann(

i = 0

-#add_layer format: {<object> : list of indexs}
+# add_layer format: {<object> : list of indexs}
# get the first 3 layer model
model = get_keras_model_reg(data, i, add_layer)

@@ -200,8 +201,8 @@ def regression_ann(
print((" " * 2 * counter) + "| " + ("".join(word.ljust(col_width)
for word in row)) + " |")
datax = []
-#while all(x > y for x, y in zip(losses, losses[1:])):
-while (len(losses)<=2 or losses[len(losses)-1] < losses[len(losses)-2]):
+# while all(x > y for x, y in zip(losses, losses[1:])):
+while (len(losses) <= 2 or losses[len(losses) - 1] < losses[len(losses) - 2]):
model = get_keras_model_reg(data, i, add_layer)
history = model.fit(
X_train,
@@ -312,12 +313,12 @@ def classification_ann(instruction,

num_classes = len(np.unique(y))

-X_train = data['train']
-X_test = data['test']

if num_classes < 2:
raise Exception("Number of classes must be greater than or equal to 2")

+X_train = data['train']
+X_test = data['test']

if num_classes >= 2:
# ANN needs target one hot encoded for classification
one_hot_encoder = OneHotEncoder()
@@ -400,8 +401,8 @@ def classification_ann(instruction,
print((" " * 2 * counter) + "| " + ("".join(word.ljust(col_width)
for word in row)) + " |")
datax = []
-#while all(x < y for x, y in zip(accuracies, accuracies[1:])):
-while (len(accuracies)<=2 or accuracies[len(accuracies)-1] > accuracies[len(accuracies)-2]):
+# while all(x < y for x, y in zip(accuracies, accuracies[1:])):
+while (len(accuracies) <= 2 or accuracies[len(accuracies) - 1] > accuracies[len(accuracies) - 2]):
model = get_keras_model_class(data, i, num_classes, add_layer)
history = model.fit(
X_train,
@@ -498,7 +499,7 @@ def convolutional(instruction=None,
:return dictionary that holds all the information for the finished model.
'''

-#data_path = get_folder_dir()
+# data_path = get_folder_dir()

logger("Generating datasets for classes")

@@ -633,9 +634,11 @@
else:
# Randomly initialized weights
if arch_lower == "vggnet16":
-model = VGG16(include_top=True, weights=None, classes=num_classes, classifier_activation = output_layer_activation)
+model = VGG16(include_top=True, weights=None, classes=num_classes,
+classifier_activation=output_layer_activation)
elif arch_lower == "vggnet19":
-model = VGG19(include_top=True, weights=None, classes=num_classes, classifier_activation = output_layer_activation)
+model = VGG19(include_top=True, weights=None, classes=num_classes,
+classifier_activation=output_layer_activation)
elif arch_lower == "resnet50":
model = ResNet50(include_top=True, weights=None, classes=num_classes)
elif arch_lower == "resnet101":
@@ -688,7 +691,6 @@
activation="softmax"
))


model.compile(
optimizer="adam",
loss=loss_func,
@@ -756,4 +758,3 @@
'validation_accuracy': history.history['val_accuracy']},
'num_classes': (2 if num_classes == 1 else num_classes),
'data_sizes': {'train_size': processInfo['train_size'], 'test_size': processInfo['test_size']}}
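Note on the growth loops touched in this file: the while conditions in regression_ann and classification_ann keep rebuilding the model with more layers as long as fewer than three models have been tried, or the newest loss (for classification, the newest accuracy) still improves on the previous one; losses[len(losses) - 1] is the same element as losses[-1]. A small standalone sketch of that stopping rule with made-up numbers, not code from this repository:

# Sketch of the layer-growing stop rule, using hypothetical validation losses.
def keep_growing(losses):
    # Always try at least three models, then continue only while the
    # latest loss improves on the one before it.
    return len(losses) <= 2 or losses[-1] < losses[-2]

losses = []
for new_loss in [0.90, 0.62, 0.55, 0.57, 0.50]:   # hypothetical measurements
    if not keep_growing(losses):
        break                                      # 0.50 is never tried
    losses.append(new_loss)

print(losses)   # [0.9, 0.62, 0.55, 0.57]; stops once 0.57 fails to improve on 0.55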

2 changes: 1 addition & 1 deletion tests/tests.py
@@ -20,7 +20,7 @@ def compare(a, b):
unittest.defaultTestLoader.sortTestMethodsUsing = compare

class TestQueries(unittest.TestCase):
-newClient = client('tools/data/structured_data/housing.csv')
+newClient = client('/Users/anasawadalla/PycharmProjects/libra/tools/data/structured_data/housing2.csv')
"""
TEST QUERIES
