# -*- coding: utf-8 -*-
"""Project8_AndrewMorhan

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1oMqKChNSIAYytf6YhQMj5IAJy5c6TnIK

Assigned the Traffic data
"""

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns

import tensorflow as tf

from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.layers import TextVectorization, Normalization, Rescaling

print(tf.__version__)

"""Get the Data(Date, Time, Humidity, Tempature, Windspeed, Car, Truck, WInd Direction, NOx, Pm2.5)"""

# Download the raw traffic dataset and assign explicit column names.
url = 'http://drnam.org/pythonfiles/traffic.csv'
raw_dataset = pd.read_csv(url)
raw_dataset.columns = ['Date', 'Time', 'Humidity', 'Tempature', 'Windspeed',
                       'Car', 'Truck', 'Wind Direction', 'NOx', 'Pm25']
dataset = raw_dataset.copy()
dataset.tail()

"""Clean The Data"""

# Count missing values per column (inspection only).
dataset.isna().sum()

#dataset = dataset.dropna()

# Date/Time are record identifiers, not predictive features — drop them.
dataset = dataset.drop(columns=['Date', 'Time'])

"""Spli data into a train and test"""

# Hold out 20% of rows for testing; random_state pins the shuffle so the
# split is reproducible.
train_dataset = dataset.sample(frac=0.8, random_state=1)
test_dataset = dataset.drop(train_dataset.index)

train_dataset.tail()
len(train_dataset)
test_dataset.tail()

# Pairwise joint distributions of all candidate features (KDE diagonals).
sns.pairplot(
    train_dataset[['Humidity', 'Tempature', 'Windspeed', 'Car', 'Truck',
                   'Wind Direction', 'NOx', 'Pm25']],
    diag_kind='kde')

"""Inspect The Data

"""

# Per-column summary statistics of the training set.
train_dataset.describe().transpose()

"""Split Features from Labels."""

# NOx is the regression target; pop() removes it from the feature frames
# and returns it as the label series.
train_features = train_dataset.copy()
test_features = test_dataset.copy()

train_labels = train_features.pop('NOx')
test_labels = test_features.pop('NOx')
train_labels.tail()

"""Normalization Layer"""

# Feature-wise normalization: adapt() learns each column's mean and
# variance from the training data.
normalizer = Normalization()
normalizer.adapt(np.array(train_features))
print(normalizer.mean.numpy())

sample_row = np.array(train_features[:1])

# Show one raw row next to its normalized form as a sanity check.
with np.printoptions(precision=2, suppress=True):
  print('First example:', sample_row)
  print()
  print('Normalized:', normalizer(sample_row).numpy())

"""Linear Regression: One variable (????)"""

print(train_features.columns)

# Single-input baseline: normalize Humidity, then a single Dense unit —
# i.e. plain linear regression y = w*x + b.
Humidity = np.array(train_features['Humidity'])
Humidity_normalizer = Normalization(input_shape=[1,], axis=None)
Humidity_normalizer.adapt(Humidity)

Humidity_model = tf.keras.Sequential(
    [Humidity_normalizer, layers.Dense(units=1)])

Humidity_model.summary()

# Untrained predictions on the first ten samples (shape sanity check).
Humidity_model.predict(Humidity[:10])

Humidity_model.compile(
    loss='mean_absolute_error',
    optimizer=tf.optimizers.Adam(learning_rate=0.1))

# BUG FIX: the Colab export commented out the whole `%%time` cell, which
# left `history` undefined and made the lines below raise NameError.
# Restore the fit call (the `%%time` magic itself is dropped — it only
# timed the cell and has no Python equivalent needed here).
history = Humidity_model.fit(
    train_features['Humidity'], train_labels,
    epochs=100,
    verbose=0,
    validation_split=0.2)

# Tabulate per-epoch training/validation loss; tail() shows the last epochs.
hist = pd.DataFrame(history.history)
hist['epoch'] = history.epoch
hist.tail()

def plot_loss(history):
  """Plot training vs. validation loss per epoch for a Keras History.

  Args:
    history: the History object returned by model.fit(); its `.history`
      dict must contain 'loss' and 'val_loss' (requires validation_split).
  """
  plt.plot(history.history['loss'], label='loss')
  plt.plot(history.history['val_loss'], label='val_loss')
 # plt.ylim([0, 10])
  # BUG FIX: the x axis is the epoch index, not Humidity; the y axis is
  # the MAE loss on the NOx target.
  plt.xlabel('Epoch')
  plt.ylabel('Error [NOx]')
  plt.legend()
  plt.grid(True)

plot_loss(history)

# Collect each model's test-set MAE in one dict for later comparison.
test_results = {}

test_results['Humidity_model'] = Humidity_model.evaluate(
    test_features['Humidity'], test_labels, verbose=0)

# 251-point grid over [0, 250] to draw the fitted line.
# NOTE(review): this is far wider than a 0-100 humidity range — confirm
# against the actual data range.
x = tf.linspace(0.0, 250, 251)
y = Humidity_model.predict(x)

def plot_length1(x, y):
  """Scatter the Humidity training data and overlay predictions y over grid x."""
  plt.scatter(train_features['Humidity'], train_labels, label='Data')
  # Predictions drawn as a black line on top of the scatter.
  plt.plot(x, y, color='k', label='Predictions')
  plt.xlabel('Humidity')
  plt.ylabel('NOx')
  plt.legend()

plot_length1(x, y)

"""Multiple Inputs"""

# Multivariate linear regression: the all-feature normalizer feeds a
# single Dense unit. (Note: this rebinds the Humidity_model name.)
Humidity_model = tf.keras.Sequential(
    [normalizer, layers.Dense(units=1)])

Humidity_model.predict(train_features[:10])

# Inspect the (untrained) weight matrix of the Dense layer.
Humidity_model.layers[1].kernel

Humidity_model.compile(
    loss='mean_absolute_error',
    optimizer=tf.optimizers.Adam(learning_rate=0.1))

# BUG FIX: as with the single-input model, the `%%time` cell was exported
# fully commented out, leaving `history` undefined for plot_loss below.
# Restore the fit call.
history = Humidity_model.fit(
    train_features, train_labels,
    epochs=100,
    verbose=0,
    validation_split=0.2)

plot_loss(history)

# Test-set MAE of the multivariate linear model.
test_results['Humidity'] = Humidity_model.evaluate(
    test_features, test_labels, verbose=0)
print(test_results)

"""DNN: One Variable"""

def build_and_compile_model(norm):
  """Return a compiled two-hidden-layer DNN regressor.

  Args:
    norm: an adapted Normalization layer used as the input stage.

  Returns:
    A keras.Sequential model compiled with MAE loss and Adam(0.001).
  """
  dnn = keras.Sequential([
      norm,
      layers.Dense(64, activation='relu'),
      layers.Dense(64, activation='relu'),
      layers.Dense(1),
  ])
  dnn.compile(loss='mean_absolute_error',
              optimizer=tf.keras.optimizers.Adam(0.001))
  return dnn

# Single-input DNN: same Humidity normalizer, deeper network.
dnn_Humidity_model = build_and_compile_model(Humidity_normalizer)

dnn_Humidity_model.summary()

"""Train"""

history = dnn_Humidity_model.fit(
    train_features['Humidity'],
    train_labels,
    epochs=100,
    verbose=0,
    validation_split=0.2)

plot_loss(history)

# BUG FIX: tf.linspace requires floating-point start/stop; the integer
# arguments (20, 100) are converted to int32 tensors, which the LinSpace
# op rejects. Use float literals.
x = tf.linspace(20.0, 100.0, 300)
y = dnn_Humidity_model.predict(x)

plot_length1(x, y)

# Test-set MAE of the single-input DNN.
test_results['dnn_Humidity_model'] = dnn_Humidity_model.evaluate(
    test_features['Humidity'], test_labels,
    verbose=0)

"""Full Model"""

# Full model: DNN over all normalized features.
dnn_model = build_and_compile_model(normalizer)
dnn_model.summary()

# BUG FIX: the `%%time` cell was exported fully commented out, leaving
# `history` stale/undefined for plot_loss below. Restore the fit call.
history = dnn_model.fit(
    train_features, train_labels,
    validation_split=0.2,
    verbose=0, epochs=100)

plot_loss(history)

"""collect results"""

# Compare all models' test-set mean absolute error side by side.
test_results['dnn_model'] = dnn_model.evaluate(test_features, test_labels, verbose=0)

# BUG FIX: the regression target (the popped label column) is NOx, not
# Humidity, so the metric row is labeled accordingly.
pd.DataFrame(test_results, index=['Mean absolute error [NOx]']).T

"""Make predictions
Finally, predict have a look at the errors made by the model when making predictions on the test set:
"""

# Predicted vs. true values on the test set. BUG FIX: the model predicts
# NOx (the label column), so the axis labels say NOx, not Humidity.
test_predictions = dnn_model.predict(test_features).flatten()

a = plt.axes(aspect='equal')
plt.scatter(test_labels, test_predictions)
plt.xlabel('True Values [NOx]')
plt.ylabel('Predictions [NOx]')
xlims = [0, 250]
ylims = [0, 250]
plt.xlim(xlims)
plt.ylim(ylims)
# Red diagonal = perfect-prediction reference line.
_ = plt.plot(xlims, ylims, color='red')

# Histogram of signed prediction errors on the test set.
error = test_predictions - test_labels
plt.hist(error, bins=25)
plt.xlabel('Prediction Error [NOx]')
_ = plt.ylabel('Count')

"""Save It"""

dnn_model.save('dnn_model.keras')