Multivariate Bayesian regression

Thu 15 April 2021

In [1]:
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import matplotlib.patches as mpatches


plt.rcParams['figure.figsize'] = [12, 8]
sns.set()
sns.set_style('ticks')
sns.set_palette("Set3")
np.set_printoptions(suppress=True)


def loaddata(filename):
    """Load the balloon data set from filename and return t, X
        t - N-dim. vector of target (temperature) values
        X - N-dim. vector containing the inputs (lift) x for each data point
    """
    # Load data set from CSV file
    Xt = np.loadtxt(filename, delimiter=',')

    # Split into data matrix and target vector
    X = Xt[:,0]
    t = Xt[:,1]
    
    return t, X
    
    
def predictiveplot(xnew, mu_pred, sigma2_pred, t, X):
    """Plots the mean of the predictive distribution (green curve) and +/- one predictive standard deviation (red curves).
        xnew - Mx1 vector of new input x values to evaluate the predictive distribution for
        mu_pred - Mx1 vector of predictive mean values evaluated at xnew
        sigma2_pred - Mx1 vector of predictive variance values evaluated at xnew
        t - vector containing the target values of the training data set
        X - vector containing the input values of the training data set
    """
    plt.figure()
    plt.scatter(X, t)
    plt.plot(xnew, mu_pred, c=sns.color_palette("Set3")[4])
    plt.plot(xnew, mu_pred + np.sqrt(sigma2_pred), c=sns.color_palette("Set3")[3])
    plt.plot(xnew, mu_pred - np.sqrt(sigma2_pred), c=sns.color_palette("Set3")[3])



# Load data
t, X = loaddata('./data/hot-balloon-data.csv')


# Visualize the data
plt.figure()
plt.scatter(X, t)
plt.xlabel('Lift')
plt.ylabel('Temperature')
plt.title('Data set')



# This is a good range of input x values to use when visualizing the estimated models
xnew = np.arange(120, 300, dtype=float)
xnew = xnew.reshape(-1,1)

# Show all figures
plt.show()
[Figure: scatter plot of the data set, Lift vs. Temperature]
In [10]:
class MleRegression:
    """Maximum-likelihood linear regression with a bias term."""
            
    def fit(self, X, t):
        
        X = np.array(X).reshape((len(X), -1))
        t = np.array(t).reshape((len(t), 1))
        N = X.shape[0]

        # prepend a column of ones
        ones = np.ones((X.shape[0], 1))
        X = np.concatenate((ones, X), axis=1)

        # compute weights (solve system)
        a = np.dot(X.T, X)
        b = np.dot(X.T, t)
        w_hat = np.linalg.solve(a,b)
        self.w = w_hat

        # compute estimated variance of noise
        sigma_squared_hat = (1/N) * ((t.T @ t) - (t.T @ X @ w_hat))
        self.sigma_squared = sigma_squared_hat
        
        # this term is needed to compute predicted variance later on
        X_T_X_inv = np.linalg.inv(X.T @ X)
        self.X_T_X_inv = X_T_X_inv

    def predict(self, X):
        X = np.array(X).reshape((len(X), -1))

        # prepend a column of ones
        ones = np.ones((X.shape[0], 1))
        X = np.concatenate((ones, X), axis=1)

        # compute predictions
        predictions = np.dot(X, self.w)

        # compute the predictive variance sigma^2 * x^T (X^T X)^{-1} x for each input
        noise_array = []
        for x in X:
            noise_array.append(self.sigma_squared * x.T @ self.X_T_X_inv @ x)
        
        sigma_squared_predictions = np.array(noise_array).reshape(-1,1)

        return predictions, sigma_squared_predictions
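
For reference, these are the closed-form expressions the class implements: fit solves the standard maximum-likelihood (least-squares) equations, and predict adds the plug-in variance of the fitted mean. With a column of ones prepended to $\mathbf{X}$,

$$
\hat{\mathbf{w}} = (\mathbf{X}^\top\mathbf{X})^{-1}\mathbf{X}^\top\mathbf{t}, \qquad
\hat{\sigma}^2 = \frac{1}{N}\left(\mathbf{t}^\top\mathbf{t} - \mathbf{t}^\top\mathbf{X}\hat{\mathbf{w}}\right), \qquad
\sigma^2_{\mathrm{new}} = \hat{\sigma}^2\,\mathbf{x}_{\mathrm{new}}^\top(\mathbf{X}^\top\mathbf{X})^{-1}\mathbf{x}_{\mathrm{new}}.
$$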
In [11]:
# Kth-order polynomial basis expansion: returns the columns x, x^2, ..., x^K
def K_poly_data_transform(X, K):
    res = []
    for k in range(1,K + 1):
        res.append(X**k)
    return np.hstack(res)
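
# A quick sanity check (not from the original notebook):
# K_poly_data_transform(np.array([[2.0]]), 3) returns array([[2., 4., 8.]]),
# i.e. the columns are x, x^2, x^3.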

# Fit the Kth-order polynomial model
K = 3
X_train_poly = X.reshape(-1,1)
t_train = t.reshape(-1,1)

X_train_poly = K_poly_data_transform(X_train_poly, K)

model_K_poly = MleRegression()
model_K_poly.fit(X_train_poly,t_train)

# Print the fitted weights
for w_number, w in enumerate(model_K_poly.w):
    print(f'w{w_number} = {w[0]}')

# Print the estimated noise variance
sigma_squared_hat = model_K_poly.sigma_squared
print(f'Estimated variance = {sigma_squared_hat[0,0]}')

# Create xnew values with the basis functions applied
xnew = xnew.reshape(-1,1)
xnew_poly = K_poly_data_transform(xnew, K)

# Predictive mean and variance at the new inputs
K_poly_t_preds, K_poly_variance_preds = model_K_poly.predict(xnew_poly)
w0 = 81.19108407593413
w1 = 3.5229704588455246
w2 = -0.01896960164106733
w3 = 3.3533872222210275e-05
Estimated variance = 19.675745590015623
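
Plugging the printed weights back in (rounded), the fitted cubic is

$$
t \approx 81.19 + 3.523\,x - 0.01897\,x^2 + 3.353\times 10^{-5}\,x^3,
\qquad \hat{\sigma}^2 \approx 19.68 .
$$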
In [12]:
# Logarithmic basis: a single feature ln(x)
def log_data_transform(X):
    return np.log(X)

X_train_log = log_data_transform(X).reshape(-1,1)
t_train = t.reshape(-1,1)

model_log = MleRegression()
model_log.fit(X_train_log, t_train)

# Print the fitted weights
for w_number, w in enumerate(model_log.w):
    print(f'w{w_number} = {w[0]}')

# Print the estimated noise variance
sigma_squared_hat = model_log.sigma_squared
print(f'Estimated variance = {sigma_squared_hat[0,0]}')

# Create xnew values with the basis function applied
xnew_log = log_data_transform(xnew)

# Predictive mean and variance at the new inputs
log_t_preds, log_variance_preds = model_log.predict(xnew_log)
w0 = 133.69384685371222
w1 = 32.35571692731338
Estimated variance = 25.413164490119865
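
Likewise, the fitted logarithmic model is

$$
t \approx 133.69 + 32.36\,\ln x, \qquad \hat{\sigma}^2 \approx 25.41 .
$$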
In [13]:
# Plot the predictions against the data from the dataset
plt.plot(xnew, K_poly_t_preds, c=sns.color_palette("Set3")[3])
plt.plot(xnew, log_t_preds, c=sns.color_palette("Set3")[4])
plt.scatter(X, t)

plt.xlabel('Lift')
plt.ylabel('Temperature')
plt.title("Plot of the predictions of the two models against the original data points")

plt.legend(["K'th polynomial model prediction curve" , 'Logarithmic model prediction curve', 'True target values'])
plt.show()
[Figure: predictions of the two models plotted against the original data points]
In [14]:
# Bayesian regression
def posterior_density(t, X, sigma_square, mu_0, Sigma_0):
    N = X.shape[0]
    one = np.ones((N,1))
    X = np.concatenate((one, X), axis=1)

    # Posterior covariance and mean of the weights (Gaussian conjugate prior)
    Sigma_w = np.linalg.inv((1/sigma_square) * np.dot(X.T,X) + np.linalg.inv(Sigma_0))
    mu_w = np.dot(Sigma_w, (1/sigma_square) * np.dot(X.T,t) + np.dot(np.linalg.inv(Sigma_0), mu_0))
    return mu_w, Sigma_w

def predictive_density(xnew, t, X, sigma_square, mu_0, Sigma_0):
    N = xnew.shape[0]
    one = np.ones((N,1))
    xnew = np.concatenate((one, xnew), axis=1)

    mu_w, Sigma_w = posterior_density(t, X, sigma_square, mu_0, Sigma_0)

    # Predictive mean at each new input
    mu_preds = xnew @ mu_w

    # Predictive variance: noise variance plus posterior uncertainty in the weights
    res = []
    for x_n in xnew:
        res.append(sigma_square + x_n.T @ Sigma_w @ x_n)
    sigma_square_preds = np.array(res).reshape(-1,1)
    return mu_preds, sigma_square_preds

# Bayesian regression for the logarithmic model
mu_0 = np.array([[0],[0]])
sigma_squared = 25
Sigma_0 = 10 * np.eye(mu_0.shape[0])

mu_preds_log, sigma_squared_preds_log = predictive_density(xnew_log, t_train, X_train_log, sigma_squared, mu_0, Sigma_0)

# Bayesian regression for the Kth-order polynomial model
mu_0 = np.array([[268], [0], [0],[0]])
sigma_squared = 25
Sigma_0 = 10 * np.eye(mu_0.shape[0])

mu_preds_poly, sigma_squared_preds_poly = predictive_density(xnew_poly, t_train, X_train_poly, sigma_squared, mu_0, Sigma_0)
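
The two functions above implement the standard conjugate Gaussian results: with prior $\mathbf{w} \sim \mathcal{N}(\boldsymbol{\mu}_0, \boldsymbol{\Sigma}_0)$ and known noise variance $\sigma^2$, the posterior over the weights and the predictive density for a new input are

$$
\boldsymbol{\Sigma}_{\mathbf{w}} = \left(\tfrac{1}{\sigma^2}\mathbf{X}^\top\mathbf{X} + \boldsymbol{\Sigma}_0^{-1}\right)^{-1}, \qquad
\boldsymbol{\mu}_{\mathbf{w}} = \boldsymbol{\Sigma}_{\mathbf{w}}\left(\tfrac{1}{\sigma^2}\mathbf{X}^\top\mathbf{t} + \boldsymbol{\Sigma}_0^{-1}\boldsymbol{\mu}_0\right),
$$

$$
t_{\mathrm{new}} \mid \mathbf{x}_{\mathrm{new}} \sim \mathcal{N}\!\left(\mathbf{x}_{\mathrm{new}}^\top\boldsymbol{\mu}_{\mathbf{w}},\;\; \sigma^2 + \mathbf{x}_{\mathrm{new}}^\top\boldsymbol{\Sigma}_{\mathbf{w}}\mathbf{x}_{\mathrm{new}}\right).
$$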
In [15]:
predictiveplot(xnew, mu_preds_log, sigma_squared_preds_log, t, X)
plt.xlabel('Lift')
plt.ylabel('Temperature')
plt.title('Predictive plot of Bayesian regression on the logarithmic model')
plt.legend(['Predictive mean', '+/- 1 predictive standard deviation'])
plt.show()
[Figure: predictive plot of Bayesian regression on the logarithmic model]
In [17]:
predictiveplot(xnew, mu_preds_poly, sigma_squared_preds_poly, t, X)
plt.xlabel('Lift')
plt.ylabel('Temperature')
plt.title("Predictive plot of Bayesian regression on the K'th polynomial model")
plt.legend(['Predicted mean', 'Predicted variance bounds'])
plt.show()
[Figure: predictive plot of Bayesian regression on the Kth-order polynomial model]