Question

import numpy as np
from scipy.optimize import minimize
import matplotlib.pyplot as plt
import pickle

# Implement the function logisticObjVal

def logisticObjVal(w, X, y):

    # compute log-loss error (scalar) with respect
    # to w (vector) for the given data X and y
    # Inputs:
    # w = d x 1
    # X = N x d
    # y = N x 1
    # Output:
    # error = scalar

    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    # IMPLEMENT THIS METHOD - REMOVE THE NEXT LINE
    error = 0

    return error
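For reference, a minimal sketch of how logisticObjVal could be filled in. It assumes the labels in y are coded as -1/+1 (the coding used by sample.pickle is not shown), giving the mean log-loss (1/N) * sum ln(1 + exp(-y_i * w^T x_i)):

def logisticObjVal(w, X, y):
    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    N = X.shape[0]
    margins = y * (X @ w)                       # N x 1, each entry y_i * w^T x_i
    # np.logaddexp(0, -m) computes ln(1 + exp(-m)) without overflow
    error = np.sum(np.logaddexp(0.0, -margins)) / N
    return error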

# Implement the function logisticGradient

def logisticGradient(w, X, y):

    # compute the gradient of the log-loss error (vector) with respect
    # to w (vector) for the given data X and y
    #
    # Inputs:
    # w = d x 1
    # X = N x d
    # y = N x 1
    # Output:
    # error = d length gradient vector (not a d x 1 matrix)

    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    # IMPLEMENT THIS METHOD - REMOVE THE NEXT LINE
    gradient = np.zeros((w.shape[0],))

    return gradient
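A matching sketch for the gradient, under the same -1/+1 label assumption. Differentiating the mean log-loss gives -(1/N) * sum_i y_i * x_i * sigmoid(-y_i * w^T x_i); scipy.special.expit provides a numerically stable sigmoid:

from scipy.special import expit

def logisticGradient(w, X, y):
    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    N = X.shape[0]
    margins = y * (X @ w)                       # N x 1
    coeff = -y * expit(-margins)                # N x 1 per-sample weights
    gradient = (X.T @ coeff).flatten() / N      # d-length vector, as required
    return gradient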

# Implement the function predictLinearModel

def predictLinearModel(w, Xtest):
    # Inputs:
    # w = d x 1
    # Xtest = N x d
    # Output:
    # ypred = N x 1 vector of predictions

    # IMPLEMENT THIS METHOD - REMOVE THE NEXT LINE
    ypred = np.zeros([Xtest.shape[0], 1])

    return ypred
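One possible implementation, again assuming -1/+1 labels so that the prediction is the sign of the raw score Xtest @ w:

def predictLinearModel(w, Xtest):
    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    scores = Xtest @ w                          # N x 1 raw scores
    ypred = np.where(scores >= 0, 1.0, -1.0)    # threshold at zero
    return ypred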

# Implement the function evaluateLinearModel

def evaluateLinearModel(w, Xtest, ytest):
    # Inputs:
    # w = d x 1
    # Xtest = N x d
    # ytest = N x 1
    # Output:
    # accuracy = scalar value
    # precision = scalar value
    # recall = scalar value
    # f1 = scalar value

    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    # IMPLEMENT THIS METHOD - REMOVE THE NEXT LINE
    acc = 0
    precision = 0
    recall = 0
    f1 = 0

    return acc, precision, recall, f1
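A sketch of the evaluation, built on the predictLinearModel sketch above. It derives all four metrics from confusion-matrix counts and treats +1 as the positive class (an assumption, since the label coding is not shown):

def evaluateLinearModel(w, Xtest, ytest):
    if len(w.shape) == 1:
        w = w[:, np.newaxis]
    ypred = predictLinearModel(w, Xtest)
    tp = np.sum((ypred == 1) & (ytest == 1))    # true positives
    fp = np.sum((ypred == 1) & (ytest == -1))   # false positives
    fn = np.sum((ypred == -1) & (ytest == 1))   # false negatives
    acc = np.mean(ypred == ytest)
    precision = tp / (tp + fp) if (tp + fp) > 0 else 0.0
    recall = tp / (tp + fn) if (tp + fn) > 0 else 0.0
    f1 = 2 * precision * recall / (precision + recall) if (precision + recall) > 0 else 0.0
    return acc, precision, recall, f1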

Xtrain, ytrain, Xtest, ytest = pickle.load(open('sample.pickle', 'rb'))

Xtrain_i = np.concatenate((np.ones((Xtrain.shape[0],1)), Xtrain), axis=1)
Xtest_i = np.concatenate((np.ones((Xtest.shape[0],1)), Xtest), axis=1)

args = (Xtrain_i,ytrain)
opts = {'maxiter' : 50} # Preferred value.
w_init = np.zeros((Xtrain_i.shape[1],))
soln = minimize(logisticObjVal, w_init, jac=logisticGradient, args=args, method='CG', options=opts)
w = np.transpose(np.array(soln.x))
w = np.reshape(w,[len(w),1])
acc, pre, rec, f1 = evaluateLinearModel(w,Xtrain_i,ytrain)
print('Logistic Regression results on train data - acc:%.2f, pre:%.2f, rec:%.2f, f1:%.2f'%(acc, pre, rec, f1))
acc, pre, rec, f1 = evaluateLinearModel(w,Xtest_i,ytest)
print('Logistic Regression results on test data - acc:%.2f, pre:%.2f, rec:%.2f, f1:%.2f'%(acc, pre, rec, f1))

def plotBoundaries(w, X, y):
    # plot the predicted decision regions over the range of the data,
    # plus the data points; relies on a global matplotlib axis `ax`

    mn = np.min(X, axis=0)
    mx = np.max(X, axis=0)
    x1 = np.linspace(mn[1], mx[1], 100)
    x2 = np.linspace(mn[2], mx[2], 100)
    xx1, xx2 = np.meshgrid(x1, x2)
    xx = np.zeros((x1.shape[0] * x2.shape[0], 2))
    xx[:, 0] = xx1.ravel()
    xx[:, 1] = xx2.ravel()
    xx_i = np.concatenate((np.ones((xx.shape[0], 1)), xx), axis=1)
    ypred = predictLinearModel(w, xx_i)
    ax.contourf(x1, x2, ypred.reshape((x1.shape[0], x2.shape[0])), alpha=0.3, cmap='cool')
    ax.scatter(X[:, 1], X[:, 2], c=y.flatten())

# Modify the code based on the comment

Xtrain, ytrain, Xtest, ytest = pickle.load(open('sample.pickle', 'rb'))

Xtrain_i = np.concatenate((np.ones((Xtrain.shape[0],1)), Xtrain), axis=1)
Xtest_i = np.concatenate((np.ones((Xtest.shape[0],1)), Xtest), axis=1)

# Replace next line with code for learning w using the logistic regression
w_logistic = np.zeros((Xtrain_i.shape[1],1))
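To satisfy the comment above, the placeholder line can be replaced with the same conjugate-gradient fit used earlier (a sketch, assuming logisticObjVal and logisticGradient have been implemented):

args = (Xtrain_i, ytrain)
opts = {'maxiter': 50}
w_init = np.zeros((Xtrain_i.shape[1],))
soln = minimize(logisticObjVal, w_init, jac=logisticGradient,
                args=args, method='CG', options=opts)
w_logistic = np.reshape(soln.x, [len(soln.x), 1])   # d x 1, as plotBoundaries expects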

fig = plt.figure(figsize=(6,6))


ax = plt.subplot(1,1,1)
plotBoundaries(w_logistic,Xtrain_i,ytrain)
ax.set_title('Logistic Regression')
