In [4]:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
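
As a quick, illustrative check (not part of the original exercise), PolynomialFeatures(6) maps the two test scores into all monomial terms up to degree 6, which is 28 columns including the bias term:

In [ ]:
# Illustrative sanity check: a degree-6 polynomial map of 2 features yields
# C(6 + 2, 2) = 28 columns (bias, x1, x2, x1^2, x1*x2, ..., x2^6).
demo = PolynomialFeatures(6).fit_transform(np.array([[0.5, -0.25]]))
print(demo.shape)   # expected: (1, 28)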

In [5]:
def sigmoid(x):
    '''
    The sigmoid function, applied element-wise to x.
    '''
    x = np.asarray(x)
    return 1 / (1 + np.exp(-x))
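
A minimal smoke test for sigmoid (illustrative values only): it should return 0.5 at zero and approach 0 and 1 in the tails.

In [ ]:
# Illustrative check of sigmoid at a few points.
print(sigmoid(0))                       # 0.5
print(sigmoid(np.array([-10, 0, 10])))  # approx. [0., 0.5, 1.]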

In [6]:
def plotDecisionBoundary(data, grad, LAMBDA):
    ''' 
    Draws a scatter plot of the data together with the decision boundary
    implied by the fitted parameter vector `grad`.
    '''
    
    X = data[:, 0:2]
    y = data[:, 2]
    
    # Degree-6 polynomial feature map, fitted on the training inputs
    X_poli = PolynomialFeatures(6)
    X_poli.fit(X)
    
    # Evaluate the hypothesis on a grid covering the range of the data
    X0_min, X0_max = X[:, 0].min(), X[:, 0].max()
    X1_min, X1_max = X[:, 1].min(), X[:, 1].max()
    X0, X1 = np.meshgrid(np.linspace(X0_min, X0_max), np.linspace(X1_min, X1_max))
    
    h = sigmoid(X_poli.transform(np.c_[X0.ravel(), X1.ravel()]).dot(grad))
    h = h.reshape(X0.shape)

    # Indexes for class 0 and class 1
    neg = y == 0
    pos = y == 1

    # Scatter plot of the two classes; the h = 0.5 contour is the decision boundary
    plt.figure(figsize=(10, 5))
    plt.contour(X0, X1, h, [0.5], linewidths=3, colors='g')
    plt.scatter(data[pos][:, 0], data[pos][:, 1], marker='+', c='k', s=60, linewidth=2, label='y=1')
    plt.scatter(data[neg][:, 0], data[neg][:, 1], marker='o', c='y', s=60, linewidth=2, label='y=0')
    plt.xlabel('Microchip Test 1')
    plt.ylabel('Microchip Test 2')
    plt.title('Lambda = ' + str(LAMBDA))
    plt.legend(loc=1);
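
A usage sketch, under assumptions not defined in the cells above: the data file name 'ex2data2.txt' (test 1 score, test 2 score, label columns) and the placeholder `theta`, which in the notebook would come from the regularized logistic-regression fit.

In [ ]:
# Hypothetical usage: file name and parameters are assumptions, not defined above.
data = np.loadtxt('ex2data2.txt', delimiter=',')
n_features = PolynomialFeatures(6).fit_transform(data[:, 0:2]).shape[1]
theta = np.random.randn(n_features) * 0.1   # placeholder for the fitted parameters
plotDecisionBoundary(data, theta, LAMBDA=1)
plt.show()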