In [1]:
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
%matplotlib notebook

In [2]:
def show_loss_plot(raw_train, raw_valid, plot_title, save_name):
    """Plot per-epoch training loss (row mean +/- std) against validation
    accuracy on a twin y-axis, then save the figure to ../output/.

    raw_train: 2-D array with one row of training losses per epoch.
    raw_valid: 2-D array with one row per epoch; column 1 holds accuracy.
    """
    LW = .7
    ALPHA = 0.2
    C1 = 'b'
    C2 = 'r'

    x = range(len(raw_train))
    y_train = raw_train.mean(axis=1)   # mean loss per epoch
    err_train = raw_train.std(axis=1)  # spread of losses within each epoch
    y_valid = raw_valid[:, 1]          # use accuracies only

    # create graph
    fig, ax1 = plt.subplots(figsize=(8, 4))
    plt.title("{} - Training Losses and Validation Accuracy".format(plot_title))
    # plot training losses
    ax1.plot(x, y_train, C1, linewidth=LW)
    ax1.fill_between(x, y_train-err_train, y_train+err_train, facecolor=C1, alpha=ALPHA, interpolate=True)
    # axes stuff
    ax1.set_xlabel('Epochs')
    ax1.set_ylabel('Cross Entropy Loss', color=C1)
    ax1.tick_params('y', colors=C1)
    ax1.set_ylim(0.,1.75)

    # plot accuracies
    ax2 = ax1.twinx()
    ax2.plot(x, y_valid, C2, linewidth=LW)
    # set labels
    ax2.set_ylabel('Accuracy', color=C2)  # accuracies are stored as fractions in [0, 1]
    ax2.tick_params('y', colors=C2)
    ax2.set_ylim(0.,1.)
    ax2.set_xlim(0,140)

    # save / show plots
    fig.tight_layout()
    fig.savefig("../output/{}.png".format(save_name))
    plt.show()
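
A quick sanity check on the expected input shapes: the hypothetical snippet below builds synthetic arrays and calls show_loss_plot with them. It assumes raw_train is an (epochs, batches) matrix of per-batch training losses and that raw_valid carries validation accuracy in column 1; the decaying curves and the "synthetic_sanity_check" name are illustrative only, and saving still expects the ../output/ directory to exist.

import numpy as np

epochs, batches = 140, 32
rng = np.random.default_rng(0)

# synthetic training losses: decaying per-epoch mean plus batch-level noise (illustrative only)
fake_train = np.exp(-np.linspace(0, 4, epochs))[:, None] + 0.05 * rng.random((epochs, batches))
# synthetic validation log: column 0 assumed to be loss, column 1 an accuracy rising toward ~0.9
fake_valid = np.column_stack([np.exp(-np.linspace(0, 3, epochs)),
                              0.9 * (1 - np.exp(-np.linspace(0, 3, epochs)))])

show_loss_plot(fake_train, fake_valid, "Synthetic Sanity Check", "synthetic_sanity_check")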

In [3]:
TRAIN_LOSSES_PATH = "../output/train_losses_resnet34_conv.csv"
raw_train = np.loadtxt(TRAIN_LOSSES_PATH, delimiter=",")

VALID_LOSSES_PATH = "../output/valid_losses_resnet34_conv.csv"
raw_valid = np.loadtxt(VALID_LOSSES_PATH, delimiter=",")

show_loss_plot(raw_train, raw_valid, "Resnet34 Spectrograms", "resnet34_conv")

print(raw_train.mean(axis=1)[-1], raw_valid[-1, 1])  # final-epoch mean training loss, final validation accuracy


0.023385326316 0.925566343042

In [4]:
TRAIN_LOSSES_PATH = "../output/train_losses_resnet34_mfcc.csv"
raw_train = np.loadtxt(TRAIN_LOSSES_PATH, delimiter=",")

VALID_LOSSES_PATH = "../output/valid_losses_resnet34_mfcc.csv"
raw_valid = np.loadtxt(VALID_LOSSES_PATH, delimiter=",")

show_loss_plot(raw_train, raw_valid, "Resnet34 MFCC", "resnet34_mfcc")

print(raw_train.mean(axis=1)[-1], raw_valid[-1, 1])


0.035271078969 0.611650485437
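
To compare the two runs beyond their final epoch, a small helper like the hypothetical sketch below reports each run's best validation accuracy and the epoch at which it was reached, assuming the ../output/train_losses_<tag>.csv and ../output/valid_losses_<tag>.csv naming pattern used above.

import numpy as np

def summarize_run(tag):
    # load a run's logs by tag, e.g. 'resnet34_conv', following the naming pattern above
    train = np.loadtxt("../output/train_losses_{}.csv".format(tag), delimiter=",")
    valid = np.loadtxt("../output/valid_losses_{}.csv".format(tag), delimiter=",")
    best_epoch = int(valid[:, 1].argmax())
    print("{}: best accuracy {:.4f} at epoch {} (final mean train loss {:.4f})".format(
        tag, valid[best_epoch, 1], best_epoch, train.mean(axis=1)[-1]))

for tag in ("resnet34_conv", "resnet34_mfcc"):
    summarize_run(tag)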