In [1]:
from __future__ import print_function

# We'll need numpy for some mathematical operations
import numpy as np


# matplotlib for displaying the output
import matplotlib.pyplot as plt
import matplotlib.style as ms
ms.use('seaborn-muted')
%matplotlib inline

# and IPython.display for audio output
import IPython.display

# Librosa for audio
import librosa
import librosa.display
import pandas as pd
import tensorflow as tf
import timeit

from joblib import Parallel, delayed
import multiprocessing

num_cores = multiprocessing.cpu_count()


/Users/kayote/anaconda/lib/python2.7/site-packages/matplotlib/font_manager.py:273: UserWarning: Matplotlib is building the font cache using fc-list. This may take a moment.
  warnings.warn('Matplotlib is building the font cache using fc-list. This may take a moment.')

In [2]:
#audio_path = librosa.util.example_audio_file()
audio_path = 'first_test2.aiff'

y, sr = librosa.load(audio_path)

In [3]:
data = pd.read_csv('first_test2.txt', names=['ts', 'bank', 'pitch', 'vel'],sep=' ')
chirp_ts = data.ts[data.bank=='start'].iloc[0]
print(chirp_ts)
chirp_ind = np.min(np.where(y>0.1))
print(chirp_ind)
data.head()


1.48661117593e+12
413903
Out[3]:
ts bank pitch vel
0 1.486611e+12 start NaN NaN
1 1.486611e+12 0 1.0 32.0
2 1.486611e+12 0 1.0 0.0
3 1.486611e+12 0 2.0 32.0
4 1.486611e+12 0 2.0 0.0

In [4]:
def get_sample(y,ts1,ts2,sr,chirp_ts,chirp_ind):
    ind1 = int(chirp_ind+(ts1+150-chirp_ts)*sr/1000.0)
    ind2 = int(chirp_ind+(ts2-150-chirp_ts)*sr/1000.0)
    return y[ind1:ind2]

def get_sample_by_meta(y,data,bank,pitch,vel,sr,chirp_ts,chirp_ind):
    ts1 = data.ts[(data.pitch==pitch) & (data.bank==bank) & (data.vel==vel)].iloc[0]
    ts2 = data.ts[np.where((data.pitch==pitch) & (data.bank==bank) & (data.vel==vel))[0]+1].iloc[0]
    ind1 = int(chirp_ind+(ts1+150-chirp_ts)*sr/1000.0)
    ind2 = int(chirp_ind+(ts2-150-chirp_ts)*sr/1000.0)
    return y[ind1:ind2]

In [5]:
print(np.array(0))
first_dict = {'bank':data.bank[0],'pitch':data.pitch[0],'vel':data.vel[9],'sample':[[]]}
print(first_dict)
samples = pd.DataFrame(first_dict,index=[0])
print(samples)
for pitch in np.unique(data.pitch):
    for vel in np.setdiff1d(np.unique(data.vel),0):
        for bank in np.setdiff1d(np.unique(data.bank),'start'):
            if np.isnan(pitch): continue
            if np.isnan(vel): continue
            try:
                sample = get_sample_by_meta(y,data,bank,int(pitch),int(vel),sr,chirp_ts,chirp_ind)
                samples = samples.append({'bank':bank,'pitch':pitch,'vel':vel,'sample':sample},ignore_index=True)
            except: continue
                
print(samples.shape)
print(samples.head())


0
{'sample': [[]], 'vel': 32.0, 'bank': 'start', 'pitch': nan}
    bank  pitch sample   vel
0  start    NaN     []  32.0
(485, 4)
    bank  pitch                                             sample   vel
0  start    NaN                                                 []  32.0
1      0    1.0  [4.44706e-06, 9.13977e-07, -4.78962e-06, 9.225...  32.0
2      1    1.0  [1.05107e-06, -8.39425e-07, -9.55803e-07, -4.7...  32.0
3      0    1.0  [-3.14419e-06, 4.66695e-06, -5.74328e-06, 5.29...  64.0
4      0    1.0  [-7.5734e-06, 4.44263e-06, -1.09487e-06, -2.57...  96.0

In [6]:
def get_fft(sample):
    try:
        if np.sum(np.abs(sample))<1.0: return []
        S_sample = np.abs((librosa.stft(sample)))**0.5
        return np.abs(S_sample[:,S_sample.shape[1]/2])
    except:
        return []
    
def get_fft_with_pitch_shift(sample,sr,n_steps=0):
    if np.sum(np.abs(sample))<1.0: return []
    S_sample = np.abs((librosa.stft(sample)))**0.5
    S_sample = np.abs((librosa.stft(librosa.effects.pitch_shift(sample,sr,n_steps=n_steps))))**0.5
    return np.abs(S_sample[:,S_sample.shape[1]/2])
        
        
samples['fft'] = [get_fft(sample) for sample in samples['sample']]
samples['total_sample'] = [sum(np.abs(sample)) for sample in samples['sample']]

samples['valid_sample'] = [True if len(x)>0 else False for x in samples['fft']]
plt.plot(samples['pitch'],samples['total_sample'])


samples.head()


Out[6]:
bank pitch sample vel fft total_sample valid_sample
0 start NaN [] 32.0 [] 0.000000 False
1 0 1.0 [4.44706e-06, 9.13977e-07, -4.78962e-06, 9.225... 32.0 [] 0.053637 False
2 1 1.0 [1.05107e-06, -8.39425e-07, -9.55803e-07, -4.7... 32.0 [] 0.052744 False
3 0 1.0 [-3.14419e-06, 4.66695e-06, -5.74328e-06, 5.29... 64.0 [] 0.052508 False
4 0 1.0 [-7.5734e-06, 4.44263e-06, -1.09487e-06, -2.57... 96.0 [] 0.053330 False

In [7]:
def get_chord_pitches(root,majmin3='maj',majmin7='none',extensions=[]):
    
    fundamental = root%12
    fundamentals = [fundamental + x*12 for x in range(12)]
    
    all_of_them = fundamentals
    if majmin3 =='maj': all_of_them += [(fundamental+4)%12 + x*12 for x in range(12)]
    if majmin3 =='min': all_of_them += [(fundamental+3)%12 + x*12 for x in range(12)]
    all_of_them += [(fundamental+7)%12 + x*12 for x in range(12)]
    if majmin7 =='maj': all_of_them += [(fundamental+11)%12 + x*12 for x in range(12)]
    if majmin7 =='min': all_of_them += [(fundamental+10)%12 + x*12 for x in range(12)]
    for extension in extensions:
        if extension not in range(12): continue
        all_of_them += [(fundamental+extension)%12 + x*12 for x in range(12)]
        
    return sorted([num for num in all_of_them if num >= 1 and num <= 127])

def make_chord(root,samples,root_or_pitch_class='root',bank='0',vel=128,majmin3='maj',majmin7='none',extensions=[]):
    if root_or_pitch_class=='root': 
        all_notes = get_chord_pitches(root,majmin3,majmin7,extensions)
        all_notes = sorted([num for num in all_notes if num >= 40 and num <= 63])
    else: 
        all_notes = [x + 12*y for x in root for y in range(12)]
        all_notes = sorted([num for num in all_notes if num >= 40 and num <= 63])
        #print(all_notes)
    if len(all_notes)>0: pitch_class = list(frozenset([x%12 for x in all_notes]))[0]
    else: 
        pitch_class = []
        return None
    pitch_class = list(frozenset([x%12 for x in all_notes]))[0]
    weights = np.random.uniform(0.25,1,len(all_notes))
    ever_worked = False
    good_weights = []
    ever_worked = False
    for i in range(len(weights)):
        tmp = samples['fft'][(samples['bank']==bank) & (samples['vel']==vel) & (samples['pitch']==all_notes[i])]
        if len(tmp)==0: continue
        tmp = tmp.iloc[0]
        if len(tmp)==0: continue
        if not ever_worked: 
            tmp2 = tmp
            ever_worked = True
            good_weights = [weights[i]]
        else: 
            tmp2 = np.vstack((tmp2,tmp))
            good_weights.append(weights[i])
    fft = np.dot(good_weights,tmp2)
    return fft/np.sum(fft)#, sample[0]/np.max(sample[0])


def make_chord_with_pitch_shift(root,samples,sr,root_or_pitch_class='root',n_steps=0,bank='0',vel=128,majmin3='maj',majmin7='none',extensions=[]):
    if root_or_pitch_class=='root': all_notes = get_chord_pitches(root,majmin3,majmin7,extensions)
    else: 
        all_notes = [x + 12*y for x in root for y in range(12)]
        all_notes = sorted([num for num in all_notes if num >= 1 and num <= 127])
    pitch_class = list(frozenset([x%12 for x in all_notes]))[0]
    weights = np.random.uniform(0.25,1,len(all_notes))
    ever_worked = False
    for i in range(len(weights)):
        sample = samples['sample'][(samples['bank']==bank) & (samples['vel']==vel) & (samples['pitch']==all_notes[i])]
        if len(sample)==0: continue
        tmp = np.array(get_fft_with_pitch_shift(np.array(sample)[0],sr,n_steps))
        if len(tmp)==0: continue
        #tmp2 = np.array(samples['sample'][(samples['bank']==bank) & (samples['vel']==vel) & (samples['pitch']==all_notes[i])])
        if not ever_worked:
            fft = weights[i]*tmp
            ever_worked = True
            #sample = weights[i]*tmp2
        else: 
            fft += weights[i]*tmp
            #sample += weights[i]*tmp2
    if ever_worked: return fft/np.sum(fft), pitch_class#, sample[0]/np.max(sample[0])
    else: return []
    
def to1hot(row,num_categories=2):
    one_hot = np.zeros(num_categories)
    one_hot[row]=1.0
    return one_hot

def toMultiHot(pitch_bank,num_categories=12):
    output = np.zeros(num_categories)
    for i in pitch_bank:
        output[i] = 1
    return output

In [8]:
def tmp2(j):
    n_steps = 0
    chord_fft = make_chord(pitch_class,samples,root_or_pitch_class='pitch_class')
    #n_steps = np.random.uniform(-0.1,0.1,1)
    #chord_fft = make_chord_with_pitch_shift(pitch_class,samples,sr,majmin3=label_to_chord[i]['majmin3'],majmin7=label_to_chord[i]['majmin7'],n_steps=n_steps)
    tmp_dict = {'fft':chord_fft,'pitch_shift':n_steps,'pitch_class':pitch_class}
    tmp_dict.update({'repetition':j})
    return tmp_dict

num_reps = num_cores*4
ds = pd.DataFrame()
#parallel_output = Parallel(n_jobs=num_cores)(delayed(tmp2)(i) for i in range(num_reps))
#print(parallel_output[0])


start = timeit.default_timer()

#zero notes
print('Doing zero-notes')
pitch_class = []
print(tmp2(0))
parallel_output = Parallel(n_jobs=num_cores)(delayed(tmp2)(i) for i in range(num_reps*20))
ds = ds.append(parallel_output,ignore_index=True)

#one notes
print('Doing one-notes')
for i1 in range(12):
    print("Step %g of %g at %g percent: "%(i1,12,100*(i1)/(12.)))
    pitch_class = [i1]
    parallel_output = Parallel(n_jobs=num_cores)(delayed(tmp2)(i) for i in range(num_reps*4))
    ds = ds.append(parallel_output,ignore_index=True)

print('Doing two-notes')
for i1 in range(12):
    for i2 in range(12):
        print("Step %g of %g at %g percent: "%(i1*12+i2,12*12,100*(i1*12+i2)/(12*12.)))
        pitch_class = [i1, i2]
        parallel_output = Parallel(n_jobs=num_cores)(delayed(tmp2)(i) for i in range(num_reps*2))
        ds = ds.append(parallel_output,ignore_index=True)

stop = timeit.default_timer()

print(stop-start)


Doing zero-notes
{'pitch_shift': 0, 'repetition': 0, 'fft': None, 'pitch_class': []}
Doing one-notes
Step 0 of 12 at 0 percent: 
Step 1 of 12 at 8.33333 percent: 
Step 2 of 12 at 16.6667 percent: 
Step 3 of 12 at 25 percent: 
Step 4 of 12 at 33.3333 percent: 
Step 5 of 12 at 41.6667 percent: 
Step 6 of 12 at 50 percent: 
Step 7 of 12 at 58.3333 percent: 
Step 8 of 12 at 66.6667 percent: 
Step 9 of 12 at 75 percent: 
Step 10 of 12 at 83.3333 percent: 
Step 11 of 12 at 91.6667 percent: 
Doing two-notes
Step 0 of 144 at 0 percent: 
Step 1 of 144 at 0.694444 percent: 
Step 2 of 144 at 1.38889 percent: 
Step 3 of 144 at 2.08333 percent: 
Step 4 of 144 at 2.77778 percent: 
Step 5 of 144 at 3.47222 percent: 
Step 6 of 144 at 4.16667 percent: 
Step 7 of 144 at 4.86111 percent: 
Step 8 of 144 at 5.55556 percent: 
Step 9 of 144 at 6.25 percent: 
Step 10 of 144 at 6.94444 percent: 
Step 11 of 144 at 7.63889 percent: 
Step 12 of 144 at 8.33333 percent: 
Step 13 of 144 at 9.02778 percent: 
Step 14 of 144 at 9.72222 percent: 
Step 15 of 144 at 10.4167 percent: 
Step 16 of 144 at 11.1111 percent: 
Step 17 of 144 at 11.8056 percent: 
Step 18 of 144 at 12.5 percent: 
Step 19 of 144 at 13.1944 percent: 
Step 20 of 144 at 13.8889 percent: 
Step 21 of 144 at 14.5833 percent: 
Step 22 of 144 at 15.2778 percent: 
Step 23 of 144 at 15.9722 percent: 
Step 24 of 144 at 16.6667 percent: 
Step 25 of 144 at 17.3611 percent: 
Step 26 of 144 at 18.0556 percent: 
Step 27 of 144 at 18.75 percent: 
Step 28 of 144 at 19.4444 percent: 
Step 29 of 144 at 20.1389 percent: 
Step 30 of 144 at 20.8333 percent: 
Step 31 of 144 at 21.5278 percent: 
Step 32 of 144 at 22.2222 percent: 
Step 33 of 144 at 22.9167 percent: 
Step 34 of 144 at 23.6111 percent: 
Step 35 of 144 at 24.3056 percent: 
Step 36 of 144 at 25 percent: 
Step 37 of 144 at 25.6944 percent: 
Step 38 of 144 at 26.3889 percent: 
Step 39 of 144 at 27.0833 percent: 
Step 40 of 144 at 27.7778 percent: 
Step 41 of 144 at 28.4722 percent: 
Step 42 of 144 at 29.1667 percent: 
Step 43 of 144 at 29.8611 percent: 
Step 44 of 144 at 30.5556 percent: 
Step 45 of 144 at 31.25 percent: 
Step 46 of 144 at 31.9444 percent: 
Step 47 of 144 at 32.6389 percent: 
Step 48 of 144 at 33.3333 percent: 
Step 49 of 144 at 34.0278 percent: 
Step 50 of 144 at 34.7222 percent: 
Step 51 of 144 at 35.4167 percent: 
Step 52 of 144 at 36.1111 percent: 
Step 53 of 144 at 36.8056 percent: 
Step 54 of 144 at 37.5 percent: 
Step 55 of 144 at 38.1944 percent: 
Step 56 of 144 at 38.8889 percent: 
Step 57 of 144 at 39.5833 percent: 
Step 58 of 144 at 40.2778 percent: 
Step 59 of 144 at 40.9722 percent: 
Step 60 of 144 at 41.6667 percent: 
Step 61 of 144 at 42.3611 percent: 
Step 62 of 144 at 43.0556 percent: 
Step 63 of 144 at 43.75 percent: 
Step 64 of 144 at 44.4444 percent: 
Step 65 of 144 at 45.1389 percent: 
Step 66 of 144 at 45.8333 percent: 
Step 67 of 144 at 46.5278 percent: 
Step 68 of 144 at 47.2222 percent: 
Step 69 of 144 at 47.9167 percent: 
Step 70 of 144 at 48.6111 percent: 
Step 71 of 144 at 49.3056 percent: 
Step 72 of 144 at 50 percent: 
Step 73 of 144 at 50.6944 percent: 
Step 74 of 144 at 51.3889 percent: 
Step 75 of 144 at 52.0833 percent: 
Step 76 of 144 at 52.7778 percent: 
Step 77 of 144 at 53.4722 percent: 
Step 78 of 144 at 54.1667 percent: 
Step 79 of 144 at 54.8611 percent: 
Step 80 of 144 at 55.5556 percent: 
Step 81 of 144 at 56.25 percent: 
Step 82 of 144 at 56.9444 percent: 
Step 83 of 144 at 57.6389 percent: 
Step 84 of 144 at 58.3333 percent: 
Step 85 of 144 at 59.0278 percent: 
Step 86 of 144 at 59.7222 percent: 
Step 87 of 144 at 60.4167 percent: 
Step 88 of 144 at 61.1111 percent: 
Step 89 of 144 at 61.8056 percent: 
Step 90 of 144 at 62.5 percent: 
Step 91 of 144 at 63.1944 percent: 
Step 92 of 144 at 63.8889 percent: 
Step 93 of 144 at 64.5833 percent: 
Step 94 of 144 at 65.2778 percent: 
Step 95 of 144 at 65.9722 percent: 
Step 96 of 144 at 66.6667 percent: 
Step 97 of 144 at 67.3611 percent: 
Step 98 of 144 at 68.0556 percent: 
Step 99 of 144 at 68.75 percent: 
Step 100 of 144 at 69.4444 percent: 
Step 101 of 144 at 70.1389 percent: 
Step 102 of 144 at 70.8333 percent: 
Step 103 of 144 at 71.5278 percent: 
Step 104 of 144 at 72.2222 percent: 
Step 105 of 144 at 72.9167 percent: 
Step 106 of 144 at 73.6111 percent: 
Step 107 of 144 at 74.3056 percent: 
Step 108 of 144 at 75 percent: 
Step 109 of 144 at 75.6944 percent: 
Step 110 of 144 at 76.3889 percent: 
Step 111 of 144 at 77.0833 percent: 
Step 112 of 144 at 77.7778 percent: 
Step 113 of 144 at 78.4722 percent: 
Step 114 of 144 at 79.1667 percent: 
Step 115 of 144 at 79.8611 percent: 
Step 116 of 144 at 80.5556 percent: 
Step 117 of 144 at 81.25 percent: 
Step 118 of 144 at 81.9444 percent: 
Step 119 of 144 at 82.6389 percent: 
Step 120 of 144 at 83.3333 percent: 
Step 121 of 144 at 84.0278 percent: 
Step 122 of 144 at 84.7222 percent: 
Step 123 of 144 at 85.4167 percent: 
Step 124 of 144 at 86.1111 percent: 
Step 125 of 144 at 86.8056 percent: 
Step 126 of 144 at 87.5 percent: 
Step 127 of 144 at 88.1944 percent: 
Step 128 of 144 at 88.8889 percent: 
Step 129 of 144 at 89.5833 percent: 
Step 130 of 144 at 90.2778 percent: 
Step 131 of 144 at 90.9722 percent: 
Step 132 of 144 at 91.6667 percent: 
Step 133 of 144 at 92.3611 percent: 
Step 134 of 144 at 93.0556 percent: 
Step 135 of 144 at 93.75 percent: 
Step 136 of 144 at 94.4444 percent: 
Step 137 of 144 at 95.1389 percent: 
Step 138 of 144 at 95.8333 percent: 
Step 139 of 144 at 96.5278 percent: 
Step 140 of 144 at 97.2222 percent: 
Step 141 of 144 at 97.9167 percent: 
Step 142 of 144 at 98.6111 percent: 
Step 143 of 144 at 99.3056 percent: 
51.4573152065

In [9]:
def tmp3(j):
    n_steps = 0
    chord_fft = make_chord(label_to_chord[i]['root'],samples,majmin3=label_to_chord[i]['majmin3'],majmin7=label_to_chord[i]['majmin7'])
    #chord_fft = make_chord_with_pitch_shift(label_to_chord[i]['root'],samples,sr,majmin3=label_to_chord[i]['majmin3'],majmin7=label_to_chord[i]['majmin7'],n_steps=n_steps)
    tmp_dict = {'fft':[chord_fft],'target':i,'pitch_shift':n_steps}
    tmp_dict.update(label_to_chord[i])
    tmp_dict.update({'repetition':j})
    return tmp_dict


            
label_to_chord = []
n = -1
set_created = False
for l in ('none', 1, 2, 3, 4, 5, 6, 8, 9, 10, 11):  #skip 7
    for k in ('none','maj','min'):
        for j in ('maj','min'):
            for i in range(12):
                if j=='maj' and l==4: continue
                if j=='maj' and l==3: continue #so we don't majmin3==maj+3 as well as majmin3==min+14
                if j=='min' and l==3: continue #leaves only majmin3==min+4
                if k=='none' and l==11: continue
                if k=='none' and l==10: continue
                if k=='maj' and l==11: continue
                if k=='maj' and l==10: continue #so we don't majmin7==maj+10 as well as majmin7==min+11
                if k=='min' and l==10: continue #leaves only majmin7==min+11
                all_notes = set([0,7])
                if j=='maj': all_notes.add(4)
                if j=='min': all_notes.add(3)
                if k=='maj': all_notes.add(11)
                if k=='min': all_notes.add(10)
                if l!='none': all_notes.add(l)
                all_notes = frozenset([(x+i)%12 for x in all_notes])
                if set_created and all_notes in set_of_all_chords: 
                    #print(all_notes)
                    continue
                if not set_created: 
                    set_of_all_chords = {all_notes}
                    set_created = True
                else: 
                    set_of_all_chords.add(all_notes)
                #k = 'none'
                n += 1
                label_to_chord.append({'root':i, 'majmin3': j, 'majmin7': k, 'extensions': l, 'pitch_class': sorted(list(all_notes))}) #it's annoying that list and ds behavior is different here
        

for i in range(len(label_to_chord)):
    print("Step %g of %g: "%(i,len(label_to_chord)) + str(label_to_chord[i]))
    if [label_to_chord[i]['extensions']=='none']: num_reps_tmp = num_reps*2
    else: num_reps_tmp = num_reps
    parallel_output = Parallel(n_jobs=num_cores)(delayed(tmp3)(i) for i in range(num_reps_tmp))
    ds = ds.append(parallel_output,ignore_index=True)


Step 0 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 4, 7], 'root': 0, 'extensions': 'none'}
Step 1 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 5, 8], 'root': 1, 'extensions': 'none'}
Step 2 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 6, 9], 'root': 2, 'extensions': 'none'}
Step 3 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 7, 10], 'root': 3, 'extensions': 'none'}
Step 4 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [4, 8, 11], 'root': 4, 'extensions': 'none'}
Step 5 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 5, 9], 'root': 5, 'extensions': 'none'}
Step 6 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 6, 10], 'root': 6, 'extensions': 'none'}
Step 7 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 7, 11], 'root': 7, 'extensions': 'none'}
Step 8 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 3, 8], 'root': 8, 'extensions': 'none'}
Step 9 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 4, 9], 'root': 9, 'extensions': 'none'}
Step 10 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 5, 10], 'root': 10, 'extensions': 'none'}
Step 11 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 6, 11], 'root': 11, 'extensions': 'none'}
Step 12 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 7], 'root': 0, 'extensions': 'none'}
Step 13 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 8], 'root': 1, 'extensions': 'none'}
Step 14 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 5, 9], 'root': 2, 'extensions': 'none'}
Step 15 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 6, 10], 'root': 3, 'extensions': 'none'}
Step 16 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [4, 7, 11], 'root': 4, 'extensions': 'none'}
Step 17 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 5, 8], 'root': 5, 'extensions': 'none'}
Step 18 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 6, 9], 'root': 6, 'extensions': 'none'}
Step 19 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 7, 10], 'root': 7, 'extensions': 'none'}
Step 20 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 8, 11], 'root': 8, 'extensions': 'none'}
Step 21 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 4, 9], 'root': 9, 'extensions': 'none'}
Step 22 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 5, 10], 'root': 10, 'extensions': 'none'}
Step 23 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 6, 11], 'root': 11, 'extensions': 'none'}
Step 24 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 7, 11], 'root': 0, 'extensions': 'none'}
Step 25 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 8], 'root': 1, 'extensions': 'none'}
Step 26 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 9], 'root': 2, 'extensions': 'none'}
Step 27 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 7, 10], 'root': 3, 'extensions': 'none'}
Step 28 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 4, 8, 11], 'root': 4, 'extensions': 'none'}
Step 29 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 9], 'root': 5, 'extensions': 'none'}
Step 30 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 10], 'root': 6, 'extensions': 'none'}
Step 31 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 11], 'root': 7, 'extensions': 'none'}
Step 32 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 3, 7, 8], 'root': 8, 'extensions': 'none'}
Step 33 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 4, 8, 9], 'root': 9, 'extensions': 'none'}
Step 34 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 5, 9, 10], 'root': 10, 'extensions': 'none'}
Step 35 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 6, 10, 11], 'root': 11, 'extensions': 'none'}
Step 36 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 7, 11], 'root': 0, 'extensions': 'none'}
Step 37 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 8], 'root': 1, 'extensions': 'none'}
Step 38 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 9], 'root': 2, 'extensions': 'none'}
Step 39 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 6, 10], 'root': 3, 'extensions': 'none'}
Step 40 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 4, 7, 11], 'root': 4, 'extensions': 'none'}
Step 41 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 8], 'root': 5, 'extensions': 'none'}
Step 42 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 9], 'root': 6, 'extensions': 'none'}
Step 43 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 10], 'root': 7, 'extensions': 'none'}
Step 44 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 7, 8, 11], 'root': 8, 'extensions': 'none'}
Step 45 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 8, 9], 'root': 9, 'extensions': 'none'}
Step 46 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 9, 10], 'root': 10, 'extensions': 'none'}
Step 47 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 6, 10, 11], 'root': 11, 'extensions': 'none'}
Step 48 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 10], 'root': 0, 'extensions': 'none'}
Step 49 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 5, 8, 11], 'root': 1, 'extensions': 'none'}
Step 50 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 6, 9], 'root': 2, 'extensions': 'none'}
Step 51 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 7, 10], 'root': 3, 'extensions': 'none'}
Step 52 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 4, 8, 11], 'root': 4, 'extensions': 'none'}
Step 53 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 9], 'root': 5, 'extensions': 'none'}
Step 54 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 10], 'root': 6, 'extensions': 'none'}
Step 55 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 11], 'root': 7, 'extensions': 'none'}
Step 56 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 8], 'root': 8, 'extensions': 'none'}
Step 57 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 7, 9], 'root': 9, 'extensions': 'none'}
Step 58 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 8, 10], 'root': 10, 'extensions': 'none'}
Step 59 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [3, 6, 9, 11], 'root': 11, 'extensions': 'none'}
Step 60 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 7, 10], 'root': 0, 'extensions': 'none'}
Step 61 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 8, 11], 'root': 1, 'extensions': 'none'}
Step 62 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 9], 'root': 2, 'extensions': 'none'}
Step 63 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 6, 10], 'root': 3, 'extensions': 'none'}
Step 64 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 7, 11], 'root': 4, 'extensions': 'none'}
Step 65 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 8], 'root': 5, 'extensions': 'none'}
Step 66 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 9], 'root': 6, 'extensions': 'none'}
Step 67 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 10], 'root': 7, 'extensions': 'none'}
Step 68 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [3, 6, 8, 11], 'root': 8, 'extensions': 'none'}
Step 69 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 9], 'root': 9, 'extensions': 'none'}
Step 70 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 5, 8, 10], 'root': 10, 'extensions': 'none'}
Step 71 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 6, 9, 11], 'root': 11, 'extensions': 'none'}
Step 72 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 1, 4, 7], 'root': 0, 'extensions': 1}
Step 73 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 2, 5, 8], 'root': 1, 'extensions': 1}
Step 74 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 3, 6, 9], 'root': 2, 'extensions': 1}
Step 75 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 4, 7, 10], 'root': 3, 'extensions': 1}
Step 76 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [4, 5, 8, 11], 'root': 4, 'extensions': 1}
Step 77 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 5, 6, 9], 'root': 5, 'extensions': 1}
Step 78 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 6, 7, 10], 'root': 6, 'extensions': 1}
Step 79 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 7, 8, 11], 'root': 7, 'extensions': 1}
Step 80 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 3, 8, 9], 'root': 8, 'extensions': 1}
Step 81 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 4, 9, 10], 'root': 9, 'extensions': 1}
Step 82 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 5, 10, 11], 'root': 10, 'extensions': 1}
Step 83 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 3, 6, 11], 'root': 11, 'extensions': 1}
Step 84 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 1, 3, 7], 'root': 0, 'extensions': 1}
Step 85 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 2, 4, 8], 'root': 1, 'extensions': 1}
Step 86 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 3, 5, 9], 'root': 2, 'extensions': 1}
Step 87 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 4, 6, 10], 'root': 3, 'extensions': 1}
Step 88 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [4, 5, 7, 11], 'root': 4, 'extensions': 1}
Step 89 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 5, 6, 8], 'root': 5, 'extensions': 1}
Step 90 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 6, 7, 9], 'root': 6, 'extensions': 1}
Step 91 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 7, 8, 10], 'root': 7, 'extensions': 1}
Step 92 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 8, 9, 11], 'root': 8, 'extensions': 1}
Step 93 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 4, 9, 10], 'root': 9, 'extensions': 1}
Step 94 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 5, 10, 11], 'root': 10, 'extensions': 1}
Step 95 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 2, 6, 11], 'root': 11, 'extensions': 1}
Step 96 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 7, 11], 'root': 0, 'extensions': 1}
Step 97 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 2, 5, 8], 'root': 1, 'extensions': 1}
Step 98 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 3, 6, 9], 'root': 2, 'extensions': 1}
Step 99 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 4, 7, 10], 'root': 3, 'extensions': 1}
Step 100 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 4, 5, 8, 11], 'root': 4, 'extensions': 1}
Step 101 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 6, 9], 'root': 5, 'extensions': 1}
Step 102 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 7, 10], 'root': 6, 'extensions': 1}
Step 103 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 8, 11], 'root': 7, 'extensions': 1}
Step 104 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 3, 7, 8, 9], 'root': 8, 'extensions': 1}
Step 105 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 4, 8, 9, 10], 'root': 9, 'extensions': 1}
Step 106 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 5, 9, 10, 11], 'root': 10, 'extensions': 1}
Step 107 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 3, 6, 10, 11], 'root': 11, 'extensions': 1}
Step 108 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 3, 7, 11], 'root': 0, 'extensions': 1}
Step 109 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 2, 4, 8], 'root': 1, 'extensions': 1}
Step 110 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 3, 5, 9], 'root': 2, 'extensions': 1}
Step 111 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 4, 6, 10], 'root': 3, 'extensions': 1}
Step 112 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 4, 5, 7, 11], 'root': 4, 'extensions': 1}
Step 113 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 6, 8], 'root': 5, 'extensions': 1}
Step 114 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 7, 9], 'root': 6, 'extensions': 1}
Step 115 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 8, 10], 'root': 7, 'extensions': 1}
Step 116 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 7, 8, 9, 11], 'root': 8, 'extensions': 1}
Step 117 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 8, 9, 10], 'root': 9, 'extensions': 1}
Step 118 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 9, 10, 11], 'root': 10, 'extensions': 1}
Step 119 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 2, 6, 10, 11], 'root': 11, 'extensions': 1}
Step 120 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 1, 4, 7, 10], 'root': 0, 'extensions': 1}
Step 121 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 2, 5, 8, 11], 'root': 1, 'extensions': 1}
Step 122 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 3, 6, 9], 'root': 2, 'extensions': 1}
Step 123 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 4, 7, 10], 'root': 3, 'extensions': 1}
Step 124 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 4, 5, 8, 11], 'root': 4, 'extensions': 1}
Step 125 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 6, 9], 'root': 5, 'extensions': 1}
Step 126 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 7, 10], 'root': 6, 'extensions': 1}
Step 127 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 8, 11], 'root': 7, 'extensions': 1}
Step 128 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 8, 9], 'root': 8, 'extensions': 1}
Step 129 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 7, 9, 10], 'root': 9, 'extensions': 1}
Step 130 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 8, 10, 11], 'root': 10, 'extensions': 1}
Step 131 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 9, 11], 'root': 11, 'extensions': 1}
Step 132 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 3, 7, 10], 'root': 0, 'extensions': 1}
Step 133 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 2, 4, 8, 11], 'root': 1, 'extensions': 1}
Step 134 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 3, 5, 9], 'root': 2, 'extensions': 1}
Step 135 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 4, 6, 10], 'root': 3, 'extensions': 1}
Step 136 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 5, 7, 11], 'root': 4, 'extensions': 1}
Step 137 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 6, 8], 'root': 5, 'extensions': 1}
Step 138 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 7, 9], 'root': 6, 'extensions': 1}
Step 139 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 8, 10], 'root': 7, 'extensions': 1}
Step 140 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [3, 6, 8, 9, 11], 'root': 8, 'extensions': 1}
Step 141 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 9, 10], 'root': 9, 'extensions': 1}
Step 142 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 5, 8, 10, 11], 'root': 10, 'extensions': 1}
Step 143 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 6, 9, 11], 'root': 11, 'extensions': 1}
Step 144 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 2, 4, 7], 'root': 0, 'extensions': 2}
Step 145 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 3, 5, 8], 'root': 1, 'extensions': 2}
Step 146 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 4, 6, 9], 'root': 2, 'extensions': 2}
Step 147 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 5, 7, 10], 'root': 3, 'extensions': 2}
Step 148 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [4, 6, 8, 11], 'root': 4, 'extensions': 2}
Step 149 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 5, 7, 9], 'root': 5, 'extensions': 2}
Step 150 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 6, 8, 10], 'root': 6, 'extensions': 2}
Step 151 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 7, 9, 11], 'root': 7, 'extensions': 2}
Step 152 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 3, 8, 10], 'root': 8, 'extensions': 2}
Step 153 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 4, 9, 11], 'root': 9, 'extensions': 2}
Step 154 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 2, 5, 10], 'root': 10, 'extensions': 2}
Step 155 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 3, 6, 11], 'root': 11, 'extensions': 2}
Step 156 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 2, 3, 7], 'root': 0, 'extensions': 2}
Step 157 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 3, 4, 8], 'root': 1, 'extensions': 2}
Step 158 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 4, 5, 9], 'root': 2, 'extensions': 2}
Step 159 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 5, 6, 10], 'root': 3, 'extensions': 2}
Step 160 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [4, 6, 7, 11], 'root': 4, 'extensions': 2}
Step 161 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 5, 7, 8], 'root': 5, 'extensions': 2}
Step 162 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 6, 8, 9], 'root': 6, 'extensions': 2}
Step 163 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 7, 9, 10], 'root': 7, 'extensions': 2}
Step 164 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 8, 10, 11], 'root': 8, 'extensions': 2}
Step 165 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 4, 9, 11], 'root': 9, 'extensions': 2}
Step 166 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 1, 5, 10], 'root': 10, 'extensions': 2}
Step 167 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 2, 6, 11], 'root': 11, 'extensions': 2}
Step 168 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 2, 4, 7, 11], 'root': 0, 'extensions': 2}
Step 169 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 3, 5, 8], 'root': 1, 'extensions': 2}
Step 170 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 4, 6, 9], 'root': 2, 'extensions': 2}
Step 171 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 5, 7, 10], 'root': 3, 'extensions': 2}
Step 172 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 4, 6, 8, 11], 'root': 4, 'extensions': 2}
Step 173 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 7, 9], 'root': 5, 'extensions': 2}
Step 174 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 8, 10], 'root': 6, 'extensions': 2}
Step 175 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 9, 11], 'root': 7, 'extensions': 2}
Step 176 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 3, 7, 8, 10], 'root': 8, 'extensions': 2}
Step 177 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 4, 8, 9, 11], 'root': 9, 'extensions': 2}
Step 178 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 2, 5, 9, 10], 'root': 10, 'extensions': 2}
Step 179 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 3, 6, 10, 11], 'root': 11, 'extensions': 2}
Step 180 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 2, 3, 7, 11], 'root': 0, 'extensions': 2}
Step 181 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 3, 4, 8], 'root': 1, 'extensions': 2}
Step 182 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 4, 5, 9], 'root': 2, 'extensions': 2}
Step 183 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 5, 6, 10], 'root': 3, 'extensions': 2}
Step 184 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 4, 6, 7, 11], 'root': 4, 'extensions': 2}
Step 185 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 7, 8], 'root': 5, 'extensions': 2}
Step 186 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 8, 9], 'root': 6, 'extensions': 2}
Step 187 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 9, 10], 'root': 7, 'extensions': 2}
Step 188 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 7, 8, 10, 11], 'root': 8, 'extensions': 2}
Step 189 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 8, 9, 11], 'root': 9, 'extensions': 2}
Step 190 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 9, 10], 'root': 10, 'extensions': 2}
Step 191 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 10, 11], 'root': 11, 'extensions': 2}
Step 192 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 4, 7, 10], 'root': 0, 'extensions': 2}
Step 193 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 5, 8, 11], 'root': 1, 'extensions': 2}
Step 194 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 4, 6, 9], 'root': 2, 'extensions': 2}
Step 195 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 5, 7, 10], 'root': 3, 'extensions': 2}
Step 196 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 4, 6, 8, 11], 'root': 4, 'extensions': 2}
Step 197 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 7, 9], 'root': 5, 'extensions': 2}
Step 198 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 8, 10], 'root': 6, 'extensions': 2}
Step 199 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 9, 11], 'root': 7, 'extensions': 2}
Step 200 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 8, 10], 'root': 8, 'extensions': 2}
Step 201 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 7, 9, 11], 'root': 9, 'extensions': 2}
Step 202 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 8, 10], 'root': 10, 'extensions': 2}
Step 203 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 6, 9, 11], 'root': 11, 'extensions': 2}
Step 204 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 3, 7, 10], 'root': 0, 'extensions': 2}
Step 205 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 4, 8, 11], 'root': 1, 'extensions': 2}
Step 206 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 4, 5, 9], 'root': 2, 'extensions': 2}
Step 207 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 5, 6, 10], 'root': 3, 'extensions': 2}
Step 208 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 6, 7, 11], 'root': 4, 'extensions': 2}
Step 209 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 7, 8], 'root': 5, 'extensions': 2}
Step 210 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 8, 9], 'root': 6, 'extensions': 2}
Step 211 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 9, 10], 'root': 7, 'extensions': 2}
Step 212 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [3, 6, 8, 10, 11], 'root': 8, 'extensions': 2}
Step 213 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 9, 11], 'root': 9, 'extensions': 2}
Step 214 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 5, 8, 10], 'root': 10, 'extensions': 2}
Step 215 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 2, 6, 9, 11], 'root': 11, 'extensions': 2}
Step 216 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 4, 7], 'root': 0, 'extensions': 4}
Step 217 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 5, 8], 'root': 1, 'extensions': 4}
Step 218 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 5, 6, 9], 'root': 2, 'extensions': 4}
Step 219 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 6, 7, 10], 'root': 3, 'extensions': 4}
Step 220 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [4, 7, 8, 11], 'root': 4, 'extensions': 4}
Step 221 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 5, 8, 9], 'root': 5, 'extensions': 4}
Step 222 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 6, 9, 10], 'root': 6, 'extensions': 4}
Step 223 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 7, 10, 11], 'root': 7, 'extensions': 4}
Step 224 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 8, 11], 'root': 8, 'extensions': 4}
Step 225 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 1, 4, 9], 'root': 9, 'extensions': 4}
Step 226 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 2, 5, 10], 'root': 10, 'extensions': 4}
Step 227 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 3, 6, 11], 'root': 11, 'extensions': 4}
Step 228 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 4, 7, 11], 'root': 0, 'extensions': 4}
Step 229 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 5, 8], 'root': 1, 'extensions': 4}
Step 230 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 6, 9], 'root': 2, 'extensions': 4}
Step 231 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 6, 7, 10], 'root': 3, 'extensions': 4}
Step 232 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 4, 7, 8, 11], 'root': 4, 'extensions': 4}
Step 233 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 8, 9], 'root': 5, 'extensions': 4}
Step 234 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 9, 10], 'root': 6, 'extensions': 4}
Step 235 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 6, 7, 10, 11], 'root': 7, 'extensions': 4}
Step 236 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 7, 8, 11], 'root': 8, 'extensions': 4}
Step 237 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 8, 9], 'root': 9, 'extensions': 4}
Step 238 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 9, 10], 'root': 10, 'extensions': 4}
Step 239 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 6, 10, 11], 'root': 11, 'extensions': 4}
Step 240 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 4, 7, 10], 'root': 0, 'extensions': 4}
Step 241 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 5, 8, 11], 'root': 1, 'extensions': 4}
Step 242 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 6, 9], 'root': 2, 'extensions': 4}
Step 243 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 6, 7, 10], 'root': 3, 'extensions': 4}
Step 244 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 7, 8, 11], 'root': 4, 'extensions': 4}
Step 245 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 8, 9], 'root': 5, 'extensions': 4}
Step 246 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 9, 10], 'root': 6, 'extensions': 4}
Step 247 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 5, 7, 10, 11], 'root': 7, 'extensions': 4}
Step 248 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 8, 11], 'root': 8, 'extensions': 4}
Step 249 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 4, 7, 9], 'root': 9, 'extensions': 4}
Step 250 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 2, 5, 8, 10], 'root': 10, 'extensions': 4}
Step 251 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 3, 6, 9, 11], 'root': 11, 'extensions': 4}
Step 252 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 4, 5, 7], 'root': 0, 'extensions': 5}
Step 253 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 5, 6, 8], 'root': 1, 'extensions': 5}
Step 254 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 6, 7, 9], 'root': 2, 'extensions': 5}
Step 255 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 7, 8, 10], 'root': 3, 'extensions': 5}
Step 256 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [4, 8, 9, 11], 'root': 4, 'extensions': 5}
Step 257 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 5, 9, 10], 'root': 5, 'extensions': 5}
Step 258 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 6, 10, 11], 'root': 6, 'extensions': 5}
Step 259 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 2, 7, 11], 'root': 7, 'extensions': 5}
Step 260 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 1, 3, 8], 'root': 8, 'extensions': 5}
Step 261 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 2, 4, 9], 'root': 9, 'extensions': 5}
Step 262 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 3, 5, 10], 'root': 10, 'extensions': 5}
Step 263 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 4, 6, 11], 'root': 11, 'extensions': 5}
Step 264 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 5, 7], 'root': 0, 'extensions': 5}
Step 265 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 6, 8], 'root': 1, 'extensions': 5}
Step 266 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 5, 7, 9], 'root': 2, 'extensions': 5}
Step 267 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 6, 8, 10], 'root': 3, 'extensions': 5}
Step 268 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [4, 7, 9, 11], 'root': 4, 'extensions': 5}
Step 269 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 5, 8, 10], 'root': 5, 'extensions': 5}
Step 270 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 6, 9, 11], 'root': 6, 'extensions': 5}
Step 271 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 2, 7, 10], 'root': 7, 'extensions': 5}
Step 272 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 3, 8, 11], 'root': 8, 'extensions': 5}
Step 273 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 2, 4, 9], 'root': 9, 'extensions': 5}
Step 274 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 3, 5, 10], 'root': 10, 'extensions': 5}
Step 275 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 4, 6, 11], 'root': 11, 'extensions': 5}
Step 276 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 7, 11], 'root': 0, 'extensions': 5}
Step 277 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 6, 8], 'root': 1, 'extensions': 5}
Step 278 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 7, 9], 'root': 2, 'extensions': 5}
Step 279 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 7, 8, 10], 'root': 3, 'extensions': 5}
Step 280 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 4, 8, 9, 11], 'root': 4, 'extensions': 5}
Step 281 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 9, 10], 'root': 5, 'extensions': 5}
Step 282 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 10, 11], 'root': 6, 'extensions': 5}
Step 283 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 2, 6, 7, 11], 'root': 7, 'extensions': 5}
Step 284 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 3, 7, 8], 'root': 8, 'extensions': 5}
Step 285 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 4, 8, 9], 'root': 9, 'extensions': 5}
Step 286 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 5, 9, 10], 'root': 10, 'extensions': 5}
Step 287 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 4, 6, 10, 11], 'root': 11, 'extensions': 5}
Step 288 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 5, 7, 11], 'root': 0, 'extensions': 5}
Step 289 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 6, 8], 'root': 1, 'extensions': 5}
Step 290 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 7, 9], 'root': 2, 'extensions': 5}
Step 291 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 6, 8, 10], 'root': 3, 'extensions': 5}
Step 292 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 4, 7, 9, 11], 'root': 4, 'extensions': 5}
Step 293 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 8, 10], 'root': 5, 'extensions': 5}
Step 294 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 6, 9, 11], 'root': 6, 'extensions': 5}
Step 295 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 2, 6, 7, 10], 'root': 7, 'extensions': 5}
Step 296 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 3, 7, 8, 11], 'root': 8, 'extensions': 5}
Step 297 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 2, 4, 8, 9], 'root': 9, 'extensions': 5}
Step 298 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 3, 5, 9, 10], 'root': 10, 'extensions': 5}
Step 299 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 4, 6, 10, 11], 'root': 11, 'extensions': 5}
Step 300 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 4, 5, 7, 10], 'root': 0, 'extensions': 5}
Step 301 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 5, 6, 8, 11], 'root': 1, 'extensions': 5}
Step 302 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 6, 7, 9], 'root': 2, 'extensions': 5}
Step 303 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 7, 8, 10], 'root': 3, 'extensions': 5}
Step 304 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 4, 8, 9, 11], 'root': 4, 'extensions': 5}
Step 305 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 9, 10], 'root': 5, 'extensions': 5}
Step 306 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 10, 11], 'root': 6, 'extensions': 5}
Step 307 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 7, 11], 'root': 7, 'extensions': 5}
Step 308 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 1, 3, 6, 8], 'root': 8, 'extensions': 5}
Step 309 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 2, 4, 7, 9], 'root': 9, 'extensions': 5}
Step 310 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 3, 5, 8, 10], 'root': 10, 'extensions': 5}
Step 311 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [3, 4, 6, 9, 11], 'root': 11, 'extensions': 5}
Step 312 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 7, 10], 'root': 0, 'extensions': 5}
Step 313 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 8, 11], 'root': 1, 'extensions': 5}
Step 314 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 7, 9], 'root': 2, 'extensions': 5}
Step 315 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 6, 8, 10], 'root': 3, 'extensions': 5}
Step 316 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 7, 9, 11], 'root': 4, 'extensions': 5}
Step 317 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 8, 10], 'root': 5, 'extensions': 5}
Step 318 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 6, 9, 11], 'root': 6, 'extensions': 5}
Step 319 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 7, 10], 'root': 7, 'extensions': 5}
Step 320 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 6, 8, 11], 'root': 8, 'extensions': 5}
Step 321 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 4, 7, 9], 'root': 9, 'extensions': 5}
Step 322 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 5, 8, 10], 'root': 10, 'extensions': 5}
Step 323 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 6, 9, 11], 'root': 11, 'extensions': 5}
Step 324 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 4, 6, 7], 'root': 0, 'extensions': 6}
Step 325 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 5, 7, 8], 'root': 1, 'extensions': 6}
Step 326 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 6, 8, 9], 'root': 2, 'extensions': 6}
Step 327 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 7, 9, 10], 'root': 3, 'extensions': 6}
Step 328 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [4, 8, 10, 11], 'root': 4, 'extensions': 6}
Step 329 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 5, 9, 11], 'root': 5, 'extensions': 6}
Step 330 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 1, 6, 10], 'root': 6, 'extensions': 6}
Step 331 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 2, 7, 11], 'root': 7, 'extensions': 6}
Step 332 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 2, 3, 8], 'root': 8, 'extensions': 6}
Step 333 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 3, 4, 9], 'root': 9, 'extensions': 6}
Step 334 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 4, 5, 10], 'root': 10, 'extensions': 6}
Step 335 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 5, 6, 11], 'root': 11, 'extensions': 6}
Step 336 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 6, 7], 'root': 0, 'extensions': 6}
Step 337 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 7, 8], 'root': 1, 'extensions': 6}
Step 338 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 5, 8, 9], 'root': 2, 'extensions': 6}
Step 339 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 6, 9, 10], 'root': 3, 'extensions': 6}
Step 340 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [4, 7, 10, 11], 'root': 4, 'extensions': 6}
Step 341 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 5, 8, 11], 'root': 5, 'extensions': 6}
Step 342 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 1, 6, 9], 'root': 6, 'extensions': 6}
Step 343 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 2, 7, 10], 'root': 7, 'extensions': 6}
Step 344 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 3, 8, 11], 'root': 8, 'extensions': 6}
Step 345 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 4, 9], 'root': 9, 'extensions': 6}
Step 346 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 5, 10], 'root': 10, 'extensions': 6}
Step 347 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 5, 6, 11], 'root': 11, 'extensions': 6}
Step 348 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 6, 7, 11], 'root': 0, 'extensions': 6}
Step 349 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 7, 8], 'root': 1, 'extensions': 6}
Step 350 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 8, 9], 'root': 2, 'extensions': 6}
Step 351 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 7, 9, 10], 'root': 3, 'extensions': 6}
Step 352 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 4, 8, 10, 11], 'root': 4, 'extensions': 6}
Step 353 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 9, 11], 'root': 5, 'extensions': 6}
Step 354 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 6, 10], 'root': 6, 'extensions': 6}
Step 355 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 7, 11], 'root': 7, 'extensions': 6}
Step 356 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 2, 3, 7, 8], 'root': 8, 'extensions': 6}
Step 357 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 3, 4, 8, 9], 'root': 9, 'extensions': 6}
Step 358 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 4, 5, 9, 10], 'root': 10, 'extensions': 6}
Step 359 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 5, 6, 10, 11], 'root': 11, 'extensions': 6}
Step 360 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 6, 7, 11], 'root': 0, 'extensions': 6}
Step 361 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 7, 8], 'root': 1, 'extensions': 6}
Step 362 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 8, 9], 'root': 2, 'extensions': 6}
Step 363 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 6, 9, 10], 'root': 3, 'extensions': 6}
Step 364 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 4, 7, 10, 11], 'root': 4, 'extensions': 6}
Step 365 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 5, 8, 11], 'root': 5, 'extensions': 6}
Step 366 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 6, 9], 'root': 6, 'extensions': 6}
Step 367 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 7, 10], 'root': 7, 'extensions': 6}
Step 368 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 3, 7, 8, 11], 'root': 8, 'extensions': 6}
Step 369 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 4, 8, 9], 'root': 9, 'extensions': 6}
Step 370 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 4, 5, 9, 10], 'root': 10, 'extensions': 6}
Step 371 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 5, 6, 10, 11], 'root': 11, 'extensions': 6}
Step 372 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 4, 6, 7, 10], 'root': 0, 'extensions': 6}
Step 373 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 5, 7, 8, 11], 'root': 1, 'extensions': 6}
Step 374 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 6, 8, 9], 'root': 2, 'extensions': 6}
Step 375 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 7, 9, 10], 'root': 3, 'extensions': 6}
Step 376 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 4, 8, 10, 11], 'root': 4, 'extensions': 6}
Step 377 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 9, 11], 'root': 5, 'extensions': 6}
Step 378 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 1, 4, 6, 10], 'root': 6, 'extensions': 6}
Step 379 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 2, 5, 7, 11], 'root': 7, 'extensions': 6}
Step 380 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 3, 6, 8], 'root': 8, 'extensions': 6}
Step 381 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 4, 7, 9], 'root': 9, 'extensions': 6}
Step 382 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 4, 5, 8, 10], 'root': 10, 'extensions': 6}
Step 383 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [3, 5, 6, 9, 11], 'root': 11, 'extensions': 6}
Step 384 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 7, 10], 'root': 0, 'extensions': 6}
Step 385 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 7, 8, 11], 'root': 1, 'extensions': 6}
Step 386 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 8, 9], 'root': 2, 'extensions': 6}
Step 387 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 6, 9, 10], 'root': 3, 'extensions': 6}
Step 388 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 7, 10, 11], 'root': 4, 'extensions': 6}
Step 389 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 5, 8, 11], 'root': 5, 'extensions': 6}
Step 390 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 4, 6, 9], 'root': 6, 'extensions': 6}
Step 391 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 2, 5, 7, 10], 'root': 7, 'extensions': 6}
Step 392 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 3, 6, 8, 11], 'root': 8, 'extensions': 6}
Step 393 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 4, 7, 9], 'root': 9, 'extensions': 6}
Step 394 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 5, 8, 10], 'root': 10, 'extensions': 6}
Step 395 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 5, 6, 9, 11], 'root': 11, 'extensions': 6}
Step 396 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 4, 7, 8], 'root': 0, 'extensions': 8}
Step 397 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 5, 8, 9], 'root': 1, 'extensions': 8}
Step 398 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 6, 9, 10], 'root': 2, 'extensions': 8}
Step 399 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 7, 10, 11], 'root': 3, 'extensions': 8}
Step 400 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 4, 8, 11], 'root': 4, 'extensions': 8}
Step 401 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 1, 5, 9], 'root': 5, 'extensions': 8}
Step 402 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 2, 6, 10], 'root': 6, 'extensions': 8}
Step 403 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 3, 7, 11], 'root': 7, 'extensions': 8}
Step 404 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [0, 3, 4, 8], 'root': 8, 'extensions': 8}
Step 405 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [1, 4, 5, 9], 'root': 9, 'extensions': 8}
Step 406 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [2, 5, 6, 10], 'root': 10, 'extensions': 8}
Step 407 of 492: {'majmin3': 'maj', 'majmin7': 'none', 'pitch_class': [3, 6, 7, 11], 'root': 11, 'extensions': 8}
Step 408 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 4, 7, 8, 11], 'root': 0, 'extensions': 8}
Step 409 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 5, 8, 9], 'root': 1, 'extensions': 8}
Step 410 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 6, 9, 10], 'root': 2, 'extensions': 8}
Step 411 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 7, 10, 11], 'root': 3, 'extensions': 8}
Step 412 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 3, 4, 8, 11], 'root': 4, 'extensions': 8}
Step 413 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 5, 9], 'root': 5, 'extensions': 8}
Step 414 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 6, 10], 'root': 6, 'extensions': 8}
Step 415 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 3, 6, 7, 11], 'root': 7, 'extensions': 8}
Step 416 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [0, 3, 4, 7, 8], 'root': 8, 'extensions': 8}
Step 417 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [1, 4, 5, 8, 9], 'root': 9, 'extensions': 8}
Step 418 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [2, 5, 6, 9, 10], 'root': 10, 'extensions': 8}
Step 419 of 492: {'majmin3': 'maj', 'majmin7': 'maj', 'pitch_class': [3, 6, 7, 10, 11], 'root': 11, 'extensions': 8}
Step 420 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 8, 10], 'root': 0, 'extensions': 8}
Step 421 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 5, 8, 9, 11], 'root': 1, 'extensions': 8}
Step 422 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 6, 9, 10], 'root': 2, 'extensions': 8}
Step 423 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 3, 7, 10, 11], 'root': 3, 'extensions': 8}
Step 424 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 2, 4, 8, 11], 'root': 4, 'extensions': 8}
Step 425 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 1, 3, 5, 9], 'root': 5, 'extensions': 8}
Step 426 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 2, 4, 6, 10], 'root': 6, 'extensions': 8}
Step 427 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 3, 5, 7, 11], 'root': 7, 'extensions': 8}
Step 428 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 4, 6, 8], 'root': 8, 'extensions': 8}
Step 429 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 5, 7, 9], 'root': 9, 'extensions': 8}
Step 430 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 6, 8, 10], 'root': 10, 'extensions': 8}
Step 431 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [3, 6, 7, 9, 11], 'root': 11, 'extensions': 8}
Step 432 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 7, 9], 'root': 0, 'extensions': 9}
Step 433 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 8, 10], 'root': 1, 'extensions': 9}
Step 434 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 5, 9, 11], 'root': 2, 'extensions': 9}
Step 435 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 3, 6, 10], 'root': 3, 'extensions': 9}
Step 436 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 4, 7, 11], 'root': 4, 'extensions': 9}
Step 437 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 2, 5, 8], 'root': 5, 'extensions': 9}
Step 438 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 3, 6, 9], 'root': 6, 'extensions': 9}
Step 439 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 4, 7, 10], 'root': 7, 'extensions': 9}
Step 440 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [3, 5, 8, 11], 'root': 8, 'extensions': 9}
Step 441 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [0, 4, 6, 9], 'root': 9, 'extensions': 9}
Step 442 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [1, 5, 7, 10], 'root': 10, 'extensions': 9}
Step 443 of 492: {'majmin3': 'min', 'majmin7': 'none', 'pitch_class': [2, 6, 8, 11], 'root': 11, 'extensions': 9}
Step 444 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 3, 7, 9, 11], 'root': 0, 'extensions': 9}
Step 445 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 1, 4, 8, 10], 'root': 1, 'extensions': 9}
Step 446 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 2, 5, 9, 11], 'root': 2, 'extensions': 9}
Step 447 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 2, 3, 6, 10], 'root': 3, 'extensions': 9}
Step 448 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 3, 4, 7, 11], 'root': 4, 'extensions': 9}
Step 449 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 2, 4, 5, 8], 'root': 5, 'extensions': 9}
Step 450 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 3, 5, 6, 9], 'root': 6, 'extensions': 9}
Step 451 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 4, 6, 7, 10], 'root': 7, 'extensions': 9}
Step 452 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [3, 5, 7, 8, 11], 'root': 8, 'extensions': 9}
Step 453 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [0, 4, 6, 8, 9], 'root': 9, 'extensions': 9}
Step 454 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [1, 5, 7, 9, 10], 'root': 10, 'extensions': 9}
Step 455 of 492: {'majmin3': 'min', 'majmin7': 'maj', 'pitch_class': [2, 6, 8, 10, 11], 'root': 11, 'extensions': 9}
Step 456 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 7, 9, 10], 'root': 0, 'extensions': 9}
Step 457 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 8, 10, 11], 'root': 1, 'extensions': 9}
Step 458 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 5, 9, 11], 'root': 2, 'extensions': 9}
Step 459 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 3, 6, 10], 'root': 3, 'extensions': 9}
Step 460 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 2, 4, 7, 11], 'root': 4, 'extensions': 9}
Step 461 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 2, 3, 5, 8], 'root': 5, 'extensions': 9}
Step 462 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 3, 4, 6, 9], 'root': 6, 'extensions': 9}
Step 463 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 4, 5, 7, 10], 'root': 7, 'extensions': 9}
Step 464 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [3, 5, 6, 8, 11], 'root': 8, 'extensions': 9}
Step 465 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 4, 6, 7, 9], 'root': 9, 'extensions': 9}
Step 466 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 5, 7, 8, 10], 'root': 10, 'extensions': 9}
Step 467 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 6, 8, 9, 11], 'root': 11, 'extensions': 9}
Step 468 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 10, 11], 'root': 0, 'extensions': 11}
Step 469 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 1, 5, 8, 11], 'root': 1, 'extensions': 11}
Step 470 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 1, 2, 6, 9], 'root': 2, 'extensions': 11}
Step 471 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 2, 3, 7, 10], 'root': 3, 'extensions': 11}
Step 472 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 3, 4, 8, 11], 'root': 4, 'extensions': 11}
Step 473 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 4, 5, 9], 'root': 5, 'extensions': 11}
Step 474 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 5, 6, 10], 'root': 6, 'extensions': 11}
Step 475 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 6, 7, 11], 'root': 7, 'extensions': 11}
Step 476 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [0, 3, 6, 7, 8], 'root': 8, 'extensions': 11}
Step 477 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [1, 4, 7, 8, 9], 'root': 9, 'extensions': 11}
Step 478 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [2, 5, 8, 9, 10], 'root': 10, 'extensions': 11}
Step 479 of 492: {'majmin3': 'maj', 'majmin7': 'min', 'pitch_class': [3, 6, 9, 10, 11], 'root': 11, 'extensions': 11}
Step 480 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 7, 10, 11], 'root': 0, 'extensions': 11}
Step 481 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 4, 8, 11], 'root': 1, 'extensions': 11}
Step 482 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 1, 2, 5, 9], 'root': 2, 'extensions': 11}
Step 483 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 2, 3, 6, 10], 'root': 3, 'extensions': 11}
Step 484 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 3, 4, 7, 11], 'root': 4, 'extensions': 11}
Step 485 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 3, 4, 5, 8], 'root': 5, 'extensions': 11}
Step 486 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 4, 5, 6, 9], 'root': 6, 'extensions': 11}
Step 487 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 5, 6, 7, 10], 'root': 7, 'extensions': 11}
Step 488 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [3, 6, 7, 8, 11], 'root': 8, 'extensions': 11}
Step 489 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [0, 4, 7, 8, 9], 'root': 9, 'extensions': 11}
Step 490 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [1, 5, 8, 9, 10], 'root': 10, 'extensions': 11}
Step 491 of 492: {'majmin3': 'min', 'majmin7': 'min', 'pitch_class': [2, 6, 9, 10, 11], 'root': 11, 'extensions': 11}

In [10]:
#ds["one_hot_encoding"] = [to1hot(x,len(label_to_chord)) for x in ds['target']]
ds["multi_hot_encoding"] = [toMultiHot(x,12) for x in ds['pitch_class']]
ds.head()
ds.tail()


Out[10]:
extensions fft majmin3 majmin7 pitch_class pitch_shift repetition root target multi_hot_encoding
42875 11 [[0.000317645216402, 0.000330283532693, 0.0003... min min [2, 6, 9, 10, 11] 0 59 11.0 491.0 [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, ...
42876 11 [[0.000232915756681, 0.000282462013064, 0.0003... min min [2, 6, 9, 10, 11] 0 60 11.0 491.0 [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, ...
42877 11 [[0.000317274116683, 0.000331640555004, 0.0003... min min [2, 6, 9, 10, 11] 0 61 11.0 491.0 [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, ...
42878 11 [[0.000264476459399, 0.00030275802486, 0.00031... min min [2, 6, 9, 10, 11] 0 62 11.0 491.0 [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, ...
42879 11 [[0.000280290193016, 0.000305630328844, 0.0003... min min [2, 6, 9, 10, 11] 0 63 11.0 491.0 [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, ...

In [11]:
tmp = pd.DataFrame(index=range(ds.shape[0]))
tmp['harmony_pitch_class'] = None
for i in range(ds.shape[0]):
    num_pitches = np.sum(ds['multi_hot_encoding'][i])
    if num_pitches >=3:
        curr_pitches = [int((ds['root'][i])%12)]
        curr_pitches += [int((ds['root'][i]+7)%12)]
        if ds['majmin3'][i]=='maj':
            curr_pitches += [int((ds['root'][i]+4)%12)]
        else:
            curr_pitches += [int((ds['root'][i]+3)%12)]
        if ds['majmin7'][i]=='maj':
            curr_pitches += [int((ds['root'][i]+11)%12)]
        elif ds['majmin7'][i]=='min':
            curr_pitches += [int((ds['root'][i]+10)%12)]
        tmp['harmony_pitch_class'][i] = curr_pitches
    else:
        tmp['harmony_pitch_class'][i] = ds['pitch_class'][i]

tmp['multi_hot_harmony'] = [toMultiHot(x,12) for x in tmp['harmony_pitch_class']]
ds['harmony_pitch_class'] = tmp['harmony_pitch_class']
ds['multi_hot_harmony'] = tmp['multi_hot_harmony']

In [12]:
good_ind = np.where([sum(x)>0 for x in ds['multi_hot_encoding']])
#good_ind = np.where([x=='none' for x in ds['extensions']])
print(len(good_ind))
ds_sub = ds.iloc[good_ind]

#ds_sub = ds[ds['extensions']=='none']

print(ds.shape)
print(ds_sub.shape)

num_ind = ds_sub.shape[0]
train_ratio = 0.8
validation_ratio = 0.1
test_ratio = 1 - train_ratio - validation_ratio
train_ind = np.random.choice(num_ind,int(np.floor(num_ind*train_ratio)),replace=False)
rest_ind = np.setdiff1d(range(num_ind),train_ind)
validation_ind = np.random.choice(rest_ind,int(np.floor(len(rest_ind)*(validation_ratio/(validation_ratio+test_ratio)))),replace=False)
test_ind = np.setdiff1d(rest_ind,validation_ind)

train_data = ds_sub.iloc[train_ind]
validation_data = ds_sub.iloc[validation_ind]
test_data = ds_sub.iloc[test_ind]

train_data.fft.shape


1
(42880, 12)
(42240, 12)
Out[12]:
(33792,)

In [13]:
#Gotta adjust these shape parameters
train_x = np.vstack(train_data['fft']).reshape(train_data.shape[0],len(train_data['fft'].iloc[0][0]),1,1).astype(np.float32)
train_y = np.vstack(train_data["multi_hot_harmony"])
train_size = train_y.shape[0]
validation_x = np.vstack(validation_data['fft']).reshape(validation_data.shape[0],len(validation_data['fft'].iloc[0][0]),1,1).astype(np.float32)
validation_y = np.vstack(validation_data["multi_hot_harmony"])
test_x = np.vstack(test_data['fft']).reshape(test_data.shape[0],len(validation_data['fft'].iloc[0][0]),1,1).astype(np.float32)
test_y = np.vstack(test_data["multi_hot_harmony"])

#shuffle the training_data
rand_ind = np.random.choice(train_x.shape[0],train_x.shape[0],replace=False)
train_x = train_x[rand_ind,:]
train_y = train_y[rand_ind,:]

In [14]:
BATCH_SIZE = min(train_x.shape[0],3000) # we have so little data, just set the batch size to the entire training set
NUM_CHANNELS = 1 
NUM_LABELS = train_y.shape[1]
STEP_UPDATE = 100

SEED = 27
FC1_SIZE = 512
FC2_SIZE = 256
FC3_SIZE = 128
FC4_SIZE = 64
ADAM_SIZE = 5e-5 #5e-5
REGULARIZER_SIZE = 1e-5#1e-4

MAX_STEPS = 100000
train_x.shape


Out[14]:
(33792, 1025, 1, 1)

In [15]:
tf.reset_default_graph()

inputs = tf.placeholder(tf.float32, [None, train_x.shape[1],train_x.shape[2],train_x.shape[3]],name="x-in")
true_y = tf.placeholder(tf.float32, [None, NUM_LABELS],name="y-in")
keep_prob1 = tf.placeholder("float")
keep_prob2 = tf.placeholder("float")
keep_prob3 = tf.placeholder("float")



input_length = train_x.shape[1]*train_x.shape[2]*train_x.shape[3]
input_flat = tf.reshape(inputs,[-1, input_length])

with tf.name_scope('fc1') as scope:
    weights =  tf.Variable(tf.truncated_normal([input_length, FC1_SIZE], dtype=tf.float32, stddev=0.1, seed=SEED))
    fc = tf.matmul(input_flat,weights)
    biases = tf.Variable(tf.constant(0.1, shape=[FC1_SIZE]))
    bias = tf.nn.bias_add(fc, biases)
    fc1 = tf.nn.relu(bias, name=scope)
    keep_prob1 = tf.placeholder(tf.float32)
    fc1_drop = tf.nn.dropout(fc1, keep_prob1, name=scope)
    #fc1_drop = fc1
    
    fc1_weights = weights
    fc1_biases = biases
    
with tf.name_scope('fc2') as scope:
    weights =  tf.Variable(tf.truncated_normal([FC1_SIZE, FC2_SIZE], dtype=tf.float32, stddev=0.1, seed=SEED))
    fc = tf.matmul(fc1_drop,weights)
    biases = tf.Variable(tf.constant(0.1, shape=[FC2_SIZE]))
    bias = tf.nn.bias_add(fc, biases)
    fc2 = tf.nn.relu(bias, name=scope)
    keep_prob2 = tf.placeholder(tf.float32)
    fc2_drop = tf.nn.dropout(fc2, keep_prob2, name=scope)
    #fc1_drop = fc1
    
    fc2_weights = weights
    fc2_biases = biases
    
with tf.name_scope('fc3') as scope:
    weights =  tf.Variable(tf.truncated_normal([FC2_SIZE, FC3_SIZE], dtype=tf.float32, stddev=0.1, seed=SEED))
    fc = tf.matmul(fc2_drop,weights)
    biases = tf.Variable(tf.constant(0.1, shape=[FC3_SIZE]))
    bias = tf.nn.bias_add(fc, biases)
    fc3 = tf.nn.relu(bias, name=scope)
    keep_prob3 = tf.placeholder(tf.float32)
    fc3_drop = tf.nn.dropout(fc3, keep_prob3, name=scope)
    #fc1_drop = fc1
    
    fc3_weights = weights
    fc3_biases = biases
    
with tf.name_scope('fc4') as scope:
    weights =  tf.Variable(tf.truncated_normal([FC3_SIZE, FC4_SIZE], dtype=tf.float32, stddev=0.1, seed=SEED))
    fc = tf.matmul(fc3_drop,weights)
    biases = tf.Variable(tf.constant(0.1, shape=[FC4_SIZE]))
    bias = tf.nn.bias_add(fc, biases)
    fc4 = tf.nn.relu(bias, name=scope)
    keep_prob4 = tf.placeholder(tf.float32)
    fc4_drop = tf.nn.dropout(fc4, keep_prob3, name=scope)
    #fc1_drop = fc1
    
    fc4_weights = weights
    fc4_biases = biases
    
    
with tf.name_scope('fc5') as scope:
    weights =  tf.Variable(tf.truncated_normal([FC4_SIZE, NUM_LABELS], dtype=tf.float32, stddev=0.1, seed=SEED))
    fc = tf.matmul(fc4_drop,weights)
    biases = tf.Variable(tf.constant(0.1, shape=[NUM_LABELS]))
    bias = tf.nn.bias_add(fc, biases)
    #fc2 = tf.nn.relu(bias, name=scope)
    fc5 = bias
    out_y = fc5
    
    fc5_weights = weights
    fc5_biases = biases
    
        
#loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(out_y, true_y))
loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(out_y, true_y))

#cross_entropy = -tf.reduce_sum(true_y*tf.log(out_y))
#loss = cross_entropy

# L2 regularization for the fully connected parameters.
regularizers =  (
                tf.nn.l2_loss(fc1_weights) + tf.nn.l2_loss(fc1_biases) +
                tf.nn.l2_loss(fc2_weights) + tf.nn.l2_loss(fc2_biases) +
                tf.nn.l2_loss(fc3_weights) + tf.nn.l2_loss(fc3_biases) +
                tf.nn.l2_loss(fc4_weights) + tf.nn.l2_loss(fc4_biases)
                #tf.nn.l2_loss(fc4_weights) + tf.nn.l2_loss(fc4_biases)
                #tf.nn.l2_loss(conv1_weights) + tf.nn.l2_loss(conv1_biases) +
                #tf.nn.l2_loss(conv2_weights) + tf.nn.l2_loss(conv2_biases)
)
# Add the regularization term to the loss.
loss += REGULARIZER_SIZE * (regularizers)


#For one-label
#correct_prediction = tf.equal(tf.argmax(out_y,1), tf.argmax(true_y,1))
#accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
#accuracy2 = tf.reduce_mean(tf.cast(correct_prediction, "float"))

#For multi-label
#see http://stackoverflow.com/questions/37746670/tensorflow-multi-label-accuracy-calculation
correct_prediction = tf.equal(tf.round(tf.nn.sigmoid(out_y)), tf.round(true_y))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
#Accuracy where all labels need to be correct:
all_labels_true = tf.reduce_min(tf.cast(correct_prediction, tf.float32), 1)
accuracy2 = tf.reduce_mean(all_labels_true)




#train_step = tf.train.GradientDescentOptimizer(1e-5).minimize(loss)
train_step = tf.train.AdamOptimizer().minimize(loss)

In [16]:
def next_batch(data_size,batch_size,first_batch=False):
    global ind
    try:
        ind += 0
    except:
        first_batch = True
    
    if first_batch == True or batch_size != len(ind):
        ind = np.mod(range(batch_size),data_size)
        return ind
    
    ind += batch_size
    ind %= data_size
    return ind

In [17]:
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
batch_size = BATCH_SIZE
layer_diagnostics = pd.DataFrame(columns=('step','layer',
                                          'weight','weight_grad','bias','bias_grad'#,
                                          #'weight_norm','weight_grad_norm','bias_norm','bias_grad_norm'
                                         ))


start = timeit.default_timer()


for i in range(MAX_STEPS+1):
    ind = next_batch(train_x.shape[0],batch_size)
    batch_x = train_x[ind]
    batch_y = train_y[ind]
    if i > 0: 
        if i<2: train_start = timeit.default_timer()
        sess.run(train_step, feed_dict={inputs:batch_x,true_y:batch_y, keep_prob1:0.5, keep_prob2:0.5, keep_prob3:0.5})
        if i<2: train_stop = timeit.default_timer()
        if i<2: print("Training time for 1000 steps: %g"%(1000*(train_stop-train_start)))
    if i < 2 or ( i % STEP_UPDATE == 0 and i != 0 ) :
        
        fc1_weights_val, fc1_weights_grad_val,\
        fc1_biases_val, fc1_biases_grad_val,\
        fc2_weights_val, fc2_weights_grad_val,\
        fc2_biases_val, fc2_biases_grad_val,\
        fc3_weights_val, fc3_weights_grad_val,\
        fc3_biases_val, fc3_biases_grad_val,\
        fc4_weights_val, fc4_weights_grad_val,\
        fc4_biases_val, fc4_biases_grad_val,\
        fc5_weights_val, fc5_weights_grad_val,\
        fc5_biases_val, fc5_biases_grad_val,\
        out_y_val, out_y_grad_val,\
        trainAccuracy, trainAccuracy2, trainLoss =\
            sess.run([
                fc1_weights, tf.gradients(loss,fc1_weights)[0],
                fc1_biases, tf.gradients(loss,fc1_biases)[0],
                fc2_weights, tf.gradients(loss,fc2_weights)[0],
                fc2_biases, tf.gradients(loss,fc2_biases)[0],
                fc3_weights, tf.gradients(loss,fc3_weights)[0],
                fc3_biases, tf.gradients(loss,fc3_biases)[0],
                fc4_weights, tf.gradients(loss,fc4_weights)[0],
                fc4_biases, tf.gradients(loss,fc4_biases)[0],
                fc5_weights, tf.gradients(loss,fc5_weights)[0],
                fc5_biases, tf.gradients(loss,fc5_biases)[0],
                out_y, tf.gradients(loss,out_y)[0],
                accuracy, accuracy2, loss
            ],
                                feed_dict={
                                    inputs:batch_x,
                                    true_y:batch_y,
                                    keep_prob1:1.0,
                                    keep_prob2:1.0,
                                    keep_prob3:1.0,
                                    keep_prob4:1.0})
        
        #validationAccuracy = sess.run(accuracy, 
        #                        feed_dict={
        #                            inputs:validation_x,
        #                            true_y:validation_y, 
        #                            keep_prob:1.0})
        
        fc1_norm = np.linalg.norm(fc1_weights_val)
        fc1_grad_norm = np.linalg.norm(fc1_weights_val)
        fc1_bias_norm = np.linalg.norm(fc1_biases_val)
        fc1_bias_grad_norm = np.linalg.norm(fc1_biases_val)
        fc2_norm = np.linalg.norm(fc2_weights_val)
        fc2_grad_norm = np.linalg.norm(fc2_weights_val)
        fc2_bias_norm = np.linalg.norm(fc2_biases_val)
        fc2_bias_grad_norm = np.linalg.norm(fc2_biases_val)
        fc3_norm = np.linalg.norm(fc3_weights_val)
        fc3_grad_norm = np.linalg.norm(fc3_weights_val)
        fc3_bias_norm = np.linalg.norm(fc3_biases_val)
        fc3_bias_grad_norm = np.linalg.norm(fc3_biases_val)
        fc4_norm = np.linalg.norm(fc4_weights_val)
        fc4_grad_norm = np.linalg.norm(fc4_weights_val)
        fc4_bias_norm = np.linalg.norm(fc4_biases_val)
        fc4_bias_grad_norm = np.linalg.norm(fc4_biases_val)
        fc5_norm = np.linalg.norm(fc5_weights_val)
        fc5_grad_norm = np.linalg.norm(fc5_weights_val)
        fc5_bias_norm = np.linalg.norm(fc5_biases_val)
        fc5_bias_grad_norm = np.linalg.norm(fc5_biases_val)
        out_y_norm = np.linalg.norm(out_y_val)
        out_y_grad_norm = np.linalg.norm(out_y_grad_val)
        
        
        layer_diagnostics.append({
                'step':i,
                'layer':'fc1',
                'weight_norm':fc1_norm,
                'weight_grad_norm':fc1_grad_norm,
                'bias_norm':fc1_bias_norm,
                'bias_grad_norm':fc1_bias_grad_norm},ignore_index=True)
        layer_diagnostics.append({
                'step':i,
                'layer':'fc2',
                'weight_norm':fc2_norm,
                'weight_grad_norm':fc2_grad_norm,
                'bias_norm':fc2_bias_norm,
                'bias_grad_norm':fc2_bias_grad_norm},ignore_index=True)
        layer_diagnostics.append({
                'step':i,
                'layer':'fc3',
                'weight_norm':fc3_norm,
                'weight_grad_norm':fc3_grad_norm,
                'bias_norm':fc3_bias_norm,
                'bias_grad_norm':fc3_bias_grad_norm},ignore_index=True)
        layer_diagnostics.append({
                'step':i,
                'layer':'fc4',
                'weight_norm':fc4_norm,
                'weight_grad_norm':fc4_grad_norm,
                'bias_norm':fc4_bias_norm,
                'bias_grad_norm':fc4_bias_grad_norm},ignore_index=True)
        layer_diagnostics.append({
                'step':i,
                'layer':'fc5',
                'weight_norm':fc5_norm,
                'weight_grad_norm':fc5_grad_norm,
                'bias_norm':fc5_bias_norm,
                'bias_grad_norm':fc5_bias_grad_norm},ignore_index=True)
        layer_diagnostics.append({
                'step':i,
                'layer':'out_y',
                'weight_norm':out_y_norm,
                'weight_grad_norm':out_y_grad_norm,
                'bias_norm':0,
                'bias_grad_norm':0},ignore_index=True)
                
        validationAccuracy, validationAccuracy2 = sess.run([accuracy, accuracy2], 
                            feed_dict={
                                inputs:validation_x,
                                true_y:validation_y,
                                    keep_prob1:1.0,
                                    keep_prob2:1.0,
                                    keep_prob3:1.0,
                                    keep_prob4:1.0})
            
                
        stop = timeit.default_timer()

        if i==0: print("Initialization:")
        else: print("Step %d:"%(i))
        print("    fc1 norm   %g, grad norm %g, bias norm %g, bias grad norm %g"%
              (fc1_norm, fc1_grad_norm, fc1_bias_norm, fc1_bias_grad_norm))
        print("    fc2 norm   %g, grad norm %g, bias norm %g, bias grad norm %g"%
              (fc2_norm, fc2_grad_norm, fc2_bias_norm, fc2_bias_grad_norm))
        print("    fc3 norm   %g, grad norm %g, bias norm %g, bias grad norm %g"%
              (fc3_norm, fc3_grad_norm, fc3_bias_norm, fc3_bias_grad_norm))
        print("    fc4 norm   %g, grad norm %g, bias norm %g, bias grad norm %g"%
              (fc4_norm, fc4_grad_norm, fc4_bias_norm, fc4_bias_grad_norm))
        print("    fc5 norm   %g, grad norm %g, bias norm %g, bias grad norm %g"%
              (fc5_norm, fc5_grad_norm, fc5_bias_norm, fc5_bias_grad_norm))
        print("    out_y_norm %g, grad norm %g"%(out_y_norm, out_y_grad_norm))
        print("    training accuracy %g, valid'n accuracy %g, loss %g"%(trainAccuracy, validationAccuracy,trainLoss))
        print("    training accuracy2 %g, valid'n accuracy2 %g"%(trainAccuracy2, validationAccuracy2))
        #print("    percent training samples classified as not zero %g"%(100.0*float(np.sum((np.argmax(out_y_val,1))))/float(out_y_val.shape[0])))
        if i>0: print("    seconds since start %g, ETA(s) %g"%((stop-start),(stop-start)*float(MAX_STEPS-i)/float(i)))
            
        
        if trainAccuracy2>0.99:
            print('Success! Shall stop training now although I could go further to improve loss function')
            break


Initialization:
    fc1 norm   63.7157, grad norm 63.7157, bias norm 2.26274, bias grad norm 2.26274
    fc2 norm   31.9248, grad norm 31.9248, bias norm 1.6, bias grad norm 1.6
    fc3 norm   15.9852, grad norm 15.9852, bias norm 1.13137, bias grad norm 1.13137
    fc4 norm   7.88883, grad norm 7.88883, bias norm 0.8, bias grad norm 0.8
    fc5 norm   2.29394, grad norm 2.29394, bias norm 0.34641, bias grad norm 0.34641
    out_y_norm 22.3004, grad norm 0.00269108
    training accuracy 0.305472, valid'n accuracy 0.302971, loss 0.741645
    training accuracy2 0, valid'n accuracy2 0
Training time for 1000 steps: 199.008
Step 1:
    fc1 norm   63.474, grad norm 63.474, bias norm 2.25976, bias grad norm 2.25976
    fc2 norm   31.9193, grad norm 31.9193, bias norm 1.59734, bias grad norm 1.59734
    fc3 norm   15.982, grad norm 15.982, bias norm 1.129, bias grad norm 1.129
    fc4 norm   7.88665, grad norm 7.88665, bias norm 0.799033, bias grad norm 0.799033
    fc5 norm   2.29413, grad norm 2.29413, bias norm 0.342946, bias grad norm 0.342946
    out_y_norm 17.9144, grad norm 0.00261295
    training accuracy 0.583194, valid'n accuracy 0.580946, loss 0.711594
    training accuracy2 0, valid'n accuracy2 0
    seconds since start 6.22907, ETA(s) 622901
Step 100:
    fc1 norm   29.6073, grad norm 29.6073, bias norm 1.96853, bias grad norm 1.96853
    fc2 norm   30.2146, grad norm 30.2146, bias norm 1.45098, bias grad norm 1.45098
    fc3 norm   15.0197, grad norm 15.0197, bias norm 1.24722, bias grad norm 1.24722
    fc4 norm   7.78206, grad norm 7.78206, bias norm 0.954141, bias grad norm 0.954141
    fc5 norm   2.35869, grad norm 2.35869, bias norm 0.177517, bias grad norm 0.177517
    out_y_norm 159.048, grad norm 0.00233527
    training accuracy 0.733889, valid'n accuracy 0.735046, loss 0.592284
    training accuracy2 0, valid'n accuracy2 0
    seconds since start 29.1522, ETA(s) 29123
Step 200:
    fc1 norm   24.2928, grad norm 24.2928, bias norm 1.55426, bias grad norm 1.55426
    fc2 norm   28.0837, grad norm 28.0837, bias norm 1.39612, bias grad norm 1.39612
    fc3 norm   14.1139, grad norm 14.1139, bias norm 1.22742, bias grad norm 1.22742
    fc4 norm   7.82912, grad norm 7.82912, bias norm 1.0942, bias grad norm 1.0942
    fc5 norm   2.42336, grad norm 2.42336, bias norm 0.0902919, bias grad norm 0.0902919
    out_y_norm 186.282, grad norm 0.00222433
    training accuracy 0.731611, valid'n accuracy 0.735046, loss 0.544704
    training accuracy2 0, valid'n accuracy2 0
    seconds since start 52.5745, ETA(s) 26234.7
Step 300:
    fc1 norm   37.7627, grad norm 37.7627, bias norm 0.715645, bias grad norm 0.715645
    fc2 norm   28.5861, grad norm 28.5861, bias norm 1.44973, bias grad norm 1.44973
    fc3 norm   15.8574, grad norm 15.8574, bias norm 1.33976, bias grad norm 1.33976
    fc4 norm   9.53671, grad norm 9.53671, bias norm 1.1278, bias grad norm 1.1278
    fc5 norm   2.98662, grad norm 2.98662, bias norm 0.0706716, bias grad norm 0.0706716
    out_y_norm 595.293, grad norm 0.00158694
    training accuracy 0.866861, valid'n accuracy 0.860401, loss 0.304275
    training accuracy2 0.168, valid'n accuracy2 0.150095
    seconds since start 77.0082, ETA(s) 25592.4
Step 400:
    fc1 norm   36.5609, grad norm 36.5609, bias norm 0.610163, bias grad norm 0.610163
    fc2 norm   29.1964, grad norm 29.1964, bias norm 1.43072, bias grad norm 1.43072
    fc3 norm   17.4861, grad norm 17.4861, bias norm 1.22663, bias grad norm 1.22663
    fc4 norm   10.5799, grad norm 10.5799, bias norm 1.11583, bias grad norm 1.11583
    fc5 norm   3.37074, grad norm 3.37074, bias norm 0.0952679, bias grad norm 0.0952679
    out_y_norm 762.924, grad norm 0.00150226
    training accuracy 0.883528, valid'n accuracy 0.880307, loss 0.271545
    training accuracy2 0.199667, valid'n accuracy2 0.202652
    seconds since start 100.177, ETA(s) 24944.2
Step 500:
    fc1 norm   37.0937, grad norm 37.0937, bias norm 0.559059, bias grad norm 0.559059
    fc2 norm   30.4214, grad norm 30.4214, bias norm 1.37344, bias grad norm 1.37344
    fc3 norm   19.1886, grad norm 19.1886, bias norm 1.16596, bias grad norm 1.16596
    fc4 norm   11.5831, grad norm 11.5831, bias norm 1.1376, bias grad norm 1.1376
    fc5 norm   3.72514, grad norm 3.72514, bias norm 0.127842, bias grad norm 0.127842
    out_y_norm 874.08, grad norm 0.00116204
    training accuracy 0.936083, valid'n accuracy 0.932134, loss 0.184728
    training accuracy2 0.482667, valid'n accuracy2 0.472064
    seconds since start 123.728, ETA(s) 24622
Step 600:
    fc1 norm   36.8563, grad norm 36.8563, bias norm 0.518131, bias grad norm 0.518131
    fc2 norm   31.0107, grad norm 31.0107, bias norm 1.3341, bias grad norm 1.3341
    fc3 norm   20.4623, grad norm 20.4623, bias norm 1.08413, bias grad norm 1.08413
    fc4 norm   12.4611, grad norm 12.4611, bias norm 1.08998, bias grad norm 1.08998
    fc5 norm   4.05609, grad norm 4.05609, bias norm 0.139436, bias grad norm 0.139436
    out_y_norm 937.71, grad norm 0.00111062
    training accuracy 0.939389, valid'n accuracy 0.938368, loss 0.168762
    training accuracy2 0.518, valid'n accuracy2 0.519176
    seconds since start 147.995, ETA(s) 24517.8
Step 700:
    fc1 norm   36.8966, grad norm 36.8966, bias norm 0.4878, bias grad norm 0.4878
    fc2 norm   31.6285, grad norm 31.6285, bias norm 1.30193, bias grad norm 1.30193
    fc3 norm   21.606, grad norm 21.606, bias norm 1.05065, bias grad norm 1.05065
    fc4 norm   13.246, grad norm 13.246, bias norm 1.09353, bias grad norm 1.09353
    fc5 norm   4.39903, grad norm 4.39903, bias norm 0.156287, bias grad norm 0.156287
    out_y_norm 1040.01, grad norm 0.00100543
    training accuracy 0.950194, valid'n accuracy 0.943379, loss 0.143494
    training accuracy2 0.575667, valid'n accuracy2 0.54143
    seconds since start 174.419, ETA(s) 24742.5
Step 800:
    fc1 norm   37.0528, grad norm 37.0528, bias norm 0.465345, bias grad norm 0.465345
    fc2 norm   32.2865, grad norm 32.2865, bias norm 1.27566, bias grad norm 1.27566
    fc3 norm   22.6758, grad norm 22.6758, bias norm 1.04917, bias grad norm 1.04917
    fc4 norm   14.0091, grad norm 14.0091, bias norm 1.13101, bias grad norm 1.13101
    fc5 norm   4.75685, grad norm 4.75685, bias norm 0.175984, bias grad norm 0.175984
    out_y_norm 1165.53, grad norm 0.000940957
    training accuracy 0.9565, valid'n accuracy 0.953086, loss 0.128946
    training accuracy2 0.621333, valid'n accuracy2 0.611742
    seconds since start 200.221, ETA(s) 24827.4
Step 900:
    fc1 norm   37.407, grad norm 37.407, bias norm 0.444234, bias grad norm 0.444234
    fc2 norm   33.0146, grad norm 33.0146, bias norm 1.2526, bias grad norm 1.2526
    fc3 norm   23.7485, grad norm 23.7485, bias norm 1.03963, bias grad norm 1.03963
    fc4 norm   14.7841, grad norm 14.7841, bias norm 1.18439, bias grad norm 1.18439
    fc5 norm   5.13345, grad norm 5.13345, bias norm 0.200799, bias grad norm 0.200799
    out_y_norm 1273.76, grad norm 0.000856619
    training accuracy 0.964889, valid'n accuracy 0.962476, loss 0.113107
    training accuracy2 0.698667, valid'n accuracy2 0.683712
    seconds since start 224.872, ETA(s) 24760.9
Step 1000:
    fc1 norm   37.8753, grad norm 37.8753, bias norm 0.425853, bias grad norm 0.425853
    fc2 norm   33.8278, grad norm 33.8278, bias norm 1.23548, bias grad norm 1.23548
    fc3 norm   24.862, grad norm 24.862, bias norm 1.03217, bias grad norm 1.03217
    fc4 norm   15.5877, grad norm 15.5877, bias norm 1.24988, bias grad norm 1.24988
    fc5 norm   5.52827, grad norm 5.52827, bias norm 0.228723, bias grad norm 0.228723
    out_y_norm 1399.4, grad norm 0.000782014
    training accuracy 0.971639, valid'n accuracy 0.969934, loss 0.100686
    training accuracy2 0.762, valid'n accuracy2 0.747633
    seconds since start 250.902, ETA(s) 24839.3
Step 1100:
    fc1 norm   38.4378, grad norm 38.4378, bias norm 0.410878, bias grad norm 0.410878
    fc2 norm   34.6837, grad norm 34.6837, bias norm 1.21848, bias grad norm 1.21848
    fc3 norm   25.9653, grad norm 25.9653, bias norm 1.0298, bias grad norm 1.0298
    fc4 norm   16.3988, grad norm 16.3988, bias norm 1.32206, bias grad norm 1.32206
    fc5 norm   5.92281, grad norm 5.92281, bias norm 0.261858, bias grad norm 0.261858
    out_y_norm 1567.82, grad norm 0.000665616
    training accuracy 0.981139, valid'n accuracy 0.976937, loss 0.0835038
    training accuracy2 0.823333, valid'n accuracy2 0.799479
    seconds since start 277.896, ETA(s) 24985.4
Step 1200:
    fc1 norm   38.9425, grad norm 38.9425, bias norm 0.396611, bias grad norm 0.396611
    fc2 norm   35.5247, grad norm 35.5247, bias norm 1.2075, bias grad norm 1.2075
    fc3 norm   27.0403, grad norm 27.0403, bias norm 1.0344, bias grad norm 1.0344
    fc4 norm   17.2053, grad norm 17.2053, bias norm 1.40229, bias grad norm 1.40229
    fc5 norm   6.32492, grad norm 6.32492, bias norm 0.29927, bias grad norm 0.29927
    out_y_norm 1735.52, grad norm 0.000576977
    training accuracy 0.987917, valid'n accuracy 0.985795, loss 0.0718731
    training accuracy2 0.881333, valid'n accuracy2 0.868608
    seconds since start 304.632, ETA(s) 25081.4
Step 1300:
    fc1 norm   39.4239, grad norm 39.4239, bias norm 0.383298, bias grad norm 0.383298
    fc2 norm   36.2894, grad norm 36.2894, bias norm 1.19475, bias grad norm 1.19475
    fc3 norm   28.067, grad norm 28.067, bias norm 1.03226, bias grad norm 1.03226
    fc4 norm   17.9827, grad norm 17.9827, bias norm 1.48993, bias grad norm 1.48993
    fc5 norm   6.71975, grad norm 6.71975, bias norm 0.342536, bias grad norm 0.342536
    out_y_norm 1877.7, grad norm 0.000512958
    training accuracy 0.991722, valid'n accuracy 0.989643, loss 0.0646544
    training accuracy2 0.916667, valid'n accuracy2 0.906723
    seconds since start 330.592, ETA(s) 25099.6
Step 1400:
    fc1 norm   39.8309, grad norm 39.8309, bias norm 0.373191, bias grad norm 0.373191
    fc2 norm   36.9748, grad norm 36.9748, bias norm 1.18707, bias grad norm 1.18707
    fc3 norm   29.031, grad norm 29.031, bias norm 1.0366, bias grad norm 1.0366
    fc4 norm   18.7421, grad norm 18.7421, bias norm 1.58204, bias grad norm 1.58204
    fc5 norm   7.11103, grad norm 7.11103, bias norm 0.390085, bias grad norm 0.390085
    out_y_norm 2018.62, grad norm 0.000464453
    training accuracy 0.993111, valid'n accuracy 0.993233, loss 0.0591016
    training accuracy2 0.936, valid'n accuracy2 0.941998
    seconds since start 358.902, ETA(s) 25277
Step 1500:
    fc1 norm   40.1997, grad norm 40.1997, bias norm 0.361986, bias grad norm 0.361986
    fc2 norm   37.6292, grad norm 37.6292, bias norm 1.17367, bias grad norm 1.17367
    fc3 norm   29.9401, grad norm 29.9401, bias norm 1.04427, bias grad norm 1.04427
    fc4 norm   19.4977, grad norm 19.4977, bias norm 1.66398, bias grad norm 1.66398
    fc5 norm   7.50154, grad norm 7.50154, bias norm 0.442098, bias grad norm 0.442098
    out_y_norm 2190.09, grad norm 0.000380353
    training accuracy 0.995917, valid'n accuracy 0.995561, loss 0.0503476
    training accuracy2 0.957667, valid'n accuracy2 0.956439
    seconds since start 386.06, ETA(s) 25351.3
Step 1600:
    fc1 norm   40.4492, grad norm 40.4492, bias norm 0.35294, bias grad norm 0.35294
    fc2 norm   38.1462, grad norm 38.1462, bias norm 1.16153, bias grad norm 1.16153
    fc3 norm   30.7524, grad norm 30.7524, bias norm 1.04998, bias grad norm 1.04998
    fc4 norm   20.2135, grad norm 20.2135, bias norm 1.7345, bias grad norm 1.7345
    fc5 norm   7.88021, grad norm 7.88021, bias norm 0.492493, bias grad norm 0.492493
    out_y_norm 2324.94, grad norm 0.00031827
    training accuracy 0.997972, valid'n accuracy 0.997199, loss 0.0457389
    training accuracy2 0.979, valid'n accuracy2 0.971828
    seconds since start 413.147, ETA(s) 25408.5
Step 1700:
    fc1 norm   40.6829, grad norm 40.6829, bias norm 0.343433, bias grad norm 0.343433
    fc2 norm   38.5401, grad norm 38.5401, bias norm 1.1528, bias grad norm 1.1528
    fc3 norm   31.4873, grad norm 31.4873, bias norm 1.05873, bias grad norm 1.05873
    fc4 norm   20.8991, grad norm 20.8991, bias norm 1.78677, bias grad norm 1.78677
    fc5 norm   8.25078, grad norm 8.25078, bias norm 0.542951, bias grad norm 0.542951
    out_y_norm 2496.92, grad norm 0.000283889
    training accuracy 0.998306, valid'n accuracy 0.997869, loss 0.0427915
    training accuracy2 0.982667, valid'n accuracy2 0.97822
    seconds since start 441.004, ETA(s) 25500.4
Step 1800:
    fc1 norm   40.7883, grad norm 40.7883, bias norm 0.334659, bias grad norm 0.334659
    fc2 norm   38.8043, grad norm 38.8043, bias norm 1.14053, bias grad norm 1.14053
    fc3 norm   32.1193, grad norm 32.1193, bias norm 1.06297, bias grad norm 1.06297
    fc4 norm   21.5336, grad norm 21.5336, bias norm 1.84044, bias grad norm 1.84044
    fc5 norm   8.60714, grad norm 8.60714, bias norm 0.593539, bias grad norm 0.593539
    out_y_norm 2616.66, grad norm 0.000265972
    training accuracy 0.998389, valid'n accuracy 0.998363, loss 0.0408543
    training accuracy2 0.983667, valid'n accuracy2 0.982244
    seconds since start 468.227, ETA(s) 25544.4
Step 1900:
    fc1 norm   40.8405, grad norm 40.8405, bias norm 0.325437, bias grad norm 0.325437
    fc2 norm   38.9737, grad norm 38.9737, bias norm 1.13282, bias grad norm 1.13282
    fc3 norm   32.6737, grad norm 32.6737, bias norm 1.07301, bias grad norm 1.07301
    fc4 norm   22.1366, grad norm 22.1366, bias norm 1.88554, bias grad norm 1.88554
    fc5 norm   8.95838, grad norm 8.95838, bias norm 0.646924, bias grad norm 0.646924
    out_y_norm 2789.18, grad norm 0.000226149
    training accuracy 0.999, valid'n accuracy 0.99858, loss 0.0381988
    training accuracy2 0.988, valid'n accuracy2 0.983191
    seconds since start 497.856, ETA(s) 25705.1
Step 2000:
    fc1 norm   40.8496, grad norm 40.8496, bias norm 0.318907, bias grad norm 0.318907
    fc2 norm   39.0511, grad norm 39.0511, bias norm 1.11895, bias grad norm 1.11895
    fc3 norm   33.1562, grad norm 33.1562, bias norm 1.07555, bias grad norm 1.07555
    fc4 norm   22.6944, grad norm 22.6944, bias norm 1.92763, bias grad norm 1.92763
    fc5 norm   9.30041, grad norm 9.30041, bias norm 0.698848, bias grad norm 0.698848
    out_y_norm 2852.99, grad norm 0.000192511
    training accuracy 0.9995, valid'n accuracy 0.999053, loss 0.0365526
    training accuracy2 0.994, valid'n accuracy2 0.988636
    seconds since start 526.127, ETA(s) 25780.2
Step 2100:
    fc1 norm   40.798, grad norm 40.798, bias norm 0.311669, bias grad norm 0.311669
    fc2 norm   39.0583, grad norm 39.0583, bias norm 1.11001, bias grad norm 1.11001
    fc3 norm   33.5585, grad norm 33.5585, bias norm 1.08538, bias grad norm 1.08538
    fc4 norm   23.2286, grad norm 23.2286, bias norm 1.95817, bias grad norm 1.95817
    fc5 norm   9.6377, grad norm 9.6377, bias norm 0.752685, bias grad norm 0.752685
    out_y_norm 2955.17, grad norm 0.000165643
    training accuracy 0.999778, valid'n accuracy 0.999665, loss 0.0351433
    training accuracy2 0.997333, valid'n accuracy2 0.995975
    seconds since start 551.592, ETA(s) 25714.7
Step 2200:
    fc1 norm   40.7084, grad norm 40.7084, bias norm 0.304785, bias grad norm 0.304785
    fc2 norm   38.9868, grad norm 38.9868, bias norm 1.09567, bias grad norm 1.09567
    fc3 norm   33.8872, grad norm 33.8872, bias norm 1.09389, bias grad norm 1.09389
    fc4 norm   23.7259, grad norm 23.7259, bias norm 1.98891, bias grad norm 1.98891
    fc5 norm   9.96756, grad norm 9.96756, bias norm 0.806141, bias grad norm 0.806141
    out_y_norm 3073.94, grad norm 0.000159948
    training accuracy 0.999778, valid'n accuracy 0.999842, loss 0.0345189
    training accuracy2 0.997333, valid'n accuracy2 0.998106
    seconds since start 576.965, ETA(s) 25648.7
Step 2300:
    fc1 norm   40.5481, grad norm 40.5481, bias norm 0.296848, bias grad norm 0.296848
    fc2 norm   38.8352, grad norm 38.8352, bias norm 1.08352, bias grad norm 1.08352
    fc3 norm   34.1385, grad norm 34.1385, bias norm 1.10088, bias grad norm 1.10088
    fc4 norm   24.1889, grad norm 24.1889, bias norm 2.01865, bias grad norm 2.01865
    fc5 norm   10.292, grad norm 10.292, bias norm 0.859485, bias grad norm 0.859485
    out_y_norm 3184.07, grad norm 0.000130061
    training accuracy 0.999917, valid'n accuracy 0.999941, loss 0.0328052
    training accuracy2 0.999, valid'n accuracy2 0.99929
    seconds since start 603.131, ETA(s) 25620
Step 2400:
    fc1 norm   40.3909, grad norm 40.3909, bias norm 0.289637, bias grad norm 0.289637
    fc2 norm   38.689, grad norm 38.689, bias norm 1.06929, bias grad norm 1.06929
    fc3 norm   34.3398, grad norm 34.3398, bias norm 1.10972, bias grad norm 1.10972
    fc4 norm   24.6225, grad norm 24.6225, bias norm 2.04479, bias grad norm 2.04479
    fc5 norm   10.6143, grad norm 10.6143, bias norm 0.91399, bias grad norm 0.91399
    out_y_norm 3331.91, grad norm 0.0001169
    training accuracy 0.999972, valid'n accuracy 0.99998, loss 0.0319111
    training accuracy2 0.999667, valid'n accuracy2 0.999763
    seconds since start 629.223, ETA(s) 25588.4
Step 2500:
    fc1 norm   40.1866, grad norm 40.1866, bias norm 0.284556, bias grad norm 0.284556
    fc2 norm   38.4968, grad norm 38.4968, bias norm 1.05694, bias grad norm 1.05694
    fc3 norm   34.4996, grad norm 34.4996, bias norm 1.11415, bias grad norm 1.11415
    fc4 norm   25.0232, grad norm 25.0232, bias norm 2.06467, bias grad norm 2.06467
    fc5 norm   10.934, grad norm 10.934, bias norm 0.969732, bias grad norm 0.969732
    out_y_norm 3358.83, grad norm 0.000108081
    training accuracy 1, valid'n accuracy 1, loss 0.0316229
    training accuracy2 1, valid'n accuracy2 1
    seconds since start 654.313, ETA(s) 25518.2
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-17-f2a960ce6b05> in <module>()
     18     if i > 0:
     19         if i<2: train_start = timeit.default_timer()
---> 20         sess.run(train_step, feed_dict={inputs:batch_x,true_y:batch_y, keep_prob1:0.5, keep_prob2:0.5, keep_prob3:0.5})
     21         if i<2: train_stop = timeit.default_timer()
     22         if i<2: print("Training time for 1000 steps: %g"%(1000*(train_stop-train_start)))

/Users/kayote/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    764     try:
    765       result = self._run(None, fetches, feed_dict, options_ptr,
--> 766                          run_metadata_ptr)
    767       if run_metadata:
    768         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/Users/kayote/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
    962     if final_fetches or final_targets:
    963       results = self._do_run(handle, final_targets, final_fetches,
--> 964                              feed_dict_string, options, run_metadata)
    965     else:
    966       results = []

/Users/kayote/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1012     if handle is None:
   1013       return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1014                            target_list, options, run_metadata)
   1015     else:
   1016       return self._do_call(_prun_fn, self._session, handle, feed_dict,

/Users/kayote/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_call(self, fn, *args)
   1019   def _do_call(self, fn, *args):
   1020     try:
-> 1021       return fn(*args)
   1022     except errors.OpError as e:
   1023       message = compat.as_text(e.message)

/Users/kayote/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1001         return tf_session.TF_Run(session, options,
   1002                                  feed_dict, fetch_list, target_list,
-> 1003                                  status, run_metadata)
   1004 
   1005     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [18]:
#I want to get indices of train_x that are correctly, and incorrectly identified
tmp1, tmp2 = sess.run([tf.nn.sigmoid(out_y), true_y], 
                            feed_dict={
                                inputs:validation_x,
                                true_y:validation_y, 
                                keep_prob1:1.0,keep_prob2:1.0,keep_prob3:1.0})

#print("Correct Predictions:")
#for i in np.where(np.sum(np.abs(np.round(tmp1)-tmp2),1)==0)[0]:
#    print('Guess vector: '+str(tmp1[i,:]))
#    print('Guess: '+str(np.where(np.round(tmp1[i,:])==1)))
#    #tmp = np.power(validation_data['fft'].iloc[i],2.0)
#    #tmp = librosa.istft(np.repeat(tmp.reshape([len(tmp),1]),10,axis=1))
#    #IPython.display.display(IPython.display.Audio(data=tmp, rate=sr))
#print("")    
    
print("Incorrect Predictions:")
for i in np.where(np.sum(np.abs(np.round(tmp1)-tmp2),1)!=0)[0]:
    print(i)
    #print('Guess vector: '+str(tmp1[i,:]))
    print('Guess: '+str(np.where(np.round(tmp1[i,:])==1))+' Reality: '+str(np.where(tmp2[i,:]==1)))
    #tmp = np.power(validation_data['fft'].iloc[i],2.0)
    #tmp = librosa.istft(np.repeat(tmp.reshape([len(tmp),1]),10,axis=1))
    #IPython.display.display(IPython.display.Audio(data=tmp, rate=sr))
print("")


Correct Predictions:

Incorrect Predictions:


In [22]:
#audio_path = librosa.util.example_audio_file()

emaj, sr = librosa.load('emaj.aiff')
amaj, sr = librosa.load('amaj.aiff')

emaj_fft = get_fft(emaj)
qmaj_fft = get_fft(librosa.effects.pitch_shift(emaj,sr,n_steps=1))
amaj_fft = get_fft(amaj)
emaj_fft = emaj_fft/np.sum(emaj_fft)
qmaj_fft = qmaj_fft/np.sum(qmaj_fft)
amaj_fft = amaj_fft/np.sum(amaj_fft)

rwds = pd.DataFrame() #real-world dataset

#http://www.electronics.dit.ie/staff/tscarff/Music_technology/midi/midi_note_numbers_for_octaves.htm
emaj_dict = {'root':4,'majmin3':'maj','majmin7':'none','pitch_class':[4, 8, 11]}
#emaj_dict.update({'target':label_to_chord.index(emaj_dict)})
emaj_dict.update({'fft':emaj_fft})
qmaj_dict = {'root':5,'majmin3':'maj','majmin7':'none','pitch_class':[0, 5, 9]}
#qmaj_dict.update({'target':label_to_chord.index(qmaj_dict)})
qmaj_dict.update({'fft':qmaj_fft})
amaj_dict = {'root':9,'majmin3':'maj','majmin7':'none','pitch_class':[1, 4, 9]}
#amaj_dict.update({'target':label_to_chord.index(amaj_dict)})
amaj_dict.update({'fft':amaj_fft})



rwds = rwds.append(emaj_dict,ignore_index=True)
rwds = rwds.append(qmaj_dict,ignore_index=True)
rwds = rwds.append(amaj_dict,ignore_index=True)

#rwds["one_hot_encoding"] = [to1hot(x,len(label_to_chord)) for x in rwds['target']]
rwds["multi_hot_encoding"] = [toMultiHot(x,12) for x in rwds['pitch_class']]



realworld_x = np.vstack(rwds['fft']).reshape(rwds.shape[0],rwds['fft'].iloc[0].shape[0],1,1).astype(np.float32)
#realworld_y = np.vstack(rwds["one_hot_encoding"])
realworld_y = np.vstack(rwds["multi_hot_encoding"])


tmp1, tmp2 = sess.run([tf.nn.sigmoid(out_y), true_y], 
                            feed_dict={
                                inputs:realworld_x,
                                true_y:realworld_y, 
                                keep_prob1:1.0, keep_prob2:1.0, keep_prob3:1.0})

for i in range(tmp1.shape[0]):
    #print('Guess vector: '+str(tmp1[i,:]))
    print('Guess: '+str(np.where(np.round(tmp1[i,:])==1))+' Reality: '+str(np.where(tmp2[i,:]==1)))


Guess: (array([ 4,  8, 11]),) Reality: (array([ 4,  8, 11]),)
Guess: (array([0, 5]),) Reality: (array([0, 5, 9]),)
Guess: (array([1, 4, 9]),) Reality: (array([1, 4, 9]),)

In [ ]:
#any_song_file is originally an mp3 on my hard drive of The Beatles' "Something"
#I can't upload that, but you can substitute any song of your choosing
something, something_sr = librosa.load(any_song_file)

something_harmonic, something_percussive = librosa.effects.hpss(something)
print(something_sr)

In [ ]:
def extract_fft(sample):
    try:
        S_sample = np.abs((librosa.stft(sample)))**0.5
        return S_sample[:,range(0,S_sample.shape[1],9)]
    except:
        return []
    
tmp = extract_fft(something)
print(tmp.shape)
print(len(something)/something_sr/60.)
print(7881./(float(len(something))/float(something_sr))/5.)
print(round(float(len(something))/7881.*4))
print('Samples per sec: %g'%(876/3.03/60))

In [ ]:
something_x = extract_fft(something_harmonic).transpose()
something_x = something_x.reshape([something_x.shape[0], something_x.shape[1], 1, 1])
something_x.shape

#I want to get indices of train_x that are correctly, and incorrectly identified
tmp1 = sess.run(tf.round(tf.nn.sigmoid(out_y)), 
                            feed_dict={
                                inputs:something_x,
                                #true_y:validation_y, 
                                keep_prob1:1.0,keep_prob2:1.0,keep_prob3:1.0})



print("Predictions:")
for i in range(tmp1.shape[0]):
    print(np.where(tmp1[i,:]==1))