Graduation time by training program

COB vs NEURO (see slide)


In [53]:
print("Hellow")


Hello

In [54]:
print(1+1)


2

In [56]:
x = 'asdf' + 'wer'
print(x)


asdfwer

In [64]:
x = [3,4,'asdf']

In [68]:
print(x[-1])


asdf

In [59]:
f = open('../data/gradtimes.txt','r')
print(f.readline())


NEURO	7.59


In [62]:
f = open('../data/gradtimes.txt','r')
numStudents = 0
for line in f:
    numStudents = numStudents + 1

print(numStudents)


525

In [87]:
f = open('/Users/justinjee/Desktop/neuro.txt','r')
allTimes = []

print(len(allTimes))

for line in f:
    (program, time) = line.split()
    allTimes.append(float(time))
    
print(len(allTimes))


0
84

In [97]:
a = '5'
b = '8'
print(a+b)
print(int(a)+float(b))


58
13.0

In [88]:
print(sum(allTimes)/len(allTimes))


5.93273809524

In [89]:
f = open('/Users/justinjee/Desktop/nonneuro.txt','r')
allTimes = []
for line in f:
    (program, time) = line.split()
    allTimes.append(float(time))
    
print(sum(allTimes)/len(allTimes))


5.33195011338
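
The two files read above live on one user's Desktop. If only ../data/gradtimes.txt is available, both lists can be built in a single pass, assuming every line of that file has the same tab-separated program/years layout as the line printed by readline() earlier. This is a sketch that was not run in this session, and the names neuroTimes and otherTimes are illustrative:

In [ ]:
neuroTimes = []
otherTimes = []
f = open('../data/gradtimes.txt','r')
for line in f:
    (program, time) = line.split()
    if program == 'NEURO':
        neuroTimes.append(float(time))
    else:
        otherTimes.append(float(time))
f.close()

print(len(neuroTimes))
print(len(otherTimes))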

In [118]:
import random
r = random.sample(allTimes,84)
print(sum(r)/len(r))


5.23404761905

In [121]:
n = 0
for i in range(100000):
    r = random.sample(allTimes,84)
    if sum(r)/len(r) >= 5.93:  # 5.93 = observed mean of the NEURO times
        n += 1

print(n/100000.0)


0.00023
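
That 0.00023 is an empirical p-value: only 23 of the 100,000 random draws of 84 non-NEURO times had a mean of at least 5.93 (roughly the NEURO mean computed above), so a gap that large is very unlikely to arise by chance. Below is a sketch, not run in this session, that wraps the same resampling test in a function; the name resample_p_value, the default iteration count, and the seed are illustrative choices:

In [ ]:
import random

def resample_p_value(population, sample_size, observed_mean, n_iter=100000, seed=0):
    # fraction of random samples (drawn without replacement from population)
    # whose mean is at least observed_mean
    random.seed(seed)
    n = 0
    for i in range(n_iter):
        r = random.sample(population, sample_size)
        if sum(r)/len(r) >= observed_mean:
            n += 1
    return float(n)/n_iter

# e.g. resample_p_value(allTimes, 84, 5.93)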

Standard deviation vs. standard error of the mean (the standard deviation of the sample mean)


In [126]:
def mean(a):
    return sum(a)/len(a)

a = [1,2,3]
print(mean(a))

def std(a):
    # population standard deviation: root mean squared deviation from the mean
    er = 0
    for i in a:
        er += (mean(a)-i)**2
    return (float(er)/len(a))**0.5

print(std(a))


2
0.816496580928

In [134]:
r = random.sample(allTimes,84)
print(mean(r))
print(std(r))


5.18821428571
1.77516542537

In [130]:
print(std(allTimes))


1.62119571987

In [140]:
means = []
for i in range(10):
    r = random.sample(allTimes,84)
    means.append(mean(r))

print(std(allTimes))
print(std(means))  # standard error of the mean!


1.62119571987
0.151228617705

In [142]:
print(std(allTimes)/(84)**0.5)  # theoretical standard error of the mean


0.176886954825
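
The empirical value above (about 0.15) comes from only 10 resamples, so it is a noisy estimate; it will also tend to sit a little below std(allTimes)/sqrt(84) because random.sample draws without replacement from a finite list. A sketch, not run here, that repeats the resampling with more iterations for a steadier estimate; 1000 is an arbitrary choice:

In [ ]:
means = []
for i in range(1000):  # more resamples than the 10 above
    r = random.sample(allTimes,84)
    means.append(mean(r))

print(std(means))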
