After solving all the questions in the exam, save your notebook as username.ipynb
and submit it to: https://www.dropbox.com/request/GM4PK3ibNSH46IejnzLP
In [20]:
def bn_model(data, k):
'''
data: training data as a list of lists
[[x_1, x_2, ..., x_n, A, B],
 [x_1, x_2, ..., x_n, A, B],
 ...
 [x_1, x_2, ..., x_n, A, B]]
k: Laplace smoothing parameter
returns:
the model as a dictionary with the following form. For example:
{ 'A' : 0.1,
('B', 'A', 0) : 0.4,
('B', 'A', 1) : 0.7,
('x1', 'B', 0) : 0.3,
('x1', 'B', 1) : 0.1,
('x2', 'B', 0) : 0.2,
('x2', 'B', 1) : 0.6,
}
In this case the entry for 'A' means that P(A = 1) = 0.1 and therefore P(A = 0) = 0.9.
In the same way, the entry for ('B', 'A', 0) indicates that P(B = 1 | A = 0) = 0.4, and the
entry for ('x1', 'B', 1) indicates that P(x_1 = 1 | B = 1) = 0.1 and therefore
P(x_1 = 0 | B = 1) = 0.9.
'''
model = {}
return model
data = [[0, 0, 1, 1],
[0, 1, 0, 0],
[0, 0, 0, 0],
[1, 1, 0, 1],
[0, 1, 1, 1],
[0, 1, 0, 1]]
bn_model(data,3.5)
Out[20]:
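For reference, one possible implementation is sketched below. It is not part of the exam template; the name bn_model_sketch is made up for illustration. It assumes every row has the form [x_1, ..., x_n, A, B] with binary values, that the network structure is A -> B -> x_1, ..., x_n as the model keys suggest, and that each Laplace-smoothed estimate is (count + k) / (total + 2k), which is consistent with the expected dictionaries in the testing cell further down.

def bn_model_sketch(data, k):
    # Hypothetical reference sketch, not the required bn_model solution.
    # Columns of each row: x_1 .. x_n, then A, then B (all binary 0/1).
    n = len(data[0]) - 2
    a_col, b_col = n, n + 1

    def smoothed(count, total):
        # Laplace smoothing for a binary variable: (count + k) / (total + 2k).
        return (count + k) / (total + 2.0 * k)

    model = {}
    # P(A = 1)
    model['A'] = smoothed(sum(row[a_col] for row in data), len(data))
    # P(B = 1 | A = a)
    for a in (0, 1):
        rows = [row for row in data if row[a_col] == a]
        model[('B', 'A', a)] = smoothed(sum(r[b_col] for r in rows), len(rows))
    # P(x_i = 1 | B = b)
    for b in (0, 1):
        rows = [row for row in data if row[b_col] == b]
        for i in range(n):
            model[('x%d' % (i + 1), 'B', b)] = smoothed(sum(r[i] for r in rows), len(rows))
    return model

Under these assumptions, calling the sketch on the data above with k = 3.5 should reproduce the dictionary that Test 4 in the testing cell expects.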
In [12]:
def p1(model, X):
'''
model: a dictionary with the model probabilities.
X: a list with x_i values [x_1, x_2, ... , x_n]
Returns: the probability P(B = 1 | x_1, x_2, ... , x_n)
'''
return 0.0
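A minimal sketch of this computation under the same assumed structure A -> B -> x_1, ..., x_n: the prior on B is obtained by marginalising out A, the x_i are treated as conditionally independent given B, and Bayes' rule normalises over B. The name p1_sketch is illustrative only.

def p1_sketch(model, X):
    # Hypothetical sketch of P(B = 1 | x_1, ..., x_n), not the required p1 solution.
    # Prior on B, obtained by marginalising out A.
    p_a1 = model['A']
    p_b1 = p_a1 * model[('B', 'A', 1)] + (1.0 - p_a1) * model[('B', 'A', 0)]
    joint = {1: p_b1, 0: 1.0 - p_b1}
    # Multiply in P(x_i | B = b) for each observed feature (naive-Bayes assumption).
    for b in (0, 1):
        for i, x in enumerate(X):
            p_xi1 = model[('x%d' % (i + 1), 'B', b)]
            joint[b] *= p_xi1 if x == 1 else (1.0 - p_xi1)
    # Normalise over B.
    return joint[1] / (joint[0] + joint[1])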
In [13]:
def p2(model, X):
'''
model: a dictionary with the model probabilities.
X: a list with x_i values [x_1, x_2, ... , x_n]
Returns: the probability P(A = 1 | x_1, x_2, ... , x_n)
'''
return 0.0
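A corresponding sketch for this query, again assuming the structure implied by the model keys: the joint P(A = a, x_1, ..., x_n) is obtained by summing the naive-Bayes likelihood over the unobserved B, then normalising over A. The name p2_sketch is illustrative, not the required solution.

def p2_sketch(model, X):
    # Hypothetical sketch of P(A = 1 | x_1, ..., x_n), not the required p2 solution.
    def likelihood_given_b(b):
        # P(x_1, ..., x_n | B = b) under the naive-Bayes assumption.
        p = 1.0
        for i, x in enumerate(X):
            p_xi1 = model[('x%d' % (i + 1), 'B', b)]
            p *= p_xi1 if x == 1 else (1.0 - p_xi1)
        return p

    joint = {}
    for a in (0, 1):
        p_a = model['A'] if a == 1 else 1.0 - model['A']
        p_b1 = model[('B', 'A', a)]
        # Sum over the unobserved B, then weight by P(A = a).
        joint[a] = p_a * (p_b1 * likelihood_given_b(1) + (1.0 - p_b1) * likelihood_given_b(0))
    # Normalise over A.
    return joint[1] / (joint[0] + joint[1])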
In [14]:
def p3(model, x_1, x_n):
'''
model: a dictionary with the model probabilities.
x_1, x_n: x values
Returns: the probability P(A = 1 | x_1, x_n)
'''
return 0.0
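A sketch for this query under the same assumptions. Because the unobserved x_2, ..., x_{n-1} are children of B, marginalising them out leaves only the factors for x_1 and x_n; here n is recovered from the ('x_i', 'B', b) keys present in the model. The name p3_sketch is illustrative only.

def p3_sketch(model, x_1, x_n):
    # Hypothetical sketch of P(A = 1 | x_1, x_n), not the required p3 solution.
    # Recover n (the index of the last feature) from the ('x_i', 'B', b) keys.
    n = max(int(key[0][1:]) for key in model
            if isinstance(key, tuple) and key[0].startswith('x'))

    def evidence_given_b(b):
        # P(x_1, x_n | B = b); the unobserved x_2 ... x_{n-1} marginalise to 1.
        px1 = model[('x1', 'B', b)]
        pxn = model[('x%d' % n, 'B', b)]
        p = px1 if x_1 == 1 else (1.0 - px1)
        p *= pxn if x_n == 1 else (1.0 - pxn)
        return p

    joint = {}
    for a in (0, 1):
        p_a = model['A'] if a == 1 else 1.0 - model['A']
        p_b1 = model[('B', 'A', a)]
        joint[a] = p_a * (p_b1 * evidence_given_b(1) + (1.0 - p_b1) * evidence_given_b(0))
    return joint[1] / (joint[0] + joint[1])

Once p1, p2 and p3 are implemented, test_P2(), test_P3() and test_P4() from the testing cell below can be run alongside test_P1().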
In [21]:
""" Testing """
def approx_equal(val1, val2):
return abs(val1-val2) <= 0.00001
def test_dict(test,answer):
if sorted(test.keys()) != sorted(answer.keys()): return False
for k,v in test.items():
if not approx_equal(v,answer[k]):
return False
return True
def test_P1():
data = [[0, 0, 1, 0],
[0, 0, 1, 0],
[0, 0, 0, 0],
[1, 0, 0, 1],
[0, 0, 1, 1],
[0, 0, 0, 1]]
student = bn_model(data,0.0)
print(student)
answer = {'A': 0.5, ('x1', 'B', 0): 0.0, ('x2', 'B', 0): 0.0, ('B', 'A', 0): 0.6666666666666666, ('x2', 'B', 1): 0.0, ('B', 'A', 1): 0.3333333333333333, ('x1', 'B', 1): 0.3333333333333333}
assert(test_dict(student,answer))
print "Test 1"
data = [[0, 0, 1, 1],
[0, 1, 0, 0],
[0, 0, 0, 0],
[1, 1, 0, 1],
[0, 1, 1, 1],
[0, 1, 0, 1]]
student = bn_model(data,0.0)
print(student)
answer = {'A': 0.3333333333333333, ('x1', 'B', 0): 0.0, ('x2', 'B', 0): 0.5, ('B', 'A', 0): 0.5, ('x2', 'B', 1): 0.75, ('B', 'A', 1): 1.0, ('x1', 'B', 1): 0.25}
assert(test_dict(student,answer))
print "Test 2"
data = [[0, 0, 1, 0],
[0, 0, 1, 0],
[0, 0, 0, 0],
[1, 0, 0, 1],
[0, 0, 1, 1],
[0, 0, 0, 1]]
student = bn_model(data,1.0)
print(student)
answer = {'A': 0.5,
('B', 'A', 0): 0.6,
('B', 'A', 1): 0.4,
('x1', 'B', 0): 0.2,
('x1', 'B', 1): 0.4,
('x2', 'B', 0): 0.2,
('x2', 'B', 1): 0.2}
assert(test_dict(student,answer))
print "Test 3"
data = [[0, 0, 1, 1],
[0, 1, 0, 0],
[0, 0, 0, 0],
[1, 1, 0, 1],
[0, 1, 1, 1],
[0, 1, 0, 1]]
student = bn_model(data,3.5)
print(student)
answer = {'A': 0.4230769230769231,
('B', 'A', 0): 0.5,
('B', 'A', 1): 0.6111111111111112,
('x1', 'B', 0): 0.3888888888888889,
('x1', 'B', 1): 0.4090909090909091,
('x2', 'B', 0): 0.5,
('x2', 'B', 1): 0.5909090909090909}
assert(test_dict(student,answer))
print "Test 4"
def test_P2():
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
}
#print p1(model,[1])
assert(approx_equal(0.262948207171,p1(model,[1])))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
}
#print p1(model,[1,0])
assert(approx_equal(0.527156549521,p1(model,[1,0])))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
('x3', 'B', 1) : 0.1,
('x3', 'B', 0) : 0.2,
}
#print p1(model,[1,0,1])
assert(approx_equal(0.357917570499,p1(model,[1,0,1])))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
('x3', 'B', 1) : 0.1,
('x3', 'B', 0) : 0.2,
('x4', 'B', 1) : 0.1,
('x4', 'B', 0) : 0.9
}
#print p1(model,[1,0,1,1])
assert(approx_equal(0.0583244962884,p1(model,[1,0,1,1])))
def test_P3():
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
}
#print(p2(model,[1]))
assert(approx_equal(0.29561752988,p2(model,[1])))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
}
#print p2(model,[1,0])
assert(approx_equal(0.423846645367,p2(model,[1,0])))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
('x3', 'B', 1) : 0.1,
('x3', 'B', 0) : 0.2,
}
#print p2(model,[1,0,1])
assert(approx_equal(0.341709327549,p2(model,[1,0,1])))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
('x3', 'B', 1) : 0.1,
('x3', 'B', 0) : 0.2,
('x4', 'B', 1) : 0.1,
('x4', 'B', 0) : 0.9
}
#print p2(model,[1,0,1,1])
assert(approx_equal(0.196306822199,p2(model,[1,0,1,1])))
def test_P4():
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
('x3', 'B', 1) : 0.1,
('x3', 'B', 0) : 0.2,
}
assert(approx_equal(0.358504672897 ,p3(model,0,1)))
model = { 'A' : 0.35,
('B', 'A', 0) : 0.2,
('B', 'A', 1) : 0.7,
('x1', 'B', 1) : 0.44,
('x1', 'B', 0) : 0.74,
('x2', 'B', 0) : 0.84,
('x2', 'B', 1) : 0.5,
('x3', 'B', 1) : 0.1,
('x3', 'B', 0) : 0.2,
('x4', 'B', 1) : 0.1,
('x4', 'B', 0) : 0.9
}
assert(approx_equal(0.186504910456,p3(model,1,1)))
test_P1()
In [ ]: