# Basics of Machine Learning

``````1. Data: Image, Text, Audio, Video, Structured data
2. A model of how to transform the data
3. A loss function to measure how well we're doing
4. An algorithm to tweak the model parameters such that the loss function is minimized``````

### NDArray in MXNet

``````

In [1]:

# Imports: MXNet and its NDArray API, plus NumPy for interop later in the notebook.
import mxnet as mx
from mxnet import nd
import numpy as np
# Fix the global random seed so the sampled values shown below are reproducible.
mx.random.seed(1)

``````
``````

In [2]:

# nd.empty allocates a 3x4 array WITHOUT initializing it, so the printed
# values are whatever happened to be in that memory (see output below).
x = nd.empty((3, 4))
print(x)

``````
``````

[[ -5.40419733e+35   4.57594014e-41   2.61536179e-33   1.40129846e-45]
[  2.61511640e-33   1.40129846e-45   1.21392126e-33   1.40129846e-45]
[  0.00000000e+00   0.00000000e+00   0.00000000e+00   0.00000000e+00]]
<NDArray 3x4 @cpu(0)>

``````
``````

In [3]:

# Allocate a 3x4 array filled with ones; the bare expression on the last
# line makes the notebook display its repr as the cell output.
x = nd.ones((3, 4))
x

``````
``````

Out[3]:

[[ 1.  1.  1.  1.]
[ 1.  1.  1.  1.]
[ 1.  1.  1.  1.]]
<NDArray 3x4 @cpu(0)>

``````
``````

In [4]:

# Sample a 3x4 array from a standard normal distribution (mean 0, std 1).
# NOTE: Python 2 print statements throughout -- the outputs in this
# transcript were produced on Python 2 (see the `3L` longs below).
y = nd.random_normal(0, 1, shape=(3, 4))
print y
print y.shape  # tuple of dimensions: (3, 4)
print y.size   # total number of elements: 3 * 4 = 12

``````
``````

[[ 0.03629481 -0.49024421 -0.95017916  0.03751944]
[-0.72984636 -2.04010558  1.482131    1.04082799]
[-0.45256865  0.31160426 -0.83673781 -0.78830057]]
<NDArray 3x4 @cpu(0)>
(3L, 4L)
12

``````
``````

In [5]:

# Element-wise (Hadamard) product; x is all ones, so the result equals y.
x * y

``````
``````

Out[5]:

[[ 0.03629481 -0.49024421 -0.95017916  0.03751944]
[-0.72984636 -2.04010558  1.482131    1.04082799]
[-0.45256865  0.31160426 -0.83673781 -0.78830057]]
<NDArray 3x4 @cpu(0)>

``````
``````

In [6]:

# Element-wise exponential of y.
nd.exp(y)

``````
``````

Out[6]:

[[ 1.03696156  0.61247683  0.38667175  1.03823221]
[ 0.48198304  0.13001499  4.40231705  2.83156061]
[ 0.63599241  1.36561418  0.43312114  0.45461673]]
<NDArray 3x4 @cpu(0)>

``````
``````

In [7]:

# Matrix product of x (3x4) with the transpose of y (4x3) -> 3x3 result.
nd.dot(x, y.T)

``````
``````

Out[7]:

[[-1.3666091  -0.24699283 -1.76600277]
[-1.3666091  -0.24699283 -1.76600277]
[-1.3666091  -0.24699283 -1.76600277]]
<NDArray 3x3 @cpu(0)>

``````
``````

In [8]:

# In-place vs. out-of-place assignment:
# slice assignment (y[:] = ...) writes into y's existing buffer, so id(y)
# is unchanged, whereas plain assignment (y = ...) rebinds the name y to a
# freshly allocated array, so id(y) changes (see output below).
print "The current mem host y is {}".format(id(y))
y[:] = x + y
print "The current mem host after add + assigning y is {}".format(id(y))
y = x + y
print "The current mem host after add y is {}".format(id(y))

``````
``````

The current mem host y is 4546129040
The current mem host after add + assigning y is 4546129040
The current mem host after add y is 4488497104

``````
``````

In [9]:

# Slicing: rows 1-2 of y, then the same rows restricted to column 1.
print y
print y[1:3]      # rows 1 and 2 (2x4)
print y[1:3,1:2]  # rows 1 and 2, column 1 only (2x1)

``````
``````

[[ 2.03629494  1.50975585  1.0498209   2.03751945]
[ 1.27015364 -0.04010558  3.482131    3.04082799]
[ 1.54743135  2.31160426  1.16326213  1.21169949]]
<NDArray 3x4 @cpu(0)>

[[ 1.27015364 -0.04010558  3.482131    3.04082799]
[ 1.54743135  2.31160426  1.16326213  1.21169949]]
<NDArray 2x4 @cpu(0)>

[[-0.04010558]
[ 2.31160426]]
<NDArray 2x1 @cpu(0)>

``````
``````

In [10]:

# Writing via indexing: first a single element, then a whole sub-matrix.
print x
x[1,2] = 9      # set one element: row 1, column 2
print x
x[1:2,1:3] = 5  # broadcast the scalar 5 over the 1x2 slice
print x

``````
``````

[[ 1.  1.  1.  1.]
[ 1.  1.  1.  1.]
[ 1.  1.  1.  1.]]
<NDArray 3x4 @cpu(0)>

[[ 1.  1.  1.  1.]
[ 1.  1.  9.  1.]
[ 1.  1.  1.  1.]]
<NDArray 3x4 @cpu(0)>

[[ 1.  1.  1.  1.]
[ 1.  5.  5.  1.]
[ 1.  1.  1.  1.]]
<NDArray 3x4 @cpu(0)>

``````

#### Broadcasting

``````

In [11]:

# Broadcasting: the length-3 vector y is expanded along a new leading axis
# so it can be added row-by-row to the 3x3 matrix x.
x = nd.ones(shape=(3,3))
print('x = ', x)
y = nd.arange(3)
print('y = ', y)
print('x + y = ', x + y)

``````
``````

('x = ',
[[ 1.  1.  1.]
[ 1.  1.  1.]
[ 1.  1.  1.]]
<NDArray 3x3 @cpu(0)>)
('y = ',
[ 0.  1.  2.]
<NDArray 3 @cpu(0)>)
('x + y = ',
[[ 1.  2.  3.]
[ 1.  2.  3.]
[ 1.  2.  3.]]
<NDArray 3x3 @cpu(0)>)

``````

#### NDArray <-> NumPy

``````

In [12]:

# Converting between MXNet NDArray and NumPy ndarray (each conversion copies).
a = x.asnumpy()
print "The type of a is {}".format(type(a))
y = nd.array(a)
# FIXME: the message below says "a" but actually prints type(y);
# it should read "The type of y is {}" (output transcript reflects the typo).
print "The type of a is {}".format(type(y))

``````
``````

The type of a is <type 'numpy.ndarray'>
The type of a is <class 'mxnet.ndarray.ndarray.NDArray'>

``````

### Dealing with data on the GPU

``````

In [13]:

# Allocate directly on the GPU (commented out: requires a CUDA-enabled
# MXNet build and an available GPU device).
# z = nd.ones(shape=(3,3), ctx=mx.gpu(0))
# z

``````
``````

In [14]:

# Copy an existing CPU array into GPU memory (commented out: requires a GPU).
# x_gpu = x.copyto(mx.gpu(0))
# print(x_gpu)

``````

## Scalars, Vectors, Matrices, Tensors

``````

In [15]:

# Scalars are represented as 1-element NDArrays; the usual arithmetic
# operators work on them element-wise.
x = nd.array([3.0])
y = nd.array([2.0])
print 'x + y = ', x + y
print 'x * y = ', x * y
print 'x / y = ', x / y
print 'x ** y = ', nd.power(x,y)  # exponentiation via nd.power

``````
``````

x + y =
[ 5.]
<NDArray 1 @cpu(0)>
x * y =
[ 6.]
<NDArray 1 @cpu(0)>
x / y =
[ 1.5]
<NDArray 1 @cpu(0)>
x ** y =
[ 9.]
<NDArray 1 @cpu(0)>

``````
``````

In [16]:

# Convert a 1-element NDArray to a plain Python scalar.
x.asscalar()

``````
``````

Out[16]:

3.0

``````
``````

In [17]:

# Vectors: indexing, length, shape, and scalar multiplication / addition.
u = nd.arange(4)
print('u = ', u)
print u[3]     # indexing returns a 1-element NDArray, not a Python number
print len(u)   # number of elements along the first axis
print u.shape

a = 2
x = nd.array([1,2,3])
y = nd.array([10,20,30])
print(a * x)      # scalar * vector
print(a * x + y)  # scalar * vector, then vector + vector

``````
``````

('u = ',
[ 0.  1.  2.  3.]
<NDArray 4 @cpu(0)>)

[ 3.]
<NDArray 1 @cpu(0)>
4
(4L,)

[ 2.  4.  6.]
<NDArray 3 @cpu(0)>

[ 12.  24.  36.]
<NDArray 3 @cpu(0)>

``````
``````

In [18]:

# Matrices: reshape a length-20 range into 5x4, then index, slice, transpose.
x = nd.arange(20)
A = x.reshape((5, 4))
print A
print 'A[2, 3] = ', A[2, 3]  # single element (returned as 1-element NDArray)

print('row 2', A[2, :])      # one full row (length 4)
print('column 3', A[:, 3])   # one full column (length 5)
print A.T                    # transpose: 5x4 -> 4x5

``````
``````

[[  0.   1.   2.   3.]
[  4.   5.   6.   7.]
[  8.   9.  10.  11.]
[ 12.  13.  14.  15.]
[ 16.  17.  18.  19.]]
<NDArray 5x4 @cpu(0)>
A[2, 3] =
[ 11.]
<NDArray 1 @cpu(0)>
('row 2',
[  8.   9.  10.  11.]
<NDArray 4 @cpu(0)>)
('column 3',
[  3.   7.  11.  15.  19.]
<NDArray 5 @cpu(0)>)

[[  0.   4.   8.  12.  16.]
[  1.   5.   9.  13.  17.]
[  2.   6.  10.  14.  18.]
[  3.   7.  11.  15.  19.]]
<NDArray 4x5 @cpu(0)>

``````
``````

In [19]:

# Tensors: arrays with more than two axes (here 2x3x4).
X = nd.arange(24).reshape((2, 3, 4))
print 'X.shape =', X.shape
print 'X =', X

``````
``````

X.shape = (2L, 3L, 4L)
X =
[[[  0.   1.   2.   3.]
[  4.   5.   6.   7.]
[  8.   9.  10.  11.]]

[[ 12.  13.  14.  15.]
[ 16.  17.  18.  19.]
[ 20.  21.  22.  23.]]]
<NDArray 2x3x4 @cpu(0)>

``````
``````

In [20]:

# Element-wise vector arithmetic, reductions (sum / mean), and dot products.
u = nd.array([1, 2, 4, 8])
v = nd.ones_like(u) * 2  # vector of twos with the same shape as u
print 'v =', v
print 'u + v', u + v
print 'u - v', u - v
print 'u * v', u * v
print 'u / v', u / v
print nd.sum(u)
print nd.mean(u)
print nd.sum(u) / u.size  # mean computed manually; matches nd.mean(u)
print nd.dot(u, v)
print nd.sum(u * v)       # dot product computed manually; matches nd.dot(u, v)

``````
``````

v =
[ 2.  2.  2.  2.]
<NDArray 4 @cpu(0)>
u + v
[  3.   4.   6.  10.]
<NDArray 4 @cpu(0)>
u - v
[-1.  0.  2.  6.]
<NDArray 4 @cpu(0)>
u * v
[  2.   4.   8.  16.]
<NDArray 4 @cpu(0)>
u / v
[ 0.5  1.   2.   4. ]
<NDArray 4 @cpu(0)>

[ 15.]
<NDArray 1 @cpu(0)>

[ 3.75]
<NDArray 1 @cpu(0)>

[ 3.75]
<NDArray 1 @cpu(0)>

[ 30.]
<NDArray 1 @cpu(0)>

[ 30.]
<NDArray 1 @cpu(0)>

``````
``````

In [21]:

# Matrix-vector product: A (5x4) . u (4,) -> (5,),
# and matrix-matrix product: A . A.T -> (5x5).
print nd.dot(A, u)
print nd.dot(A, A.T)

``````
``````

[  34.   94.  154.  214.  274.]
<NDArray 5 @cpu(0)>

[[   14.    38.    62.    86.   110.]
[   38.   126.   214.   302.   390.]
[   62.   214.   366.   518.   670.]
[   86.   302.   518.   734.   950.]
[  110.   390.   670.   950.  1230.]]
<NDArray 5x5 @cpu(0)>

``````
``````

In [22]:

# Norms: the L2 norm via nd.norm and computed manually, then the L1 norm.
print nd.norm(u)
print nd.sqrt(nd.sum(u**2))  # same value as nd.norm(u)
print nd.sum(nd.abs(u))      # L1 norm: sum of absolute values

``````
``````

[ 9.21954441]
<NDArray 1 @cpu(0)>

[ 9.21954441]
<NDArray 1 @cpu(0)>

[ 15.]
<NDArray 1 @cpu(0)>

``````

### TODO

``````Additive axioms
Multiplicative axioms
Distributive axioms``````
``````

In [ ]:

``````