In [5]:
import numpy as np
from sklearn.datasets import load_boston
from sklearn.utils import shuffle, resample

In [6]:
class Node(object):
    """
    Base class for nodes in the network.

    Arguments:

        `inbound_nodes`: A list of nodes with edges into this node.
    """
    def __init__(self, inbound_nodes=None):
        """
        Node's constructor (runs when the object is instantiated). Sets
        properties that all nodes need.
        """
        # Avoid the mutable-default-argument pitfall by creating
        # a fresh list for each instance.
        if inbound_nodes is None:
            inbound_nodes = []
        # A list of nodes with edges into this node.
        self.inbound_nodes = inbound_nodes
        # The eventual value of this node. Set by running
        # the forward() method.
        self.value = None
        # A list of nodes that this node outputs to.
        self.outbound_nodes = []
        # Keys are the inputs to this node and the values are
        # the partials of this node with respect to that input.
        self.gradients = {}
        # Sets this node as an outbound node for all of
        # this node's inputs.
        for node in inbound_nodes:
            node.outbound_nodes.append(self)

    def forward(self):
        """
        Every node that uses this class as a base class will
        need to define its own `forward` method.
        """
        raise NotImplementedError

    def backward(self):
        """
        Every node that uses this class as a base class will
        need to define its own `backward` method.
        """
        raise NotImplementedError


class Input(Node):
    """
    A generic input into the network.
    """
    def __init__(self):
        # The base class constructor has to run to set all
        # the properties here.
        #
        # The most important property on an Input is value.
        # self.value is set during `topological_sort` later.
        Node.__init__(self)

    def forward(self):
        # Do nothing because nothing is calculated.
        pass

    def backward(self):
        # An Input node has no inputs so the gradient (derivative)
        # is zero.
        # The key, `self`, is a reference to this object.
        self.gradients = {self: 0}
        # Weights and biases may be Inputs, so sum the gradients
        # from all of the outbound nodes.
        for n in self.outbound_nodes:
            self.gradients[self] += n.gradients[self]

class Linear(Node):
    """
    Represents a node that performs a linear transform.
    """
    def __init__(self, X, W, b):
        # The base class (Node) constructor. Weights and bias
        # are treated like inbound nodes.
        Node.__init__(self, [X, W, b])

    def forward(self):
        """
        Performs the math behind a linear transform.
        """
        X = self.inbound_nodes[0].value
        W = self.inbound_nodes[1].value
        b = self.inbound_nodes[2].value
        self.value = np.dot(X, W) + b

    def backward(self):
        """
        Calculates the gradient based on the output values.
        """
        # Initialize a partial for each of the inbound_nodes.
        self.gradients = {n: np.zeros_like(n.value) for n in self.inbound_nodes}
        # Cycle through the outputs. The gradient will change depending
        # on each output, so the gradients are summed over all outputs.
        for n in self.outbound_nodes:
            # Get the partial of the cost with respect to this node.
            grad_cost = n.gradients[self]
            # Set the partial of the cost with respect to this node's inputs.
            self.gradients[self.inbound_nodes[0]] += np.dot(grad_cost, self.inbound_nodes[1].value.T)
            # Set the partial of the cost with respect to this node's weights.
            self.gradients[self.inbound_nodes[1]] += np.dot(self.inbound_nodes[0].value.T, grad_cost)
            # Set the partial of the cost with respect to this node's bias.
            self.gradients[self.inbound_nodes[2]] += np.sum(grad_cost, axis=0, keepdims=False)
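
# Shape check for Linear.backward: with X of shape (m, n_in), W of shape
# (n_in, n_out), and grad_cost of shape (m, n_out), the products above give
#   dC/dX = grad_cost . W^T          -> shape (m, n_in)
#   dC/dW = X^T . grad_cost          -> shape (n_in, n_out)
#   dC/db = sum(grad_cost, axis=0)   -> shape (n_out,)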


class Sigmoid(Node):
    """
    Represents a node that performs the sigmoid activation function.
    """
    def __init__(self, node):
        # The base class constructor.
        Node.__init__(self, [node])

    def _sigmoid(self, x):
        """
        This method is separate from `forward` because it
        will be used with `backward` as well.

        `x`: A numpy array-like object.
        """
        return 1. / (1. + np.exp(-x))

    def forward(self):
        """
        Perform the sigmoid function and set the value.
        """
        input_value = self.inbound_nodes[0].value
        self.value = self._sigmoid(input_value)

    def backward(self):
        """
        Calculates the gradient using the derivative of
        the sigmoid function.
        """
        # Initialize the gradients to 0.
        self.gradients = {n: np.zeros_like(n.value) for n in self.inbound_nodes}
        # Sum the partial with respect to the input over all the outputs.
        for n in self.outbound_nodes:
            grad_cost = n.gradients[self]
            sigmoid = self.value
            self.gradients[self.inbound_nodes[0]] += sigmoid * (1 - sigmoid) * grad_cost
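
# With s = sigmoid(x), the derivative is ds/dx = s * (1 - s), which is
# why `backward` reuses the cached forward value `self.value` above.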


class MSE(Node):
    def __init__(self, y, a):
        """
        The mean squared error cost function.
        Should be used as the last node for a network.
        """
        # Call the base class' constructor.
        Node.__init__(self, [y, a])

    def forward(self):
        """
        Calculates the mean squared error.
        """
        # NOTE: We reshape these to avoid possible matrix/vector broadcast
        # errors.
        #
        # For example, if we subtract an array of shape (3,) from an array
        # of shape (3,1) we get an array of shape (3,3) as the result when
        # we want an array of shape (3,1) instead.
        #
        # Making both arrays (3,1) ensures the result is (3,1) and does
        # an elementwise subtraction as expected.
        y = self.inbound_nodes[0].value.reshape(-1, 1)
        a = self.inbound_nodes[1].value.reshape(-1, 1)

        self.m = self.inbound_nodes[0].value.shape[0]
        # Save the computed output for backward.
        self.diff = y - a
        self.value = np.mean(self.diff**2)

    def backward(self):
        """
        Calculates the gradient of the cost.
        """
        self.gradients[self.inbound_nodes[0]] = (2 / self.m) * self.diff
        self.gradients[self.inbound_nodes[1]] = (-2 / self.m) * self.diff
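
# Derivation: C = (1/m) * sum((y - a)**2), so
#   dC/dy =  (2/m) * (y - a)
#   dC/da = -(2/m) * (y - a)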


def topological_sort(feed_dict):
    """
    Sort the nodes in topological order using Kahn's Algorithm.

    `feed_dict`: A dictionary where the key is an `Input` Node and the value is the respective value fed to that Node.

    Returns a list of sorted nodes.
    """

    input_nodes = [n for n in feed_dict.keys()]

    G = {}
    nodes = [n for n in input_nodes]
    while len(nodes) > 0:
        n = nodes.pop(0)
        if n not in G:
            G[n] = {'in': set(), 'out': set()}
        for m in n.outbound_nodes:
            if m not in G:
                G[m] = {'in': set(), 'out': set()}
            G[n]['out'].add(m)
            G[m]['in'].add(n)
            nodes.append(m)

    L = []
    S = set(input_nodes)
    while len(S) > 0:
        n = S.pop()

        if isinstance(n, Input):
            n.value = feed_dict[n]

        L.append(n)
        for m in n.outbound_nodes:
            G[n]['out'].remove(m)
            G[m]['in'].remove(n)
            # if no other incoming edges add to S
            if len(G[m]['in']) == 0:
                S.add(m)
    return L
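
# For the network built below (X -> Linear -> Sigmoid -> Linear -> MSE <- y),
# any valid ordering places the Input nodes first and MSE last; Kahn's
# algorithm repeatedly emits a node once all its incoming edges are consumed.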


def forward_and_backward(graph):
    """
    Performs a forward pass and a backward pass through a list of sorted Nodes.

    Arguments:

        `graph`: The result of calling `topological_sort`.
    """
    # Forward pass
    for n in graph:
        n.forward()

    # Backward pass
    # graph[::-1] iterates over the sorted nodes in reverse order.
    # see: https://docs.python.org/2.3/whatsnew/section-slices.html
    for n in graph[::-1]:
        n.backward()


def sgd_update(trainables, learning_rate=5e-4):
    """
    Updates the value of each trainable with SGD.

    Arguments:

        `trainables`: A list of `Input` Nodes representing weights/biases.
        `learning_rate`: The learning rate.
    """
    # Each trainable stores its own partial under its own key,
    # so the update rule is:
    #   t.value <- t.value - learning_rate * t.gradients[t]
    for n in trainables:
        n.value -= learning_rate * n.gradients[n]
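
A quick sanity check of the implementation above: the sketch below compares the analytic gradients produced by `backward` against central finite differences on a tiny Linear -> Sigmoid -> MSE graph. The toy shapes and epsilon are arbitrary choices, not part of the original network.

In [ ]:
# Gradient check: numeric vs. analytic partials of the cost w.r.t. W.
X, y = Input(), Input()
W, b = Input(), Input()
cost = MSE(y, Sigmoid(Linear(X, W, b)))

feed = {
    X: np.random.randn(4, 3),
    y: np.random.randn(4),
    W: np.random.randn(3, 1),
    b: np.zeros(1),
}
graph = topological_sort(feed)
forward_and_backward(graph)

eps = 1e-6
num_grad = np.zeros_like(W.value)
for i in range(W.value.shape[0]):
    for j in range(W.value.shape[1]):
        # Central difference: perturb one weight up and down.
        W.value[i, j] += eps
        for n in graph:
            n.forward()
        c_plus = cost.value
        W.value[i, j] -= 2 * eps
        for n in graph:
            n.forward()
        c_minus = cost.value
        W.value[i, j] += eps
        num_grad[i, j] = (c_plus - c_minus) / (2 * eps)

# The maximum difference should be tiny (on the order of 1e-8 or less).
print(np.max(np.abs(num_grad - W.gradients[W])))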

In [7]:
# Load data
# NOTE: `load_boston` was removed in scikit-learn 1.2; running this cell
# as-is requires an older scikit-learn release.
data = load_boston()
X_ = data['data']
y_ = data['target']

# Normalize data
X_ = (X_ - np.mean(X_, axis=0)) / np.std(X_, axis=0)

n_features = X_.shape[1]
n_hidden = 10
W1_ = np.random.randn(n_features, n_hidden)
b1_ = np.zeros(n_hidden)
W2_ = np.random.randn(n_hidden, 1)
b2_ = np.zeros(1)

# Neural network
X, y = Input(), Input()
W1, b1 = Input(), Input()
W2, b2 = Input(), Input()

l1 = Linear(X, W1, b1)
s1 = Sigmoid(l1)
l2 = Linear(s1, W2, b2)
cost = MSE(y, l2)

feed_dict = {
    X: X_,
    y: y_,
    W1: W1_,
    b1: b1_,
    W2: W2_,
    b2: b2_
}

epochs = 1000
# Total number of examples
m = X_.shape[0]
batch_size = 11
steps_per_epoch = m // batch_size

graph = topological_sort(feed_dict)
trainables = [W1, b1, W2, b2]

print("Total number of examples = {}".format(m))

# Step 4: repeat steps 1-3 for a fixed number of epochs
for i in range(epochs):
    loss = 0
    for j in range(steps_per_epoch):
        # Step 1: randomly sample a batch of examples
        X_batch, y_batch = resample(X_, y_, n_samples=batch_size)

        # Reset value of X and y Inputs
        X.value = X_batch
        y.value = y_batch

        # Step 2: run the forward and backward passes
        forward_and_backward(graph)

        # Step 3: update the trainable parameters with SGD
        sgd_update(trainables)

        loss += graph[-1].value

    print("Epoch: {}, Loss: {:.3f}".format(i+1, loss/steps_per_epoch))


Total number of examples = 506
Epoch: 1, Loss: 609.784
Epoch: 2, Loss: 469.687
Epoch: 3, Loss: 364.749
Epoch: 4, Loss: 296.176
Epoch: 5, Loss: 236.254
Epoch: 6, Loss: 194.610
Epoch: 7, Loss: 137.410
Epoch: 8, Loss: 136.365
Epoch: 9, Loss: 119.273
Epoch: 10, Loss: 97.105
Epoch: 11, Loss: 71.320
Epoch: 12, Loss: 78.834
Epoch: 13, Loss: 61.899
Epoch: 14, Loss: 53.806
Epoch: 15, Loss: 66.151
Epoch: 16, Loss: 70.449
Epoch: 17, Loss: 59.936
Epoch: 18, Loss: 63.733
Epoch: 19, Loss: 50.076
Epoch: 20, Loss: 45.429
Epoch: 21, Loss: 51.093
Epoch: 22, Loss: 44.838
Epoch: 23, Loss: 53.843
Epoch: 24, Loss: 51.727
Epoch: 25, Loss: 38.654
Epoch: 26, Loss: 47.621
Epoch: 27, Loss: 49.691
Epoch: 28, Loss: 41.360
Epoch: 29, Loss: 48.414
Epoch: 30, Loss: 43.245
Epoch: 31, Loss: 33.826
Epoch: 32, Loss: 36.589
Epoch: 33, Loss: 43.423
Epoch: 34, Loss: 38.756
Epoch: 35, Loss: 31.914
Epoch: 36, Loss: 38.415
Epoch: 37, Loss: 40.637
Epoch: 38, Loss: 41.472
Epoch: 39, Loss: 37.896
Epoch: 40, Loss: 43.201
Epoch: 41, Loss: 32.265
Epoch: 42, Loss: 36.142
Epoch: 43, Loss: 36.586
Epoch: 44, Loss: 38.064
Epoch: 45, Loss: 36.978
Epoch: 46, Loss: 37.351
Epoch: 47, Loss: 31.249
Epoch: 48, Loss: 31.961
Epoch: 49, Loss: 39.759
Epoch: 50, Loss: 33.755
Epoch: 51, Loss: 39.475
Epoch: 52, Loss: 32.057
Epoch: 53, Loss: 39.204
Epoch: 54, Loss: 29.556
Epoch: 55, Loss: 32.608
Epoch: 56, Loss: 35.760
Epoch: 57, Loss: 37.631
Epoch: 58, Loss: 30.584
Epoch: 59, Loss: 27.034
Epoch: 60, Loss: 34.759
Epoch: 61, Loss: 33.544
Epoch: 62, Loss: 32.726
Epoch: 63, Loss: 36.642
Epoch: 64, Loss: 27.252
Epoch: 65, Loss: 32.419
Epoch: 66, Loss: 35.366
Epoch: 67, Loss: 26.953
Epoch: 68, Loss: 29.402
Epoch: 69, Loss: 28.098
Epoch: 70, Loss: 31.241
Epoch: 71, Loss: 28.842
Epoch: 72, Loss: 27.687
Epoch: 73, Loss: 31.397
Epoch: 74, Loss: 27.035
Epoch: 75, Loss: 31.534
Epoch: 76, Loss: 25.513
Epoch: 77, Loss: 34.148
Epoch: 78, Loss: 23.323
Epoch: 79, Loss: 24.711
Epoch: 80, Loss: 25.235
Epoch: 81, Loss: 27.777
Epoch: 82, Loss: 21.189
Epoch: 83, Loss: 25.329
Epoch: 84, Loss: 26.739
Epoch: 85, Loss: 27.165
Epoch: 86, Loss: 29.571
Epoch: 87, Loss: 26.986
Epoch: 88, Loss: 25.983
Epoch: 89, Loss: 24.591
Epoch: 90, Loss: 24.995
Epoch: 91, Loss: 24.975
Epoch: 92, Loss: 24.115
Epoch: 93, Loss: 19.932
Epoch: 94, Loss: 27.245
Epoch: 95, Loss: 19.855
Epoch: 96, Loss: 22.936
Epoch: 97, Loss: 24.502
Epoch: 98, Loss: 26.808
Epoch: 99, Loss: 24.741
Epoch: 100, Loss: 24.010
Epoch: 101, Loss: 28.058
Epoch: 102, Loss: 21.597
Epoch: 103, Loss: 25.176
Epoch: 104, Loss: 25.710
Epoch: 105, Loss: 21.867
Epoch: 106, Loss: 25.582
Epoch: 107, Loss: 20.906
Epoch: 108, Loss: 18.531
Epoch: 109, Loss: 26.955
Epoch: 110, Loss: 28.787
Epoch: 111, Loss: 19.312
Epoch: 112, Loss: 20.427
Epoch: 113, Loss: 17.283
Epoch: 114, Loss: 16.187
Epoch: 115, Loss: 18.881
Epoch: 116, Loss: 23.239
Epoch: 117, Loss: 20.073
Epoch: 118, Loss: 20.564
Epoch: 119, Loss: 22.370
Epoch: 120, Loss: 21.803
Epoch: 121, Loss: 19.900
Epoch: 122, Loss: 18.632
Epoch: 123, Loss: 22.986
Epoch: 124, Loss: 24.703
Epoch: 125, Loss: 25.598
Epoch: 126, Loss: 23.114
Epoch: 127, Loss: 22.790
Epoch: 128, Loss: 19.870
Epoch: 129, Loss: 21.759
Epoch: 130, Loss: 22.237
Epoch: 131, Loss: 13.414
Epoch: 132, Loss: 21.846
Epoch: 133, Loss: 20.732
Epoch: 134, Loss: 20.066
Epoch: 135, Loss: 20.139
Epoch: 136, Loss: 24.379
Epoch: 137, Loss: 15.724
Epoch: 138, Loss: 18.923
Epoch: 139, Loss: 20.920
Epoch: 140, Loss: 21.819
Epoch: 141, Loss: 21.349
Epoch: 142, Loss: 22.067
Epoch: 143, Loss: 19.220
Epoch: 144, Loss: 17.828
Epoch: 145, Loss: 22.617
Epoch: 146, Loss: 24.007
Epoch: 147, Loss: 22.729
Epoch: 148, Loss: 16.325
Epoch: 149, Loss: 19.810
Epoch: 150, Loss: 18.404
Epoch: 151, Loss: 18.642
Epoch: 152, Loss: 19.806
Epoch: 153, Loss: 18.241
Epoch: 154, Loss: 19.708
Epoch: 155, Loss: 14.712
Epoch: 156, Loss: 17.858
Epoch: 157, Loss: 18.586
Epoch: 158, Loss: 18.551
Epoch: 159, Loss: 14.377
Epoch: 160, Loss: 15.910
Epoch: 161, Loss: 12.883
Epoch: 162, Loss: 17.328
Epoch: 163, Loss: 14.389
Epoch: 164, Loss: 19.230
Epoch: 165, Loss: 14.509
Epoch: 166, Loss: 19.546
Epoch: 167, Loss: 18.490
Epoch: 168, Loss: 15.317
Epoch: 169, Loss: 19.304
Epoch: 170, Loss: 18.417
Epoch: 171, Loss: 15.289
Epoch: 172, Loss: 16.253
Epoch: 173, Loss: 16.980
Epoch: 174, Loss: 13.637
Epoch: 175, Loss: 16.499
Epoch: 176, Loss: 18.687
Epoch: 177, Loss: 16.126
Epoch: 178, Loss: 17.141
Epoch: 179, Loss: 13.913
Epoch: 180, Loss: 17.867
Epoch: 181, Loss: 16.881
Epoch: 182, Loss: 15.429
Epoch: 183, Loss: 16.580
Epoch: 184, Loss: 12.568
Epoch: 185, Loss: 16.852
Epoch: 186, Loss: 12.574
Epoch: 187, Loss: 18.401
Epoch: 188, Loss: 18.202
Epoch: 189, Loss: 14.491
Epoch: 190, Loss: 13.404
Epoch: 191, Loss: 13.028
Epoch: 192, Loss: 13.445
Epoch: 193, Loss: 13.672
Epoch: 194, Loss: 15.452
Epoch: 195, Loss: 14.758
Epoch: 196, Loss: 21.897
Epoch: 197, Loss: 17.044
Epoch: 198, Loss: 16.139
Epoch: 199, Loss: 20.258
Epoch: 200, Loss: 16.756
Epoch: 201, Loss: 16.825
Epoch: 202, Loss: 20.395
Epoch: 203, Loss: 14.187
Epoch: 204, Loss: 16.336
Epoch: 205, Loss: 13.575
Epoch: 206, Loss: 15.839
Epoch: 207, Loss: 15.053
Epoch: 208, Loss: 20.249
Epoch: 209, Loss: 18.290
Epoch: 210, Loss: 14.584
Epoch: 211, Loss: 15.315
Epoch: 212, Loss: 13.752
Epoch: 213, Loss: 18.308
Epoch: 214, Loss: 17.191
Epoch: 215, Loss: 15.914
Epoch: 216, Loss: 13.949
Epoch: 217, Loss: 15.757
Epoch: 218, Loss: 11.701
Epoch: 219, Loss: 17.981
Epoch: 220, Loss: 15.847
Epoch: 221, Loss: 13.740
Epoch: 222, Loss: 15.323
Epoch: 223, Loss: 14.210
Epoch: 224, Loss: 17.335
Epoch: 225, Loss: 12.560
Epoch: 226, Loss: 13.391
Epoch: 227, Loss: 19.155
Epoch: 228, Loss: 15.324
Epoch: 229, Loss: 13.665
Epoch: 230, Loss: 17.549
Epoch: 231, Loss: 15.366
Epoch: 232, Loss: 14.613
Epoch: 233, Loss: 16.968
Epoch: 234, Loss: 18.838
Epoch: 235, Loss: 13.221
Epoch: 236, Loss: 14.038
Epoch: 237, Loss: 12.125
Epoch: 238, Loss: 15.753
Epoch: 239, Loss: 11.614
Epoch: 240, Loss: 15.858
Epoch: 241, Loss: 16.211
Epoch: 242, Loss: 12.643
Epoch: 243, Loss: 13.369
Epoch: 244, Loss: 12.609
Epoch: 245, Loss: 13.995
Epoch: 246, Loss: 15.597
Epoch: 247, Loss: 17.136
Epoch: 248, Loss: 14.690
Epoch: 249, Loss: 11.924
Epoch: 250, Loss: 14.166
Epoch: 251, Loss: 11.878
Epoch: 252, Loss: 13.547
Epoch: 253, Loss: 15.914
Epoch: 254, Loss: 14.672
Epoch: 255, Loss: 13.131
Epoch: 256, Loss: 10.746
Epoch: 257, Loss: 15.150
Epoch: 258, Loss: 15.546
Epoch: 259, Loss: 12.389
Epoch: 260, Loss: 12.133
Epoch: 261, Loss: 14.973
Epoch: 262, Loss: 13.464
Epoch: 263, Loss: 18.110
Epoch: 264, Loss: 12.525
Epoch: 265, Loss: 12.463
Epoch: 266, Loss: 12.797
Epoch: 267, Loss: 13.682
Epoch: 268, Loss: 12.560
Epoch: 269, Loss: 13.177
Epoch: 270, Loss: 13.146
Epoch: 271, Loss: 13.401
Epoch: 272, Loss: 10.374
Epoch: 273, Loss: 12.080
Epoch: 274, Loss: 14.153
Epoch: 275, Loss: 12.173
Epoch: 276, Loss: 13.068
Epoch: 277, Loss: 13.585
Epoch: 278, Loss: 12.998
Epoch: 279, Loss: 12.497
Epoch: 280, Loss: 13.548
Epoch: 281, Loss: 12.467
Epoch: 282, Loss: 13.587
Epoch: 283, Loss: 13.644
Epoch: 284, Loss: 12.823
Epoch: 285, Loss: 11.914
Epoch: 286, Loss: 12.992
Epoch: 287, Loss: 12.341
Epoch: 288, Loss: 13.132
Epoch: 289, Loss: 13.482
Epoch: 290, Loss: 13.651
Epoch: 291, Loss: 12.785
Epoch: 292, Loss: 16.197
Epoch: 293, Loss: 14.055
Epoch: 294, Loss: 11.930
Epoch: 295, Loss: 11.235
Epoch: 296, Loss: 11.741
Epoch: 297, Loss: 12.361
Epoch: 298, Loss: 12.574
Epoch: 299, Loss: 14.668
Epoch: 300, Loss: 10.918
Epoch: 301, Loss: 10.992
Epoch: 302, Loss: 12.425
Epoch: 303, Loss: 11.751
Epoch: 304, Loss: 11.125
Epoch: 305, Loss: 14.298
Epoch: 306, Loss: 16.114
Epoch: 307, Loss: 10.924
Epoch: 308, Loss: 14.130
Epoch: 309, Loss: 11.486
Epoch: 310, Loss: 13.798
Epoch: 311, Loss: 10.031
Epoch: 312, Loss: 10.877
Epoch: 313, Loss: 12.907
Epoch: 314, Loss: 13.898
Epoch: 315, Loss: 12.846
Epoch: 316, Loss: 14.170
Epoch: 317, Loss: 13.516
Epoch: 318, Loss: 12.263
Epoch: 319, Loss: 10.308
Epoch: 320, Loss: 13.829
Epoch: 321, Loss: 11.313
Epoch: 322, Loss: 10.873
Epoch: 323, Loss: 11.143
Epoch: 324, Loss: 12.079
Epoch: 325, Loss: 14.934
Epoch: 326, Loss: 11.073
Epoch: 327, Loss: 10.180
Epoch: 328, Loss: 13.611
Epoch: 329, Loss: 10.260
Epoch: 330, Loss: 11.130
Epoch: 331, Loss: 9.323
Epoch: 332, Loss: 11.011
Epoch: 333, Loss: 12.345
Epoch: 334, Loss: 12.905
Epoch: 335, Loss: 12.158
Epoch: 336, Loss: 13.069
Epoch: 337, Loss: 11.142
Epoch: 338, Loss: 9.327
Epoch: 339, Loss: 9.638
Epoch: 340, Loss: 12.325
Epoch: 341, Loss: 12.673
Epoch: 342, Loss: 12.627
Epoch: 343, Loss: 13.059
Epoch: 344, Loss: 11.799
Epoch: 345, Loss: 11.848
Epoch: 346, Loss: 11.362
Epoch: 347, Loss: 11.626
Epoch: 348, Loss: 10.395
Epoch: 349, Loss: 12.155
Epoch: 350, Loss: 12.161
Epoch: 351, Loss: 11.648
Epoch: 352, Loss: 11.435
Epoch: 353, Loss: 10.642
Epoch: 354, Loss: 11.631
Epoch: 355, Loss: 13.176
Epoch: 356, Loss: 10.587
Epoch: 357, Loss: 9.168
Epoch: 358, Loss: 12.900
Epoch: 359, Loss: 10.485
Epoch: 360, Loss: 12.252
Epoch: 361, Loss: 13.208
Epoch: 362, Loss: 9.230
Epoch: 363, Loss: 12.863
Epoch: 364, Loss: 9.755
Epoch: 365, Loss: 9.533
Epoch: 366, Loss: 14.445
Epoch: 367, Loss: 10.886
Epoch: 368, Loss: 10.291
Epoch: 369, Loss: 11.890
Epoch: 370, Loss: 12.273
Epoch: 371, Loss: 9.339
Epoch: 372, Loss: 9.872
Epoch: 373, Loss: 10.904
Epoch: 374, Loss: 10.378
Epoch: 375, Loss: 10.258
Epoch: 376, Loss: 11.302
Epoch: 377, Loss: 9.407
Epoch: 378, Loss: 9.818
Epoch: 379, Loss: 8.640
Epoch: 380, Loss: 11.186
Epoch: 381, Loss: 10.735
Epoch: 382, Loss: 12.179
Epoch: 383, Loss: 10.684
Epoch: 384, Loss: 12.089
Epoch: 385, Loss: 11.742
Epoch: 386, Loss: 9.896
Epoch: 387, Loss: 9.127
Epoch: 388, Loss: 11.973
Epoch: 389, Loss: 10.722
Epoch: 390, Loss: 9.625
Epoch: 391, Loss: 8.925
Epoch: 392, Loss: 9.789
Epoch: 393, Loss: 11.049
Epoch: 394, Loss: 9.966
Epoch: 395, Loss: 10.185
Epoch: 396, Loss: 12.132
Epoch: 397, Loss: 10.455
Epoch: 398, Loss: 13.030
Epoch: 399, Loss: 9.964
Epoch: 400, Loss: 12.169
Epoch: 401, Loss: 11.190
Epoch: 402, Loss: 10.380
Epoch: 403, Loss: 11.722
Epoch: 404, Loss: 11.592
Epoch: 405, Loss: 10.094
Epoch: 406, Loss: 11.316
Epoch: 407, Loss: 10.143
Epoch: 408, Loss: 9.688
Epoch: 409, Loss: 10.777
Epoch: 410, Loss: 9.570
Epoch: 411, Loss: 12.548
Epoch: 412, Loss: 10.968
Epoch: 413, Loss: 11.563
Epoch: 414, Loss: 9.252
Epoch: 415, Loss: 11.610
Epoch: 416, Loss: 11.180
Epoch: 417, Loss: 10.301
Epoch: 418, Loss: 11.349
Epoch: 419, Loss: 9.259
Epoch: 420, Loss: 9.960
Epoch: 421, Loss: 10.832
Epoch: 422, Loss: 12.907
Epoch: 423, Loss: 9.043
Epoch: 424, Loss: 11.421
Epoch: 425, Loss: 10.948
Epoch: 426, Loss: 9.809
Epoch: 427, Loss: 9.176
Epoch: 428, Loss: 9.324
Epoch: 429, Loss: 10.833
Epoch: 430, Loss: 10.102
Epoch: 431, Loss: 10.053
Epoch: 432, Loss: 9.769
Epoch: 433, Loss: 11.055
Epoch: 434, Loss: 9.576
Epoch: 435, Loss: 10.935
Epoch: 436, Loss: 10.815
Epoch: 437, Loss: 12.684
Epoch: 438, Loss: 11.356
Epoch: 439, Loss: 9.778
Epoch: 440, Loss: 9.376
Epoch: 441, Loss: 9.658
Epoch: 442, Loss: 9.521
Epoch: 443, Loss: 9.173
Epoch: 444, Loss: 12.848
Epoch: 445, Loss: 10.126
Epoch: 446, Loss: 11.150
Epoch: 447, Loss: 9.308
Epoch: 448, Loss: 9.019
Epoch: 449, Loss: 8.595
Epoch: 450, Loss: 10.089
Epoch: 451, Loss: 10.461
Epoch: 452, Loss: 10.392
Epoch: 453, Loss: 11.134
Epoch: 454, Loss: 9.963
Epoch: 455, Loss: 10.824
Epoch: 456, Loss: 10.610
Epoch: 457, Loss: 13.157
Epoch: 458, Loss: 10.133
Epoch: 459, Loss: 9.332
Epoch: 460, Loss: 10.180
Epoch: 461, Loss: 10.788
Epoch: 462, Loss: 9.748
Epoch: 463, Loss: 12.082
Epoch: 464, Loss: 10.613
Epoch: 465, Loss: 10.398
Epoch: 466, Loss: 8.606
Epoch: 467, Loss: 9.463
Epoch: 468, Loss: 10.597
Epoch: 469, Loss: 10.812
Epoch: 470, Loss: 10.026
Epoch: 471, Loss: 9.299
Epoch: 472, Loss: 8.480
Epoch: 473, Loss: 8.875
Epoch: 474, Loss: 10.080
Epoch: 475, Loss: 12.386
Epoch: 476, Loss: 9.504
Epoch: 477, Loss: 10.287
Epoch: 478, Loss: 10.444
Epoch: 479, Loss: 9.111
Epoch: 480, Loss: 8.464
Epoch: 481, Loss: 8.768
Epoch: 482, Loss: 11.351
Epoch: 483, Loss: 12.963
Epoch: 484, Loss: 8.045
Epoch: 485, Loss: 7.711
Epoch: 486, Loss: 11.005
Epoch: 487, Loss: 10.317
Epoch: 488, Loss: 8.706
Epoch: 489, Loss: 9.334
Epoch: 490, Loss: 9.073
Epoch: 491, Loss: 8.841
Epoch: 492, Loss: 10.627
Epoch: 493, Loss: 8.761
Epoch: 494, Loss: 10.584
Epoch: 495, Loss: 8.951
Epoch: 496, Loss: 9.791
Epoch: 497, Loss: 10.664
Epoch: 498, Loss: 9.387
Epoch: 499, Loss: 10.886
Epoch: 500, Loss: 8.788
Epoch: 501, Loss: 10.730
Epoch: 502, Loss: 10.352
Epoch: 503, Loss: 8.321
Epoch: 504, Loss: 10.317
Epoch: 505, Loss: 8.862
Epoch: 506, Loss: 7.680
Epoch: 507, Loss: 10.918
Epoch: 508, Loss: 10.978
Epoch: 509, Loss: 10.479
Epoch: 510, Loss: 11.507
Epoch: 511, Loss: 8.432
Epoch: 512, Loss: 9.791
Epoch: 513, Loss: 9.848
Epoch: 514, Loss: 11.886
Epoch: 515, Loss: 10.084
Epoch: 516, Loss: 9.772
Epoch: 517, Loss: 7.086
Epoch: 518, Loss: 9.871
Epoch: 519, Loss: 9.401
Epoch: 520, Loss: 9.843
Epoch: 521, Loss: 8.577
Epoch: 522, Loss: 9.067
Epoch: 523, Loss: 8.369
Epoch: 524, Loss: 9.668
Epoch: 525, Loss: 8.351
Epoch: 526, Loss: 9.509
Epoch: 527, Loss: 8.468
Epoch: 528, Loss: 11.494
Epoch: 529, Loss: 9.744
Epoch: 530, Loss: 10.380
Epoch: 531, Loss: 9.111
Epoch: 532, Loss: 11.386
Epoch: 533, Loss: 9.739
Epoch: 534, Loss: 9.862
Epoch: 535, Loss: 9.815
Epoch: 536, Loss: 11.541
Epoch: 537, Loss: 8.443
Epoch: 538, Loss: 9.323
Epoch: 539, Loss: 9.219
Epoch: 540, Loss: 8.317
Epoch: 541, Loss: 11.293
Epoch: 542, Loss: 9.235
Epoch: 543, Loss: 9.839
Epoch: 544, Loss: 10.152
Epoch: 545, Loss: 9.150
Epoch: 546, Loss: 8.877
Epoch: 547, Loss: 9.435
Epoch: 548, Loss: 10.037
Epoch: 549, Loss: 8.626
Epoch: 550, Loss: 10.658
Epoch: 551, Loss: 9.016
Epoch: 552, Loss: 7.414
Epoch: 553, Loss: 8.590
Epoch: 554, Loss: 7.774
Epoch: 555, Loss: 11.621
Epoch: 556, Loss: 9.786
Epoch: 557, Loss: 8.577
Epoch: 558, Loss: 9.646
Epoch: 559, Loss: 8.506
Epoch: 560, Loss: 7.613
Epoch: 561, Loss: 9.729
Epoch: 562, Loss: 8.445
Epoch: 563, Loss: 8.059
Epoch: 564, Loss: 8.933
Epoch: 565, Loss: 7.987
Epoch: 566, Loss: 9.164
Epoch: 567, Loss: 10.760
Epoch: 568, Loss: 7.497
Epoch: 569, Loss: 7.876
Epoch: 570, Loss: 10.480
Epoch: 571, Loss: 9.260
Epoch: 572, Loss: 9.231
Epoch: 573, Loss: 8.460
Epoch: 574, Loss: 7.609
Epoch: 575, Loss: 8.824
Epoch: 576, Loss: 10.155
Epoch: 577, Loss: 8.658
Epoch: 578, Loss: 7.120
Epoch: 579, Loss: 7.807
Epoch: 580, Loss: 9.999
Epoch: 581, Loss: 10.515
Epoch: 582, Loss: 7.374
Epoch: 583, Loss: 9.143
Epoch: 584, Loss: 8.321
Epoch: 585, Loss: 11.881
Epoch: 586, Loss: 9.435
Epoch: 587, Loss: 10.302
Epoch: 588, Loss: 8.447
Epoch: 589, Loss: 9.750
Epoch: 590, Loss: 8.685
Epoch: 591, Loss: 9.876
Epoch: 592, Loss: 7.517
Epoch: 593, Loss: 8.687
Epoch: 594, Loss: 10.173
Epoch: 595, Loss: 8.048
Epoch: 596, Loss: 9.509
Epoch: 597, Loss: 8.338
Epoch: 598, Loss: 10.647
Epoch: 599, Loss: 9.691
Epoch: 600, Loss: 8.348
Epoch: 601, Loss: 9.362
Epoch: 602, Loss: 10.938
Epoch: 603, Loss: 8.587
Epoch: 604, Loss: 9.222
Epoch: 605, Loss: 7.817
Epoch: 606, Loss: 8.472
Epoch: 607, Loss: 8.127
Epoch: 608, Loss: 9.962
Epoch: 609, Loss: 11.122
Epoch: 610, Loss: 9.620
Epoch: 611, Loss: 9.250
Epoch: 612, Loss: 10.057
Epoch: 613, Loss: 6.583
Epoch: 614, Loss: 10.276
Epoch: 615, Loss: 9.565
Epoch: 616, Loss: 9.931
Epoch: 617, Loss: 8.636
Epoch: 618, Loss: 7.287
Epoch: 619, Loss: 8.496
Epoch: 620, Loss: 9.233
Epoch: 621, Loss: 9.610
Epoch: 622, Loss: 9.848
Epoch: 623, Loss: 8.675
Epoch: 624, Loss: 8.987
Epoch: 625, Loss: 7.254
Epoch: 626, Loss: 7.770
Epoch: 627, Loss: 9.355
Epoch: 628, Loss: 9.609
Epoch: 629, Loss: 8.334
Epoch: 630, Loss: 7.894
Epoch: 631, Loss: 8.599
Epoch: 632, Loss: 10.529
Epoch: 633, Loss: 10.180
Epoch: 634, Loss: 10.127
Epoch: 635, Loss: 9.185
Epoch: 636, Loss: 8.752
Epoch: 637, Loss: 7.595
Epoch: 638, Loss: 10.944
Epoch: 639, Loss: 7.299
Epoch: 640, Loss: 9.267
Epoch: 641, Loss: 8.631
Epoch: 642, Loss: 9.477
Epoch: 643, Loss: 10.781
Epoch: 644, Loss: 11.723
Epoch: 645, Loss: 9.230
Epoch: 646, Loss: 8.905
Epoch: 647, Loss: 7.994
Epoch: 648, Loss: 9.310
Epoch: 649, Loss: 8.770
Epoch: 650, Loss: 7.843
Epoch: 651, Loss: 9.337
Epoch: 652, Loss: 7.976
Epoch: 653, Loss: 8.940
Epoch: 654, Loss: 7.008
Epoch: 655, Loss: 7.579
Epoch: 656, Loss: 9.748
Epoch: 657, Loss: 9.899
Epoch: 658, Loss: 8.027
Epoch: 659, Loss: 8.532
Epoch: 660, Loss: 7.731
Epoch: 661, Loss: 9.462
Epoch: 662, Loss: 8.045
Epoch: 663, Loss: 8.402
Epoch: 664, Loss: 9.499
Epoch: 665, Loss: 9.361
Epoch: 666, Loss: 7.439
Epoch: 667, Loss: 7.797
Epoch: 668, Loss: 9.302
Epoch: 669, Loss: 8.648
Epoch: 670, Loss: 8.983
Epoch: 671, Loss: 7.358
Epoch: 672, Loss: 10.633
Epoch: 673, Loss: 7.646
Epoch: 674, Loss: 6.911
Epoch: 675, Loss: 6.449
Epoch: 676, Loss: 8.550
Epoch: 677, Loss: 8.838
Epoch: 678, Loss: 6.218
Epoch: 679, Loss: 8.468
Epoch: 680, Loss: 6.660
Epoch: 681, Loss: 10.331
Epoch: 682, Loss: 8.910
Epoch: 683, Loss: 7.791
Epoch: 684, Loss: 10.205
Epoch: 685, Loss: 8.257
Epoch: 686, Loss: 8.421
Epoch: 687, Loss: 11.655
Epoch: 688, Loss: 8.067
Epoch: 689, Loss: 9.625
Epoch: 690, Loss: 9.505
Epoch: 691, Loss: 6.792
Epoch: 692, Loss: 9.756
Epoch: 693, Loss: 9.408
Epoch: 694, Loss: 8.625
Epoch: 695, Loss: 7.862
Epoch: 696, Loss: 10.440
Epoch: 697, Loss: 9.673
Epoch: 698, Loss: 10.444
Epoch: 699, Loss: 9.451
Epoch: 700, Loss: 7.483
Epoch: 701, Loss: 10.710
Epoch: 702, Loss: 9.428
Epoch: 703, Loss: 8.877
Epoch: 704, Loss: 9.045
Epoch: 705, Loss: 7.360
Epoch: 706, Loss: 7.379
Epoch: 707, Loss: 8.926
Epoch: 708, Loss: 8.222
Epoch: 709, Loss: 9.776
Epoch: 710, Loss: 9.393
Epoch: 711, Loss: 7.428
Epoch: 712, Loss: 8.774
Epoch: 713, Loss: 10.828
Epoch: 714, Loss: 8.956
Epoch: 715, Loss: 8.775
Epoch: 716, Loss: 7.403
Epoch: 717, Loss: 9.157
Epoch: 718, Loss: 8.551
Epoch: 719, Loss: 9.715
Epoch: 720, Loss: 10.683
Epoch: 721, Loss: 9.028
Epoch: 722, Loss: 7.312
Epoch: 723, Loss: 9.418
Epoch: 724, Loss: 7.262
Epoch: 725, Loss: 10.243
Epoch: 726, Loss: 7.823
Epoch: 727, Loss: 8.155
Epoch: 728, Loss: 8.687
Epoch: 729, Loss: 8.112
Epoch: 730, Loss: 8.040
Epoch: 731, Loss: 7.413
Epoch: 732, Loss: 6.991
Epoch: 733, Loss: 8.418
Epoch: 734, Loss: 8.044
Epoch: 735, Loss: 7.508
Epoch: 736, Loss: 8.494
Epoch: 737, Loss: 8.743
Epoch: 738, Loss: 8.063
Epoch: 739, Loss: 8.457
Epoch: 740, Loss: 8.525
Epoch: 741, Loss: 8.669
Epoch: 742, Loss: 9.131
Epoch: 743, Loss: 7.681
Epoch: 744, Loss: 8.122
Epoch: 745, Loss: 8.764
Epoch: 746, Loss: 9.061
Epoch: 747, Loss: 8.001
Epoch: 748, Loss: 8.572
Epoch: 749, Loss: 7.139
Epoch: 750, Loss: 8.241
Epoch: 751, Loss: 7.827
Epoch: 752, Loss: 8.803
Epoch: 753, Loss: 9.118
Epoch: 754, Loss: 7.353
Epoch: 755, Loss: 7.478
Epoch: 756, Loss: 7.995
Epoch: 757, Loss: 9.185
Epoch: 758, Loss: 6.530
Epoch: 759, Loss: 8.160
Epoch: 760, Loss: 7.977
Epoch: 761, Loss: 8.554
Epoch: 762, Loss: 8.881
Epoch: 763, Loss: 8.807
Epoch: 764, Loss: 9.878
Epoch: 765, Loss: 8.260
Epoch: 766, Loss: 8.698
Epoch: 767, Loss: 7.374
Epoch: 768, Loss: 9.274
Epoch: 769, Loss: 7.371
Epoch: 770, Loss: 8.961
Epoch: 771, Loss: 7.708
Epoch: 772, Loss: 9.289
Epoch: 773, Loss: 9.079
Epoch: 774, Loss: 7.663
Epoch: 775, Loss: 7.644
Epoch: 776, Loss: 8.763
Epoch: 777, Loss: 8.068
Epoch: 778, Loss: 8.937
Epoch: 779, Loss: 8.375
Epoch: 780, Loss: 9.291
Epoch: 781, Loss: 6.663
Epoch: 782, Loss: 7.823
Epoch: 783, Loss: 8.298
Epoch: 784, Loss: 9.521
Epoch: 785, Loss: 8.198
Epoch: 786, Loss: 9.235
Epoch: 787, Loss: 9.118
Epoch: 788, Loss: 9.751
Epoch: 789, Loss: 8.015
Epoch: 790, Loss: 9.767
Epoch: 791, Loss: 6.608
Epoch: 792, Loss: 7.503
Epoch: 793, Loss: 7.094
Epoch: 794, Loss: 8.387
Epoch: 795, Loss: 8.936
Epoch: 796, Loss: 7.740
Epoch: 797, Loss: 8.609
Epoch: 798, Loss: 7.734
Epoch: 799, Loss: 8.190
Epoch: 800, Loss: 6.361
Epoch: 801, Loss: 9.471
Epoch: 802, Loss: 10.893
Epoch: 803, Loss: 5.991
Epoch: 804, Loss: 7.112
Epoch: 805, Loss: 7.561
Epoch: 806, Loss: 7.167
Epoch: 807, Loss: 10.030
Epoch: 808, Loss: 8.105
Epoch: 809, Loss: 6.770
Epoch: 810, Loss: 7.033
Epoch: 811, Loss: 6.908
Epoch: 812, Loss: 8.267
Epoch: 813, Loss: 8.024
Epoch: 814, Loss: 8.818
Epoch: 815, Loss: 9.167
Epoch: 816, Loss: 9.759
Epoch: 817, Loss: 7.988
Epoch: 818, Loss: 8.810
Epoch: 819, Loss: 6.912
Epoch: 820, Loss: 7.716
Epoch: 821, Loss: 7.950
Epoch: 822, Loss: 8.301
Epoch: 823, Loss: 8.163
Epoch: 824, Loss: 8.600
Epoch: 825, Loss: 8.915
Epoch: 826, Loss: 8.001
Epoch: 827, Loss: 8.138
Epoch: 828, Loss: 7.817
Epoch: 829, Loss: 6.920
Epoch: 830, Loss: 9.420
Epoch: 831, Loss: 8.142
Epoch: 832, Loss: 8.021
Epoch: 833, Loss: 8.588
Epoch: 834, Loss: 7.778
Epoch: 835, Loss: 8.279
Epoch: 836, Loss: 8.381
Epoch: 837, Loss: 8.777
Epoch: 838, Loss: 6.865
Epoch: 839, Loss: 9.262
Epoch: 840, Loss: 7.418
Epoch: 841, Loss: 8.173
Epoch: 842, Loss: 9.512
Epoch: 843, Loss: 5.637
Epoch: 844, Loss: 7.578
Epoch: 845, Loss: 6.720
Epoch: 846, Loss: 8.357
Epoch: 847, Loss: 7.080
Epoch: 848, Loss: 7.210
Epoch: 849, Loss: 10.263
Epoch: 850, Loss: 9.338
Epoch: 851, Loss: 9.095
Epoch: 852, Loss: 8.307
Epoch: 853, Loss: 8.665
Epoch: 854, Loss: 7.415
Epoch: 855, Loss: 7.465
Epoch: 856, Loss: 8.170
Epoch: 857, Loss: 7.187
Epoch: 858, Loss: 8.383
Epoch: 859, Loss: 7.132
Epoch: 860, Loss: 7.657
Epoch: 861, Loss: 6.820
Epoch: 862, Loss: 9.561
Epoch: 863, Loss: 8.070
Epoch: 864, Loss: 7.541
Epoch: 865, Loss: 8.557
Epoch: 866, Loss: 8.367
Epoch: 867, Loss: 8.717
Epoch: 868, Loss: 6.740
Epoch: 869, Loss: 7.767
Epoch: 870, Loss: 7.469
Epoch: 871, Loss: 8.057
Epoch: 872, Loss: 8.468
Epoch: 873, Loss: 8.006
Epoch: 874, Loss: 7.335
Epoch: 875, Loss: 9.895
Epoch: 876, Loss: 8.237
Epoch: 877, Loss: 7.638
Epoch: 878, Loss: 8.172
Epoch: 879, Loss: 7.554
Epoch: 880, Loss: 7.437
Epoch: 881, Loss: 7.168
Epoch: 882, Loss: 7.875
Epoch: 883, Loss: 8.318
Epoch: 884, Loss: 7.740
Epoch: 885, Loss: 8.977
Epoch: 886, Loss: 7.216
Epoch: 887, Loss: 8.210
Epoch: 888, Loss: 7.150
Epoch: 889, Loss: 7.314
Epoch: 890, Loss: 8.279
Epoch: 891, Loss: 7.208
Epoch: 892, Loss: 8.141
Epoch: 893, Loss: 9.156
Epoch: 894, Loss: 7.258
Epoch: 895, Loss: 8.390
Epoch: 896, Loss: 8.745
Epoch: 897, Loss: 6.874
Epoch: 898, Loss: 7.365
Epoch: 899, Loss: 6.588
Epoch: 900, Loss: 7.150
Epoch: 901, Loss: 8.208
Epoch: 902, Loss: 8.846
Epoch: 903, Loss: 8.522
Epoch: 904, Loss: 7.346
Epoch: 905, Loss: 8.683
Epoch: 906, Loss: 7.621
Epoch: 907, Loss: 8.235
Epoch: 908, Loss: 7.191
Epoch: 909, Loss: 8.813
Epoch: 910, Loss: 6.416
Epoch: 911, Loss: 7.089
Epoch: 912, Loss: 8.980
Epoch: 913, Loss: 7.357
Epoch: 914, Loss: 8.544
Epoch: 915, Loss: 6.217
Epoch: 916, Loss: 6.082
Epoch: 917, Loss: 7.205
Epoch: 918, Loss: 7.429
Epoch: 919, Loss: 7.510
Epoch: 920, Loss: 8.646
Epoch: 921, Loss: 7.108
Epoch: 922, Loss: 6.898
Epoch: 923, Loss: 9.079
Epoch: 924, Loss: 7.708
Epoch: 925, Loss: 7.621
Epoch: 926, Loss: 8.630
Epoch: 927, Loss: 9.293
Epoch: 928, Loss: 8.053
Epoch: 929, Loss: 7.324
Epoch: 930, Loss: 7.611
Epoch: 931, Loss: 6.817
Epoch: 932, Loss: 7.941
Epoch: 933, Loss: 6.336
Epoch: 934, Loss: 6.806
Epoch: 935, Loss: 8.053
Epoch: 936, Loss: 7.744
Epoch: 937, Loss: 7.603
Epoch: 938, Loss: 6.472
Epoch: 939, Loss: 8.127
Epoch: 940, Loss: 6.360
Epoch: 941, Loss: 6.920
Epoch: 942, Loss: 7.092
Epoch: 943, Loss: 6.999
Epoch: 944, Loss: 7.198
Epoch: 945, Loss: 7.295
Epoch: 946, Loss: 7.186
Epoch: 947, Loss: 6.968
Epoch: 948, Loss: 9.313
Epoch: 949, Loss: 7.264
Epoch: 950, Loss: 7.237
Epoch: 951, Loss: 8.049
Epoch: 952, Loss: 7.851
Epoch: 953, Loss: 8.985
Epoch: 954, Loss: 7.631
Epoch: 955, Loss: 7.262
Epoch: 956, Loss: 5.928
Epoch: 957, Loss: 8.027
Epoch: 958, Loss: 6.983
Epoch: 959, Loss: 7.819
Epoch: 960, Loss: 7.030
Epoch: 961, Loss: 9.556
Epoch: 962, Loss: 10.308
Epoch: 963, Loss: 7.251
Epoch: 964, Loss: 7.369
Epoch: 965, Loss: 7.508
Epoch: 966, Loss: 7.635
Epoch: 967, Loss: 7.824
Epoch: 968, Loss: 7.258
Epoch: 969, Loss: 8.047
Epoch: 970, Loss: 7.424
Epoch: 971, Loss: 7.237
Epoch: 972, Loss: 6.208
Epoch: 973, Loss: 9.539
Epoch: 974, Loss: 7.002
Epoch: 975, Loss: 6.268
Epoch: 976, Loss: 8.287
Epoch: 977, Loss: 5.763
Epoch: 978, Loss: 6.266
Epoch: 979, Loss: 6.889
Epoch: 980, Loss: 5.691
Epoch: 981, Loss: 8.012
Epoch: 982, Loss: 6.665
Epoch: 983, Loss: 9.121
Epoch: 984, Loss: 8.156
Epoch: 985, Loss: 7.903
Epoch: 986, Loss: 7.430
Epoch: 987, Loss: 6.759
Epoch: 988, Loss: 6.377
Epoch: 989, Loss: 7.000
Epoch: 990, Loss: 7.169
Epoch: 991, Loss: 5.761
Epoch: 992, Loss: 6.888
Epoch: 993, Loss: 7.052
Epoch: 994, Loss: 6.207
Epoch: 995, Loss: 8.187
Epoch: 996, Loss: 7.376
Epoch: 997, Loss: 9.196
Epoch: 998, Loss: 6.608
Epoch: 999, Loss: 7.355
Epoch: 1000, Loss: 8.478
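
With training finished, a forward-only pass over the full dataset reports the final training MSE. This is a minimal sketch that reuses `graph` and the trained weights from above; note it evaluates on the training data, not a held-out set.

In [ ]:
# Evaluate the trained network: a forward pass alone is enough here,
# since no gradients are needed.
X.value, y.value = X_, y_
for n in graph:
    n.forward()
print("Final training MSE: {:.3f}".format(graph[-1].value))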