In [1]:
import sys
import time

sys.path.append('..')

import numpy as np

from deepgraph.utils.logging import log
from deepgraph.utils.common import batch_parallel, ConfigMixin, shuffle_in_unison_inplace, pickle_dump
from deepgraph.utils.image import batch_pad_mirror
from deepgraph.constants import *
from deepgraph.conf import rng

from deepgraph.pipeline import Processor, Packet


Using gpu device 0: GeForce GTX TITAN X (CNMeM is enabled with initial size: 90.0% of memory, CuDNN 3007)

  _____                _____                 _
 |  _  \              |  __ \               | |
 | | | |___  ___ _ __ | |  \/_ __ __ _ _ __ | |__
 | | | / _ \/ _ \ '_ \| | __| '__/ _` | '_ \| '_ \
 | |/ /  __/  __/ |_) | |_\ \ | | (_| | |_) | | | |
 |___/ \___|\___| .__/ \____/_|  \__,_| .__/|_| |_|
                | |                   | |
                |_|                   |_|


Available on GitHub: https://github.com/sebastian-schlecht/deepgraph


In [2]:
from deepgraph.nn.init import *
class Transformer(Processor):
    """
    Pipeline processor that applies online normalization / augmentation.

    Pulls ``(data, label)`` packets from the upstream stage, subtracts a
    precomputed mean image, optionally scales the data and shifts the
    labels, and pushes the transformed packet downstream.
    """
    def __init__(self, name, shapes, config, buffer_size=10):
        """
        Construct a new Transformer.

        :param name: String, name of this processor inside the pipeline
        :param shapes: Tuple of shapes for the (data, label) transfer buffers
        :param config: Dict, configuration values (see setup_defaults)
        :param buffer_size: Int, depth of the inter-processor queue
        """
        super(Transformer, self).__init__(name, shapes, config, buffer_size)
        # Mean image; loaded lazily in init() from the configured mean file
        self.mean = None

    def init(self):
        """Load the mean image from disk if a mean file is configured."""
        if self.conf("mean_file") is not None:
            self.mean = np.load(self.conf("mean_file"))
        else:
            log("Transformer - No mean file specified.", LOG_LEVEL_WARNING)

    def process(self):
        """
        Pull one packet, transform it and push the result downstream.

        :return: Bool, False when no packet was available, True otherwise
        """
        packet = self.pull()
        # Return if no data is there
        if not packet:
            return False
        # Unpack
        data, label = packet.data
        # Do processing
        log("Transformer - Processing data", LOG_LEVEL_VERBOSE)

        # Target spatial resolution.
        # NOTE(review): also hard-coded in build_u_graph's input shapes and in
        # the __main__ transfer_shape - keep these in sync.
        h = 240
        w = 320

        start = time.time()
        # Mean subtraction / scaling for both training and validation phases
        if packet.phase == PHASE_TRAIN or packet.phase == PHASE_VAL:
            data = data.astype(np.float32)
            if self.mean is not None:
                std = self.conf("std")
                mean = self.mean.astype(np.float32)
                for idx in range(data.shape[0]):
                    # Subtract mean
                    data[idx] -= mean
                    if std is not None:
                        # NOTE(review): this multiplies by "std"; for true
                        # standardization the caller must pass 1/std here.
                        data[idx] *= std
            if self.conf("offset") is not None:
                label -= self.conf("offset")

        if packet.phase == PHASE_TRAIN:
            # Elementwise augmentation. Rotation, random crops, horizontal
            # flips and RGB jitter were disabled in this experiment (the
            # original commented-out code drew random angles/crops here); the
            # loop now only copies samples into fixed-size target buffers.
            # BUGFIX: removed a leftover np.random.randint(-5, 6) draw per
            # sample that consumed RNG state without being used.
            data_old = data
            label_old = label
            data = np.zeros((data_old.shape[0], data_old.shape[1], h, w), dtype=np.float32)
            label = np.zeros((label_old.shape[0], h, w), dtype=np.float32)
            for idx in range(data.shape[0]):
                data[idx] = data_old[idx]
                label[idx] = label_old[idx]
            # Shuffling was also disabled:
            # data, label = shuffle_in_unison_inplace(data, label)

        elif packet.phase == PHASE_VAL:
            # Center crop disabled - data already has the target resolution
            pass

        end = time.time()
        log("Transformer - Processing took " + str(end - start) + " seconds.", LOG_LEVEL_VERBOSE)
        # Try to push into queue as long as thread should not terminate
        self.push(Packet(identifier=packet.id, phase=packet.phase, num=2, data=(data, label)))
        return True

    def setup_defaults(self):
        """Register configuration defaults for this processor."""
        super(Transformer, self).setup_defaults()
        self.conf_default("mean_file", None)   # Path to .npy mean image, or None
        self.conf_default("offset", None)      # Scalar subtracted from labels
        self.conf_default("std", 1.0)          # Scale factor applied to data

In [3]:
from theano.tensor.nnet import relu

from deepgraph.graph import *
from deepgraph.nn.core import *
from deepgraph.nn.conv import *
from deepgraph.nn.loss import *
from deepgraph.solver import *
from deepgraph.nn.init import *

from deepgraph.pipeline import Optimizer, H5DBLoader, Pipeline


def build_u_graph():
    """
    Construct the U-shaped depth-prediction graph (U-Net style).

    Contractive path: VGG-like pairs of 3x3 ReLU convolutions followed by
    2x2 pooling. Bottleneck: flatten -> two dense layers -> reshape back to
    (64, 15, 20) feature maps. Expansive path: repeated 2x upsampling, a
    linear 3x3 "merge" convolution, channel-wise concatenation with the
    matching contractive stage (skip connection), then two 3x3 ReLU
    convolutions. A final 1x1 linear convolution produces the depth map.

    :return: Graph, the fully wired (but not yet compiled) graph
    """
    graph = Graph("u_depth")

    def _conv_relu(name, channels):
        # Standard 3x3 ReLU conv with (1, 1) padding, used on both paths.
        return Conv2D(graph, name, config={
            "channels": channels,
            "kernel": (3, 3),
            "border_mode": (1, 1),
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        })

    def _conv_merge(name, channels):
        # 3x3 linear conv placed right after an upsample, before the skip
        # concat (no activation configured, plain xavier init).
        return Conv2D(graph, name, config={
            "channels": channels,
            "kernel": (3, 3),
            "border_mode": 1,
            "weight_filler": xavier(),
            "bias_filler": constant(0)
        })

    def _pool(name):
        # 2x2 max pooling stage.
        return Pool(graph, name, config={"kernel": (2, 2)})

    def _upsample(name, **extra):
        # 2x upsampling; extra config keys (e.g. mode) are merged in.
        cfg = {"kernel": (2, 2)}
        cfg.update(extra)
        return Upsample(graph, name, config=cfg)

    """
    Inputs
    """
    data = Data(graph, "data", T.ftensor4, shape=(-1, 3, 240, 320))
    label = Data(graph, "label", T.ftensor3, shape=(-1, 1, 240, 320), config={
        "phase": PHASE_TRAIN
    })

    """
    Contractive part
    """
    conv_1 = _conv_relu("conv_1", 64)
    conv_2 = _conv_relu("conv_2", 64)
    pool_2 = _pool("pool_2")
    conv_3 = _conv_relu("conv_3", 128)
    conv_4 = _conv_relu("conv_4", 128)
    pool_4 = _pool("pool_4")
    conv_5 = _conv_relu("conv_5", 256)
    conv_6 = _conv_relu("conv_6", 256)
    pool_6 = _pool("pool_6")
    conv_7 = _conv_relu("conv_7", 512)
    conv_8 = _conv_relu("conv_8", 512)
    pool_8 = _pool("pool_8")

    """
    Bottleneck: flatten -> dense -> dense -> reshape to (64, 15, 20) maps
    """
    fl = Flatten(graph, "fl", config={
        "dims": 2
    })
    fc_8 = Dense(graph, "fc_8", config={
        "out": 4096,
        "activation": relu,
        "weight_filler": xavier(),
        "bias_filler": constant(0.1)
    })
    dp_8 = Dropout(graph, "dp_8")
    fc_9 = Dense(graph, "fc_9", config={
        "out": 19200,  # 64 * 15 * 20, reshaped below
        "activation": relu,
        "weight_filler": xavier(),
        "bias_filler": constant(0.1)
    })
    dp_9 = Dropout(graph, "dp_9")
    rs_10 = Reshape(graph, "rs_10", config={
        "shape": (-1, 64, 15, 20)
    })

    """
    Expansive path
    """
    up_11 = _upsample("up_11")
    conv_11 = _conv_merge("conv_11", 512)
    conv_12 = _conv_relu("conv_12", 512)
    conv_13 = _conv_relu("conv_13", 512)
    up_14 = _upsample("up_14")
    conv_14 = _conv_merge("conv_14", 256)
    conv_15 = _conv_relu("conv_15", 256)
    conv_16 = _conv_relu("conv_16", 256)
    up_17 = _upsample("up_17")
    conv_17 = _conv_merge("conv_17", 128)
    conv_18 = _conv_relu("conv_18", 128)
    conv_19 = _conv_relu("conv_19", 128)
    up_20 = _upsample("up_20", mode="constant")
    conv_20 = _conv_merge("conv_20", 64)
    conv_21 = _conv_relu("conv_21", 64)
    conv_22 = _conv_relu("conv_22", 64)
    # Final 1x1 linear convolution producing the single-channel depth map
    conv_23 = Conv2D(
        graph,
        "conv_23",
        config={
            "channels": 1,
            "kernel": (1, 1),
            "activation": None,
            "weight_filler": xavier(),
            "bias_filler": constant(0)
        }
    )

    """
    Skip connections (channel-wise concatenation)
    """
    concat_20 = Concatenate(graph, "concat_20", config={
        "axis": 1
    })
    concat_17 = Concatenate(graph, "concat_17", config={
        "axis": 1
    })
    concat_14 = Concatenate(graph, "concat_14", config={
        "axis": 1
    })
    concat_11 = Concatenate(graph, "concat_11", config={
        "axis": 1
    })

    """
    Losses / Error
    """
    loss = EuclideanLoss(graph, "loss")

    error = MSE(graph, "mse", config={
        "root": True,
        "is_output": True,
        "phase": PHASE_TRAIN
    })

    """
    Make connections
    """
    data.connect(conv_1)
    conv_1.connect(conv_2)
    conv_2.connect(concat_20)
    conv_2.connect(pool_2)
    pool_2.connect(conv_3)
    conv_3.connect(conv_4)
    conv_4.connect(concat_17)
    conv_4.connect(pool_4)
    pool_4.connect(conv_5)
    conv_5.connect(conv_6)
    conv_6.connect(concat_14)
    conv_6.connect(pool_6)
    pool_6.connect(conv_7)
    conv_7.connect(conv_8)
    conv_8.connect(concat_11)
    conv_8.connect(pool_8)
    pool_8.connect(fl)
    fl.connect(fc_8)
    fc_8.connect(dp_8)
    dp_8.connect(fc_9)
    fc_9.connect(dp_9)
    dp_9.connect(rs_10)
    rs_10.connect(up_11)
    up_11.connect(conv_11)
    conv_11.connect(concat_11)
    concat_11.connect(conv_12)
    conv_12.connect(conv_13)
    conv_13.connect(up_14)
    up_14.connect(conv_14)
    conv_14.connect(concat_14)
    concat_14.connect(conv_15)
    conv_15.connect(conv_16)
    conv_16.connect(up_17)
    up_17.connect(conv_17)
    conv_17.connect(concat_17)
    concat_17.connect(conv_18)
    conv_18.connect(conv_19)
    conv_19.connect(up_20)
    up_20.connect(conv_20)
    conv_20.connect(concat_20)
    concat_20.connect(conv_21)
    conv_21.connect(conv_22)
    conv_22.connect(conv_23)

    conv_23.connect(loss)
    label.connect(loss)

    conv_23.connect(error)
    label.connect(error)

    return graph


if __name__ == "__main__":

    batch_size = 4
    chunk_size = 10 * batch_size
    # Shapes of the (data, label) buffers handed between pipeline stages
    transfer_shape = ((chunk_size, 3, 240, 320), (chunk_size, 240, 320))

    g = build_u_graph()

    # Build the training pipeline: HDF5 loader -> transformer -> optimizer
    # NOTE(review): absolute data paths below are machine-specific.
    db_loader = H5DBLoader("db", ((chunk_size, 3, 480, 640), (chunk_size, 1, 480, 640)), config={
        "db": "/home/ga29mix/nashome/data/nyu_depth_v2_combined_50.hdf5",
        # "db": '../data/nyu_depth_unet_large.hdf5',
        "key_data": "images",
        "key_label": "depths",
        "chunk_size": chunk_size
    })
    transformer = Transformer("tr", transfer_shape, config={
        # Measured empirically for the data-set
        # "offset": 2.7321029
        "mean_file": "/home/ga29mix/nashome/data/nyu_depth_v2_combined_50.npy",
    })
    optimizer = Optimizer("opt", g, transfer_shape, config={
        "batch_size": batch_size,
        "chunk_size": chunk_size,
        # BUGFIX: the original read `"learning_rate": w` - an undefined name
        # and a missing trailing comma (SyntaxError). Restored the value from
        # the adjacent commented-out line.
        "learning_rate": 0.000001,
        "momentum": 0.9,
        "weight_decay": 0.0005,
        "print_freq": 50,
        "save_freq": 3000,
        # "weights": "data/depth_pipeline_alexnet_test_noaug_iter_60000.zip",
        "save_prefix": "../data/vnet2"
    })

    p = Pipeline(config={
        "validation_frequency": 20,
        "cycles": 650
    })
    p.add(db_loader)
    p.add(transformer)
    p.add(optimizer)
    p.run()


[2016-04-15 12:06:06] INFO: H5DBLoader - Caching DB in memory
[2016-04-15 12:06:57] INFO: Pipeline - Starting computation
[2016-04-15 12:06:58] INFO: Graph - Setting up graph
[2016-04-15 12:06:58] INFO: Node - data has shape (-1, 3, 240, 320)
[2016-04-15 12:06:58] INFO: Node - label has shape (-1, 1, 240, 320)
[2016-04-15 12:06:58] INFO: Node - conv_1 has shape (-1, 64, 240, 320)
[2016-04-15 12:06:58] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - conv_2 has shape (-1, 64, 240, 320)
[2016-04-15 12:06:58] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - pool_2 has shape (-1, 64, 120, 160)
[2016-04-15 12:06:58] INFO: Pool - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - conv_3 has shape (-1, 128, 120, 160)
[2016-04-15 12:06:58] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - conv_4 has shape (-1, 128, 120, 160)
[2016-04-15 12:06:58] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - pool_4 has shape (-1, 128, 60, 80)
[2016-04-15 12:06:58] INFO: Pool - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - conv_5 has shape (-1, 256, 60, 80)
[2016-04-15 12:06:58] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - conv_6 has shape (-1, 256, 60, 80)
[2016-04-15 12:06:58] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:06:58] INFO: Node - pool_6 has shape (-1, 256, 30, 40)
[2016-04-15 12:06:58] INFO: Pool - Using DNN CUDA Module
[2016-04-15 12:08:09] INFO: Node - conv_7 has shape (-1, 512, 30, 40)
[2016-04-15 12:08:09] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:09] INFO: Node - conv_8 has shape (-1, 512, 30, 40)
[2016-04-15 12:08:09] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:09] INFO: Node - pool_8 has shape (-1, 512, 15, 20)
[2016-04-15 12:08:09] INFO: Pool - Using DNN CUDA Module
[2016-04-15 12:08:09] INFO: Node - fl has shape (-1, 153600)
[2016-04-15 12:08:42] INFO: Node - fc_8 has shape (-1, 4096)
[2016-04-15 12:08:42] INFO: Node - dp_8 has shape (-1, 4096)
[2016-04-15 12:08:46] INFO: Node - fc_9 has shape (-1, 19200)
[2016-04-15 12:08:46] INFO: Node - dp_9 has shape (-1, 19200)
[2016-04-15 12:08:46] INFO: Node - rs_10 has shape (-1, 64, 15, 20)
[2016-04-15 12:08:46] INFO: Node - up_11 has shape (-1, 64, 30, 40)
[2016-04-15 12:08:46] INFO: Node - conv_11 has shape (-1, 512, 30, 40)
[2016-04-15 12:08:46] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:46] INFO: Node - concat_11 has shape (-1, 1024, 30, 40)
[2016-04-15 12:08:46] INFO: Node - conv_12 has shape (-1, 512, 30, 40)
[2016-04-15 12:08:46] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - conv_13 has shape (-1, 512, 30, 40)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - up_14 has shape (-1, 512, 60, 80)
[2016-04-15 12:08:47] INFO: Node - conv_14 has shape (-1, 256, 60, 80)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - concat_14 has shape (-1, 512, 60, 80)
[2016-04-15 12:08:47] INFO: Node - conv_15 has shape (-1, 256, 60, 80)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - conv_16 has shape (-1, 256, 60, 80)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - up_17 has shape (-1, 256, 120, 160)
[2016-04-15 12:08:47] INFO: Node - conv_17 has shape (-1, 128, 120, 160)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - concat_17 has shape (-1, 256, 120, 160)
[2016-04-15 12:08:47] INFO: Node - conv_18 has shape (-1, 128, 120, 160)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - conv_19 has shape (-1, 128, 120, 160)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - up_20 has shape (-1, 128, 240, 320)
[2016-04-15 12:08:47] INFO: Node - conv_20 has shape (-1, 64, 240, 320)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - concat_20 has shape (-1, 128, 240, 320)
[2016-04-15 12:08:47] INFO: Node - conv_21 has shape (-1, 64, 240, 320)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - conv_22 has shape (-1, 64, 240, 320)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - conv_23 has shape (-1, 1, 240, 320)
[2016-04-15 12:08:47] INFO: Conv2D - Using DNN CUDA Module
[2016-04-15 12:08:47] INFO: Node - loss has shape (1,)
[2016-04-15 12:08:47] INFO: Node - mse has shape (1,)
[2016-04-15 12:09:06] INFO: Graph - Invoking Theano compiler
[2016-04-15 12:09:44] INFO: Optimizer - Compilation finished
[2016-04-15 12:10:34] INFO: Optimizer - Training score at iteration 50: {'loss': array(3.6898860931396484, dtype=float32), 'mse': array(1.9209076166152954, dtype=float32)}
[2016-04-15 12:11:26] INFO: Optimizer - Training score at iteration 100: {'loss': array(6.039698600769043, dtype=float32), 'mse': array(2.4575798511505127, dtype=float32)}
[2016-04-15 12:12:18] INFO: Optimizer - Training score at iteration 150: {'loss': array(4.121854305267334, dtype=float32), 'mse': array(2.0302350521087646, dtype=float32)}
[2016-04-15 12:13:10] INFO: Optimizer - Training score at iteration 200: {'loss': array(1.2761114835739136, dtype=float32), 'mse': array(1.1296510696411133, dtype=float32)}
[2016-04-15 12:13:31] INFO: Optimizer - Mean loss values for validation at iteration 200 is: {'loss': 3.9892375, 'mse': 1.9299775}
[2016-04-15 12:14:22] INFO: Optimizer - Training score at iteration 250: {'loss': array(1.8368662595748901, dtype=float32), 'mse': array(1.3553104400634766, dtype=float32)}
[2016-04-15 12:15:13] INFO: Optimizer - Training score at iteration 300: {'loss': array(3.3300416469573975, dtype=float32), 'mse': array(1.8248401880264282, dtype=float32)}
[2016-04-15 12:16:04] INFO: Optimizer - Training score at iteration 350: {'loss': array(2.932969093322754, dtype=float32), 'mse': array(1.712591290473938, dtype=float32)}
[2016-04-15 12:16:56] INFO: Optimizer - Training score at iteration 400: {'loss': array(5.348709583282471, dtype=float32), 'mse': array(2.312727689743042, dtype=float32)}
[2016-04-15 12:17:17] INFO: Optimizer - Mean loss values for validation at iteration 400 is: {'loss': 3.4877591, 'mse': 1.8196418}
[2016-04-15 12:18:08] INFO: Optimizer - Training score at iteration 450: {'loss': array(6.388795375823975, dtype=float32), 'mse': array(2.52760648727417, dtype=float32)}
[2016-04-15 12:18:59] INFO: Optimizer - Training score at iteration 500: {'loss': array(1.601336121559143, dtype=float32), 'mse': array(1.2654390335083008, dtype=float32)}
[2016-04-15 12:19:50] INFO: Optimizer - Training score at iteration 550: {'loss': array(2.7529220581054688, dtype=float32), 'mse': array(1.6591931581497192, dtype=float32)}
[2016-04-15 12:20:41] INFO: Optimizer - Training score at iteration 600: {'loss': array(3.1141138076782227, dtype=float32), 'mse': array(1.7646851539611816, dtype=float32)}
[2016-04-15 12:21:03] INFO: Optimizer - Mean loss values for validation at iteration 600 is: {'loss': 3.2145681, 'mse': 1.7353328}
[2016-04-15 12:21:54] INFO: Optimizer - Training score at iteration 650: {'loss': array(1.4677759408950806, dtype=float32), 'mse': array(1.2115180492401123, dtype=float32)}
[2016-04-15 12:22:45] INFO: Optimizer - Training score at iteration 700: {'loss': array(2.00187611579895, dtype=float32), 'mse': array(1.4148766994476318, dtype=float32)}
[2016-04-15 12:23:37] INFO: Optimizer - Training score at iteration 750: {'loss': array(3.4963295459747314, dtype=float32), 'mse': array(1.8698474168777466, dtype=float32)}
[2016-04-15 12:24:28] INFO: Optimizer - Training score at iteration 800: {'loss': array(2.919304609298706, dtype=float32), 'mse': array(1.708597183227539, dtype=float32)}
[2016-04-15 12:24:50] INFO: Optimizer - Mean loss values for validation at iteration 800 is: {'loss': 3.1505487, 'mse': 1.7089934}
[2016-04-15 12:25:41] INFO: Optimizer - Training score at iteration 850: {'loss': array(6.182797908782959, dtype=float32), 'mse': array(2.486523151397705, dtype=float32)}
[2016-04-15 12:26:32] INFO: Optimizer - Training score at iteration 900: {'loss': array(2.407020330429077, dtype=float32), 'mse': array(1.551457405090332, dtype=float32)}
[2016-04-15 12:27:24] INFO: Optimizer - Training score at iteration 950: {'loss': array(0.6334241032600403, dtype=float32), 'mse': array(0.7958794236183167, dtype=float32)}
[2016-04-15 12:28:36] INFO: Optimizer - Mean loss values for validation at iteration 999 is: {'loss': 3.0468802, 'mse': 1.6804588}
[2016-04-15 12:28:36] INFO: Optimizer - Training score at iteration 1000: {'loss': array(0.9695623517036438, dtype=float32), 'mse': array(0.9846635460853577, dtype=float32)}
[2016-04-15 12:29:28] INFO: Optimizer - Training score at iteration 1050: {'loss': array(2.4216063022613525, dtype=float32), 'mse': array(1.5561511516571045, dtype=float32)}
[2016-04-15 12:30:19] INFO: Optimizer - Training score at iteration 1100: {'loss': array(1.0969470739364624, dtype=float32), 'mse': array(1.0473524332046509, dtype=float32)}
[2016-04-15 12:31:10] INFO: Optimizer - Training score at iteration 1150: {'loss': array(3.954561233520508, dtype=float32), 'mse': array(1.9886078834533691, dtype=float32)}
[2016-04-15 12:32:22] INFO: Optimizer - Mean loss values for validation at iteration 1199 is: {'loss': 2.9987833, 'mse': 1.6660901}
[2016-04-15 12:32:23] INFO: Optimizer - Training score at iteration 1200: {'loss': array(1.3546457290649414, dtype=float32), 'mse': array(1.1638925075531006, dtype=float32)}
[2016-04-15 12:33:14] INFO: Optimizer - Training score at iteration 1250: {'loss': array(1.1276434659957886, dtype=float32), 'mse': array(1.0619055032730103, dtype=float32)}
[2016-04-15 12:34:06] INFO: Optimizer - Training score at iteration 1300: {'loss': array(1.1437352895736694, dtype=float32), 'mse': array(1.069455623626709, dtype=float32)}
[2016-04-15 12:34:57] INFO: Optimizer - Training score at iteration 1350: {'loss': array(0.9916593432426453, dtype=float32), 'mse': array(0.995820939540863, dtype=float32)}
[2016-04-15 12:36:09] INFO: Optimizer - Mean loss values for validation at iteration 1399 is: {'loss': 2.9808652, 'mse': 1.6721261}
[2016-04-15 12:36:10] INFO: Optimizer - Training score at iteration 1400: {'loss': array(5.2344818115234375, dtype=float32), 'mse': array(2.2878990173339844, dtype=float32)}
[2016-04-15 12:37:01] INFO: Optimizer - Training score at iteration 1450: {'loss': array(4.200952529907227, dtype=float32), 'mse': array(2.0496225357055664, dtype=float32)}
[2016-04-15 12:37:53] INFO: Optimizer - Training score at iteration 1500: {'loss': array(1.632554054260254, dtype=float32), 'mse': array(1.2777143716812134, dtype=float32)}
[2016-04-15 12:38:44] INFO: Optimizer - Training score at iteration 1550: {'loss': array(1.9021379947662354, dtype=float32), 'mse': array(1.3791801929473877, dtype=float32)}
[2016-04-15 12:39:56] INFO: Optimizer - Mean loss values for validation at iteration 1599 is: {'loss': 3.0878115, 'mse': 1.7135918}
[2016-04-15 12:39:57] INFO: Optimizer - Training score at iteration 1600: {'loss': array(1.6363474130630493, dtype=float32), 'mse': array(1.2791979312896729, dtype=float32)}
[2016-04-15 12:40:48] INFO: Optimizer - Training score at iteration 1650: {'loss': array(5.3735575675964355, dtype=float32), 'mse': array(2.3180935382843018, dtype=float32)}
[2016-04-15 12:41:40] INFO: Optimizer - Training score at iteration 1700: {'loss': array(3.662386417388916, dtype=float32), 'mse': array(1.9137362241744995, dtype=float32)}
[2016-04-15 12:42:31] INFO: Optimizer - Training score at iteration 1750: {'loss': array(0.9008821845054626, dtype=float32), 'mse': array(0.9491481184959412, dtype=float32)}
[2016-04-15 12:43:42] INFO: Optimizer - Mean loss values for validation at iteration 1798 is: {'loss': 2.9346809, 'mse': 1.6455784}
[2016-04-15 12:43:44] INFO: Optimizer - Training score at iteration 1800: {'loss': array(0.7771921753883362, dtype=float32), 'mse': array(0.8815850019454956, dtype=float32)}
[2016-04-15 12:44:35] INFO: Optimizer - Training score at iteration 1850: {'loss': array(3.7727890014648438, dtype=float32), 'mse': array(1.9423668384552002, dtype=float32)}
[2016-04-15 12:45:27] INFO: Optimizer - Training score at iteration 1900: {'loss': array(1.1306822299957275, dtype=float32), 'mse': array(1.0633354187011719, dtype=float32)}
[2016-04-15 12:46:18] INFO: Optimizer - Training score at iteration 1950: {'loss': array(4.445053577423096, dtype=float32), 'mse': array(2.1083295345306396, dtype=float32)}
[2016-04-15 12:47:29] INFO: Optimizer - Mean loss values for validation at iteration 1998 is: {'loss': 2.8823514, 'mse': 1.6310947}
[2016-04-15 12:47:31] INFO: Optimizer - Training score at iteration 2000: {'loss': array(1.00186288356781, dtype=float32), 'mse': array(1.0009310245513916, dtype=float32)}
[2016-04-15 12:48:22] INFO: Optimizer - Training score at iteration 2050: {'loss': array(3.3791234493255615, dtype=float32), 'mse': array(1.8382391929626465, dtype=float32)}
[2016-04-15 12:49:14] INFO: Optimizer - Training score at iteration 2100: {'loss': array(0.8788202404975891, dtype=float32), 'mse': array(0.937454104423523, dtype=float32)}
[2016-04-15 12:50:05] INFO: Optimizer - Training score at iteration 2150: {'loss': array(5.990590572357178, dtype=float32), 'mse': array(2.44756817817688, dtype=float32)}
[2016-04-15 12:51:16] INFO: Optimizer - Mean loss values for validation at iteration 2198 is: {'loss': 2.8677907, 'mse': 1.6253463}
[2016-04-15 12:51:18] INFO: Optimizer - Training score at iteration 2200: {'loss': array(4.118265151977539, dtype=float32), 'mse': array(2.029350996017456, dtype=float32)}
[2016-04-15 12:52:09] INFO: Optimizer - Training score at iteration 2250: {'loss': array(4.3525214195251465, dtype=float32), 'mse': array(2.0862696170806885, dtype=float32)}
[2016-04-15 12:53:01] INFO: Optimizer - Training score at iteration 2300: {'loss': array(5.397786617279053, dtype=float32), 'mse': array(2.3233137130737305, dtype=float32)}
[2016-04-15 12:53:52] INFO: Optimizer - Training score at iteration 2350: {'loss': array(0.8001124262809753, dtype=float32), 'mse': array(0.8944900035858154, dtype=float32)}
[2016-04-15 12:55:03] INFO: Optimizer - Mean loss values for validation at iteration 2398 is: {'loss': 2.8701539, 'mse': 1.6406822}
[2016-04-15 12:55:05] INFO: Optimizer - Training score at iteration 2400: {'loss': array(2.208056926727295, dtype=float32), 'mse': array(1.4859532117843628, dtype=float32)}
[2016-04-15 12:55:57] INFO: Optimizer - Training score at iteration 2450: {'loss': array(2.1976077556610107, dtype=float32), 'mse': array(1.4824330806732178, dtype=float32)}
[2016-04-15 12:56:48] INFO: Optimizer - Training score at iteration 2500: {'loss': array(0.7919110655784607, dtype=float32), 'mse': array(0.8898938298225403, dtype=float32)}
[2016-04-15 12:57:40] INFO: Optimizer - Training score at iteration 2550: {'loss': array(1.6744413375854492, dtype=float32), 'mse': array(1.2940020561218262, dtype=float32)}
[2016-04-15 12:58:50] INFO: Optimizer - Mean loss values for validation at iteration 2597 is: {'loss': 2.8931336, 'mse': 1.6250683}
[2016-04-15 12:58:53] INFO: Optimizer - Training score at iteration 2600: {'loss': array(1.6555432081222534, dtype=float32), 'mse': array(1.2866791486740112, dtype=float32)}
[2016-04-15 12:59:44] INFO: Optimizer - Training score at iteration 2650: {'loss': array(3.1531875133514404, dtype=float32), 'mse': array(1.7757216691970825, dtype=float32)}
[2016-04-15 13:00:36] INFO: Optimizer - Training score at iteration 2700: {'loss': array(2.457340955734253, dtype=float32), 'mse': array(1.5675908327102661, dtype=float32)}
[2016-04-15 13:01:27] INFO: Optimizer - Training score at iteration 2750: {'loss': array(2.7218410968780518, dtype=float32), 'mse': array(1.6498003005981445, dtype=float32)}
[2016-04-15 13:02:37] INFO: Optimizer - Mean loss values for validation at iteration 2797 is: {'loss': 2.8316016, 'mse': 1.6126974}
[2016-04-15 13:02:40] INFO: Optimizer - Training score at iteration 2800: {'loss': array(3.6440606117248535, dtype=float32), 'mse': array(1.9089422225952148, dtype=float32)}
[2016-04-15 13:03:31] INFO: Optimizer - Training score at iteration 2850: {'loss': array(1.7267907857894897, dtype=float32), 'mse': array(1.3140740394592285, dtype=float32)}
[2016-04-15 13:04:22] INFO: Optimizer - Training score at iteration 2900: {'loss': array(4.331826686859131, dtype=float32), 'mse': array(2.0813040733337402, dtype=float32)}
[2016-04-15 13:05:14] INFO: Optimizer - Training score at iteration 2950: {'loss': array(1.3107174634933472, dtype=float32), 'mse': array(1.1448657512664795, dtype=float32)}
[2016-04-15 13:06:24] INFO: Optimizer - Mean loss values for validation at iteration 2997 is: {'loss': 2.796242, 'mse': 1.6110734}
[2016-04-15 13:06:27] INFO: Optimizer - Training score at iteration 3000: {'loss': array(1.2418111562728882, dtype=float32), 'mse': array(1.114365816116333, dtype=float32)}
[2016-04-15 13:06:27] INFO: Optimizer - Saving intermediate model state
[2016-04-15 13:08:44] INFO: Graph - Model file saved as: ../data/vnet2_iter_3000.zip
[2016-04-15 13:09:43] INFO: Optimizer - Training score at iteration 3050: {'loss': array(1.6866401433944702, dtype=float32), 'mse': array(1.298707127571106, dtype=float32)}
[2016-04-15 13:10:36] INFO: Optimizer - Training score at iteration 3100: {'loss': array(5.160858631134033, dtype=float32), 'mse': array(2.27175235748291, dtype=float32)}
[2016-04-15 13:11:29] INFO: Optimizer - Training score at iteration 3150: {'loss': array(3.984471082687378, dtype=float32), 'mse': array(1.9961140155792236, dtype=float32)}
[2016-04-15 13:12:41] INFO: Optimizer - Mean loss values for validation at iteration 3197 is: {'loss': 2.7955372, 'mse': 1.6031873}
[2016-04-15 13:12:43] INFO: Optimizer - Training score at iteration 3200: {'loss': array(1.746590495109558, dtype=float32), 'mse': array(1.3215863704681396, dtype=float32)}
[2016-04-15 13:13:35] INFO: Optimizer - Training score at iteration 3250: {'loss': array(3.478565216064453, dtype=float32), 'mse': array(1.8650912046432495, dtype=float32)}
[2016-04-15 13:14:27] INFO: Optimizer - Training score at iteration 3300: {'loss': array(1.7409676313400269, dtype=float32), 'mse': array(1.3194572925567627, dtype=float32)}
[2016-04-15 13:15:19] INFO: Optimizer - Training score at iteration 3350: {'loss': array(1.0467818975448608, dtype=float32), 'mse': array(1.0231236219406128, dtype=float32)}
[2016-04-15 13:16:28] INFO: Optimizer - Mean loss values for validation at iteration 3396 is: {'loss': 2.7949743, 'mse': 1.6020783}
[2016-04-15 13:16:32] INFO: Optimizer - Training score at iteration 3400: {'loss': array(2.875065565109253, dtype=float32), 'mse': array(1.6956018209457397, dtype=float32)}
[2016-04-15 13:17:24] INFO: Optimizer - Training score at iteration 3450: {'loss': array(5.846728324890137, dtype=float32), 'mse': array(2.4180009365081787, dtype=float32)}
[2016-04-15 13:18:15] INFO: Optimizer - Training score at iteration 3500: {'loss': array(2.1856343746185303, dtype=float32), 'mse': array(1.4783891439437866, dtype=float32)}
[2016-04-15 13:19:06] INFO: Optimizer - Training score at iteration 3550: {'loss': array(1.048744559288025, dtype=float32), 'mse': array(1.0240823030471802, dtype=float32)}
[2016-04-15 13:20:15] INFO: Optimizer - Mean loss values for validation at iteration 3596 is: {'loss': 2.7825544, 'mse': 1.6129748}
[2016-04-15 13:20:19] INFO: Optimizer - Training score at iteration 3600: {'loss': array(2.461890459060669, dtype=float32), 'mse': array(1.5690412521362305, dtype=float32)}
[2016-04-15 13:21:10] INFO: Optimizer - Training score at iteration 3650: {'loss': array(2.9446334838867188, dtype=float32), 'mse': array(1.7159934043884277, dtype=float32)}
[2016-04-15 13:22:02] INFO: Optimizer - Training score at iteration 3700: {'loss': array(1.667388916015625, dtype=float32), 'mse': array(1.2912741899490356, dtype=float32)}
[2016-04-15 13:22:54] INFO: Optimizer - Training score at iteration 3750: {'loss': array(2.0587375164031982, dtype=float32), 'mse': array(1.4348301887512207, dtype=float32)}
[2016-04-15 13:24:03] INFO: Optimizer - Mean loss values for validation at iteration 3796 is: {'loss': 2.7541695, 'mse': 1.6021954}
[2016-04-15 13:24:06] INFO: Optimizer - Training score at iteration 3800: {'loss': array(0.6491919755935669, dtype=float32), 'mse': array(0.8057245016098022, dtype=float32)}
[2016-04-15 13:24:58] INFO: Optimizer - Training score at iteration 3850: {'loss': array(2.189539909362793, dtype=float32), 'mse': array(1.4797093868255615, dtype=float32)}
[2016-04-15 13:25:49] INFO: Optimizer - Training score at iteration 3900: {'loss': array(4.219930171966553, dtype=float32), 'mse': array(2.0542469024658203, dtype=float32)}
[2016-04-15 13:26:41] INFO: Optimizer - Training score at iteration 3950: {'loss': array(1.0025628805160522, dtype=float32), 'mse': array(1.0012805461883545, dtype=float32)}
[2016-04-15 13:27:50] INFO: Optimizer - Mean loss values for validation at iteration 3996 is: {'loss': 2.7558253, 'mse': 1.6055255}
[2016-04-15 13:27:54] INFO: Optimizer - Training score at iteration 4000: {'loss': array(4.58544397354126, dtype=float32), 'mse': array(2.1413650512695312, dtype=float32)}
[2016-04-15 13:28:45] INFO: Optimizer - Training score at iteration 4050: {'loss': array(1.764044165611267, dtype=float32), 'mse': array(1.328173279762268, dtype=float32)}
[2016-04-15 13:29:36] INFO: Optimizer - Training score at iteration 4100: {'loss': array(0.8692458271980286, dtype=float32), 'mse': array(0.9323335289955139, dtype=float32)}
[2016-04-15 13:30:28] INFO: Optimizer - Training score at iteration 4150: {'loss': array(1.6209144592285156, dtype=float32), 'mse': array(1.2731513977050781, dtype=float32)}
[2016-04-15 13:31:36] INFO: Optimizer - Mean loss values for validation at iteration 4195 is: {'loss': 2.7220306, 'mse': 1.5880781}
[2016-04-15 13:31:41] INFO: Optimizer - Training score at iteration 4200: {'loss': array(1.2763537168502808, dtype=float32), 'mse': array(1.1297582387924194, dtype=float32)}
[2016-04-15 13:32:32] INFO: Optimizer - Training score at iteration 4250: {'loss': array(4.529118061065674, dtype=float32), 'mse': array(2.1281723976135254, dtype=float32)}
[2016-04-15 13:33:24] INFO: Optimizer - Training score at iteration 4300: {'loss': array(0.9983643889427185, dtype=float32), 'mse': array(0.999181866645813, dtype=float32)}
[2016-04-15 13:34:15] INFO: Optimizer - Training score at iteration 4350: {'loss': array(2.4426980018615723, dtype=float32), 'mse': array(1.5629132986068726, dtype=float32)}
[2016-04-15 13:35:23] INFO: Optimizer - Mean loss values for validation at iteration 4395 is: {'loss': 2.7170451, 'mse': 1.5902046}
[2016-04-15 13:35:28] INFO: Optimizer - Training score at iteration 4400: {'loss': array(7.313144683837891, dtype=float32), 'mse': array(2.704282522201538, dtype=float32)}
[2016-04-15 13:36:19] INFO: Optimizer - Training score at iteration 4450: {'loss': array(5.544116020202637, dtype=float32), 'mse': array(2.3545947074890137, dtype=float32)}
[2016-04-15 13:37:11] INFO: Optimizer - Training score at iteration 4500: {'loss': array(3.2680790424346924, dtype=float32), 'mse': array(1.8077828884124756, dtype=float32)}
[2016-04-15 13:38:02] INFO: Optimizer - Training score at iteration 4550: {'loss': array(2.557941198348999, dtype=float32), 'mse': array(1.5993565320968628, dtype=float32)}
[2016-04-15 13:39:10] INFO: Optimizer - Mean loss values for validation at iteration 4595 is: {'loss': 3.0098283, 'mse': 1.6986328}
[2016-04-15 13:39:15] INFO: Optimizer - Training score at iteration 4600: {'loss': array(3.304504156112671, dtype=float32), 'mse': array(1.8178294897079468, dtype=float32)}
[2016-04-15 13:40:07] INFO: Optimizer - Training score at iteration 4650: {'loss': array(2.374776601791382, dtype=float32), 'mse': array(1.541031002998352, dtype=float32)}
[2016-04-15 13:40:58] INFO: Optimizer - Training score at iteration 4700: {'loss': array(1.2189159393310547, dtype=float32), 'mse': array(1.1040452718734741, dtype=float32)}
[2016-04-15 13:41:50] INFO: Optimizer - Training score at iteration 4750: {'loss': array(2.023629903793335, dtype=float32), 'mse': array(1.4225435256958008, dtype=float32)}
[2016-04-15 13:42:57] INFO: Optimizer - Mean loss values for validation at iteration 4795 is: {'loss': 2.7334402, 'mse': 1.6023519}
[2016-04-15 13:43:02] INFO: Optimizer - Training score at iteration 4800: {'loss': array(1.8386577367782593, dtype=float32), 'mse': array(1.355971097946167, dtype=float32)}
[2016-04-15 13:43:54] INFO: Optimizer - Training score at iteration 4850: {'loss': array(1.8738847970962524, dtype=float32), 'mse': array(1.3688991069793701, dtype=float32)}
[2016-04-15 13:44:45] INFO: Optimizer - Training score at iteration 4900: {'loss': array(0.9289280772209167, dtype=float32), 'mse': array(0.9638091325759888, dtype=float32)}
[2016-04-15 13:45:37] INFO: Optimizer - Training score at iteration 4950: {'loss': array(1.5488895177841187, dtype=float32), 'mse': array(1.2445439100265503, dtype=float32)}
[2016-04-15 13:46:44] INFO: Optimizer - Mean loss values for validation at iteration 4995 is: {'loss': 2.6995893, 'mse': 1.5741982}
[2016-04-15 13:46:49] INFO: Optimizer - Training score at iteration 5000: {'loss': array(3.9133529663085938, dtype=float32), 'mse': array(1.9782196283340454, dtype=float32)}
[2016-04-15 13:47:41] INFO: Optimizer - Training score at iteration 5050: {'loss': array(1.178904414176941, dtype=float32), 'mse': array(1.0857735872268677, dtype=float32)}
[2016-04-15 13:48:32] INFO: Optimizer - Training score at iteration 5100: {'loss': array(1.0133557319641113, dtype=float32), 'mse': array(1.0066556930541992, dtype=float32)}
[2016-04-15 13:49:24] INFO: Optimizer - Training score at iteration 5150: {'loss': array(2.6047146320343018, dtype=float32), 'mse': array(1.61391282081604, dtype=float32)}
[2016-04-15 13:50:30] INFO: Optimizer - Mean loss values for validation at iteration 5194 is: {'loss': 2.6568432, 'mse': 1.5697215}
[2016-04-15 13:50:36] INFO: Optimizer - Training score at iteration 5200: {'loss': array(2.5717933177948, dtype=float32), 'mse': array(1.603681206703186, dtype=float32)}
[2016-04-15 13:51:28] INFO: Optimizer - Training score at iteration 5250: {'loss': array(0.7891891002655029, dtype=float32), 'mse': array(0.8883631229400635, dtype=float32)}
[2016-04-15 13:52:19] INFO: Optimizer - Training score at iteration 5300: {'loss': array(5.454504013061523, dtype=float32), 'mse': array(2.3354878425598145, dtype=float32)}
[2016-04-15 13:53:11] INFO: Optimizer - Training score at iteration 5350: {'loss': array(2.292912006378174, dtype=float32), 'mse': array(1.5142364501953125, dtype=float32)}
[2016-04-15 13:54:18] INFO: Optimizer - Mean loss values for validation at iteration 5394 is: {'loss': 2.6389468, 'mse': 1.5614171}
[2016-04-15 13:54:23] INFO: Optimizer - Training score at iteration 5400: {'loss': array(1.7653192281723022, dtype=float32), 'mse': array(1.32865309715271, dtype=float32)}
[2016-04-15 13:55:15] INFO: Optimizer - Training score at iteration 5450: {'loss': array(1.98757803440094, dtype=float32), 'mse': array(1.4098148345947266, dtype=float32)}
[2016-04-15 13:56:06] INFO: Optimizer - Training score at iteration 5500: {'loss': array(4.998775482177734, dtype=float32), 'mse': array(2.2357943058013916, dtype=float32)}
[2016-04-15 13:56:58] INFO: Optimizer - Training score at iteration 5550: {'loss': array(2.54329252243042, dtype=float32), 'mse': array(1.5947703123092651, dtype=float32)}
[2016-04-15 13:58:05] INFO: Optimizer - Mean loss values for validation at iteration 5594 is: {'loss': 2.6411781, 'mse': 1.5715328}
[2016-04-15 13:58:11] INFO: Optimizer - Training score at iteration 5600: {'loss': array(3.3825206756591797, dtype=float32), 'mse': array(1.839163064956665, dtype=float32)}
[2016-04-15 13:59:02] INFO: Optimizer - Training score at iteration 5650: {'loss': array(2.185026168823242, dtype=float32), 'mse': array(1.478183388710022, dtype=float32)}
[2016-04-15 13:59:54] INFO: Optimizer - Training score at iteration 5700: {'loss': array(1.9164535999298096, dtype=float32), 'mse': array(1.3843603134155273, dtype=float32)}
[2016-04-15 14:00:45] INFO: Optimizer - Training score at iteration 5750: {'loss': array(1.4647948741912842, dtype=float32), 'mse': array(1.210287094116211, dtype=float32)}
[2016-04-15 14:01:52] INFO: Optimizer - Mean loss values for validation at iteration 5794 is: {'loss': 2.6571846, 'mse': 1.5814924}
[2016-04-15 14:01:58] INFO: Optimizer - Training score at iteration 5800: {'loss': array(1.757467269897461, dtype=float32), 'mse': array(1.3256950378417969, dtype=float32)}
[2016-04-15 14:02:49] INFO: Optimizer - Training score at iteration 5850: {'loss': array(1.1162073612213135, dtype=float32), 'mse': array(1.0565071105957031, dtype=float32)}
[2016-04-15 14:03:41] INFO: Optimizer - Training score at iteration 5900: {'loss': array(1.0208338499069214, dtype=float32), 'mse': array(1.010363221168518, dtype=float32)}
[2016-04-15 14:04:33] INFO: Optimizer - Training score at iteration 5950: {'loss': array(2.052830219268799, dtype=float32), 'mse': array(1.4327701330184937, dtype=float32)}
[2016-04-15 14:05:38] INFO: Optimizer - Mean loss values for validation at iteration 5993 is: {'loss': 2.6837602, 'mse': 1.5651491}
[2016-04-15 14:05:45] INFO: Optimizer - Training score at iteration 6000: {'loss': array(2.9813320636749268, dtype=float32), 'mse': array(1.7266534566879272, dtype=float32)}
[2016-04-15 14:05:45] INFO: Optimizer - Saving intermediate model state
[2016-04-15 14:08:15] INFO: Graph - Model file saved as: ../data/vnet2_iter_6000.zip
[2016-04-15 14:09:06] INFO: Optimizer - Training score at iteration 6050: {'loss': array(3.8653500080108643, dtype=float32), 'mse': array(1.966049313545227, dtype=float32)}
[2016-04-15 14:09:58] INFO: Optimizer - Training score at iteration 6100: {'loss': array(2.1321327686309814, dtype=float32), 'mse': array(1.4601824283599854, dtype=float32)}
[2016-04-15 14:10:51] INFO: Optimizer - Training score at iteration 6150: {'loss': array(0.9187813997268677, dtype=float32), 'mse': array(0.958530843257904, dtype=float32)}
[2016-04-15 14:11:25] INFO: Pipeline - All commands have been dispatched
[2016-04-15 14:11:57] INFO: Optimizer - Mean loss values for validation at iteration 6193 is: {'loss': 2.5589616, 'mse': 1.5380768}
[2016-04-15 14:12:04] INFO: Optimizer - Training score at iteration 6200: {'loss': array(1.3876805305480957, dtype=float32), 'mse': array(1.1779985427856445, dtype=float32)}
[2016-04-15 14:12:56] INFO: Optimizer - Training score at iteration 6250: {'loss': array(0.9649991989135742, dtype=float32), 'mse': array(0.9823437333106995, dtype=float32)}
[2016-04-15 14:13:48] INFO: Optimizer - Training score at iteration 6300: {'loss': array(2.944833278656006, dtype=float32), 'mse': array(1.7160515785217285, dtype=float32)}
[2016-04-15 14:14:39] INFO: Optimizer - Training score at iteration 6350: {'loss': array(2.6850812435150146, dtype=float32), 'mse': array(1.6386216878890991, dtype=float32)}
[2016-04-15 14:15:45] INFO: Optimizer - Mean loss values for validation at iteration 6393 is: {'loss': 2.5654776, 'mse': 1.5353386}
[2016-04-15 14:15:52] INFO: Optimizer - Training score at iteration 6400: {'loss': array(2.0865869522094727, dtype=float32), 'mse': array(1.444502353668213, dtype=float32)}
[2016-04-15 14:16:43] INFO: Optimizer - Training score at iteration 6450: {'loss': array(0.6583791971206665, dtype=float32), 'mse': array(0.8114056587219238, dtype=float32)}
[2016-04-15 14:17:27] INFO: Pipeline - Complete signal received.
[2016-04-15 14:17:27] INFO: Pipeline - Stopping.

In [4]:
%matplotlib inline
import matplotlib.pyplot as plt
l = np.array([s["loss"] for s in optimizer.losses])
e = np.array([s["mse"] for s in optimizer.losses])
print l.mean()
plt.plot(l)
plt.show()
plt.plot(e)
plt.show()


2.80378

In [ ]: