In [1]:
import sys
sys.path.append('..')

from deepgraph.utils.logging import log
from deepgraph.utils.common import batch_parallel, ConfigMixin, shuffle_in_unison_inplace, pickle_dump
from deepgraph.utils.image import batch_pad_mirror, rotate_transformer_scalar_float32, rotate_transformer_rgb_uint8
from deepgraph.constants import *
from deepgraph.conf import rng
from deepgraph.nn.core import Dropout

from deepgraph.pipeline import *


Using gpu device 0: GeForce GTX TITAN X (CNMeM is enabled with initial size: 85.0% of memory, CuDNN 3007)

  _____                _____                 _
 |  _  \              |  __ \               | |
 | | | |___  ___ _ __ | |  \/_ __ __ _ _ __ | |__
 | | | / _ \/ _ \ '_ \| | __| '__/ _` | '_ \| '_ \
 | |/ /  __/  __/ |_) | |_\ \ | | (_| | |_) | | | |
 |___/ \___|\___| .__/ \____/_|  \__,_| .__/|_| |_|
                | |                   | |
                |_|                   |_|


Available on GitHub: https://github.com/sebastian-schlecht/deepgraph


In [2]:
import numpy as np

def build_checkerboard(w, h):
    re = np.r_[w * [0, 1]]        # even-numbered rows
    ro = np.r_[w * [1, 0]]        # odd-numbered rows
    return np.row_stack(h * (re, ro))

class Checkerboard(Processor):
    def __init__(self, name, shapes, config, buffer_size=10):
        super(Checkerboard, self).__init__(name, shapes, config, buffer_size)
        self.mean = None

    def init(self):
        pass

    def process(self):
        packet = self.pull()
        # Return if no data is there
        if not packet:
            return False

        # Do processing
        log("Transformer - Processing data", LOG_LEVEL_VERBOSE)

        # Synthetic test input for the conv autoencoder: two opposite quadrants set to 1
        # data = build_checkerboard(304/4, 228/4).repeat(2, axis=0).repeat(2, axis=1).reshape(1, 228, 304).astype(np.float32)
        data = np.zeros((228, 304), dtype=np.float32)
        data[0:114, 0:152] = 1.0
        data[114:, 152:] = 1.0

        data = data.reshape(1, 228, 304)
        data = np.repeat(data[np.newaxis, :, :, :], 10, axis=0)
        # Autoencoder setup: the target is the input itself
        label = data.copy()

        # Try to push into the queue as long as the thread should not terminate
        self.push(Packet(identifier=packet.id, phase=packet.phase, num=2, data=(data, label)))
        return True
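
For reference, a quick sanity-check cell (illustrative, not part of the original run) for the helper and the synthetic chunk: the chunk shape matches the (10, 1, 228, 304) transfer shape configured in the training cell below, and exactly half of the pixels are set to 1.

In [ ]:
# Helper output: one even and one odd row per h, each of width 2*w
print(build_checkerboard(2, 1))
# [[0 1 0 1]
#  [1 0 1 0]]

# Synthetic chunk as produced in Checkerboard.process()
data = np.zeros((228, 304), dtype=np.float32)
data[0:114, 0:152] = 1.0
data[114:, 152:] = 1.0
data = np.repeat(data.reshape(1, 228, 304)[np.newaxis, :, :, :], 10, axis=0)
print(data.shape)   # (10, 1, 228, 304)
print(data.mean())  # 0.5 -- half the pixels are 1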

In [7]:
from theano.tensor.nnet import relu

from deepgraph.graph import *
from deepgraph.nn.core import *
from deepgraph.nn.conv import *
from deepgraph.nn.loss import *
from deepgraph.solver import *
from deepgraph.nn.init import *

from deepgraph.pipeline import Optimizer, H5DBLoader, Pipeline


def build_graph():
    graph = Graph("unet")

    data            = Data(graph, "data", T.ftensor4, shape=(-1, 1, 228, 304))
    label           = Data(graph, "label", T.ftensor4, shape=(-1, 1, 228, 304), config={
        "phase": PHASE_TRAIN
    })
    
    conv_c1_1     = Conv2D(graph, "conv_c1_1", config={
            "channels": 64,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        }
    )
    conv_c1_2     = Conv2D(graph, "conv_c1_2", config={
            "channels": 64,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        }
    )
    pool_c1 = Pool(graph, "pool_c0", config={
        "kernel": (2, 2)
    })
    
    conv_c2_1     = Conv2D(graph, "conv_c2_1", config={
            "channels": 128,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        }
    )
    conv_c2_2     = Conv2D(graph, "conv_c2_2", config={
            "channels": 128,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        }
    )
    
    
    up_e2 = Upsample(graph, "up_e2", config={
            "kernel": (2, 2)
    })
    up_conv_e2 = Conv2D(graph, "up_conv_e2", config={
            "channels": 64,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": None,
            "weight_filler": xavier(),
            "bias_filler": constant(0)
        }
    )
    
    concat_1 = Concatenate(graph, "concat_1", config={
            "axis": 1
    })
    
    conv_e1_1 = Conv2D(graph, "conv_e1_1", config={
            "channels": 64,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        }
    )
    conv_e1_2 = Conv2D(graph, "conv_e1_2", config={
            "channels": 64,
            "kernel": (3, 3),
            "border_mode": 1,
            "activation": relu,
            "weight_filler": xavier(gain="relu"),
            "bias_filler": constant(0)
        }
    )
    conv_e_f = Conv2D(graph, "conv_e_f", config={
            "channels": 1,
            "kernel": (1, 1),
            "activation": None,
            "weight_filler": xavier(),
            "bias_filler": constant(0.1),
            "is_output": True
        }
    )

    loss            = EuclideanLoss(graph, "loss")

    error = MSE(graph, "mse", config={
        "root": True,
        "is_output": True,
        "phase": PHASE_TRAIN
    })

    # Connect: contracting path
    data.connect(conv_c1_1)
    conv_c1_1.connect(conv_c1_2)
    conv_c1_2.connect(concat_1)   # skip connection into the expanding path
    conv_c1_2.connect(pool_c1)
    pool_c1.connect(conv_c2_1)
    conv_c2_1.connect(conv_c2_2)

    # Expanding path
    conv_c2_2.connect(up_e2)
    up_e2.connect(up_conv_e2)
    up_conv_e2.connect(concat_1)
    concat_1.connect(conv_e1_1)
    conv_e1_1.connect(conv_e1_2)
    conv_e1_2.connect(conv_e_f)

    # Outputs: loss drives training, MSE is tracked for monitoring
    conv_e_f.connect(loss)
    conv_e_f.connect(error)

    label.connect(loss)
    label.connect(error)

    return graph
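
The key detail is the skip connection: conv_c1_2 feeds both the pooling path and concat_1, so the expanding path sees the full-resolution encoder features. The shape log below reports concat_1 with 128 channels because the skip branch (conv_c1_2, 64 channels at 228x304) and the upsampled decoder branch (up_conv_e2, also 64 channels at 228x304) are stacked along axis 1. A minimal numpy sketch of that shape arithmetic (illustrative only, not the deepgraph API):

In [ ]:
# Concatenation along axis 1 (the channel axis), as concat_1 does
skip = np.zeros((5, 64, 228, 304), dtype=np.float32)  # conv_c1_2 output
up = np.zeros((5, 64, 228, 304), dtype=np.float32)    # up_conv_e2 output
merged = np.concatenate((skip, up), axis=1)
print(merged.shape)  # (5, 128, 228, 304)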

In [4]:
if __name__ == "__main__":

    batch_size = 5
    chunk_size = 2*batch_size
    transfer_shape = ((chunk_size, 1, 228, 304), (chunk_size, 1, 228, 304))

    g = build_graph()

    # Build the training pipeline
    cb = Checkerboard("cb", transfer_shape, config={})
    optimizer = Optimizer("opt", g, transfer_shape, config={
        "batch_size":  batch_size,
        "chunk_size": chunk_size,
        "learning_rate": 0.001,
        "momentum": 0.9,
        "weight_decay": 0,
        "print_freq": 100,
        "save_freq": 1000,
        # "weights": "../data/alexnet_combined_iter_10000.zip",
        "save_prefix": "../data/unet_test_two_paths_only_checkerboard"
    })

    p = Pipeline(config={
        "validation_frequency": 50,
        "cycles": 500 # 2 minibatches per cycle = 1000 iters
    })
    p.add(cb)
    p.add(optimizer)
    p.run()
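
The pipeline chains the Checkerboard processor into the Optimizer: each stage's process() pulls a Packet from its predecessor and pushes its result downstream. With chunk_size = 10 and batch_size = 5, every cycle yields two minibatches, so the 500 cycles correspond to the 1000 iterations seen in the log below.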


[2016-04-12 10:04:59] INFO: Pipeline - Starting computation
[2016-04-12 10:04:59] INFO: Graph - Setting up graph
[2016-04-12 10:04:59] INFO: Node - data has shape (-1, 1, 228, 304)
[2016-04-12 10:04:59] INFO: Node - label has shape (-1, 1, 228, 304)
[2016-04-12 10:04:59] INFO: Node - conv_c1_1 has shape (-1, 64, 228, 304)
[2016-04-12 10:04:59] INFO: Node - conv_c1_2 has shape (-1, 64, 228, 304)
[2016-04-12 10:04:59] INFO: Node - pool_c0 has shape (-1, 64, 114, 152)
[2016-04-12 10:04:59] INFO: Node - conv_c2_1 has shape (-1, 128, 114, 152)
[2016-04-12 10:04:59] INFO: Node - conv_c2_2 has shape (-1, 128, 114, 152)
[2016-04-12 10:04:59] INFO: Node - up_e2 has shape (-1, 128, 228, 304)
[2016-04-12 10:04:59] INFO: Node - up_conv_e2 has shape (-1, 64, 228, 304)
[2016-04-12 10:04:59] INFO: Node - concat_1 has shape (-1, 128, 228, 304)
[2016-04-12 10:04:59] INFO: Node - conv_e1_1 has shape (-1, 64, 228, 304)
[2016-04-12 10:04:59] INFO: Node - conv_e1_2 has shape (-1, 64, 228, 304)
[2016-04-12 10:04:59] INFO: Node - conv_e_f has shape (-1, 1, 228, 304)
[2016-04-12 10:04:59] INFO: Node - loss has shape (1,)
[2016-04-12 10:04:59] INFO: Node - mse has shape (1,)
[2016-04-12 10:05:00] INFO: Graph - Invoking Theano compiler
[2016-04-12 10:05:11] INFO: Optimizer - Compilation finished
[2016-04-12 10:06:00] INFO: Optimizer - Training score at iteration 100: {'loss': array(0.006321268156170845, dtype=float32), 'mse': array(0.07950640469789505, dtype=float32)}
[2016-04-12 10:06:00] INFO: Optimizer - Mean loss values for validation at iteration 100 is: {'loss': 0.0062991846, 'mse': 0.079367399}
[2016-04-12 10:06:49] INFO: Optimizer - Training score at iteration 200: {'loss': array(0.004687233362346888, dtype=float32), 'mse': array(0.06846337020397186, dtype=float32)}
[2016-04-12 10:06:50] INFO: Optimizer - Mean loss values for validation at iteration 200 is: {'loss': 0.0046745148, 'mse': 0.068370424}
[2016-04-12 10:07:39] INFO: Optimizer - Training score at iteration 300: {'loss': array(0.0036131320521235466, dtype=float32), 'mse': array(0.06010933220386505, dtype=float32)}
[2016-04-12 10:07:40] INFO: Optimizer - Mean loss values for validation at iteration 300 is: {'loss': 0.0036044579, 'mse': 0.060037136}
[2016-04-12 10:08:29] INFO: Optimizer - Training score at iteration 400: {'loss': array(0.0028670369647443295, dtype=float32), 'mse': array(0.05354471877217293, dtype=float32)}
[2016-04-12 10:08:30] INFO: Optimizer - Mean loss values for validation at iteration 400 is: {'loss': 0.0028606611, 'mse': 0.053485148}
[2016-04-12 10:09:19] INFO: Optimizer - Training score at iteration 500: {'loss': array(0.0023098746314644814, dtype=float32), 'mse': array(0.048061154782772064, dtype=float32)}
[2016-04-12 10:09:19] INFO: Optimizer - Mean loss values for validation at iteration 500 is: {'loss': 0.0023048681, 'mse': 0.048009042}
[2016-04-12 10:10:08] INFO: Optimizer - Training score at iteration 600: {'loss': array(0.0018436993705108762, dtype=float32), 'mse': array(0.042938318103551865, dtype=float32)}
[2016-04-12 10:10:09] INFO: Optimizer - Mean loss values for validation at iteration 600 is: {'loss': 0.0018395841, 'mse': 0.042890374}
[2016-04-12 10:10:58] INFO: Optimizer - Training score at iteration 700: {'loss': array(0.0014639387372881174, dtype=float32), 'mse': array(0.038261450827121735, dtype=float32)}
[2016-04-12 10:10:58] INFO: Optimizer - Mean loss values for validation at iteration 700 is: {'loss': 0.0014604795, 'mse': 0.038216218}
[2016-04-12 10:11:47] INFO: Optimizer - Training score at iteration 800: {'loss': array(0.001163272769190371, dtype=float32), 'mse': array(0.0341067835688591, dtype=float32)}
[2016-04-12 10:11:47] INFO: Optimizer - Mean loss values for validation at iteration 800 is: {'loss': 0.0011606845, 'mse': 0.034068819}
[2016-04-12 10:12:36] INFO: Optimizer - Training score at iteration 900: {'loss': array(0.0009355314541608095, dtype=float32), 'mse': array(0.03058645874261856, dtype=float32)}
[2016-04-12 10:12:37] INFO: Optimizer - Mean loss values for validation at iteration 900 is: {'loss': 0.00093351543, 'mse': 0.030553484}
[2016-04-12 10:13:05] INFO: Pipeline - All commands have been dispatched
[2016-04-12 10:13:26] INFO: Optimizer - Training score at iteration 1000: {'loss': array(0.000757161935325712, dtype=float32), 'mse': array(0.027516575530171394, dtype=float32)}
[2016-04-12 10:13:26] INFO: Optimizer - Saving intermediate model state
[2016-04-12 10:13:26] INFO: Graph - Model file saved as: ../data/unet_test_two_paths_only_checkerboard_iter_1000.zip
[2016-04-12 10:13:26] INFO: Optimizer - Mean loss values for validation at iteration 1000 is: {'loss': 0.00075557758, 'mse': 0.027487772}
[2016-04-12 10:13:26] INFO: Pipeline - Complete signal received.
[2016-04-12 10:13:26] INFO: Pipeline - Stopping.
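
Both training and validation losses fall steadily, from roughly 0.0063 at iteration 100 to 0.00076 at iteration 1000. Training and validation values track each other almost exactly, as expected: every chunk the Checkerboard processor emits contains the same constant pattern.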

In [ ]:
# Check gradients
# g.last_updates[16].get_value()

In [8]:
e = build_graph()
e.load_weights("../data/unet_test_two_paths_only_checkerboard_iter_1000.zip")
e.compile(phase=PHASE_INFER)
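
Reloading the saved weights and recompiling for inference reuses build_graph(), so the shape log below matches the training setup exactly.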


[2016-04-12 10:20:43] INFO: Graph - Loading parameters from file '../data/unet_test_two_paths_only_checkerboard_iter_1000.zip'
[2016-04-12 10:20:43] INFO: Graph - Setting up graph
[2016-04-12 10:20:43] INFO: Node - data has shape (-1, 1, 228, 304)
[2016-04-12 10:20:43] INFO: Node - label has shape (-1, 1, 228, 304)
[2016-04-12 10:20:43] INFO: Node - conv_c1_1 has shape (-1, 64, 228, 304)
[2016-04-12 10:20:43] INFO: Node - conv_c1_2 has shape (-1, 64, 228, 304)
[2016-04-12 10:20:43] INFO: Node - pool_c0 has shape (-1, 64, 114, 152)
[2016-04-12 10:20:43] INFO: Node - conv_c2_1 has shape (-1, 128, 114, 152)
[2016-04-12 10:20:43] INFO: Node - conv_c2_2 has shape (-1, 128, 114, 152)
[2016-04-12 10:20:43] INFO: Node - up_e2 has shape (-1, 128, 228, 304)
[2016-04-12 10:20:43] INFO: Node - up_conv_e2 has shape (-1, 64, 228, 304)
[2016-04-12 10:20:43] INFO: Node - concat_1 has shape (-1, 128, 228, 304)
[2016-04-12 10:20:43] INFO: Node - conv_e1_1 has shape (-1, 64, 228, 304)
[2016-04-12 10:20:43] INFO: Node - conv_e1_2 has shape (-1, 64, 228, 304)
[2016-04-12 10:20:43] INFO: Node - conv_e_f has shape (-1, 1, 228, 304)
[2016-04-12 10:20:43] INFO: Node - loss has shape (1,)
[2016-04-12 10:20:43] INFO: Node - mse has shape (1,)
[2016-04-12 10:20:44] INFO: Graph - Invoking Theano compiler

In [12]:
%matplotlib inline
import matplotlib.pyplot as plt
from deepgraph.nn.core import Dropout
# Disable dropout for inference
Dropout.set_dp_off()

# Rebuild the same synthetic pattern the Checkerboard processor fed during training,
# this time as a single-sample batch
data = np.zeros((228, 304), dtype=np.float32)
data[0:114, 0:152] = 1.0
data[114:, 152:] = 1.0

data = data.reshape(1, 228, 304)
data = np.repeat(data[np.newaxis, :, :, :], 1, axis=0)

# Run inference and show the reconstruction next to the input
res = e.infer([data])
o = res["conv_e_f"].squeeze()
plt.imshow(o)
print(o.mean())
plt.show()
plt.imshow(data.squeeze())
plt.show()

# Pixel-wise reconstruction error against the input
print(((o - data.squeeze()) ** 2).mean())


0.499659
0.000755582
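
The reconstruction mean of 0.499659 is close to the input mean of 0.5 (exactly half the pixels are 1), and the pixel-wise MSE of 0.000755582 matches the validation loss of 0.00075557758 reported at iteration 1000, as expected since training and inference see the same constant input.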

In [ ]: