0. Dependencies


In [1]:
import json
import typing
import numpy as np

1. Activation


In [2]:
class Activation:
    @staticmethod
    def calculate(data_in: np.ndarray) -> np.ndarray:
        raise NotImplementedError

    @staticmethod
    def derivative(data_in: np.ndarray) -> np.ndarray:
        raise NotImplementedError

    @staticmethod
    def weight_initial(previous_dim: int, dim: typing.Optional[int] = None) -> float:
        raise NotImplementedError

    @classmethod
    def name(cls):
        return cls.__name__.replace('Activation', '')


class ReluActivation(Activation):
    @staticmethod
    def calculate(data_in: np.ndarray) -> np.ndarray:
        return np.maximum(0, data_in)

    @staticmethod
    def derivative(data_in: np.ndarray) -> np.ndarray:
        # np.int was removed from NumPy; use a float mask for the gradient
        return (data_in > 0).astype(float)

    @staticmethod
    def weight_initial(previous_dim: int, dim: typing.Optional[int] = None) -> float:
        return (1. / previous_dim) ** 0.5


class SigmoidActivation(Activation):
    @staticmethod
    def calculate(data_in: np.ndarray) -> np.ndarray:
        return 1.0 / (1.0 + np.exp(-data_in))

    @staticmethod
    def derivative(data_in: np.ndarray) -> np.ndarray:
        sigmoid = SigmoidActivation.calculate(data_in)
        return sigmoid * (1.0 - sigmoid)

    @staticmethod
    def weight_initial(previous_dim: int, dim: typing.Optional[int] = None) -> float:
        return (1. / previous_dim) ** 0.5


class TanhActivation(Activation):
    @staticmethod
    def calculate(data_in: np.ndarray) -> np.ndarray:
        # equivalent to (e^x - e^-x) / (e^x + e^-x), but np.tanh does not
        # overflow for large |x|
        return np.tanh(data_in)

    @staticmethod
    def derivative(data_in: np.ndarray) -> np.ndarray:
        return np.subtract(1.0, np.power(TanhActivation.calculate(data_in), 2))

    @staticmethod
    def weight_initial(previous_dim: int, dim: typing.Optional[int] = None) -> float:
        return (1. / previous_dim) ** 0.5


def ActivationFactory(activation_type: str) -> typing.Type[Activation]:
    # look the class up by name; with eval(), a missing class would raise
    # NameError before the assert could ever fire
    class_name = '{}Activation'.format(activation_type.capitalize())
    activation_class = globals().get(class_name)
    assert activation_class, '{} Not Found'.format(class_name)
    return activation_class
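
A quick sanity check of the factory and the ReLU forward/backward pass (an illustrative snippet, not part of the original run):

relu = ActivationFactory('relu')
x = np.array([-2.0, 0.0, 3.0])
print(relu.calculate(x))    # [0. 0. 3.]
print(relu.derivative(x))   # [0. 0. 1.]
print(relu.name())          # Relu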

2. Demo Data


In [3]:
pass
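
The notebook leaves this cell as a placeholder. A minimal toy dataset one could feed into the net defined below (a hypothetical sketch: two Gaussian blobs, matching the 2-class softmax head; `demo_x` and `demo_y` are illustrative names):

rng = np.random.RandomState(42)
demo_x = np.vstack([
    rng.randn(50, 10) + 1.0,   # class 0: blob centred at +1
    rng.randn(50, 10) - 1.0,   # class 1: blob centred at -1
])
demo_y = np.array([0] * 50 + [1] * 50)  # labels for the 2-class softmax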

3. Define Net


In [4]:
define_net = """
[
  {
    "name": "fc1",
    "type": "FullConnectLayer",
    "parameters": {
        "activation": "relu",
        "neuron_number": 20
    }
  },
  {
    "name": "fc2",
    "type": "FullConnectLayer",
    "parameters": {
        "activation": "relu",
        "neuron_number": 4
    }
  },
  {
    "name": "loss",
    "type": "SoftmaxLayer",
    "parameters": {
        "class_number": 2
    }
  }
]
"""
net_description = json.loads(define_net)

4. Layer


In [5]:
class Layer:
    def __init__(self):
        self.name = ''
        self.activation = ''

    @property
    def type(self) -> str:
        return self.__class__.__name__

    @property
    def description(self) -> dict:
        return {
            'name': self.name,
            'type': self.type,
            'parameters': self.parameters
        }

    @property
    def parameters(self) -> dict:
        return {}

    def init_parameters(self, parameters: dict):
        pass


class FullConnectLayer(Layer):
    def __init__(self):
        super().__init__()

        # ActivationFactory returns a class (all its methods are static),
        # so store the class here as well for consistency
        self.activation = Activation
        self.neuron_number = 0
        self.w = np.array([])
        self.b = np.array([])

    @property
    def parameters(self) -> dict:
        return {
            'activation': self.activation.name(),
            'neuron_number': self.neuron_number,
            'w': self.w.tolist(),
            'b': self.b.tolist()
        }

    def init_parameters(self, parameters: dict):
        self.activation = ActivationFactory(parameters['activation'])
        self.neuron_number = parameters['neuron_number']

        if 'w' in parameters and 'b' in parameters:
            self.w = np.array(parameters['w'])
            self.b = np.array(parameters['b'])


class SoftmaxLayer(Layer):
    def __init__(self):
        super().__init__()

        self.class_number = 0

    @property
    def parameters(self) -> dict:
        return {
            'class_number': self.class_number
        }

    def init_parameters(self, parameters: dict):
        self.class_number = parameters['class_number']


def LayerFactory(description: dict) -> Layer:
    type_name = description['type']

    # look the layer class up by name instead of eval()
    layer_class = globals().get(type_name)
    assert layer_class, '{} Not Found'.format(type_name)

    layer = layer_class()
    layer.name = description['name']
    layer.init_parameters(description['parameters'])

    return layer
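
A small usage sketch of the factory (illustrative; the 'demo' layer is hypothetical):

fc = LayerFactory({
    'name': 'demo',
    'type': 'FullConnectLayer',
    'parameters': {'activation': 'tanh', 'neuron_number': 8}
})
print(fc.name, fc.type, fc.activation.name())  # demo FullConnectLayer Tanh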

5. Net


In [6]:
class Net:
    def __init__(self):
        self.indexes = {}
        self.layers = []

    @property
    def description(self):
        return [layer.description for layer in self.layers]

    def from_description(self, net_description: typing.List[dict]):
        for idx, description in enumerate(net_description):
            name = description['name']
            self.indexes[name] = idx
            self.layers.append(LayerFactory(description))

    def get_layer(self, name: str) -> Layer:
        idx = self.indexes[name]
        return self.layers[idx]

    def get_full_connect_layer(self, name: str) -> FullConnectLayer:
        return self.get_layer(name)

    def get_softmax_layer(self, name: str) -> SoftmaxLayer:
        return self.get_layer(name)

In [7]:
net = Net()
net.from_description(net_description)

net_description = net.description
print(json.dumps(net_description, indent=2))


[
  {
    "name": "fc1",
    "parameters": {
      "w": [],
      "b": [],
      "activation": "Relu",
      "neuron_number": 20
    },
    "type": "FullConnectLayer"
  },
  {
    "name": "fc2",
    "parameters": {
      "w": [],
      "b": [],
      "activation": "Relu",
      "neuron_number": 4
    },
    "type": "FullConnectLayer"
  },
  {
    "name": "loss",
    "parameters": {
      "class_number": 2
    },
    "type": "SoftmaxLayer"
  }
]
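
The typed get_* accessors on Net only narrow the return annotation for editor support; a quick check against the net built above (illustrative):

fc1 = net.get_full_connect_layer('fc1')
print(fc1.neuron_number)                           # 20
print(net.get_softmax_layer('loss').class_number)  # 2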