def __init__(
        self, input_size, hidden_size, output_size, init_weight=0.01):
    """Build a two-hidden-affine network: Hidden1 -> ReLU1 -> Hidden2.

    Weights are drawn from a standard normal distribution scaled by
    ``init_weight``; biases start at zero.  The final
    ``SoftmaxWithLossLayer`` is kept separate from ``self.layers`` so the
    forward pass can be run with or without the loss.

    Args:
        input_size: Number of input features.
        hidden_size: Width of the single hidden layer.
        output_size: Number of output classes/units.
        init_weight: Scale factor for the Gaussian weight init.
    """
    self.input_size = input_size

    # Parameter store; the layer objects below hold references to these
    # same arrays, so optimizer updates to self.params flow into the layers.
    w1 = init_weight * np.random.randn(input_size, hidden_size)
    w2 = init_weight * np.random.randn(hidden_size, output_size)
    self.params = {
        'W1': w1,
        'b1': np.zeros(hidden_size),
        'W2': w2,
        'b2': np.zeros(output_size),
    }

    # Ordered so iteration over self.layers is the forward-pass order.
    self.layers = OrderedDict([
        ('Hidden1', HiddenLayer(self.params['W1'], self.params['b1'])),
        ('ReLU1', ReLULayer()),
        ('Hidden2', HiddenLayer(self.params['W2'], self.params['b2'])),
    ])
    self.lastLayer = SoftmaxWithLossLayer()